| text (string, 6-947k chars) | repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) |
|---|---|---|---|---|---|---|
#!/usr/bin/env python
"""
unit test for filters module
author: Michael Grupp
This file is part of evo (github.com/MichaelGrupp/evo).
evo is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
evo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with evo. If not, see <http://www.gnu.org/licenses/>.
"""
import math
import unittest
import numpy as np
from evo.core import filters
from evo.core import lie_algebra as lie
# TODO: clean these up and use proper fixtures.
POSES_1 = [
lie.se3(np.eye(3), np.array([0, 0, 0])),
lie.se3(np.eye(3), np.array([0, 0, 0.5])),
lie.se3(np.eye(3), np.array([0, 0, 0])),
lie.se3(np.eye(3), np.array([0, 0, 1]))
]
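# POSES_1 moves along z through 0 -> 0.5 -> 0 -> 1, so the accumulated path
# length from index 0 to 2 is 0.5 + 0.5 = 1.0 and from index 2 to 3 it is
# 1.0, which the path-based pair filter tests below rely on.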
POSES_2 = [
lie.se3(np.eye(3), np.array([0, 0, 0])),
lie.se3(np.eye(3), np.array([0, 0, 0.5])),
lie.se3(np.eye(3), np.array([0, 0, 0.99])),
lie.se3(np.eye(3), np.array([0, 0, 1.0]))
]
POSES_3 = [
lie.se3(np.eye(3), np.array([0, 0, 0.0])),
lie.se3(np.eye(3), np.array([0, 0, 0.9])),
lie.se3(np.eye(3), np.array([0, 0, 0.99])),
lie.se3(np.eye(3), np.array([0, 0, 0.999])),
lie.se3(np.eye(3), np.array([0, 0, 0.9999])),
lie.se3(np.eye(3), np.array([0, 0, 0.99999])),
lie.se3(np.eye(3), np.array([0, 0, 0.999999])),
lie.se3(np.eye(3), np.array([0, 0, 0.9999999]))
]
POSES_4 = [
lie.se3(np.eye(3), np.array([0, 0, 0])),
lie.se3(np.eye(3), np.array([0, 0, 1])),
lie.se3(np.eye(3), np.array([0, 0, 1])),
lie.se3(np.eye(3), np.array([0, 0, 1]))
]
class TestFilterPairsByPath(unittest.TestCase):
def test_poses1_all_pairs(self):
target_path = 1.0
tol = 0.0
id_pairs = filters.filter_pairs_by_path(POSES_1, target_path, tol,
all_pairs=True)
self.assertEqual(id_pairs, [(0, 2), (2, 3)])
def test_poses1_wrong_target(self):
target_path = 2.5
tol = 0.0
id_pairs = filters.filter_pairs_by_path(POSES_1, target_path, tol,
all_pairs=True)
self.assertEqual(id_pairs, [])
def test_poses2_all_pairs_low_tolerance(self):
target_path = 1.0
tol = 0.001
id_pairs = filters.filter_pairs_by_path(POSES_2, target_path, tol,
all_pairs=True)
self.assertEqual(id_pairs, [(0, 3)])
def test_convergence_all_pairs(self):
target_path = 1.0
tol = 0.2
id_pairs = filters.filter_pairs_by_path(POSES_3, target_path, tol,
all_pairs=True)
self.assertEqual(id_pairs, [(0, 7)])
axis = np.array([1, 0, 0])
POSES_5 = [
lie.se3(lie.so3_exp(axis * 0.0), np.array([0, 0, 0])),
lie.se3(lie.so3_exp(axis * math.pi), np.array([0, 0, 0])),
lie.se3(lie.so3_exp(axis * 0.0), np.array([0, 0, 0])),
lie.se3(lie.so3_exp(axis * math.pi / 3), np.array([0, 0, 0])),
lie.se3(lie.so3_exp(axis * math.pi), np.array([0, 0, 0]))
]
TRANSFORM = lie.random_se3()
POSES_5_TRANSFORMED = [TRANSFORM.dot(p) for p in POSES_5]
axis = np.array([1, 0, 0])
p0 = lie.se3(lie.so3_exp(axis * 0.0), np.array([0, 0, 0]))
pd = lie.se3(lie.so3_exp(axis * (math.pi / 3.)), np.array([1, 2, 3]))
p1 = np.dot(p0, pd)
p2 = np.dot(p1, pd)
p3 = np.dot(p2, pd)
POSES_6 = [p0, p1, p2, p3, p3]
POSES_6_TRANSFORMED = [TRANSFORM.dot(p) for p in POSES_6]
class TestFilterPairsByAngle(unittest.TestCase):
def test_poses5(self):
tol = 0.001
expected_result = [(0, 1), (1, 2), (2, 4)]
# Result should be unaffected by global transformation.
for poses in (POSES_5, POSES_5_TRANSFORMED):
target_angle = math.pi - tol
id_pairs = filters.filter_pairs_by_angle(poses, target_angle, tol,
all_pairs=False)
self.assertEqual(id_pairs, expected_result)
# Check for same result when using degrees:
target_angle = np.rad2deg(target_angle)
id_pairs = filters.filter_pairs_by_angle(poses, target_angle, tol,
all_pairs=False,
degrees=True)
self.assertEqual(id_pairs, expected_result)
def test_poses5_all_pairs(self):
tol = 0.01
expected_result = [(0, 1), (0, 4), (1, 2), (2, 4)]
# Result should be unaffected by global transformation.
for poses in (POSES_5, POSES_5_TRANSFORMED):
target_angle = math.pi
id_pairs = filters.filter_pairs_by_angle(poses, target_angle, tol,
all_pairs=True)
self.assertEqual(id_pairs, expected_result)
# Check for same result when using degrees:
target_angle = np.rad2deg(target_angle)
id_pairs = filters.filter_pairs_by_angle(poses, target_angle, tol,
all_pairs=True,
degrees=True)
self.assertEqual(id_pairs, expected_result)
def test_poses6(self):
tol = 0.001
target_angle = math.pi - tol
expected_result = [(0, 3)]
# Result should be unaffected by global transformation.
for poses in (POSES_6, POSES_6_TRANSFORMED):
id_pairs = filters.filter_pairs_by_angle(poses, target_angle, tol,
all_pairs=False)
self.assertEqual(id_pairs, expected_result)
def test_poses6_all_pairs(self):
target_angle = math.pi
tol = 0.001
expected_result = [(0, 3), (0, 4)]
# Result should be unaffected by global transformation.
for poses in (POSES_6, POSES_6_TRANSFORMED):
id_pairs = filters.filter_pairs_by_angle(poses, target_angle, tol,
all_pairs=True)
self.assertEqual(id_pairs, expected_result)
if __name__ == '__main__':
unittest.main(verbosity=2)
| MichaelGrupp/evo | test/test_filters.py | Python | gpl-3.0 | 6,476 | 0 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""create_table_container
Revision ID: 9fe371393a24
Revises: a9a92eebd9a8
Create Date: 2016-06-12 16:09:35.686539
"""
# revision identifiers, used by Alembic.
revision = '9fe371393a24'
down_revision = 'a9a92eebd9a8'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
import zun.db.sqlalchemy.models
def upgrade():
op.create_table(
'container',
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('project_id', sa.String(length=255), nullable=True),
sa.Column('user_id', sa.String(length=255), nullable=True),
sa.Column('uuid', sa.String(length=36), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('image', sa.String(length=255), nullable=True),
sa.Column('command', sa.String(length=255), nullable=True),
sa.Column('status', sa.String(length=20), nullable=True),
sa.Column('environment', zun.db.sqlalchemy.models.JSONEncodedDict(),
nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('uuid', name='uniq_container0uuid')
)
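
# A matching downgrade (sketch, not part of the original revision): assumes
# the intended reversal of this migration is simply dropping the table.
def downgrade():
    op.drop_table('container')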
| kevin-zhaoshuai/zun | zun/db/sqlalchemy/alembic/versions/9fe371393a24_create_table_container.py | Python | apache-2.0 | 1,820 | 0.001648 |
import datetime
from tracker.models import Member, Report
from django.template.defaultfilters import slugify
import csv
import urllib
import simplejson as json
from dateutil.parser import parse
import time
def update_twitter(branch='house', official=True, batch=1):
if official:
screen_names = [x.official_twitter_name for x in Member.objects.filter(branch=branch, official_twitter_name__isnull=False).order_by('last_name')]
else:
screen_names = [x.campaign_twitter_name for x in Member.objects.filter(branch=branch, campaign_twitter_name__isnull=False).order_by('last_name')]
if batch == 1:
screen_names = screen_names[:100]
elif batch == 2:
screen_names = screen_names[100:200]
elif batch == 3:
screen_names = screen_names[200:300]
elif batch == 4:
screen_names = screen_names[300:400]
elif batch == 5:
screen_names = screen_names[400:]
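    # The batches above keep each request at or below 100 screen names,
    # the per-request cap of the (since retired) v1 users/lookup endpoint.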
url = "http://api.twitter.com/1/users/lookup.json?screen_name=%s" % ",".join(screen_names)
response = urllib.urlopen(url).read()
results = json.loads(response)
for result in results:
if official:
member = Member.objects.get(official_twitter_name__iexact=result['screen_name'])
report, created = Report.objects.get_or_create(member=member, date=datetime.date.today())
report.official_twitter_followers=result['followers_count']
report.official_twitter_updates=result['statuses_count']
report.save()
else:
member = Member.objects.get(campaign_twitter_name__iexact=result['screen_name'])
report, created = Report.objects.get_or_create(member=member, date=datetime.date.today())
report.campaign_twitter_followers=result['followers_count']
report.campaign_twitter_updates=result['statuses_count']
report.save()
def update_facebook(members, token):
for member in members:
print member
report, created = Report.objects.get_or_create(member=member, date=datetime.date.today())
params = {}
params['access_token'] = token
batch = [{'method': 'GET', 'relative_url': str(member.official_facebook_name)}, {'method': 'GET', 'relative_url': str(member.campaign_facebook_name)}]
params['batch'] = [x for x in batch if x['relative_url'] != '']
encoded_params = urllib.urlencode(params)
f = urllib.urlopen("https://graph.facebook.com", encoded_params).read()
results = json.loads(f)
for result in results:
try:
body = json.loads(result['body'])
except:
continue
if body == False:
continue
else:
try:
if str(member.official_facebook_name.lower()) == body['username'].lower():
report.official_facebook_likes= body['likes']
elif str(member.campaign_facebook_name.lower()) == body['username'].lower():
report.campaign_facebook_likes= body['likes']
except:
try:
if member.official_facebook_name == body['id']:
report.official_facebook_likes= body['likes']
elif member.campaign_facebook_name == body['id']:
report.campaign_facebook_likes= body['likes']
except KeyError:
print "No match found for %s" % member
report.save()
time.sleep(3)
def update_member(member, token):
    # `token` is assumed to be a Facebook Graph API access token, as in
    # update_facebook(); only the like counts computed below are stored.
    official_likes = member.facebook_likes(member.official_facebook_name, token)
    campaign_likes = member.facebook_likes(member.campaign_facebook_name, token)
    report, created = Report.objects.get_or_create(member=member, date=datetime.date.today())
    report.official_facebook_likes = official_likes
    report.campaign_facebook_likes = campaign_likes
    report.save()
def load_chamber(chamber):
if chamber == 'senate':
f = open("senate.csv","r")
elif chamber == 'house':
f = open("house.csv","r")
else:
        raise ValueError("Must be house or senate")
rows = csv.DictReader(f, delimiter=',')
for row in rows:
member, created = Member.objects.get_or_create(last_name=row['last'], first_name=row['first'], slug=slugify(row['first']+' '+row['last']), party=row['party'], branch=chamber, state=row['state'], district=row['district'])
if row['username'] != '':
member.official_facebook_name = row['username']
member.save()
elif row['username_campaign'] != '':
member.campaign_facebook_name = row['username_campaign']
member.save()
if row['twitter'] != '':
member.official_twitter_name = row['twitter']
member.save()
def update_from_al():
f = open("congress_upload_9_14_11.csv","r")
rows = csv.DictReader(f, delimiter=',')
for row in rows:
print row['Name']
member, created = Member.objects.get_or_create(bioguide_id=row['bioguide'])
member.date_of_birth = parse(str(row['dob'])).date()
member.race = row['race']
member.gender = row['gender']
member.service = int(row['service'])
member.status = row['status'][0]
member.youtube_name = row['youtube_name']
member.margin_2010 = float(row['margin_2010'])
member.social_networks = int(row['social_networks'])
if row['facebook_10'] == '':
member.facebook_10 = None
else:
member.facebook_10 = int(row['facebook_10'])
member.facebook_status = int(row['facebook_status'])
if row['twitter_10'] == '':
member.twitter_10 = None
else:
member.twitter_10 = int(row['twitter_10'])
member.twitter_status = int(row['twitter_status'])
if row['official_twitter_name'] == '':
member.official_twitter_name = None
else:
member.official_twitter_name = row['official_twitter_name']
if row['campaign_twitter_name'] == '':
member.campaign_twitter_name = None
else:
member.campaign_twitter_name = row['campaign_twitter_name']
        if row['index_10'] == '':
member.index_10 = None
else:
member.index_10 = int(row['index_10'])
member.save()
| dwillis/socialcongress | tracker/utils.py | Python | unlicense | 6,562 | 0.006248 |
"""
===============
Clump Match All
===============
Merge catalogs based on clump label masks
"""
import os
import numpy as _np
import pandas as _pd
import catalog
from .image import sample_bgps_img
def clump_match_water(bgps=[], out_filen='bgps_maser', verbose=False):
"""
Match maser catalog observations to the BGPS. Includes BGPS GBT, Red MSX,
Arcetri, MMB, and HOPS.
    Parameters
    ----------
    bgps : pandas.DataFrame, default []
        BGPS catalog to match to, defaults to read vanilla catalog
    out_filen : string, default 'bgps_maser'
        Name of output catalog, comma separated
verbose : boolean, default False
Print clump and number of matches
Returns
-------
bgps : pd.DataFrame
"""
# read in catalogs
gbt_h2o = catalog.read_gbt_h2o()
rms_h2o = catalog.read_rms_h2o()
arc_val = catalog.read_arcetri_valdettaro()
hops = catalog.read_hops()
if len(bgps) == 0:
bgps = catalog.read_bgps()
# add new columns
new_cols = ['h2o_gbt_f', 'h2o_gbt_n', 'h2o_arc_f', 'h2o_arc_n',
'h2o_hops_f', 'h2o_rms_f', 'h2o_rms_n']
gbt_cols = gbt_h2o.columns.drop(labels=['h2o_glon', 'h2o_glat', 'h2o_f'])
for col in new_cols:
bgps[col] = _np.nan
for col in gbt_cols:
bgps[col] = _np.nan
# make haystacks
gbt_h2o_hs = gbt_h2o[['h2o_glon', 'h2o_glat']].values # galactic
rms_h2o_hs = rms_h2o[['_Glon_y', '_Glat_y']].values # galactic
arc_val_hs = arc_val[['_Glon', '_Glat']].values # galactic
hops_hs = hops[['lWeight_deg', 'bWeight_deg']].values # galactic
# loop through clumps
for cnum in bgps['cnum']:
cnum_select = bgps.cnum == cnum
c_index = _np.argwhere(cnum_select)[0][0]
glat = bgps[cnum_select].glat_cen.values[0]
glon = bgps[cnum_select].glon_cen.values[0]
c_ra = bgps[cnum_select].ra.values[0]
c_dec = bgps[cnum_select].dec.values[0]
# match hops
if ((glat < 0.5) & (glat > -0.5) & ((glon > 290) | (glon < 30))):
hop_match_list = catalog.clump_match(hops_hs, cnum,
coord_type='gal')
bgps['h2o_hops_f'][cnum_select] = len(hop_match_list)
# match bgps gbt
gbt_match_list = catalog.clump_match(gbt_h2o_hs, cnum,
coord_type='gal')
h2o_gbt_num_detects = _np.sum(gbt_h2o.h2o_f.ix[gbt_match_list])
bgps['h2o_gbt_n'][cnum_select] = len(gbt_match_list)
bgps['h2o_gbt_f'][cnum_select] = h2o_gbt_num_detects
if h2o_gbt_num_detects > 0:
max_index = gbt_h2o['h2o_tpk'].ix[gbt_match_list].argmax()
bgps.ix[c_index, gbt_cols] = \
gbt_h2o.ix[gbt_match_list[max_index]]
# match rms h2o
rms_match_list = catalog.clump_match(rms_h2o_hs, cnum,
coord_type='gal')
bgps['h2o_rms_n'][cnum_select] = len(rms_match_list)
bgps['h2o_rms_f'][cnum_select] = \
_np.sum(rms_h2o.h2o_f.ix[rms_match_list])
# match arcetri
arc_match_list = catalog.clump_match(arc_val_hs, cnum,
coord_type='gal')
bgps['h2o_arc_n'][cnum_select] = len(arc_match_list)
bgps['h2o_arc_f'][cnum_select] = \
_np.sum(arc_val.h2o_f.ix[arc_match_list])
if verbose:
print '-- clump {:>4d}'.format(cnum)
bgps['h2o_f'] = _np.nan
bgps['h2o_f'][(bgps.h2o_gbt_f > 0) | (bgps.h2o_arc_f > 0) |
(bgps.h2o_rms_f > 0) | (bgps.h2o_hops_f > 0)] = 1
bgps['h2o_f'][(bgps.h2o_f != 1) & ((bgps.h2o_gbt_f == 0) & (bgps.h2o_gbt_n >
0))] = 0
bgps.to_csv(os.getcwd() + '/' + out_filen + '.csv', index=False)
print '-- Maser catalog file written to {}.csv'.format(out_filen)
return bgps
def clump_match_hii(bgps=[], out_filen='bgps_hii', verbose=False):
"""
Match HII and UCHII catalog observations to the BGPS. Include CORNISH and
HRDS.
    Parameters
    ----------
    bgps : pandas.DataFrame, default []
        BGPS catalog to match to, defaults to read vanilla catalog
    out_filen : string, default 'bgps_hii'
        Name of output catalog, comma separated
verbose : boolean, default False
Print clump and number of matches
Returns
-------
bgps : pd.DataFrame
"""
# read in catalogs
corn = catalog.read_cornish(exten='hii')
if len(bgps) == 0:
bgps = catalog.read_bgps()
# add new columns
new_cols = ['corn_n']
for col in new_cols:
bgps[col] = _np.nan
# make haystacks
corn_hs = corn[['glon', 'glat']].values # galactic
# loop through clumps
for cnum in bgps['cnum']:
cnum_select = bgps.cnum == cnum
glat = bgps[cnum_select].glat_cen.values[0]
glon = bgps[cnum_select].glon_cen.values[0]
c_ra = bgps[cnum_select].ra.values[0]
c_dec = bgps[cnum_select].dec.values[0]
if verbose:
print '-- clump {:>4d}'.format(cnum)
# match cornish
if (glat < 1.0) & (glat > -1.0) & (glon > 9.95) & (glon < 65.55):
corn_match_list = catalog.clump_match(corn_hs, cnum,
coord_type='gal')
bgps['corn_n'][cnum_select] = len(corn_match_list)
bgps.to_csv(os.getcwd() + '/' + out_filen + '.csv', index=False)
print '-- Hii catalog file written to {}.csv'.format(out_filen)
return bgps
def clump_match_ir(bgps=[], out_filen='bgps_ir', verbose=False):
"""
Match IR point source catalog observations to the BGPS. Includes EGO, RMS,
and Robitaille.
    Parameters
    ----------
    bgps : pandas.DataFrame, default []
        BGPS catalog to match to, defaults to read vanilla catalog
    out_filen : string, default 'bgps_ir'
        Name of output catalog, comma separated
verbose : boolean, default False
Print clump and number of matches
Returns
-------
bgps : pd.DataFrame
"""
# read in catalogs
ego = catalog.read_ego()
robit = catalog.read_robitaille()
msx = catalog.read_msx()
if len(bgps) == 0:
bgps = catalog.read_bgps()
# add new columns
    new_cols = ['ego_n', 'msx_n', 'robit_n', 'robit_agb_n', 'robit_yso_n']
for col in new_cols:
bgps[col] = _np.nan
# make haystacks
ego_hs = ego[['_Glon', '_Glat']].values # galactic
robit_hs = robit[['_Glon', '_Glat']].values # galactic
msx_hs = msx[['ra', 'dec']].values # equatorial
# loop through clumps
for cnum in bgps['cnum']:
cnum_select = bgps.cnum == cnum
glat = bgps[cnum_select].glat_cen.values[0]
glon = bgps[cnum_select].glon_cen.values[0]
if verbose:
print '-- clump {:>4d}'.format(cnum)
# match egos
if (glat < 1.05) & (glat > -1.05) & (glon < 65):
ego_match_list = catalog.clump_match(ego_hs, cnum,
coord_type='gal')
bgps['ego_n'][cnum_select] = len(ego_match_list)
# match robit
        if (glon < 65):
robit_agb_match_list = catalog.clump_match(robit_hs, cnum,
coord_type='gal')
robit_yso_match_list = catalog.clump_match(robit_hs, cnum,
coord_type='gal')
bgps['robit_agb_n'][cnum_select] = len(robit_agb_match_list)
bgps['robit_yso_n'][cnum_select] = len(robit_yso_match_list)
# match rms msx
        if (glat < 5) & (glat > -5) & (glon > 10) & (glon < 220):
msx_match_list = catalog.clump_match(msx_hs, cnum,
coord_type='eq')
bgps['msx_n'][cnum_select] = len(msx_match_list)
# TODO add red wise
bgps['ir_f'] = -9
bgps['ir_f'][(bgps.ego_n > 0) | (bgps.msx_n > 0) |
(bgps.robit_n > 0)] = 1
bgps['ir_f'][(bgps.ego_n == 0) & (bgps.msx_n == 0) &
(bgps.robit_n == 0)] = 0
bgps.to_csv(os.getcwd() + '/' + out_filen + '.csv', index=False)
print '-- IR catalog file written to {}.csv'.format(out_filen)
return bgps
def clump_match_molcat(bgps=[], out_filen='bgps_molcat', verbose=False):
"""
Match the BGPS HCO+/N2H+ molecular line survey observations to the BGPS.
Citation: Shirley et al. (2013).
    Parameters
    ----------
    bgps : pandas.DataFrame, default []
        BGPS catalog to match to, defaults to read vanilla catalog
    out_filen : string, default 'bgps_molcat'
        Name of output catalog, comma separated
verbose : boolean, default False
Print clump and number of matches
Returns
-------
bgps : pd.DataFrame
"""
# read in catalogs
molcat = catalog.read_molcat()
if len(bgps) == 0:
bgps = catalog.read_bgps()
# add new columns, molcat cnum clobbers bgps cnum
molcat = molcat.rename(columns={'cnum': 'v1cnum'})
mol_type = {'HCOP': 'hco_tpk', 'N2HP': 'nnh_tpk'}
# add column for intensity of dominant molecule
molcat['mol_int'] = _np.nan
for i in molcat['mol_vlsr_src'].index:
mol_select = molcat.ix[i, 'mol_vlsr_src']
if mol_select in mol_type.keys():
mol_int = molcat.ix[i, mol_type[mol_select]]
molcat.ix[i, 'mol_int'] = mol_int
# columns
molcat_cols = molcat.columns
for col in molcat_cols:
bgps[col] = _np.nan
bgps['mol_mult_n'] = _np.nan
bgps['mol_mult_f'] = _np.nan
bgps['mol_mult_vsep'] = _np.nan
# make haystacks
molcat_hs = molcat[['hht_ra', 'hht_dec']].values # galactic
# loop through clumps
for cnum in bgps['cnum']:
cnum_select = bgps.cnum == cnum
c_index = _np.argwhere(cnum_select)[0][0]
glat = bgps.ix[c_index, 'glat_cen']
glon = bgps.ix[c_index, 'glon_cen']
# match molcat
# TODO mark -9 in outer regions which not observed in v1
if (glon > 7.5) & (glon < 195):
molcat_match_list = catalog.clump_match(molcat_hs, cnum,
coord_type='eq')
bgps['mol_mult_n'][cnum_select] = len(molcat_match_list)
if verbose:
print '-- {} matched to {} pointings'.format(cnum,
len(molcat_match_list))
if len(molcat_match_list) == 1:
bgps.ix[c_index, molcat_cols] = molcat.ix[molcat_match_list[0]]
bgps['mol_mult_f'][cnum_select] = 0
if molcat.ix[molcat_match_list]['mol_vlsr_f'] == 2:
bgps['mol_mult_f'][cnum_select] = 1
elif len(molcat_match_list) > 1:
flags = molcat.ix[molcat_match_list]['mol_vlsr_f'].values
# if multiple component in a single spectra then confused
if 2 in flags:
bgps['mol_mult_f'][cnum_select] = 1
# if only single detection then not confused
elif flags[(flags == 1) | (flags == 3)].shape[0] <= 1:
bgps['mol_mult_f'][cnum_select] = 0
# if only non-detections
elif flags[flags == 0].shape[0] == len(molcat_match_list):
bgps['mol_mult_f'][cnum_select] = 0
print molcat[molcat.mol_vlsr_f ==
0].ix[molcat_match_list]['mol_vlsr_f']
                # if more than one detection or self-absorbed and the
                # velocity separation between the components is more than
                # 5 km/s, mark as confused; otherwise, not confused
elif flags[(flags == 1) | (flags == 3)].shape[0] > 1:
vmin = molcat[(molcat.mol_vlsr_f == 1) |
(molcat.mol_vlsr_f ==
3)].ix[molcat_match_list]['mol_vlsr'].min()
vmax = molcat[(molcat.mol_vlsr_f == 1) |
(molcat.mol_vlsr_f == 3)].ix[molcat_match_list]['mol_vlsr'].max()
vsep = _np.abs(vmin - vmax)
bgps['mol_mult_vsep'][cnum_select] = vsep
if vsep < 5:
bgps['mol_mult_f'][cnum_select] = 0
else:
bgps['mol_mult_f'][cnum_select] = 1
else:
raise Exception("Unexpected number of flags")
# match values for component with peak intensity
max_index = molcat['mol_int'].ix[molcat_match_list].argmax()
bgps.ix[c_index, molcat_cols] = molcat.ix[molcat_match_list[max_index]]
if verbose:
print bgps.ix[c_index, molcat_cols]
bgps.to_csv(os.getcwd() + '/' + out_filen + '.csv', index=False)
print '-- Molcat catalog file written to {}.csv'.format(out_filen)
return bgps
def clump_match_gbt_nh3(bgps=[], out_filen='bgps_nh3', verbose=False):
"""
Match the BGPS GBT NH3 survey observations to the BGPS. Citation:
Dunham et al. (2011), Rosolowsky et al. (in prep.).
    Parameters
    ----------
    bgps : pandas.DataFrame, default []
        BGPS catalog to match to, defaults to read vanilla catalog
    out_filen : string, default 'bgps_nh3'
        Name of output catalog, comma separated
verbose : boolean, default False
Print clump and number of matches
Returns
-------
bgps : pd.DataFrame
"""
# read in catalogs
nh3 = catalog.read_gbt_nh3()
if len(bgps) == 0:
bgps = catalog.read_bgps()
    # add new columns; nh3 columns renamed below to avoid clobbering bgps
nh3_cols = ['GLON', 'GLAT', 'TKIN', 'TKIN_ERR', 'VLSR', 'PK11', 'NOISE11',
'PK22', 'NOISE22', 'PK33', 'NOISE33']
nh3 = nh3[nh3_cols]
nh3 = nh3.rename(columns={s: 'nh3_' + s.lower() for s in nh3_cols})
nh3_cols = ['nh3_' + s.lower() for s in nh3_cols]
for col in nh3_cols:
bgps[col] = _np.nan
bgps['nh3_mult_n'] = _np.nan
# make haystacks
nh3_hs = nh3[['nh3_glon', 'nh3_glat']].values # galactic
# loop through clumps
for cnum in bgps['cnum']:
cnum_select = bgps.cnum == cnum
c_index = _np.argwhere(cnum_select)[0][0]
glat = bgps[cnum_select].glat_cen.values[0]
glon = bgps[cnum_select].glon_cen.values[0]
c_ra = bgps[cnum_select].ra.values[0]
c_dec = bgps[cnum_select].dec.values[0]
# match gbt nh3
if verbose:
print '-- clump {:>4d}'.format(cnum)
if (glon > 7.5) & (glon < 200):
nh3_match_list = catalog.clump_match(nh3_hs, cnum,
coord_type='gal')
bgps['nh3_mult_n'][cnum_select] = len(nh3_match_list)
if len(nh3_match_list) > 0:
max_index = nh3['nh3_pk11'].ix[nh3_match_list].argmax()
bgps.ix[c_index, nh3_cols] = \
nh3.ix[nh3_match_list[max_index]]
# TODO check for multiple components
bgps.to_csv(os.getcwd() + '/' + out_filen + '.csv', index=False)
print '-- NH3 Catalog file written to {}.csv'.format(out_filen)
return bgps
def clump_match_metho(bgps=[], out_filen='bgps_metho', verbose=False):
"""
Match known CH3OH maser catalogs to the BGPS. Citation: Pandian et al.
(2007, 2011), Pestalozzi et al. (2005), Caswell et al. (2010), Green et al.
(2010).
    Parameters
    ----------
    bgps : pandas.DataFrame, default []
        BGPS catalog to match to, defaults to read vanilla catalog
    out_filen : string, default 'bgps_metho'
        Name of output catalog, comma separated
verbose : boolean, default False
Print clump and number of matches
Returns
-------
bgps : pd.DataFrame
"""
# read in catalogs
pandi = catalog.read_cat('pandian11')
pesta = catalog.read_cat('pestalozzi05')
mmb = catalog.read_mmb()
if len(bgps) == 0:
bgps = catalog.read_bgps()
# use general match TODO put BGPS label masks in cache
bgps = clump_match_gen(pandi, bgps=bgps, prefix='pandi',
verbose=verbose)
bgps = clump_match_gen(pesta, bgps=bgps, prefix='pesta',
verbose=verbose)
bgps = clump_match_gen(mmb, bgps=bgps, coord_labels=['glon', 'glat'],
prefix='mmb', verbose=verbose)
# mark master ch3oh flag
bgps['ch3oh_f'] = _np.nan
bgps['ch3oh_f'][(bgps.pandi_n > 0) | (bgps.pesta_n > 0) |
(bgps.mmb_n > 0)] = 1
# print to file
bgps.to_csv(os.getcwd() + '/' + out_filen + '.csv', index=False)
print '-- Catalog file written to {}.csv'.format(out_filen)
return bgps
def clump_match_gen(cat, bgps=[], coord_labels=['_Glon', '_Glat'],
prefix='', out_filen=None, coord_type='gal', verbose=False,
det_col=None, best_col=None, add_all_cols=False):
"""
Match the BGPS to a general catalog with coordinate columns _Glon and
_Glat.
Parameters
----------
cat : pd.DataFrame
Catalog to match
bgps : pd.DataFrame, default []
BGPS or matched catalog to merge into. If empty list is passed then the
default BGPS v2 is read in.
prefix : str, default ''
Name to append to beginning of columns from cat
out_filen : str, default None
Name of output file, if left None, no output file is written
coord_type : str, default 'gal'
Coordinate type, either Galactic 'gal' or Equatorial 'eq'
    verbose : bool, default False
Print progress per clump
det_col : str, default None
Column name for detection flags, valid for 0 or 1. If `None` then do
nothing.
best_col : str, default None
Column to use max value from when discriminating multiple matches if
`add_all_cols` is set to True.
add_all_cols : bool, default False
Join all columns from input catalog `cat` to the BGPS.
Returns
-------
bgps : pd.DataFrame
Merged catalog
"""
if cat[coord_labels[0]].min() < 0:
raise ValueError('Longitude must be from 0 to 360')
if len(bgps) == 0:
bgps = catalog.read_bgps()
# rename columns
for col in cat.columns:
cat = cat.rename(columns={col: prefix + '_' + col})
# make sure not to clobber column names
if _np.any(_np.in1d(cat.columns, bgps.columns)):
overlap_cols = cat.columns[_np.in1d(cat.columns, bgps.columns)]
for col in overlap_cols:
cat = cat.rename(columns={col: '_' + col})
# assign new columns to empty values
for col in cat.columns:
bgps[col] = _np.nan
bgps[prefix + '_n'] = _np.nan
if det_col is not None:
bgps[prefix + '_f'] = _np.nan
if add_all_cols:
for col in cat.columns:
bgps[col] = _np.nan
# make haystack
coord_labels = [prefix + '_' + col for col in coord_labels]
cat_hs = cat[coord_labels].values
# loop through clumps
for cnum in bgps['cnum']:
# BGPS properties
cnum_select = bgps.cnum == cnum
c_index = _np.argwhere(cnum_select)[0][0]
# match cat
match_list = catalog.clump_match(cat_hs, cnum, coord_type=coord_type)
bgps[prefix + '_n'][cnum_select] = len(match_list)
if det_col is not None:
bgps[prefix + '_f'][cnum_select] = \
cat[det_col].ix[match_list].sum()
if add_all_cols:
max_index = cat[best_col].ix[match_list].argmax()
bgps.ix[c_index, cat.columns] = cat.ix[match_list[max_index]]
if verbose:
print '-- clump {0:>4d} : {1:>4d}'.format(cnum, len(match_list))
if out_filen is not None:
bgps.to_csv(os.getcwd() + '/' + out_filen + '.csv', index=False)
print '-- Catalog file written to {}.csv'.format(out_filen)
return bgps
def clump_match_all():
"""
Match BGPS to all evolutionary indicators. Matches to NH3, HCO+/N2H+,
H2O/CH3OH, IR, and HII.
Returns
-------
bgps : pd.DataFrame
"""
bgps = catalog.read_cat('bgps_v210')
bgps_all = bgps.copy()
df_list = []
fn_list = [clump_match_molcat,
clump_match_gbt_nh3,
clump_match_water,
clump_match_metho,
clump_match_ir,
clump_match_hii]
for fn in fn_list:
df_list.append(fn())
for df in df_list:
bgps_all = _pd.merge(bgps_all, df, how='outer')
bgps_all.to_csv('bgps_all.csv', index=False)
bgps_all.save('bgps_all.pickle')
return bgps_all
###############################################################################
# Class Based Approach
###############################################################################
class Matcher(object):
"""
    Matching class to match sources to the BGPS based on the label
masks. A data object is given and then the catalog is processed.
Parameters
----------
The `data` object is required to have the following attributes:
name : str
Name used for a unique identifier for output
cat : pd.DataFrame
The catalog to match
lon_col : str
Galactic longitude column name in decimal degrees
lat_col : str
Galactic latitude column name in decimal degrees
Attributes
----------
"""
v = 210
cnum_col = 'v210cnum'
n_bgps_cols = 20 # with cnum as index
def __init__(self, data):
# Parameters from data object
self.name = data.name
self.cat = data.cat
self.lon_col = data.lon_col
self.lat_col = data.lat_col
self.det_col = data.det_col
self.det_flags = data.det_flags
self.choose_col = data.choose_col
self.noise_col = data.noise_col
self.data = data
# BGPS data
self.cat['in_bgps'] = _np.nan
self.bgps = catalog.read_bgps(v=self.v).set_index(self.cnum_col)
# Process and match
self._add_new_cols()
self._make_haystack()
self._match()
def _add_new_cols(self):
"""
Add new columns to the BGPS catalog depending on the available
columns to match to in the child catalog.
"""
# Don't clobber original columns in BGPS
# Do first so as to not rename following named flags
for col in self.cat.columns:
if col in self.bgps.columns:
self.bgps['_' + col] = _np.nan
else:
self.bgps[col] = _np.nan
# New column for number of matched sources
self.bgps_count_col = 'n'
self.bgps[self.bgps_count_col] = _np.nan
# For number of detections
if self.det_col is not None:
self.bgps_det_col = 'f'
self.bgps[self.bgps_det_col] = _np.nan
def _make_haystack(self):
"""
Make 2xN array of coordinate positions for each source in the child
catalog.
"""
self.haystack = self.cat[[self.lon_col,
self.lat_col]].values
def _match(self):
"""
Match each source in the child catalog to the BGPS.
"""
self.matched_ix = {}
ids = self.cat.index
haystack = self.haystack
for cat_ix, coord in zip(ids, haystack):
cnum = sample_bgps_img(coord[0], coord[1], v=self.v)
self._enter_matched(cat_ix, cnum)
def _enter_matched(self, ix, cnum):
"""
Insert the indices of the matched sources into the matched source
dictionary with the BGPS cnum as the key.
"""
self.cat.loc[ix, 'in_bgps'] = cnum
if (not _np.isnan(cnum)) & (cnum != 0):
self.matched_ix.setdefault(cnum, [])
self.matched_ix[cnum].append(ix)
def _drop_orig_cols(self):
"""
        Drop the original BGPS columns, leaving just the matched columns and
        the catalog number as the index.
"""
self.bgps_culled = self.bgps.drop(labels=self.bgps.columns[
:self.n_bgps_cols], axis=1)
def process(self):
"""
        Simple processing to add columns to the BGPS catalog:
          * number of matches
          * number of detections, if `det_col` is specified
          * all columns of the best-matched source. If `choose_col` and
            `noise_col` are specified then the maximum in `choose_col` is
            used, or, if all of its values are null, the source with the
            minimum noise is chosen. If `noise_col` is `None` then the
            first source in the sub-index is used.
"""
# New column for number of matched sources
for cnum, cat_indices in self.matched_ix.iteritems():
self.bgps.ix[cnum, self.bgps_count_col] = len(cat_indices)
if self.det_col is not None:
matches = self.cat.ix[cat_indices, self.det_col]
num_dets = matches[matches.isin(self.det_flags)].shape[0]
self.bgps.ix[cnum, self.bgps_det_col] = num_dets
if self.choose_col is not None:
choose_ix = self.cat.ix[cat_indices, self.choose_col].idxmax()
if _np.isnan(choose_ix) & (self.noise_col is not None):
choose_ix = self.cat.ix[cat_indices,
self.noise_col].idxmin()
else:
choose_ix = cat_indices[0]
self.bgps.ix[cnum, self.cat.columns] = self.cat.ix[choose_ix]
self.bgps = self.bgps.rename(columns={col: self.name + '_' + col for
col in self.bgps.columns[
self.n_bgps_cols:]})
self._drop_orig_cols()
def to_csv(self):
"""
Write BGPS catalog to `.csv` file.
"""
self.cat.to_csv('cat_' + self.name + '.csv')
self.bgps.to_csv('bgps_' + self.name + '.csv')
class DataSet(object):
def __init__(self):
self.all_data = []
all_objs = [
WaterGbt,
WaterArcetri,
WaterHops,
WaterRms,
Cornish,
Egos,
AmmoniaGbt,
MethoPandian,
MethoPestalozzi,
MethoMmb,
Higal70,
RedSpitzer,
RedMsx,
Molcat,
WienenNh3,
MipsgalCatalog,
MipsgalArchive,
]
for obj in all_objs:
data = obj()
data.match()
data.write()
self.all_data.append(data)
self.process()
def _merge(self):
print '-- Merging data'
merged_data = catalog.read_cat('bgps_v210').set_index('v210cnum')
for data in self.all_data:
merge_cat = data.matcher.bgps_culled
merged_data = merged_data.merge(merge_cat,
left_index=True,
right_index=True)
self.merged_data = merged_data
def _append_evo_flags(self):
print '-- Adding evolutionary flags'
self.merged_data = append_evo_flags(bgps=self.merged_data)
def _write(self):
print '-- Writing all merged data'
self.merged_data.to_csv('bgps_v210_all_full.csv', index=False)
def process(self):
self._merge()
self._append_evo_flags()
self._write()
class Data(object):
"""
Parent class for object-catalogs to be matched with `Matcher`
"""
def match(self):
print '-- Matching {0}'.format(self.name)
self.matcher = Matcher(self)
self.matcher.process()
def write(self):
self.matcher.to_csv()
def append_evo_flags(bgps):
"""
Calculate and append evolutionary flags to BGPS catalog
    Parameters
    ----------
    bgps : pd.DataFrame
        BGPS catalog with matched survey columns
    Returns
    -------
    bgps : pd.DataFrame
"""
evo_flags = ['h2o_f', 'ch3oh_f', 'ego_f', 'ir_f', 'uchii_f', 'sf_f']
for col in evo_flags:
bgps[col] = _np.nan
# H2O flags
bgps['h2o_f'][((bgps['h2o_gbt_n'] > 0) & (bgps['h2o_gbt_f'] == 0)) &
_np.logical_not(bgps['h2o_arc_f'] > 0) &
_np.logical_not(bgps['h2o_rms_n'] > 0) &
_np.logical_not(bgps['h2o_hops_n'] > 0)] = 0
bgps['h2o_f'][(bgps['h2o_gbt_f'] > 0) |
(bgps['h2o_rms_n'] > 0) |
(bgps['h2o_arc_n'] > 0) |
(bgps['h2o_hops_n'] > 0)] = 1
# CH3OH flags
bgps['ch3oh_f'][(bgps['ch3oh_pesta_n'] > 0) |
(bgps['ch3oh_pandi_n'] > 0) |
(bgps['ch3oh_mmb_n'] > 0)] = 1
# EGO flags
bgps['ego_f'][(bgps['ego_n'] > 0)] = 1
# IR flags
bgps['ir_f'][(bgps['robit_f'] > 0) |
(bgps['red_msx_f'] > 0) |
(bgps['ego_n'] > 0)] = 1 # IR YSO
bgps['ir_f'][(bgps['ir_f'] != 1) &
(bgps['robit_n'] > 0) &
(bgps['robit_f'] == 0)] = 2 # robitaille AGB
# UCHII flags
bgps['uchii_f'][(bgps['corn_n'] > 0)] = 1
# Starless
bgps['sf_f'][(bgps['h2o_f'] == 0) &
(bgps['ch3oh_f'] != 1) &
(bgps['ir_f'] != 1) &
(bgps['uchii_f'] != 1)] = 0
bgps['sf_f'][(bgps['h2o_f'] == 1) |
(bgps['ch3oh_f'] == 1) |
(bgps['ir_f'] == 1) |
(bgps['uchii_f'] == 1)] = 1
return bgps
###############################################################################
# Catalog Data Objects
###############################################################################
class WaterGbt(Data):
def __init__(self):
# Catalog parameters
self.name = 'h2o_gbt'
self.cat = catalog.read_cat('gbt_h2o')
self.lon_col = 'h2o_glon'
self.lat_col = 'h2o_glat'
self.det_col = 'h2o_f'
self.det_flags = [1]
self.choose_col = 'h2o_tpk'
self.noise_col = 'h2o_tpk_err'
class WaterArcetri(Data):
def __init__(self):
# Catalog parameters
self.name = 'h2o_arc'
self.cat = catalog.read_cat('valdettaro01_arcetri')
self.lon_col = '_Glon'
self.lat_col = '_Glat'
self.det_col = 'h2o_f'
self.det_flags = [1]
self.choose_col = 'Stot'
self.noise_col = 'Sig'
class WaterHops(Data):
def __init__(self):
# Catalog parameters
self.name = 'h2o_hops'
self.cat = catalog.read_cat('walsh14_hops_h2o')
self.lon_col = '_Glon'
self.lat_col = '_Glat'
self.det_col = None
self.det_flags = None
self.choose_col = 'Sp'
self.noise_col = None
class WaterRms(Data):
def __init__(self):
# Catalog parameters
self.name = 'h2o_rms'
self.cat = catalog.read_cat('urquhart11_red_msx_h2o')
self.lon_col = '_Glon_1_'
self.lat_col = '_Glat_1_'
self.det_col = 'H2O_1_'
self.det_flags = ['y']
self.choose_col = 'log_SdV__2_'
self.noise_col = 'rms_2_'
class Cornish(Data):
def __init__(self):
# Catalog parameters
self.name = 'corn'
self.cat = catalog.read_cat('cornish_uchii')
self.lon_col = 'l_deg'
self.lat_col = 'b_deg'
self.det_col = None
self.det_flags = None
self.choose_col = 'Flux_mJy'
self.noise_col = 'dFlux_mJy'
class Egos(Data):
def __init__(self):
# Catalog parameters
self.name = 'ego'
self.cat = catalog.read_cat('ego_all')
self.lon_col = '_Glon'
self.lat_col = '_Glat'
self.det_col = None
self.det_flags = None
self.choose_col = '[4.5]'
self.noise_col = None
class AmmoniaGbt(Data):
def __init__(self):
# Catalog parameters
self.name = 'nh3_gbt'
self.cat = catalog.read_cat('gbt_nh3')
self.lon_col = 'glon'
self.lat_col = 'glat'
self.det_col = None
self.det_flags = None
self.choose_col = 'pk11'
self.noise_col = 'noise11'
class MethoPandian(Data):
def __init__(self):
# Catalog parameters
self.name = 'ch3oh_pandi'
self.cat = catalog.read_cat('pandian11')
self.lon_col = 'glon'
self.lat_col = 'glat'
self.det_col = None
self.det_flags = None
self.choose_col = None
self.noise_col = None
class MethoPestalozzi(Data):
def __init__(self):
# Catalog parameters
self.name = 'ch3oh_pesta'
self.cat = catalog.read_cat('pestalozzi05')
self.lon_col = '_Glon'
self.lat_col = '_Glat'
self.det_col = None
self.det_flags = None
self.choose_col = 'PFlux'
self.noise_col = None
class MethoMmb(Data):
def __init__(self):
# Catalog parameters
self.name = 'ch3oh_mmb'
self.cat = catalog.read_cat('mmb_all')
self.lon_col = 'glon'
self.lat_col = 'glat'
self.det_col = None
self.det_flags = None
self.choose_col = 'spk_mx'
self.noise_col = None
class Higal70(Data):
def __init__(self):
# Catalog parameters
self.name = 'higal70'
self.cat = catalog.read_cat('higal_70_clean')
self.lon_col = 'GLON'
self.lat_col = 'GLAT'
self.det_col = None
self.det_flags = None
self.choose_col = None
self.noise_col = None
class RedSpitzer(Data):
def __init__(self):
# Catalog parameters
self.name = 'robit'
self.cat = catalog.read_cat('robitaille08_red_spitzer')
self.lon_col = '_Glon'
self.lat_col = '_Glat'
self.det_col = 'Class'
self.det_flags = ['cYSO']
self.choose_col = '[8.0]'
self.noise_col = None
class RedMsx(Data):
def __init__(self):
# Catalog parameters
self.name = 'red_msx'
self.cat = catalog.read_cat('lumsden13_red_msx')
self.lon_col = 'glon'
self.lat_col = 'glat'
self.det_col = 'Type'
self.det_flags = ['YSO', 'HII/YSO', 'Young/old star']
self.choose_col = None
self.noise_col = None
class Molcat(Data):
def __init__(self):
# Catalog parameters
self.name = 'mol'
self.cat = catalog.read_cat('shirley13_molcat')
self.lon_col = 'hht_glon'
self.lat_col = 'hht_glat'
self.det_col = 'mol_vlsr_f'
self.det_flags = [1, 2, 3]
self.choose_col = 'hco_tpk'
self.noise_col = 'hco_tpk_err'
class WienenNh3(Data):
def __init__(self):
# Catalog parameters
self.name = 'wien'
cat = catalog.read_cat('wienen12_nh3')
cat = cat[cat['tkin'].notnull()]
self.cat = cat
self.lon_col = '_Glon'
self.lat_col = '_Glat'
self.det_col = 'tkin'
self.det_flags = [1, 2, 3]
self.choose_col = 'tkin'
self.noise_col = 'tkin_err'
class MipsgalCatalog(Data):
def __init__(self):
# Catalog parameters
self.name = 'mipsc'
self.cat = catalog.read_cat('mipsgal_catalog_lclip')
self.lon_col = 'l'
self.lat_col = 'b'
self.det_col = None
self.det_flags = None
self.choose_col = None
self.noise_col = None
class MipsgalArchive(Data):
def __init__(self):
# Catalog parameters
self.name = 'mipsa'
self.cat = catalog.read_cat('mipsgal_archive_lclip')
self.lon_col = 'l'
self.lat_col = 'b'
self.det_col = None
self.det_flags = None
self.choose_col = None
self.noise_col = None
class Sharc(Data):
def __init__(self):
# Catalog parameters
self.name = 'sharc'
self.cat = catalog.read_cat('merello15_table3')
self.lon_col = 'GLON'
self.lat_col = 'GLAT'
self.det_col = None
self.det_flags = None
self.choose_col = None
self.noise_col = None
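
# Usage sketch (illustrative; mirrors what DataSet does internally): each
# catalog object is matched against the BGPS label masks and written out on
# its own, e.g.
#
#   data = WaterGbt()
#   data.match()    # runs Matcher over the BGPS label masks
#   data.write()    # writes cat_h2o_gbt.csv and bgps_h2o_gbt.csv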
###############################################################################
# Protostellar Probability
###############################################################################
class ProtoProb(object):
pyso = 0.5
pagb = 0.4
pcol = 'proto_prob'
def __init__(self, df=None):
if df is None:
self.df = catalog.read_cat('bgps_v210_evo').set_index('v210cnum')
else:
self.df = df
self.df[self.pcol] = 0.0
self.calc()
def calc(self):
# Compute probabilities for R08
rix = self.df.query('robit_n > 0').index
for ii in rix:
nagb = self.df.loc[ii, 'robit_n'] - self.df.loc[ii, 'robit_f']
nyso = self.df.loc[ii, 'robit_f']
self.df.loc[ii, self.pcol] = self.proto_prob(nagb, nyso)
# Correct for certain sources
oix = self.df.query('hg70_eye_f in [1,2,4,5] |'
'ego_n > 0 | '
'red_msx_f > 0 | '
'corn_n > 0 | '
'h2o_f > 0 | '
'ch3oh_f > 0').index
self.df.loc[oix, self.pcol] = 1.0
def proto_prob(self, nagb, nyso):
# Binomial for compliment of zero successes
bagb = 1 - (1 - self.pagb)**nagb
byso = 1 - (1 - self.pyso)**nyso
return byso + bagb - byso * bagb
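    # Worked example (illustrative): with pagb = 0.4 and pyso = 0.5, one AGB
    # candidate and one YSO candidate give
    #   bagb = 1 - 0.6**1 = 0.4,  byso = 1 - 0.5**1 = 0.5,
    #   P = 0.5 + 0.4 - 0.5 * 0.4 = 0.7
    # i.e. inclusion-exclusion on the union of two independent binomials.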
###############################################################################
# Randomized Cross Matching
###############################################################################
class Jiggler(object):
"""
Catalog coordinate randomizer for randomized cross-matching.
"""
def __init__(self, data):
self.data = data()
        self.lons = self.data.cat[self.data.lon_col].values
        self.lats = self.data.cat[self.data.lat_col].values
| autocorr/besl | besl/clump_match.py | Python | gpl-3.0 | 37,622 | 0.000983 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c)2012 Rackspace US, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from functools import wraps
import six
from pyos.client import BaseClient
import pyos.exceptions as exc
from pyos.manager import BaseManager
from pyos.resource import BaseResource
import pyos.utils as utils
def assure_instance(fnc):
@wraps(fnc)
def _wrapped(self, instance, *args, **kwargs):
if not isinstance(instance, CloudDatabaseInstance):
# Must be the ID
instance = self._manager.get(instance)
return fnc(self, instance, *args, **kwargs)
return _wrapped
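
# The decorator above lets client methods accept either a
# CloudDatabaseInstance or just its ID. Illustrative (hypothetical) use:
#
#   @assure_instance
#   def restart_instance(self, instance):
#       return instance.restart()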
class CloudDatabaseVolume(object):
instance = None
size = None
used = None
def __init__(self, instance, info):
self.instance = instance
for key, val in info.items():
setattr(self, key, val)
def resize(self, size):
"""
Resize the volume to the specified size (in GB).
"""
self.instance.resize_volume(size)
self.size = size
def get(self, att):
"""
For compatibility with regular resource objects.
"""
return getattr(self, att)
class CloudDatabaseManager(BaseManager):
"""
This class manages communication with Cloud Database instances.
"""
def get(self, item):
"""
This additional code is necessary to properly return the 'volume'
attribute of the instance as a CloudDatabaseVolume object instead of
a raw dict.
"""
resource = super(CloudDatabaseManager, self).get(item)
resource.volume = CloudDatabaseVolume(resource, resource.volume)
return resource
def _create_body(self, name, flavor=None, volume=None, databases=None,
users=None):
"""
Used to create the dict required to create a Cloud Database instance.
"""
if flavor is None:
flavor = 1
flavor_ref = self.api._get_flavor_ref(flavor)
if volume is None:
volume = 1
if databases is None:
databases = []
if users is None:
users = []
body = {"instance": {
"name": name,
"flavorRef": flavor_ref,
"volume": {"size": volume},
"databases": databases,
"users": users,
}}
return body
def create_backup(self, instance, name, description=None):
"""
Creates a backup of the specified instance, giving it the specified
name along with an optional description.
"""
body = {"backup": {
"instance": utils.get_id(instance),
"name": name,
}}
if description is not None:
body["backup"]["description"] = description
uri = "/backups"
resp, resp_body = self.api.method_post(uri, body=body)
mgr = self.api._backup_manager
        return CloudDatabaseBackup(mgr, resp_body.get("backup"))
def restore_backup(self, backup, name, flavor, volume):
"""
Restores a backup to a new database instance. You must supply a backup
(either the ID or a CloudDatabaseBackup object), a name for the new
instance, as well as a flavor and volume size (in GB) for the instance.
"""
flavor_ref = self.api._get_flavor_ref(flavor)
body = {"instance": {
"name": name,
"flavorRef": flavor_ref,
"volume": {"size": volume},
"restorePoint": {"backupRef": utils.get_id(backup)},
}}
uri = "/%s" % self.uri_base
resp, resp_body = self.api.method_post(uri, body=body)
return CloudDatabaseInstance(self, resp_body.get("instance", {}))
def list_backups(self, instance=None):
"""
Returns a list of all backups by default, or just for a particular
instance.
"""
return self.api._backup_manager.list(instance=instance)
def _list_backups_for_instance(self, instance):
"""
Instance-specific backups are handled through the instance manager,
not the backup manager.
"""
uri = "/%s/%s/backups" % (self.uri_base, utils.get_id(instance))
resp, resp_body = self.api.method_get(uri)
mgr = self.api._backup_manager
return [CloudDatabaseBackup(mgr, backup)
for backup in resp_body.get("backups")]
class CloudDatabaseDatabaseManager(BaseManager):
"""
This class manages communication with databases on Cloud Database instances.
"""
def _create_body(self, name, character_set=None, collate=None):
body = {"databases": [
{"name": name,
"character_set": character_set,
"collate": collate,
}]}
return body
class CloudDatabaseUserManager(BaseManager):
"""
This class handles operations on the users in a database on a Cloud
Database instance.
"""
def _create_body(self, name, password, databases=None, database_names=None,
host=None):
db_dicts = [{"name": db} for db in database_names]
body = {"users": [
{"name": name,
"password": password,
"databases": db_dicts,
}]}
if host:
body["users"][0]["host"] = host
return body
def _get_db_names(self, dbs, strict=True):
"""
Accepts a single db (name or object) or a list of dbs, and returns a
list of database names. If any of the supplied dbs do not exist, a
NoSuchDatabase exception will be raised, unless you pass strict=False.
"""
dbs = utils.coerce_string_to_list(dbs)
db_names = [utils.get_name(db) for db in dbs]
if strict:
good_dbs = self.instance.list_databases()
good_names = [utils.get_name(good_db) for good_db in good_dbs]
bad_names = [db_name for db_name in db_names
if db_name not in good_names]
if bad_names:
bad = ", ".join(bad_names)
raise exc.NoSuchDatabase("The following database(s) were not "
"found: %s" % bad)
return db_names
def change_user_password(self, user, new_pass):
"""
Changes the password for the user to the supplied value.
Returns None upon success; raises PasswordChangeFailed if the call
does not complete successfully.
"""
return self.update(user, password=new_pass)
def update(self, user, name=None, password=None, host=None):
"""
Allows you to change one or more of the user's username, password, or
host.
"""
if not any((name, password, host)):
raise exc.MissingDBUserParameters("You must supply at least one of "
"the following: new username, new password, or new host "
"specification.")
if not isinstance(user, CloudDatabaseUser):
# Must be the ID/name
user = self.get(user)
dct = {}
if name and (name != user.name):
dct["name"] = name
if host and (host != user.host):
dct["host"] = host
if password:
dct["password"] = password
if not dct:
raise exc.DBUpdateUnchanged("You must supply at least one changed "
"value when updating a user.")
uri = "/%s/%s" % (self.uri_base, user.name)
body = {"user": dct}
resp, resp_body = self.api.method_put(uri, body=body)
return None
def list_user_access(self, user):
"""
Returns a list of all database names for which the specified user
has access rights.
"""
user = utils.get_name(user)
uri = "/%s/%s/databases" % (self.uri_base, user)
try:
resp, resp_body = self.api.method_get(uri)
except exc.NotFound as e:
raise exc.NoSuchDatabaseUser("User '%s' does not exist." % user)
dbs = resp_body.get("databases", {})
return [CloudDatabaseDatabase(self, db) for db in dbs]
def grant_user_access(self, user, db_names, strict=True):
"""
Gives access to the databases listed in `db_names` to the user. You may
pass in either a single db or a list of dbs.
If any of the databases do not exist, a NoSuchDatabase exception will
be raised, unless you specify `strict=False` in the call.
"""
user = utils.get_name(user)
uri = "/%s/%s/databases" % (self.uri_base, user)
db_names = self._get_db_names(db_names, strict=strict)
dbs = [{"name": db_name} for db_name in db_names]
body = {"databases": dbs}
try:
resp, resp_body = self.api.method_put(uri, body=body)
except exc.NotFound as e:
raise exc.NoSuchDatabaseUser("User '%s' does not exist." % user)
def revoke_user_access(self, user, db_names, strict=True):
"""
Revokes access to the databases listed in `db_names` for the user.
If any of the databases do not exist, a NoSuchDatabase exception will
be raised, unless you specify `strict=False` in the call.
"""
user = utils.get_name(user)
db_names = self._get_db_names(db_names, strict=strict)
bad_names = []
for db_name in db_names:
uri = "/%s/%s/databases/%s" % (self.uri_base, user, db_name)
resp, resp_body = self.api.method_delete(uri)
class CloudDatabaseBackupManager(BaseManager):
"""
This class handles operations on backups for a Cloud Database instance.
"""
def _create_body(self, name, instance, description=None):
body = {"backup": {
"instance": utils.get_id(instance),
"name": name,
}}
if description is not None:
body["backup"]["description"] = description
return body
def list(self, instance=None):
"""
Return a list of all backups by default, or just for a particular
instance.
"""
if instance is None:
return super(CloudDatabaseBackupManager, self).list()
return self.api._manager._list_backups_for_instance(instance)
class CloudDatabaseInstance(BaseResource):
"""
This class represents a MySQL instance in the cloud.
"""
def __init__(self, *args, **kwargs):
super(CloudDatabaseInstance, self).__init__(*args, **kwargs)
self._database_manager = CloudDatabaseDatabaseManager(self.manager.api,
resource_class=CloudDatabaseDatabase, response_key="database",
uri_base="instances/%s/databases" % self.id)
self._user_manager = CloudDatabaseUserManager(self.manager.api,
resource_class=CloudDatabaseUser, response_key="user",
uri_base="instances/%s/users" % self.id)
# Add references to the parent instance to the managers.
self._database_manager.instance = self._user_manager.instance = self
# Remove the lazy load
if not self.loaded:
self.get()
def get(self):
"""
Need to override the default get() behavior by making the 'volume'
attribute into a CloudDatabaseVolume object instead of the raw dict.
"""
super(CloudDatabaseInstance, self).get()
# Make the volume into an accessible object instead of a dict
self.volume = CloudDatabaseVolume(self, self.volume)
def list_databases(self, limit=None, marker=None):
"""Returns a list of the names of all databases for this instance."""
return self._database_manager.list(limit=limit, marker=marker)
def list_users(self, limit=None, marker=None):
"""Returns a list of the names of all users for this instance."""
return self._user_manager.list(limit=limit, marker=marker)
def get_user(self, name):
"""
Finds the user in this instance with the specified name, and
returns a CloudDatabaseUser object. If no match is found, a
NoSuchDatabaseUser exception is raised.
"""
try:
return self._user_manager.get(name)
except exc.NotFound:
raise exc.NoSuchDatabaseUser("No user by the name '%s' exists." %
name)
def get_database(self, name):
"""
Finds the database in this instance with the specified name, and
returns a CloudDatabaseDatabase object. If no match is found, a
NoSuchDatabase exception is raised.
"""
try:
return [db for db in self.list_databases()
if db.name == name][0]
except IndexError:
raise exc.NoSuchDatabase("No database by the name '%s' exists." %
name)
def create_database(self, name, character_set=None, collate=None):
"""
Creates a database with the specified name. If a database with
that name already exists, a BadRequest (400) exception will
be raised.
"""
if character_set is None:
character_set = "utf8"
if collate is None:
collate = "utf8_general_ci"
self._database_manager.create(name=name, character_set=character_set,
collate=collate, return_none=True)
# Since the API doesn't return the info for creating the database
# object, we have to do it manually.
return self._database_manager.find(name=name)
def create_user(self, name, password, database_names, host=None):
"""
Creates a user with the specified name and password, and gives that
user access to the specified database(s).
If a user with that name already exists, a BadRequest (400) exception
will be raised.
"""
if not isinstance(database_names, (list, tuple)):
database_names = [database_names]
# The API only accepts names, not DB objects
database_names = [db if isinstance(db, six.string_types) else db.name
for db in database_names]
self._user_manager.create(name=name, password=password,
database_names=database_names, host=host, return_none=True)
# Since the API doesn't return the info for creating the user object,
# we have to do it manually.
return self._user_manager.find(name=name)
def delete_database(self, name_or_obj):
"""
Deletes the specified database. If no database by that name
exists, no exception will be raised; instead, nothing at all
is done.
"""
name = utils.get_name(name_or_obj)
self._database_manager.delete(name)
def change_user_password(self, user, new_pass):
"""
Changes the password for the user to the supplied value.
Returns None upon success; raises PasswordChangeFailed if the call
does not complete successfully.
"""
return self._user_manager.change_user_password(user, new_pass)
def update_user(self, user, name=None, password=None, host=None):
"""
Allows you to change one or more of the user's username, password, or
host.
"""
return self._user_manager.update(user, name=name, password=password,
host=host)
def list_user_access(self, user):
"""
Returns a list of all database names for which the specified user
has access rights.
"""
return self._user_manager.list_user_access(user)
def grant_user_access(self, user, db_names, strict=True):
"""
Gives access to the databases listed in `db_names` to the user.
"""
return self._user_manager.grant_user_access(user, db_names,
strict=strict)
def revoke_user_access(self, user, db_names, strict=True):
"""
Revokes access to the databases listed in `db_names` for the user.
"""
return self._user_manager.revoke_user_access(user, db_names,
strict=strict)
def delete_user(self, user):
"""
Deletes the specified user. If no user by that name
exists, no exception will be raised; instead, nothing at all
is done.
"""
name = utils.get_name(user)
self._user_manager.delete(name)
def enable_root_user(self):
"""
Enables login from any host for the root user and provides
the user with a generated root password.
"""
uri = "/instances/%s/root" % self.id
resp, body = self.manager.api.method_post(uri)
return body["user"]["password"]
def root_user_status(self):
"""
Returns True or False, depending on whether the root user
for this instance has been enabled.
"""
uri = "/instances/%s/root" % self.id
resp, body = self.manager.api.method_get(uri)
return body["rootEnabled"]
def restart(self):
"""Restarts this instance."""
self.manager.action(self, "restart")
def resize(self, flavor):
"""Set the size of this instance to a different flavor."""
# We need the flavorRef, not the flavor or size.
flavorRef = self.manager.api._get_flavor_ref(flavor)
body = {"flavorRef": flavorRef}
self.manager.action(self, "resize", body=body)
def resize_volume(self, size):
"""Changes the size of the volume for this instance."""
curr_size = self.volume.size
if size <= curr_size:
raise exc.InvalidVolumeResize("The new volume size must be larger "
"than the current volume size of '%s'." % curr_size)
body = {"volume": {"size": size}}
self.manager.action(self, "resize", body=body)
def list_backups(self):
"""
Returns a list of all backups for this instance.
"""
return self.manager._list_backups_for_instance(self)
def create_backup(self, name, description=None):
"""
Creates a backup of this instance, giving it the specified name along
with an optional description.
"""
return self.manager.create_backup(self, name, description=description)
def _get_flavor(self):
try:
ret = self._flavor
except AttributeError:
ret = self._flavor = CloudDatabaseFlavor(
self.manager.api._flavor_manager, {})
return ret
def _set_flavor(self, flavor):
if isinstance(flavor, dict):
self._flavor = CloudDatabaseFlavor(self.manager.api._flavor_manager,
flavor)
else:
# Must be an instance
self._flavor = flavor
flavor = property(_get_flavor, _set_flavor)
class CloudDatabaseDatabase(BaseResource):
"""
This class represents a database on a CloudDatabaseInstance. It is not
a true cloud entity, but a convenience object for dealing with databases
on instances.
"""
get_details = True
def delete(self):
"""This class doesn't have an 'id', so pass the name."""
self.manager.delete(self.name)
class CloudDatabaseUser(BaseResource):
"""
This class represents a user on a CloudDatabaseInstance. It is not
a true cloud entity, but a convenience object for dealing with users
for instances.
"""
get_details = False
name = None
host = None
def delete(self):
"""This class doesn't have an 'id', so pass the name."""
self.manager.delete(self.name)
def change_password(self, new_pass):
"""
Changes the password for this user to the supplied value.
Returns None upon success; raises PasswordChangeFailed if the call
does not complete successfully.
"""
self.manager.change_user_password(self, new_pass)
def update(self, name=None, password=None, host=None):
"""
Allows you to change one or more of the user's username, password, or
host.
"""
return self.manager.update(self, name=name, password=password,
host=host)
def list_user_access(self):
"""
Returns a list of all database names for which the specified user
has access rights.
"""
return self.manager.list_user_access(self)
def grant_user_access(self, db_names, strict=True):
"""
Gives access to the databases listed in `db_names` to the user.
"""
return self.manager.grant_user_access(self, db_names, strict=strict)
def revoke_user_access(self, db_names, strict=True):
"""
Revokes access to the databases listed in `db_names` for the user.
"""
return self.manager.revoke_user_access(self, db_names, strict=strict)
class CloudDatabaseFlavor(BaseResource):
"""
This class represents the available instance configurations, or 'flavors',
which you use to define the memory and CPU size of your instance. These
objects are read-only.
"""
get_details = True
_non_display = ["links"]
class CloudDatabaseBackup(BaseResource):
"""
This class represents a database backup.
"""
get_details = True
_non_display = ["locationRef"]
class CloudDatabaseClient(BaseClient):
"""
This is the primary class for interacting with Cloud Databases.
"""
name = "Cloud Databases"
def _configure_manager(self):
"""
Creates a manager to handle the instances, and another
to handle flavors.
"""
self._manager = CloudDatabaseManager(self,
resource_class=CloudDatabaseInstance, response_key="instance",
uri_base="instances")
self._flavor_manager = BaseManager(self,
resource_class=CloudDatabaseFlavor, response_key="flavor",
uri_base="flavors")
self._backup_manager = CloudDatabaseBackupManager(self,
resource_class=CloudDatabaseBackup, response_key="backup",
uri_base="backups")
@assure_instance
def list_databases(self, instance, limit=None, marker=None):
"""Returns all databases for the specified instance."""
return instance.list_databases(limit=limit, marker=marker)
@assure_instance
def create_database(self, instance, name, character_set=None,
collate=None):
"""Creates a database with the specified name on the given instance."""
return instance.create_database(name, character_set=character_set,
collate=collate)
@assure_instance
def get_database(self, instance, name):
"""
Finds the database in the given instance with the specified name, and
returns a CloudDatabaseDatabase object. If no match is found, a
NoSuchDatabase exception is raised.
"""
return instance.get_database(name)
@assure_instance
def delete_database(self, instance, name):
"""Deletes the database by name on the given instance."""
return instance.delete_database(name)
@assure_instance
def list_users(self, instance, limit=None, marker=None):
"""Returns all users for the specified instance."""
return instance.list_users(limit=limit, marker=marker)
@assure_instance
def create_user(self, instance, name, password, database_names, host=None):
"""
Creates a user with the specified name and password, and gives that
user access to the specified database(s).
"""
return instance.create_user(name=name, password=password,
database_names=database_names, host=host)
@assure_instance
def get_user(self, instance, name):
"""
Finds the user in the given instance with the specified name, and
returns a CloudDatabaseUser object. If no match is found, a
NoSuchUser exception is raised.
"""
return instance.get_user(name)
@assure_instance
def delete_user(self, instance, name):
"""Deletes the user by name on the given instance."""
return instance.delete_user(name)
@assure_instance
def change_user_password(self, instance, user, new_pass):
"""
Changes the password for the user of the specified instance to the
supplied value.
Returns None upon success; raises PasswordChangeFailed if the call
does not complete successfully.
"""
return instance.change_user_password(user, new_pass)
@assure_instance
def update_user(self, instance, user, name=None, password=None, host=None):
"""
Allows you to change one or more of the user's username, password, or
host.
"""
return instance.update_user(user, name=name, password=password,
host=host)
@assure_instance
def list_user_access(self, instance, user):
"""
Returns a list of all database names for which the specified user
has access rights on the specified instance.
"""
return instance.list_user_access(user)
@assure_instance
def grant_user_access(self, instance, user, db_names, strict=True):
"""
Gives access to the databases listed in `db_names` to the user
on the specified instance.
"""
return instance.grant_user_access(user, db_names, strict=strict)
@assure_instance
def revoke_user_access(self, instance, user, db_names, strict=True):
"""
Revokes access to the databases listed in `db_names` for the user
on the specified instance.
"""
return instance.revoke_user_access(user, db_names, strict=strict)
@assure_instance
def enable_root_user(self, instance):
"""
This enables login from any host for the root user and provides
the user with a generated root password.
"""
return instance.enable_root_user()
@assure_instance
def root_user_status(self, instance):
"""Returns True if the given instance is root-enabled."""
return instance.root_user_status()
@assure_instance
def restart(self, instance):
"""Restarts the instance."""
return instance.restart()
@assure_instance
def resize(self, instance, flavor):
"""Sets the size of the instance to a different flavor."""
return instance.resize(flavor)
def get_limits(self):
"""Not implemented in Cloud Databases."""
raise NotImplementedError("Limits are not available for Cloud Databases")
def list_flavors(self, limit=None, marker=None):
"""Returns a list of all available Flavors."""
return self._flavor_manager.list(limit=limit, marker=marker)
def get_flavor(self, flavor_id):
"""Returns a specific Flavor object by ID."""
return self._flavor_manager.get(flavor_id)
def _get_flavor_ref(self, flavor):
"""
        Flavors are unusual in that the API expects an href link rather than
        an ID, which is what nearly every other resource uses. This method
        takes either a CloudDatabaseFlavor object, a flavor ID, a RAM size,
        or a flavor name, and uses that to determine the appropriate href.
"""
flavor_obj = None
if isinstance(flavor, CloudDatabaseFlavor):
flavor_obj = flavor
elif isinstance(flavor, int):
# They passed an ID or a size
try:
flavor_obj = self.get_flavor(flavor)
except exc.NotFound:
# Must be either a size or bad ID, which will
# be handled below
pass
if flavor_obj is None:
# Try flavor name
flavors = self.list_flavors()
try:
flavor_obj = [flav for flav in flavors
if flav.name == flavor][0]
except IndexError:
# No such name; try matching RAM
try:
flavor_obj = [flav for flav in flavors
if flav.ram == flavor][0]
except IndexError:
raise exc.FlavorNotFound("Could not determine flavor from "
"'%s'." % flavor)
# OK, we have a Flavor object. Get the href
href = [link["href"] for link in flavor_obj.links
if link["rel"] == "self"][0]
return href
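    # Resolution-order sketch (hedged; values are illustrative only):
    #   client._get_flavor_ref(flavor_obj)      # CloudDatabaseFlavor: use as-is
    #   client._get_flavor_ref(1)               # int: try as a flavor ID first
    #   client._get_flavor_ref(512)             # unknown ID: fall back to RAM match
    #   client._get_flavor_ref("1GB Instance")  # str: match by flavor name
    #   client._get_flavor_ref("bogus")         # no match: exc.FlavorNotFound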
def list_backups(self, instance=None):
"""
Returns a list of all backups by default, or just for a particular
instance.
"""
return self._backup_manager.list(instance=instance)
def get_backup(self, backup):
"""
Returns the CloudDatabaseBackup instance for a given ID.
"""
return self._backup_manager.get(backup)
def delete_backup(self, backup):
"""
Deletes the CloudDatabaseBackup instance for a given ID.
"""
return self._backup_manager.delete(backup)
@assure_instance
def create_backup(self, instance, name, description=None):
"""
Creates a backup of the specified instance, giving it the specified
name along with an optional description.
"""
return instance.create_backup(name, description=description)
def restore_backup(self, backup, name, flavor, volume):
"""
Restores a backup to a new database instance. You must supply a backup
(either the ID or a CloudDatabaseBackup object), a name for the new
instance, as well as a flavor and size (in GB) for the instance.
"""
return self._manager.restore_backup(backup, name, flavor, volume)
|
emonty/pyos
|
pyos/clouddatabases.py
|
Python
|
apache-2.0
| 30,566 | 0.003991 |
import pytz
from django.apps import apps
from django.contrib.auth.models import Group
from django.core.exceptions import ObjectDoesNotExist
from django.db import models, transaction
from django.utils import timezone
from django.utils.functional import cached_property
from guardian.shortcuts import assign_perm
from guardian.shortcuts import get_perms
from guardian.shortcuts import remove_perm
from include import IncludeQuerySet
from api.preprint_providers.workflows import Workflows, PUBLIC_STATES
from framework.analytics import increment_user_activity_counters
from framework.exceptions import PermissionsError
from osf.exceptions import InvalidTriggerError
from osf.models.node_relation import NodeRelation
from osf.models.nodelog import NodeLog
from osf.models.subject import Subject
from osf.models.tag import Tag
from osf.models.validators import validate_subject_hierarchy
from osf.utils.fields import NonNaiveDateTimeField
from osf.utils.machines import ReviewsMachine, RequestMachine
from osf.utils.permissions import ADMIN
from osf.utils.workflows import DefaultStates, DefaultTriggers
from website.exceptions import NodeStateError
from website import settings
class Versioned(models.Model):
"""A Model mixin class that saves delta versions."""
@classmethod
def _sig_pre_delete(cls, instance, *args, **kwargs):
"""dispatch the pre_delete method to a regular instance method. """
return instance.sig_pre_delete(*args, **kwargs)
@classmethod
def _sig_post_delete(cls, instance, *args, **kwargs):
"""dispatch the post_delete method to a regular instance method. """
return instance.sig_post_delete(*args, **kwargs)
@classmethod
def _sig_pre_save(cls, instance, *args, **kwargs):
"""dispatch the pre_save method to a regular instance method. """
return instance.sig_pre_save(*args, **kwargs)
@classmethod
def _sig_post_save(cls, instance, *args, **kwargs):
"""dispatch the post_save method to a regular instance method. """
return instance.sig_post_save(*args, **kwargs)
@classmethod
def connect(cls, signal):
"""Connect a django signal with this model."""
# List all signals you want to connect with here:
from django.db.models.signals import (pre_save, post_save, pre_delete, post_delete)
sig_handler = {
pre_save: cls._sig_pre_save,
post_save: cls._sig_post_save,
pre_delete: cls._sig_pre_delete,
post_delete: cls._sig_post_delete,
}[signal]
signal.connect(sig_handler, sender=cls)
class Meta:
abstract = True
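# Wiring sketch (hedged; 'Draft' is an illustrative subclass, not part of the
# codebase): a concrete model implements the sig_* hooks it cares about and
# opts in per signal via connect().
#   from django.db.models.signals import post_save
#   class Draft(Versioned):
#       def sig_post_save(self, *args, **kwargs):
#           pass  # record a delta version here
#   Draft.connect(post_save)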
class Loggable(models.Model):
last_logged = NonNaiveDateTimeField(db_index=True, null=True, blank=True, default=timezone.now)
def add_log(self, action, params, auth, foreign_user=None, log_date=None, save=True, request=None):
AbstractNode = apps.get_model('osf.AbstractNode')
user = None
if auth:
user = auth.user
elif request:
user = request.user
params['node'] = params.get('node') or params.get('project') or self._id
original_node = self if self._id == params['node'] else AbstractNode.load(params.get('node'))
log = NodeLog(
action=action, user=user, foreign_user=foreign_user,
params=params, node=self, original_node=original_node
)
if log_date:
log.date = log_date
log.save()
if self.logs.count() == 1:
self.last_logged = log.date.replace(tzinfo=pytz.utc)
else:
self.last_logged = self.logs.first().date
if save:
self.save()
if user and not self.is_collection:
increment_user_activity_counters(user._primary_key, action, log.date.isoformat())
return log
class Meta:
abstract = True
class Taggable(models.Model):
tags = models.ManyToManyField('Tag', related_name='%(class)s_tagged')
def update_tags(self, new_tags, auth=None, save=True, log=True, system=False):
old_tags = set(self.tags.values_list('name', flat=True))
to_add = (set(new_tags) - old_tags)
to_remove = (old_tags - set(new_tags))
if to_add:
self.add_tags(to_add, auth=auth, save=save, log=log, system=system)
if to_remove:
self.remove_tags(to_remove, auth=auth, save=save)
def add_tags(self, tags, auth=None, save=True, log=True, system=False):
"""
        Optimization method for use with update_tags. Unlike add_tag, this
        assumes the tags are not already present on the object.
"""
if not system and not auth:
raise ValueError('Must provide auth if adding a non-system tag')
for tag in tags:
tag_instance, created = Tag.all_tags.get_or_create(name=tag, system=system)
self.tags.add(tag_instance)
# TODO: Logging belongs in on_tag_added hook
if log:
self.add_tag_log(tag_instance, auth)
self.on_tag_added(tag_instance)
if save:
self.save()
def add_tag(self, tag, auth=None, save=True, log=True, system=False):
if not system and not auth:
raise ValueError('Must provide auth if adding a non-system tag')
if not isinstance(tag, Tag):
tag_instance, created = Tag.all_tags.get_or_create(name=tag, system=system)
else:
tag_instance = tag
if not self.tags.filter(id=tag_instance.id).exists():
self.tags.add(tag_instance)
# TODO: Logging belongs in on_tag_added hook
if log:
self.add_tag_log(tag_instance, auth)
if save:
self.save()
self.on_tag_added(tag_instance)
return tag_instance
def remove_tag(self, *args, **kwargs):
raise NotImplementedError('Removing tags requires that remove_tag is implemented')
def add_system_tag(self, tag, save=True):
if isinstance(tag, Tag) and not tag.system:
raise ValueError('Non-system tag passed to add_system_tag')
return self.add_tag(tag=tag, auth=None, save=save, log=False, system=True)
def add_tag_log(self, *args, **kwargs):
raise NotImplementedError('Logging requires that add_tag_log method is implemented')
def on_tag_added(self, tag):
pass
class Meta:
abstract = True
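# Delta sketch (hedged; 'page' is a hypothetical Taggable whose tags are
# {'a', 'b'}): update_tags works on set differences, so the call below adds
# only 'c' and removes only 'b', leaving 'a' untouched.
#   page.update_tags(['a', 'c'], auth=auth)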
class AddonModelMixin(models.Model):
# from addons.base.apps import BaseAddonConfig
settings_type = None
ADDONS_AVAILABLE = sorted([config for config in apps.get_app_configs() if config.name.startswith('addons.') and
config.label != 'base'])
class Meta:
abstract = True
@classmethod
def get_addon_key(cls, config):
return 2 << cls.ADDONS_AVAILABLE.index(config)
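    # Key sketch (hedged): with ADDONS_AVAILABLE == [cfg_a, cfg_b, cfg_c],
    # get_addon_key returns 2, 4 and 8 respectively -- one distinct bit per
    # addon config, suitable for combining into a bitmask.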
@property
def addons(self):
return self.get_addons()
def get_addons(self):
return filter(None, [
self.get_addon(config.short_name)
for config in self.ADDONS_AVAILABLE
])
def get_oauth_addons(self):
# TODO: Using hasattr is a dirty hack - we should be using issubclass().
# We can't, because importing the parent classes here causes a
# circular import error.
return [
addon for addon in self.get_addons()
if hasattr(addon, 'oauth_provider')
]
def has_addon(self, addon_name, deleted=False):
return bool(self.get_addon(addon_name, deleted=deleted))
def get_addon_names(self):
return [each.short_name for each in self.get_addons()]
def get_or_add_addon(self, name, *args, **kwargs):
addon = self.get_addon(name)
if addon:
return addon
return self.add_addon(name, *args, **kwargs)
def get_addon(self, name, deleted=False):
try:
settings_model = self._settings_model(name)
except LookupError:
return None
if not settings_model:
return None
try:
settings_obj = settings_model.objects.get(owner=self)
if not settings_obj.deleted or deleted:
return settings_obj
except ObjectDoesNotExist:
pass
return None
def add_addon(self, addon_name, auth=None, override=False, _force=False):
"""Add an add-on to the node.
:param str addon_name: Name of add-on
:param Auth auth: Consolidated authorization object
        :param bool override: For shell use only; allows adding system addons
:param bool _force: For migration testing ONLY. Do not set to True
in the application, or else projects will be allowed to have
duplicate addons!
:return bool: Add-on was added
"""
if not override and addon_name in settings.SYSTEM_ADDED_ADDONS[self.settings_type]:
return False
# Reactivate deleted add-on if present
addon = self.get_addon(addon_name, deleted=True)
if addon:
if addon.deleted:
addon.undelete(save=True)
return addon
if not _force:
return False
config = apps.get_app_config('addons_{}'.format(addon_name))
model = self._settings_model(addon_name, config=config)
ret = model(owner=self)
ret.on_add()
ret.save() # TODO This doesn't feel right
return ret
def config_addons(self, config, auth=None, save=True):
"""Enable or disable a set of add-ons.
:param dict config: Mapping between add-on names and enabled / disabled
statuses
"""
for addon_name, enabled in config.iteritems():
if enabled:
self.add_addon(addon_name, auth)
else:
self.delete_addon(addon_name, auth)
if save:
self.save()
def delete_addon(self, addon_name, auth=None, _force=False):
"""Delete an add-on from the node.
:param str addon_name: Name of add-on
:param Auth auth: Consolidated authorization object
:param bool _force: For migration testing ONLY. Do not set to True
in the application, or else projects will be allowed to delete
mandatory add-ons!
:return bool: Add-on was deleted
"""
addon = self.get_addon(addon_name)
if not addon:
return False
if self.settings_type in addon.config.added_mandatory and not _force:
raise ValueError('Cannot delete mandatory add-on.')
if getattr(addon, 'external_account', None):
addon.deauthorize(auth=auth)
addon.delete(save=True)
return True
def _settings_model(self, addon_model, config=None):
if not config:
config = apps.get_app_config('addons_{}'.format(addon_model))
return getattr(config, '{}_settings'.format(self.settings_type))
class NodeLinkMixin(models.Model):
class Meta:
abstract = True
def add_node_link(self, node, auth, save=True):
"""Add a node link to a node.
:param Node node: Node to add
:param Auth auth: Consolidated authorization
:param bool save: Save changes
:return: Created pointer
"""
        # Fail if a node link to this node already exists. The check is on
        # primary keys so that it catches conflicts with both nodes and
        # pointers contained in `self.nodes`.
if NodeRelation.objects.filter(parent=self, child=node, is_node_link=True).exists():
raise ValueError(
'Link to node {0} already exists'.format(node._id)
)
if self.is_registration:
raise NodeStateError('Cannot add a node link to a registration')
# Append node link
node_relation, created = NodeRelation.objects.get_or_create(
parent=self,
child=node,
is_node_link=True
)
# Add log
if hasattr(self, 'add_log'):
self.add_log(
action=NodeLog.NODE_LINK_CREATED,
params={
'parent_node': self.parent_id,
'node': self._id,
'pointer': {
'id': node._id,
'url': node.url,
'title': node.title,
'category': node.category,
},
},
auth=auth,
save=False,
)
# Optionally save changes
if save:
self.save()
return node_relation
add_pointer = add_node_link # For v1 compat
def rm_node_link(self, node_relation, auth):
"""Remove a pointer.
:param Pointer pointer: Pointer to remove
:param Auth auth: Consolidated authorization
"""
AbstractNode = apps.get_model('osf.AbstractNode')
node_rel = None
if isinstance(node_relation, NodeRelation):
try:
node_rel = self.node_relations.get(is_node_link=True, id=node_relation.id)
except NodeRelation.DoesNotExist:
raise ValueError('Node link does not belong to the requested node.')
elif isinstance(node_relation, AbstractNode):
try:
node_rel = self.node_relations.get(is_node_link=True, child__id=node_relation.id)
except NodeRelation.DoesNotExist:
raise ValueError('Node link does not belong to the requested node.')
if node_rel is not None:
node_rel.delete()
node = node_rel.child
# Add log
if hasattr(self, 'add_log'):
self.add_log(
action=NodeLog.POINTER_REMOVED,
params={
'parent_node': self.parent_id,
'node': self._primary_key,
'pointer': {
'id': node._id,
'url': node.url,
'title': node.title,
'category': node.category,
},
},
auth=auth,
save=False,
)
rm_pointer = rm_node_link # For v1 compat
@property
def nodes_pointer(self):
"""For v1 compat"""
return self.linked_nodes
def fork_node_link(self, node_relation, auth, save=True):
"""Replace a linked node with a fork.
:param NodeRelation node_relation:
:param Auth auth:
:param bool save:
:return: Forked node
"""
# Fail if pointer not contained in `nodes`
try:
node = self.node_relations.get(is_node_link=True, id=node_relation.id).child
except NodeRelation.DoesNotExist:
raise ValueError('Node link {0} not in list'.format(node_relation._id))
# Fork node to which current nodelink points
forked = node.fork_node(auth)
if forked is None:
raise ValueError('Could not fork node')
if hasattr(self, 'add_log'):
# Add log
self.add_log(
NodeLog.NODE_LINK_FORKED,
params={
'parent_node': self.parent_id,
'node': self._id,
'pointer': {
'id': node._id,
'url': node.url,
'title': node.title,
'category': node.category,
},
},
auth=auth,
save=False,
)
# Optionally save changes
if save:
self.save()
# Return forked content
return forked
fork_pointer = fork_node_link # For v1 compat
class CommentableMixin(object):
"""Abstract class that defines the interface for models that have comments attached to them."""
@property
def target_type(self):
""" The object "type" used in the OSF v2 API. E.g. Comment objects have the type 'comments'."""
raise NotImplementedError
@property
def root_target_page(self):
"""The page type associated with the object/Comment.root_target.
        E.g. for a WikiPage, the page name is 'wiki'."""
raise NotImplementedError
is_deleted = False
def belongs_to_node(self, node_id):
"""Check whether an object (e.g. file, wiki, comment) is attached to the specified node."""
raise NotImplementedError
def get_extra_log_params(self, comment):
"""Return extra data to pass as `params` to `Node.add_log` when a new comment is
created, edited, deleted or restored."""
return {}
class MachineableMixin(models.Model):
class Meta:
abstract = True
# NOTE: machine_state should rarely/never be modified directly -- use the state transition methods below
machine_state = models.CharField(max_length=15, db_index=True, choices=DefaultStates.choices(), default=DefaultStates.INITIAL.value)
date_last_transitioned = models.DateTimeField(null=True, blank=True, db_index=True)
@property
def MachineClass(self):
raise NotImplementedError()
def run_submit(self, user):
"""Run the 'submit' state transition and create a corresponding Action.
Params:
user: The user triggering this transition.
"""
return self.__run_transition(DefaultTriggers.SUBMIT.value, user=user)
def run_accept(self, user, comment):
"""Run the 'accept' state transition and create a corresponding Action.
Params:
user: The user triggering this transition.
comment: Text describing why.
"""
return self.__run_transition(DefaultTriggers.ACCEPT.value, user=user, comment=comment)
def run_reject(self, user, comment):
"""Run the 'reject' state transition and create a corresponding Action.
Params:
user: The user triggering this transition.
comment: Text describing why.
"""
return self.__run_transition(DefaultTriggers.REJECT.value, user=user, comment=comment)
def run_edit_comment(self, user, comment):
"""Run the 'edit_comment' state transition and create a corresponding Action.
Params:
user: The user triggering this transition.
comment: New comment text.
"""
return self.__run_transition(DefaultTriggers.EDIT_COMMENT.value, user=user, comment=comment)
def __run_transition(self, trigger, **kwargs):
machine = self.MachineClass(self, 'machine_state')
trigger_fn = getattr(machine, trigger)
with transaction.atomic():
result = trigger_fn(**kwargs)
action = machine.action
if not result or action is None:
valid_triggers = machine.get_triggers(self.machine_state)
raise InvalidTriggerError(trigger, self.machine_state, valid_triggers)
return action
class RequestableMixin(MachineableMixin):
"""Something that users may request access or changes to.
"""
class Meta:
abstract = True
MachineClass = RequestMachine
class ReviewableMixin(MachineableMixin):
"""Something that may be included in a reviewed collection and is subject to a reviews workflow.
"""
class Meta:
abstract = True
MachineClass = ReviewsMachine
@property
def in_public_reviews_state(self):
public_states = PUBLIC_STATES.get(self.provider.reviews_workflow)
if not public_states:
return False
return self.machine_state in public_states
class ReviewProviderMixin(models.Model):
"""A reviewed/moderated collection of objects.
"""
REVIEWABLE_RELATION_NAME = None
class Meta:
abstract = True
reviews_workflow = models.CharField(null=True, blank=True, max_length=15, choices=Workflows.choices())
reviews_comments_private = models.NullBooleanField()
reviews_comments_anonymous = models.NullBooleanField()
@property
def is_reviewed(self):
return self.reviews_workflow is not None
def get_reviewable_state_counts(self):
assert self.REVIEWABLE_RELATION_NAME, 'REVIEWABLE_RELATION_NAME must be set to compute state counts'
qs = getattr(self, self.REVIEWABLE_RELATION_NAME)
if isinstance(qs, IncludeQuerySet):
qs = qs.include(None)
qs = qs.filter(node__isnull=False, node__is_deleted=False, node__is_public=True).values('machine_state').annotate(count=models.Count('*'))
counts = {state.value: 0 for state in DefaultStates}
counts.update({row['machine_state']: row['count'] for row in qs if row['machine_state'] in counts})
return counts
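    # Shape sketch (hedged; values are illustrative): the result maps every
    # DefaultStates value to a count, zero-filled for states with no matching
    # reviewables, e.g. {'initial': 0, 'pending': 12, 'accepted': 40, ...}.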
def add_to_group(self, user, group):
from api.preprint_providers.permissions import GroupHelper
# Add default notification subscription
notification = self.notification_subscriptions.get(_id='{}_new_pending_submissions'.format(self._id))
user_id = user.id
is_subscriber = notification.none.filter(id=user_id).exists() \
or notification.email_digest.filter(id=user_id).exists() \
or notification.email_transactional.filter(id=user_id).exists()
if not is_subscriber:
notification.add_user_to_subscription(user, 'email_transactional', save=True)
return GroupHelper(self).get_group(group).user_set.add(user)
def remove_from_group(self, user, group, unsubscribe=True):
from api.preprint_providers.permissions import GroupHelper
_group = GroupHelper(self).get_group(group)
if group == 'admin':
if _group.user_set.filter(id=user.id).exists() and not _group.user_set.exclude(id=user.id).exists():
raise ValueError('Cannot remove last admin.')
if unsubscribe:
# remove notification subscription
notification = self.notification_subscriptions.get(_id='{}_new_pending_submissions'.format(self._id))
notification.remove_user_from_subscription(user, save=True)
return _group.user_set.remove(user)
class GuardianMixin(models.Model):
""" Helper for managing object-level permissions with django-guardian
Expects:
- Permissions to be defined in class Meta->permissions
- Groups to be defined in self.groups
- Group naming scheme to:
* Be defined in self.group_format
* Use `self` and `group` as format params. E.g: model_{self.id}_{group}
"""
class Meta:
abstract = True
@property
def groups(self):
raise NotImplementedError()
@property
def group_format(self):
raise NotImplementedError()
@property
def perms_list(self):
        # Django expects permissions to be specified as a sequence of
        # (codename, name) 2-tuples
return [p[0] for p in self._meta.permissions]
@property
def group_names(self):
return [self.format_group(name) for name in self.groups_dict]
@property
def group_objects(self):
# TODO: consider subclassing Group if this becomes inefficient
return Group.objects.filter(name__in=self.group_names)
def format_group(self, name):
if name not in self.groups:
raise ValueError('Invalid group: "{}"'.format(name))
return self.group_format.format(self=self, group=name)
def get_group(self, name):
return Group.objects.get(name=self.format_group(name))
def update_group_permissions(self):
for group_name, group_permissions in self.groups.items():
group, created = Group.objects.get_or_create(name=self.format_group(group_name))
to_remove = set(get_perms(group, self)).difference(group_permissions)
for p in to_remove:
remove_perm(p, group, self)
for p in group_permissions:
assign_perm(p, group, self)
def get_permissions(self, user):
return list(set(get_perms(user, self)) & set(self.perms_list))
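# Concrete-subclass sketch (hedged; 'Journal' and its permissions are
# illustrative, not from the codebase):
#   class Journal(GuardianMixin, models.Model):
#       group_format = 'journal_{self.id}_{group}'
#       groups = {'read': ('view_journal',),
#                 'admin': ('view_journal', 'manage_journal')}
#       class Meta:
#           permissions = (('view_journal', 'Can view journal'),
#                          ('manage_journal', 'Can manage journal'))
#   After save, update_group_permissions() creates Groups named
#   'journal_<id>_read' and 'journal_<id>_admin' with those object perms.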
class TaxonomizableMixin(models.Model):
class Meta:
abstract = True
subjects = models.ManyToManyField(blank=True, to='osf.Subject', related_name='%(class)ss')
@cached_property
def subject_hierarchy(self):
return [
s.object_hierarchy for s in self.subjects.exclude(children__in=self.subjects.all())
]
def set_subjects(self, new_subjects, auth, add_log=True):
""" Helper for setting M2M subjects field from list of hierarchies received from UI.
Only authorized admins may set subjects.
:param list[list[Subject._id]] new_subjects: List of subject hierarchies to be validated and flattened
:param Auth auth: Auth object for requesting user
:param bool add_log: Whether or not to add a log (if called on a Loggable object)
:return: None
"""
if getattr(self, 'is_registration', False):
raise PermissionsError('Registrations may not be modified.')
if getattr(self, 'is_collection', False):
raise NodeStateError('Collections may not have subjects')
if not self.has_permission(auth.user, ADMIN):
raise PermissionsError('Only admins can change subjects.')
old_subjects = list(self.subjects.values_list('id', flat=True))
self.subjects.clear()
for subj_list in new_subjects:
            subj_hierarchy = list(subj_list)
if subj_hierarchy:
validate_subject_hierarchy(subj_hierarchy)
for s_id in subj_hierarchy:
self.subjects.add(Subject.load(s_id))
if add_log and hasattr(self, 'add_log'):
self.add_log(
action=NodeLog.SUBJECTS_UPDATED,
params={
'subjects': list(self.subjects.values('_id', 'text')),
'old_subjects': list(Subject.objects.filter(id__in=old_subjects).values('_id', 'text'))
},
auth=auth,
save=False,
)
self.save(old_subjects=old_subjects)
|
binoculars/osf.io
|
osf/models/mixins.py
|
Python
|
apache-2.0
| 26,369 | 0.002237 |
# Copyright 2018 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.6+ and Openssl 1.0+
#
import socket
import glob
import mock
import traceback
import azurelinuxagent.common.osutil.default as osutil
import azurelinuxagent.common.utils.shellutil as shellutil
import azurelinuxagent.common.utils.textutil as textutil
from azurelinuxagent.common.exception import OSUtilError
from azurelinuxagent.common.future import ustr
from azurelinuxagent.common.osutil import get_osutil
from tests.tools import *
actual_get_proc_net_route = 'azurelinuxagent.common.osutil.default.DefaultOSUtil._get_proc_net_route'
def fake_is_loopback(_, iface):
return iface.startswith('lo')
def running_under_travis():
return 'TRAVIS' in os.environ and os.environ['TRAVIS'] == 'true'
class TestOSUtil(AgentTestCase):
def test_restart(self):
# setup
retries = 3
ifname = 'dummy'
with patch.object(shellutil, "run") as run_patch:
run_patch.return_value = 1
# execute
osutil.DefaultOSUtil.restart_if(osutil.DefaultOSUtil(), ifname=ifname, retries=retries, wait=0)
# assert
self.assertEqual(run_patch.call_count, retries)
self.assertEqual(run_patch.call_args_list[0][0][0], 'ifdown {0} && ifup {0}'.format(ifname))
def test_get_dvd_device_success(self):
with patch.object(os, 'listdir', return_value=['cpu', 'cdrom0']):
osutil.DefaultOSUtil().get_dvd_device()
def test_get_dvd_device_failure(self):
with patch.object(os, 'listdir', return_value=['cpu', 'notmatching']):
try:
osutil.DefaultOSUtil().get_dvd_device()
self.fail('OSUtilError was not raised')
except OSUtilError as ose:
self.assertTrue('notmatching' in ustr(ose))
@patch('time.sleep')
def test_mount_dvd_success(self, _):
msg = 'message'
with patch.object(osutil.DefaultOSUtil,
'get_dvd_device',
return_value='/dev/cdrom'):
with patch.object(shellutil,
'run_get_output',
return_value=(0, msg)) as patch_run:
with patch.object(os, 'makedirs'):
try:
osutil.DefaultOSUtil().mount_dvd()
except OSUtilError:
self.fail("mounting failed")
@patch('time.sleep')
def test_mount_dvd_failure(self, _):
msg = 'message'
with patch.object(osutil.DefaultOSUtil,
'get_dvd_device',
return_value='/dev/cdrom'):
with patch.object(shellutil,
'run_get_output',
return_value=(1, msg)) as patch_run:
with patch.object(os, 'makedirs'):
try:
osutil.DefaultOSUtil().mount_dvd()
self.fail('OSUtilError was not raised')
except OSUtilError as ose:
self.assertTrue(msg in ustr(ose))
self.assertTrue(patch_run.call_count == 6)
def test_empty_proc_net_route(self):
routing_table = ""
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
self.assertEqual(len(osutil.DefaultOSUtil().read_route_table()), 0)
def test_no_routes(self):
routing_table = 'Iface\tDestination\tGateway \tFlags\tRefCnt\tUse\tMetric\tMask\t\tMTU\tWindow\tIRTT \n'
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
raw_route_list = osutil.DefaultOSUtil().read_route_table()
self.assertEqual(len(osutil.DefaultOSUtil().get_list_of_routes(raw_route_list)), 0)
def test_bogus_proc_net_route(self):
routing_table = 'Iface\tDestination\tGateway \tFlags\t\tUse\tMetric\t\neth0\t00000000\t00000000\t0001\t\t0\t0\n'
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
raw_route_list = osutil.DefaultOSUtil().read_route_table()
self.assertEqual(len(osutil.DefaultOSUtil().get_list_of_routes(raw_route_list)), 0)
def test_valid_routes(self):
routing_table = \
'Iface\tDestination\tGateway \tFlags\tRefCnt\tUse\tMetric\tMask\t\tMTU\tWindow\tIRTT \n' \
'eth0\t00000000\tC1BB910A\t0003\t0\t0\t0\t00000000\t0\t0\t0 \n' \
'eth0\tC0BB910A\t00000000\t0001\t0\t0\t0\tC0FFFFFF\t0\t0\t0 \n' \
'eth0\t10813FA8\tC1BB910A\t000F\t0\t0\t0\tFFFFFFFF\t0\t0\t0 \n' \
'eth0\tFEA9FEA9\tC1BB910A\t0007\t0\t0\t0\tFFFFFFFF\t0\t0\t0 \n' \
'docker0\t002BA8C0\t00000000\t0001\t0\t0\t10\t00FFFFFF\t0\t0\t0 \n'
known_sha1_hash = b'\x1e\xd1k\xae[\xf8\x9b\x1a\x13\xd0\xbbT\xa4\xe3Y\xa3\xdd\x0b\xbd\xa9'
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
raw_route_list = osutil.DefaultOSUtil().read_route_table()
self.assertEqual(len(raw_route_list), 6)
self.assertEqual(textutil.hash_strings(raw_route_list), known_sha1_hash)
route_list = osutil.DefaultOSUtil().get_list_of_routes(raw_route_list)
self.assertEqual(len(route_list), 5)
self.assertEqual(route_list[0].gateway_quad(), '10.145.187.193')
self.assertEqual(route_list[1].gateway_quad(), '0.0.0.0')
self.assertEqual(route_list[1].mask_quad(), '255.255.255.192')
self.assertEqual(route_list[2].destination_quad(), '168.63.129.16')
self.assertEqual(route_list[1].flags, 1)
self.assertEqual(route_list[2].flags, 15)
self.assertEqual(route_list[3].flags, 7)
self.assertEqual(route_list[3].metric, 0)
self.assertEqual(route_list[4].metric, 10)
self.assertEqual(route_list[0].interface, 'eth0')
self.assertEqual(route_list[4].interface, 'docker0')
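    # Decoding note (hedged): /proc/net/route stores each address as a
    # little-endian hex dump, so the byte pairs read right-to-left, e.g.
    # gateway 'C1BB910A' -> 0A.91.BB.C1 -> '10.145.187.193', matching the
    # gateway_quad() assertion above.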
@patch('azurelinuxagent.common.osutil.default.DefaultOSUtil.get_primary_interface', return_value='eth0')
@patch('azurelinuxagent.common.osutil.default.DefaultOSUtil._get_all_interfaces', return_value={'eth0':'10.0.0.1'})
@patch('azurelinuxagent.common.osutil.default.DefaultOSUtil.is_loopback', fake_is_loopback)
def test_get_first_if(self, get_all_interfaces_mock, get_primary_interface_mock):
"""
Validate that the agent can find the first active non-loopback
interface.
This test case used to run live, but not all developers have an eth*
interface. It is perfectly valid to have a br*, but this test does not
account for that.
"""
ifname, ipaddr = osutil.DefaultOSUtil().get_first_if()
self.assertEqual(ifname, 'eth0')
self.assertEqual(ipaddr, '10.0.0.1')
@patch('azurelinuxagent.common.osutil.default.DefaultOSUtil.get_primary_interface', return_value='bogus0')
@patch('azurelinuxagent.common.osutil.default.DefaultOSUtil._get_all_interfaces', return_value={'eth0':'10.0.0.1', 'lo': '127.0.0.1'})
@patch('azurelinuxagent.common.osutil.default.DefaultOSUtil.is_loopback', fake_is_loopback)
def test_get_first_if_nosuchprimary(self, get_all_interfaces_mock, get_primary_interface_mock):
ifname, ipaddr = osutil.DefaultOSUtil().get_first_if()
self.assertTrue(ifname.startswith('eth'))
self.assertTrue(ipaddr is not None)
try:
socket.inet_aton(ipaddr)
except socket.error:
self.fail("not a valid ip address")
def test_get_first_if_all_loopback(self):
fake_ifaces = {'lo':'127.0.0.1'}
with patch.object(osutil.DefaultOSUtil, 'get_primary_interface', return_value='bogus0'):
with patch.object(osutil.DefaultOSUtil, '_get_all_interfaces', return_value=fake_ifaces):
self.assertEqual(('', ''), osutil.DefaultOSUtil().get_first_if())
def test_get_all_interfaces(self):
loopback_count = 0
non_loopback_count = 0
for iface in osutil.DefaultOSUtil()._get_all_interfaces():
if iface == 'lo':
loopback_count += 1
else:
non_loopback_count += 1
self.assertEqual(loopback_count, 1, 'Exactly 1 loopback network interface should exist')
        self.assertGreater(non_loopback_count, 0, 'At least 1 non-loopback network interface should exist')
def test_isloopback(self):
for iface in osutil.DefaultOSUtil()._get_all_interfaces():
if iface == 'lo':
self.assertTrue(osutil.DefaultOSUtil().is_loopback(iface))
else:
self.assertFalse(osutil.DefaultOSUtil().is_loopback(iface))
def test_isprimary(self):
routing_table = "\
Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT \n\
eth0 00000000 01345B0A 0003 0 0 5 00000000 0 0 0 \n\
eth0 00345B0A 00000000 0001 0 0 5 00000000 0 0 0 \n\
lo 00000000 01345B0A 0003 0 0 1 00FCFFFF 0 0 0 \n"
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
self.assertFalse(osutil.DefaultOSUtil().is_primary_interface('lo'))
self.assertTrue(osutil.DefaultOSUtil().is_primary_interface('eth0'))
def test_sriov(self):
routing_table = "\
Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT \n" \
"bond0 00000000 0100000A 0003 0 0 0 00000000 0 0 0 \n" \
"bond0 0000000A 00000000 0001 0 0 0 00000000 0 0 0 \n" \
"eth0 0000000A 00000000 0001 0 0 0 00000000 0 0 0 \n" \
"bond0 10813FA8 0100000A 0007 0 0 0 00000000 0 0 0 \n" \
"bond0 FEA9FEA9 0100000A 0007 0 0 0 00000000 0 0 0 \n"
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
self.assertFalse(osutil.DefaultOSUtil().is_primary_interface('eth0'))
self.assertTrue(osutil.DefaultOSUtil().is_primary_interface('bond0'))
def test_multiple_default_routes(self):
routing_table = "\
Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT \n\
high 00000000 01345B0A 0003 0 0 5 00000000 0 0 0 \n\
low1 00000000 01345B0A 0003 0 0 1 00FCFFFF 0 0 0 \n"
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
self.assertTrue(osutil.DefaultOSUtil().is_primary_interface('low1'))
def test_multiple_interfaces(self):
routing_table = "\
Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT \n\
first 00000000 01345B0A 0003 0 0 1 00000000 0 0 0 \n\
secnd 00000000 01345B0A 0003 0 0 1 00FCFFFF 0 0 0 \n"
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
self.assertTrue(osutil.DefaultOSUtil().is_primary_interface('first'))
def test_interface_flags(self):
routing_table = "\
Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT \n\
nflg 00000000 01345B0A 0001 0 0 1 00000000 0 0 0 \n\
flgs 00000000 01345B0A 0003 0 0 1 00FCFFFF 0 0 0 \n"
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
self.assertTrue(osutil.DefaultOSUtil().is_primary_interface('flgs'))
def test_no_interface(self):
routing_table = "\
Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT \n\
ndst 00000001 01345B0A 0003 0 0 1 00000000 0 0 0 \n\
nflg 00000000 01345B0A 0001 0 0 1 00FCFFFF 0 0 0 \n"
mo = mock.mock_open(read_data=routing_table)
with patch(open_patch(), mo):
self.assertFalse(osutil.DefaultOSUtil().is_primary_interface('ndst'))
self.assertFalse(osutil.DefaultOSUtil().is_primary_interface('nflg'))
self.assertFalse(osutil.DefaultOSUtil().is_primary_interface('invalid'))
def test_no_primary_does_not_throw(self):
with patch.object(osutil.DefaultOSUtil, 'get_primary_interface') \
as patch_primary:
exception = False
patch_primary.return_value = ''
try:
osutil.DefaultOSUtil().get_first_if()[0]
except Exception as e:
print(traceback.format_exc())
exception = True
self.assertFalse(exception)
def test_dhcp_lease_default(self):
self.assertTrue(osutil.DefaultOSUtil().get_dhcp_lease_endpoint() is None)
def test_dhcp_lease_ubuntu(self):
with patch.object(glob, "glob", return_value=['/var/lib/dhcp/dhclient.eth0.leases']):
with patch(open_patch(), mock.mock_open(read_data=load_data("dhcp.leases"))):
endpoint = get_osutil(distro_name='ubuntu', distro_version='12.04').get_dhcp_lease_endpoint()
self.assertTrue(endpoint is not None)
self.assertEqual(endpoint, "168.63.129.16")
endpoint = get_osutil(distro_name='ubuntu', distro_version='12.04').get_dhcp_lease_endpoint()
self.assertTrue(endpoint is not None)
self.assertEqual(endpoint, "168.63.129.16")
endpoint = get_osutil(distro_name='ubuntu', distro_version='14.04').get_dhcp_lease_endpoint()
self.assertTrue(endpoint is not None)
self.assertEqual(endpoint, "168.63.129.16")
def test_dhcp_lease_custom_dns(self):
"""
        Validate that the wireserver address comes from option 245.
        (On default configurations the address is also available in the
        domain-name-servers option, but users may set up a custom DNS server
        on their VNet.)
"""
with patch.object(glob, "glob", return_value=['/var/lib/dhcp/dhclient.eth0.leases']):
with patch(open_patch(), mock.mock_open(read_data=load_data("dhcp.leases.custom.dns"))):
endpoint = get_osutil(distro_name='ubuntu', distro_version='14.04').get_dhcp_lease_endpoint()
self.assertEqual(endpoint, "168.63.129.16")
def test_dhcp_lease_multi(self):
with patch.object(glob, "glob", return_value=['/var/lib/dhcp/dhclient.eth0.leases']):
with patch(open_patch(), mock.mock_open(read_data=load_data("dhcp.leases.multi"))):
endpoint = get_osutil(distro_name='ubuntu', distro_version='12.04').get_dhcp_lease_endpoint()
self.assertTrue(endpoint is not None)
self.assertEqual(endpoint, "168.63.129.2")
def test_get_total_mem(self):
"""
        Validate that the returned value matches the one retrieved by invoking a shell command
"""
cmd = "grep MemTotal /proc/meminfo |awk '{print $2}'"
ret = shellutil.run_get_output(cmd)
if ret[0] == 0:
self.assertEqual(int(ret[1]) / 1024, get_osutil().get_total_mem())
else:
self.fail("Cannot retrieve total memory using shell command.")
def test_get_processor_cores(self):
"""
        Validate that the returned value matches the one retrieved by invoking a shell command
"""
cmd = "grep 'processor.*:' /proc/cpuinfo |wc -l"
ret = shellutil.run_get_output(cmd)
if ret[0] == 0:
self.assertEqual(int(ret[1]), get_osutil().get_processor_cores())
else:
self.fail("Cannot retrieve number of process cores using shell command.")
def test_conf_sshd(self):
new_file = "\
Port 22\n\
Protocol 2\n\
ChallengeResponseAuthentication yes\n\
#PasswordAuthentication yes\n\
UsePAM yes\n\
"
expected_output = "\
Port 22\n\
Protocol 2\n\
ChallengeResponseAuthentication no\n\
#PasswordAuthentication yes\n\
UsePAM yes\n\
PasswordAuthentication no\n\
ClientAliveInterval 180\n\
"
with patch.object(fileutil, 'write_file') as patch_write:
with patch.object(fileutil, 'read_file', return_value=new_file):
osutil.DefaultOSUtil().conf_sshd(disable_password=True)
patch_write.assert_called_once_with(
conf.get_sshd_conf_file_path(),
expected_output)
def test_conf_sshd_with_match(self):
new_file = "\
Port 22\n\
ChallengeResponseAuthentication yes\n\
Match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
"
expected_output = "\
Port 22\n\
ChallengeResponseAuthentication no\n\
PasswordAuthentication no\n\
ClientAliveInterval 180\n\
Match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
"
with patch.object(fileutil, 'write_file') as patch_write:
with patch.object(fileutil, 'read_file', return_value=new_file):
osutil.DefaultOSUtil().conf_sshd(disable_password=True)
patch_write.assert_called_once_with(
conf.get_sshd_conf_file_path(),
expected_output)
def test_conf_sshd_with_match_last(self):
new_file = "\
Port 22\n\
Match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
"
expected_output = "\
Port 22\n\
PasswordAuthentication no\n\
ChallengeResponseAuthentication no\n\
ClientAliveInterval 180\n\
Match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
"
with patch.object(fileutil, 'write_file') as patch_write:
with patch.object(fileutil, 'read_file', return_value=new_file):
osutil.DefaultOSUtil().conf_sshd(disable_password=True)
patch_write.assert_called_once_with(
conf.get_sshd_conf_file_path(),
expected_output)
def test_conf_sshd_with_match_middle(self):
new_file = "\
Port 22\n\
match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
match all\n\
#Other config\n\
"
expected_output = "\
Port 22\n\
match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
match all\n\
#Other config\n\
PasswordAuthentication no\n\
ChallengeResponseAuthentication no\n\
ClientAliveInterval 180\n\
"
with patch.object(fileutil, 'write_file') as patch_write:
with patch.object(fileutil, 'read_file', return_value=new_file):
osutil.DefaultOSUtil().conf_sshd(disable_password=True)
patch_write.assert_called_once_with(
conf.get_sshd_conf_file_path(),
expected_output)
def test_conf_sshd_with_match_multiple(self):
new_file = "\
Port 22\n\
Match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
Match host 192.168.1.2\n\
ChallengeResponseAuthentication yes\n\
Match all\n\
#Other config\n\
"
expected_output = "\
Port 22\n\
Match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
Match host 192.168.1.2\n\
ChallengeResponseAuthentication yes\n\
Match all\n\
#Other config\n\
PasswordAuthentication no\n\
ChallengeResponseAuthentication no\n\
ClientAliveInterval 180\n\
"
with patch.object(fileutil, 'write_file') as patch_write:
with patch.object(fileutil, 'read_file', return_value=new_file):
osutil.DefaultOSUtil().conf_sshd(disable_password=True)
patch_write.assert_called_once_with(
conf.get_sshd_conf_file_path(),
expected_output)
def test_conf_sshd_with_match_multiple_first_last(self):
new_file = "\
Match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
Match host 192.168.1.2\n\
ChallengeResponseAuthentication yes\n\
"
expected_output = "\
PasswordAuthentication no\n\
ChallengeResponseAuthentication no\n\
ClientAliveInterval 180\n\
Match host 192.168.1.1\n\
ChallengeResponseAuthentication yes\n\
Match host 192.168.1.2\n\
ChallengeResponseAuthentication yes\n\
"
with patch.object(fileutil, 'write_file') as patch_write:
with patch.object(fileutil, 'read_file', return_value=new_file):
osutil.DefaultOSUtil().conf_sshd(disable_password=True)
patch_write.assert_called_once_with(
conf.get_sshd_conf_file_path(),
expected_output)
def test_correct_instance_id(self):
util = osutil.DefaultOSUtil()
self.assertEqual(
"12345678-1234-1234-1234-123456789012",
util._correct_instance_id("78563412-3412-3412-1234-123456789012"))
self.assertEqual(
"D0DF4C54-4ECB-4A4B-9954-5BDF3ED5C3B8",
util._correct_instance_id("544CDFD0-CB4E-4B4A-9954-5BDF3ED5C3B8"))
@patch('os.path.isfile', return_value=True)
@patch('azurelinuxagent.common.utils.fileutil.read_file',
return_value="33C2F3B9-1399-429F-8EB3-BA656DF32502")
def test_get_instance_id_from_file(self, mock_read, mock_isfile):
util = osutil.DefaultOSUtil()
self.assertEqual(
util.get_instance_id(),
"B9F3C233-9913-9F42-8EB3-BA656DF32502")
@patch('os.path.isfile', return_value=True)
@patch('azurelinuxagent.common.utils.fileutil.read_file',
return_value="")
def test_get_instance_id_empty_from_file(self, mock_read, mock_isfile):
util = osutil.DefaultOSUtil()
self.assertEqual(
"",
util.get_instance_id())
@patch('os.path.isfile', return_value=True)
@patch('azurelinuxagent.common.utils.fileutil.read_file',
return_value="Value")
def test_get_instance_id_malformed_from_file(self, mock_read, mock_isfile):
util = osutil.DefaultOSUtil()
self.assertEqual(
"Value",
util.get_instance_id())
@patch('os.path.isfile', return_value=False)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output',
return_value=[0, '33C2F3B9-1399-429F-8EB3-BA656DF32502'])
def test_get_instance_id_from_dmidecode(self, mock_shell, mock_isfile):
util = osutil.DefaultOSUtil()
self.assertEqual(
util.get_instance_id(),
"B9F3C233-9913-9F42-8EB3-BA656DF32502")
@patch('os.path.isfile', return_value=False)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output',
return_value=[1, 'Error Value'])
def test_get_instance_id_missing(self, mock_shell, mock_isfile):
util = osutil.DefaultOSUtil()
self.assertEqual("", util.get_instance_id())
@patch('os.path.isfile', return_value=False)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output',
return_value=[0, 'Unexpected Value'])
def test_get_instance_id_unexpected(self, mock_shell, mock_isfile):
util = osutil.DefaultOSUtil()
self.assertEqual("", util.get_instance_id())
@patch('os.path.isfile', return_value=True)
@patch('azurelinuxagent.common.utils.fileutil.read_file')
def test_is_current_instance_id_from_file(self, mock_read, mock_isfile):
util = osutil.DefaultOSUtil()
mock_read.return_value = "B9F3C233-9913-9F42-8EB3-BA656DF32502"
self.assertTrue(util.is_current_instance_id(
"B9F3C233-9913-9F42-8EB3-BA656DF32502"))
mock_read.return_value = "33C2F3B9-1399-429F-8EB3-BA656DF32502"
self.assertTrue(util.is_current_instance_id(
"B9F3C233-9913-9F42-8EB3-BA656DF32502"))
@patch('os.path.isfile', return_value=False)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
def test_is_current_instance_id_from_dmidecode(self, mock_shell, mock_isfile):
util = osutil.DefaultOSUtil()
mock_shell.return_value = [0, 'B9F3C233-9913-9F42-8EB3-BA656DF32502']
self.assertTrue(util.is_current_instance_id(
"B9F3C233-9913-9F42-8EB3-BA656DF32502"))
mock_shell.return_value = [0, '33C2F3B9-1399-429F-8EB3-BA656DF32502']
self.assertTrue(util.is_current_instance_id(
"B9F3C233-9913-9F42-8EB3-BA656DF32502"))
@patch('azurelinuxagent.common.conf.get_sudoers_dir')
def test_conf_sudoer(self, mock_dir):
tmp_dir = tempfile.mkdtemp()
mock_dir.return_value = tmp_dir
util = osutil.DefaultOSUtil()
# Assert the sudoer line is added if missing
util.conf_sudoer("FooBar")
waagent_sudoers = os.path.join(tmp_dir, 'waagent')
self.assertTrue(os.path.isfile(waagent_sudoers))
count = -1
with open(waagent_sudoers, 'r') as f:
count = len(f.readlines())
self.assertEqual(1, count)
# Assert the line does not get added a second time
util.conf_sudoer("FooBar")
count = -1
with open(waagent_sudoers, 'r') as f:
count = len(f.readlines())
print("WRITING TO {0}".format(waagent_sudoers))
self.assertEqual(1, count)
def test_get_firewall_dropped_packets_returns_zero_if_firewall_disabled(self):
osutil._enable_firewall = False
util = osutil.DefaultOSUtil()
self.assertEqual(0, util.get_firewall_dropped_packets("not used"))
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
def test_get_firewall_dropped_packets_returns_negative_if_error(self, mock_output):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
mock_output.side_effect = [
(0, "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)),
(1, "not used")]
self.assertEqual(-1, util.get_firewall_dropped_packets("not used"))
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
def test_get_firewall_dropped_packets_returns_negative_if_exception(self, mock_output):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
mock_output.side_effect = [
(0, "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)),
(1, Exception)]
self.assertEqual(-1, util.get_firewall_dropped_packets("not used"))
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
def test_get_firewall_dropped_packets_transient_error_ignored(self, mock_output):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
mock_output.side_effect = [
(0, "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)),
(3, "can't initialize iptables table `security': iptables who? (do you need to insmod?)")]
self.assertEqual(0, util.get_firewall_dropped_packets("not used"))
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
def test_get_firewall_dropped_packets(self, mock_output):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
mock_output.side_effect = [
(0, "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)),
(0,
'''
Chain OUTPUT (policy ACCEPT 104 packets, 43628 bytes)
pkts bytes target prot opt in out source destination
0 0 ACCEPT tcp -- any any anywhere 168.63.129.16 owner UID match daemon
32 1920 DROP tcp -- any any anywhere 168.63.129.16
''')]
dst = '168.63.129.16'
self.assertEqual(32, util.get_firewall_dropped_packets(dst))
@patch('os.getuid', return_value=42)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
@patch('azurelinuxagent.common.utils.shellutil.run')
def test_enable_firewall(self, mock_run, mock_output, mock_uid):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
dst = '1.2.3.4'
uid = 42
version = "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)
wait = "-w"
mock_run.side_effect = [1, 0, 0]
mock_output.side_effect = [(0, version), (0, "Output")]
self.assertTrue(util.enable_firewall(dst_ip=dst, uid=uid))
mock_run.assert_has_calls([
call(osutil.FIREWALL_DROP.format(wait, "C", dst), chk_err=False),
call(osutil.FIREWALL_ACCEPT.format(wait, "A", dst, uid)),
call(osutil.FIREWALL_DROP.format(wait, "A", dst))
])
mock_output.assert_has_calls([
call(osutil.IPTABLES_VERSION),
call(osutil.FIREWALL_LIST.format(wait))
])
self.assertTrue(osutil._enable_firewall)
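    # Sequencing note (hedged): the first run() is the '-C' existence check
    # for the DROP rule; its non-zero exit (the leading 1 in side_effect)
    # means the rule is absent, so the ACCEPT and DROP rules are appended
    # ('A'), consuming the remaining [0, 0].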
@patch('os.getuid', return_value=42)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
@patch('azurelinuxagent.common.utils.shellutil.run')
def test_enable_firewall_no_wait(self, mock_run, mock_output, mock_uid):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
dst = '1.2.3.4'
uid = 42
version = "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION-1)
wait = ""
mock_run.side_effect = [1, 0, 0]
mock_output.side_effect = [(0, version), (0, "Output")]
self.assertTrue(util.enable_firewall(dst_ip=dst, uid=uid))
mock_run.assert_has_calls([
call(osutil.FIREWALL_DROP.format(wait, "C", dst), chk_err=False),
call(osutil.FIREWALL_ACCEPT.format(wait, "A", dst, uid)),
call(osutil.FIREWALL_DROP.format(wait, "A", dst))
])
mock_output.assert_has_calls([
call(osutil.IPTABLES_VERSION),
call(osutil.FIREWALL_LIST.format(wait))
])
self.assertTrue(osutil._enable_firewall)
@patch('os.getuid', return_value=42)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
@patch('azurelinuxagent.common.utils.shellutil.run')
def test_enable_firewall_skips_if_drop_exists(self, mock_run, mock_output, mock_uid):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
dst = '1.2.3.4'
uid = 42
version = "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)
wait = "-w"
mock_run.side_effect = [0, 0, 0]
mock_output.return_value = (0, version)
self.assertTrue(util.enable_firewall(dst_ip=dst, uid=uid))
mock_run.assert_has_calls([
call(osutil.FIREWALL_DROP.format(wait, "C", dst), chk_err=False),
])
mock_output.assert_has_calls([
call(osutil.IPTABLES_VERSION)
])
self.assertTrue(osutil._enable_firewall)
@patch('os.getuid', return_value=42)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
@patch('azurelinuxagent.common.utils.shellutil.run')
def test_enable_firewall_ignores_exceptions(self, mock_run, mock_output, mock_uid):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
dst = '1.2.3.4'
uid = 42
version = "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)
wait = "-w"
mock_run.side_effect = [1, Exception]
mock_output.return_value = (0, version)
self.assertFalse(util.enable_firewall(dst_ip=dst, uid=uid))
mock_run.assert_has_calls([
call(osutil.FIREWALL_DROP.format(wait, "C", dst), chk_err=False),
call(osutil.FIREWALL_ACCEPT.format(wait, "A", dst, uid))
])
mock_output.assert_has_calls([
call(osutil.IPTABLES_VERSION)
])
self.assertFalse(osutil._enable_firewall)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
@patch('azurelinuxagent.common.utils.shellutil.run')
def test_enable_firewall_checks_for_invalid_iptables_options(self, mock_run, mock_output):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
dst = '1.2.3.4'
version = "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)
wait = "-w"
# iptables uses the following exit codes
# 0 - correct function
# 1 - other errors
# 2 - errors which appear to be caused by invalid or abused command
# line parameters
mock_run.side_effect = [2]
mock_output.return_value = (0, version)
self.assertFalse(util.enable_firewall(dst_ip='1.2.3.4', uid=42))
self.assertFalse(osutil._enable_firewall)
mock_run.assert_has_calls([
call(osutil.FIREWALL_DROP.format(wait, "C", dst), chk_err=False),
])
mock_output.assert_has_calls([
call(osutil.IPTABLES_VERSION)
])
@patch('os.getuid', return_value=42)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
@patch('azurelinuxagent.common.utils.shellutil.run')
def test_enable_firewall_skips_if_disabled(self, mock_run, mock_output, mock_uid):
osutil._enable_firewall = False
util = osutil.DefaultOSUtil()
dst = '1.2.3.4'
uid = 42
version = "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)
mock_run.side_effect = [1, 0, 0]
mock_output.side_effect = [(0, version), (0, "Output")]
self.assertFalse(util.enable_firewall(dst_ip=dst, uid=uid))
mock_run.assert_not_called()
mock_output.assert_not_called()
mock_uid.assert_not_called()
self.assertFalse(osutil._enable_firewall)
@patch('os.getuid', return_value=42)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
@patch('azurelinuxagent.common.utils.shellutil.run')
def test_remove_firewall(self, mock_run, mock_output, mock_uid):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
dst = '1.2.3.4'
uid = 42
version = "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)
wait = "-w"
mock_run.side_effect = [0, 1, 0, 1, 0, 1]
mock_output.side_effect = [(0, version), (0, "Output")]
self.assertTrue(util.remove_firewall(dst, uid))
mock_run.assert_has_calls([
# delete rules < 2.2.26
call(osutil.FIREWALL_DELETE_CONNTRACK_ACCEPT.format(wait, dst), chk_err=False),
call(osutil.FIREWALL_DELETE_CONNTRACK_ACCEPT.format(wait, dst), chk_err=False),
call(osutil.FIREWALL_DELETE_OWNER_ACCEPT.format(wait, dst, uid), chk_err=False),
call(osutil.FIREWALL_DELETE_OWNER_ACCEPT.format(wait, dst, uid), chk_err=False),
# delete rules >= 2.2.26
call(osutil.FIREWALL_DELETE_CONNTRACK_DROP.format(wait, dst), chk_err=False),
call(osutil.FIREWALL_DELETE_CONNTRACK_DROP.format(wait, dst), chk_err=False),
])
mock_output.assert_has_calls([
call(osutil.IPTABLES_VERSION)
])
self.assertTrue(osutil._enable_firewall)
@patch('os.getuid', return_value=42)
@patch('azurelinuxagent.common.utils.shellutil.run_get_output')
@patch('azurelinuxagent.common.utils.shellutil.run')
def test_remove_firewall_does_not_repeat(self, mock_run, mock_output, _):
osutil._enable_firewall = True
util = osutil.DefaultOSUtil()
        dst_ip = '1.2.3.4'
        uid = 42
version = "iptables v{0}".format(osutil.IPTABLES_LOCKING_VERSION)
wait = "-w"
mock_run.side_effect = [2]
mock_output.side_effect = [(0, version), (1, "Output")]
self.assertFalse(util.remove_firewall(dst_ip, uid))
mock_run.assert_has_calls([
call(osutil.FIREWALL_DELETE_CONNTRACK_ACCEPT.format(wait, dst_ip), chk_err=False),
])
mock_output.assert_has_calls([
call(osutil.IPTABLES_VERSION)
])
self.assertFalse(osutil._enable_firewall)
        self.assertEqual(1, mock_run.call_count)
        self.assertEqual(1, mock_output.call_count)
self.assertFalse(util.remove_firewall())
self.assertFalse(util.remove_firewall())
        self.assertEqual(1, mock_run.call_count)
        self.assertEqual(1, mock_output.call_count)
@skip_if_predicate_true(running_under_travis, "The ip command isn't available in Travis")
def test_get_nic_state(self):
state = osutil.DefaultOSUtil().get_nic_state()
self.assertNotEqual(state, {})
self.assertGreater(len(state.keys()), 1)
another_state = osutil.DefaultOSUtil().get_nic_state()
name = list(another_state.keys())[0]
another_state[name].add_ipv4("xyzzy")
self.assertNotEqual(state, another_state)
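# Illustrative sketch, not part of the agent's test suite: the firewall tests
# above drive the shellutil mocks through Mock.side_effect sequences. When
# side_effect is a list, each call to the mock consumes the next item in
# order, and an item that is an exception is raised instead of returned.
def _mock_side_effect_demo():
    try:
        from unittest.mock import Mock  # Python 3
    except ImportError:
        from mock import Mock  # Python 2 backport
    m = Mock(side_effect=[(0, "iptables v1.4.21"), (2, "bad parameters")])
    assert m() == (0, "iptables v1.4.21")  # first call -> first item
    assert m() == (2, "bad parameters")    # second call -> second item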
if __name__ == '__main__':
unittest.main()
|
hglkrijger/WALinuxAgent
|
tests/common/osutil/test_default.py
|
Python
|
apache-2.0
| 36,933 | 0.002626 |
# Patchwork - automated patch tracking system
# Copyright (C) 2016 Linaro Corporation
#
# SPDX-License-Identifier: GPL-2.0-or-later
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework.views import APIView
class IndexView(APIView):
def get(self, request, *args, **kwargs):
"""List API resources."""
return Response({
'projects': reverse('api-project-list', request=request),
'users': reverse('api-user-list', request=request),
'people': reverse('api-person-list', request=request),
'patches': reverse('api-patch-list', request=request),
'covers': reverse('api-cover-list', request=request),
'series': reverse('api-series-list', request=request),
'events': reverse('api-event-list', request=request),
'bundles': reverse('api-bundle-list', request=request),
})
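# Illustrative only (not part of Patchwork): the index view exists so clients
# can discover resources by following hyperlinks instead of hardcoding paths.
# Assumes the third-party ``requests`` package and a reachable instance at
# ``base_url`` (e.g. 'https://patchwork.example.org/api/').
def _discover_api(base_url):
    import requests
    index = requests.get(base_url).json()
    # follow the advertised link rather than constructing '/patches/' by hand
    return requests.get(index['patches']).json()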
|
stephenfin/patchwork
|
patchwork/api/index.py
|
Python
|
gpl-2.0
| 942 | 0 |
import unittest
from unittest.mock import Mock
class Mailer:
def send_email(self, email, message):
raise NotImplementedError("Not implemented yet")
class DB:
def insert_user(self, user):
raise NotImplementedError("Not implemented yet")
class User:
def __init__(self, email, name):
self.email = email
self.name = name
def registerUser(email, name, db, mailer):
user = User(email, name)
db.insert_user(user)
mailer.send_email(user.email, "Welcome")
return user
class MockTest(unittest.TestCase):
TEST_EMAIL = 'student@campus.uib.es'
TEST_NAME = 'Student'
def testRegisterUser(self):
mock_db = Mock(DB)
mock_mailer = Mock(Mailer)
user = registerUser(self.TEST_EMAIL, self.TEST_NAME, mock_db, mock_mailer)
mock_db.insert_user.assert_called_once_with(user)
mock_mailer.send_email.assert_called_once_with(self.TEST_EMAIL, "Welcome")
self.assertIsInstance(user, User)
self.assertEqual(user.email, self.TEST_EMAIL)
self.assertEqual(user.name, self.TEST_NAME)
def testRegisterUserThrowsNotImplemented(self):
with self.assertRaises(NotImplementedError):
user = registerUser(self.TEST_EMAIL, self.TEST_NAME, DB(), Mailer())
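# Note: Mock(DB) and Mock(Mailer) pass the class as the mock's spec, so only
# attributes of the real API can be accessed -- e.g. mock_db.no_such_method
# raises AttributeError instead of silently returning a child mock, which
# catches typos that a bare Mock() would absorb.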
if __name__ == '__main__':
unittest.main()
|
jordillull/unit-tests-uib-2015
|
code_sample/python/mock_finished.py
|
Python
|
mit
| 1,336 | 0.005988 |
import xbmc, xbmcaddon, xbmcgui, xbmcplugin,os,base64,sys,xbmcvfs
import urllib2,urllib
import zipfile
import extract
import downloader
import re
import time
import common as Common
import wipe
import plugintools
from random import randint
USERDATA = xbmc.translatePath(os.path.join('special://home/userdata',''))
ADDON = xbmc.translatePath(os.path.join('special://home/addons/plugin.program.jogosEmuladores',''))
CHECKVERSION = os.path.join(USERDATA,'version.txt')
KIDS = os.path.join(USERDATA,'kids.txt')
PROFILE = os.path.join(USERDATA,'profiles.xml')
LOCK = os.path.join(USERDATA,'lock.txt')
NOTICE = os.path.join(ADDON,'notice.txt')
WIPE = xbmc.translatePath('special://home/wipe.xml')
CLEAN = xbmc.translatePath('special://home/clean.xml')
my_addon = xbmcaddon.Addon()
dp = xbmcgui.DialogProgress()
checkver=my_addon.getSetting('checkupdates')
dialog = xbmcgui.Dialog()
AddonTitle="[COLOR ghostwhite]Project X[/COLOR] [COLOR lightsteelblue]Wizard[/COLOR]"
GoogleOne = "http://www.google.com"
GoogleTwo = "http://www.google.co.uk"
JarvisUpdate = 0
KryptonUpdate = 0
BetaUpdate = 0
check = plugintools.get_setting("checkupdates")
auto = plugintools.get_setting("autoupdates")
addonupdate = plugintools.get_setting("updaterepos")
if xbmc.getCondVisibility('system.platform.ios') or xbmc.getCondVisibility('system.platform.osx'):
LoginServer = "http://www.projectxwizard/login.php"
JarvisOne = "http://projectxwizard.netne.net/ProjectXwizard/JarvisOne.xml"
JarvisTwo = "http://projectxwizard.netne.net/ProjectXwizard/JarvisTwo.xml"
KryptonOne = "http://projectxwizard.netne.net/ProjectXwizard/KryptonOne.xml"
KryptonTwo = "http://projectxwizard.netne.net/ProjectXwizard/KryptonTwo.xml"
BetaOne = "http://projectxwizard.netne.net/ProjectXwizard/BetaOne.xml"
BetaTwo = "http://projectxwizard.netne.net/ProjectXwizard/BetaTwo.xml"
else:
LoginServer = "http://www.projectxwizard/login.php"
JarvisOne = "http://projectxwizard.netne.net/ProjectXwizard/JarvisOne.xml"
JarvisTwo = "http://projectxwizard.netne.net/ProjectXwizard/JarvisTwo.xml"
KryptonOne = "http://projectxwizard.netne.net/ProjectXwizard/KryptonOne.xml"
KryptonTwo = "http://projectxwizard.netne.net/ProjectXwizard/KryptonTwo.xml"
BetaOne = "http://projectxwizard.netne.net/ProjectXwizard/BetaOne.xml"
BetaTwo = "http://projectxwizard.netne.net/ProjectXwizard/BetaTwo.xml"
COMP = "http://kodiapps.com/how-to-install-Project X-build-on-kodi"
if auto == 'true':
check = 'true'
if os.path.exists(WIPE):
choice = xbmcgui.Dialog().yesno(AddonTitle, '[COLOR slategray]A system reset has been successfully performed.[/COLOR]','Your device has now returned to factory settings.','[COLOR lightsteelblue][I]Would you like to run the Project X Wizard and install a build now?[/COLOR][/I]', yeslabel='[COLOR green][B]YES[/B][/COLOR]',nolabel='[COLOR red][B]NO[/B][/COLOR]')
if choice == 1:
os.remove(WIPE)
xbmc.executebuiltin("RunAddon(plugin.program.jogosEmuladores)")
else:
os.remove(WIPE)
time.sleep(5)
if os.path.exists(NOTICE):
if os.path.exists(CHECKVERSION):
dialog.ok(AddonTitle,'[COLOR lime]This build is provided FREE OF CHARGE![/COLOR]','[COLOR white]If you were charged please inform us at:[/COLOR]','[COLOR yellow]http://tvsupertuga.forum-gratuito.com/[/COLOR]')
os.remove(NOTICE)
def Open_URL(url):
req = urllib2.Request(url)
req.add_header('User-Agent','Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
response = urllib2.urlopen(req)
link = response.read()
response.close()
return link.replace('\r','').replace('\n','').replace('\t','')
if (randint(1,6) == 5):
try:
Open_URL(COMP)
except:
pass
nointernet = 0
isplaying = 0
if isplaying == 0:
try:
Open_URL(GoogleOne)
except:
try:
Open_URL(GoogleTwo)
except:
dialog.ok(AddonTitle,'Sorry we are unable to check for updates!','The device is not connected to the internet','Please check your connection settings.')
nointernet = 1
pass
try:
response = urllib2.urlopen(JarvisTwo)
except:
JarvisUpdate = 1
try:
response = urllib2.urlopen(KryptonTwo)
except:
KryptonUpdate = 1
try:
response = urllib2.urlopen(BetaTwo)
except:
BetaUpdate = 1
if nointernet == 0 and JarvisUpdate == 0:
if auto == 'true':
if os.path.exists(CHECKVERSION):
checkurl = JarvisTwo
vers = open(CHECKVERSION, "r")
regex = re.compile(r'<build>(.+?)</build><version>(.+?)</version>')
for line in vers:
currversion = regex.findall(line)
for build,vernumber in currversion:
if vernumber > 0:
req = urllib2.Request(checkurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><version>(.+?)</version><fresh>(.+?)</fresh>').findall(link)
for newversion,fresh in match:
if fresh =='false': # TRUE
if newversion > vernumber:
updateurl = JarvisOne
req = urllib2.Request(updateurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><url>(.+?)</url>').findall(link)
for url in match:
path = xbmc.translatePath(os.path.join('special://home/addons','packages'))
name = "build"
lib=os.path.join(path, name+'.zip')
try:
os.remove(lib)
except:
pass
downloader.auto(url, lib)
addonfolder = xbmc.translatePath(os.path.join('special://','home'))
time.sleep(2)
unzip(lib,addonfolder)
sys.exit(1)
if nointernet == 0 and KryptonUpdate == 0:
if auto == 'true':
if os.path.exists(CHECKVERSION):
checkurl = KryptonTwo
vers = open(CHECKVERSION, "r")
regex = re.compile(r'<build>(.+?)</build><version>(.+?)</version>')
for line in vers:
currversion = regex.findall(line)
for build,vernumber in currversion:
if vernumber > 0:
req = urllib2.Request(checkurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><version>(.+?)</version><fresh>(.+?)</fresh>').findall(link)
for newversion,fresh in match:
if fresh =='false': # TRUE
if newversion > vernumber:
updateurl = KryptonOne
req = urllib2.Request(updateurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><url>(.+?)</url>').findall(link)
for url in match:
path = xbmc.translatePath(os.path.join('special://home/addons','packages'))
name = "build"
lib=os.path.join(path, name+'.zip')
try:
os.remove(lib)
except:
pass
downloader.auto(url, lib)
addonfolder = xbmc.translatePath(os.path.join('special://','home'))
time.sleep(2)
unzip(lib,addonfolder)
sys.exit(1)
if nointernet == 0 and BetaUpdate == 0:
if auto == 'true':
if os.path.exists(CHECKVERSION):
checkurl = BetaTwo
vers = open(CHECKVERSION, "r")
regex = re.compile(r'<build>(.+?)</build><version>(.+?)</version>')
for line in vers:
currversion = regex.findall(line)
for build,vernumber in currversion:
if vernumber > 0:
req = urllib2.Request(checkurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><version>(.+?)</version><fresh>(.+?)</fresh>').findall(link)
for newversion,fresh in match:
if fresh =='false': # TRUE
if newversion > vernumber:
updateurl = BetaOne
req = urllib2.Request(updateurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><url>(.+?)</url>').findall(link)
for url in match:
path = xbmc.translatePath(os.path.join('special://home/addons','packages'))
name = "build"
lib=os.path.join(path, name+'.zip')
try:
os.remove(lib)
except:
pass
downloader.auto(url, lib)
addonfolder = xbmc.translatePath(os.path.join('special://','home'))
time.sleep(2)
unzip(lib,addonfolder)
sys.exit(1)
if nointernet == 0 and JarvisUpdate == 0:
if check == 'true':
if os.path.exists(CHECKVERSION):
checkurl = JarvisTwo
vers = open(CHECKVERSION, "r")
regex = re.compile(r'<build>(.+?)</build><version>(.+?)</version>')
for line in vers:
currversion = regex.findall(line)
for build,vernumber in currversion:
if vernumber > 0:
req = urllib2.Request(checkurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
dialog.ok(AddonTitle,'Sorry we are unable to check for [B]JARVIS[/B] updates!','The update host appears to be down.','Please check for updates later via the wizard.')
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><version>(.+?)</version><fresh>(.+?)</fresh>').findall(link)
for newversion,fresh in match:
if newversion > vernumber:
if fresh =='false': # TRUE
choice = xbmcgui.Dialog().yesno("NEW UPDATE AVAILABLE", 'Found a new update for the Build', build + " ver: "+newversion, 'Do you want to install it now?', yeslabel='[B][COLOR green]YES[/COLOR][/B]',nolabel='[B][COLOR red]NO[/COLOR][/B]')
if choice == 1:
updateurl = JarvisOne
req = urllib2.Request(updateurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
dialog.ok(AddonTitle,'Sorry we were unable to download the update!','The update host appears to be down.','Please check for updates later via the wizard.')
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><url>(.+?)</url>').findall(link)
for url in match:
path = xbmc.translatePath(os.path.join('special://home/addons','packages'))
name = "build"
dp = xbmcgui.DialogProgress()
dp.create(AddonTitle,"Downloading ",'', 'Please Wait')
lib=os.path.join(path, name+'.zip')
try:
os.remove(lib)
except:
pass
downloader.download(url, lib, dp)
addonfolder = xbmc.translatePath(os.path.join('special://home','userdata'))
time.sleep(2)
dp.update(0,"", "Extracting Zip Please Wait")
unzipprogress(lib,addonfolder,dp)
dialog = xbmcgui.Dialog()
dialog.ok(AddonTitle, "To save changes you now need to force close Kodi, Press OK to force close Kodi")
Common.killxbmc()
else:
dialog.ok('[COLOR red]A WIPE is required for the update[/COLOR]','Select the [COLOR green]YES[/COLOR] option in the NEXT WINDOW to wipe now.','Select the [COLOR red]NO[/COLOR] option in the NEXT WINDOW to update later.','[I][COLOR snow]If you wish to update later you can do so in [/COLOR][COLOR blue]Project X[/COLOR] [COLOR lime]Wizard[/COLOR][/I]')
wipe.FRESHSTART()
if nointernet == 0 and KryptonUpdate == 0:
if check == 'true':
if os.path.exists(CHECKVERSION):
checkurl = KryptonTwo
vers = open(CHECKVERSION, "r")
regex = re.compile(r'<build>(.+?)</build><version>(.+?)</version>')
for line in vers:
currversion = regex.findall(line)
for build,vernumber in currversion:
if vernumber > 0:
req = urllib2.Request(checkurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
dialog.ok(AddonTitle,'Sorry we are unable to check for [B]KRYPTON[/B] updates!','The update host appears to be down.','Please check for updates later via the wizard.')
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><version>(.+?)</version><fresh>(.+?)</fresh>').findall(link)
for newversion,fresh in match:
if newversion > vernumber:
if fresh =='false': # TRUE
choice = xbmcgui.Dialog().yesno("NEW UPDATE AVAILABLE", 'Found a new update for the Build', build + " ver: "+newversion, 'Do you want to install it now?', yeslabel='[B][COLOR green]YES[/COLOR][/B]',nolabel='[B][COLOR red]NO[/COLOR][/B]')
if choice == 1:
updateurl = KryptonOne
req = urllib2.Request(updateurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
dialog.ok(AddonTitle,'Sorry we were unable to download the update.','The update host appears to be down.','Please check for updates later via the wizard.')
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><url>(.+?)</url>').findall(link)
for url in match:
path = xbmc.translatePath(os.path.join('special://home/addons','packages'))
name = "build"
dp = xbmcgui.DialogProgress()
dp.create(AddonTitle,"Downloading ",'', 'Please Wait')
lib=os.path.join(path, name+'.zip')
try:
os.remove(lib)
except:
pass
downloader.download(url, lib, dp)
addonfolder = xbmc.translatePath(os.path.join('special://','home'))
time.sleep(2)
dp.update(0,"", "Extracting Zip Please Wait")
unzipprogress(lib,addonfolder,dp)
dialog = xbmcgui.Dialog()
dialog.ok(AddonTitle, "To save changes you now need to force close Kodi, Press OK to force close Kodi")
Common.killxbmc()
else:
dialog.ok('[COLOR red]A WIPE is required for the update[/COLOR]','Select the [COLOR green]YES[/COLOR] option in the NEXT WINDOW to wipe now.','Select the [COLOR red]NO[/COLOR] option in the NEXT WINDOW to update later.','[I][COLOR snow]If you wish to update later you can do so in [/COLOR][COLOR blue]Project X[/COLOR] [COLOR lime]Wizard[/COLOR][/I]')
wipe.FRESHSTART()
if nointernet == 0 and BetaUpdate == 0:
if check == 'true':
if os.path.exists(CHECKVERSION):
checkurl = BetaTwo
vers = open(CHECKVERSION, "r")
regex = re.compile(r'<build>(.+?)</build><version>(.+?)</version>')
for line in vers:
currversion = regex.findall(line)
for build,vernumber in currversion:
if vernumber > 0:
req = urllib2.Request(checkurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
                            dialog.ok(AddonTitle,'Sorry we are unable to check for [B]BETA[/B] updates!','The update host appears to be down.','Please check for updates later via the wizard.')
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><version>(.+?)</version><fresh>(.+?)</fresh>').findall(link)
for newversion,fresh in match:
if newversion > vernumber:
if fresh =='false': # TRUE
choice = xbmcgui.Dialog().yesno("NEW UPDATE AVAILABLE", 'Found a new update for the Build', build + " ver: "+newversion, 'Do you want to install it now?', yeslabel='[B][COLOR green]YES[/COLOR][/B]',nolabel='[B][COLOR red]NO[/COLOR][/B]')
if choice == 1:
updateurl = BetaOne
req = urllib2.Request(updateurl)
req.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.9.0.3) Gecko/2008092417 Firefox/3.0.3')
try:
response = urllib2.urlopen(req)
except:
dialog.ok(AddonTitle,'Sorry we were unable to download the update!','The update host appears to be down.','Please check for updates later via the wizard.')
sys.exit(1)
link=response.read()
response.close()
match = re.compile('<build>'+build+'</build><url>(.+?)</url>').findall(link)
for url in match:
path = xbmc.translatePath(os.path.join('special://home/addons','packages'))
name = "build"
dp = xbmcgui.DialogProgress()
dp.create(AddonTitle,"Downloading ",'', 'Please Wait')
lib=os.path.join(path, name+'.zip')
try:
os.remove(lib)
except:
pass
downloader.download(url, lib, dp)
addonfolder = xbmc.translatePath(os.path.join('special://','home'))
time.sleep(2)
dp.update(0,"", "Extracting Zip Please Wait")
unzipprogress(lib,addonfolder,dp)
dialog = xbmcgui.Dialog()
dialog.ok(AddonTitle, "To save changes you now need to force close Kodi, Press OK to force close Kodi")
Common.killxbmc()
else:
dialog.ok('[COLOR red]A WIPE is required for the update[/COLOR]','Select the [COLOR green]YES[/COLOR] option in the NEXT WINDOW to wipe now.','Select the [COLOR red]NO[/COLOR] option in the NEXT WINDOW to update later.','[I][COLOR snow]If you wish to update later you can do so in [/COLOR][COLOR blue]Project X[/COLOR] [COLOR lime]Wizard[/COLOR][/I]')
wipe.FRESHSTART()
if addonupdate == 'true':
#Update all repos and packages.
xbmc.executebuiltin("UpdateAddonRepos")
xbmc.executebuiltin("UpdateLocalAddons")
def unzip(_in, _out):
try:
zin = zipfile.ZipFile(_in, 'r')
zin.extractall(_out)
except Exception, e:
print str(e)
return False
return True
def unzipprogress(_in, _out, dp):
__in = zipfile.ZipFile(_in, 'r')
nofiles = float(len(__in.infolist()))
count = 0
try:
for item in __in.infolist():
count += 1
update = (count / nofiles) * 100
if dp.iscanceled():
                dp.close()
                dialog = xbmcgui.Dialog()
                dialog.ok(AddonTitle, 'Extraction was cancelled.')
                sys.exit()
dp.update(int(update))
__in.extract(item, _out)
except Exception, e:
return False
return True
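# For reference (inferred from the regexes above; this is not an official
# schema): the local version.txt stores records like
#   <build>ProjectX</build><version>2</version>
# the hosted "Two" feeds add a freshness flag,
#   <build>ProjectX</build><version>3</version><fresh>false</fresh>
# (fresh != 'false' forces a wipe via wipe.FRESHSTART() before updating),
# and the companion "One" feeds map a build to its download,
#   <build>ProjectX</build><url>http://host/path/build.zip</url>
# A minimal stand-alone parse of such a record:
#   import re
#   rec = "<build>ProjectX</build><version>2</version><fresh>false</fresh>"
#   re.findall('<build>(.+?)</build><version>(.+?)</version><fresh>(.+?)</fresh>', rec)
#   # -> [('ProjectX', '2', 'false')]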
## ################################################## ##
## ################################################## ##
|
repotvsupertuga/repo
|
plugin.program.jogosEmuladores/service.py
|
Python
|
gpl-2.0
| 20,151 | 0.033199 |
from django.apps import AppConfig
class CheckoutAppConfig(AppConfig):
name = 'ecommerce.extensions.checkout'
verbose_name = 'Checkout'
def ready(self):
super(CheckoutAppConfig, self).ready()
# noinspection PyUnresolvedReferences
import ecommerce.extensions.checkout.signals # pylint: disable=unused-variable
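# Illustrative only (hypothetical receiver; the real signals module is not
# shown here): ready() imports the signals module purely for its side effect
# of connecting receivers at startup, along the lines of
#   from django.db.models.signals import post_save
#   from django.dispatch import receiver
#   @receiver(post_save)
#   def on_save(sender, instance, **kwargs):
#       ...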
|
mferenca/HMS-ecommerce
|
ecommerce/extensions/checkout/apps.py
|
Python
|
agpl-3.0
| 349 | 0.002865 |
"""
Created on November 20, 2019
This file is subject to the terms and conditions defined in the
file 'LICENSE.txt', which is part of this source code package.
@author: David Moss
"""
# Section ID's
SECTION_ID_ALERTS = "alerts"
SECTION_ID_NOTES = "notes"
SECTION_ID_TASKS = "tasks"
SECTION_ID_SLEEP = "sleep"
SECTION_ID_ACTIVITIES = "activities"
SECTION_ID_MEALS = "meals"
SECTION_ID_MEDICATION = "medication"
SECTION_ID_BATHROOM = "bathroom"
SECTION_ID_SOCIAL = "social"
SECTION_ID_MEMORIES = "memories"
SECTION_ID_SYSTEM = "system"
def add_entry(botengine, location_object, section_id, comment=None, subtitle=None, identifier=None, include_timestamp=False, timestamp_override_ms=None):
"""
    Add a section and bullet point to the current daily report
:param botengine: BotEngine environment
:param location_object: Location object
:param section_id: Section ID like dailyreport.SECTION_ID_ACTIVITIES
:param comment: Comment like "Woke up."
:param subtitle: Subtitle comment like "Consistent sleep schedule and good quality sleep last night."
:param identifier: Optional identifier to come back and edit this entry later.
:param include_timestamp: True to include a timestamp like "7:00 AM - <comment>" (default is False)
:param timestamp_override_ms: Optional timestamp in milliseconds to override the current time when citing the timestamp with include_timestamp=True
"""
content = {
"section_id": section_id,
"comment": comment,
"subtitle": subtitle,
"identifier": identifier,
"include_timestamp": include_timestamp,
"timestamp_override_ms": timestamp_override_ms
}
location_object.distribute_datastream_message(botengine, "daily_report_entry", content, internal=True, external=False)
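# Illustrative usage (botengine and location_object are supplied by the bot
# framework at runtime; the values below are made up):
#   add_entry(botengine, location_object,
#             SECTION_ID_SLEEP,
#             comment="Woke up.",
#             include_timestamp=True)
# which distributes a 'daily_report_entry' datastream message for whatever
# microservice assembles the daily report.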
|
peoplepower/botengine
|
com.ppc.Bot/utilities/dailyreport.py
|
Python
|
apache-2.0
| 1,792 | 0.004464 |
# -*- coding: utf-8 -*-
"""
Custom model managers for finance.
"""
from .entity_manager import FinanceEntityManager
__all__ = (
'FinanceEntityManager',
)
|
access-missouri/am-django-project
|
am/finance/models/managers/__init__.py
|
Python
|
bsd-2-clause
| 159 | 0.006289 |
#THIS IS /helicopter_providence/middletown_3_29_11/site1_planes/boxm2_site1_1/boxm2_create_scene.py
from boxm2WriteSceneXML import *
import optparse
from xml.etree.ElementTree import ElementTree
import os, sys
#Parse inputs
parser = optparse.OptionParser(description='Create BOXM2 xml file');
parser.add_option('--scene_info', action="store", dest="scene_info");
parser.add_option('--boxm2_dir', action="store", dest="boxm2_dir");
options, args = parser.parse_args();
boxm2_dir = options.boxm2_dir;
scene_info = options.scene_info;
if not os.path.isdir(boxm2_dir + '/'):
os.mkdir(boxm2_dir + '/');
print 'Parsing: '
print scene_info
print boxm2_dir
#parse xml file
tree = ElementTree();
tree.parse(scene_info);
#find scene dimensions
bbox_elm = tree.getroot().find('bbox');
if bbox_elm is None:
print "Invalid info file: No bbox"
sys.exit(-1);
minx = float(bbox_elm.get('minx'));
miny = float(bbox_elm.get('miny'));
minz = float(bbox_elm.get('minz'));
maxx = float(bbox_elm.get('maxx'));
maxy = float(bbox_elm.get('maxy'));
maxz = float(bbox_elm.get('maxz'));
#find scene resolution
res_elm = tree.getroot().find('resolution');
if res_elm is None:
print "Invalid info file: No resolution"
sys.exit(-1);
resolution = float(res_elm.get('val'));
print ("Resolution: " + str(resolution));
#PARAMETERS
ntrees=32
max_num_lvls=4
min_pt = [minx, miny, minz]
max_pt = [maxx, maxy, maxz]
writeSceneFromBox(boxm2_dir,resolution,min_pt,max_pt,ntrees,max_num_lvls);
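# Illustrative invocation (paths are placeholders):
#   python boxm2_create_scene.py --scene_info scene_info.xml --boxm2_dir ./boxm2_site1_1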
|
mirestrepo/voxels-at-lems
|
super3d/boxm2_create_scene.py
|
Python
|
bsd-2-clause
| 1,566 | 0.030013 |
from __future__ import absolute_import, unicode_literals
from django.http import HttpResponseBadRequest
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from debug_toolbar.panels.sql.forms import SQLSelectForm
@csrf_exempt
def sql_select(request):
"""Returns the output of the SQL SELECT statement"""
form = SQLSelectForm(request.POST or None)
if form.is_valid():
sql = form.cleaned_data['raw_sql']
params = form.cleaned_data['params']
cursor = form.cursor
cursor.execute(sql, params)
headers = [d[0] for d in cursor.description]
result = cursor.fetchall()
cursor.close()
context = {
'result': result,
'sql': form.reformat_sql(),
'duration': form.cleaned_data['duration'],
'headers': headers,
'alias': form.cleaned_data['alias'],
}
return render(request, 'debug_toolbar/panels/sql_select.html', context)
return HttpResponseBadRequest('Form errors')
@csrf_exempt
def sql_explain(request):
"""Returns the output of the SQL EXPLAIN on the given query"""
form = SQLSelectForm(request.POST or None)
if form.is_valid():
sql = form.cleaned_data['raw_sql']
params = form.cleaned_data['params']
vendor = form.connection.vendor
cursor = form.cursor
if vendor == 'sqlite':
# SQLite's EXPLAIN dumps the low-level opcodes generated for a query;
# EXPLAIN QUERY PLAN dumps a more human-readable summary
# See http://www.sqlite.org/lang_explain.html for details
cursor.execute("EXPLAIN QUERY PLAN %s" % (sql,), params)
elif vendor == 'postgresql':
cursor.execute("EXPLAIN ANALYZE %s" % (sql,), params)
else:
cursor.execute("EXPLAIN %s" % (sql,), params)
headers = [d[0] for d in cursor.description]
result = cursor.fetchall()
cursor.close()
context = {
'result': result,
'sql': form.reformat_sql(),
'duration': form.cleaned_data['duration'],
'headers': headers,
'alias': form.cleaned_data['alias'],
}
return render(request, 'debug_toolbar/panels/sql_explain.html', context)
return HttpResponseBadRequest('Form errors')
@csrf_exempt
def sql_profile(request):
"""Returns the output of running the SQL and getting the profiling statistics"""
form = SQLSelectForm(request.POST or None)
if form.is_valid():
sql = form.cleaned_data['raw_sql']
params = form.cleaned_data['params']
cursor = form.cursor
result = None
headers = None
result_error = None
try:
cursor.execute("SET PROFILING=1") # Enable profiling
cursor.execute(sql, params) # Execute SELECT
cursor.execute("SET PROFILING=0") # Disable profiling
# The Query ID should always be 1 here but I'll subselect to get
# the last one just in case...
cursor.execute("""
SELECT *
FROM information_schema.profiling
WHERE query_id = (
SELECT query_id
FROM information_schema.profiling
ORDER BY query_id DESC
LIMIT 1
)
""")
headers = [d[0] for d in cursor.description]
result = cursor.fetchall()
except Exception:
result_error = "Profiling is either not available or not supported by your database."
cursor.close()
context = {
'result': result,
'result_error': result_error,
'sql': form.reformat_sql(),
'duration': form.cleaned_data['duration'],
'headers': headers,
'alias': form.cleaned_data['alias'],
}
return render(request, 'debug_toolbar/panels/sql_profile.html', context)
return HttpResponseBadRequest('Form errors')
|
ivelum/django-debug-toolbar
|
debug_toolbar/panels/sql/views.py
|
Python
|
bsd-3-clause
| 3,971 | 0.001259 |
from dcgpy import expression_gdual_double as expression
from dcgpy import kernel_set_gdual_double as kernel_set
from pyaudi import gdual_double as gdual
# 1- Instantiate a random expression using the 4 basic arithmetic operations
ks = kernel_set(["sum", "diff", "div", "mul"])
ex = expression(inputs = 1,
outputs = 1,
rows = 1,
cols = 6,
levels_back = 6,
arity = 2,
kernels = ks(),
n_eph = 0,
seed = 4232123212)
# 2 - Define the symbol set to be used in visualizing the expression
# (in our case, 1 input variable named "x") and visualize the expression
in_sym = ["x"]
print("Expression:", ex(in_sym)[0])
# 3 - Print the simplified expression
print("Simplified expression:", ex.simplify(in_sym))
# 4 - Visualize the dCGP graph
ex.visualize(in_sym)
# 5 - Define a gdual number of value 1.2 and truncation order 2
x = gdual(1.2, "x", 2)
# 6 - Compute the output of the expression and its second derivative in x = 1.2 and print
print("Expression in x=1.2:", ex([x])[0])
print("Second derivative:", ex([x])[0].get_derivative([2]))
# 5 - Mutate the expression with 2 random mutations of active genes and print
ex.mutate_active(2)
print("Mutated expression:", ex(in_sym)[0])
|
darioizzo/d-CGP
|
doc/examples/getting_started.py
|
Python
|
gpl-3.0
| 1,313 | 0.020564 |
import _plotly_utils.basevalidators
class BorderwidthValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self,
plotly_name="borderwidth",
parent_name="histogram2dcontour.colorbar",
**kwargs
):
super(BorderwidthValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
min=kwargs.pop("min", 0),
role=kwargs.pop("role", "style"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/histogram2dcontour/colorbar/_borderwidth.py
|
Python
|
mit
| 555 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# KDE, App-Indicator or Qt Systray
# Copyright (C) 2011-2018 Filipe Coelho <falktx@falktx.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# For a full copy of the GNU General Public License see the COPYING file
# Imports (Global)
import os, sys
if True:
from PyQt5.QtCore import QTimer
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QAction, QMainWindow, QMenu, QSystemTrayIcon
else:
from PyQt4.QtCore import QTimer
from PyQt4.QtGui import QIcon
from PyQt4.QtGui import QAction, QMainWindow, QMenu, QSystemTrayIcon
try:
if False and os.getenv("DESKTOP_SESSION") in ("ubuntu", "ubuntu-2d") and not os.path.exists("/var/cadence/no_app_indicators"):
from gi import require_version
require_version('Gtk', '3.0')
from gi.repository import Gtk
require_version('AppIndicator3', '0.1')
from gi.repository import AppIndicator3 as AppIndicator
TrayEngine = "AppIndicator"
#elif os.getenv("KDE_SESSION_VERSION") >= 5:
#TrayEngine = "Qt"
#elif os.getenv("KDE_FULL_SESSION") or os.getenv("DESKTOP_SESSION") == "kde-plasma":
#from PyKDE5.kdeui import KAction, KIcon, KMenu, KStatusNotifierItem
#TrayEngine = "KDE"
else:
TrayEngine = "Qt"
except:
TrayEngine = "Qt"
print("Using Tray Engine '%s'" % TrayEngine)
iActNameId = 0
iActWidget = 1
iActParentMenuId = 2
iActFunc = 3
iSepNameId = 0
iSepWidget = 1
iSepParentMenuId = 2
iMenuNameId = 0
iMenuWidget = 1
iMenuParentMenuId = 2
# Get Icon from user theme, using our own as backup (Oxygen)
def getIcon(icon, size=16):
return QIcon.fromTheme(icon, QIcon(":/%ix%i/%s.png" % (size, size, icon)))
# Global Systray class
class GlobalSysTray(object):
def __init__(self, parent, name, icon):
object.__init__(self)
self._app = None
self._parent = parent
self._gtk_running = False
self._quit_added = False
self.act_indexes = []
self.sep_indexes = []
self.menu_indexes = []
if TrayEngine == "KDE":
self.menu = KMenu(parent)
self.menu.setTitle(name)
self.tray = KStatusNotifierItem()
self.tray.setAssociatedWidget(parent)
self.tray.setCategory(KStatusNotifierItem.ApplicationStatus)
self.tray.setContextMenu(self.menu)
self.tray.setIconByPixmap(getIcon(icon))
self.tray.setTitle(name)
self.tray.setToolTipTitle(" ")
self.tray.setToolTipIconByPixmap(getIcon(icon))
# Double-click is managed by KDE
elif TrayEngine == "AppIndicator":
self.menu = Gtk.Menu()
self.tray = AppIndicator.Indicator.new(name, icon, AppIndicator.IndicatorCategory.APPLICATION_STATUS)
self.tray.set_menu(self.menu)
# Double-click is not possible with App-Indicators
elif TrayEngine == "Qt":
self.menu = QMenu(parent)
self.tray = QSystemTrayIcon(getIcon(icon))
self.tray.setContextMenu(self.menu)
self.tray.setParent(parent)
self.tray.activated.connect(self.qt_systray_clicked)
# -------------------------------------------------------------------------------------------
def addAction(self, act_name_id, act_name_string, is_check=False):
if TrayEngine == "KDE":
act_widget = KAction(act_name_string, self.menu)
act_widget.setCheckable(is_check)
self.menu.addAction(act_widget)
elif TrayEngine == "AppIndicator":
if is_check:
act_widget = Gtk.CheckMenuItem(act_name_string)
else:
act_widget = Gtk.ImageMenuItem(act_name_string)
act_widget.set_image(None)
act_widget.show()
self.menu.append(act_widget)
elif TrayEngine == "Qt":
act_widget = QAction(act_name_string, self.menu)
act_widget.setCheckable(is_check)
self.menu.addAction(act_widget)
else:
act_widget = None
act_obj = [None, None, None, None]
act_obj[iActNameId] = act_name_id
act_obj[iActWidget] = act_widget
self.act_indexes.append(act_obj)
def addSeparator(self, sep_name_id):
if TrayEngine == "KDE":
sep_widget = self.menu.addSeparator()
elif TrayEngine == "AppIndicator":
sep_widget = Gtk.SeparatorMenuItem()
sep_widget.show()
self.menu.append(sep_widget)
elif TrayEngine == "Qt":
sep_widget = self.menu.addSeparator()
else:
sep_widget = None
sep_obj = [None, None, None]
sep_obj[iSepNameId] = sep_name_id
sep_obj[iSepWidget] = sep_widget
self.sep_indexes.append(sep_obj)
def addMenu(self, menu_name_id, menu_name_string):
if TrayEngine == "KDE":
menu_widget = KMenu(menu_name_string, self.menu)
self.menu.addMenu(menu_widget)
elif TrayEngine == "AppIndicator":
menu_widget = Gtk.MenuItem(menu_name_string)
menu_parent = Gtk.Menu()
menu_widget.set_submenu(menu_parent)
menu_widget.show()
self.menu.append(menu_widget)
elif TrayEngine == "Qt":
menu_widget = QMenu(menu_name_string, self.menu)
self.menu.addMenu(menu_widget)
else:
menu_widget = None
menu_obj = [None, None, None]
menu_obj[iMenuNameId] = menu_name_id
menu_obj[iMenuWidget] = menu_widget
self.menu_indexes.append(menu_obj)
# -------------------------------------------------------------------------------------------
def addMenuAction(self, menu_name_id, act_name_id, act_name_string, is_check=False):
i = self.get_menu_index(menu_name_id)
if i < 0: return
menu_widget = self.menu_indexes[i][iMenuWidget]
if TrayEngine == "KDE":
act_widget = KAction(act_name_string, menu_widget)
act_widget.setCheckable(is_check)
menu_widget.addAction(act_widget)
elif TrayEngine == "AppIndicator":
menu_widget = menu_widget.get_submenu()
if is_check:
act_widget = Gtk.CheckMenuItem(act_name_string)
else:
act_widget = Gtk.ImageMenuItem(act_name_string)
act_widget.set_image(None)
act_widget.show()
menu_widget.append(act_widget)
elif TrayEngine == "Qt":
act_widget = QAction(act_name_string, menu_widget)
act_widget.setCheckable(is_check)
menu_widget.addAction(act_widget)
else:
act_widget = None
act_obj = [None, None, None, None]
act_obj[iActNameId] = act_name_id
act_obj[iActWidget] = act_widget
act_obj[iActParentMenuId] = menu_name_id
self.act_indexes.append(act_obj)
def addMenuSeparator(self, menu_name_id, sep_name_id):
i = self.get_menu_index(menu_name_id)
if i < 0: return
menu_widget = self.menu_indexes[i][iMenuWidget]
if TrayEngine == "KDE":
sep_widget = menu_widget.addSeparator()
elif TrayEngine == "AppIndicator":
menu_widget = menu_widget.get_submenu()
sep_widget = Gtk.SeparatorMenuItem()
sep_widget.show()
menu_widget.append(sep_widget)
elif TrayEngine == "Qt":
sep_widget = menu_widget.addSeparator()
else:
sep_widget = None
sep_obj = [None, None, None]
sep_obj[iSepNameId] = sep_name_id
sep_obj[iSepWidget] = sep_widget
sep_obj[iSepParentMenuId] = menu_name_id
self.sep_indexes.append(sep_obj)
#def addSubMenu(self, menu_name_id, new_menu_name_id, new_menu_name_string):
#menu_index = self.get_menu_index(menu_name_id)
#if menu_index < 0: return
#menu_widget = self.menu_indexes[menu_index][1]
##if TrayEngine == "KDE":
##new_menu_widget = KMenu(new_menu_name_string, self.menu)
##menu_widget.addMenu(new_menu_widget)
##elif TrayEngine == "AppIndicator":
##new_menu_widget = Gtk.MenuItem(new_menu_name_string)
##new_menu_widget.show()
##menu_widget.get_submenu().append(new_menu_widget)
##parent_menu_widget = Gtk.Menu()
##new_menu_widget.set_submenu(parent_menu_widget)
##else:
#if (1):
#new_menu_widget = QMenu(new_menu_name_string, self.menu)
#menu_widget.addMenu(new_menu_widget)
#self.menu_indexes.append([new_menu_name_id, new_menu_widget, menu_name_id])
# -------------------------------------------------------------------------------------------
def connect(self, act_name_id, act_func):
i = self.get_act_index(act_name_id)
if i < 0: return
act_widget = self.act_indexes[i][iActWidget]
if TrayEngine == "AppIndicator":
act_widget.connect("activate", self.gtk_call_func, act_name_id)
elif TrayEngine in ("KDE", "Qt"):
act_widget.triggered.connect(act_func)
self.act_indexes[i][iActFunc] = act_func
# -------------------------------------------------------------------------------------------
#def setActionChecked(self, act_name_id, yesno):
#index = self.get_act_index(act_name_id)
#if index < 0: return
#act_widget = self.act_indexes[index][1]
##if TrayEngine == "KDE":
##act_widget.setChecked(yesno)
##elif TrayEngine == "AppIndicator":
##if type(act_widget) != Gtk.CheckMenuItem:
##return # Cannot continue
##act_widget.set_active(yesno)
##else:
#if (1):
#act_widget.setChecked(yesno)
def setActionEnabled(self, act_name_id, yesno):
i = self.get_act_index(act_name_id)
if i < 0: return
act_widget = self.act_indexes[i][iActWidget]
if TrayEngine == "KDE":
act_widget.setEnabled(yesno)
elif TrayEngine == "AppIndicator":
act_widget.set_sensitive(yesno)
elif TrayEngine == "Qt":
act_widget.setEnabled(yesno)
def setActionIcon(self, act_name_id, icon):
i = self.get_act_index(act_name_id)
if i < 0: return
act_widget = self.act_indexes[i][iActWidget]
if TrayEngine == "KDE":
act_widget.setIcon(KIcon(icon))
elif TrayEngine == "AppIndicator":
if not isinstance(act_widget, Gtk.ImageMenuItem):
# Cannot use icons here
return
act_widget.set_image(Gtk.Image.new_from_icon_name(icon, Gtk.IconSize.MENU))
#act_widget.set_always_show_image(True)
elif TrayEngine == "Qt":
act_widget.setIcon(getIcon(icon))
def setActionText(self, act_name_id, text):
i = self.get_act_index(act_name_id)
if i < 0: return
act_widget = self.act_indexes[i][iActWidget]
if TrayEngine == "KDE":
act_widget.setText(text)
elif TrayEngine == "AppIndicator":
if isinstance(act_widget, Gtk.ImageMenuItem):
# Fix icon reset
last_icon = act_widget.get_image()
act_widget.set_label(text)
act_widget.set_image(last_icon)
else:
act_widget.set_label(text)
elif TrayEngine == "Qt":
act_widget.setText(text)
def setIcon(self, icon):
if TrayEngine == "KDE":
self.tray.setIconByPixmap(getIcon(icon))
#self.tray.setToolTipIconByPixmap(getIcon(icon))
elif TrayEngine == "AppIndicator":
self.tray.set_icon(icon)
elif TrayEngine == "Qt":
self.tray.setIcon(getIcon(icon))
def setToolTip(self, text):
if TrayEngine == "KDE":
self.tray.setToolTipSubTitle(text)
elif TrayEngine == "AppIndicator":
# ToolTips are disabled in App-Indicators by design
pass
elif TrayEngine == "Qt":
self.tray.setToolTip(text)
# -------------------------------------------------------------------------------------------
#def removeAction(self, act_name_id):
#index = self.get_act_index(act_name_id)
#if index < 0: return
#act_widget = self.act_indexes[index][1]
#parent_menu_widget = self.get_parent_menu_widget(self.act_indexes[index][2])
##if TrayEngine == "KDE":
##parent_menu_widget.removeAction(act_widget)
##elif TrayEngine == "AppIndicator":
##act_widget.hide()
##parent_menu_widget.remove(act_widget)
##else:
#if (1):
#parent_menu_widget.removeAction(act_widget)
#self.act_indexes.pop(index)
#def removeSeparator(self, sep_name_id):
#index = self.get_sep_index(sep_name_id)
#if index < 0: return
#sep_widget = self.sep_indexes[index][1]
#parent_menu_widget = self.get_parent_menu_widget(self.sep_indexes[index][2])
##if TrayEngine == "KDE":
##parent_menu_widget.removeAction(sep_widget)
##elif TrayEngine == "AppIndicator":
##sep_widget.hide()
##parent_menu_widget.remove(sep_widget)
##else:
#if (1):
#parent_menu_widget.removeAction(sep_widget)
#self.sep_indexes.pop(index)
#def removeMenu(self, menu_name_id):
#index = self.get_menu_index(menu_name_id)
#if index < 0: return
#menu_widget = self.menu_indexes[index][1]
#parent_menu_widget = self.get_parent_menu_widget(self.menu_indexes[index][2])
##if TrayEngine == "KDE":
##parent_menu_widget.removeAction(menu_widget.menuAction())
##elif TrayEngine == "AppIndicator":
##menu_widget.hide()
##parent_menu_widget.remove(menu_widget.get_submenu())
##else:
#if (1):
#parent_menu_widget.removeAction(menu_widget.menuAction())
#self.remove_actions_by_menu_name_id(menu_name_id)
#self.remove_separators_by_menu_name_id(menu_name_id)
#self.remove_submenus_by_menu_name_id(menu_name_id)
# -------------------------------------------------------------------------------------------
#def clearAll(self):
##if TrayEngine == "KDE":
##self.menu.clear()
##elif TrayEngine == "AppIndicator":
##for child in self.menu.get_children():
##self.menu.remove(child)
##else:
#if (1):
#self.menu.clear()
#self.act_indexes = []
#self.sep_indexes = []
#self.menu_indexes = []
#def clearMenu(self, menu_name_id):
#menu_index = self.get_menu_index(menu_name_id)
#if menu_index < 0: return
#menu_widget = self.menu_indexes[menu_index][1]
##if TrayEngine == "KDE":
##menu_widget.clear()
##elif TrayEngine == "AppIndicator":
##for child in menu_widget.get_submenu().get_children():
##menu_widget.get_submenu().remove(child)
##else:
#if (1):
#menu_widget.clear()
#list_of_submenus = [menu_name_id]
#for x in range(0, 10): # 10x level deep, should cover all cases...
#for this_menu_name_id, menu_widget, parent_menu_id in self.menu_indexes:
#if parent_menu_id in list_of_submenus and this_menu_name_id not in list_of_submenus:
#list_of_submenus.append(this_menu_name_id)
#for this_menu_name_id in list_of_submenus:
#self.remove_actions_by_menu_name_id(this_menu_name_id)
#self.remove_separators_by_menu_name_id(this_menu_name_id)
#self.remove_submenus_by_menu_name_id(this_menu_name_id)
# -------------------------------------------------------------------------------------------
def getTrayEngine(self):
return TrayEngine
def isTrayAvailable(self):
if TrayEngine in ("KDE", "Qt"):
# Ask Qt
return QSystemTrayIcon.isSystemTrayAvailable()
if TrayEngine == "AppIndicator":
# Ubuntu/Unity always has a systray
return True
return False
def handleQtCloseEvent(self, event):
if self.isTrayAvailable() and self._parent.isVisible():
event.accept()
self.__hideShowCall()
return
self.close()
QMainWindow.closeEvent(self._parent, event)
# -------------------------------------------------------------------------------------------
def show(self):
if not self._quit_added:
self._quit_added = True
if TrayEngine != "KDE":
self.addSeparator("_quit")
self.addAction("show", self._parent.tr("Minimize"))
self.addAction("quit", self._parent.tr("Quit"))
self.setActionIcon("quit", "application-exit")
self.connect("show", self.__hideShowCall)
self.connect("quit", self.__quitCall)
if TrayEngine == "KDE":
self.tray.setStatus(KStatusNotifierItem.Active)
elif TrayEngine == "AppIndicator":
self.tray.set_status(AppIndicator.IndicatorStatus.ACTIVE)
elif TrayEngine == "Qt":
self.tray.show()
def hide(self):
if TrayEngine == "KDE":
self.tray.setStatus(KStatusNotifierItem.Passive)
elif TrayEngine == "AppIndicator":
self.tray.set_status(AppIndicator.IndicatorStatus.PASSIVE)
elif TrayEngine == "Qt":
self.tray.hide()
def close(self):
if TrayEngine == "KDE":
self.menu.close()
elif TrayEngine == "AppIndicator":
if self._gtk_running:
self._gtk_running = False
Gtk.main_quit()
elif TrayEngine == "Qt":
self.menu.close()
def exec_(self, app):
self._app = app
if TrayEngine == "AppIndicator":
self._gtk_running = True
return Gtk.main()
else:
return app.exec_()
# -------------------------------------------------------------------------------------------
def get_act_index(self, act_name_id):
for i in range(len(self.act_indexes)):
if self.act_indexes[i][iActNameId] == act_name_id:
return i
else:
print("systray.py - Failed to get action index for %s" % act_name_id)
return -1
def get_sep_index(self, sep_name_id):
for i in range(len(self.sep_indexes)):
if self.sep_indexes[i][iSepNameId] == sep_name_id:
return i
else:
print("systray.py - Failed to get separator index for %s" % sep_name_id)
return -1
def get_menu_index(self, menu_name_id):
for i in range(len(self.menu_indexes)):
if self.menu_indexes[i][iMenuNameId] == menu_name_id:
return i
else:
print("systray.py - Failed to get menu index for %s" % menu_name_id)
return -1
#def get_parent_menu_widget(self, parent_menu_id):
#if parent_menu_id != None:
#menu_index = self.get_menu_index(parent_menu_id)
#if menu_index >= 0:
#return self.menu_indexes[menu_index][1]
#else:
#print("systray.py::Failed to get parent Menu widget for", parent_menu_id)
#return None
#else:
#return self.menu
#def remove_actions_by_menu_name_id(self, menu_name_id):
#h = 0
#for i in range(len(self.act_indexes)):
#act_name_id, act_widget, parent_menu_id, act_func = self.act_indexes[i - h]
#if parent_menu_id == menu_name_id:
#self.act_indexes.pop(i - h)
#h += 1
#def remove_separators_by_menu_name_id(self, menu_name_id):
#h = 0
#for i in range(len(self.sep_indexes)):
#sep_name_id, sep_widget, parent_menu_id = self.sep_indexes[i - h]
#if parent_menu_id == menu_name_id:
#self.sep_indexes.pop(i - h)
#h += 1
#def remove_submenus_by_menu_name_id(self, submenu_name_id):
#h = 0
#for i in range(len(self.menu_indexes)):
#menu_name_id, menu_widget, parent_menu_id = self.menu_indexes[i - h]
#if parent_menu_id == submenu_name_id:
#self.menu_indexes.pop(i - h)
#h += 1
# -------------------------------------------------------------------------------------------
def gtk_call_func(self, gtkmenu, act_name_id):
i = self.get_act_index(act_name_id)
if i < 0: return None
return self.act_indexes[i][iActFunc]
def qt_systray_clicked(self, reason):
if reason in (QSystemTrayIcon.DoubleClick, QSystemTrayIcon.Trigger):
self.__hideShowCall()
# -------------------------------------------------------------------------------------------
def __hideShowCall(self):
if self._parent.isVisible():
self.setActionText("show", self._parent.tr("Restore"))
self._parent.hide()
if self._app:
self._app.setQuitOnLastWindowClosed(False)
else:
self.setActionText("show", self._parent.tr("Minimize"))
if self._parent.isMaximized():
self._parent.showMaximized()
else:
self._parent.showNormal()
if self._app:
self._app.setQuitOnLastWindowClosed(True)
QTimer.singleShot(500, self.__raiseWindow)
def __quitCall(self):
if self._app:
self._app.setQuitOnLastWindowClosed(True)
self._parent.hide()
self._parent.close()
if self._app:
self._app.quit()
def __raiseWindow(self):
self._parent.activateWindow()
self._parent.raise_()
#--------------- main ------------------
if __name__ == '__main__':
from PyQt5.QtWidgets import QApplication, QDialog, QMessageBox
class ExampleGUI(QDialog):
def __init__(self, parent=None):
QDialog.__init__(self, parent)
self.setWindowIcon(getIcon("audacity"))
self.systray = GlobalSysTray(self, "Claudia", "claudia")
self.systray.addAction("about", self.tr("About"))
self.systray.setIcon("audacity")
self.systray.setToolTip("Demo systray app")
self.systray.connect("about", self.about)
self.systray.show()
def about(self):
QMessageBox.about(self, self.tr("About"), self.tr("Systray Demo"))
def done(self, r):
QDialog.done(self, r)
self.close()
def closeEvent(self, event):
self.systray.close()
QDialog.closeEvent(self, event)
app = QApplication(sys.argv)
gui = ExampleGUI()
gui.show()
sys.exit(gui.systray.exec_(app))
|
falkTX/Cadence
|
src/systray.py
|
Python
|
gpl-2.0
| 23,718 | 0.008812 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2018 João Pedro Rodrigues
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Removes all non-coordinate records from the file.
Keeps only MODEL, ATOM, HETATM, TER, ENDMDL, END, CONECT.
Usage:
python pdb_keepcoord.py <pdb file>
Example:
python pdb_keepcoord.py 1CTF.pdb
This program is part of the `pdb-tools` suite of utilities and should not be
distributed isolatedly. The `pdb-tools` were created to quickly manipulate PDB
files using the terminal, and can be used sequentially, with one tool streaming
data to another. They are based on old FORTRAN77 code that was taking too much
effort to maintain and compile. RIP.
"""
import os
import sys
__author__ = "Joao Rodrigues"
__email__ = "j.p.g.l.m.rodrigues@gmail.com"
def check_input(args):
"""Checks whether to read from stdin/file and validates user input/options.
"""
# Defaults
fh = sys.stdin # file handle
if not len(args):
# Reading from pipe with default option
if sys.stdin.isatty():
sys.stderr.write(__doc__)
sys.exit(1)
elif len(args) == 1:
if not os.path.isfile(args[0]):
emsg = 'ERROR!! File not found or not readable: \'{}\'\n'
sys.stderr.write(emsg.format(args[0]))
sys.stderr.write(__doc__)
sys.exit(1)
fh = open(args[0], 'r')
else: # Whatever ...
emsg = 'ERROR!! Script takes 1 argument, not \'{}\'\n'
sys.stderr.write(emsg.format(len(args)))
sys.stderr.write(__doc__)
sys.exit(1)
return fh
def keep_coordinates(fhandle):
"""Keeps only coordinate records in the PDB file.
"""
records = ('MODEL ', 'ATOM ', 'HETATM',
'ENDMDL', 'END ',
'TER ', 'CONECT')
for line in fhandle:
if line.startswith(records):
yield line
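# Illustrative only: besides the CLI entry point below, the generator can be
# reused programmatically, e.g.
#   with open('1CTF.pdb') as fh:
#       coord_lines = list(keep_coordinates(fh))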
def main():
# Check Input
pdbfh = check_input(sys.argv[1:])
# Do the job
new_pdb = keep_coordinates(pdbfh)
try:
_buffer = []
_buffer_size = 5000 # write N lines at a time
for lineno, line in enumerate(new_pdb):
if not (lineno % _buffer_size):
sys.stdout.write(''.join(_buffer))
_buffer = []
_buffer.append(line)
sys.stdout.write(''.join(_buffer))
sys.stdout.flush()
except IOError:
# This is here to catch Broken Pipes
# for example to use 'head' or 'tail' without
# the error message showing up
pass
# last line of the script
# We can close it even if it is sys.stdin
pdbfh.close()
sys.exit(0)
if __name__ == '__main__':
main()
|
JoaoRodrigues/pdb-tools
|
pdbtools/pdb_keepcoord.py
|
Python
|
apache-2.0
| 3,212 | 0 |
from clickFuUtils import cfAction
class osmViewMap(cfAction):
def __init__(self,iface):
cfAction.__init__(self,self.name(),iface)
return None
def name(self):
return "View OSM map"
def desc(self):
return "Goto Location on OpenStreetMap"
def createURL(self,lat,long):
url = "http://www.openstreetmap.org/#map=17/%s/%s" % (lat,long)
return url
class osmEditMap(cfAction):
def __init__(self,iface):
cfAction.__init__(self,self.name(),iface)
return None
def name(self):
return "Edit OSM with iD"
def desc(self):
return "Goto Location on OpenStreetMap and start editing with iD"
def createURL(self,lat,long):
url = "http://www.openstreetmap.org/edit?editor=id#map=17/%s/%s" % (lat,long)
return url
class osmEditMapJOSM(cfAction):
def __init__(self,iface):
cfAction.__init__(self,self.name(),iface)
return None
def name(self):
return "Edit OSM with JOSM"
def desc(self):
return "Goto Location on OpenStreetMap and start editing with JOSM"
def createURL(self,lat,long):
url = "http://127.0.0.1:8111/load_and_zoom?left=%s&top=%s&right=%s&bottom=%s" % (long-0.005,lat+0.005,long+0.005,lat-0.005)
return url
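# Illustrative only (hypothetical service, left commented out so the plugin's
# behaviour is unchanged): new targets follow the same name/desc/createURL
# pattern, e.g.
# class osmViewExample(cfAction):
#     def __init__(self,iface):
#         cfAction.__init__(self,self.name(),iface)
#         return None
#     def name(self):
#         return "View example map"
#     def desc(self):
#         return "Goto Location on a hypothetical map service"
#     def createURL(self,lat,long):
#         return "http://maps.example.org/?lat=%s&lon=%s" % (lat,long)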
|
spatialhast/clickfu
|
osm.py
|
Python
|
gpl-3.0
| 1,309 | 0.02139 |
from django.conf import settings
# Safe User import for Django < 1.5
try:
from django.contrib.auth import get_user_model
except ImportError:
from django.contrib.auth.models import User
else:
User = get_user_model()
# Safe version of settings.AUTH_USER_MODEL for Django < 1.5
auth_user_model = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
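# Illustrative usage (hypothetical model, shown to clarify why both names are
# exported): use User for runtime lookups and auth_user_model for lazy
# foreign-key references, e.g.
#   from django.db import models
#   from chalk.compat import auth_user_model
#   class Note(models.Model):
#       author = models.ForeignKey(auth_user_model)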
|
kevindias/django-chalk
|
chalk/compat.py
|
Python
|
bsd-3-clause
| 358 | 0 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Trading As Brands
# Copyright (C) 2015 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Trading As Brands',
'version': '0.1',
'author': 'OpusVL',
'website': 'http://opusvl.com/',
'summary': 'Allow company to present different branding on documents sent to different customers',
'description': """Allow company to present different branding on documents sent to different customers,
""",
'images': [
],
'depends': [
],
'data': [
'security/brand_groups.xml',
'security/ir.model.access.csv',
'res_partner_view.xml',
'res_company_brand_view.xml',
'res_company_view.xml',
'report_external_layout_modification.xml',
],
'demo': [
],
'test': [
],
'license': 'AGPL-3',
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
OpusVL/odoo-trading-as
|
trading_as/__openerp__.py
|
Python
|
agpl-3.0
| 1,741 | 0.001723 |
#-*- coding: utf-8 -*-
#
# Copyright (C) 2005-2010 TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
# Standard Python Modules
import os
import glob
import shutil
import string
import pwd
import grp
import gettext
__trans = gettext.translation('pisi', fallback=True)
_ = __trans.ugettext
# Pisi Modules
import pisi.context as ctx
# ActionsAPI Modules
import pisi.actionsapi
import pisi.actionsapi.get
from pisi.actionsapi import error
from pisi.util import run_logged
from pisi.util import join_path
def can_access_file(filePath):
'''test the existence of file'''
return os.access(filePath, os.F_OK)
def can_access_directory(destinationDirectory):
'''test readability, writability and executablility of directory'''
return os.access(destinationDirectory, os.R_OK | os.W_OK | os.X_OK)
def makedirs(destinationDirectory):
'''recursive directory creation function'''
try:
if not os.access(destinationDirectory, os.F_OK):
os.makedirs(destinationDirectory)
except OSError:
error(_('Cannot create directory %s') % destinationDirectory)
def echo(destinationFile, content):
    '''append content plus a newline to destinationFile'''
    try:
        f = open(destinationFile, 'a')
        f.write('%s\n' % content)
        f.close()
    except IOError:
        error(_('ActionsAPI [echo]: Can\'t append to file %s.') % (destinationFile))
def chmod(filePath, mode = 0755):
'''change the mode of filePath to the mode'''
filePathGlob = glob.glob(filePath)
if len(filePathGlob) == 0:
error(_("ActionsAPI [chmod]: No file matched pattern \"%s\".") % filePath)
for fileName in filePathGlob:
if can_access_file(fileName):
try:
os.chmod(fileName, mode)
except OSError:
ctx.ui.error(_('ActionsAPI [chmod]: Operation not permitted: %s (mode: 0%o)') \
% (fileName, mode))
else:
            ctx.ui.error(_('ActionsAPI [chmod]: File %s doesn\'t exist.') % (fileName))
def chown(filePath, uid = 'root', gid = 'root'):
'''change the owner and group id of filePath to uid and gid'''
if can_access_file(filePath):
try:
os.chown(filePath, pwd.getpwnam(uid)[2], grp.getgrnam(gid)[2])
except OSError:
ctx.ui.error(_('ActionsAPI [chown]: Operation not permitted: %s (uid: %s, gid: %s)') \
% (filePath, uid, gid))
else:
        ctx.ui.error(_('ActionsAPI [chown]: File %s doesn\'t exist.') % filePath)
def sym(source, destination):
'''creates symbolic link'''
try:
os.symlink(source, destination)
except OSError:
ctx.ui.error(_('ActionsAPI [sym]: Permission denied: %s to %s') % (source, destination))
def unlink(pattern):
'''remove the file path'''
filePathGlob = glob.glob(pattern)
if len(filePathGlob) == 0:
ctx.ui.error(_("No file matched pattern \"%s\". Remove operation failed.") % pattern)
return
for filePath in filePathGlob:
if isFile(filePath) or isLink(filePath):
try:
os.unlink(filePath)
except OSError:
ctx.ui.error(_('ActionsAPI [unlink]: Permission denied: %s.') % (filePath))
elif isDirectory(filePath):
pass
else:
            ctx.ui.error(_('ActionsAPI [unlink]: File %s doesn\'t exist.') % (filePath))
def unlinkDir(sourceDirectory):
'''delete an entire directory tree'''
if isDirectory(sourceDirectory) or isLink(sourceDirectory):
try:
shutil.rmtree(sourceDirectory)
except OSError:
error(_('ActionsAPI [unlinkDir]: Operation not permitted: %s') % (sourceDirectory))
elif isFile(sourceDirectory):
pass
else:
        error(_('ActionsAPI [unlinkDir]: Directory %s doesn\'t exist.') % (sourceDirectory))
def move(source, destination):
'''recursively move a "source" file or directory to "destination"'''
sourceGlob = glob.glob(source)
if len(sourceGlob) == 0:
error(_("ActionsAPI [move]: No file matched pattern \"%s\".") % source)
for filePath in sourceGlob:
if isFile(filePath) or isLink(filePath) or isDirectory(filePath):
try:
shutil.move(filePath, destination)
except OSError:
error(_('ActionsAPI [move]: Permission denied: %s to %s') % (filePath, destination))
else:
            error(_('ActionsAPI [move]: File %s doesn\'t exist.') % (filePath))
# FIXME: instead of passing a sym parameter, split copy and copytree into 4 different function
def copy(source, destination, sym = True):
'''recursively copy a "source" file or directory to "destination"'''
sourceGlob = glob.glob(source)
if len(sourceGlob) == 0:
error(_("ActionsAPI [copy]: No file matched pattern \"%s\".") % source)
for filePath in sourceGlob:
if isFile(filePath) and not isLink(filePath):
try:
shutil.copy(filePath, destination)
except IOError:
error(_('ActionsAPI [copy]: Permission denied: %s to %s') % (filePath, destination))
elif isLink(filePath) and sym:
if isDirectory(destination):
os.symlink(os.readlink(filePath), join_path(destination, os.path.basename(filePath)))
else:
if isFile(destination):
os.remove(destination)
os.symlink(os.readlink(filePath), destination)
elif isLink(filePath) and not sym:
if isDirectory(filePath):
copytree(filePath, destination)
else:
shutil.copy(filePath, destination)
elif isDirectory(filePath):
copytree(filePath, destination, sym)
else:
error(_('ActionsAPI [copy]: File %s does not exist.') % filePath)
def copytree(source, destination, sym = True):
'''recursively copy an entire directory tree rooted at source'''
if isDirectory(source):
if os.path.exists(destination):
if isDirectory(destination):
copytree(source, join_path(destination, os.path.basename(source.strip('/'))))
return
else:
copytree(source, join_path(destination, os.path.basename(source)))
return
try:
shutil.copytree(source, destination, sym)
except OSError, e:
error(_('ActionsAPI [copytree] %s to %s: %s') % (source, destination, e))
else:
        error(_('ActionsAPI [copytree]: Directory %s doesn\'t exist.') % (source))
def touch(filePath):
'''changes the access time of the 'filePath', or creates it if it does not exist'''
filePathGlob = glob.glob(filePath)
    if filePathGlob:
        # glob matched at least one existing file; just update the timestamps
        # (an empty match list is falsy, so no separate length check is needed)
        for f in filePathGlob:
            os.utime(f, None)
else:
try:
f = open(filePath, 'w')
f.close()
except IOError:
error(_('ActionsAPI [touch]: Permission denied: %s') % (filePath))
def cd(directoryName = ''):
    '''change directory; with no argument, go up to the parent directory'''
current = os.getcwd()
if directoryName:
os.chdir(directoryName)
else:
os.chdir(os.path.dirname(current))
def ls(source):
'''listdir'''
if os.path.isdir(source):
return os.listdir(source)
else:
return glob.glob(source)
def export(key, value):
'''export environ variable'''
os.environ[key] = value
def isLink(filePath):
'''return True if filePath refers to a symbolic link'''
return os.path.islink(filePath)
def isFile(filePath):
'''return True if filePath is an existing regular file'''
return os.path.isfile(filePath)
def isDirectory(filePath):
'''Return True if filePath is an existing directory'''
return os.path.isdir(filePath)
def isEmpty(filePath):
'''Return True if filePath is an empty file'''
return os.path.getsize(filePath) == 0
def realPath(filePath):
'''return the canonical path of the specified filename, eliminating any symbolic links encountered in the path'''
return os.path.realpath(filePath)
def baseName(filePath):
'''return the base name of pathname filePath'''
return os.path.basename(filePath)
def dirName(filePath):
'''return the directory name of pathname path'''
return os.path.dirname(filePath)
def system(command):
command = string.join(string.split(command))
retValue = run_logged(command)
#if return value is different than 0, it means error, raise exception
if retValue != 0:
error(_("Command \"%s\" failed, return value was %d.") % (command, retValue))
return retValue
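# Usage sketch (hypothetical package recipe, and assuming get.installDIR() is
# available from the pisi.actionsapi.get module imported above): typical calls
# from a package actions.py.
#
# destdir = pisi.actionsapi.get.installDIR()
# makedirs('%s/usr/share/doc/foo' % destdir)
# copy('docs/*', '%s/usr/share/doc/foo' % destdir)
# chmod('%s/usr/bin/foo' % destdir, 0755)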
|
solus-project/package-management
|
pisi/actionsapi/shelltools.py
|
Python
|
gpl-2.0
| 9,041 | 0.008185 |
"""Workaround for formatting issue
Source: http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GettingStarted.Python.04.html
"""
import decimal
import json
class DecimalEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
return super(DecimalEncoder, self).default(o)
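# Usage sketch: DynamoDB returns numbers as decimal.Decimal, which json.dumps
# cannot serialize by default; pass this encoder through the ``cls`` argument.
#
# item = {'price': decimal.Decimal('9.99'), 'count': decimal.Decimal('3')}
# print(json.dumps(item, cls=DecimalEncoder))  # e.g. {"price": 9.99, "count": 3}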
|
jcolekaplan/WNCYC
|
src/main/api/decEncoder.py
|
Python
|
mit
| 441 | 0.004535 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import contextlib
import multiprocessing
import multiprocessing.managers
import os
import platform
import random
import signal
import socket
import subprocess
import sys
import threading
import time
from .compat import str_join
from .test import TestEntry, domain_socket_path
from .report import ExecReporter, SummaryReporter
RESULT_TIMEOUT = 128
RESULT_ERROR = 64
class ExecutionContext(object):
def __init__(self, cmd, cwd, env, report):
self._log = multiprocessing.get_logger()
self.report = report
self.cmd = cmd
self.cwd = cwd
self.env = env
self.timer = None
self.expired = False
def _expire(self):
self._log.info('Timeout')
self.expired = True
self.kill()
def kill(self):
self._log.debug('Killing process : %d' % self.proc.pid)
if platform.system() != 'Windows':
try:
os.killpg(self.proc.pid, signal.SIGKILL)
except Exception as err:
self._log.info('Failed to kill process group : %s' % str(err))
try:
self.proc.kill()
except Exception as err:
self._log.info('Failed to kill process : %s' % str(err))
self.report.killed()
def _popen_args(self):
args = {
'cwd': self.cwd,
'env': self.env,
'stdout': self.report.out,
'stderr': subprocess.STDOUT,
}
# make sure child processes doesn't remain after killing
if platform.system() == 'Windows':
DETACHED_PROCESS = 0x00000008
args.update(creationflags=DETACHED_PROCESS | subprocess.CREATE_NEW_PROCESS_GROUP)
else:
args.update(preexec_fn=os.setsid)
return args
def start(self, timeout=0):
joined = str_join(' ', self.cmd)
self._log.debug('COMMAND: %s', joined)
self._log.debug('WORKDIR: %s', self.cwd)
self._log.debug('LOGFILE: %s', self.report.logpath)
self.report.begin()
self.proc = subprocess.Popen(self.cmd, **self._popen_args())
if timeout > 0:
self.timer = threading.Timer(timeout, self._expire)
self.timer.start()
return self._scoped()
@contextlib.contextmanager
def _scoped(self):
yield self
self._log.debug('Killing scoped process')
self.kill()
def wait(self):
self.proc.communicate()
if self.timer:
self.timer.cancel()
self.report.end(self.returncode)
@property
def returncode(self):
return self.proc.returncode if self.proc else None
def exec_context(port, testdir, test, prog):
report = ExecReporter(testdir, test, prog)
prog.build_command(port)
return ExecutionContext(prog.command, prog.workdir, prog.env, report)
def run_test(testdir, test_dict, async=True, max_retry=3):
try:
logger = multiprocessing.get_logger()
retry_count = 0
test = TestEntry(testdir, **test_dict)
while True:
if stop.is_set():
logger.debug('Skipping because shutting down')
return None
logger.debug('Start')
with PortAllocator.alloc_port_scoped(ports, test.socket) as port:
logger.debug('Start with port %d' % port)
sv = exec_context(port, testdir, test, test.server)
cl = exec_context(port, testdir, test, test.client)
logger.debug('Starting server')
with sv.start():
if test.delay > 0:
logger.debug('Delaying client for %.2f seconds' % test.delay)
time.sleep(test.delay)
cl_retry_count = 0
cl_max_retry = 10
cl_retry_wait = 0.5
while True:
logger.debug('Starting client')
cl.start(test.timeout)
logger.debug('Waiting client')
cl.wait()
if not cl.report.maybe_false_positive() or cl_retry_count >= cl_max_retry:
if cl_retry_count > 0 and cl_retry_count < cl_max_retry:
logger.warn('[%s]: Connected after %d retry (%.2f sec each)' % (test.server.name, cl_retry_count, cl_retry_wait))
break
logger.debug('Server may not be ready, waiting %.2f second...' % cl_retry_wait)
time.sleep(cl_retry_wait)
cl_retry_count += 1
if not sv.report.maybe_false_positive() or retry_count >= max_retry:
logger.debug('Finish')
return RESULT_TIMEOUT if cl.expired else cl.proc.returncode
logger.warn('[%s]: Detected socket bind failure, retrying...' % test.server.name)
retry_count += 1
except (KeyboardInterrupt, SystemExit):
logger.info('Interrupted execution')
if not async:
raise
stop.set()
return None
except Exception as ex:
logger.warn('%s', ex)
if not async:
raise
logger.debug('Error executing [%s]', test.name, exc_info=sys.exc_info())
return RESULT_ERROR
class PortAllocator(object):
def __init__(self):
self._log = multiprocessing.get_logger()
self._lock = multiprocessing.Lock()
self._ports = set()
self._dom_ports = set()
self._last_alloc = 0
def _get_tcp_port(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(('127.0.0.1', 0))
port = sock.getsockname()[1]
self._lock.acquire()
try:
ok = port not in self._ports
if ok:
self._ports.add(port)
self._last_alloc = time.time()
finally:
self._lock.release()
sock.close()
return port if ok else self._get_tcp_port()
def _get_domain_port(self):
port = random.randint(1024, 65536)
self._lock.acquire()
try:
ok = port not in self._dom_ports
if ok:
self._dom_ports.add(port)
finally:
self._lock.release()
return port if ok else self._get_domain_port()
def alloc_port(self, socket_type):
if socket_type in ('domain', 'abstract'):
return self._get_domain_port()
else:
return self._get_tcp_port()
    # static method for inter-process invocation
@staticmethod
@contextlib.contextmanager
def alloc_port_scoped(allocator, socket_type):
port = allocator.alloc_port(socket_type)
yield port
allocator.free_port(socket_type, port)
def free_port(self, socket_type, port):
self._log.debug('free_port')
self._lock.acquire()
try:
if socket_type == 'domain':
self._dom_ports.remove(port)
path = domain_socket_path(port)
if os.path.exists(path):
os.remove(path)
elif socket_type == 'abstract':
self._dom_ports.remove(port)
else:
self._ports.remove(port)
except IOError as err:
self._log.info('Error while freeing port : %s' % str(err))
finally:
self._lock.release()
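# Usage sketch: allocate a port for the lifetime of a single test run.
#
# allocator = PortAllocator()
# with PortAllocator.alloc_port_scoped(allocator, 'tcp') as port:
#     pass  # start server/client against `port`; it is freed on exit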
class NonAsyncResult(object):
def __init__(self, value):
self._value = value
def get(self, timeout=None):
return self._value
def wait(self, timeout=None):
pass
def ready(self):
return True
def successful(self):
return self._value == 0
class TestDispatcher(object):
def __init__(self, testdir, concurrency):
self._log = multiprocessing.get_logger()
self.testdir = testdir
# seems needed for python 2.x to handle keyboard interrupt
self._stop = multiprocessing.Event()
self._async = concurrency > 1
if not self._async:
self._pool = None
global stop
global ports
stop = self._stop
ports = PortAllocator()
else:
self._m = multiprocessing.managers.BaseManager()
self._m.register('ports', PortAllocator)
self._m.start()
self._pool = multiprocessing.Pool(concurrency, self._pool_init, (self._m.address,))
self._report = SummaryReporter(testdir, concurrency > 1)
self._log.debug(
'TestDispatcher started with %d concurrent jobs' % concurrency)
def _pool_init(self, address):
global stop
global m
global ports
stop = self._stop
m = multiprocessing.managers.BaseManager(address)
m.connect()
ports = m.ports()
def _dispatch_sync(self, test, cont):
r = run_test(self.testdir, test, False)
cont(r)
return NonAsyncResult(r)
def _dispatch_async(self, test, cont):
return self._pool.apply_async(func=run_test, args=(self.testdir, test,), callback=cont)
def dispatch(self, test):
index = self._report.add_test(test)
def cont(r):
if not self._stop.is_set():
self._log.debug('freeing port')
self._log.debug('adding result')
self._report.add_result(index, r, r == RESULT_TIMEOUT)
self._log.debug('finish continuation')
fn = self._dispatch_async if self._async else self._dispatch_sync
return fn(test, cont)
def wait(self):
if self._async:
self._pool.close()
self._pool.join()
self._m.shutdown()
return self._report.end()
def terminate(self):
self._stop.set()
if self._async:
self._pool.terminate()
self._pool.join()
self._m.shutdown()
|
msonnabaum/thrift
|
test/crossrunner/run.py
|
Python
|
apache-2.0
| 9,516 | 0.010824 |
import sys
import pytest
from opentracing.ext import tags
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from opentracing_instrumentation.client_hooks import mysqldb as mysqldb_hooks
from opentracing_instrumentation.request_context import span_in_context
from .sql_common import metadata, User
SKIP_REASON_PYTHON_3 = 'MySQLdb is not compatible with Python 3'
SKIP_REASON_CONNECTION = 'MySQL is not running or cannot connect'
MYSQL_CONNECTION_STRING = 'mysql://root@127.0.0.1/test'
@pytest.fixture
def session():
Session = sessionmaker()
engine = create_engine(MYSQL_CONNECTION_STRING)
Session.configure(bind=engine)
metadata.create_all(engine)
try:
yield Session()
except:
pass
@pytest.fixture(autouse=True, scope='module')
def patch_sqlalchemy():
mysqldb_hooks.install_patches()
try:
yield
finally:
mysqldb_hooks.reset_patches()
def is_mysql_running():
try:
import MySQLdb
with MySQLdb.connect(host='127.0.0.1', user='root'):
pass
return True
except:
return False
def assert_span(span, operation, parent=None):
assert span.operation_name == 'MySQLdb:' + operation
assert span.tags.get(tags.SPAN_KIND) == tags.SPAN_KIND_RPC_CLIENT
if parent:
assert span.parent_id == parent.context.span_id
assert span.context.trace_id == parent.context.trace_id
else:
assert span.parent_id is None
@pytest.mark.skipif(not is_mysql_running(), reason=SKIP_REASON_CONNECTION)
@pytest.mark.skipif(sys.version_info.major == 3, reason=SKIP_REASON_PYTHON_3)
def test_db(tracer, session):
root_span = tracer.start_span('root-span')
# span recording works for regular operations within a context only
with span_in_context(root_span):
user = User(name='user', fullname='User', password='password')
session.add(user)
session.commit()
spans = tracer.recorder.get_spans()
assert len(spans) == 4
connect_span, insert_span, commit_span, rollback_span = spans
assert_span(connect_span, 'Connect')
assert_span(insert_span, 'INSERT', root_span)
assert_span(commit_span, 'commit', root_span)
assert_span(rollback_span, 'rollback', root_span)
|
uber-common/opentracing-python-instrumentation
|
tests/opentracing_instrumentation/test_mysqldb.py
|
Python
|
mit
| 2,279 | 0.000878 |
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2013 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
from django.utils.translation import gettext as _
from apiplayground import APIPlayground
class CampaignDelCascadeAPIPlayground(APIPlayground):
schema = {
"title": _("campaign delete cascade"),
"base_url": "http://localhost/api/v1/",
"resources": [
{
"name": "/campaign_delete_cascade/",
"description": _("this resource allows you to delete campaign."),
"endpoints": [
{
"method": "DELETE",
"url": "/api/v1/campaign_delete_cascade/{campaign-id}/",
"description": _("delete campaign"),
}
]
},
]
}
|
garyjs/Newfiesautodialer
|
newfies/api/api_playgrounds/campaign_delete_cascade_playground.py
|
Python
|
mpl-2.0
| 1,138 | 0.001757 |
#!/usr/bin/env python3
class Employee:
num_of_emps = 0
raise_amount = 1.04
    def __init__(self,first,last,pay=0):
        self.first = first
        self.last = last
        self.pay = pay  # needed below by apply_raise, __repr__ and __add__
        self.email = first + '.' + last + '@kellynoah.com'
def fullname(self):
return '{} {}'.format(self.first,self.last)
def apply_raise(self):
self.pay = int(self.pay * self.raise_amount)
def __repr__(self):
return "Employee('{}', '{}', '{}')".format(self.first, self.last, self.pay)
def __str__(self):
return '{} - {}'.format(self.fullname(), self.email)
def __add__(self, other):
return self.pay + other.pay
def __len__(self):
return len(self.fullname())
emp_1 = Employee('John', 'Smith', 50000)  # pay value is an arbitrary example
print(emp_1.first)
print(emp_1.email)
print(emp_1.fullname())
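# A quick demo of the dunder methods defined above (values are arbitrary):
emp_2 = Employee('Jane', 'Doe', 60000)
print(repr(emp_1))    # __repr__
print(str(emp_1))     # __str__
print(emp_1 + emp_2)  # __add__: sum of the two salaries
print(len(emp_1))     # __len__: length of the full name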
|
lmascare/utils
|
python/tutorials/oop6a.py
|
Python
|
artistic-2.0
| 811 | 0.008631 |
#Episode 4 (covers part of file 3.py and part of Episode 2)
# courses=['History','Math','Physics','Compsci']#this line must be uncommented for everything before the Mutable section
# print(courses)
# courses.append('Art')#add an element at the end
# courses.insert(0,'English')#add an element at position 0
# courses_2=['Chinese','Education']
# courses.insert(1,courses_2)#compare this line with the two lines below
# courses.append(courses_2)
# courses.extend(courses_2)
# #see 3.py for the difference between deleting with pop and with remove
# # courses.remove('Math')#remove one element
# popped=courses.pop()#remove an element and assign it to popped (defaults to the last one if no index is given)
# print(popped)#print the removed element
# courses.reverse()#reverse the order of the elements
# courses.sort()#sort by leading character; digits sort before letters
# print(courses)
# courses.sort(reverse=True)#sort in descending order (=False has no effect)
# print(courses)
# sorted_courses=sorted(courses)
# print(sorted_courses)
# alphabet=['DA1','SA2','AD3','3AD']
# alphabet.sort()
# print(alphabet)
# nums=[3,5,1,4,2]
# nums.sort()
# print(nums)
# print(min(nums))#print the smallest number
# print(max(nums))#print the largest number
# print(sum(nums))#print the sum
# #not sure what ordering rule applies to Chinese characters
# Chinese=['啊了','吧即','啦']
# Chinese.sort()
# print(Chinese)
# print(courses.index('Math'))#find an element's position in the list
# print('Art' in courses)#True means the element is in the list, False means it is not
#for and in statements
# for item in courses: #print the elements of courses one by one
# print(item)
# #print each element's index together with the element
# for course in enumerate(courses):
# print(course)
# for index,course in enumerate(courses):
# print(index,course)
# for index,course in enumerate(courses,start=1):
# print(index,course)
# courses_str=' - '.join(courses)#join the elements of courses with ' - ' in between
# new_list=courses_str.split(' - ')#split courses_str back into a list on ' - '
# print(courses_str)
# print(new_list)
# #Mutable
# list_1=['History','Math','Physics','Compsci']
# list_2=list_1
# print(list_1)
# print(list_2)
# list_1[0]='Art'
# print(list_1)
# print(list_2)
# #Immutable (oddly, the video says this fails but it works for me -
# #square brackets create a list, not a tuple, which is why the assignment succeeds)
# tuple_1=['History','Math','Physics','Compsci']
# tuple_2=tuple_1
# print(tuple_1)
# print(tuple_2)
# tuple_1[0]='Art'
# print(tuple_1)
# print(tuple_2)
# #Sets
# cs_courses={'History', 'Math', 'Physics', 'Compsci','Math'}#with braces (a set), duplicate elements are kept only once
# art_courses={'History', 'Math', 'Art', 'Design'}
# print(cs_courses)
# print(cs_courses.intersection(art_courses))#print the elements the two sets have in common
# print(cs_courses.difference(art_courses))#print the elements of cs_courses that are not in art_courses
# print(cs_courses.union(art_courses))#merge the two sets (the order differs on every run)
#Empty Lists
empty_list=[]
empty_list=list()
#Empty Tuples
empty_tuple=()
empty_tuple=tuple()
#Empty Sets
empty_set={} #wrong - this creates an empty dict, not a set
empty_set=set()
|
Tiger-C/python
|
python教程/第四集.py
|
Python
|
mit
| 3,007 | 0.006783 |
#!/usr/bin/python
# coding=utf-8
import datetime
import mock
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import patch
from test import run_only
from mock import Mock
from diamond.collector import Collector
from elb import ElbCollector
def run_only_if_boto_is_available(func):
try:
import boto
except ImportError:
boto = None
pred = lambda: boto is not None
return run_only(func, pred)
class TestElbCollector(CollectorTestCase):
@run_only_if_boto_is_available
def test_throws_exception_when_interval_not_multiple_of_60(self):
config = get_collector_config('ElbCollector',
{'enabled': True,
'interval': 10})
assertRaisesAndContains(Exception, 'multiple of',
ElbCollector, *[config, None])
@run_only_if_boto_is_available
@patch('elb.cloudwatch')
@patch('boto.ec2.connect_to_region')
@patch('boto.ec2.elb.connect_to_region')
@patch.object(Collector, 'publish_metric')
def test_ignore(self, publish_metric, elb_connect_to_region,
ec2_connect_to_region, cloudwatch):
config = get_collector_config(
'ElbCollector',
{
'enabled': True,
'interval': 60,
'regions': {
'us-west-1': {}
},
'elbs_ignored': ['^to_ignore', ],
})
az = Mock()
az.name = 'us-west-1a'
ec2_conn = Mock()
ec2_conn.get_all_zones = Mock()
ec2_conn.get_all_zones.return_value = [az]
ec2_connect_to_region.return_value = ec2_conn
elb1 = Mock()
elb1.name = 'elb1'
elb2 = Mock()
elb2.name = 'to_ignore'
elb_conn = Mock()
elb_conn.get_all_load_balancers = Mock()
elb_conn.get_all_load_balancers.return_value = [elb1, elb2]
elb_connect_to_region.return_value = elb_conn
cw_conn = Mock()
cw_conn.region = Mock()
cw_conn.region.name = 'us-west-1'
cw_conn.get_metric_statistics = Mock()
ts = datetime.datetime.utcnow().replace(second=0, microsecond=0)
cw_conn.get_metric_statistics.side_effect = [
[{u'Timestamp': ts, u'Average': 1.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Average': 2.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 3.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Average': 4.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 6.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 7.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 8.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 9.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 10.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 11.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 12.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Maximum': 13.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 14.0, u'Unit': u'Count'}],
]
cloudwatch.connect_to_region = Mock()
cloudwatch.connect_to_region.return_value = cw_conn
collector = ElbCollector(config, handlers=[])
target = ts + datetime.timedelta(minutes=1)
with mock.patch.object(datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as patched:
patched.utcnow.return_value = target
collector.collect()
self.assertPublishedMetricMany(
publish_metric,
{
'us-west-1a.elb1.HealthyHostCount': 1,
'us-west-1a.elb1.UnHealthyHostCount': 2,
'us-west-1a.elb1.RequestCount': 3,
'us-west-1a.elb1.Latency': 4,
'us-west-1a.elb1.HTTPCode_ELB_4XX': 6,
'us-west-1a.elb1.HTTPCode_ELB_5XX': 7,
'us-west-1a.elb1.HTTPCode_Backend_2XX': 8,
'us-west-1a.elb1.HTTPCode_Backend_3XX': 9,
'us-west-1a.elb1.HTTPCode_Backend_4XX': 10,
'us-west-1a.elb1.HTTPCode_Backend_5XX': 11,
'us-west-1a.elb1.BackendConnectionErrors': 12,
'us-west-1a.elb1.SurgeQueueLength': 13,
'us-west-1a.elb1.SpilloverCount': 14,
})
@run_only_if_boto_is_available
@patch('elb.cloudwatch')
@patch('boto.ec2.connect_to_region')
@patch.object(Collector, 'publish_metric')
def test_collect(self, publish_metric, connect_to_region, cloudwatch):
config = get_collector_config(
'ElbCollector',
{
'enabled': True,
'interval': 60,
'regions': {
'us-west-1': {
'elb_names': ['elb1'],
}
}
})
az = Mock()
az.name = 'us-west-1a'
ec2_conn = Mock()
ec2_conn.get_all_zones = Mock()
ec2_conn.get_all_zones.return_value = [az]
connect_to_region.return_value = ec2_conn
cw_conn = Mock()
cw_conn.region = Mock()
cw_conn.region.name = 'us-west-1'
cw_conn.get_metric_statistics = Mock()
ts = datetime.datetime.utcnow().replace(second=0, microsecond=0)
cw_conn.get_metric_statistics.side_effect = [
[{u'Timestamp': ts, u'Average': 1.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Average': 2.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 3.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Average': 4.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 6.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 7.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 8.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 9.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 10.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 11.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 12.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Maximum': 13.0, u'Unit': u'Count'}],
[{u'Timestamp': ts, u'Sum': 14.0, u'Unit': u'Count'}],
]
cloudwatch.connect_to_region = Mock()
cloudwatch.connect_to_region.return_value = cw_conn
collector = ElbCollector(config, handlers=[])
target = ts + datetime.timedelta(minutes=1)
with mock.patch.object(datetime, 'datetime',
mock.Mock(wraps=datetime.datetime)) as patched:
patched.utcnow.return_value = target
collector.collect()
self.assertPublishedMetricMany(
publish_metric,
{
'us-west-1a.elb1.HealthyHostCount': 1,
'us-west-1a.elb1.UnHealthyHostCount': 2,
'us-west-1a.elb1.RequestCount': 3,
'us-west-1a.elb1.Latency': 4,
'us-west-1a.elb1.HTTPCode_ELB_4XX': 6,
'us-west-1a.elb1.HTTPCode_ELB_5XX': 7,
'us-west-1a.elb1.HTTPCode_Backend_2XX': 8,
'us-west-1a.elb1.HTTPCode_Backend_3XX': 9,
'us-west-1a.elb1.HTTPCode_Backend_4XX': 10,
'us-west-1a.elb1.HTTPCode_Backend_5XX': 11,
'us-west-1a.elb1.BackendConnectionErrors': 12,
'us-west-1a.elb1.SurgeQueueLength': 13,
'us-west-1a.elb1.SpilloverCount': 14,
})
def assertRaisesAndContains(excClass, contains_str, callableObj, *args,
**kwargs):
try:
callableObj(*args, **kwargs)
except excClass as e:
msg = str(e)
if contains_str in msg:
return
else:
raise AssertionError(
"Exception message does not contain '%s': '%s'" % (
contains_str, msg))
else:
if hasattr(excClass, '__name__'):
excName = excClass.__name__
else:
excName = str(excClass)
raise AssertionError("%s not raised" % excName)
if __name__ == "__main__":
unittest.main()
|
MichaelDoyle/Diamond
|
src/collectors/elb/test/testelb.py
|
Python
|
mit
| 8,325 | 0.00024 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Blei's LDA-C format.
"""
from __future__ import with_statement
import logging
from gensim import interfaces, utils
from gensim.corpora import IndexedCorpus
logger = logging.getLogger('gensim.corpora.bleicorpus')
class BleiCorpus(IndexedCorpus):
"""
Corpus in Blei's LDA-C format.
The corpus is represented as two files: one describing the documents, and another
describing the mapping between words and their ids.
Each document is one line::
N fieldId1:fieldValue1 fieldId2:fieldValue2 ... fieldIdN:fieldValueN
The vocabulary is a file with words, one word per line; word at line K has an
implicit ``id=K``.
"""
def __init__(self, fname, fname_vocab=None):
"""
Initialize the corpus from a file.
`fname_vocab` is the file with vocabulary; if not specified, it defaults to
`fname.vocab`.
"""
IndexedCorpus.__init__(self, fname)
logger.info("loading corpus from %s" % fname)
if fname_vocab is None:
fname_vocab = fname + '.vocab'
self.fname = fname
words = [word.rstrip() for word in open(fname_vocab)]
self.id2word = dict(enumerate(words))
self.length = None
def __iter__(self):
"""
Iterate over the corpus, returning one sparse vector at a time.
"""
length = 0
for lineNo, line in enumerate(open(self.fname)):
length += 1
yield self.line2doc(line)
self.length = length
def line2doc(self, line):
parts = line.split()
if int(parts[0]) != len(parts) - 1:
raise ValueError("invalid format in %s: %s" %
(self.fname, repr(line)))
doc = [part.rsplit(':', 1) for part in parts[1:]]
doc = [(int(p1), float(p2)) for p1, p2 in doc]
return doc
@staticmethod
def save_corpus(fname, corpus, id2word=None):
"""
Save a corpus in the LDA-C format.
There are actually two files saved: `fname` and `fname.vocab`, where
`fname.vocab` is the vocabulary file.
This function is automatically called by `BleiCorpus.serialize`; don't
call it directly, call `serialize` instead.
"""
if id2word is None:
logger.info("no word id mapping provided; initializing from corpus")
id2word = utils.dict_from_corpus(corpus)
num_terms = len(id2word)
else:
num_terms = 1 + max([-1] + id2word.keys())
logger.info("storing corpus in Blei's LDA-C format: %s" % fname)
with open(fname, 'w') as fout:
offsets = []
for doc in corpus:
doc = list(doc)
offsets.append(fout.tell())
fout.write("%i %s\n" % (len(doc),
' '.join("%i:%s" % p for p in doc if abs(p[1]) > 1e-12)))
# write out vocabulary, in a format compatible with Blei's topics.py script
fname_vocab = fname + '.vocab'
logger.info("saving vocabulary of %i words to %s" % (num_terms, fname_vocab))
with open(fname_vocab, 'w') as fout:
for featureid in xrange(num_terms):
fout.write("%s\n" % utils.to_utf8(id2word.get(featureid, '---')))
return offsets
def docbyoffset(self, offset):
"""
Return the document stored at file position `offset`.
"""
with open(self.fname) as f:
f.seek(offset)
return self.line2doc(f.readline())
#endclass BleiCorpus
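# Usage sketch (hypothetical paths; serialize is inherited from IndexedCorpus):
#
# corpus = [[(0, 1.0), (1, 2.0)], [(1, 1.0)]]
# BleiCorpus.serialize('/tmp/deerwester.lda-c', corpus,
#                      id2word={0: 'alpha', 1: 'beta'})
# for doc in BleiCorpus('/tmp/deerwester.lda-c'):
#     print doc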
|
auduno/gensim
|
gensim/corpora/bleicorpus.py
|
Python
|
gpl-3.0
| 3,768 | 0.00345 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ResourceWriteFailureData(Model):
"""Schema of the Data property of an EventGridEvent for a
Microsoft.Resources.ResourceWriteFailure event. This is raised when a
resource create or update operation fails.
:param tenant_id: The tenant ID of the resource.
:type tenant_id: str
:param subscription_id: The subscription ID of the resource.
:type subscription_id: str
:param resource_group: The resource group of the resource.
:type resource_group: str
:param resource_provider: The resource provider performing the operation.
:type resource_provider: str
:param resource_uri: The URI of the resource in the operation.
:type resource_uri: str
:param operation_name: The operation that was performed.
:type operation_name: str
:param status: The status of the operation.
:type status: str
:param authorization: The requested authorization for the operation.
:type authorization: str
:param claims: The properties of the claims.
:type claims: str
:param correlation_id: An operation ID used for troubleshooting.
:type correlation_id: str
:param http_request: The details of the operation.
:type http_request: str
"""
_attribute_map = {
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
'resource_group': {'key': 'resourceGroup', 'type': 'str'},
'resource_provider': {'key': 'resourceProvider', 'type': 'str'},
'resource_uri': {'key': 'resourceUri', 'type': 'str'},
'operation_name': {'key': 'operationName', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'authorization': {'key': 'authorization', 'type': 'str'},
'claims': {'key': 'claims', 'type': 'str'},
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'http_request': {'key': 'httpRequest', 'type': 'str'},
}
def __init__(self, tenant_id=None, subscription_id=None, resource_group=None, resource_provider=None, resource_uri=None, operation_name=None, status=None, authorization=None, claims=None, correlation_id=None, http_request=None):
super(ResourceWriteFailureData, self).__init__()
self.tenant_id = tenant_id
self.subscription_id = subscription_id
self.resource_group = resource_group
self.resource_provider = resource_provider
self.resource_uri = resource_uri
self.operation_name = operation_name
self.status = status
self.authorization = authorization
self.claims = claims
self.correlation_id = correlation_id
self.http_request = http_request
|
lmazuel/azure-sdk-for-python
|
azure-eventgrid/azure/eventgrid/models/resource_write_failure_data.py
|
Python
|
mit
| 3,203 | 0.000312 |
"""(Re)builds feeds for categories"""
import os
import datetime
import jinja2
from google.appengine.api import app_identity
import dao
import util
def build_and_save_for_category(cat, store, prefix):
"""Build and save feeds for category"""
feed = build_feed(cat)
save_feeds(store, feed, prefix, cat.key.id())
def build_feed(cat):
"""Build feed for category"""
feed = Feed(title=cat.title, link=get_app_url())
items = dao.latest_torrents(feed_size(cat), cat.key)
for item in items:
feed.add_item(item)
return feed
def get_app_url():
"""Returns full URL for app engine app"""
app_id = app_identity.get_application_id()
return 'http://{}.appspot.com/'.format(app_id)
def save_feeds(store, feed, prefix, name):
"""Saves feeds to storage"""
xml = feed.render_short_rss()
path = os.path.join(prefix, 'short', '{}.xml'.format(name))
store.put(path, xml.encode('utf-8'), 'application/rss+xml')
class Feed(object):
"""Represents feed with torrent entries"""
def __init__(self, title, link, ttl=60, description=None):
self.title = title
self.link = link
self.description = description or title
self.ttl = ttl
self.items = []
self.lastBuildDate = None
self.latest_item_dt = datetime.datetime.utcfromtimestamp(0)
def add_item(self, item):
self.items.append(item)
if self.latest_item_dt < item.dt:
self.latest_item_dt = item.dt
def render_short_rss(self):
self.lastBuildDate = self.latest_item_dt
env = make_jinja_env()
template = env.get_template('rss_short.xml')
return template.render(feed=self)
def make_jinja_env():
jinja2_env = jinja2.Environment(
loader=jinja2.FileSystemLoader('templates'),
# loader=PackageLoader('package_name', 'templates'),
autoescape=True,
extensions=['jinja2.ext.autoescape']
)
jinja2_env.filters['rfc822date'] = util.datetime_to_rfc822
return jinja2_env
def feed_size(category):
"""Returns number of feed entries for category"""
if category.key.id() == 'r0': # Root category
return 100
elif category.key.id().startswith('c'): # Level 2 category
return 50
return 25 # category with subcategories
|
notapresent/rutracker_rss
|
feeds.py
|
Python
|
apache-2.0
| 2,363 | 0 |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Functions that deal with local and device ports."""
import contextlib
import fcntl
import httplib
import logging
import os
import socket
import traceback
# The net test server is started from port 10201.
_TEST_SERVER_PORT_FIRST = 10201
_TEST_SERVER_PORT_LAST = 30000
# A file to record next valid port of test server.
_TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
_TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'
# The following two methods are used to allocate the port source for various
# types of test servers. Because some net-related tests can be run on shards at
# same time, it's important to have a mechanism to allocate the port
# process-safe. In here, we implement the safe port allocation by leveraging
# flock.
def ResetTestServerPortAllocation():
"""Resets the port allocation to start from TEST_SERVER_PORT_FIRST.
Returns:
Returns True if reset successes. Otherwise returns False.
"""
try:
with open(_TEST_SERVER_PORT_FILE, 'w') as fp:
fp.write('%d' % _TEST_SERVER_PORT_FIRST)
if os.path.exists(_TEST_SERVER_PORT_LOCKFILE):
os.unlink(_TEST_SERVER_PORT_LOCKFILE)
return True
except Exception: # pylint: disable=broad-except
logging.exception('Error while resetting port allocation')
return False
def AllocateTestServerPort():
"""Allocates a port incrementally.
Returns:
Returns a valid port which should be in between TEST_SERVER_PORT_FIRST and
TEST_SERVER_PORT_LAST. Returning 0 means no more valid port can be used.
"""
  port = 0
  ports_tried = []
  fp_lock = None  # initialize so the finally block below cannot hit a NameError
  try:
    fp_lock = open(_TEST_SERVER_PORT_LOCKFILE, 'w')
fcntl.flock(fp_lock, fcntl.LOCK_EX)
# Get current valid port and calculate next valid port.
if not os.path.exists(_TEST_SERVER_PORT_FILE):
ResetTestServerPortAllocation()
with open(_TEST_SERVER_PORT_FILE, 'r+') as fp:
port = int(fp.read())
ports_tried.append(port)
while not IsHostPortAvailable(port):
port += 1
ports_tried.append(port)
if (port > _TEST_SERVER_PORT_LAST or
port < _TEST_SERVER_PORT_FIRST):
port = 0
else:
fp.seek(0, os.SEEK_SET)
fp.write('%d' % (port + 1))
except Exception: # pylint: disable=broad-except
    logging.exception('Error while allocating port')
finally:
if fp_lock:
fcntl.flock(fp_lock, fcntl.LOCK_UN)
fp_lock.close()
if port:
logging.info('Allocate port %d for test server.', port)
else:
logging.error('Could not allocate port for test server. '
'List of ports tried: %s', str(ports_tried))
return port
def IsHostPortAvailable(host_port):
"""Checks whether the specified host port is available.
Args:
host_port: Port on host to check.
Returns:
True if the port on host is available, otherwise returns False.
"""
s = socket.socket()
try:
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(('', host_port))
s.close()
return True
except socket.error:
return False
def IsDevicePortUsed(device, device_port, state=''):
"""Checks whether the specified device port is used or not.
Args:
device: A DeviceUtils instance.
device_port: Port on device we want to check.
state: String of the specified state. Default is empty string, which
means any state.
Returns:
True if the port on device is already used, otherwise returns False.
"""
base_urls = ('127.0.0.1:%d' % device_port, 'localhost:%d' % device_port)
netstat_results = device.RunShellCommand(
['netstat', '-a'], check_return=True, large_output=True)
for single_connect in netstat_results:
# Column 3 is the local address which we want to check with.
connect_results = single_connect.split()
if connect_results[0] != 'tcp':
continue
if len(connect_results) < 6:
raise Exception('Unexpected format while parsing netstat line: ' +
single_connect)
is_state_match = connect_results[5] == state if state else True
if connect_results[3] in base_urls and is_state_match:
return True
return False
def IsHttpServerConnectable(host, port, tries=3, command='GET', path='/',
expected_read='', timeout=2):
"""Checks whether the specified http server is ready to serve request or not.
Args:
host: Host name of the HTTP server.
port: Port number of the HTTP server.
tries: How many times we want to test the connection. The default value is
3.
command: The http command we use to connect to HTTP server. The default
command is 'GET'.
path: The path we use when connecting to HTTP server. The default path is
'/'.
expected_read: The content we expect to read from the response. The default
value is ''.
timeout: Timeout (in seconds) for each http connection. The default is 2s.
Returns:
Tuple of (connect status, client error). connect status is a boolean value
to indicate whether the server is connectable. client_error is the error
message the server returns when connect status is false.
"""
assert tries >= 1
for i in xrange(0, tries):
client_error = None
try:
with contextlib.closing(httplib.HTTPConnection(
host, port, timeout=timeout)) as http:
# Output some debug information when we have tried more than 2 times.
http.set_debuglevel(i >= 2)
http.request(command, path)
r = http.getresponse()
content = r.read()
if r.status == 200 and r.reason == 'OK' and content == expected_read:
return (True, '')
client_error = ('Bad response: %s %s version %s\n ' %
(r.status, r.reason, r.version) +
'\n '.join([': '.join(h) for h in r.getheaders()]))
except (httplib.HTTPException, socket.error) as e:
# Probably too quick connecting: try again.
exception_error_msgs = traceback.format_exception_only(type(e), e)
if exception_error_msgs:
client_error = ''.join(exception_error_msgs)
# Only returns last client_error.
return (False, client_error or 'Timeout')
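# Usage sketch (hypothetical server and path): poll a local HTTP server until
# it is ready, then report the last client error if it never becomes
# connectable.
#
# ok, client_error = IsHttpServerConnectable(
#     '127.0.0.1', 8000, tries=5, path='/ping', expected_read='pong')
# if not ok:
#   logging.error('Server not ready: %s', client_error)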
|
SummerLW/Perf-Insight-Report
|
devil/devil/android/ports.py
|
Python
|
bsd-3-clause
| 6,336 | 0.007418 |
# coding:utf-8
from urllib import parse as url_parse
from logger.log import crawler
from apps.celery_init import celery
from page_get.basic import get_page
from config.conf import get_max_search_page
from page_parse import search as parse_search
from db.search_words import get_search_keywords
from db.keywords_wbdata import insert_keyword_wbid
from db.wb_data import insert_weibo_data, get_wb_by_mid
# This url is just for original weibos.
# If you want other kind of search, you can change the url below
url = 'http://s.weibo.com/weibo/{}&scope=ori&suball=1&page={}'
limit = get_max_search_page() + 1
@celery.task(ignore_result=True)
def search_keyword(keyword, keyword_id):
cur_page = 1
encode_keyword = url_parse.quote(keyword)
while cur_page < limit:
cur_url = url.format(encode_keyword, cur_page)
search_page = get_page(cur_url)
if not search_page:
crawler.warning('No result for keyword {}, the source page is {}'.format(keyword, search_page))
return
search_list = parse_search.get_search_info(search_page)
# Because the search results are sorted by time, if any result has been stored in mysql,
# we need not crawl the same keyword in this turn
for wb_data in search_list:
rs = get_wb_by_mid(wb_data.weibo_id)
if rs:
crawler.info('keyword {} has been crawled in this turn'.format(keyword))
return
else:
insert_weibo_data(wb_data)
insert_keyword_wbid(keyword_id, wb_data.weibo_id)
# send task for crawling user info
celery.send_task('celery_tasks.weibo.user.crawl_person_infos', args=(wb_data.uid,), queue='user_crawler',
routing_key='for_user_info')
if 'page next S_txt1 S_line1' in search_page:
cur_page += 1
else:
crawler.info('keyword {} has been crawled in this turn'.format(keyword))
return
@celery.task(ignore_result=True)
def excute_search_task():
keywords = get_search_keywords()
for each in keywords:
celery.send_task('celery_tasks.weibo.search.search_keyword', args=(each[0], each[1]), queue='search_crawler',
routing_key='for_search_info')
|
xtuyaowu/jtyd_python_spider
|
celery_tasks/weibo/search.py
|
Python
|
mit
| 2,317 | 0.003021 |
from distutils.core import setup
from ripwrap import __VERSION__
setup(
name = 'ripwrap',
version = __VERSION__,
description = 'A wrapper for ReSTinPeace, for Django applications.',
    long_description = open('README').read(),
author = 'P.C. Shyamshankar',
packages = ['ripwrap'],
url = 'http://github.com/sykora/django-ripwrap/',
license = 'GNU General Public License v3.0',
classifiers = (
'Development Status :: 1 - Planning',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python',
)
)
|
sykora/django-ripwrap
|
setup.py
|
Python
|
gpl-3.0
| 712 | 0.025281 |
"""
The tests exercise the casting machinery in a more low-level manner.
The reason is mostly to test a new implementation of the casting machinery.
Unlike most tests in NumPy, these are closer to unit-tests rather
than integration tests.
"""
import pytest
import textwrap
import enum
import itertools
import random
import numpy as np
from numpy.lib.stride_tricks import as_strided
from numpy.testing import assert_array_equal
from numpy.core._multiarray_umath import _get_castingimpl as get_castingimpl
# Simple skips object, parametric and long double (unsupported by struct)
simple_dtypes = "?bhilqBHILQefdFD"
if np.dtype("l").itemsize != np.dtype("q").itemsize:
# Remove l and L, the table was generated with 64bit linux in mind.
simple_dtypes = simple_dtypes.replace("l", "").replace("L", "")
simple_dtypes = [type(np.dtype(c)) for c in simple_dtypes]
def simple_dtype_instances():
for dtype_class in simple_dtypes:
dt = dtype_class()
yield pytest.param(dt, id=str(dt))
if dt.byteorder != "|":
dt = dt.newbyteorder()
yield pytest.param(dt, id=str(dt))
def get_expected_stringlength(dtype):
"""Returns the string length when casting the basic dtypes to strings.
"""
if dtype == np.bool_:
return 5
if dtype.kind in "iu":
if dtype.itemsize == 1:
length = 3
elif dtype.itemsize == 2:
length = 5
elif dtype.itemsize == 4:
length = 10
elif dtype.itemsize == 8:
length = 20
else:
raise AssertionError(f"did not find expected length for {dtype}")
if dtype.kind == "i":
length += 1 # adds one character for the sign
return length
# Note: Can't do dtype comparison for longdouble on windows
if dtype.char == "g":
return 48
elif dtype.char == "G":
return 48 * 2
elif dtype.kind == "f":
return 32 # also for half apparently.
elif dtype.kind == "c":
return 32 * 2
raise AssertionError(f"did not find expected length for {dtype}")
class Casting(enum.IntEnum):
no = 0
equiv = 1
safe = 2
same_kind = 3
unsafe = 4
cast_is_view = 1 << 16
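# Note: cast_is_view is a bit flag OR-ed onto the base casting level rather
# than a level of its own; e.g. ``Casting.no | Casting.cast_is_view`` marks a
# cast that can also be performed as a view, and
# ``casting & ~Casting.cast_is_view`` recovers the plain level (as used in the
# tests below).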
def _get_cancast_table():
table = textwrap.dedent("""
X ? b h i l q B H I L Q e f d g F D G S U V O M m
? # = = = = = = = = = = = = = = = = = = = = = . =
b . # = = = = . . . . . = = = = = = = = = = = . =
h . ~ # = = = . . . . . ~ = = = = = = = = = = . =
i . ~ ~ # = = . . . . . ~ ~ = = ~ = = = = = = . =
l . ~ ~ ~ # # . . . . . ~ ~ = = ~ = = = = = = . =
q . ~ ~ ~ # # . . . . . ~ ~ = = ~ = = = = = = . =
B . ~ = = = = # = = = = = = = = = = = = = = = . =
H . ~ ~ = = = ~ # = = = ~ = = = = = = = = = = . =
I . ~ ~ ~ = = ~ ~ # = = ~ ~ = = ~ = = = = = = . =
L . ~ ~ ~ ~ ~ ~ ~ ~ # # ~ ~ = = ~ = = = = = = . ~
Q . ~ ~ ~ ~ ~ ~ ~ ~ # # ~ ~ = = ~ = = = = = = . ~
e . . . . . . . . . . . # = = = = = = = = = = . .
f . . . . . . . . . . . ~ # = = = = = = = = = . .
d . . . . . . . . . . . ~ ~ # = ~ = = = = = = . .
g . . . . . . . . . . . ~ ~ ~ # ~ ~ = = = = = . .
F . . . . . . . . . . . . . . . # = = = = = = . .
D . . . . . . . . . . . . . . . ~ # = = = = = . .
G . . . . . . . . . . . . . . . ~ ~ # = = = = . .
S . . . . . . . . . . . . . . . . . . # = = = . .
U . . . . . . . . . . . . . . . . . . . # = = . .
V . . . . . . . . . . . . . . . . . . . . # = . .
O . . . . . . . . . . . . . . . . . . . . = # . .
M . . . . . . . . . . . . . . . . . . . . = = # .
m . . . . . . . . . . . . . . . . . . . . = = . #
""").strip().split("\n")
dtypes = [type(np.dtype(c)) for c in table[0][2::2]]
convert_cast = {".": Casting.unsafe, "~": Casting.same_kind,
"=": Casting.safe, "#": Casting.equiv,
" ": -1}
cancast = {}
for from_dt, row in zip(dtypes, table[1:]):
cancast[from_dt] = {}
for to_dt, c in zip(dtypes, row[2::2]):
cancast[from_dt][to_dt] = convert_cast[c]
return cancast
CAST_TABLE = _get_cancast_table()
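# Illustrative lookup: the nested dict is keyed by dtype *classes*, e.g.
#
#   CAST_TABLE[type(np.dtype("i4"))][type(np.dtype("f8"))] == Casting.safe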
class TestChanges:
"""
These test cases exercise some behaviour changes
"""
@pytest.mark.parametrize("string", ["S", "U"])
@pytest.mark.parametrize("floating", ["e", "f", "d", "g"])
def test_float_to_string(self, floating, string):
assert np.can_cast(floating, string)
# 100 is long enough to hold any formatted floating
assert np.can_cast(floating, f"{string}100")
def test_to_void(self):
# But in general, we do consider these safe:
assert np.can_cast("d", "V")
assert np.can_cast("S20", "V")
        # Do not consider it a safe cast if the void is too small:
assert not np.can_cast("d", "V1")
assert not np.can_cast("S20", "V1")
assert not np.can_cast("U1", "V1")
# Structured to unstructured is just like any other:
assert np.can_cast("d,i", "V", casting="same_kind")
# Unstructured void to unstructured is actually no cast at all:
assert np.can_cast("V3", "V", casting="no")
assert np.can_cast("V0", "V", casting="no")
class TestCasting:
    size = 1500  # Best if larger than NPY_LOWLEVEL_BUFFER_BLOCKSIZE * itemsize
def get_data(self, dtype1, dtype2):
if dtype2 is None or dtype1.itemsize >= dtype2.itemsize:
length = self.size // dtype1.itemsize
else:
length = self.size // dtype2.itemsize
# Assume that the base array is well enough aligned for all inputs.
arr1 = np.empty(length, dtype=dtype1)
assert arr1.flags.c_contiguous
assert arr1.flags.aligned
values = [random.randrange(-128, 128) for _ in range(length)]
for i, value in enumerate(values):
# Use item assignment to ensure this is not using casting:
arr1[i] = value
if dtype2 is None:
if dtype1.char == "?":
values = [bool(v) for v in values]
return arr1, values
if dtype2.char == "?":
values = [bool(v) for v in values]
arr2 = np.empty(length, dtype=dtype2)
assert arr2.flags.c_contiguous
assert arr2.flags.aligned
for i, value in enumerate(values):
# Use item assignment to ensure this is not using casting:
arr2[i] = value
return arr1, arr2, values
def get_data_variation(self, arr1, arr2, aligned=True, contig=True):
"""
Returns a copy of arr1 that may be non-contiguous or unaligned, and a
matching array for arr2 (although not a copy).
"""
if contig:
stride1 = arr1.dtype.itemsize
stride2 = arr2.dtype.itemsize
elif aligned:
stride1 = 2 * arr1.dtype.itemsize
stride2 = 2 * arr2.dtype.itemsize
else:
stride1 = arr1.dtype.itemsize + 1
stride2 = arr2.dtype.itemsize + 1
max_size1 = len(arr1) * 3 * arr1.dtype.itemsize + 1
max_size2 = len(arr2) * 3 * arr2.dtype.itemsize + 1
from_bytes = np.zeros(max_size1, dtype=np.uint8)
to_bytes = np.zeros(max_size2, dtype=np.uint8)
# Sanity check that the above is large enough:
assert stride1 * len(arr1) <= from_bytes.nbytes
assert stride2 * len(arr2) <= to_bytes.nbytes
if aligned:
new1 = as_strided(from_bytes[:-1].view(arr1.dtype),
arr1.shape, (stride1,))
new2 = as_strided(to_bytes[:-1].view(arr2.dtype),
arr2.shape, (stride2,))
else:
new1 = as_strided(from_bytes[1:].view(arr1.dtype),
arr1.shape, (stride1,))
new2 = as_strided(to_bytes[1:].view(arr2.dtype),
arr2.shape, (stride2,))
new1[...] = arr1
if not contig:
# Ensure we did not overwrite bytes that should not be written:
offset = arr1.dtype.itemsize if aligned else 0
buf = from_bytes[offset::stride1].tobytes()
assert buf.count(b"\0") == len(buf)
if contig:
assert new1.flags.c_contiguous
assert new2.flags.c_contiguous
else:
assert not new1.flags.c_contiguous
assert not new2.flags.c_contiguous
if aligned:
assert new1.flags.aligned
assert new2.flags.aligned
else:
assert not new1.flags.aligned or new1.dtype.alignment == 1
assert not new2.flags.aligned or new2.dtype.alignment == 1
return new1, new2
@pytest.mark.parametrize("from_Dt", simple_dtypes)
def test_simple_cancast(self, from_Dt):
for to_Dt in simple_dtypes:
cast = get_castingimpl(from_Dt, to_Dt)
for from_dt in [from_Dt(), from_Dt().newbyteorder()]:
default = cast._resolve_descriptors((from_dt, None))[1][1]
assert default == to_Dt()
del default
for to_dt in [to_Dt(), to_Dt().newbyteorder()]:
casting, (from_res, to_res) = cast._resolve_descriptors(
(from_dt, to_dt))
assert(type(from_res) == from_Dt)
assert(type(to_res) == to_Dt)
if casting & Casting.cast_is_view:
# If a view is acceptable, this is "no" casting
# and byte order must be matching.
assert casting == Casting.no | Casting.cast_is_view
# The above table lists this as "equivalent"
assert Casting.equiv == CAST_TABLE[from_Dt][to_Dt]
# Note that to_res may not be the same as from_dt
assert from_res.isnative == to_res.isnative
else:
if from_Dt == to_Dt:
# Note that to_res may not be the same as from_dt
assert from_res.isnative != to_res.isnative
assert casting == CAST_TABLE[from_Dt][to_Dt]
if from_Dt is to_Dt:
assert(from_dt is from_res)
assert(to_dt is to_res)
@pytest.mark.filterwarnings("ignore::numpy.ComplexWarning")
@pytest.mark.parametrize("from_dt", simple_dtype_instances())
def test_simple_direct_casts(self, from_dt):
"""
This test checks numeric direct casts for dtypes supported also by the
        struct module (plus complex). It tries to test a wide range of
inputs, but skips over possibly undefined behaviour (e.g. int rollover).
Longdouble and CLongdouble are tested, but only using double precision.
If this test creates issues, it should possibly just be simplified
or even removed (checking whether unaligned/non-contiguous casts give
the same results is useful, though).
"""
for to_dt in simple_dtype_instances():
to_dt = to_dt.values[0]
cast = get_castingimpl(type(from_dt), type(to_dt))
casting, (from_res, to_res) = cast._resolve_descriptors(
(from_dt, to_dt))
if from_res is not from_dt or to_res is not to_dt:
                # Do not test this case, it is handled in multiple steps,
                # each of which is tested individually.
return
safe = (casting & ~Casting.cast_is_view) <= Casting.safe
del from_res, to_res, casting
arr1, arr2, values = self.get_data(from_dt, to_dt)
cast._simple_strided_call((arr1, arr2))
# Check via python list
assert arr2.tolist() == values
# Check that the same results are achieved for strided loops
arr1_o, arr2_o = self.get_data_variation(arr1, arr2, True, False)
cast._simple_strided_call((arr1_o, arr2_o))
assert_array_equal(arr2_o, arr2)
assert arr2_o.tobytes() == arr2.tobytes()
# Check if alignment makes a difference, but only if supported
# and only if the alignment can be wrong
if ((from_dt.alignment == 1 and to_dt.alignment == 1) or
not cast._supports_unaligned):
return
arr1_o, arr2_o = self.get_data_variation(arr1, arr2, False, True)
cast._simple_strided_call((arr1_o, arr2_o))
assert_array_equal(arr2_o, arr2)
assert arr2_o.tobytes() == arr2.tobytes()
arr1_o, arr2_o = self.get_data_variation(arr1, arr2, False, False)
cast._simple_strided_call((arr1_o, arr2_o))
assert_array_equal(arr2_o, arr2)
assert arr2_o.tobytes() == arr2.tobytes()
del arr1_o, arr2_o, cast
@pytest.mark.parametrize("from_Dt", simple_dtypes)
def test_numeric_to_times(self, from_Dt):
# We currently only implement contiguous loops, so only need to
# test those.
from_dt = from_Dt()
time_dtypes = [np.dtype("M8"), np.dtype("M8[ms]"), np.dtype("M8[4D]"),
np.dtype("m8"), np.dtype("m8[ms]"), np.dtype("m8[4D]")]
for time_dt in time_dtypes:
cast = get_castingimpl(type(from_dt), type(time_dt))
casting, (from_res, to_res) = cast._resolve_descriptors(
(from_dt, time_dt))
assert from_res is from_dt
assert to_res is time_dt
del from_res, to_res
            assert casting & CAST_TABLE[from_Dt][type(time_dt)]
int64_dt = np.dtype(np.int64)
arr1, arr2, values = self.get_data(from_dt, int64_dt)
arr2 = arr2.view(time_dt)
arr2[...] = np.datetime64("NaT")
if time_dt == np.dtype("M8"):
# This is a bit of a strange path, and could probably be removed
arr1[-1] = 0 # ensure at least one value is not NaT
# The cast currently succeeds, but the values are invalid:
cast._simple_strided_call((arr1, arr2))
with pytest.raises(ValueError):
str(arr2[-1]) # e.g. conversion to string fails
return
cast._simple_strided_call((arr1, arr2))
assert [int(v) for v in arr2.tolist()] == values
# Check that the same results are achieved for strided loops
arr1_o, arr2_o = self.get_data_variation(arr1, arr2, True, False)
cast._simple_strided_call((arr1_o, arr2_o))
assert_array_equal(arr2_o, arr2)
assert arr2_o.tobytes() == arr2.tobytes()
@pytest.mark.parametrize(
["from_dt", "to_dt", "expected_casting", "nom", "denom"],
[("M8[ns]", None,
Casting.no | Casting.cast_is_view, 1, 1),
(str(np.dtype("M8[ns]").newbyteorder()), None, Casting.equiv, 1, 1),
("M8", "M8[ms]", Casting.safe | Casting.cast_is_view, 1, 1),
("M8[ms]", "M8", Casting.unsafe, 1, 1), # should be invalid cast
("M8[5ms]", "M8[5ms]", Casting.no | Casting.cast_is_view, 1, 1),
("M8[ns]", "M8[ms]", Casting.same_kind, 1, 10**6),
("M8[ms]", "M8[ns]", Casting.safe, 10**6, 1),
("M8[ms]", "M8[7ms]", Casting.same_kind, 1, 7),
("M8[4D]", "M8[1M]", Casting.same_kind, None,
# give full values based on NumPy 1.19.x
[-2**63, 0, -1, 1314, -1315, 564442610]),
("m8[ns]", None, Casting.no | Casting.cast_is_view, 1, 1),
(str(np.dtype("m8[ns]").newbyteorder()), None, Casting.equiv, 1, 1),
("m8", "m8[ms]", Casting.safe | Casting.cast_is_view, 1, 1),
("m8[ms]", "m8", Casting.unsafe, 1, 1), # should be invalid cast
("m8[5ms]", "m8[5ms]", Casting.no | Casting.cast_is_view, 1, 1),
("m8[ns]", "m8[ms]", Casting.same_kind, 1, 10**6),
("m8[ms]", "m8[ns]", Casting.safe, 10**6, 1),
("m8[ms]", "m8[7ms]", Casting.same_kind, 1, 7),
("m8[4D]", "m8[1M]", Casting.unsafe, None,
# give full values based on NumPy 1.19.x
[-2**63, 0, 0, 1314, -1315, 564442610])])
def test_time_to_time(self, from_dt, to_dt, expected_casting, nom, denom):
from_dt = np.dtype(from_dt)
if to_dt is not None:
to_dt = np.dtype(to_dt)
# Test a few values for casting (results generated with NumPy 1.19)
values = np.array([-2**63, 1, 2**63-1, 10000, -10000, 2**32])
values = values.astype(np.dtype("int64").newbyteorder(from_dt.byteorder))
assert values.dtype.byteorder == from_dt.byteorder
assert np.isnat(values.view(from_dt)[0])
DType = type(from_dt)
cast = get_castingimpl(DType, DType)
casting, (from_res, to_res) = cast._resolve_descriptors((from_dt, to_dt))
assert from_res is from_dt
assert to_res is to_dt or to_dt is None
assert casting == expected_casting
if nom is not None:
expected_out = (values * nom // denom).view(to_res)
expected_out[0] = "NaT"
else:
expected_out = np.empty_like(values)
expected_out[...] = denom
expected_out = expected_out.view(to_dt)
orig_arr = values.view(from_dt)
orig_out = np.empty_like(expected_out)
if casting == Casting.unsafe and (to_dt == "m8" or to_dt == "M8"):
# Casting from non-generic to generic units is an error and should
# probably be reported as an invalid cast earlier.
with pytest.raises(ValueError):
cast._simple_strided_call((orig_arr, orig_out))
return
        for aligned in [True, False]:
            for contig in [True, False]:
arr, out = self.get_data_variation(
orig_arr, orig_out, aligned, contig)
out[...] = 0
cast._simple_strided_call((arr, out))
assert_array_equal(out.view("int64"), expected_out.view("int64"))
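    # A note on the conversion factors above: casting "M8[ms]" -> "M8[ns]"
    # multiplies the stored int64 values by 10**6 (nom=10**6, denom=1),
    # while "M8[ns]" -> "M8[ms]" floor-divides them by 10**6
    # (nom=1, denom=10**6), as computed by `values * nom // denom`.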
def string_with_modified_length(self, dtype, change_length):
fact = 1 if dtype.char == "S" else 4
length = dtype.itemsize // fact + change_length
return np.dtype(f"{dtype.byteorder}{dtype.char}{length}")
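    # e.g. an itemsize-20 "<U5" dtype with change_length=1 becomes a "<U6"
    # (itemsize 24, since unicode stores 4 bytes per character), while an
    # "S5" dtype with change_length=-1 becomes "S4" (itemsize 4).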
@pytest.mark.parametrize("other_DT", simple_dtypes)
@pytest.mark.parametrize("string_char", ["S", "U"])
def test_string_cancast(self, other_DT, string_char):
fact = 1 if string_char == "S" else 4
string_DT = type(np.dtype(string_char))
cast = get_castingimpl(other_DT, string_DT)
other_dt = other_DT()
expected_length = get_expected_stringlength(other_dt)
string_dt = np.dtype(f"{string_char}{expected_length}")
safety, (res_other_dt, res_dt) = cast._resolve_descriptors((other_dt, None))
assert res_dt.itemsize == expected_length * fact
        assert safety == Casting.safe  # we consider to-string casts "safe"
assert isinstance(res_dt, string_DT)
# These casts currently implement changing the string length, so
        # check the cast-safety for shorter and longer string lengths:
for change_length in [-1, 0, 1]:
if change_length >= 0:
expected_safety = Casting.safe
else:
expected_safety = Casting.same_kind
to_dt = self.string_with_modified_length(string_dt, change_length)
safety, (_, res_dt) = cast._resolve_descriptors((other_dt, to_dt))
assert res_dt is to_dt
assert safety == expected_safety
# The opposite direction is always considered unsafe:
cast = get_castingimpl(string_DT, other_DT)
safety, _ = cast._resolve_descriptors((string_dt, other_dt))
assert safety == Casting.unsafe
cast = get_castingimpl(string_DT, other_DT)
safety, (_, res_dt) = cast._resolve_descriptors((string_dt, None))
assert safety == Casting.unsafe
assert other_dt is res_dt # returns the singleton for simple dtypes
@pytest.mark.parametrize("string_char", ["S", "U"])
@pytest.mark.parametrize("other_dt", simple_dtype_instances())
def test_simple_string_casts_roundtrip(self, other_dt, string_char):
"""
Tests casts from and to string by checking the roundtripping property.
The test also covers some string to string casts (but not all).
If this test creates issues, it should possibly just be simplified
or even removed (checking whether unaligned/non-contiguous casts give
the same results is useful, though).
"""
string_DT = type(np.dtype(string_char))
cast = get_castingimpl(type(other_dt), string_DT)
cast_back = get_castingimpl(string_DT, type(other_dt))
_, (res_other_dt, string_dt) = cast._resolve_descriptors((other_dt, None))
if res_other_dt is not other_dt:
# do not support non-native byteorder, skip test in that case
assert other_dt.byteorder != res_other_dt.byteorder
return
orig_arr, values = self.get_data(other_dt, None)
str_arr = np.zeros(len(orig_arr), dtype=string_dt)
string_dt_short = self.string_with_modified_length(string_dt, -1)
str_arr_short = np.zeros(len(orig_arr), dtype=string_dt_short)
string_dt_long = self.string_with_modified_length(string_dt, 1)
str_arr_long = np.zeros(len(orig_arr), dtype=string_dt_long)
assert not cast._supports_unaligned # if support is added, should test
assert not cast_back._supports_unaligned
for contig in [True, False]:
other_arr, str_arr = self.get_data_variation(
orig_arr, str_arr, True, contig)
_, str_arr_short = self.get_data_variation(
orig_arr, str_arr_short.copy(), True, contig)
_, str_arr_long = self.get_data_variation(
orig_arr, str_arr_long, True, contig)
cast._simple_strided_call((other_arr, str_arr))
cast._simple_strided_call((other_arr, str_arr_short))
assert_array_equal(str_arr.astype(string_dt_short), str_arr_short)
cast._simple_strided_call((other_arr, str_arr_long))
assert_array_equal(str_arr, str_arr_long)
if other_dt.kind == "b":
# Booleans do not roundtrip
continue
other_arr[...] = 0
cast_back._simple_strided_call((str_arr, other_arr))
assert_array_equal(orig_arr, other_arr)
other_arr[...] = 0
cast_back._simple_strided_call((str_arr_long, other_arr))
assert_array_equal(orig_arr, other_arr)
@pytest.mark.parametrize("other_dt", ["S8", "<U8", ">U8"])
@pytest.mark.parametrize("string_char", ["S", "U"])
def test_string_to_string_cancast(self, other_dt, string_char):
other_dt = np.dtype(other_dt)
fact = 1 if string_char == "S" else 4
div = 1 if other_dt.char == "S" else 4
string_DT = type(np.dtype(string_char))
cast = get_castingimpl(type(other_dt), string_DT)
expected_length = other_dt.itemsize // div
string_dt = np.dtype(f"{string_char}{expected_length}")
safety, (res_other_dt, res_dt) = cast._resolve_descriptors((other_dt, None))
assert res_dt.itemsize == expected_length * fact
assert isinstance(res_dt, string_DT)
if other_dt.char == string_char:
if other_dt.isnative:
expected_safety = Casting.no | Casting.cast_is_view
else:
expected_safety = Casting.equiv
elif string_char == "U":
expected_safety = Casting.safe
else:
expected_safety = Casting.unsafe
assert expected_safety == safety
for change_length in [-1, 0, 1]:
to_dt = self.string_with_modified_length(string_dt, change_length)
safety, (_, res_dt) = cast._resolve_descriptors((other_dt, to_dt))
assert res_dt is to_dt
if expected_safety == Casting.unsafe:
assert safety == expected_safety
elif change_length < 0:
assert safety == Casting.same_kind
elif change_length == 0:
assert safety == expected_safety
elif change_length > 0:
assert safety == Casting.safe
@pytest.mark.parametrize("order1", [">", "<"])
@pytest.mark.parametrize("order2", [">", "<"])
def test_unicode_byteswapped_cast(self, order1, order2):
        # A very specific test (not using the castingimpl directly) that
        # checks unicode byteswaps, including for unaligned array data.
dtype1 = np.dtype(f"{order1}U30")
dtype2 = np.dtype(f"{order2}U30")
data1 = np.empty(30 * 4 + 1, dtype=np.uint8)[1:].view(dtype1)
data2 = np.empty(30 * 4 + 1, dtype=np.uint8)[1:].view(dtype2)
if dtype1.alignment != 1:
            # alignment is expected to be > 1 here, but skip the check if not
assert not data1.flags.aligned
assert not data2.flags.aligned
element = "this is a ünicode string‽"
data1[()] = element
# Test both `data1` and `data1.copy()` (which should be aligned)
for data in [data1, data1.copy()]:
data2[...] = data1
assert data2[()] == element
assert data2.copy()[()] == element
def test_void_to_string_special_case(self):
# Cover a small special case in void to string casting that could
# probably just as well be turned into an error (compare
# `test_object_to_parametric_internal_error` below).
assert np.array([], dtype="V5").astype("S").dtype.itemsize == 5
assert np.array([], dtype="V5").astype("U").dtype.itemsize == 4 * 5
def test_object_to_parametric_internal_error(self):
# We reject casting from object to a parametric type, without
# figuring out the correct instance first.
object_dtype = type(np.dtype(object))
other_dtype = type(np.dtype(str))
cast = get_castingimpl(object_dtype, other_dtype)
with pytest.raises(TypeError,
match="casting from object to the parametric DType"):
cast._resolve_descriptors((np.dtype("O"), None))
@pytest.mark.parametrize("dtype", simple_dtype_instances())
def test_object_and_simple_resolution(self, dtype):
# Simple test to exercise the cast when no instance is specified
object_dtype = type(np.dtype(object))
cast = get_castingimpl(object_dtype, type(dtype))
safety, (_, res_dt) = cast._resolve_descriptors((np.dtype("O"), dtype))
assert safety == Casting.unsafe
assert res_dt is dtype
safety, (_, res_dt) = cast._resolve_descriptors((np.dtype("O"), None))
assert safety == Casting.unsafe
assert res_dt == dtype.newbyteorder("=")
@pytest.mark.parametrize("dtype", simple_dtype_instances())
def test_simple_to_object_resolution(self, dtype):
# Simple test to exercise the cast when no instance is specified
object_dtype = type(np.dtype(object))
cast = get_castingimpl(type(dtype), object_dtype)
safety, (_, res_dt) = cast._resolve_descriptors((dtype, None))
assert safety == Casting.safe
assert res_dt is np.dtype("O")
@pytest.mark.parametrize("casting", ["no", "unsafe"])
def test_void_and_structured_with_subarray(self, casting):
# test case corresponding to gh-19325
dtype = np.dtype([("foo", "<f4", (3, 2))])
expected = casting == "unsafe"
assert np.can_cast("V4", dtype, casting=casting) == expected
assert np.can_cast(dtype, "V4", casting=casting) == expected
@pytest.mark.parametrize("dtype", np.typecodes["All"])
def test_object_casts_NULL_None_equivalence(self, dtype):
# None to <other> casts may succeed or fail, but a NULL'ed array must
# behave the same as one filled with None's.
arr_normal = np.array([None] * 5)
arr_NULLs = np.empty_like([None] * 5)
# If the check fails (maybe it should) the test would lose its purpose:
assert arr_NULLs.tobytes() == b"\x00" * arr_NULLs.nbytes
try:
expected = arr_normal.astype(dtype)
except TypeError:
            with pytest.raises(TypeError):
                arr_NULLs.astype(dtype)
else:
assert_array_equal(expected, arr_NULLs.astype(dtype))
    def test_nonstandard_bool_values(self):
        # test case corresponding to gh-19514: int8 values other than 0
        # and 1, viewed as bool, should still compare equal to 0/1
res = np.array([0, 3, -7], dtype=np.int8).view(bool)
expected = [0, 1, 1]
assert_array_equal(res, expected)
|
simongibbons/numpy
|
numpy/core/tests/test_casting_unittests.py
|
Python
|
bsd-3-clause
| 29,168 | 0.000583 |
from unittest import TestCase
from cloudshell.cp.vcenter.network.vlan.factory import VlanSpecFactory
class TestVlanSpecFactory(TestCase):
def test_get_vlan_spec(self):
vlan_spec_factory = VlanSpecFactory()
vlan_spec = vlan_spec_factory.get_vlan_spec('Access')
self.assertIsNotNone(vlan_spec)
|
QualiSystems/vCenterShell
|
package/cloudshell/tests/test_network/vlan/test_factory.py
|
Python
|
apache-2.0
| 324 | 0 |
# Austin Jenchi
# 1/30/2015
# 8th Period
# Paycheck
print "Welcome to How to Job"
print
wage_per_hour = raw_input("How much is your hourly wage? ==> $")
if wage_per_hour != "":
try:
wage_per_hour = float(wage_per_hour)
    except ValueError:
wage_per_hour = 12.00
else:
wage_per_hour = 12.00
print "Your pay is $%2.2f per hour." % wage_per_hour
print
print "You've worked 26 hours. (in one 24-hour day! remarkable!)"
print
total_wage = wage_per_hour * 26
print "Your Pay Before Taxes is $%2.2f" % total_wage
print
print "After taxes of 23%%, your total pay is $%2.2f." % (total_wage * .23)
print
print "After paying your union fees, you recieved a measly $%2.2f of your previous $%2.2f." % ((total_wage * .23) - 25, total_wage)
|
timtim17/IntroToGameProg
|
Labs/Paycheck.py
|
Python
|
gpl-2.0
| 746 | 0.002681 |
"""
sentry.web.frontend.generic
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import os
import posixpath
from django.conf import settings
from django.http import HttpResponseNotFound, Http404
from django.contrib.staticfiles import finders
from django.utils.six.moves.urllib.parse import unquote
from django.views import static
from django.views.generic import TemplateView as BaseTemplateView
from sentry.web.helpers import render_to_response
FOREVER_CACHE = 'max-age=315360000'
NEVER_CACHE = 'max-age=0, no-cache, no-store, must-revalidate'
def dev_favicon(request):
document_root, path = resolve('sentry/images/favicon_dev.png')
return static.serve(request, path, document_root=document_root)
def resolve(path):
# Mostly yanked from Django core and changed to return the path:
# See: https://github.com/django/django/blob/1.6.11/django/contrib/staticfiles/views.py
normalized_path = posixpath.normpath(unquote(path)).lstrip('/')
try:
absolute_path = finders.find(normalized_path)
except Exception:
        # attempts to access bad paths like `../../etc/passwd`, which
        # Django rejects; respond nicely instead of erroring.
absolute_path = None
if not absolute_path:
raise Http404("'%s' could not be found" % path)
if path[-1] == '/' or os.path.isdir(absolute_path):
raise Http404('Directory indexes are not allowed here.')
return os.path.split(absolute_path)
def static_media(request, **kwargs):
"""
Serve static files below a given point in the directory structure.
"""
module = kwargs.get('module')
path = kwargs.get('path', '')
version = kwargs.get('version')
if module:
path = '%s/%s' % (module, path)
try:
document_root, path = resolve(path)
except Http404:
# Return back a simpler plain-text 404 response, more suitable
# for static files, rather than our full blown HTML.
return HttpResponseNotFound('', content_type='text/plain')
    if ('gzip' in request.META.get('HTTP_ACCEPT_ENCODING', '')
            and not path.endswith('.gz') and not settings.DEBUG):
paths = (path + '.gz', path)
else:
paths = (path, )
for p in paths:
try:
response = static.serve(request, p, document_root=document_root)
break
except Http404:
            # We don't need to handle this since `resolve()` assures us that
            # at least the non-gzipped version exists, so in theory, this can
            # only happen on the first .gz path
continue
# Make sure we Vary: Accept-Encoding for gzipped responses
response['Vary'] = 'Accept-Encoding'
# We need CORS for font files
if path.endswith(('.js', '.ttf', '.ttc', '.otf', '.eot', '.woff', '.woff2')):
response['Access-Control-Allow-Origin'] = '*'
# If we have a version and not DEBUG, we can cache it FOREVER
if version is not None and not settings.DEBUG:
response['Cache-Control'] = FOREVER_CACHE
else:
# Otherwise, we explicitly don't want to cache at all
response['Cache-Control'] = NEVER_CACHE
return response
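# Hypothetical wiring (not part of this module): a urlconf entry along the
# lines of
#   url(r'^_static/(?:(?P<version>\d+)/)?(?P<module>[^/]+)/(?P<path>.*)$',
#       static_media, name='sentry-media')
# would route versioned asset requests through static_media above, so the
# FOREVER_CACHE branch applies whenever a version segment is present (and
# DEBUG is off).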
class TemplateView(BaseTemplateView):
def render_to_response(self, context, **response_kwargs):
return render_to_response(
request=self.request,
template=self.get_template_names(),
context=context,
**response_kwargs
)
|
ifduyue/sentry
|
src/sentry/web/frontend/generic.py
|
Python
|
bsd-3-clause
| 3,640 | 0.001099 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Database migrations for resource-providers."""
from migrate import UniqueConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Float
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Unicode
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
if migrate_engine.name == 'mysql':
nameargs = {'collation': 'utf8_bin'}
else:
nameargs = {}
resource_providers = Table(
'resource_providers', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(36), nullable=False),
Column('name', Unicode(200, **nameargs), nullable=True),
Column('generation', Integer, default=0),
Column('can_host', Integer, default=0),
UniqueConstraint('uuid', name='uniq_resource_providers0uuid'),
UniqueConstraint('name', name='uniq_resource_providers0name'),
Index('resource_providers_name_idx', 'name'),
Index('resource_providers_uuid_idx', 'uuid'),
mysql_engine='InnoDB',
mysql_charset='latin1'
)
inventories = Table(
'inventories', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('resource_provider_id', Integer, nullable=False),
Column('resource_class_id', Integer, nullable=False),
Column('total', Integer, nullable=False),
Column('reserved', Integer, nullable=False),
Column('min_unit', Integer, nullable=False),
Column('max_unit', Integer, nullable=False),
Column('step_size', Integer, nullable=False),
Column('allocation_ratio', Float, nullable=False),
Index('inventories_resource_provider_id_idx',
'resource_provider_id'),
Index('inventories_resource_provider_resource_class_idx',
'resource_provider_id', 'resource_class_id'),
Index('inventories_resource_class_id_idx',
'resource_class_id'),
UniqueConstraint('resource_provider_id', 'resource_class_id',
name='uniq_inventories0resource_provider_resource_class'),
mysql_engine='InnoDB',
mysql_charset='latin1'
)
allocations = Table(
'allocations', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('resource_provider_id', Integer, nullable=False),
Column('consumer_id', String(36), nullable=False),
Column('resource_class_id', Integer, nullable=False),
Column('used', Integer, nullable=False),
Index('allocations_resource_provider_class_used_idx',
'resource_provider_id', 'resource_class_id',
'used'),
Index('allocations_resource_class_id_idx',
'resource_class_id'),
Index('allocations_consumer_id_idx', 'consumer_id'),
mysql_engine='InnoDB',
mysql_charset='latin1'
)
resource_provider_aggregates = Table(
'resource_provider_aggregates', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('resource_provider_id', Integer, primary_key=True,
nullable=False),
Column('aggregate_id', Integer, primary_key=True, nullable=False),
Index('resource_provider_aggregates_aggregate_id_idx',
'aggregate_id'),
mysql_engine='InnoDB',
mysql_charset='latin1'
)
for table in [resource_providers, inventories, allocations,
resource_provider_aggregates]:
table.create(checkfirst=True)
|
rahulunair/nova
|
nova/db/sqlalchemy/api_migrations/migrate_repo/versions/016_resource_providers.py
|
Python
|
apache-2.0
| 4,495 | 0.000222 |
#!/usr/bin/env python
import os
import sys
from optparse import OptionParser
from jobTree.src.bioio import logger, setLoggingFromOptions
from jobTree.scriptTree.stack import Stack
from margin.mappers.last import Last, LastChain, LastRealign
from margin.mappers.bwa import Bwa, BwaChain, BwaRealign
from margin.mappers.graphmap import GraphMap, GraphMapChain, GraphMapRealign, GraphMapAnchor, GraphMapAnchorChain, GraphMapAnchorRealign
from margin.utils import pathToBaseNanoporeDir
import cPecan.cPecanEm
from cPecan.cPecanEm import addExpectationMaximisationOptions
def main():
    #Parse the input args/options
parser = OptionParser(usage="usage: inputFastqFile referenceFastaFile outputSamFile [options]",
version="%prog 0.1")
#Options
parser.add_option("--em", dest="em",
help="Run expectation maximisation (EM)",
default=False, action="store_true")
##Most people would not want to use the following, but I put them here for debug purposes
parser.add_option("--bwa", dest="bwa", help="Use BWA instead of LAST",
default=False, action="store_true")
parser.add_option("--graphmap", dest="graphmap", help="Use GraphMap instead of LAST",
default=False, action="store_true")
parser.add_option("--graphmapanchor", dest="graphmapanchor", help="Use GraphMap with anchor alignment instead of LAST",
default=False, action="store_true")
parser.add_option("--noRealign", dest="noRealign", help="Don't run any realignment step",
default=False, action="store_true")
parser.add_option("--noChain", dest="noChain", help="Don't run any chaining step",
default=False, action="store_true")
parser.add_option("--gapGamma", dest="gapGamma", help="Set the gap gamma for the AMAP function",
default=0.5, type=float)
parser.add_option("--matchGamma", dest="matchGamma", help="Set the match gamma for the AMAP function",
default=0.0, type=float)
#Add the cPecan expectation maximisation options
options = cPecan.cPecanEm.Options()
options.inputModel = os.path.join(pathToBaseNanoporeDir(), "src", "margin", "mappers", "last_hmm_20.txt")
options.modelType="fiveStateAsymmetric" #"threeStateAsymmetric"
options.optionsToRealign="--diagonalExpansion=10 --splitMatrixBiggerThanThis=300"
options.randomStart = True
options.trials = 3
options.outputTrialHmms = True
options.iterations = 100
options.maxAlignmentLengthPerJob=700000
options.maxAlignmentLengthToSample = 50000000
#options.outputXMLModelFile = outputModel + ".xml"
#options.updateTheBand = True
#options.useDefaultModelAsStart = True
#options.setJukesCantorStartingEmissions=0.3
options.trainEmissions=True
#options.tieEmissions = True
addExpectationMaximisationOptions(parser, options)
#Add the jobTree options
Stack.addJobTreeOptions(parser)
#Parse the options/arguments
options, args = parser.parse_args()
#Setup logging
setLoggingFromOptions(options)
#Print help message if no input
if len(sys.argv) == 1:
parser.print_help()
sys.exit(0)
#Exit if the arguments are not what we expect
if len(args) != 3:
raise RuntimeError("Expected three arguments, got: %s" % " ".join(args))
#Set the mapper
    if options.noRealign:
        if options.noChain: # i.e. --noChain --noRealign
            mapper = Last
            if options.bwa:
                mapper = Bwa
            if options.graphmap:
                mapper = GraphMap
            if options.graphmapanchor:
                mapper = GraphMapAnchor
        else: # i.e. --noRealign
            mapper = LastChain
            if options.bwa:
                mapper = BwaChain
            if options.graphmap:
                mapper = GraphMapChain
            if options.graphmapanchor:
                mapper = GraphMapAnchorChain
    else:
        mapper = LastRealign
        if options.bwa:
            mapper = BwaRealign
        if options.graphmap:
            mapper = GraphMapRealign
        if options.graphmapanchor:
            mapper = GraphMapAnchorRealign
#This line invokes jobTree
i = Stack(mapper(readFastqFile=args[0], referenceFastaFile=args[1], outputSamFile=args[2],
options=options)).startJobTree(options)
#The return value of the jobtree script is the number of failed jobs. If we have any then
#report this.
if i != 0:
raise RuntimeError("Got failed jobs")
if __name__ == '__main__':
from margin.marginAlign import *
main()
|
isovic/marginAlign
|
src/margin/marginAlign.py
|
Python
|
mit
| 4,995 | 0.015816 |
from ray.rllib.utils.deprecation import deprecation_warning
deprecation_warning(
old="ray/rllib/examples/recsim_with_slateq.py",
new="ray/rllib/examples/recommender_system_with_recsim_and_slateq.py",
error=True,
)
|
ray-project/ray
|
rllib/examples/recsim_with_slateq.py
|
Python
|
apache-2.0
| 227 | 0 |
import networkx
from yaiep.graph.Node import Node
##
# Class representing the entire search space, generated incrementally
# as the search method inspects new nodes
#
class SearchGraph(networkx.DiGraph):
    ##
    # Creates the search graph as a directed graph whose initial node
    # is the start state, from which the search method begins exploring
    # the solution space
    #
    # @param init_state initial state from which the search starts
def __init__(self, init_state):
networkx.DiGraph.__init__(self)
self._init_state = Node(init_state.copy(), None)
        # insert the initial state from which to inspect the search space
self.add_node(self._init_state)
    ##
    # Returns the reference to the initial state from which
    # the search started
    #
def get_init_state(self):
return self._init_state
def __str__(self):
res = ''
for node in self:
res += '{0} -> '.format(str(node.wm))
for adj in self.neighbors(node):
res += str(adj.wm) + '\n'
return res
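# A minimal usage sketch (hypothetical; assumes the working memory passed in
# is dict-like with a copy() method, as Node(init_state.copy(), None) above
# requires, and that Node takes the parent node as its second argument):
#
#   graph = SearchGraph({'on': ('a', 'b')})
#   root = graph.get_init_state()
#   child = Node({'on': ('b', 'a')}, root)
#   graph.add_edge(root, child)   # DiGraph edge from parent to expanded child
#   print(graph)                  # uses __str__ above to dump adjacencies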
|
aleSuglia/YAIEP
|
yaiep/graph/SearchGraph.py
|
Python
|
mit
| 1,169 | 0.001714 |
# -*- coding: utf-8 -*-
#
# pysdn documentation build configuration file, created by
# sphinx-quickstart on Wed Aug 5 08:56:12 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../pysdn'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'pysdn'
copyright = u'2015, Sergei Garbuzov'
author = u'Sergei Garbuzov'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.3.4'
# The full version, including alpha/beta/rc tags.
release = '1.3.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'nature'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'pysdndoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'pysdn.tex', u'pysdn Documentation',
u'Sergei Garbuzov', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'pysdn', u'pysdn Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'pysdn', u'pysdn Documentation',
author, 'pysdn', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
brocade/pysdn
|
docs/source/conf.py
|
Python
|
bsd-3-clause
| 9,179 | 0.005992 |
import math
def isPrime(num):
if num < 2:
        return False  # 0 and 1 are not prime
    # when num is 100, it cannot have a factor greater than 50; e.g. 60 * ? = 100 is impossible, so we only need to check up to sqrt(num) (the square root)
boundary = int(math.sqrt(num)) + 1
for i in range(2, boundary):
if num % i == 0:
return False
return True
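# Quick illustrative check of the square-root bound (a sketch):
#   isPrime(97) -> True
#   isPrime(91) -> False  (91 = 7 * 13; the factor 7 is found below sqrt(91))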
def primeSieve(size):
    sieve = [True] * size  # a slot is set to False once it is marked as a multiple
    sieve[0] = False
    sieve[1] = False  # 0 and 1 are not prime, matching isPrime above
    # when size is 100, a composite below it must have a factor no larger than 50; e.g. 60 * ? = 100 is impossible, so we only need to sieve up to sqrt(size) (the square root)
boundary = int(math.sqrt(size)) + 1
for i in range(2, boundary):
        pointer = i * 2  # start position; take 3 as an example: 3 itself is prime, but its multiples 6, 9, 12, ... are not
while pointer < size:
sieve[pointer] = False
pointer += i
    ret = []  # contains all the prime numbers within "size"
for i in range(size):
        if sieve[i]:
ret.append(str(i))
return ret
if __name__ == '__main__':
primes = primeSieve(100)
primesString = ", ".join(primes)
print("prime : ", primesString)
'''
prime :  2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97
'''
|
songzhw/Hello-kotlin
|
Python101/src/math/PrimeSieve.py
|
Python
|
apache-2.0
| 1,333 | 0.007048 |
import unittest
from graph_theory.spfa import spfa
class GraphTheoryTests(unittest.TestCase):
def setUp(self):
source = 0
num_nodes = 5
neighbour_list = [[1], # 0
[2], # 1
[3], # 2
[4, 1], # 3
[1], # 4
]
weights = {(0,1): 20,
(1,2) : 1,
(2,3) : 2,
(3,4) : -2,
(4, 1): -1,
(3, 1): -4,
}
self.example_graph = (source, num_nodes, weights, neighbour_list)
self.example_graph_cycle = [1,2,3]
    def is_cyclically_equal(self, list1, list2):
if len(list1) != len(list2):
return False
n = len(list1)
for shift in range(n):
if list1 == list2[shift:] + list2[:shift]:
return True
return False
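    # e.g. [1, 2, 3] and [3, 1, 2] compare equal under rotation (shift=1
    # aligns them), while [1, 2, 3] and [1, 3, 2] do not.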
def test_negative_cycle(self):
_, negative_cycle = spfa(*self.example_graph)
# Careful, double negation ahead
        assert negative_cycle is not None
        assert self.is_cyclically_equal(negative_cycle, self.example_graph_cycle)
|
yu-peng/cdru
|
graph_theory/tests.py
|
Python
|
gpl-3.0
| 1,214 | 0.014003 |
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from __future__ import with_statement
from boto.sdb.db.model import Model
from boto.sdb.db.property import StringProperty, IntegerProperty, ListProperty, ReferenceProperty, CalculatedProperty
from boto.manage.server import Server
from boto.manage import propget
import boto.ec2
import time
import traceback
from contextlib import closing
import dateutil.parser
import datetime
class CommandLineGetter(object):
def get_region(self, params):
if not params.get('region', None):
prop = self.cls.find_property('region_name')
params['region'] = propget.get(prop, choices=boto.ec2.regions)
def get_zone(self, params):
if not params.get('zone', None):
prop = StringProperty(name='zone', verbose_name='EC2 Availability Zone',
choices=self.ec2.get_all_zones)
params['zone'] = propget.get(prop)
def get_name(self, params):
if not params.get('name', None):
prop = self.cls.find_property('name')
params['name'] = propget.get(prop)
def get_size(self, params):
if not params.get('size', None):
prop = IntegerProperty(name='size', verbose_name='Size (GB)')
params['size'] = propget.get(prop)
def get_mount_point(self, params):
if not params.get('mount_point', None):
prop = self.cls.find_property('mount_point')
params['mount_point'] = propget.get(prop)
def get_device(self, params):
if not params.get('device', None):
prop = self.cls.find_property('device')
params['device'] = propget.get(prop)
def get(self, cls, params):
self.cls = cls
self.get_region(params)
self.ec2 = params['region'].connect()
self.get_zone(params)
self.get_name(params)
self.get_size(params)
self.get_mount_point(params)
self.get_device(params)
class Volume(Model):
name = StringProperty(required=True, unique=True, verbose_name='Name')
region_name = StringProperty(required=True, verbose_name='EC2 Region')
zone_name = StringProperty(required=True, verbose_name='EC2 Zone')
mount_point = StringProperty(verbose_name='Mount Point')
device = StringProperty(verbose_name="Device Name", default='/dev/sdp')
volume_id = StringProperty(required=True)
past_volume_ids = ListProperty(item_type=str)
server = ReferenceProperty(Server, collection_name='volumes',
verbose_name='Server Attached To')
volume_state = CalculatedProperty(verbose_name="Volume State",
calculated_type=str, use_method=True)
attachment_state = CalculatedProperty(verbose_name="Attachment State",
calculated_type=str, use_method=True)
size = CalculatedProperty(verbose_name="Size (GB)",
calculated_type=int, use_method=True)
@classmethod
def create(cls, **params):
getter = CommandLineGetter()
getter.get(cls, params)
region = params.get('region')
ec2 = region.connect()
zone = params.get('zone')
size = params.get('size')
ebs_volume = ec2.create_volume(size, zone.name)
v = cls()
v.ec2 = ec2
v.volume_id = ebs_volume.id
v.name = params.get('name')
v.mount_point = params.get('mount_point')
v.device = params.get('device')
v.region_name = region.name
v.zone_name = zone.name
v.put()
return v
@classmethod
def create_from_volume_id(cls, region_name, volume_id, name):
vol = None
ec2 = boto.ec2.connect_to_region(region_name)
rs = ec2.get_all_volumes([volume_id])
if len(rs) == 1:
v = rs[0]
vol = cls()
vol.volume_id = v.id
vol.name = name
vol.region_name = v.region.name
vol.zone_name = v.zone
vol.put()
return vol
def create_from_latest_snapshot(self, name, size=None):
snapshot = self.get_snapshots()[-1]
return self.create_from_snapshot(name, snapshot, size)
def create_from_snapshot(self, name, snapshot, size=None):
if size < self.size:
size = self.size
ec2 = self.get_ec2_connection()
if self.zone_name == None or self.zone_name == '':
# deal with the migration case where the zone is not set in the logical volume:
current_volume = ec2.get_all_volumes([self.volume_id])[0]
self.zone_name = current_volume.zone
ebs_volume = ec2.create_volume(size, self.zone_name, snapshot)
v = Volume()
v.ec2 = self.ec2
v.volume_id = ebs_volume.id
v.name = name
v.mount_point = self.mount_point
v.device = self.device
v.region_name = self.region_name
v.zone_name = self.zone_name
v.put()
return v
def get_ec2_connection(self):
if self.server:
return self.server.ec2
if not hasattr(self, 'ec2') or self.ec2 == None:
self.ec2 = boto.ec2.connect_to_region(self.region_name)
return self.ec2
def _volume_state(self):
ec2 = self.get_ec2_connection()
rs = ec2.get_all_volumes([self.volume_id])
return rs[0].volume_state()
def _attachment_state(self):
ec2 = self.get_ec2_connection()
rs = ec2.get_all_volumes([self.volume_id])
return rs[0].attachment_state()
def _size(self):
if not hasattr(self, '__size'):
ec2 = self.get_ec2_connection()
rs = ec2.get_all_volumes([self.volume_id])
self.__size = rs[0].size
return self.__size
def install_xfs(self):
if self.server:
self.server.install('xfsprogs xfsdump')
def get_snapshots(self):
"""
Returns a list of all completed snapshots for this volume ID.
"""
ec2 = self.get_ec2_connection()
rs = ec2.get_all_snapshots()
all_vols = [self.volume_id] + self.past_volume_ids
snaps = []
for snapshot in rs:
if snapshot.volume_id in all_vols:
if snapshot.progress == '100%':
snapshot.date = dateutil.parser.parse(snapshot.start_time)
snapshot.keep = True
snaps.append(snapshot)
snaps.sort(cmp=lambda x,y: cmp(x.date, y.date))
return snaps
def attach(self, server=None):
if self.attachment_state == 'attached':
print 'already attached'
return None
if server:
self.server = server
self.put()
ec2 = self.get_ec2_connection()
ec2.attach_volume(self.volume_id, self.server.instance_id, self.device)
def detach(self, force=False):
state = self.attachment_state
if state == 'available' or state == None or state == 'detaching':
print 'already detached'
return None
ec2 = self.get_ec2_connection()
ec2.detach_volume(self.volume_id, self.server.instance_id, self.device, force)
self.server = None
self.put()
def checkfs(self, use_cmd=None):
if self.server == None:
raise ValueError, 'server attribute must be set to run this command'
# detemine state of file system on volume, only works if attached
if use_cmd:
cmd = use_cmd
else:
cmd = self.server.get_cmdshell()
status = cmd.run('xfs_check %s' % self.device)
if not use_cmd:
cmd.close()
if status[1].startswith('bad superblock magic number 0'):
return False
return True
def wait(self):
if self.server == None:
raise ValueError, 'server attribute must be set to run this command'
with closing(self.server.get_cmdshell()) as cmd:
# wait for the volume device to appear
while not cmd.exists(self.device):
boto.log.info('%s still does not exist, waiting 10 seconds' % self.device)
time.sleep(10)
def format(self):
if self.server == None:
raise ValueError, 'server attribute must be set to run this command'
status = None
with closing(self.server.get_cmdshell()) as cmd:
if not self.checkfs(cmd):
boto.log.info('make_fs...')
status = cmd.run('mkfs -t xfs %s' % self.device)
return status
def mount(self):
if self.server == None:
raise ValueError, 'server attribute must be set to run this command'
boto.log.info('handle_mount_point')
with closing(self.server.get_cmdshell()) as cmd:
if not cmd.isdir(self.mount_point):
boto.log.info('making directory')
# mount directory doesn't exist so create it
cmd.run("mkdir %s" % self.mount_point)
else:
boto.log.info('directory exists already')
status = cmd.run('mount -l')
lines = status[1].split('\n')
for line in lines:
t = line.split()
if t and t[2] == self.mount_point:
# something is already mounted at the mount point
# unmount that and mount it as /tmp
if t[0] != self.device:
cmd.run('umount %s' % self.mount_point)
cmd.run('mount %s /tmp' % t[0])
cmd.run('chmod 777 /tmp')
break
# Mount up our new EBS volume onto mount_point
cmd.run("mount %s %s" % (self.device, self.mount_point))
cmd.run('xfs_growfs %s' % self.mount_point)
def make_ready(self, server):
self.server = server
self.put()
self.install_xfs()
self.attach()
self.wait()
self.format()
self.mount()
def freeze(self):
if self.server:
return self.server.run("/usr/sbin/xfs_freeze -f %s" % self.mount_point)
def unfreeze(self):
if self.server:
return self.server.run("/usr/sbin/xfs_freeze -u %s" % self.mount_point)
def snapshot(self):
# if this volume is attached to a server
# we need to freeze the XFS file system
try:
self.freeze()
if self.server == None:
snapshot = self.get_ec2_connection().create_snapshot(self.volume_id)
else:
snapshot = self.server.ec2.create_snapshot(self.volume_id)
boto.log.info('Snapshot of Volume %s created: %s' % (self.name, snapshot))
except Exception:
boto.log.info('Snapshot error')
boto.log.info(traceback.format_exc())
finally:
status = self.unfreeze()
return status
def get_snapshot_range(self, snaps, start_date=None, end_date=None):
l = []
for snap in snaps:
if start_date and end_date:
if snap.date >= start_date and snap.date <= end_date:
l.append(snap)
elif start_date:
if snap.date >= start_date:
l.append(snap)
elif end_date:
if snap.date <= end_date:
l.append(snap)
else:
l.append(snap)
return l
def trim_snapshots(self, delete=False):
"""
        Trim the number of snapshots for this volume. This method always
        keeps the oldest and the newest snapshots, then decides which of
        the others to keep as follows.
The algorithm is to keep all snapshots from the current day. Then
it will keep the first snapshot of the day for the previous seven days.
Then, it will keep the first snapshot of the week for the previous
four weeks. After than, it will keep the first snapshot of the month
for as many months as there are.
"""
snaps = self.get_snapshots()
# Always keep the oldest and the newest
if len(snaps) <= 2:
return snaps
snaps = snaps[1:-1]
now = datetime.datetime.now(snaps[0].date.tzinfo)
midnight = datetime.datetime(year=now.year, month=now.month,
day=now.day, tzinfo=now.tzinfo)
# Keep the first snapshot from each day of the previous week
one_week = datetime.timedelta(days=7, seconds=60*60)
        previous_week = self.get_snapshot_range(snaps, midnight-one_week, midnight)
if not previous_week:
return snaps
current_day = None
for snap in previous_week:
if current_day and current_day == snap.date.day:
snap.keep = False
else:
current_day = snap.date.day
# Get ourselves onto the next full week boundary
if previous_week:
week_boundary = previous_week[0].date
if week_boundary.weekday() != 0:
delta = datetime.timedelta(days=week_boundary.weekday())
week_boundary = week_boundary - delta
# Keep one within this partial week
partial_week = self.get_snapshot_range(snaps, week_boundary, previous_week[0].date)
if len(partial_week) > 1:
for snap in partial_week[1:]:
snap.keep = False
# Keep the first snapshot of each week for the previous 4 weeks
for i in range(0,4):
weeks_worth = self.get_snapshot_range(snaps, week_boundary-one_week, week_boundary)
if len(weeks_worth) > 1:
for snap in weeks_worth[1:]:
snap.keep = False
week_boundary = week_boundary - one_week
# Now look through all remaining snaps and keep one per month
remainder = self.get_snapshot_range(snaps, end_date=week_boundary)
current_month = None
for snap in remainder:
if current_month and current_month == snap.date.month:
snap.keep = False
else:
current_month = snap.date.month
if delete:
for snap in snaps:
if not snap.keep:
boto.log.info('Deleting %s(%s) for %s' % (snap, snap.date, self.name))
snap.delete()
return snaps
def grow(self, size):
pass
def copy(self, snapshot):
pass
def get_snapshot_from_date(self, date):
pass
def delete(self, delete_ebs_volume=False):
if delete_ebs_volume:
self.detach()
ec2 = self.get_ec2_connection()
ec2.delete_volume(self.volume_id)
Model.delete(self)
def archive(self):
# snapshot volume, trim snaps, delete volume-id
pass
|
jxta/cc
|
vendor/boto/boto/manage/volume.py
|
Python
|
apache-2.0
| 16,328 | 0.002511 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.11 on 2017-05-10 15:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('sessao', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='sessaoplenaria',
name='interativa',
field=models.NullBooleanField(choices=[(True, 'Sim'), (False, 'Não')], verbose_name='Sessão interativa'),
),
]
|
cmjatai/cmj
|
sapl/sessao/migrations/0002_sessaoplenaria_interativa.py
|
Python
|
gpl-3.0
| 518 | 0.001938 |
# Copyright (c) 2017 https://github.com/ping
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
__version__ = '0.3.9'
|
ping/instagram_private_api_extensions
|
instagram_private_api_extensions/__init__.py
|
Python
|
mit
| 159 | 0 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
JSONType = Any
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
subscription_id: str,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-01-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', min_length=1),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
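# Illustrative call (hypothetical values): build_list_request(
#     "00000000-0000-0000-0000-000000000000", "my-rg", "my-aks")
# produces a GET request for
#   /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/my-rg/
#   providers/Microsoft.ContainerService/managedClusters/my-aks/agentPools
# with query parameter api-version=2022-01-01.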
def build_get_request(
subscription_id: str,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-01-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', min_length=1),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
"agentPoolName": _SERIALIZER.url("agent_pool_name", agent_pool_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_create_or_update_request_initial(
subscription_id: str,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
*,
json: JSONType = None,
content: Any = None,
**kwargs: Any
) -> HttpRequest:
content_type = kwargs.pop('content_type', None) # type: Optional[str]
api_version = "2022-01-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', min_length=1),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
"agentPoolName": _SERIALIZER.url("agent_pool_name", agent_pool_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
if content_type is not None:
header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str')
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="PUT",
url=url,
params=query_parameters,
headers=header_parameters,
json=json,
content=content,
**kwargs
)
def build_delete_request_initial(
subscription_id: str,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-01-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', min_length=1),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
"agentPoolName": _SERIALIZER.url("agent_pool_name", agent_pool_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="DELETE",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_upgrade_profile_request(
subscription_id: str,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-01-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeProfiles/default')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', min_length=1),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
"agentPoolName": _SERIALIZER.url("agent_pool_name", agent_pool_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_available_agent_pool_versions_request(
subscription_id: str,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-01-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/availableAgentPoolVersions')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', min_length=1),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_upgrade_node_image_version_request_initial(
subscription_id: str,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2022-01-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeNodeImageVersion')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', min_length=1),
"resourceName": _SERIALIZER.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
"agentPoolName": _SERIALIZER.url("agent_pool_name", agent_pool_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="POST",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
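# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the generated code): the request builders
# above are plain functions, so they can be exercised without a full client.
# The identifiers below are placeholders, not real Azure resources.
if __name__ == "__main__":
    _demo_request = build_list_request(
        subscription_id="00000000-0000-0000-0000-000000000000",
        resource_group_name="demo-rg",
        resource_name="demoCluster",
    )
    # The builder fills in the URL template and the api-version parameter.
    assert _demo_request.method == "GET"
    print(_demo_request.url)
# ---------------------------------------------------------------------------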
class AgentPoolsOperations(object):
"""AgentPoolsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.containerservice.v2022_01_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> Iterable["_models.AgentPoolListResult"]:
"""Gets a list of agent pools in the specified managed cluster.
Gets a list of agent pools in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator-like instance of either AgentPoolListResult or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.containerservice.v2022_01_01.models.AgentPoolListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("AgentPoolListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools'} # type: ignore
@distributed_trace
def get(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> "_models.AgentPool":
"""Gets the specified managed cluster agent pool.
Gets the specified managed cluster agent pool.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPool, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_01.models.AgentPool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
parameters: "_models.AgentPool",
**kwargs: Any
) -> "_models.AgentPool":
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'AgentPool')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AgentPool', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
parameters: "_models.AgentPool",
**kwargs: Any
) -> LROPoller["_models.AgentPool"]:
"""Creates or updates an agent pool in the specified managed cluster.
Creates or updates an agent pool in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:param parameters: The agent pool to create or update.
:type parameters: ~azure.mgmt.containerservice.v2022_01_01.models.AgentPool
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either AgentPool or the result of cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2022_01_01.models.AgentPool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
parameters=parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
def _delete_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace
def begin_delete(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> LROPoller[None]:
"""Deletes an agent pool in the specified managed cluster.
Deletes an agent pool in the specified managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
@distributed_trace
def get_upgrade_profile(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> "_models.AgentPoolUpgradeProfile":
"""Gets the upgrade profile for an agent pool.
Gets the upgrade profile for an agent pool.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPoolUpgradeProfile, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_01.models.AgentPoolUpgradeProfile
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolUpgradeProfile"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_upgrade_profile_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self.get_upgrade_profile.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPoolUpgradeProfile', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_upgrade_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeProfiles/default'} # type: ignore
@distributed_trace
def get_available_agent_pool_versions(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> "_models.AgentPoolAvailableVersions":
"""Gets a list of supported Kubernetes versions for the specified agent pool.
See `supported Kubernetes versions
<https://docs.microsoft.com/azure/aks/supported-kubernetes-versions>`_ for more details about
the version lifecycle.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPoolAvailableVersions, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2022_01_01.models.AgentPoolAvailableVersions
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPoolAvailableVersions"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_available_agent_pool_versions_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.get_available_agent_pool_versions.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPoolAvailableVersions', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_available_agent_pool_versions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/availableAgentPoolVersions'} # type: ignore
def _upgrade_node_image_version_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> Optional["_models.AgentPool"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.AgentPool"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_upgrade_node_image_version_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
template_url=self._upgrade_node_image_version_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
response_headers = {}
if response.status_code == 202:
response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
_upgrade_node_image_version_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeNodeImageVersion'} # type: ignore
@distributed_trace
def begin_upgrade_node_image_version(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> LROPoller["_models.AgentPool"]:
"""Upgrades the node image version of an agent pool to the latest.
Upgrading the node image version of an agent pool applies the newest OS and runtime updates to
the nodes. AKS provides one new image per week with the latest updates. For more details on
node image versions, see: https://docs.microsoft.com/azure/aks/node-image-upgrade.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either AgentPool or the result of cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2022_01_01.models.AgentPool]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._upgrade_node_image_version_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
agent_pool_name=agent_pool_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response_headers = {}
response = pipeline_response.http_response
response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation'))
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_upgrade_node_image_version.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeNodeImageVersion'} # type: ignore
| Azure/azure-sdk-for-python | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2022_01_01/operations/_agent_pools_operations.py | Python | mit | 40,397 | 0.004505 |
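In normal use this operations class is reached through the generated ContainerServiceClient rather than instantiated directly. A minimal sketch, assuming azure-identity and azure-mgmt-containerservice are installed; the subscription, group, and cluster names are placeholders:

from azure.identity import DefaultAzureCredential
from azure.mgmt.containerservice import ContainerServiceClient
client = ContainerServiceClient(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
)
# list() returns an ItemPaged iterator; pages are fetched lazily on demand.
for pool in client.agent_pools.list("demo-rg", "demoCluster"):
    print(pool.name)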
'''
Created on Jan 27, 2017
@author: montes
'''
import bpy
from inspect import *
import mv
import os
import math
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import legal,inch,cm
from reportlab.platypus import Image
from reportlab.platypus import Paragraph,Table,TableStyle
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph, Frame, Spacer, PageTemplate, PageBreak
from reportlab.lib import colors
from reportlab.lib.pagesizes import A3, A4, landscape, portrait
from reportlab.lib.styles import ParagraphStyle, getSampleStyleSheet
from reportlab.lib.enums import TA_LEFT, TA_RIGHT, TA_CENTER, TA_JUSTIFY
from reportlab.platypus.flowables import HRFlowable
class OPS_create_api_doc(bpy.types.Operator):
bl_idname = "fd_api_doc.create_api_doc"
bl_label = "Create Fluid API Documentation"
output_path = bpy.props.StringProperty(name="Output Path")
def esc_uscores(self, string):
if string:
return string.replace("_", "\_")
else:
return
def exclude_builtins(self, classes, module):
new_classes = []
for cls in classes:
if module in cls[1].__module__:
new_classes.append(cls)
return new_classes
def write_sidebar(self, modules):
filepath = os.path.join(self.output_path, "FD_Sidebar.md")
file = open(filepath, "w")
fw = file.write
fw("# Fluid Designer\n")
fw("* [Home](Home)\n")
fw("* [Understanding the User Interface](Understanding-the-User-Interface)\n")
fw("* [Navigating the 3D Viewport](Navigating-the-3D-Viewport)\n")
fw("* [Navigating the Library Browser](Navigating-the-Library-Browser)\n")
fw("* [The Room Builder Panel](The-Room-Builder-Panel)\n")
fw("* [Hotkeys](Fluid-Designer-Hot-Keys)\n\n")
fw("# API Documentation\n")
for mod in modules:
fw("\n## mv.{}\n".format(mod[0]))
classes = self.exclude_builtins(getmembers(mod[1], predicate=isclass), mod[0])
if len(classes) > 0:
for cls in classes:
fw("* [{}()]({})\n".format(self.esc_uscores(cls[0]),
self.esc_uscores(cls[0])))
else:
fw("* [mv.{}]({})\n".format(mod[0], mod[0]))
file.close()
def write_class_doc(self, cls):
filepath = os.path.join(self.output_path, cls[0] + ".md")
file = open(filepath, "w")
fw = file.write
fw("# class {}{}{}{}\n\n".format(cls[1].__module__, ".", cls[0], "():"))
if getdoc(cls[1]):
fw(self.esc_uscores(getdoc(cls[1])) + "\n\n")
for func in getmembers(cls[1], predicate=isfunction):
if cls[0] in func[1].__qualname__:
args = getargspec(func[1])[0]
args_str = ', '.join(item for item in args if item != 'self')
fw("## {}{}{}{}\n\n".format(self.esc_uscores(func[0]),
"(",
self.esc_uscores(args_str) if args_str else " ",
")"))
if getdoc(func[1]):
fw(self.esc_uscores(getdoc(func[1])) + "\n")
else:
fw("Undocumented.\n\n")
file.close()
def write_mod_doc(self, mod):
filepath = os.path.join(self.output_path, mod[0] + ".md")
file = open(filepath, "w")
fw = file.write
fw("# module {}{}:\n\n".format("mv.", mod[0]))
if getdoc(mod[1]):
fw(self.esc_uscores(getdoc(mod[1])) + "\n\n")
for func in getmembers(mod[1], predicate=isfunction):
args = getargspec(func[1])[0]
args_str = ', '.join(item for item in args if item != 'self')
fw("## {}{}{}{}\n\n".format(self.esc_uscores(func[0]),
"(",
self.esc_uscores(args_str if args_str else " "),
")"))
if getdoc(func[1]):
fw(self.esc_uscores(getdoc(func[1])) + "\n")
else:
fw("Undocumented.\n\n")
file.close()
def execute(self, context):
modules = getmembers(mv, predicate=ismodule)
self.write_sidebar(modules)
for mod in modules:
classes = self.exclude_builtins(getmembers(mod[1], predicate=isclass), mod[0])
if len(classes) > 0:
for cls in classes:
self.write_class_doc(cls)
else:
self.write_mod_doc(mod)
return {'FINISHED'}
class OPS_create_content_overview_doc(bpy.types.Operator):
bl_idname = "fd_api_doc.create_content_overview"
bl_label = "Create Fluid Content Overview Documentation"
INCLUDE_FILE_NAME = "doc_include.txt"
write_path = bpy.props.StringProperty(name="Write Path", default="")
elements = []
package = None
def write_html(self):
pass
def read_include_file(self, path):
dirs = []
file_path = os.path.join(path, self.INCLUDE_FILE_NAME)
if os.path.exists(file_path):
file = open(os.path.join(path, self.INCLUDE_FILE_NAME), "r")
dirs_raw = list(file)
for dir in dirs_raw:
dirs.append(dir.replace("\n", ""))
return dirs
def create_hdr(self, name, font_size):
hdr_style = TableStyle([('TEXTCOLOR', (0, 0), (-1, -1), colors.black),
('BOTTOMPADDING', (0, 0), (-1, -1), 15),
('TOPPADDING', (0, 0), (-1, -1), 15),
('FONTSIZE', (0, 0), (-1, -1), 8),
('VALIGN', (0, 0), (-1, -1), 'TOP'),
('ALIGN', (0, 0), (-1, 0), 'LEFT'),
('LINEBELOW', (0, 0), (-1, -1), 2, colors.black),
('BACKGROUND', (0, 1), (-1, -1), colors.white)])
name_p = Paragraph(name, ParagraphStyle("Category name style", fontSize=font_size))
hdr_tbl = Table([[name_p]], colWidths = 500, rowHeights = None, repeatRows = 1)
hdr_tbl.setStyle(hdr_style)
self.elements.append(hdr_tbl)
    def create_img_table(self, dir):
        # Build a thumbnail grid: images are collected into rows of four,
        # and each completed row becomes one row of the PDF table.
        item_tbl_data = []
        item_tbl_row = []
for i, file in enumerate(os.listdir(dir)):
last_item = len(os.listdir(dir)) - 1
if ".png" in file:
img = Image(os.path.join(dir, file), inch, inch)
img_name = file.replace(".png", "")
if len(item_tbl_row) == 4:
item_tbl_data.append(item_tbl_row)
item_tbl_row = []
elif i == last_item:
item_tbl_data.append(item_tbl_row)
i_tbl = Table([[img], [Paragraph(img_name, ParagraphStyle("item name style", wordWrap='CJK'))]])
item_tbl_row.append(i_tbl)
if len(item_tbl_data) > 0:
item_tbl = Table(item_tbl_data, colWidths=125)
self.elements.append(item_tbl)
self.elements.append(Spacer(1, inch * 0.5))
def search_dir(self, path):
thumb_dir = False
for file in os.listdir(path):
if ".png" in file:
thumb_dir = True
if thumb_dir:
self.create_img_table(path)
for file in os.listdir(path):
if os.path.isdir(os.path.join(path, file)):
self.create_hdr(file, font_size=14)
self.search_dir(os.path.join(path, file))
def write_pdf(self, mod):
file_path = os.path.join(self.write_path if self.write_path != "" else mod.__path__[0], "doc")
file_name = mod.__package__ + ".pdf"
if not os.path.exists(file_path):
os.mkdir(file_path)
doc = SimpleDocTemplate(os.path.join(file_path, file_name),
pagesize = A4,
leftMargin = 0.25 * inch,
rightMargin = 0.25 * inch,
topMargin = 0.25 * inch,
bottomMargin = 0.25 * inch)
lib_name = mod.__package__.replace("_", " ")
self.create_hdr(lib_name, font_size=24)
print("\n", lib_name, "\n")
dirs = self.read_include_file(os.path.join(mod.__path__[0], "doc"))
if len(dirs) > 0:
for d in dirs:
path = os.path.join(mod.__path__[0], d)
if os.path.exists(path):
self.create_hdr(d.title(), font_size=18)
self.search_dir(path)
else:
products_path = os.path.join(mod.__path__[0], "products")
if os.path.exists(products_path):
self.create_hdr("Products", font_size=18)
self.search_dir(products_path)
inserts_path = os.path.join(mod.__path__[0], "inserts")
if os.path.exists(inserts_path):
self.create_hdr("Inserts", font_size=18)
self.search_dir(inserts_path)
doc.build(self.elements)
def execute(self, context):
packages = mv.utils.get_library_packages(context)
for p in packages:
mod = __import__(p)
self.write_pdf(mod)
return {'FINISHED'}
classes = [
OPS_create_api_doc,
OPS_create_content_overview_doc,
]
def register():
for c in classes:
bpy.utils.register_class(c)
def unregister():
for c in classes:
bpy.utils.unregister_class(c)
if __name__ == "__main__":
register()
| Microvellum/Fluid-Designer | win64-vc/2.78/scripts/startup/fluid_operators/fd_api_doc.py | Python | gpl-3.0 | 10,496 | 0.0121 |
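Once registered, each operator above is reachable through bpy.ops under its bl_idname. A sketch of driving the wiki generator from Fluid Designer's Python console, assuming the module above is loaded; the output path is a placeholder:

import bpy
# Writes FD_Sidebar.md plus one markdown page per module or class.
bpy.ops.fd_api_doc.create_api_doc(output_path="C:/tmp/fd_wiki")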
# -*- coding: utf-8 -*-
# Copyright (C) 2009, 2013-2015 Rocky Bernstein
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import inspect, os
# Our local modules
from trepan.processor.command import base_cmd as Mbase_cmd
class EditCommand(Mbase_cmd.DebuggerCommand):
"""**edit** *position*
Edit specified file or module.
With no argument, edits file containing most recent line listed.
See also:
---------
`list`
"""
aliases = ('ed',)
category = 'files'
min_args = 0
max_args = 1
name = os.path.basename(__file__).split('.')[0]
need_stack = False
short_help = 'Edit specified file or module'
def run(self, args):
curframe = self.proc.curframe
if len(args) == 1:
if curframe is None:
self.errmsg('edit: no stack to pick up position from. '
'Use edit FILE:LINE form.')
return
filename = curframe.f_code.co_filename
lineno = curframe.f_lineno
elif len(args) == 2:
(modfunc, filename, lineno) = self.proc.parse_position(args[1])
if inspect.ismodule(modfunc) and lineno is None and len(args) > 2:
val = self.proc.get_an_int(args[1],
'Line number expected, got %s.' %
args[1])
if val is None: return
lineno = val
pass
elif lineno is None:
                self.errmsg('edit: no line number provided')
return
pass
editor = 'ex'
if 'EDITOR' in os.environ:
editor = os.environ['EDITOR']
pass
if os.path.exists(filename):
os.system("%s +%d %s" % (editor, lineno, filename))
else:
self.errmsg("edit: file %s doesn't exist" % filename)
pass
return
pass
if __name__ == '__main__':
from trepan import debugger as Mdebugger
d = Mdebugger.Debugger()
cmd = EditCommand(d.core.processor)
for c in (['edit'],
['edit', './edit.py:34'],
['edit', './noogood.py'],
):
cmd.run(c)
pass
pass
| rocky/python2-trepan | trepan/processor/command/edit.py | Python | gpl-3.0 | 2,860 | 0.003846 |
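The command shells out with the widely supported "editor +LINE FILE" convention, falling back to the POSIX ex editor. A standalone sketch of that lookup and command assembly:

import os
editor = os.environ.get('EDITOR', 'ex')        # same fallback as EditCommand
cmd = "%s +%d %s" % (editor, 34, './edit.py')  # open the file at line 34
print(cmd)                                     # e.g. "vim +34 ./edit.py"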
# choco/ui.py
# Copyright (C) 2006-2016 the Choco authors and contributors <see AUTHORS file>
#
# This module is part of Choco and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import re
import os
import posixpath
from choco import errors
from choco import util
from choco.runtime import _kwargs_for_include
class UIModule(object):
default_template = ""
def __init__(self, context, template=None):
self.lookup = context.lookup
self.ui_container = self.lookup.ui_container
self.context = context
self.template = template or self.default_template
self.initialize()
def initialize(self):
pass
def get(self, key, default=None):
"""get parent context local data by key"""
return self.context.get(key, default)
def _execute(self, *args, **kw):
"""execute the template"""
data = self.render(*args, **kw)
t = self.get_template()
return t.render_ui(self.context, *args, **data)
def get_template(self):
return self.ui_container.get_template(self.template)
    def render(self, *args, **kw):
        """Entry point and logic section for custom application actions"""
        raise NotImplementedError()
class UIContainer(object):
def __init__(self, ui_paths, uis=None):
"""Init ui container,
param ui_paths: the ui template paths.
param uis: the dict like object, contains the ui module classes.
"""
self.ui_paths = [posixpath.normpath(d) for d in
util.to_list(ui_paths, ())
]
self.uis = uis or dict()
def put_ui(self, ui_name, uicls):
self.uis[ui_name] = uicls
def get_ui(self, ui_name):
uicls = self.uis.get(ui_name)
if uicls is None:
            raise errors.UINotFoundException("Can't find ui for %s" % ui_name)
return uicls
def set_lookup(self, lookup):
"""Set up template lookup"""
self.lookup = lookup
def get_template(self, uri):
"""Return a :class:`.Template` object corresponding to the given
``uri``.
.. note:: The ``relativeto`` argument is not supported here at
the moment.
"""
        # the special ui uri uses the prefix "ui#"
uiuri = "ui#" + uri
try:
if self.lookup.filesystem_checks:
return self.lookup.check(uiuri, self.lookup.collection[uiuri])
else:
return self.lookup.collection[uiuri]
except KeyError:
u = re.sub(r'^\/+', '', uri)
for dir in self.ui_paths:
                # make sure the path separators are posix - os.altsep is empty
                # on POSIX and cannot be used.
dir = dir.replace(os.path.sep, posixpath.sep)
srcfile = posixpath.normpath(posixpath.join(dir, u))
if os.path.isfile(srcfile):
return self.lookup.load(srcfile, uiuri)
else:
raise errors.TopLevelLookupException(
"Cant locate ui template for uri %r" % uiuri)
| whiteclover/Choco | choco/ui.py | Python | mit | 3,191 | 0 |
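A concrete module built on this API subclasses UIModule, points default_template at a file reachable through the container's ui_paths, and returns template variables from render(). A sketch; the class name and template path are hypothetical:

from choco.ui import UIModule
class UserPanel(UIModule):
    default_template = "panels/user.html"  # hypothetical template
    def render(self, user_id):
        # Data pulled from the parent context becomes the keyword
        # arguments passed to the template's render_ui call.
        users = self.get("users", {})
        return {"user": users.get(user_id)}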
#!/usr/bin/env python
# coding=utf-8
# Furry Text Escape 2 main script
gamevers = ('v1.0')
n = ('null')
tprint1 = ('1')
tprint2 = ('1')
while n.strip()!="4":
if tprint1==('1'):
t = open('./art/title1.TCR', 'r')
tcr_contents = t.read()
print (chr(27) + "[2J" + chr(27) + "[H" + tcr_contents)
t.close()
tprint1=('0')
print (
'''Furry Text Escape II
(c) 2015-2016 Thomas Leathers
'''
)
print (
'''Choose number:
1: Watch Intro
2: begin game
3: Credits
4: quit'''
)
n = raw_input('choose number from the list above:')
print(chr(27) + "[2A")
if n=="2":
#episode selection submenu
print(chr(27) + "[2J" + chr(27) + "[H")
episodeselection = ('null')
tprint2 = ('1')
t = open('./art/EPSEL-BANNER.TCR', 'r')
tcr_contents = t.read()
print (chr(27) + "[2J" + chr(27) + "[H" + tcr_contents + '''"which way?"
''')
while episodeselection.strip()!="5":
if tprint2==('1'):
print(chr(27) + "[2J" + chr(27) + "[H")
episodeselection = ('null')
tprint2 = ('1')
t = open('./art/EPSEL-BANNER.TCR', 'r')
tcr_contents = t.read()
print (chr(27) + "[2J" + chr(27) + "[H" + tcr_contents + '''"which way?"''')
t.close()
tprint2 = ('0')
print (
'''episode selection:
1: episode 1: maintenance duties (RED)
: episode 2 -coming soon- (BLUE)
: episode 3 -coming soon- (GREEN)
4: BONUS! Playable flashback to Furry Text Escape 1!
5: return to main menu.'''
)
episodeselection = raw_input('choice:')
print(chr(27) + "[2A")
if episodeselection=="1":
print(chr(27) + "[2J" + chr(27) + "[H")
execfile("EP1-intro.py")
execfile("EP-1.py")
execfile("EP1-outro.py")
print(chr(27) + "[2J" + chr(27) + "[H")
tprint2 = ('1')
if episodeselection=="4":
print(chr(27) + "[2J" + chr(27) + "[H")
execfile("DARKROOM.py")
print(chr(27) + "[2J" + chr(27) + "[H")
tprint2 = ('1')
print(chr(27) + "[2J" + chr(27) + "[H")
tprint1 = ('1')
if n=="1":
print(chr(27) + "[2J" + chr(27) + "[H")
execfile("CINA1-OPEN.py")
print(chr(27) + "[2J" + chr(27) + "[H")
tprint1 = ('1')
if n=="3":
print(chr(27) + "[2J" + chr(27) + "[H")
execfile("CREDITS.py")
print(chr(27) + "[2J" + chr(27) + "[H")
tprint1 = ('1')
t.close()
#
| ThomasTheSpaceFox/furry-text-escape-2 | TE2.py | Python | gpl-3.0 | 2,244 | 0.043226 |
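The screen handling above is raw ANSI: ESC[2J clears the terminal, ESC[H homes the cursor, and ESC[2A moves it up two lines. A minimal standalone sketch of the same control sequences:

ESC = chr(27)
CLEAR_AND_HOME = ESC + "[2J" + ESC + "[H"  # wipe screen, cursor to top-left
CURSOR_UP_TWO = ESC + "[2A"                # redraw over the previous prompt
print(CLEAR_AND_HOME + "Furry Text Escape II - title screen")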
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2011 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import UnicodeMixin, base_text_type, u, s
from guessit.fileutils import load_file_in_same_dir
from guessit.textutils import find_words
from guessit.country import Country
import re
import logging
__all__ = [ 'is_iso_language', 'is_language', 'lang_set', 'Language',
'ALL_LANGUAGES', 'ALL_LANGUAGES_NAMES', 'UNDETERMINED',
'search_language', 'guess_language' ]
log = logging.getLogger(__name__)
# downloaded from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
#
# Description of the fields:
# "An alpha-3 (bibliographic) code, an alpha-3 (terminologic) code (when given),
# an alpha-2 code (when given), an English name, and a French name of a language
# are all separated by pipe (|) characters."
_iso639_contents = load_file_in_same_dir(__file__, 'ISO-639-2_utf-8.txt')
# drop the BOM from the beginning of the file
_iso639_contents = _iso639_contents[1:]
language_matrix = [ l.strip().split('|')
for l in _iso639_contents.strip().split('\n') ]
# update information in the language matrix
language_matrix += [['mol', '', 'mo', 'Moldavian', 'moldave'],
['ass', '', '', 'Assyrian', 'assyrien']]
for lang in language_matrix:
# remove unused languages that shadow other common ones with a non-official form
if (lang[2] == 'se' or # Northern Sami shadows Swedish
lang[2] == 'br'): # Breton shadows Brazilian
lang[2] = ''
# add missing information
if lang[0] == 'und':
lang[2] = 'un'
if lang[0] == 'srp':
lang[1] = 'scc' # from OpenSubtitles
lng3 = frozenset(l[0] for l in language_matrix if l[0])
lng3term = frozenset(l[1] for l in language_matrix if l[1])
lng2 = frozenset(l[2] for l in language_matrix if l[2])
lng_en_name = frozenset(lng for l in language_matrix
for lng in l[3].lower().split('; ') if lng)
lng_fr_name = frozenset(lng for l in language_matrix
for lng in l[4].lower().split('; ') if lng)
lng_all_names = lng3 | lng3term | lng2 | lng_en_name | lng_fr_name
lng3_to_lng3term = dict((l[0], l[1]) for l in language_matrix if l[1])
lng3term_to_lng3 = dict((l[1], l[0]) for l in language_matrix if l[1])
lng3_to_lng2 = dict((l[0], l[2]) for l in language_matrix if l[2])
lng2_to_lng3 = dict((l[2], l[0]) for l in language_matrix if l[2])
# we only return the first given english name, hoping it is the most used one
lng3_to_lng_en_name = dict((l[0], l[3].split('; ')[0])
for l in language_matrix if l[3])
lng_en_name_to_lng3 = dict((en_name.lower(), l[0])
for l in language_matrix if l[3]
for en_name in l[3].split('; '))
# we only return the first given french name, hoping it is the most used one
lng3_to_lng_fr_name = dict((l[0], l[4].split('; ')[0])
for l in language_matrix if l[4])
lng_fr_name_to_lng3 = dict((fr_name.lower(), l[0])
for l in language_matrix if l[4]
for fr_name in l[4].split('; '))
# contains a list of exceptions: strings that should be parsed as a language
# but which are not in an ISO form
lng_exceptions = { 'unknown': ('und', None),
'inconnu': ('und', None),
'unk': ('und', None),
'un': ('und', None),
'gr': ('gre', None),
'greek': ('gre', None),
'esp': ('spa', None),
'español': ('spa', None),
'se': ('swe', None),
'po': ('pt', 'br'),
'pb': ('pt', 'br'),
'pob': ('pt', 'br'),
'br': ('pt', 'br'),
'brazilian': ('pt', 'br'),
'català': ('cat', None),
'cz': ('cze', None),
'ua': ('ukr', None),
'cn': ('chi', None),
'chs': ('chi', None),
'jp': ('jpn', None),
'scr': ('hrv', None)
}
def is_iso_language(language):
return language.lower() in lng_all_names
def is_language(language):
return is_iso_language(language) or language in lng_exceptions
def lang_set(languages, strict=False):
"""Return a set of guessit.Language created from their given string
representation.
if strict is True, then this will raise an exception if any language
could not be identified.
"""
return set(Language(l, strict=strict) for l in languages)
class Language(UnicodeMixin):
"""This class represents a human language.
You can initialize it with pretty much anything, as it knows conversion
from ISO-639 2-letter and 3-letter codes, English and French names.
You can also distinguish languages for specific countries, such as
Portuguese and Brazilian Portuguese.
There are various properties on the language object that give you the
representation of the language for a specific usage, such as .alpha3
to get the ISO 3-letter code, or .opensubtitles to get the OpenSubtitles
language code.
>>> Language('fr')
Language(French)
>>> s(Language('eng').french_name)
'anglais'
>>> s(Language('pt(br)').country.english_name)
'Brazil'
>>> s(Language('Español (Latinoamérica)').country.english_name)
'Latin America'
>>> Language('Spanish (Latin America)') == Language('Español (Latinoamérica)')
True
>>> s(Language('zz', strict=False).english_name)
'Undetermined'
>>> s(Language('pt(br)').opensubtitles)
'pob'
"""
    _with_country_regexp = re.compile(r'(.*)\((.*)\)')
    _with_country_regexp2 = re.compile(r'(.*)-(.*)')
def __init__(self, language, country=None, strict=False, scheme=None):
language = u(language.strip().lower())
with_country = (Language._with_country_regexp.match(language) or
Language._with_country_regexp2.match(language))
if with_country:
self.lang = Language(with_country.group(1)).lang
self.country = Country(with_country.group(2))
return
self.lang = None
self.country = Country(country) if country else None
# first look for scheme specific languages
if scheme == 'opensubtitles':
if language == 'br':
self.lang = 'bre'
return
elif language == 'se':
self.lang = 'sme'
return
elif scheme is not None:
log.warning('Unrecognized scheme: "%s" - Proceeding with standard one' % scheme)
# look for ISO language codes
if len(language) == 2:
self.lang = lng2_to_lng3.get(language)
elif len(language) == 3:
self.lang = (language
if language in lng3
else lng3term_to_lng3.get(language))
else:
self.lang = (lng_en_name_to_lng3.get(language) or
lng_fr_name_to_lng3.get(language))
# general language exceptions
if self.lang is None and language in lng_exceptions:
lang, country = lng_exceptions[language]
self.lang = Language(lang).alpha3
self.country = Country(country) if country else None
msg = 'The given string "%s" could not be identified as a language' % language
if self.lang is None and strict:
raise ValueError(msg)
if self.lang is None:
log.debug(msg)
self.lang = 'und'
@property
def alpha2(self):
return lng3_to_lng2[self.lang]
@property
def alpha3(self):
return self.lang
@property
def alpha3term(self):
return lng3_to_lng3term[self.lang]
@property
def english_name(self):
return lng3_to_lng_en_name[self.lang]
@property
def french_name(self):
return lng3_to_lng_fr_name[self.lang]
@property
def opensubtitles(self):
if self.lang == 'por' and self.country and self.country.alpha2 == 'br':
return 'pob'
elif self.lang in ['gre', 'srp']:
return self.alpha3term
return self.alpha3
@property
def tmdb(self):
if self.country:
return '%s-%s' % (self.alpha2, self.country.alpha2.upper())
return self.alpha2
def __hash__(self):
return hash(self.lang)
def __eq__(self, other):
if isinstance(other, Language):
return self.lang == other.lang
if isinstance(other, base_text_type):
try:
return self == Language(other)
except ValueError:
return False
return False
def __ne__(self, other):
return not self == other
def __nonzero__(self):
return self.lang != 'und'
def __unicode__(self):
if self.country:
return '%s(%s)' % (self.english_name, self.country.alpha2)
else:
return self.english_name
def __repr__(self):
if self.country:
return 'Language(%s, country=%s)' % (self.english_name, self.country)
else:
return 'Language(%s)' % self.english_name
UNDETERMINED = Language('und')
ALL_LANGUAGES = frozenset(Language(lng) for lng in lng_all_names) - frozenset([UNDETERMINED])
ALL_LANGUAGES_NAMES = lng_all_names
def search_language(string, lang_filter=None):
"""Looks for language patterns, and if found return the language object,
its group span and an associated confidence.
you can specify a list of allowed languages using the lang_filter argument,
as in lang_filter = [ 'fr', 'eng', 'spanish' ]
>>> search_language('movie [en].avi')
(Language(English), (7, 9), 0.8)
>>> search_language('the zen fat cat and the gay mad men got a new fan', lang_filter = ['en', 'fr', 'es'])
(None, None, None)
"""
    # list of common words which could be interpreted as languages, but which
    # are far too common to be able to say they represent a language in the
    # middle of a string (where they most likely carry their common meaning)
lng_common_words = frozenset([
# english words
'is', 'it', 'am', 'mad', 'men', 'man', 'run', 'sin', 'st', 'to',
'no', 'non', 'war', 'min', 'new', 'car', 'day', 'bad', 'bat', 'fan',
'fry', 'cop', 'zen', 'gay', 'fat', 'cherokee', 'got', 'an', 'as',
'cat', 'her', 'be', 'hat', 'sun', 'may', 'my', 'mr', 'rum', 'pi',
# french words
'bas', 'de', 'le', 'son', 'vo', 'vf', 'ne', 'ca', 'ce', 'et', 'que',
'mal', 'est', 'vol', 'or', 'mon', 'se',
# spanish words
'la', 'el', 'del', 'por', 'mar',
# other
'ind', 'arw', 'ts', 'ii', 'bin', 'chan', 'ss', 'san', 'oss', 'iii',
'vi', 'ben', 'da', 'lt'
])
sep = r'[](){} \._-+'
if lang_filter:
lang_filter = lang_set(lang_filter)
slow = ' %s ' % string.lower()
confidence = 1.0 # for all of them
for lang in set(find_words(slow)) & lng_all_names:
if lang in lng_common_words:
continue
pos = slow.find(lang)
if pos != -1:
end = pos + len(lang)
# make sure our word is always surrounded by separators
if slow[pos - 1] not in sep or slow[end] not in sep:
continue
language = Language(slow[pos:end])
if lang_filter and language not in lang_filter:
continue
# only allow those languages that have a 2-letter code, those that
# don't are too esoteric and probably false matches
if language.lang not in lng3_to_lng2:
continue
# confidence depends on lng2, lng3, english name, ...
if len(lang) == 2:
confidence = 0.8
elif len(lang) == 3:
confidence = 0.9
else:
# Note: we could either be really confident that we found a
# language or assume that full language names are too
# common words and lower their confidence accordingly
confidence = 0.3 # going with the low-confidence route here
return language, (pos - 1, end - 1), confidence
return None, None, None
def guess_language(text):
"""Guess the language in which a body of text is written.
This uses the external guess-language python module, and will fail and return
Language(Undetermined) if it is not installed.
"""
try:
from guess_language import guessLanguage
return Language(guessLanguage(text))
except ImportError:
log.error('Cannot detect the language of the given text body, missing dependency: guess-language')
log.error('Please install it from PyPI, by doing eg: pip install guess-language')
return UNDETERMINED
| jerbob92/CouchPotatoServer | libs/guessit/language.py | Python | gpl-3.0 | 13,849 | 0.003251 |
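The helpers above can be exercised directly, as the doctests suggest; search_language only reports a token when it is surrounded by separators and maps to a 2-letter ISO code. A sketch, assuming this bundled guessit package is importable:

from guessit.language import Language, search_language
print(Language('fr'))                    # Language(French)
print(Language('pt(br)').opensubtitles)  # pob
lang, span, confidence = search_language('movie [en].avi')
print(lang, span, confidence)            # English (7, 9) 0.8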
#
# (c) 2017, Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
import time
import glob
from ansible.plugins.action.ironware import ActionModule as _ActionModule
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.parse import urlsplit
from ansible.utils.vars import merge_hash
PRIVATE_KEYS_RE = re.compile('__.+__')
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
if self._task.args.get('src'):
try:
self._handle_template()
except ValueError as exc:
return dict(failed=True, msg=exc.message)
result = super(ActionModule, self).run(tmp, task_vars)
if self._task.args.get('backup') and result.get('__backup__'):
# User requested backup and no error occurred in module.
# NOTE: If there is a parameter error, _backup key may not be in results.
filepath = self._write_backup(task_vars['inventory_hostname'],
result['__backup__'])
result['backup_path'] = filepath
        # strip out any keys that have two leading and two trailing
        # underscore characters (copy the keys so we can safely delete
        # from the dict while iterating)
        for key in list(result.keys()):
if PRIVATE_KEYS_RE.match(key):
del result[key]
return result
def _get_working_path(self):
cwd = self._loader.get_basedir()
if self._task._role is not None:
cwd = self._task._role._role_path
return cwd
def _write_backup(self, host, contents):
backup_path = self._get_working_path() + '/backup'
if not os.path.exists(backup_path):
os.mkdir(backup_path)
for fn in glob.glob('%s/%s*' % (backup_path, host)):
os.remove(fn)
tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time()))
filename = '%s/%s_config.%s' % (backup_path, host, tstamp)
open(filename, 'w').write(contents)
return filename
def _handle_template(self):
src = self._task.args.get('src')
working_path = self._get_working_path()
        if os.path.isabs(src) or urlsplit(src).scheme:
source = src
else:
source = self._loader.path_dwim_relative(working_path, 'templates', src)
if not source:
source = self._loader.path_dwim_relative(working_path, src)
if not os.path.exists(source):
raise ValueError('path specified in src not found')
try:
with open(source, 'r') as f:
template_data = to_text(f.read())
except IOError:
return dict(failed=True, msg='unable to load src file')
# Create a template search path in the following order:
# [working_path, self_role_path, dependent_role_paths, dirname(source)]
searchpath = [working_path]
if self._task._role is not None:
searchpath.append(self._task._role._role_path)
if hasattr(self._task, "_block:"):
dep_chain = self._task._block.get_dep_chain()
if dep_chain is not None:
for role in dep_chain:
searchpath.append(role._role_path)
searchpath.append(os.path.dirname(source))
self._templar.environment.loader.searchpath = searchpath
self._task.args['src'] = self._templar.template(template_data)
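# A sketch of the resulting Jinja2 search path for a task running inside a
# role with one dependent role (all paths below are hypothetical):
#
#   searchpath == [
#       '/playbooks',                          # working path
#       '/playbooks/roles/ironware',           # the task's own role
#       '/playbooks/roles/common',             # role from the dependency chain
#       '/playbooks/roles/ironware/templates', # dirname of the resolved source
#   ]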
|
tsdmgz/ansible
|
lib/ansible/plugins/action/ironware_config.py
|
Python
|
gpl-3.0
| 4,167 | 0.00072 |
import scipy.stats as estad
from tikon.ecs.aprioris import APrioriDist
from tikon.ecs.árb_mód import Parám
from tikon.móds.rae.orgs.ecs.repr._plntll_ec import EcuaciónReprCoh
class N(Parám):
nombre = 'n'
líms = (0, None)
unids = None
apriori = APrioriDist(estad.expon(scale=500))
class A(Parám):
nombre = 'a'
líms = (0, None)
unids = None
apriori = APrioriDist(estad.expon(scale=100))
class B(Parám):
nombre = 'b'
líms = (0, None)
unids = None
apriori = APrioriDist(estad.expon(scale=100))
class C(Parám):
nombre = 'c'
líms = (0, 1)
unids = None
class Triang(EcuaciónReprCoh):
nombre = 'Triang'
cls_ramas = [N, A, B, C]
_cls_dist = estad.triang
def _prms_scipy(símismo):
cf = símismo.cf
return dict(loc=cf['a'], scale=cf['b'], c=cf['c'])
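# A minimal sketch of how the calibrated coefficients feed SciPy's triangular
# distribution (the coefficient values below are made up):
#
#   dist = estad.triang(c=0.3, loc=0.5, scale=2.0)
#   muestras = dist.rvs(size=10)
#
# In SciPy's parametrization the support is [loc, loc + scale] and the mode
# sits at loc + c * scale, so 'a' is the lower bound, 'b' the width and 'c'
# the relative position of the peak.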
|
julienmalard/Tikon
|
tikon/móds/rae/orgs/ecs/repr/triang.py
|
Python
|
agpl-3.0
| 857 | 0 |
"""
WSGI config for Courseware project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Courseware.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
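# A typical way to serve this module with gunicorn (the bind address is just
# an example):
#
#   gunicorn --bind 0.0.0.0:8000 Courseware.wsgi:application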
|
shayan72/Courseware
|
Courseware/wsgi.py
|
Python
|
mit
| 395 | 0.002532 |
import _plotly_utils.basevalidators
class TickvalsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(self, plotly_name="tickvals", parent_name="mesh3d.colorbar", **kwargs):
super(TickvalsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "data"),
**kwargs
)
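# Sketch of the attribute this validator guards, via the public API (assuming
# a standard plotly installation):
#
#   import plotly.graph_objects as go
#   fig = go.Figure(go.Mesh3d(
#       x=[0, 1, 2], y=[0, 1, 0], z=[0, 0, 1],
#       colorbar=dict(tickvals=[0.0, 0.5, 1.0]),
#   ))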
|
plotly/python-api
|
packages/python/plotly/plotly/validators/mesh3d/colorbar/_tickvals.py
|
Python
|
mit
| 460 | 0.002174 |
#!/usr/bin/python
"""Updates the timezone data held in bionic and ICU."""
import ftplib
import glob
import httplib
import os
import re
import shutil
import subprocess
import sys
import tarfile
import tempfile
regions = ['africa', 'antarctica', 'asia', 'australasia',
'etcetera', 'europe', 'northamerica', 'southamerica',
# These two deliberately come last so they override what came
# before (and each other).
'backward', 'backzone' ]
def CheckDirExists(dir, dirname):
if not os.path.isdir(dir):
print "Couldn't find %s (%s)!" % (dirname, dir)
sys.exit(1)
bionic_libc_tools_zoneinfo_dir = os.path.realpath(os.path.dirname(sys.argv[0]))
# Find the bionic directory, searching upward from this script.
bionic_dir = os.path.realpath('%s/../../..' % bionic_libc_tools_zoneinfo_dir)
bionic_libc_zoneinfo_dir = '%s/libc/zoneinfo' % bionic_dir
CheckDirExists(bionic_libc_zoneinfo_dir, 'bionic/libc/zoneinfo')
CheckDirExists(bionic_libc_tools_zoneinfo_dir, 'bionic/libc/tools/zoneinfo')
print 'Found bionic in %s ...' % bionic_dir
# Find the icu4c directory.
icu_dir = os.path.realpath('%s/../external/icu/icu4c/source' % bionic_dir)
CheckDirExists(icu_dir, 'external/icu/icu4c/source')
print 'Found icu in %s ...' % icu_dir
def GetCurrentTzDataVersion():
return open('%s/tzdata' % bionic_libc_zoneinfo_dir).read().split('\x00', 1)[0]
def WriteSetupFile():
"""Writes the list of zones that ZoneCompactor should process."""
links = []
zones = []
for region in regions:
for line in open('extracted/%s' % region):
fields = line.split()
if fields:
if fields[0] == 'Link':
links.append('%s %s %s' % (fields[0], fields[1], fields[2]))
zones.append(fields[2])
elif fields[0] == 'Zone':
zones.append(fields[1])
zones.sort()
setup = open('setup', 'w')
for link in sorted(set(links)):
setup.write('%s\n' % link)
for zone in sorted(set(zones)):
setup.write('%s\n' % zone)
setup.close()
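# A sketch of the resulting 'setup' file: sorted, de-duplicated Link lines
# first, then one zone name per line (entries below are illustrative):
#
#   Link America/Denver America/Shiprock
#   ...
#   America/Denver
#   America/New_York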
def SwitchToNewTemporaryDirectory():
tmp_dir = tempfile.mkdtemp('-tzdata')
os.chdir(tmp_dir)
print 'Created temporary directory "%s"...' % tmp_dir
def FtpRetrieveFile(ftp, filename):
ftp.retrbinary('RETR %s' % filename, open(filename, 'wb').write)
def FtpRetrieveFileAndSignature(ftp, data_filename):
"""Downloads and repackages the given data from the given FTP server."""
print 'Downloading data...'
FtpRetrieveFile(ftp, data_filename)
print 'Downloading signature...'
signature_filename = '%s.asc' % data_filename
FtpRetrieveFile(ftp, signature_filename)
def HttpRetrieveFile(http, path, output_filename):
http.request("GET", path)
f = open(output_filename, 'wb')
f.write(http.getresponse().read())
f.close()
def HttpRetrieveFileAndSignature(http, data_filename):
"""Downloads and repackages the given data from the given HTTP server."""
path = "/time-zones/repository/releases/%s" % data_filename
print 'Downloading data...'
HttpRetrieveFile(http, path, data_filename)
print 'Downloading signature...'
signature_filename = '%s.asc' % data_filename
  HttpRetrieveFile(http, "%s.asc" % path, signature_filename)
def BuildIcuToolsAndData(data_filename):
# Keep track of the original cwd so we can go back to it at the end.
original_working_dir = os.getcwd()
# Create a directory to run 'make' from.
icu_working_dir = '%s/icu' % original_working_dir
os.mkdir(icu_working_dir)
os.chdir(icu_working_dir)
# Build the ICU tools.
print 'Configuring ICU tools...'
subprocess.check_call(['%s/runConfigureICU' % icu_dir, 'Linux'])
# Run the ICU tools.
os.chdir('tools/tzcode')
  # The tz2icu tool only picks up icuregions and icuzones if they are in the CWD.
for icu_data_file in [ 'icuregions', 'icuzones']:
icu_data_file_source = '%s/tools/tzcode/%s' % (icu_dir, icu_data_file)
icu_data_file_symlink = './%s' % icu_data_file
os.symlink(icu_data_file_source, icu_data_file_symlink)
shutil.copyfile('%s/%s' % (original_working_dir, data_filename), data_filename)
print 'Making ICU data...'
# The Makefile assumes the existence of the bin directory.
os.mkdir('%s/bin' % icu_working_dir)
subprocess.check_call(['make'])
# Copy the source file to its ultimate destination.
icu_txt_data_dir = '%s/data/misc' % icu_dir
print 'Copying zoneinfo64.txt to %s ...' % icu_txt_data_dir
shutil.copy('zoneinfo64.txt', icu_txt_data_dir)
# Regenerate the .dat file.
os.chdir(icu_working_dir)
subprocess.check_call(['make', '-j32'])
# Copy the .dat file to its ultimate destination.
icu_dat_data_dir = '%s/stubdata' % icu_dir
datfiles = glob.glob('data/out/tmp/icudt??l.dat')
if len(datfiles) != 1:
print 'ERROR: Unexpectedly found %d .dat files (%s). Halting.' % (len(datfiles), datfiles)
sys.exit(1)
datfile = datfiles[0]
print 'Copying %s to %s ...' % (datfile, icu_dat_data_dir)
shutil.copy(datfile, icu_dat_data_dir)
# Switch back to the original working cwd.
os.chdir(original_working_dir)
def CheckSignature(data_filename):
signature_filename = '%s.asc' % data_filename
print 'Verifying signature...'
# If this fails for you, you probably need to import Paul Eggert's public key:
# gpg --recv-keys ED97E90E62AA7E34
subprocess.check_call(['gpg', '--trusted-key=ED97E90E62AA7E34', '--verify',
signature_filename, data_filename])
def BuildBionicToolsAndData(data_filename):
new_version = re.search('(tzdata.+)\\.tar\\.gz', data_filename).group(1)
print 'Extracting...'
os.mkdir('extracted')
tar = tarfile.open(data_filename, 'r')
tar.extractall('extracted')
print 'Calling zic(1)...'
os.mkdir('data')
zic_inputs = [ 'extracted/%s' % x for x in regions ]
zic_cmd = ['zic', '-d', 'data' ]
zic_cmd.extend(zic_inputs)
subprocess.check_call(zic_cmd)
WriteSetupFile()
print 'Calling ZoneCompactor to update bionic to %s...' % new_version
subprocess.check_call(['javac', '-d', '.',
'%s/ZoneCompactor.java' % bionic_libc_tools_zoneinfo_dir])
subprocess.check_call(['java', 'ZoneCompactor',
'setup', 'data', 'extracted/zone.tab',
bionic_libc_zoneinfo_dir, new_version])
# Run with no arguments from any directory, with no special setup required.
# See http://www.iana.org/time-zones/ for more about the source of this data.
def main():
print 'Looking for new tzdata...'
tzdata_filenames = []
# The FTP server lets you download intermediate releases, and also lets you
# download the signatures for verification, so it's your best choice.
use_ftp = True
if use_ftp:
ftp = ftplib.FTP('ftp.iana.org')
ftp.login()
ftp.cwd('tz/releases')
for filename in ftp.nlst():
if filename.startswith('tzdata20') and filename.endswith('.tar.gz'):
tzdata_filenames.append(filename)
tzdata_filenames.sort()
else:
http = httplib.HTTPConnection('www.iana.org')
http.request("GET", "/time-zones")
index_lines = http.getresponse().read().split('\n')
for line in index_lines:
      m = re.compile('.*href="/time-zones/repository/releases/(tzdata20\d\d\w\.tar\.gz)".*').match(line)
if m:
tzdata_filenames.append(m.group(1))
# If you're several releases behind, we'll walk you through the upgrades
# one by one.
current_version = GetCurrentTzDataVersion()
current_filename = '%s.tar.gz' % current_version
for filename in tzdata_filenames:
if filename > current_filename:
print 'Found new tzdata: %s' % filename
SwitchToNewTemporaryDirectory()
if use_ftp:
FtpRetrieveFileAndSignature(ftp, filename)
else:
HttpRetrieveFileAndSignature(http, filename)
CheckSignature(filename)
BuildIcuToolsAndData(filename)
BuildBionicToolsAndData(filename)
print 'Look in %s and %s for new data files' % (bionic_dir, icu_dir)
sys.exit(0)
print 'You already have the latest tzdata (%s)!' % current_version
sys.exit(0)
if __name__ == '__main__':
main()
|
s20121035/rk3288_android5.1_repo
|
bionic/libc/tools/zoneinfo/update-tzdata.py
|
Python
|
gpl-3.0
| 8,075 | 0.016966 |
def splice(alists, recycle = True):
"""
Accepts a list of nonempty lists or indexable objects in
argument alists (each element list may not be of the same
length) and a keyword argument recycle which
if true will reuse elements in lists of shorter length.
Any error will result in an empty list to be returned.
"""
try:
nlists = len(alists)
lens = [len(alist) for alist in alists]
if not recycle:
totlen = sum(lens)
else:
totlen = max(lens) * nlists
pos = [0] * nlists
R = [None] * totlen
i, j = 0, 0
while i < totlen:
if pos[j] < lens[j]:
R[i] = alists[j][pos[j]]
i += 1
pos[j] = pos[j] + 1
if recycle and pos[j] >= lens[j]:
pos[j] = 0
j = (j + 1) % nlists
return R
except:
return []
if __name__ == "__main__":
print splice([[1,2,3], ['a','b'], [4], [-1,-2,-3,-4]], recycle = False)
print splice([[1,2,3], ['a','b'], [4], [-1,-2,-3,-4]])
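    # Expected output, traced by hand from the round-robin logic above:
    #   [1, 'a', 4, -1, 2, 'b', -2, 3, -3, -4]
    #   [1, 'a', 4, -1, 2, 'b', 4, -2, 3, 'a', 4, -3, 1, 'b', 4, -4]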
|
ActiveState/code
|
recipes/Python/496684_Splicing_of_lists/recipe-496684.py
|
Python
|
mit
| 1,158 | 0.025043 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'FacebookProfile.polls'
db.add_column(u'socialplatform_facebookprofile', 'polls',
self.gf('django.db.models.fields.NullBooleanField')(default=True, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'FacebookProfile.polls'
db.delete_column(u'socialplatform_facebookprofile', 'polls')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'socialplatform.dmall': {
'Meta': {'object_name': 'DMAll'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'send_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'socialplatform.dmindividual': {
'Meta': {'object_name': 'DMIndividual'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'send_ind_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'target_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['socialplatform.TwitterProfile']"})
},
u'socialplatform.facebookprofile': {
'Meta': {'object_name': 'FacebookProfile'},
'access_token': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'active': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'facebook_id': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'helpdesk': ('django.db.models.fields.NullBooleanField', [], {'default': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'issue': ('django.db.models.fields.NullBooleanField', [], {'default': 'True', 'null': 'True', 'blank': 'True'}),
'notifications': ('django.db.models.fields.NullBooleanField', [], {'default': 'True', 'null': 'True', 'blank': 'True'}),
'polls': ('django.db.models.fields.NullBooleanField', [], {'default': 'True', 'null': 'True', 'blank': 'True'}),
'profilePicture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'socialplatform.fbnotification': {
'Meta': {'object_name': 'FBNotification'},
'fb_profile': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['socialplatform.FacebookProfile']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sender': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'text': ('django.db.models.fields.TextField', [], {})
},
u'socialplatform.tweet': {
'Meta': {'object_name': 'Tweet'},
'content': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'created': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'tweet_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'socialplatform.twitterprofile': {
'Meta': {'object_name': 'TwitterProfile'},
'active': ('django.db.models.fields.NullBooleanField', [], {'default': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"}),
'user_name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
}
}
complete_apps = ['socialplatform']
|
tekton/DocuCanvas
|
socialplatform/migrations/0005_auto__add_field_facebookprofile_polls.py
|
Python
|
gpl-3.0
| 7,753 | 0.007868 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add ``dag_code`` table
Revision ID: 952da73b5eff
Revises: 852ae6c715af
Create Date: 2020-03-12 12:39:01.797462
"""
import sqlalchemy as sa
from alembic import op
from airflow.models.dagcode import DagCode
# revision identifiers, used by Alembic.
revision = '952da73b5eff'
down_revision = '852ae6c715af'
branch_labels = None
depends_on = None
airflow_version = '1.10.10'
def upgrade():
"""Create DagCode Table."""
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class SerializedDagModel(Base):
__tablename__ = 'serialized_dag'
# There are other columns here, but these are the only ones we need for the SELECT/UPDATE we are doing
dag_id = sa.Column(sa.String(250), primary_key=True)
fileloc = sa.Column(sa.String(2000), nullable=False)
fileloc_hash = sa.Column(sa.BigInteger, nullable=False)
"""Apply add source code table"""
op.create_table(
'dag_code',
sa.Column('fileloc_hash', sa.BigInteger(), nullable=False, primary_key=True, autoincrement=False),
sa.Column('fileloc', sa.String(length=2000), nullable=False),
sa.Column('source_code', sa.UnicodeText(), nullable=False),
sa.Column('last_updated', sa.TIMESTAMP(timezone=True), nullable=False),
)
conn = op.get_bind()
if conn.dialect.name != 'sqlite':
if conn.dialect.name == "mssql":
op.drop_index('idx_fileloc_hash', 'serialized_dag')
op.alter_column(
table_name='serialized_dag', column_name='fileloc_hash', type_=sa.BigInteger(), nullable=False
)
if conn.dialect.name == "mssql":
op.create_index('idx_fileloc_hash', 'serialized_dag', ['fileloc_hash'])
sessionmaker = sa.orm.sessionmaker()
session = sessionmaker(bind=conn)
serialized_dags = session.query(SerializedDagModel).all()
for dag in serialized_dags:
dag.fileloc_hash = DagCode.dag_fileloc_hash(dag.fileloc)
session.merge(dag)
session.commit()
def downgrade():
"""Unapply add source code table"""
op.drop_table('dag_code')
|
apache/airflow
|
airflow/migrations/versions/952da73b5eff_add_dag_code_table.py
|
Python
|
apache-2.0
| 2,906 | 0.001376 |
# Copyright (C) 2013 Johnny Vestergaard <jkv@unixcluster.dk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import poplib
from datetime import datetime
import logging
from beeswarm.drones.client.baits.clientbase import ClientBase
logger = logging.getLogger(__name__)
class Pop3(ClientBase):
def __init__(self, options):
"""
Initializes common values.
:param options: A dict containing all options
"""
super(Pop3, self).__init__(options)
def start(self):
"""
Launches a new POP3 client session on the server.
"""
username = self.options['username']
password = self.options['password']
server_host = self.options['server']
server_port = self.options['port']
honeypot_id = self.options['honeypot_id']
session = self.create_session(server_host, server_port, honeypot_id)
        conn = None
        try:
logger.debug(
'Sending {0} bait session to {1}:{2}. (bait id: {3})'.format('pop3', server_host, server_port,
session.id))
conn = poplib.POP3(server_host, server_port)
session.source_port = conn.sock.getsockname()[1]
banner = conn.getwelcome()
session.protocol_data['banner'] = banner
session.did_connect = True
conn.user(username)
conn.pass_(password)
# TODO: Handle failed login
session.add_auth_attempt('plaintext', True, username=username, password=password)
session.did_login = True
session.timestamp = datetime.utcnow()
# except (poplib.error_proto, h_socket.error) as err:
except Exception as err:
logger.debug('Caught exception: {0} ({1})'.format(err, str(type(err))))
else:
list_entries = conn.list()[1]
for entry in list_entries:
index, octets = entry.split(' ')
conn.retr(index)
conn.dele(index)
logger.debug('Found and deleted {0} messages on {1}'.format(len(list_entries), server_host))
conn.quit()
session.did_complete = True
finally:
session.all_done = True
session.end_session()
if conn:
try:
conn.file.close()
except Exception:
pass
try:
conn.sock.close()
except Exception:
pass
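# A minimal options dict for driving this bait by hand (all values below are
# illustrative; ClientBase may expect additional keys):
#
#   bait = Pop3({
#       'username': 'alice',
#       'password': 'secret',
#       'server': '10.0.0.5',
#       'port': 110,
#       'honeypot_id': 'hp-01',
#   })
#   bait.start()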
|
honeynet/beeswarm
|
beeswarm/drones/client/baits/pop3.py
|
Python
|
gpl-3.0
| 3,174 | 0.001575 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python utilities required by Keras."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import marshal
import os
import sys
import time
import types as python_types
import numpy as np
import six
from tensorflow.python.util import tf_decorator
from tensorflow.python.util import tf_inspect
_GLOBAL_CUSTOM_OBJECTS = {}
class CustomObjectScope(object):
"""Provides a scope that changes to `_GLOBAL_CUSTOM_OBJECTS` cannot escape.
Code within a `with` statement will be able to access custom objects
by name. Changes to global custom objects persist
within the enclosing `with` statement. At end of the `with` statement,
global custom objects are reverted to state
at beginning of the `with` statement.
Example:
Consider a custom object `MyObject` (e.g. a class):
```python
with CustomObjectScope({'MyObject':MyObject}):
layer = Dense(..., kernel_regularizer='MyObject')
# save, load, etc. will recognize custom object by name
```
"""
def __init__(self, *args):
self.custom_objects = args
self.backup = None
def __enter__(self):
self.backup = _GLOBAL_CUSTOM_OBJECTS.copy()
for objects in self.custom_objects:
_GLOBAL_CUSTOM_OBJECTS.update(objects)
return self
def __exit__(self, *args, **kwargs):
_GLOBAL_CUSTOM_OBJECTS.clear()
_GLOBAL_CUSTOM_OBJECTS.update(self.backup)
def custom_object_scope(*args):
"""Provides a scope that changes to `_GLOBAL_CUSTOM_OBJECTS` cannot escape.
Convenience wrapper for `CustomObjectScope`.
Code within a `with` statement will be able to access custom objects
by name. Changes to global custom objects persist
within the enclosing `with` statement. At end of the `with` statement,
global custom objects are reverted to state
at beginning of the `with` statement.
Example:
Consider a custom object `MyObject`
```python
with custom_object_scope({'MyObject':MyObject}):
layer = Dense(..., kernel_regularizer='MyObject')
# save, load, etc. will recognize custom object by name
```
Arguments:
*args: Variable length list of dictionaries of name,
class pairs to add to custom objects.
Returns:
Object of type `CustomObjectScope`.
"""
return CustomObjectScope(*args)
def get_custom_objects():
"""Retrieves a live reference to the global dictionary of custom objects.
Updating and clearing custom objects using `custom_object_scope`
is preferred, but `get_custom_objects` can
be used to directly access `_GLOBAL_CUSTOM_OBJECTS`.
Example:
```python
get_custom_objects().clear()
get_custom_objects()['MyObject'] = MyObject
```
Returns:
Global dictionary of names to classes (`_GLOBAL_CUSTOM_OBJECTS`).
"""
return _GLOBAL_CUSTOM_OBJECTS
def serialize_keras_object(instance):
_, instance = tf_decorator.unwrap(instance)
if instance is None:
return None
if hasattr(instance, 'get_config'):
return {
'class_name': instance.__class__.__name__,
'config': instance.get_config()
}
if hasattr(instance, '__name__'):
return instance.__name__
else:
raise ValueError('Cannot serialize', instance)
def deserialize_keras_object(identifier,
module_objects=None,
custom_objects=None,
printable_module_name='object'):
if isinstance(identifier, dict):
# In this case we are dealing with a Keras config dictionary.
config = identifier
if 'class_name' not in config or 'config' not in config:
raise ValueError('Improper config format: ' + str(config))
class_name = config['class_name']
if custom_objects and class_name in custom_objects:
cls = custom_objects[class_name]
elif class_name in _GLOBAL_CUSTOM_OBJECTS:
cls = _GLOBAL_CUSTOM_OBJECTS[class_name]
else:
module_objects = module_objects or {}
cls = module_objects.get(class_name)
if cls is None:
raise ValueError('Unknown ' + printable_module_name + ': ' + class_name)
if hasattr(cls, 'from_config'):
arg_spec = tf_inspect.getargspec(cls.from_config)
custom_objects = custom_objects or {}
if 'custom_objects' in arg_spec.args:
return cls.from_config(
config['config'],
custom_objects=dict(
list(_GLOBAL_CUSTOM_OBJECTS.items()) +
list(custom_objects.items())))
with CustomObjectScope(custom_objects):
return cls.from_config(config['config'])
else:
# Then `cls` may be a function returning a class.
# in this case by convention `config` holds
# the kwargs of the function.
custom_objects = custom_objects or {}
with CustomObjectScope(custom_objects):
return cls(**config['config'])
elif isinstance(identifier, six.string_types):
function_name = identifier
if custom_objects and function_name in custom_objects:
fn = custom_objects.get(function_name)
elif function_name in _GLOBAL_CUSTOM_OBJECTS:
fn = _GLOBAL_CUSTOM_OBJECTS[function_name]
else:
fn = module_objects.get(function_name)
if fn is None:
raise ValueError('Unknown ' + printable_module_name + ':' +
function_name)
return fn
else:
raise ValueError('Could not interpret serialized ' + printable_module_name +
': ' + identifier)
def func_dump(func):
"""Serializes a user defined function.
Arguments:
func: the function to serialize.
Returns:
A tuple `(code, defaults, closure)`.
"""
if os.name == 'nt':
code = marshal.dumps(
func.__code__).replace(b'\\', b'/').decode('raw_unicode_escape')
else:
code = marshal.dumps(func.__code__).decode('raw_unicode_escape')
defaults = func.__defaults__
if func.__closure__:
closure = tuple(c.cell_contents for c in func.__closure__)
else:
closure = None
return code, defaults, closure
def func_load(code, defaults=None, closure=None, globs=None):
"""Deserializes a user defined function.
Arguments:
code: bytecode of the function.
defaults: defaults of the function.
closure: closure of the function.
globs: dictionary of global objects.
Returns:
A function object.
"""
if isinstance(code, (tuple, list)): # unpack previous dump
code, defaults, closure = code
if isinstance(defaults, list):
defaults = tuple(defaults)
code = marshal.loads(code.encode('raw_unicode_escape'))
if globs is None:
globs = globals()
return python_types.FunctionType(
code, globs, name=code.co_name, argdefs=defaults, closure=closure)
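# A minimal round-trip sketch for func_dump/func_load (within one
# interpreter, since marshal output is version-specific):
#
#   def scale(x, factor=2):
#     return x * factor
#
#   code, defaults, closure = func_dump(scale)
#   restored = func_load(code, defaults, closure)
#   assert restored(3) == 6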
def has_arg(fn, name, accept_all=False):
"""Checks if a callable accepts a given keyword argument.
Arguments:
fn: Callable to inspect.
name: Check if `fn` can be called with `name` as a keyword argument.
accept_all: What to return if there is no parameter called `name`
but the function accepts a `**kwargs` argument.
Returns:
bool, whether `fn` accepts a `name` keyword argument.
"""
arg_spec = tf_inspect.getargspec(fn)
if accept_all and arg_spec.keywords is not None:
return True
return name in arg_spec.args
class Progbar(object):
"""Displays a progress bar.
Arguments:
target: Total number of steps expected, None if unknown.
interval: Minimum visual progress update interval (in seconds).
"""
def __init__(self, target, width=30, verbose=1, interval=0.05):
self.width = width
if target is None:
target = -1
self.target = target
self.sum_values = {}
self.unique_values = []
self.start = time.time()
self.last_update = 0
self.interval = interval
self.total_width = 0
self.seen_so_far = 0
self.verbose = verbose
self._dynamic_display = ((hasattr(sys.stdout, 'isatty') and
sys.stdout.isatty()) or
'ipykernel' in sys.modules)
def update(self, current, values=None, force=False):
"""Updates the progress bar.
Arguments:
current: Index of current step.
values: List of tuples (name, value_for_last_step).
The progress bar will display averages for these values.
force: Whether to force visual progress update.
"""
values = values or []
for k, v in values:
if k not in self.sum_values:
self.sum_values[k] = [
v * (current - self.seen_so_far), current - self.seen_so_far
]
self.unique_values.append(k)
else:
self.sum_values[k][0] += v * (current - self.seen_so_far)
self.sum_values[k][1] += (current - self.seen_so_far)
self.seen_so_far = current
now = time.time()
info = ' - %.0fs' % (now - self.start)
if self.verbose == 1:
if (not force and (now - self.last_update) < self.interval and
current < self.target):
return
prev_total_width = self.total_width
if self._dynamic_display:
sys.stdout.write('\b' * prev_total_width)
sys.stdout.write('\r')
else:
sys.stdout.write('\n')
if self.target is not None:
numdigits = int(np.floor(np.log10(self.target))) + 1
barstr = '%%%dd/%d [' % (numdigits, self.target)
bar = barstr % current
prog = float(current) / self.target
prog_width = int(self.width * prog)
if prog_width > 0:
bar += ('=' * (prog_width - 1))
if current < self.target:
bar += '>'
else:
bar += '='
bar += ('.' * (self.width - prog_width))
bar += ']'
sys.stdout.write(bar)
self.total_width = len(bar)
else:
bar = '%7d/Unknown' % current
self.total_width = len(bar)
sys.stdout.write(bar)
if current:
time_per_unit = (now - self.start) / current
else:
time_per_unit = 0
if self.target is not None and current < self.target:
eta = time_per_unit * (self.target - current)
if eta > 3600:
eta_format = '%d:%02d:%02d' % (eta // 3600, (eta % 3600) // 60,
eta % 60)
elif eta > 60:
eta_format = '%d:%02d' % (eta // 60, eta % 60)
else:
eta_format = '%ds' % eta
info = ' - ETA: %s' % eta_format
else:
if time_per_unit >= 1:
info += ' %.0fs/step' % time_per_unit
elif time_per_unit >= 1e-3:
info += ' %.0fms/step' % (time_per_unit * 1e3)
else:
info += ' %.0fus/step' % (time_per_unit * 1e6)
for k in self.unique_values:
info += ' - %s:' % k
if isinstance(self.sum_values[k], list):
avg = np.mean(self.sum_values[k][0] / max(1, self.sum_values[k][1]))
if abs(avg) > 1e-3:
info += ' %.4f' % avg
else:
info += ' %.4e' % avg
else:
info += ' %s' % self.sum_values[k]
self.total_width += len(info)
if prev_total_width > self.total_width:
info += (' ' * (prev_total_width - self.total_width))
if self.target is not None and current >= self.target:
info += '\n'
sys.stdout.write(info)
sys.stdout.flush()
if current >= self.target:
sys.stdout.write('\n')
elif self.verbose == 2:
if self.target is None or current >= self.target:
for k in self.unique_values:
info += ' - %s:' % k
avg = np.mean(
self.sum_values[k][0] / max(1, self.sum_values[k][1]))
if avg > 1e-3:
info += ' %.4f' % avg
else:
info += ' %.4e' % avg
info += '\n'
sys.stdout.write(info)
sys.stdout.flush()
self.last_update = now
def add(self, n, values=None):
self.update(self.seen_so_far + n, values)
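# A minimal usage sketch for Progbar (the metric values are made up):
#
#   progbar = Progbar(target=100)
#   for step in range(100):
#     progbar.update(step + 1, values=[('loss', 0.1)])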
|
horance-liu/tensorflow
|
tensorflow/python/keras/_impl/keras/utils/generic_utils.py
|
Python
|
apache-2.0
| 12,658 | 0.0079 |
# -*- coding: utf-8 -*-
import mock
from nose.tools import * # noqa (PEP8 asserts)
import hmac
import hashlib
from StringIO import StringIO
from django.db import IntegrityError
import furl
from modularodm import Q
from modularodm.exceptions import ValidationError
from framework.auth import get_or_create_user
from framework.auth.core import Auth
from osf.models import OSFUser as User, AbstractNode as Node
from website import settings
from website.conferences import views
from website.conferences import utils, message
from website.util import api_url_for, web_url_for
from tests.base import OsfTestCase, fake
from osf_tests.factories import ConferenceFactory, ProjectFactory, UserFactory
def assert_absolute(url):
parsed_domain = furl.furl(settings.DOMAIN)
parsed_url = furl.furl(url)
assert_equal(parsed_domain.host, parsed_url.host)
def assert_equal_urls(first, second):
parsed_first = furl.furl(first)
parsed_first.port = None
parsed_second = furl.furl(second)
parsed_second.port = None
assert_equal(parsed_first, parsed_second)
def create_fake_conference_nodes(n, endpoint):
nodes = []
for i in range(n):
node = ProjectFactory(is_public=True)
node.add_tag(endpoint, Auth(node.creator))
node.save()
nodes.append(node)
return nodes
class TestConferenceUtils(OsfTestCase):
def test_get_or_create_user_exists(self):
user = UserFactory()
fetched, created = get_or_create_user(user.fullname, user.username, is_spam=True)
assert_false(created)
assert_equal(user._id, fetched._id)
assert_false('is_spam' in fetched.system_tags)
def test_get_or_create_user_not_exists(self):
fullname = 'Roger Taylor'
username = 'roger@queen.com'
fetched, created = get_or_create_user(fullname, username, is_spam=False)
fetched.save() # in order to access m2m fields, e.g. tags
assert_true(created)
assert_equal(fetched.fullname, fullname)
assert_equal(fetched.username, username)
assert_false('is_spam' in fetched.system_tags)
def test_get_or_create_user_is_spam(self):
fullname = 'John Deacon'
username = 'deacon@queen.com'
fetched, created = get_or_create_user(fullname, username, is_spam=True)
fetched.save() # in order to access m2m fields, e.g. tags
assert_true(created)
assert_equal(fetched.fullname, fullname)
assert_equal(fetched.username, username)
assert_true('is_spam' in fetched.system_tags)
def test_get_or_create_node_exists(self):
node = ProjectFactory()
fetched, created = utils.get_or_create_node(node.title, node.creator)
assert_false(created)
assert_equal(node._id, fetched._id)
def test_get_or_create_node_title_not_exists(self):
title = 'Night at the Opera'
creator = UserFactory()
node = ProjectFactory(creator=creator)
fetched, created = utils.get_or_create_node(title, creator)
assert_true(created)
assert_not_equal(node._id, fetched._id)
def test_get_or_create_node_title_exists_deleted(self):
title = 'Night at the Opera'
creator = UserFactory()
node = ProjectFactory(title=title)
node.is_deleted = True
node.save()
fetched, created = utils.get_or_create_node(title, creator)
assert_true(created)
assert_not_equal(node._id, fetched._id)
def test_get_or_create_node_title_exists_not_deleted(self):
title = 'Night at the Opera'
creator = UserFactory()
node = ProjectFactory(title=title, creator=creator)
node.is_deleted = False
node.save()
fetched, created = utils.get_or_create_node(title, creator)
assert_false(created)
assert_equal(node._id, fetched._id)
def test_get_or_create_node_user_not_exists(self):
title = 'Night at the Opera'
creator = UserFactory()
node = ProjectFactory(title=title)
fetched, created = utils.get_or_create_node(title, creator)
assert_true(created)
assert_not_equal(node._id, fetched._id)
def test_get_or_create_user_with_blacklisted_domain(self):
fullname = 'Kanye West'
username = 'kanye@mailinator.com'
with assert_raises(ValidationError) as e:
get_or_create_user(fullname, username, is_spam=True)
assert_equal(e.exception.message, 'Invalid Email')
class ContextTestCase(OsfTestCase):
MAILGUN_API_KEY = 'mailkimp'
@classmethod
def setUpClass(cls):
super(ContextTestCase, cls).setUpClass()
settings.MAILGUN_API_KEY, cls._MAILGUN_API_KEY = cls.MAILGUN_API_KEY, settings.MAILGUN_API_KEY
@classmethod
def tearDownClass(cls):
super(ContextTestCase, cls).tearDownClass()
settings.MAILGUN_API_KEY = cls._MAILGUN_API_KEY
def make_context(self, method='POST', **kwargs):
data = {
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
}
data.update(kwargs.pop('data', {}))
data = {
key: value
for key, value in data.iteritems()
if value is not None
}
return self.app.app.test_request_context(method=method, data=data, **kwargs)
class TestProvisionNode(ContextTestCase):
def setUp(self):
super(TestProvisionNode, self).setUp()
self.node = ProjectFactory()
self.user = self.node.creator
self.conference = ConferenceFactory()
self.body = 'dragon on my back'
self.content = 'dragon attack'
self.attachment = StringIO(self.content)
self.recipient = '{0}{1}-poster@osf.io'.format(
'test-' if settings.DEV_MODE else '',
self.conference.endpoint,
)
def make_context(self, **kwargs):
data = {
'attachment-count': '1',
'attachment-1': (self.attachment, 'attachment-1'),
'X-Mailgun-Sscore': 0,
'recipient': self.recipient,
'stripped-text': self.body,
}
data.update(kwargs.pop('data', {}))
return super(TestProvisionNode, self).make_context(data=data, **kwargs)
def test_provision(self):
with self.make_context():
msg = message.ConferenceMessage()
utils.provision_node(self.conference, msg, self.node, self.user)
assert_true(self.node.is_public)
assert_in(self.conference.admins.first(), self.node.contributors)
assert_in('emailed', self.node.system_tags)
assert_in(self.conference.endpoint, self.node.system_tags)
assert_true(self.node.tags.filter(name=self.conference.endpoint).exists())
assert_not_in('spam', self.node.system_tags)
def test_provision_private(self):
self.conference.public_projects = False
self.conference.save()
with self.make_context():
msg = message.ConferenceMessage()
utils.provision_node(self.conference, msg, self.node, self.user)
assert_false(self.node.is_public)
assert_in(self.conference.admins.first(), self.node.contributors)
assert_in('emailed', self.node.system_tags)
assert_not_in('spam', self.node.system_tags)
def test_provision_spam(self):
with self.make_context(data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1}):
msg = message.ConferenceMessage()
utils.provision_node(self.conference, msg, self.node, self.user)
assert_false(self.node.is_public)
assert_in(self.conference.admins.first(), self.node.contributors)
assert_in('emailed', self.node.system_tags)
assert_in('spam', self.node.system_tags)
@mock.patch('website.util.waterbutler_url_for')
@mock.patch('website.conferences.utils.requests.put')
def test_upload(self, mock_put, mock_get_url):
mock_get_url.return_value = 'http://queen.com/'
self.attachment.filename = 'hammer-to-fall'
self.attachment.content_type = 'application/json'
utils.upload_attachment(self.user, self.node, self.attachment)
mock_get_url.assert_called_with(
'upload',
'osfstorage',
'/' + self.attachment.filename,
self.node,
_internal=True,
user=self.user,
)
mock_put.assert_called_with(
mock_get_url.return_value,
data=self.content,
)
@mock.patch('website.util.waterbutler_url_for')
@mock.patch('website.conferences.utils.requests.put')
def test_upload_no_file_name(self, mock_put, mock_get_url):
mock_get_url.return_value = 'http://queen.com/'
self.attachment.filename = ''
self.attachment.content_type = 'application/json'
utils.upload_attachment(self.user, self.node, self.attachment)
mock_get_url.assert_called_with(
'upload',
'osfstorage',
'/' + settings.MISSING_FILE_NAME,
self.node,
_internal=True,
user=self.user,
)
mock_put.assert_called_with(
mock_get_url.return_value,
data=self.content,
)
class TestMessage(ContextTestCase):
PUSH_CONTEXT = False
def test_verify_signature_valid(self):
with self.make_context():
msg = message.ConferenceMessage()
msg.verify_signature()
def test_verify_signature_invalid(self):
with self.make_context(data={'signature': 'fake'}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
with assert_raises(message.ConferenceError):
msg.verify_signature()
def test_is_spam_false_missing_headers(self):
ctx = self.make_context(
method='POST',
data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1},
)
with ctx:
msg = message.ConferenceMessage()
assert not msg.is_spam
def test_is_spam_false_all_headers(self):
ctx = self.make_context(
method='POST',
data={
'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE - 1,
'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0],
'X-Mailgun-Spf': message.SPF_PASS_VALUES[0],
},
)
with ctx:
msg = message.ConferenceMessage()
assert not msg.is_spam
def test_is_spam_true_sscore(self):
ctx = self.make_context(
method='POST',
data={'X-Mailgun-Sscore': message.SSCORE_MAX_VALUE + 1},
)
with ctx:
msg = message.ConferenceMessage()
assert msg.is_spam
def test_is_spam_true_dkim(self):
ctx = self.make_context(
method='POST',
data={'X-Mailgun-Dkim-Check-Result': message.DKIM_PASS_VALUES[0][::-1]},
)
with ctx:
msg = message.ConferenceMessage()
assert msg.is_spam
def test_is_spam_true_spf(self):
ctx = self.make_context(
method='POST',
data={'X-Mailgun-Spf': message.SPF_PASS_VALUES[0][::-1]},
)
with ctx:
msg = message.ConferenceMessage()
assert msg.is_spam
def test_subject(self):
ctx = self.make_context(
method='POST',
data={'subject': 'RE: Hip Hopera'},
)
with ctx:
msg = message.ConferenceMessage()
assert_equal(msg.subject, 'Hip Hopera')
def test_recipient(self):
address = 'test-conference@osf.io'
ctx = self.make_context(
method='POST',
data={'recipient': address},
)
with ctx:
msg = message.ConferenceMessage()
assert_equal(msg.recipient, address)
def test_text(self):
text = 'welcome to my nuclear family'
ctx = self.make_context(
method='POST',
data={'stripped-text': text},
)
with ctx:
msg = message.ConferenceMessage()
assert_equal(msg.text, text)
def test_sender_name(self):
names = [
(' Fred', 'Fred'),
(u'Me䬟', u'Me䬟'),
(u'fred@queen.com', u'fred@queen.com'),
(u'Fred <fred@queen.com>', u'Fred'),
(u'"Fred" <fred@queen.com>', u'Fred'),
]
for name in names:
with self.make_context(data={'from': name[0]}):
msg = message.ConferenceMessage()
assert_equal(msg.sender_name, name[1])
def test_sender_email(self):
emails = [
(u'fred@queen.com', u'fred@queen.com'),
(u'FRED@queen.com', u'fred@queen.com')
]
for email in emails:
with self.make_context(data={'from': email[0]}):
msg = message.ConferenceMessage()
assert_equal(msg.sender_email, email[1])
def test_route_invalid_pattern(self):
with self.make_context(data={'recipient': 'spam@osf.io'}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
with assert_raises(message.ConferenceError):
msg.route
def test_route_invalid_test(self):
recipient = '{0}conf-talk@osf.io'.format('' if settings.DEV_MODE else 'stage-')
with self.make_context(data={'recipient': recipient}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
with assert_raises(message.ConferenceError):
msg.route
def test_route_valid_alternate(self):
conf = ConferenceFactory(endpoint='chocolate', active=True)
conf.name = 'Chocolate Conference'
conf.field_names['submission2'] = 'data'
conf.save()
recipient = '{0}chocolate-data@osf.io'.format('test-' if settings.DEV_MODE else '')
with self.make_context(data={'recipient': recipient}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
assert_equal(msg.conference_name, 'chocolate')
assert_equal(msg.conference_category, 'data')
conf.__class__.remove_one(conf)
def test_route_valid_b(self):
recipient = '{0}conf-poster@osf.io'.format('test-' if settings.DEV_MODE else '')
with self.make_context(data={'recipient': recipient}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
assert_equal(msg.conference_name, 'conf')
assert_equal(msg.conference_category, 'poster')
def test_alternate_route_invalid(self):
recipient = '{0}chocolate-data@osf.io'.format('test-' if settings.DEV_MODE else '')
with self.make_context(data={'recipient': recipient}):
self.app.app.preprocess_request()
msg = message.ConferenceMessage()
with assert_raises(message.ConferenceError):
msg.route
def test_attachments_count_zero(self):
with self.make_context(data={'attachment-count': '0'}):
msg = message.ConferenceMessage()
assert_equal(msg.attachments, [])
def test_attachments_count_one(self):
content = 'slightly mad'
sio = StringIO(content)
ctx = self.make_context(
method='POST',
data={
'attachment-count': 1,
'attachment-1': (sio, 'attachment-1'),
},
)
with ctx:
msg = message.ConferenceMessage()
assert_equal(len(msg.attachments), 1)
assert_equal(msg.attachments[0].read(), content)
class TestConferenceEmailViews(OsfTestCase):
def test_redirect_to_meetings_url(self):
url = '/presentations/'
res = self.app.get(url)
assert_equal(res.status_code, 302)
res = res.follow()
assert_equal(res.request.path, '/meetings/')
def test_conference_submissions(self):
Node.remove()
conference1 = ConferenceFactory()
conference2 = ConferenceFactory()
# Create conference nodes
create_fake_conference_nodes(
3,
conference1.endpoint,
)
create_fake_conference_nodes(
2,
conference2.endpoint,
)
url = api_url_for('conference_submissions')
res = self.app.get(url)
assert_equal(res.json['success'], True)
def test_conference_plain_returns_200(self):
conference = ConferenceFactory()
url = web_url_for('conference_results__plain', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_conference_data(self):
conference = ConferenceFactory()
# Create conference nodes
n_conference_nodes = 3
create_fake_conference_nodes(
n_conference_nodes,
conference.endpoint,
)
# Create a non-conference node
ProjectFactory()
url = api_url_for('conference_data', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(len(res.json), n_conference_nodes)
def test_conference_data_url_upper(self):
conference = ConferenceFactory()
# Create conference nodes
n_conference_nodes = 3
create_fake_conference_nodes(
n_conference_nodes,
conference.endpoint,
)
# Create a non-conference node
ProjectFactory()
url = api_url_for('conference_data', meeting=conference.endpoint.upper())
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(len(res.json), n_conference_nodes)
def test_conference_data_tag_upper(self):
conference = ConferenceFactory()
# Create conference nodes
n_conference_nodes = 3
create_fake_conference_nodes(
n_conference_nodes,
conference.endpoint.upper(),
)
# Create a non-conference node
ProjectFactory()
url = api_url_for('conference_data', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(len(res.json), n_conference_nodes)
def test_conference_results(self):
conference = ConferenceFactory()
url = web_url_for('conference_results', meeting=conference.endpoint)
res = self.app.get(url)
assert_equal(res.status_code, 200)
class TestConferenceModel(OsfTestCase):
def test_endpoint_is_required(self):
with assert_raises(IntegrityError):
ConferenceFactory(endpoint=None, name=fake.company()).save()
def test_name_is_required(self):
with assert_raises(IntegrityError):
ConferenceFactory(endpoint='spsp2014', name=None).save()
def test_default_field_names(self):
conf = ConferenceFactory(endpoint='cookie', name='Cookies Conference')
conf.save()
assert_equal(conf.field_names['submission1'], 'poster')
assert_equal(conf.field_names['mail_subject'], 'Presentation title')
class TestConferenceIntegration(ContextTestCase):
@mock.patch('website.conferences.views.send_mail')
@mock.patch('website.conferences.utils.upload_attachments')
def test_integration(self, mock_upload, mock_send_mail):
fullname = 'John Deacon'
username = 'deacon@queen.com'
title = 'good songs'
conference = ConferenceFactory()
body = 'dragon on my back'
content = 'dragon attack'
recipient = '{0}{1}-poster@osf.io'.format(
'test-' if settings.DEV_MODE else '',
conference.endpoint,
)
self.app.post(
api_url_for('meeting_hook'),
{
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
'attachment-count': '1',
'from': '{0} <{1}>'.format(fullname, username),
'recipient': recipient,
'subject': title,
'stripped-text': body,
},
upload_files=[
('attachment-1', 'attachment-1', content),
],
)
assert_true(mock_upload.called)
users = User.find(Q('username', 'eq', username))
assert_equal(users.count(), 1)
nodes = Node.find(Q('title', 'eq', title))
assert_equal(nodes.count(), 1)
node = nodes[0]
assert_equal(node.get_wiki_page('home').content, body)
assert_true(mock_send_mail.called)
call_args, call_kwargs = mock_send_mail.call_args
assert_absolute(call_kwargs['conf_view_url'])
assert_absolute(call_kwargs['set_password_url'])
assert_absolute(call_kwargs['profile_url'])
assert_absolute(call_kwargs['file_url'])
assert_absolute(call_kwargs['node_url'])
@mock.patch('website.conferences.views.send_mail')
def test_integration_inactive(self, mock_send_mail):
conference = ConferenceFactory(active=False)
fullname = 'John Deacon'
username = 'deacon@queen.com'
title = 'good songs'
body = 'dragon on my back'
recipient = '{0}{1}-poster@osf.io'.format(
'test-' if settings.DEV_MODE else '',
conference.endpoint,
)
res = self.app.post(
api_url_for('meeting_hook'),
{
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
'attachment-count': '1',
'from': '{0} <{1}>'.format(fullname, username),
'recipient': recipient,
'subject': title,
'stripped-text': body,
},
expect_errors=True,
)
assert_equal(res.status_code, 406)
call_args, call_kwargs = mock_send_mail.call_args
assert_equal(call_args, (username, views.CONFERENCE_INACTIVE))
assert_equal(call_kwargs['fullname'], fullname)
assert_equal_urls(
call_kwargs['presentations_url'],
web_url_for('conference_view', _absolute=True),
)
@mock.patch('website.conferences.views.send_mail')
@mock.patch('website.conferences.utils.upload_attachments')
def test_integration_wo_full_name(self, mock_upload, mock_send_mail):
username = 'no_full_name@mail.com'
title = 'no full name only email'
conference = ConferenceFactory()
body = 'dragon on my back'
content = 'dragon attack'
recipient = '{0}{1}-poster@osf.io'.format(
'test-' if settings.DEV_MODE else '',
conference.endpoint,
)
self.app.post(
api_url_for('meeting_hook'),
{
'X-Mailgun-Sscore': 0,
'timestamp': '123',
'token': 'secret',
'signature': hmac.new(
key=settings.MAILGUN_API_KEY,
msg='{}{}'.format('123', 'secret'),
digestmod=hashlib.sha256,
).hexdigest(),
'attachment-count': '1',
'from': username,
'recipient': recipient,
'subject': title,
'stripped-text': body,
},
upload_files=[
('attachment-1', 'attachment-1', content),
],
)
assert_true(mock_upload.called)
users = User.find(Q('username', 'eq', username))
assert_equal(users.count(), 1)
nodes = Node.find(Q('title', 'eq', title))
assert_equal(nodes.count(), 1)
node = nodes[0]
assert_equal(node.get_wiki_page('home').content, body)
assert_true(mock_send_mail.called)
call_args, call_kwargs = mock_send_mail.call_args
assert_absolute(call_kwargs['conf_view_url'])
assert_absolute(call_kwargs['set_password_url'])
assert_absolute(call_kwargs['profile_url'])
assert_absolute(call_kwargs['file_url'])
assert_absolute(call_kwargs['node_url'])
|
cwisecarver/osf.io
|
tests/test_conferences.py
|
Python
|
apache-2.0
| 25,084 | 0.000518 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import signal
import subprocess
import sys
import tempfile
from profile_chrome import controllers
from profile_chrome import ui
from pylib import android_commands
from pylib import constants
from pylib.perf import perf_control
sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
'tools',
'telemetry'))
try:
# pylint: disable=F0401
from telemetry.core.platform.profiler import android_profiling_helper
from telemetry.util import support_binaries
except ImportError:
android_profiling_helper = None
support_binaries = None
_PERF_OPTIONS = [
  # Sample across all processes and CPUs so that the current CPU gets
  # recorded with each sample.
'--all-cpus',
# In perf 3.13 --call-graph requires an argument, so use the -g short-hand
# which does not.
'-g',
# Increase priority to avoid dropping samples. Requires root.
'--realtime', '80',
# Record raw samples to get CPU information.
'--raw-samples',
# Increase sampling frequency for better coverage.
'--freq', '2000',
]
class _PerfProfiler(object):
def __init__(self, device, perf_binary, categories):
self._device = device
self._output_file = android_commands.DeviceTempFile(
self._device.old_interface, prefix='perf_output')
self._log_file = tempfile.TemporaryFile()
    # TODO(jbudorick) Look at providing a way to un-hand-roll this once the
    # adb rewrite has fully landed.
device_param = (['-s', str(self._device)] if str(self._device) else [])
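    # Run 'perf record' on the device via 'adb shell'; samples are written to a
    # device-side temp file and pulled off the device later in PullResult().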
cmd = ['adb'] + device_param + \
['shell', perf_binary, 'record',
'--output', self._output_file.name] + _PERF_OPTIONS
if categories:
cmd += ['--event', ','.join(categories)]
self._perf_control = perf_control.PerfControl(self._device)
self._perf_control.SetPerfProfilingMode()
self._perf_process = subprocess.Popen(cmd,
stdout=self._log_file,
stderr=subprocess.STDOUT)
def SignalAndWait(self):
self._device.KillAll('perf', signum=signal.SIGINT)
self._perf_process.wait()
self._perf_control.SetDefaultPerfMode()
def _FailWithLog(self, msg):
self._log_file.seek(0)
log = self._log_file.read()
raise RuntimeError('%s. Log output:\n%s' % (msg, log))
def PullResult(self, output_path):
if not self._device.FileExists(self._output_file.name):
self._FailWithLog('Perf recorded no data')
perf_profile = os.path.join(output_path,
os.path.basename(self._output_file.name))
self._device.PullFile(self._output_file.name, perf_profile)
if not os.stat(perf_profile).st_size:
os.remove(perf_profile)
self._FailWithLog('Perf recorded a zero-sized file')
self._log_file.close()
self._output_file.close()
return perf_profile
class PerfProfilerController(controllers.BaseController):
def __init__(self, device, categories):
controllers.BaseController.__init__(self)
self._device = device
self._categories = categories
self._perf_binary = self._PrepareDevice(device)
self._perf_instance = None
def __repr__(self):
return 'perf profile'
@staticmethod
def IsSupported():
return bool(android_profiling_helper)
@staticmethod
def _PrepareDevice(device):
    if 'BUILDTYPE' not in os.environ:
os.environ['BUILDTYPE'] = 'Release'
return android_profiling_helper.PrepareDeviceForPerf(device)
@classmethod
def GetCategories(cls, device):
perf_binary = cls._PrepareDevice(device)
return device.RunShellCommand('%s list' % perf_binary)
def StartTracing(self, _):
self._perf_instance = _PerfProfiler(self._device,
self._perf_binary,
self._categories)
def StopTracing(self):
if not self._perf_instance:
return
self._perf_instance.SignalAndWait()
@staticmethod
def _GetInteractivePerfCommand(perfhost_path, perf_profile, symfs_dir,
required_libs, kallsyms):
cmd = '%s report -n -i %s --symfs %s --kallsyms %s' % (
os.path.relpath(perfhost_path, '.'), perf_profile, symfs_dir, kallsyms)
for lib in required_libs:
lib = os.path.join(symfs_dir, lib[1:])
if not os.path.exists(lib):
continue
objdump_path = android_profiling_helper.GetToolchainBinaryPath(
lib, 'objdump')
if objdump_path:
cmd += ' --objdump %s' % os.path.relpath(objdump_path, '.')
break
return cmd
def PullTrace(self):
symfs_dir = os.path.join(tempfile.gettempdir(),
os.path.expandvars('$USER-perf-symfs'))
if not os.path.exists(symfs_dir):
os.makedirs(symfs_dir)
required_libs = set()
# Download the recorded perf profile.
perf_profile = self._perf_instance.PullResult(symfs_dir)
required_libs = \
android_profiling_helper.GetRequiredLibrariesForPerfProfile(
perf_profile)
if not required_libs:
logging.warning('No libraries required by perf trace. Most likely there '
'are no samples in the trace.')
# Build a symfs with all the necessary libraries.
kallsyms = android_profiling_helper.CreateSymFs(self._device,
symfs_dir,
required_libs,
use_symlinks=False)
perfhost_path = support_binaries.FindPath(
android_profiling_helper.GetPerfhostName(), 'x86_64', 'linux')
ui.PrintMessage('\nNote: to view the profile in perf, run:')
ui.PrintMessage(' ' + self._GetInteractivePerfCommand(perfhost_path,
perf_profile, symfs_dir, required_libs, kallsyms))
# Convert the perf profile into JSON.
perf_script_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'third_party', 'perf_to_tracing.py')
json_file_name = os.path.basename(perf_profile)
with open(os.devnull, 'w') as dev_null, \
open(json_file_name, 'w') as json_file:
cmd = [perfhost_path, 'script', '-s', perf_script_path, '-i',
perf_profile, '--symfs', symfs_dir, '--kallsyms', kallsyms]
if subprocess.call(cmd, stdout=json_file, stderr=dev_null):
logging.warning('Perf data to JSON conversion failed. The result will '
'not contain any perf samples. You can still view the '
'perf data manually as shown above.')
return None
return json_file_name
|
CTSRD-SOAAP/chromium-42.0.2311.135
|
tools/profile_chrome/perf_controller.py
|
Python
|
bsd-3-clause
| 6,890 | 0.00566 |
# encoding: utf-8
import os
import re
import shutil
import subprocess
import tempfile
import textwrap
import time
from test.constant import (ARR_D, ARR_L, ARR_R, ARR_U, BS, ESC, PYTHON3,
SEQUENCES)
def wait_until_file_exists(file_path, times=None, interval=0.01):
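    """Polls every `interval` seconds until file_path exists; gives up after
    `times` checks when times is not None."""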
while times is None or times:
if os.path.exists(file_path):
return True
time.sleep(interval)
if times is not None:
times -= 1
return False
def read_text_file(filename):
"""Reads the contens of a text file."""
if PYTHON3:
return open(filename, 'r', encoding='utf-8').read()
else:
return open(filename, 'r').read()
def is_process_running(pid):
"""Returns true if a process with pid is running, false otherwise."""
# from
# http://stackoverflow.com/questions/568271/how-to-check-if-there-exists-a-process-with-a-given-pid
try:
os.kill(pid, 0)
except OSError:
return False
else:
return True
def silent_call(cmd):
"""Calls 'cmd' and returns the exit value."""
return subprocess.call(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
def create_directory(dirname):
"""Creates 'dirname' and its parents if it does not exist."""
try:
os.makedirs(dirname)
except OSError:
pass
class TempFileManager(object):
def __init__(self, name=''):
self._temp_dir = tempfile.mkdtemp(prefix='UltiSnipsTest_' + name)
def name_temp(self, file_path):
return os.path.join(self._temp_dir, file_path)
def write_temp(self, file_path, content):
abs_path = self.name_temp(file_path)
create_directory(os.path.dirname(abs_path))
if PYTHON3:
with open(abs_path, 'w', encoding='utf-8') as f:
f.write(content)
else:
with open(abs_path, 'w') as f:
f.write(content)
return abs_path
def unique_name_temp(self, suffix='', prefix=''):
file_handler, abspath = tempfile.mkstemp(
suffix, prefix, self._temp_dir)
os.close(file_handler)
os.remove(abspath)
return abspath
def clear_temp(self):
shutil.rmtree(self._temp_dir)
create_directory(self._temp_dir)
class VimInterface(TempFileManager):
def __init__(self, vim_executable, name):
TempFileManager.__init__(self, name)
self._vim_executable = vim_executable
def get_buffer_data(self):
buffer_path = self.unique_name_temp(prefix='buffer_')
self.send(ESC + ':w! %s\n' % buffer_path)
if wait_until_file_exists(buffer_path, 50):
return read_text_file(buffer_path)[:-1]
def send(self, s):
raise NotImplementedError()
def launch(self, config=[]):
pid_file = self.name_temp('vim.pid')
done_file = self.name_temp('loading_done')
if os.path.exists(done_file):
os.remove(done_file)
post_config = []
post_config.append('%s << EOF' % ('py3' if PYTHON3 else 'py'))
post_config.append('import vim')
post_config.append(
"with open('%s', 'w') as pid_file: pid_file.write(vim.eval('getpid()'))" %
pid_file)
post_config.append(
"with open('%s', 'w') as done_file: pass" %
done_file)
post_config.append('EOF')
config_path = self.write_temp('vim_config.vim',
textwrap.dedent(os.linesep.join(config + post_config) + '\n'))
# Note the space to exclude it from shell history.
self.send(""" %s -u %s\r\n""" % (self._vim_executable, config_path))
wait_until_file_exists(done_file)
self._vim_pid = int(open(pid_file, 'r').read())
def leave_with_wait(self):
self.send(3 * ESC + ':qa!\n')
while is_process_running(self._vim_pid):
time.sleep(.05)
class VimInterfaceTmux(VimInterface):
def __init__(self, vim_executable, session):
VimInterface.__init__(self, vim_executable, 'Tmux')
self.session = session
self._check_version()
def send(self, s):
# I did not find any documentation on what needs escaping when sending
# to tmux, but it seems like this is all that is needed for now.
s = s.replace(';', r'\;')
if PYTHON3:
s = s.encode('utf-8')
silent_call(['tmux', 'send-keys', '-t', self.session, '-l', s])
def _check_version(self):
stdout, _ = subprocess.Popen(['tmux', '-V'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
if PYTHON3:
stdout = stdout.decode('utf-8')
        m = re.match(r"tmux (\d+)\.(\d+)", stdout)
if not m or not (int(m.group(1)), int(m.group(2))) >= (1, 8):
raise RuntimeError(
'Need at least tmux 1.8, you have %s.' %
stdout.strip())
class VimInterfaceWindows(VimInterface):
BRACES = re.compile('([}{])')
WIN_ESCAPES = ['+', '^', '%', '~', '[', ']', '<', '>', '(', ')']
WIN_REPLACES = [
(BS, '{BS}'),
(ARR_L, '{LEFT}'),
(ARR_R, '{RIGHT}'),
(ARR_U, '{UP}'),
(ARR_D, '{DOWN}'),
('\t', '{TAB}'),
('\n', '~'),
(ESC, '{ESC}'),
# On my system ` waits for a second keystroke, so `+SPACE = "`". On
        # most systems, `+Space = "` ". I work around this by sending the host
# ` as `+_+BS. Awkward, but the only way I found to get this working.
('`', '`_{BS}'),
('´', '´_{BS}'),
('{^}', '{^}_{BS}'),
]
def __init__(self):
# import windows specific modules
import win32com.client
import win32gui
self.win32gui = win32gui
self.shell = win32com.client.Dispatch('WScript.Shell')
def is_focused(self, title=None):
cur_title = self.win32gui.GetWindowText(
self.win32gui.GetForegroundWindow())
if (title or '- GVIM') in cur_title:
return True
return False
def focus(self, title=None):
if not self.shell.AppActivate(title or '- GVIM'):
raise Exception('Failed to switch to GVim window')
time.sleep(1)
def convert_keys(self, keys):
keys = self.BRACES.sub(r"{\1}", keys)
for k in self.WIN_ESCAPES:
keys = keys.replace(k, '{%s}' % k)
for f, r in self.WIN_REPLACES:
keys = keys.replace(f, r)
return keys
def send(self, keys):
keys = self.convert_keys(keys)
if not self.is_focused():
time.sleep(2)
self.focus()
if not self.is_focused():
# This is the only way I can find to stop test execution
raise KeyboardInterrupt('Failed to focus GVIM')
self.shell.SendKeys(keys)
|
wincent/ultisnips
|
test/vim_interface.py
|
Python
|
gpl-3.0
| 6,882 | 0.000436 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/src/calendar_edit.ui'
#
# Created: Wed Nov 17 12:05:53 2010
# by: PyQt4 UI code generator 4.7.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_CalendarEntryEdit(object):
def setupUi(self, CalendarEntryEdit):
CalendarEntryEdit.setObjectName("CalendarEntryEdit")
CalendarEntryEdit.resize(543, 313)
CalendarEntryEdit.setWindowTitle("New calendar entry")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/view-calendar"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
CalendarEntryEdit.setWindowIcon(icon)
self.verticalLayout = QtGui.QVBoxLayout(CalendarEntryEdit)
self.verticalLayout.setContentsMargins(-1, -1, -1, 6)
self.verticalLayout.setObjectName("verticalLayout")
self.formLayout = QtGui.QFormLayout()
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.ExpandingFieldsGrow)
self.formLayout.setContentsMargins(10, 10, 15, 15)
self.formLayout.setHorizontalSpacing(3)
self.formLayout.setVerticalSpacing(6)
self.formLayout.setObjectName("formLayout")
self.label = QtGui.QLabel(CalendarEntryEdit)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.label.setFont(font)
self.label.setObjectName("label")
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label)
self.titleLine = QtGui.QLineEdit(CalendarEntryEdit)
self.titleLine.setObjectName("titleLine")
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.titleLine)
self.label_2 = QtGui.QLabel(CalendarEntryEdit)
self.label_2.setObjectName("label_2")
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_2)
self.locationLine = QtGui.QLineEdit(CalendarEntryEdit)
self.locationLine.setObjectName("locationLine")
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.locationLine)
self.label_3 = QtGui.QLabel(CalendarEntryEdit)
self.label_3.setObjectName("label_3")
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_3)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.startDate = QtGui.QDateEdit(CalendarEntryEdit)
self.startDate.setCalendarPopup(True)
self.startDate.setObjectName("startDate")
self.horizontalLayout_5.addWidget(self.startDate)
self.startTime = QtGui.QTimeEdit(CalendarEntryEdit)
self.startTime.setObjectName("startTime")
self.horizontalLayout_5.addWidget(self.startTime)
self.formLayout.setLayout(2, QtGui.QFormLayout.FieldRole, self.horizontalLayout_5)
self.label_4 = QtGui.QLabel(CalendarEntryEdit)
self.label_4.setObjectName("label_4")
self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_4)
self.horizontalLayout_6 = QtGui.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.endDate = QtGui.QDateEdit(CalendarEntryEdit)
self.endDate.setCalendarPopup(True)
self.endDate.setObjectName("endDate")
self.horizontalLayout_6.addWidget(self.endDate)
self.endTime = QtGui.QTimeEdit(CalendarEntryEdit)
self.endTime.setObjectName("endTime")
self.horizontalLayout_6.addWidget(self.endTime)
self.formLayout.setLayout(3, QtGui.QFormLayout.FieldRole, self.horizontalLayout_6)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setContentsMargins(4, -1, -1, -1)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.recurrenceLabel = QtGui.QLabel(CalendarEntryEdit)
self.recurrenceLabel.setObjectName("recurrenceLabel")
self.horizontalLayout_4.addWidget(self.recurrenceLabel)
self.recurrenceButton = QtGui.QPushButton(CalendarEntryEdit)
self.recurrenceButton.setObjectName("recurrenceButton")
self.horizontalLayout_4.addWidget(self.recurrenceButton)
self.formLayout.setLayout(4, QtGui.QFormLayout.FieldRole, self.horizontalLayout_4)
self.label_6 = QtGui.QLabel(CalendarEntryEdit)
self.label_6.setObjectName("label_6")
self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.label_6)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setContentsMargins(4, -1, -1, -1)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.reminderCheckBox = QtGui.QCheckBox(CalendarEntryEdit)
self.reminderCheckBox.setObjectName("reminderCheckBox")
self.horizontalLayout_3.addWidget(self.reminderCheckBox)
self.reminderStack = QtGui.QStackedWidget(CalendarEntryEdit)
self.reminderStack.setObjectName("reminderStack")
self.basicReminderWidget = QtGui.QWidget()
self.basicReminderWidget.setObjectName("basicReminderWidget")
self.horizontalLayout = QtGui.QHBoxLayout(self.basicReminderWidget)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setContentsMargins(0, 0, 5, 0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.reminderTimeBox = QtGui.QSpinBox(self.basicReminderWidget)
self.reminderTimeBox.setEnabled(False)
self.reminderTimeBox.setMinimumSize(QtCore.QSize(70, 0))
self.reminderTimeBox.setMaximum(500)
self.reminderTimeBox.setObjectName("reminderTimeBox")
self.horizontalLayout.addWidget(self.reminderTimeBox)
self.reminderUnitBox = QtGui.QComboBox(self.basicReminderWidget)
self.reminderUnitBox.setEnabled(False)
self.reminderUnitBox.setMinimumSize(QtCore.QSize(110, 0))
self.reminderUnitBox.setObjectName("reminderUnitBox")
self.horizontalLayout.addWidget(self.reminderUnitBox)
self.reminderStack.addWidget(self.basicReminderWidget)
self.advancedReminderWidget = QtGui.QWidget()
self.advancedReminderWidget.setObjectName("advancedReminderWidget")
self.horizontalLayout_2 = QtGui.QHBoxLayout(self.advancedReminderWidget)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setContentsMargins(0, 0, 5, 0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.reminderDateTime = QtGui.QDateTimeEdit(self.advancedReminderWidget)
self.reminderDateTime.setEnabled(False)
self.reminderDateTime.setObjectName("reminderDateTime")
self.horizontalLayout_2.addWidget(self.reminderDateTime)
self.reminderStack.addWidget(self.advancedReminderWidget)
self.horizontalLayout_3.addWidget(self.reminderStack)
self.reminderAdvancedButton = QtGui.QPushButton(CalendarEntryEdit)
self.reminderAdvancedButton.setEnabled(False)
self.reminderAdvancedButton.setCheckable(True)
self.reminderAdvancedButton.setObjectName("reminderAdvancedButton")
self.horizontalLayout_3.addWidget(self.reminderAdvancedButton)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem)
self.formLayout.setLayout(5, QtGui.QFormLayout.FieldRole, self.horizontalLayout_3)
self.label_7 = QtGui.QLabel(CalendarEntryEdit)
self.label_7.setObjectName("label_7")
self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.label_7)
self.priorityBox = QtGui.QComboBox(CalendarEntryEdit)
self.priorityBox.setMinimumSize(QtCore.QSize(150, 0))
self.priorityBox.setObjectName("priorityBox")
self.formLayout.setWidget(6, QtGui.QFormLayout.FieldRole, self.priorityBox)
self.label_8 = QtGui.QLabel(CalendarEntryEdit)
self.label_8.setObjectName("label_8")
self.formLayout.setWidget(7, QtGui.QFormLayout.LabelRole, self.label_8)
self.accessBox = QtGui.QComboBox(CalendarEntryEdit)
self.accessBox.setMinimumSize(QtCore.QSize(150, 0))
self.accessBox.setObjectName("accessBox")
self.formLayout.setWidget(7, QtGui.QFormLayout.FieldRole, self.accessBox)
self.verticalLayout.addLayout(self.formLayout)
self.buttonBox = QtGui.QDialogButtonBox(CalendarEntryEdit)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Discard|QtGui.QDialogButtonBox.Reset|QtGui.QDialogButtonBox.Save)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox)
self.label.setBuddy(self.titleLine)
self.label_2.setBuddy(self.locationLine)
self.label_3.setBuddy(self.startDate)
self.label_4.setBuddy(self.endDate)
self.recurrenceLabel.setBuddy(self.recurrenceButton)
self.label_6.setBuddy(self.reminderCheckBox)
self.label_7.setBuddy(self.priorityBox)
self.label_8.setBuddy(self.accessBox)
self.retranslateUi(CalendarEntryEdit)
self.reminderStack.setCurrentIndex(0)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), CalendarEntryEdit.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), CalendarEntryEdit.reject)
QtCore.QObject.connect(self.reminderCheckBox, QtCore.SIGNAL("toggled(bool)"), self.reminderTimeBox.setEnabled)
QtCore.QObject.connect(self.reminderCheckBox, QtCore.SIGNAL("toggled(bool)"), self.reminderUnitBox.setEnabled)
QtCore.QObject.connect(self.reminderCheckBox, QtCore.SIGNAL("toggled(bool)"), self.reminderAdvancedButton.setEnabled)
QtCore.QObject.connect(self.titleLine, QtCore.SIGNAL("textEdited(QString)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.locationLine, QtCore.SIGNAL("textEdited(QString)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.startDate, QtCore.SIGNAL("dateTimeChanged(QDateTime)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.startTime, QtCore.SIGNAL("dateTimeChanged(QDateTime)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.endDate, QtCore.SIGNAL("dateTimeChanged(QDateTime)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.endTime, QtCore.SIGNAL("dateTimeChanged(QDateTime)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.reminderCheckBox, QtCore.SIGNAL("clicked()"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.priorityBox, QtCore.SIGNAL("currentIndexChanged(int)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.accessBox, QtCore.SIGNAL("currentIndexChanged(int)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.reminderTimeBox, QtCore.SIGNAL("editingFinished()"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.reminderUnitBox, QtCore.SIGNAL("currentIndexChanged(int)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.reminderDateTime, QtCore.SIGNAL("dateTimeChanged(QDateTime)"), CalendarEntryEdit.entryChanged)
QtCore.QObject.connect(self.reminderCheckBox, QtCore.SIGNAL("toggled(bool)"), self.reminderDateTime.setEnabled)
QtCore.QObject.connect(self.reminderAdvancedButton, QtCore.SIGNAL("toggled(bool)"), CalendarEntryEdit.setAdvancedReminder)
QtCore.QMetaObject.connectSlotsByName(CalendarEntryEdit)
CalendarEntryEdit.setTabOrder(self.titleLine, self.locationLine)
CalendarEntryEdit.setTabOrder(self.locationLine, self.startDate)
CalendarEntryEdit.setTabOrder(self.startDate, self.startTime)
CalendarEntryEdit.setTabOrder(self.startTime, self.endDate)
CalendarEntryEdit.setTabOrder(self.endDate, self.endTime)
CalendarEntryEdit.setTabOrder(self.endTime, self.recurrenceButton)
CalendarEntryEdit.setTabOrder(self.recurrenceButton, self.reminderCheckBox)
CalendarEntryEdit.setTabOrder(self.reminderCheckBox, self.reminderTimeBox)
CalendarEntryEdit.setTabOrder(self.reminderTimeBox, self.reminderUnitBox)
CalendarEntryEdit.setTabOrder(self.reminderUnitBox, self.reminderAdvancedButton)
CalendarEntryEdit.setTabOrder(self.reminderAdvancedButton, self.priorityBox)
CalendarEntryEdit.setTabOrder(self.priorityBox, self.accessBox)
CalendarEntryEdit.setTabOrder(self.accessBox, self.buttonBox)
def retranslateUi(self, CalendarEntryEdit):
self.label.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Title:", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Location:", None, QtGui.QApplication.UnicodeUTF8))
self.label_3.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Start:", None, QtGui.QApplication.UnicodeUTF8))
self.label_4.setText(QtGui.QApplication.translate("CalendarEntryEdit", "End:", None, QtGui.QApplication.UnicodeUTF8))
self.recurrenceLabel.setText(QtGui.QApplication.translate("CalendarEntryEdit", "No recurrence", None, QtGui.QApplication.UnicodeUTF8))
self.recurrenceButton.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Edit...", None, QtGui.QApplication.UnicodeUTF8))
self.label_6.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Reminder:", None, QtGui.QApplication.UnicodeUTF8))
self.reminderCheckBox.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Reminder:", None, QtGui.QApplication.UnicodeUTF8))
self.reminderAdvancedButton.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Advanced...", None, QtGui.QApplication.UnicodeUTF8))
self.label_7.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Priority:", None, QtGui.QApplication.UnicodeUTF8))
self.label_8.setText(QtGui.QApplication.translate("CalendarEntryEdit", "Access:", None, QtGui.QApplication.UnicodeUTF8))
import resource_rc
|
ypid/series60-remote
|
pc/ui/ui_calendar_edit.py
|
Python
|
gpl-2.0
| 14,073 | 0.003837 |
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
"""Create AWS Lambdas from Python code.
See https://www.pantsbuild.org/docs/awslambda-python.
"""
from pants.backend.awslambda.python import rules as python_rules
from pants.backend.awslambda.python.target_types import PythonAWSLambda
from pants.backend.awslambda.python.target_types import rules as target_types_rules
def rules():
return (*python_rules.rules(), *target_types_rules())
def target_types():
return [PythonAWSLambda]
|
benjyw/pants
|
src/python/pants/backend/awslambda/python/register.py
|
Python
|
apache-2.0
| 577 | 0.001733 |
"""
Author: Dr. John T. Hwang <hwangjt@umich.edu>
This package is distributed under New BSD license.
Full-factorial sampling.
"""
import numpy as np
from smt.sampling_methods.sampling_method import SamplingMethod
class FullFactorial(SamplingMethod):
def _initialize(self):
self.options.declare(
"weights",
values=None,
types=(list, np.ndarray),
desc="relative sampling weights for each nx dimensions",
)
self.options.declare(
"clip",
default=False,
types=bool,
desc="round number of samples to the sampling number product of each nx dimensions (> asked nt)",
)
def _compute(self, nt):
"""
Compute the requested number of sampling points.
Arguments
---------
nt : int
Number of points requested.
Returns
-------
ndarray[nt, nx]
The sampling locations in the input space.
"""
xlimits = self.options["xlimits"]
nx = xlimits.shape[0]
if self.options["weights"] is None:
weights = np.ones(nx) / nx
else:
weights = np.atleast_1d(self.options["weights"])
weights /= np.sum(weights)
num_list = np.ones(nx, int)
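        # Greedy allocation: repeatedly increment the count of the dimension
        # whose share of samples lags its weight the most, until the full grid
        # holds at least nt points.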
while np.prod(num_list) < nt:
ind = np.argmax(weights - num_list / np.sum(num_list))
num_list[ind] += 1
lins_list = [np.linspace(0.0, 1.0, num_list[kx]) for kx in range(nx)]
x_list = np.meshgrid(*lins_list, indexing="ij")
if self.options["clip"]:
nt = np.prod(num_list)
x = np.zeros((nt, nx))
for kx in range(nx):
x[:, kx] = x_list[kx].reshape(np.prod(num_list))[:nt]
return x
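# A minimal usage sketch (an assumption based on SMT's documented sampling API,
# where a sampling instance is called with the number of points):
#   xlimits = np.array([[0.0, 1.0], [0.0, 2.0]])
#   sampling = FullFactorial(xlimits=xlimits)
#   x = sampling(9)  # ~9 points on a full-factorial grid over xlimits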
|
bouhlelma/smt
|
smt/sampling_methods/full_factorial.py
|
Python
|
bsd-3-clause
| 1,806 | 0.000554 |
# -*- coding: utf-8 -*-
# Copyright 2009 James Hensman
# Licensed under the Gnu General Public license, see COPYING
from numpy import matlib as ml  # required by PCA_EM_matrix below
import numpy as np
from scipy import linalg
class PCA_EM_matrix:
def __init__(self,data,target_dim):
"""Maximum likelihood PCA by the EM algorithm"""
self.X = ml.matrix(data)
self.N,self.d = self.X.shape
self.q = target_dim
def learn(self,niters):
self.mu = self.X.mean(0).reshape(self.d,1)#ML solution for mu
self.X2 = self.X - self.mu.T
self.xxTsum = ml.sum([x*x.T for x in self.X2])#precalculate for speed
        #initialise parameters:
self.W = ml.randn(self.d,self.q)
self.sigma2 = 1.2
for i in range(niters):
#print self.sigma2
self.E_step()
self.M_step()
def E_step(self):
M = self.W.T*self.W + ml.eye(self.q)*self.sigma2
M_inv = ml.linalg.inv(M)
self.m_Z = (M_inv*self.W.T*self.X2.T).T
self.S_z = M_inv*self.sigma2
def M_step(self):
zzT = self.m_Z.T*self.m_Z + self.N*self.S_z
self.W = self.X2.T*self.m_Z*ml.linalg.inv(zzT)
WTW = self.W.T*self.W
self.sigma2 = self.xxTsum - 2*ml.multiply(self.m_Z*self.W.T,self.X2).sum() + ml.trace(zzT*WTW)
#self.sigma2 = self.xxTsum - 2*ml.trace(self.m_Z*self.W.T*self.X2.T) + ml.trace(zzT*WTW)
#self.sigma2 = self.xxTsum + ml.sum([- 2*z*self.W.T*x.T + ml.trace((z.T*z + self.S_z)*WTW) for z,x in zip(self.m_Z, self.X2)])
self.sigma2 /= self.N*self.d
class PCA_EM:
def __init__(self,data,target_dim):
"""Maximum likelihood PCA by the EM algorithm"""
self.X = np.array(data)
self.N,self.d = self.X.shape
self.q = target_dim
def learn(self,niters):
self.mu = self.X.mean(0).reshape(self.d,1)#ML solution for mu
self.X2 = self.X - self.mu.T
self.xxTsum = np.sum([np.dot(x,x.T) for x in self.X2])#precalculate for speed
        #initialise parameters:
self.W = np.random.randn(self.d,self.q)
self.sigma2 = 1.2
for i in range(niters):
#print self.sigma2
self.E_step()
self.M_step()
def E_step(self):
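        # Probabilistic-PCA E-step (Tipping & Bishop): with M = W'W + sigma^2*I,
        # the latent posterior is N(m_Z, S_z), where m_Z = M^{-1} W' (x - mu) and
        # S_z = sigma^2 * M^{-1}; solved via Cholesky rather than an explicit inverse.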
M = np.dot(self.W.T,self.W) + np.eye(self.q)*self.sigma2
#M_inv = np.linalg.inv(M)
#self.m_Z = np.dot(M_inv,np.dot(self.W.T,self.X2.T)).T
#self.S_z = M_inv*self.sigma2
M_chol = linalg.cholesky(M)
M_inv = linalg.cho_solve((M_chol,1),np.eye(self.q))
self.m_Z = linalg.cho_solve((M_chol,1),np.dot(self.W.T,self.X2.T)).T
self.S_z = M_inv*self.sigma2
def M_step(self):
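        # M-step: W <- X' E[Z] (sum_n E[z_n z_n'])^{-1}, with zzT accumulating
        # E[z z'] over all points; sigma^2 is the average residual variance.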
zzT = np.dot(self.m_Z.T,self.m_Z) + self.N*self.S_z
#self.W = np.dot(np.dot(self.X2.T,self.m_Z),np.linalg.inv(zzT))
zzT_chol = linalg.cholesky(zzT)
self.W = linalg.cho_solve((zzT_chol,0),np.dot(self.m_Z.T,self.X2)).T
WTW = np.dot(self.W.T,self.W)
self.sigma2 = self.xxTsum - 2*np.sum(np.dot(self.m_Z,self.W.T)*self.X2) + np.trace(np.dot(zzT,WTW))
self.sigma2 /= self.N*self.d
class PCA_EM_missing:
def __init__(self,data,target_dim):
"""Maximum likelihood PCA by the EM algorithm, allows for missing data. uses a masked array to 'hide' the elements of X that are NaN"""
self.X = np.array(data)
self.imask,self.jmask = np.nonzero(np.isnan(self.X))#positions that are missing.
        self.indices = [np.nonzero(~np.isnan(x))[0] for x in self.X] #positions that are not missing...
self.N,self.d = self.X.shape
self.q = target_dim
def learn(self,niters):
self.Xreconstruct = self.X.copy()
self.Xreconstruct[self.imask,self.jmask] = 0
self.mu = np.sum(self.Xreconstruct,0)/(self.X.shape[0]-np.sum(np.isnan(self.X),0))
self.X2 = self.X.copy()-self.mu
self.X2reconstruct = self.X.copy() - self.mu
        #initialise parameters:
self.W = np.random.randn(self.d,self.q)
self.sigma2 = 1.2
#pre-allocate self.m_Z and self.S_Z
self.m_Z = np.zeros((self.X2.shape[0],self.q))
self.S_Z = np.zeros((self.X2.shape[0],self.q,self.q))
for i in range(niters):
print i,self.sigma2
self.E_step()
self.M_step()
self.Xreconstruct = self.X2reconstruct + self.mu
def E_step(self):
""" This should handle missing data, but needs testing (TODO)"""
Ms = np.zeros((self.X.shape[0],self.q,self.q)) #M is going to be different for (potentially) every data point
for m,x,i,mz,sz in zip(Ms,self.X2,self.indices,self.m_Z,self.S_Z):
W = self.W.take(i,0)# get relevant bits of W
x2 = np.array(x).take(i) # get relevant bits of x
m[:,:] = np.dot(W.T,W) + np.eye(self.q)*self.sigma2
mchol = linalg.cholesky(m)
minv = linalg.cho_solve((mchol,1),np.eye(self.q))
mz[:] = linalg.cho_solve((mchol,1),np.dot(W.T,x2.reshape(i.size,1))).T
sz[:,:] = minv*self.sigma2
#calculate reconstructed X values
self.X2reconstruct[self.imask,self.jmask] = np.dot(self.m_Z,self.W.T)[self.imask,self.jmask]
        self.xxTsum = np.sum(np.square(self.X2reconstruct))# can't be pre-calculated in the missing data version :(
def M_step(self):
""" This should handle missing data - needs testing (TODO)"""
zzT = np.dot(self.m_Z.T,self.m_Z) + np.sum(self.S_Z,0)
#self.W = np.dot(np.dot(self.X2.T,self.m_Z),np.linalg.inv(zzT))
zzT_chol = linalg.cholesky(zzT)
self.W = linalg.cho_solve((zzT_chol,0),np.dot(self.m_Z.T,self.X2reconstruct)).T
WTW = np.dot(self.W.T,self.W)
self.sigma2 = self.xxTsum - 2*np.sum(np.dot(self.m_Z,self.W.T)*self.X2reconstruct) + np.trace(np.dot(zzT,WTW))
self.sigma2 /= self.N*self.d
if __name__=='__main__':
q=5#latent dimensions
d=15# observed dimensions
N=500
missing_pc = 100 # percentage of the data points to be 'missing'
truesigma = .002
niters = 300
phases = np.random.rand(1,q)*2*np.pi
frequencies = np.random.randn(1,q)*2
latents = np.sin(np.linspace(0,12,N).reshape(N,1)*frequencies-phases)
trueW = np.random.randn(d,q)
observed = np.dot(latents,trueW.T) + np.random.randn(N,d)*truesigma
#PCA without missing values
a = PCA_EM(observed,q)
a.learn(niters)
#a missing data problem
Nmissing = int(N*missing_pc/100)
observed2 = observed.copy()
missingi = np.argsort(np.random.rand(N))[:Nmissing]
missingj = np.random.randint(0,d-q,Nmissing)#last q columns will be complete
observed2[missingi,missingj] = np.NaN
b = PCA_EM_missing(observed2,q)
b.learn(niters)
from hinton import hinton
import pylab
colours = np.arange(N)# to colour the dots with
hinton(linalg.qr(trueW.T)[1].T)
pylab.title('true transformation')
pylab.figure()
hinton(linalg.qr(a.W.T)[1].T)
pylab.title('reconstructed transformation')
pylab.figure()
hinton(linalg.qr(b.W.T)[1].T)
pylab.title('reconstructed transformation (missing data)')
pylab.figure()
pylab.subplot(3,1,1)
pylab.plot(latents)
pylab.title('true latents')
pylab.subplot(3,1,2)
pylab.plot(a.m_Z)
pylab.title('reconstructed latents')
pylab.subplot(3,1,3)
pylab.plot(b.m_Z)
pylab.title('reconstructed latents (missing data)')
pylab.figure()
pylab.subplot(2,1,1)
pylab.plot(observed)
pylab.title('Observed values')
pylab.subplot(2,1,2)
pylab.plot(observed2,linewidth=2,marker='.')
pylab.plot(b.Xreconstruct)
pylab.show()
|
jameshensman/pythonGPLVM
|
PCA_EM.py
|
Python
|
gpl-3.0
| 6,841 | 0.05087 |
from twisted.internet import error as TxErrors
import couchbase._libcouchbase as LCB
from couchbase._libcouchbase import (
Event, TimerEvent, IOEvent,
LCB_READ_EVENT, LCB_WRITE_EVENT, LCB_RW_EVENT,
PYCBC_EVSTATE_ACTIVE,
PYCBC_EVACTION_WATCH,
PYCBC_EVACTION_UNWATCH,
PYCBC_EVACTION_CLEANUP
)
class TxIOEvent(IOEvent):
"""
IOEvent is a class implemented in C. It exposes
a 'fileno()' method, so we don't have to.
"""
__slots__ = []
def __init__(self):
super(TxIOEvent, self).__init__()
def doRead(self):
self.ready_r()
def doWrite(self):
self.ready_w()
def connectionLost(self, reason):
if self.state == PYCBC_EVSTATE_ACTIVE:
self.ready_w()
def logPrefix(self):
return "Couchbase IOEvent"
class TxTimer(TimerEvent):
__slots__ = ['_txev', 'lcb_active']
def __init__(self):
super(TxTimer, self).__init__()
self.lcb_active = False
self._txev = None
def _timer_wrap(self):
if not self.lcb_active:
return
self.lcb_active = False
self.ready(0)
def schedule(self, usecs, reactor):
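        # libcouchbase passes timeouts in microseconds; Twisted's
        # callLater/reset expect seconds.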
nsecs = usecs / 1000000.0
if not self._txev or not self._txev.active():
self._txev = reactor.callLater(nsecs, self._timer_wrap)
else:
self._txev.reset(nsecs)
self.lcb_active = True
def cancel(self):
self.lcb_active = False
def cleanup(self):
if not self._txev:
return
try:
self._txev.cancel()
except (TxErrors.AlreadyCalled, TxErrors.AlreadyCancelled):
pass
self._txev = None
class v0Iops(object):
"""
IOPS Implementation to be used with Twisted's "FD" based reactors
"""
__slots__ = [ 'reactor', 'is_sync', '_stop' ]
def __init__(self, reactor, is_sync=False):
self.reactor = reactor
self.is_sync = is_sync
self._stop = False
def update_event(self, event, action, flags):
"""
Called by libcouchbase to add/remove event watchers
"""
if action == PYCBC_EVACTION_UNWATCH:
if event.flags & LCB_READ_EVENT:
self.reactor.removeReader(event)
if event.flags & LCB_WRITE_EVENT:
self.reactor.removeWriter(event)
elif action == PYCBC_EVACTION_WATCH:
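            # Watch exactly the requested event set: add watchers for set bits
            # and drop watchers for cleared bits.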
if flags & LCB_READ_EVENT:
self.reactor.addReader(event)
if flags & LCB_WRITE_EVENT:
self.reactor.addWriter(event)
if flags & LCB_READ_EVENT == 0:
self.reactor.removeReader(event)
if flags & LCB_WRITE_EVENT == 0:
self.reactor.removeWriter(event)
def update_timer(self, timer, action, usecs):
"""
Called by libcouchbase to add/remove timers
"""
if action == PYCBC_EVACTION_WATCH:
timer.schedule(usecs, self.reactor)
elif action == PYCBC_EVACTION_UNWATCH:
timer.cancel()
elif action == PYCBC_EVACTION_CLEANUP:
timer.cleanup()
def io_event_factory(self):
return TxIOEvent()
def timer_event_factory(self):
return TxTimer()
def start_watching(self):
"""
Start/Stop operations. This is a no-op in twisted because
it's a continuously running async loop
"""
if not self.is_sync:
return
self._stop = False
while not self._stop:
self.reactor.doIteration(0)
def stop_watching(self):
self._stop = True
|
mnunberg/couchbase-python-client
|
txcouchbase/iops.py
|
Python
|
apache-2.0
| 3,633 | 0.001376 |
"""Deprecated import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import
warn_deprecated_import('certificates.apps', 'lms.djangoapps.certificates.apps')
from lms.djangoapps.certificates.apps import *
|
eduNEXT/edunext-platform
|
import_shims/lms/certificates/apps.py
|
Python
|
agpl-3.0
| 374 | 0.008021 |
# Copyright (c) 2021, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from csv import DictReader, DictWriter
from uuid import uuid4
import logging, sys
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import Group
from promort.settings import DEFAULT_GROUPS
from predictions_manager.models import Prediction
from reviews_manager.models import PredictionReview
logger = logging.getLogger('promort_commands')
class Command(BaseCommand):
help = 'build Predictions reviews worklist'
def add_arguments(self, parser):
parser.add_argument('--prediction-type', choices=['TUMOR', 'GLEASON'], type=str, dest='prediction_type',
help='the type of the Prediction objects that are going to be reviewed')
parser.add_argument('--worklist-file', dest='worklist', type=str, default=None,
help='a CSV file containing the worklist, if not present reviews will be assigned randomly')
parser.add_argument('--allow-duplicated', action='store_true', dest='allow_duplicated',
help='create worklist even for predictions that already have a related review')
parser.add_argument('--report-file', dest='report_file', type=str, default=None,
help='a CSV file containing a report of the created prediction reviews')
def _get_prediction_reviews_manager_users(self):
prev_manager_group = Group.objects.get(name=DEFAULT_GROUPS['prediction_manager']['name'])
return prev_manager_group.user_set.all()
def _get_predictions_list(self, prediction_type):
return Prediction.objects.filter(type=prediction_type, review_required=True).all()
def _check_duplicated(self, prediction, reviewer):
annotation_objs = PredictionReview.objects.filter(prediction=prediction, reviewer=reviewer)
if annotation_objs.count() > 0:
logger.info('There are already %d reviews for prediction %s assigned to user %s',
annotation_objs.count(), prediction.label, reviewer.username)
return True
else:
return False
def _create_prediction_annotation(self, prediction, reviewer, allow_duplicated):
if not allow_duplicated:
if self._check_duplicated(prediction, reviewer):
return None
prev_obj = PredictionReview(
label=uuid4().hex,
prediction=prediction,
slide=prediction.slide,
reviewer=reviewer
)
prev_obj.save()
return {
'review_id': prev_obj.id,
'slide': prev_obj.slide.id,
'prediction': prev_obj.prediction.label,
'review_label': prev_obj.label,
'reviewer': prev_obj.reviewer.username
}
def create_random_worklist(self, prediction_type, allow_duplicated, report_file=None):
logger.info('Creating RANDOM worklist')
prediction_rev_managers = self._get_prediction_reviews_manager_users()
if len(prediction_rev_managers) < 1:
raise CommandError('No prediction managers configured')
predictions = self._get_predictions_list(prediction_type)
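        # Assign predictions to review managers round-robin: the reviewer index
        # is i modulo the number of managers.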
for i, pred in enumerate(predictions):
logger.info('Processing prediction %s', pred.label)
pred_report = self._create_prediction_annotation(pred,
prediction_rev_managers[i % len(prediction_rev_managers)],
allow_duplicated)
if report_file and pred_report:
report_file.writerow(pred_report)
def create_worklist_from_file(self, worklist_file, prediction_type, allow_duplicated, report_file=None):
raise NotImplementedError()
def handle(self, *args, **opts):
logger.info('=== Starting Predictions Reviews worklist creation ===')
worklist_file = opts['worklist']
allow_duplicated = opts['allow_duplicated']
if opts['report_file']:
report_file = open(opts['report_file'], 'w')
report_writer = DictWriter(report_file,
['review_id', 'review_label', 'slide', 'prediction', 'reviewer'])
report_writer.writeheader()
else:
report_writer = None
try:
if worklist_file:
self.create_worklist_from_file(worklist_file, opts['prediction_type'], allow_duplicated, report_writer)
else:
self.create_random_worklist(opts['prediction_type'], allow_duplicated, report_writer)
except CommandError as cme:
logger.error('A problem occurred while building the worklist, exit')
sys.exit(cme)
if report_writer:
report_file.close()
logger.info('=== Prediction Reviews worklist creation completed ===')
|
crs4/ProMort
|
promort/reviews_manager/management/commands/build_prediction_reviews_worklist.py
|
Python
|
mit
| 5,988 | 0.00501 |
import base64
import os
from django.test import Client as TC
import datetime
import logging
import pytz
from django.utils.module_loading import import_string
from api.directions.sql_func import direction_by_card, get_lab_podr, get_confirm_direction_patient_year, get_type_confirm_direction
from api.stationar.stationar_func import desc_to_data
from api.views import mkb10_dict
from clients.utils import find_patient
from directory.utils import get_researches_details, get_can_created_patient
from doctor_schedule.views import get_hospital_resource, get_available_hospital_plans, check_available_hospital_slot_before_save
from integration_framework.authentication import can_use_schedule_only
from laboratory import settings
from plans.models import PlanHospitalization, PlanHospitalizationFiles, Messages
from podrazdeleniya.models import Podrazdeleniya
import random
from collections import defaultdict
import re
import time
import petrovna
import simplejson as json
from dateutil.relativedelta import relativedelta
from django.db import transaction
from django.db.models import Q, Prefetch
from django.http import JsonResponse
from django.utils import timezone
from rest_framework.decorators import api_view, authentication_classes, permission_classes, parser_classes
from rest_framework.parsers import JSONParser, FormParser, MultiPartParser
from rest_framework.response import Response
import directions.models as directions
from appconf.manager import SettingManager
from clients.models import Individual, Card
from clients.sql_func import last_results_researches_by_time_ago
from directory.models import Researches, Fractions, ReleationsFT
from doctor_call.models import DoctorCall
from hospitals.models import Hospitals
from laboratory.settings import (
AFTER_DATE,
CENTRE_GIGIEN_EPIDEMIOLOGY,
MAX_DOC_CALL_EXTERNAL_REQUESTS_PER_DAY,
REGION,
SCHEDULE_AGE_LIMIT_LTE, LK_FORMS, LK_USER, LK_FILE_SIZE_BYTES, LK_FILE_COUNT,
)
from laboratory.utils import current_time, strfdatetime
from refprocessor.result_parser import ResultRight
from researches.models import Tubes
from results.sql_func import get_laboratory_results_by_directions, get_not_confirm_direction
from rmis_integration.client import Client
from slog.models import Log
from tfoms.integration import match_enp, match_patient, get_ud_info_by_enp, match_patient_by_snils, get_dn_info_by_enp
from users.models import DoctorProfile
from utils.common import values_as_structure_data
from utils.data_verification import data_parse
from utils.dates import normalize_date, valid_date, try_strptime
from utils.xh import check_type_research, short_fio_dots
from . import sql_if
from directions.models import DirectionDocument, DocumentSign, Napravleniya
from .models import CrieOrder, ExternalService
from laboratory.settings import COVID_RESEARCHES_PK
from .utils import get_json_protocol_data, get_json_labortory_data, check_type_file
from django.contrib.auth.models import User
logger = logging.getLogger("IF")
@api_view()
def next_result_direction(request):
from_pk = request.GET.get("fromPk")
after_date = request.GET.get("afterDate")
only_signed = request.GET.get("onlySigned")
if after_date == '0':
after_date = AFTER_DATE
next_n = int(request.GET.get("nextN", 1))
type_researches = request.GET.get("research", '*')
d_start = f'{after_date}'
is_research = 1
researches = [-999]
if type_researches == 'lab':
researches = [x.pk for x in Researches.objects.filter(podrazdeleniye__p_type=Podrazdeleniya.LABORATORY)]
elif type_researches != '*':
researches = [int(i) for i in type_researches.split(',')]
else:
is_research = -1
if only_signed == '1':
        # TODO: return only signed directions and use the signing date, not the confirmation date, as next_time
        # (flag: eds_total_signed=True; the datetime of full signing is eds_total_signed_at)
dirs = sql_if.direction_collect(d_start, researches, is_research, next_n) or []
else:
dirs = sql_if.direction_collect(d_start, researches, is_research, next_n) or []
next_time = None
naprs = [d[0] for d in dirs]
if dirs:
next_time = dirs[-1][3]
return Response({"next": naprs, "next_time": next_time, "n": next_n, "fromPk": from_pk, "afterDate": after_date})
@api_view()
def get_dir_amd(request):
next_n = int(request.GET.get("nextN", 5))
dirs = sql_if.direction_resend_amd(next_n)
result = {"ok": False, "next": []}
if dirs:
result = {"ok": True, "next": [i[0] for i in dirs]}
return Response(result)
@api_view()
def get_dir_n3(request):
next_n = int(request.GET.get("nextN", 5))
dirs = sql_if.direction_resend_n3(next_n)
result = {"ok": False, "next": []}
if dirs:
result = {"ok": True, "next": [i[0] for i in dirs]}
return Response(result)
@api_view()
def resend_dir_l2(request):
next_n = int(request.GET.get("nextN", 5))
dirs = sql_if.direction_resend_l2(next_n)
result = {"ok": False, "next": []}
if dirs:
result = {"ok": True, "next": [i[0] for i in dirs]}
return Response(result)
@api_view()
def resend_dir_crie(request):
next_n = int(request.GET.get("nextN", 5))
dirs = sql_if.direction_resend_crie(next_n)
result = {"ok": False, "next": []}
if dirs:
result = {"ok": True, "next": [i[0] for i in dirs]}
return Response(result)
@api_view()
def result_amd_send(request):
result = json.loads(request.GET.get("result"))
resp = {"ok": False}
if result['error']:
for i in result['error']:
dir_pk = int(i.split(':')[0])
directions.Napravleniya.objects.filter(pk=dir_pk).update(need_resend_amd=False, error_amd=True)
resp = {"ok": True}
if result['send']:
for i in result['send']:
data_amd = i.split(':')
dir_pk = int(data_amd[0])
amd_num = data_amd[1]
directions.Napravleniya.objects.filter(pk=dir_pk).update(need_resend_amd=False, amd_number=amd_num, error_amd=False)
resp = {"ok": True}
return Response(resp)
@api_view()
def direction_data(request):
pk = request.GET.get("pk")
research_pks = request.GET.get("research", '*')
direction: directions.Napravleniya = directions.Napravleniya.objects.select_related('istochnik_f', 'client', 'client__individual', 'client__base').get(pk=pk)
card = direction.client
individual = card.individual
iss = directions.Issledovaniya.objects.filter(napravleniye=direction, time_confirmation__isnull=False).select_related('research', 'doc_confirmation')
if research_pks != '*':
iss = iss.filter(research__pk__in=research_pks.split(','))
if not iss:
return Response({"ok": False})
iss_index = random.randrange(len(iss))
signed_documents = []
if direction.eds_total_signed:
last_time_confirm = direction.last_time_confirm()
for d in DirectionDocument.objects.filter(direction=direction, last_confirmed_at=last_time_confirm):
document = {
'type': d.file_type.upper(),
'content': base64.b64encode(d.file.read()).decode('utf-8'),
'signatures': [],
}
for s in DocumentSign.objects.filter(document=d):
document['signatures'].append(
{
"content": s.sign_value.replace('\n', ''),
"type": s.sign_type,
"executor": s.executor.uploading_data,
}
)
signed_documents.append(document)
return Response(
{
"ok": True,
"pk": pk,
"createdAt": direction.data_sozdaniya,
"patient": {
**card.get_data_individual(full_empty=True, only_json_serializable=True),
"family": individual.family,
"name": individual.name,
"patronymic": individual.patronymic,
"birthday": individual.birthday,
"docs": card.get_n3_documents(),
"sex": individual.sex,
"card": {
"base": {"pk": card.base_id, "title": card.base.title, "short_title": card.base.short_title},
"pk": card.pk,
"number": card.number,
"n3Id": card.n3_id,
"numberWithType": card.number_with_type(),
},
},
"issledovaniya": [x.pk for x in iss],
"timeConfirmation": iss[iss_index].time_confirmation,
"timeTube": iss[iss_index].material_date,
"docLogin": iss[iss_index].doc_confirmation.rmis_login if iss[iss_index].doc_confirmation else None,
"docPassword": iss[iss_index].doc_confirmation.rmis_password if iss[iss_index].doc_confirmation else None,
"department_oid": iss[iss_index].doc_confirmation.podrazdeleniye.oid if iss[iss_index].doc_confirmation else None,
"finSourceTitle": direction.istochnik_f.title if direction.istochnik_f else 'другое',
"finSourceCode": direction.istochnik_f.get_n3_code() if direction.istochnik_f else '6',
"oldPk": direction.core_id,
"isExternal": direction.is_external,
"titleInitiator": direction.get_title_org_initiator(),
"ogrnInitiator": direction.get_ogrn_org_initiator(),
"titleLaboratory": direction.hospital_title.replace("\"", " "),
"ogrnLaboratory": direction.hospital_ogrn,
"hospitalN3Id": direction.hospital_n3id,
"signed": direction.eds_total_signed,
"totalSignedAt": direction.eds_total_signed_at,
"signedDocuments": signed_documents,
"REGION": REGION,
"DEPART": CENTRE_GIGIEN_EPIDEMIOLOGY,
"hasN3IemkUploading": direction.n3_iemk_ok,
}
)
def format_time_if_is_not_none(t):
if not t:
return None
return "{:%Y-%m-%d %H:%M}".format(t)
@api_view()
def issledovaniye_data(request):
pk = request.GET.get("pk")
ignore_sample = request.GET.get("ignoreSample") == 'true'
i = directions.Issledovaniya.objects.get(pk=pk)
sample = directions.TubesRegistration.objects.filter(issledovaniya=i, time_get__isnull=False).first()
results = directions.Result.objects.filter(issledovaniye=i, fraction__fsli__isnull=False)
if (not ignore_sample and not sample) or not results.exists():
return Response({"ok": False})
results_data = []
for r in results:
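        # Normalise the reference range: open-ended bounds (±inf) are rendered
        # as 'до X' ("up to") / 'от X' ("from") strings.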
refs = r.calc_normal(only_ref=True, raw_ref=False)
if isinstance(refs, ResultRight):
if refs.mode == ResultRight.MODE_CONSTANT:
refs = [refs.const_orig]
else:
refs_list = [str(refs.range.val_from.value), str(refs.range.val_to.value)]
if refs_list[0] == '-inf':
refs = [f'до {refs_list[1]}']
elif refs_list[1] == 'inf':
refs = [f'от {refs_list[0]}']
elif refs_list[0] == refs_list[1]:
refs = [refs.const_orig]
else:
refs = refs_list
else:
refs = [r.calc_normal(only_ref=True) or '']
norm = r.calc_normal()
u = r.fraction.get_unit()
results_data.append(
{
"pk": r.pk,
"fsli": r.fraction.get_fsli_code(),
"value": r.value.replace(',', '.'),
"units": r.get_units(),
"unitCode": u.code if u else None,
"ref": refs,
"interpretation": 'N' if norm and norm[0] == ResultRight.RESULT_MODE_NORMAL else 'A',
}
)
time_confirmation = i.time_confirmation_local
doctor_data = {}
if i.doc_confirmation:
doctor_data = i.doc_confirmation.uploading_data
return Response(
{
"ok": True,
"pk": pk,
"sample": {"date": sample.time_get.date() if sample else i.time_confirmation.date()},
"date": time_confirmation.date(),
"dateTimeGet": format_time_if_is_not_none(sample.time_get_local) if sample else None,
"dateTimeReceive": format_time_if_is_not_none(sample.time_recive_local) if sample else None,
"dateTimeConfirm": format_time_if_is_not_none(time_confirmation),
"docConfirm": i.doc_confirmation_fio,
"doctorData": doctor_data,
"results": results_data,
"code": i.research.code,
"comments": i.lab_comment,
}
)
@api_view()
def issledovaniye_data_simple(request):
pk = request.GET.get("pk")
i = directions.Issledovaniya.objects.get(pk=pk)
doctor_data = {}
if i.doc_confirmation:
doctor_data = i.doc_confirmation.uploading_data
return Response(
{
"ok": True,
"pk": pk,
"date": i.time_confirmation_local,
"docConfirm": i.doc_confirmation_fio,
"doctorData": doctor_data,
"outcome": (i.outcome_illness.n3_id if i.outcome_illness else None) or '3',
"visitPlace": (i.place.n3_id if i.place else None) or '1',
"visitPurpose": (i.purpose.n3_id if i.purpose else None) or '2',
"typeFlags": i.research.get_flag_types_n3(),
}
)
@api_view()
def issledovaniye_data_multi(request):
pks = request.GET["pks"].split(",")
ignore_sample = request.GET.get("ignoreSample") == 'true'
iss = (
directions.Issledovaniya.objects.filter(pk__in=pks)
.select_related('doc_confirmation', 'research')
.prefetch_related(Prefetch('result_set', queryset=(directions.Result.objects.filter(fraction__fsli__isnull=False).select_related('fraction'))))
.prefetch_related(Prefetch('tubes', queryset=(directions.TubesRegistration.objects.filter(time_get__isnull=False))))
)
result = []
i: directions.Issledovaniya
for i in iss:
sample = i.tubes.all().first()
if (not ignore_sample and not sample) or not i.result_set.all().exists():
continue
results_data = []
for r in i.result_set.all():
refs = r.calc_normal(only_ref=True, raw_ref=False)
if isinstance(refs, ResultRight):
if refs.mode == ResultRight.MODE_CONSTANT:
refs = [refs.const]
else:
refs = [str(refs.range.val_from.value), str(refs.range.val_to.value)]
if refs[0] == '-inf':
refs = [f'до {refs[1]}']
elif refs[1] == 'inf':
refs = [f'от {refs[0]}']
elif refs[0] == refs[1]:
refs = [refs[0]]
else:
refs = [r.calc_normal(only_ref=True) or '']
results_data.append(
{
"pk": r.pk,
"issTitle": i.research.title,
"title": r.fraction.title,
"fsli": r.fraction.get_fsli_code(),
"value": r.value.replace(',', '.'),
"units": r.get_units(),
"ref": refs,
"confirmed": i.time_confirmation,
}
)
time_confirmation = i.time_confirmation_local
result.append(
{
"pk": i.pk,
"sample": {"date": sample.time_get.date() if sample else i.time_confirmation.date()},
"date": time_confirmation.date(),
"dateTimeGet": format_time_if_is_not_none(sample.time_get_local) if sample else None,
"dateTimeReceive": format_time_if_is_not_none(sample.time_recive_local) if sample else None,
"dateTimeConfirm": format_time_if_is_not_none(time_confirmation),
"docConfirm": i.doc_confirmation_fio,
"results": results_data,
"code": i.research.code,
"comments": i.lab_comment,
}
)
return Response(
{
"ok": len(result) > 0,
"pks": pks,
"results": result,
}
)
@api_view(['GET', 'POST'])
def make_log(request):
key = request.GET.get("key")
keys = request.GET.get("keys", key).split(",")
t = int(request.GET.get("type"))
body = {}
if request.method == "POST":
body = json.loads(request.body)
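    # Log type codes group integration outcomes: 60000-60003 clear the N3 resend
    # flag, 60004/60005 the L2 flag; 60007/60008 record ODLI id success/failure;
    # 60009-60011 IEMK; 60020/60021 VI.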
pks_to_resend_n3_false = [x for x in keys if x] if t in (60000, 60001, 60002, 60003) else []
pks_to_resend_l2_false = [x for x in keys if x] if t in (60004, 60005) else []
pks_to_set_odli_id = [x for x in keys if x] if t in (60007,) else []
pks_to_set_odli_id_fail = [x for x in keys if x] if t in (60008,) else []
pks_to_set_iemk = [x for x in keys if x] if t in (60009, 60011) else []
pks_to_set_iemk_fail = [x for x in keys if x] if t in (60010,) else []
pks_to_set_vi = [x for x in keys if x] if t in (60020,) else []
pks_to_set_vi_fail = [x for x in keys if x] if t in (60021,) else []
with transaction.atomic():
directions.Napravleniya.objects.filter(pk__in=pks_to_resend_n3_false).update(need_resend_n3=False)
directions.Napravleniya.objects.filter(pk__in=pks_to_resend_l2_false).update(need_resend_l2=False)
for k in pks_to_resend_n3_false:
Log.log(key=k, type=t, body=body.get(k, {}))
for k in pks_to_resend_l2_false:
Log.log(key=k, type=t, body=body.get(k, {}))
for k in pks_to_set_odli_id_fail:
Log.log(key=k, type=t, body=body.get(k, {}))
for k in pks_to_set_odli_id:
Log.log(key=k, type=t, body=body.get(k, {}))
if str(k) in body and isinstance(body[k], dict) and body[str(k)]['id']:
d = directions.Napravleniya.objects.get(pk=k)
d.n3_odli_id = body[str(k)]['id']
d.save(update_fields=['n3_odli_id'])
for k in pks_to_set_vi_fail:
Log.log(key=k, type=t, body=body.get(k, {}))
for k in pks_to_set_vi:
Log.log(key=k, type=t, body=body.get(k, {}))
if str(k) in body and isinstance(body[k], dict) and body[str(k)]['id']:
d = directions.Napravleniya.objects.get(pk=k)
d.vi_id = body[str(k)]['id']
d.save(update_fields=['vi_id'])
for k in pks_to_set_iemk_fail:
Log.log(key=k, type=t, body=body.get(k, {}))
for k in pks_to_set_iemk:
Log.log(key=k, type=t, body=body.get(k, {}))
d = directions.Napravleniya.objects.get(pk=k)
d.n3_iemk_ok = True
d.save(update_fields=['n3_iemk_ok'])
return Response({"ok": True})
@api_view(['GET'])
def crie_status(request):
pk = request.GET.get("direction")
system_id = request.GET.get("system_id")
status = request.GET.get("status") or 'null'
error = request.GET.get("error") or ''
direction = directions.Napravleniya.objects.filter(pk=pk).first()
if direction:
if direction.need_resend_crie:
direction.need_resend_crie = False
direction.save(update_fields=['need_resend_crie'])
order = CrieOrder.objects.filter(local_direction=direction).first()
if not order:
order = CrieOrder.objects.create(local_direction=direction, system_id=system_id, status=status, error=error)
updated = ['system_id', 'status', 'error', 'local_direction']
else:
updated = []
if order.system_id != system_id:
order.system_id = system_id
updated.append('system_id')
if order.status != status:
order.status = status
updated.append('status')
if order.error != error:
order.error = error
updated.append('error')
if updated:
order.save(update_fields=updated)
if updated:
Log.log(key=pk, type=60006, body={'updated': updated, 'order_id': order.pk})
return Response({"ok": True, "order": order.pk})
return Response({"ok": False})
@api_view(['POST'])
def check_enp(request):
enp, family, name, patronymic, bd, enp_mode, snils = data_parse(
request.body,
{'enp': str, 'family': str, 'name': str, 'patronymic': str, 'bd': str, 'check_mode': str, 'snils': str},
{'check_mode': 'tfoms', 'bd': None, 'name': None, 'patronymic': None, 'family': None, 'enp': None, 'ud': None, 'snils': None},
)
if not enp:
enp = ""
enp = enp.replace(' ', '')
logger.exception(f'enp_mode: {enp_mode}')
if enp_mode == 'l2-enp':
tfoms_data = match_enp(enp)
if tfoms_data:
return Response({"ok": True, 'patient_data': tfoms_data})
elif enp_mode == 'l2-enp-ud':
tfoms_data = get_ud_info_by_enp(enp)
if tfoms_data:
return Response({"ok": True, 'patient_data': tfoms_data})
elif enp_mode == 'l2-enp-dn':
tfoms_data = get_dn_info_by_enp(enp)
if tfoms_data:
return Response({"ok": True, 'patient_data': tfoms_data})
elif enp_mode == 'l2-snils':
tfoms_data = match_patient_by_snils(snils)
if tfoms_data:
return Response({"ok": True, 'patient_data': tfoms_data})
elif enp_mode == 'l2-enp-full':
patronymic = patronymic if patronymic != 'None' else None
logger.exception(f'data: {(family, name, patronymic, bd)}')
tfoms_data = match_patient(family, name, patronymic, bd)
if tfoms_data:
return Response({"ok": True, 'list': tfoms_data})
elif enp_mode == 'tfoms':
tfoms_data = match_enp(enp)
logger.exception(f'tfoms data: {json.dumps(tfoms_data)}')
if tfoms_data:
bdate = tfoms_data.get('birthdate', '').split(' ')[0]
if normalize_date(bd) == normalize_date(bdate):
return Response({"ok": True, 'patient_data': tfoms_data})
elif enp_mode == 'rmis':
logger.exception(f'enp: {enp}')
c = Client(modules=['patients'])
card = c.patients.get_l2_card_by_enp(enp)
if card:
logger.exception(f'card: {card}')
i: Individual = card.individual
bd_orig = f"{i.birthday:%Y-%m-%d}"
logger.exception(f'{bd_orig} == {bd}')
if bd_orig == bd:
return Response(
{
"ok": True,
'patient_data': {
"rmis_id": card.individual.get_rmis_uid_fast(),
},
}
)
elif enp_mode == 'local':
logger.exception(f'enp: {enp}')
card = Card.objects.filter(base__internal_type=True, is_archive=False, carddocusage__document__number=enp, carddocusage__document__document_type__title='Полис ОМС').first()
if card:
logger.exception(f'card: {card}')
i: Individual = card.individual
bd_orig = f"{i.birthday:%Y-%m-%d}"
logger.exception(f'{bd_orig} == {bd}')
if bd_orig == bd:
return Response(
{
"ok": True,
'patient_data': {
"rmis_id": card.individual.get_rmis_uid_fast(),
},
}
)
return Response({"ok": False, 'message': 'Неверные данные или нет прикрепления к поликлинике'})
@api_view(['POST'])
def patient_results_covid19(request):
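    # NOTE: this endpoint is short-circuited and always answers "not ok";
    # the lookup logic below is currently unreachable.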
return Response({"ok": False})
days = 15
results = []
p_enp = data_parse(request.body, {'enp': str}, {'enp': ''})[0]
if p_enp:
logger.exception(f'patient_results_covid19 by enp: {p_enp}')
card = Card.objects.filter(
base__internal_type=True, is_archive=False, carddocusage__document__number=str(p_enp).replace(' ', ''), carddocusage__document__document_type__title='Полис ОМС'
).first()
logger.exception(f'patient_results_covid19 by enp [CARD]: {card}')
if card:
date_end = current_time()
date_start = date_end + relativedelta(days=-days)
date_end = date_end + relativedelta(days=1)
results_covid = last_results_researches_by_time_ago(card.pk, COVID_RESEARCHES_PK, date_start, date_end)
logger.exception(f'patient_results_covid19 by enp params: {(card.pk, COVID_RESEARCHES_PK, date_start, date_end)}')
logger.exception(f'patient_results_covid19 by enp results count: {len(results_covid)}')
for i in results_covid:
results.append({'date': i.confirm, 'result': i.value})
if len(results) > 0:
return Response({"ok": True, 'results': results})
rmis_id = data_parse(request.body, {'rmis_id': str}, {'rmis_id': ''})[0]
results = []
if rmis_id:
for i in range(3):
results = []
logger.exception(f'patient_results_covid19 by rmis id, try {i + 1}/3: {rmis_id}')
try:
c = Client(modules=['directions', 'rendered_services'])
now = current_time().date()
variants = ['РНК вируса SARS-CоV2 не обнаружена', 'РНК вируса SARS-CоV2 обнаружена']
                # use a distinct loop variable so the outer retry counter `i`
                # is not shadowed
                for day_delta in range(days):
                    date = now - datetime.timedelta(days=day_delta)
rendered_services = c.rendered_services.client.searchServiceRend(patientUid=rmis_id, dateFrom=date)
for rs in rendered_services[:5]:
protocol = c.directions.get_protocol(rs)
for v in variants:
if v in protocol:
results.append({'date': date.strftime('%d.%m.%Y'), 'result': v})
break
break
except Exception as e:
logger.exception(e)
time.sleep(2)
return Response({"ok": True, 'results': results})
@api_view(['POST'])
def external_doc_call_create(request):
data = json.loads(request.body)
org_id = data.get('org_id')
patient_data = data.get('patient_data')
form = data.get('form')
idp = patient_data.get('idp')
enp = patient_data.get('enp')
comment = form.get('comment')
purpose = form.get('purpose')
email = form.get('email')
external_num = form.get('external_num')
is_main_external = form.get('is_main_external')
if email == 'undefined':
email = None
logger.exception(f'external_doc_call_create: {org_id} {json.dumps(patient_data)} {json.dumps(form)} {idp} {enp} {comment} {purpose} {email} {external_num}')
Individual.import_from_tfoms(patient_data)
individuals = Individual.objects.filter(Q(tfoms_enp=enp or '###$fakeenp$###') | Q(tfoms_idp=idp or '###$fakeidp$###'))
individual_obj = individuals.first()
if not individual_obj:
return JsonResponse({"ok": False, "number": None})
card = Card.objects.filter(individual=individual_obj, base__internal_type=True).first()
research = Researches.objects.filter(title='Обращение пациента').first()
hospital = Hospitals.objects.filter(code_tfoms=org_id).first()
if not card or not research or not hospital:
return JsonResponse({"ok": False, "number": None})
date = current_time()
count = DoctorCall.objects.filter(client=card, is_external=True, exec_at__date=date.date()).count()
if count >= MAX_DOC_CALL_EXTERNAL_REQUESTS_PER_DAY:
logger.exception(f'TOO MANY REQUESTS PER DAY: already have {count} calls at {date:%d.%m.%Y}')
return JsonResponse({"ok": False, "number": None, "tooManyRequests": True})
research_pk = research.pk
doc_call = DoctorCall.doctor_call_save(
{
'card': card,
'research': research_pk,
'address': card.main_address,
'district': -1,
'date': date,
'comment': comment,
'phone': form.get('phone'),
'doc': -1,
'purpose': int(purpose),
'hospital': hospital.pk,
'external': True,
'email': email,
'external_num': external_num,
'is_main_external': bool(is_main_external),
}
)
if is_main_external:
doc_call.external_num = doc_call.num
elif SettingManager.l2('send_doc_calls'):
doc_call.external_num = f"{org_id}{doc_call.pk}"
doc_call.save()
return Response({"ok": True, "number": doc_call.external_num})
@api_view(['POST'])
def external_doc_call_update_status(request):
if not hasattr(request.user, 'hospitals'):
return Response({"ok": False, 'message': 'Некорректный auth токен'})
body = json.loads(request.body)
external_num = body.get("externalNum")
status = body.get("status")
org = body.get("org")
code_tfoms = org.get("codeTFOMS")
oid_org = org.get("oid")
if not external_num:
return Response({"ok": False, 'message': 'externalNum не указан'})
if not status:
return Response({"ok": False, 'message': 'status не указан'})
if not code_tfoms and not oid_org:
return Response({"ok": False, 'message': 'Должно быть указано хотя бы одно значение из org.codeTFOMS или org.oid'})
if code_tfoms:
hospital = Hospitals.objects.filter(code_tfoms=code_tfoms).first()
else:
hospital = Hospitals.objects.filter(oid=oid_org).first()
if not hospital:
return Response({"ok": False, 'message': 'Организация не найдена'})
if not request.user.hospitals.filter(pk=hospital.pk).exists():
return Response({"ok": False, 'message': 'Нет доступа в переданную организацию'})
status = str(status)
if not status.isdigit():
return Response({"ok": False, 'message': 'Некорректный status'})
status = int(status)
if status not in [x[0] for x in DoctorCall.STATUS]:
return Response({"ok": False, 'message': 'Некорректный status'})
num = str(external_num)
if not num.startswith('XR'):
return Response({"ok": False, 'message': 'Некорректный externalNum'})
num = num.replace('XR', '')
if not num.isdigit():
return Response({"ok": False, 'message': 'Некорректный externalNum'})
call: DoctorCall = DoctorCall.objects.filter(pk=num).first()
if not call:
return Response({"ok": False, 'message': f'Заявка с номером {num} не найдена'})
call.status = status
call.save(update_fields=['status'])
return Response({"ok": True})
@api_view(['POST'])
def external_doc_call_send(request):
data = json.loads(request.body)
patient_data = data.get('patient_data')
form = data.get('form')
enp = patient_data.get('enp')
address = patient_data.get('address')
comment = form.get('comment')
purpose = form.get('purpose_id')
email = form.get('email')
external_num = form.get('num')
logger.exception(f'external_doc_call_send: {json.dumps(patient_data)} {json.dumps(form)} {enp} {comment} {purpose} {email} {external_num}')
individuals = Individual.objects.filter(tfoms_enp=enp)
if not individuals.exists():
individuals = Individual.objects.filter(document__number=enp).filter(Q(document__document_type__title='Полис ОМС') | Q(document__document_type__title='ЕНП'))
if not individuals.exists():
tfoms_data = match_enp(enp)
if not tfoms_data:
return Response({"ok": False, 'message': 'Неверные данные полиса, в базе ТФОМС нет такого пациента'})
Individual.import_from_tfoms(tfoms_data)
individuals = Individual.objects.filter(tfoms_enp=enp)
    individual = individuals.first()
if not individual:
return Response({"ok": False, 'message': 'Физлицо не найдено'})
card = Card.objects.filter(individual=individual, base__internal_type=True).first()
research = Researches.objects.filter(title='Обращение пациента').first()
hospital = Hospitals.get_default_hospital()
if not card or not research or not hospital:
return JsonResponse({"ok": False, "number": None})
research_pk = research.pk
doc_call = DoctorCall.doctor_call_save(
{
'card': card,
'research': research_pk,
'address': address,
'district': -1,
'date': current_time(),
'comment': comment,
'phone': form.get('phone'),
'doc': -1,
'purpose': int(purpose),
'hospital': hospital.pk,
'external': True,
'email': email,
'external_num': external_num,
'is_main_external': False,
}
)
return Response({"ok": True, "number": doc_call.num})
@api_view(['POST'])
def set_core_id(request):
data = json.loads(request.body)
pk = data.get('pk')
core_id = data.get('coreId')
n = directions.Napravleniya.objects.get(pk=pk)
n.core_id = core_id
n.save(update_fields=['core_id'])
return Response({"ok": True})
class InvalidData(Exception):
pass
def limit_str(s: str, limit=500):
return str(s)[:limit]
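# Accepts externally performed laboratory results: validates the sending
# organization and its initiator, resolves or creates the patient, then
# creates a direction with one Issledovaniya per research and one Result per
# test inside a single transaction.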
@api_view(['POST'])
def external_research_create(request):
if not hasattr(request.user, 'hospitals'):
return Response({"ok": False, 'message': 'Некорректный auth токен'})
body = json.loads(request.body)
old_pk = body.get("oldId")
org = body.get("org", {})
code_tfoms = org.get("codeTFOMS")
oid_org = org.get("oid")
if not code_tfoms and not oid_org:
return Response({"ok": False, 'message': 'Должно быть указано хотя бы одно значение из org.codeTFOMS или org.oid'})
if code_tfoms:
hospital = Hospitals.objects.filter(code_tfoms=code_tfoms).first()
else:
hospital = Hospitals.objects.filter(oid=oid_org).first()
if not hospital:
return Response({"ok": False, 'message': 'Организация не найдена'})
if not request.user.hospitals.filter(pk=hospital.pk).exists():
return Response({"ok": False, 'message': 'Нет доступа в переданную организацию'})
initiator = org.get('initiator') or {}
title_org_initiator = initiator.get('title')
if title_org_initiator is not None:
title_org_initiator = str(title_org_initiator)[:254]
ogrn_org_initiator = initiator.get('ogrn')
if ogrn_org_initiator is not None:
ogrn_org_initiator = str(ogrn_org_initiator)
if not title_org_initiator:
title_org_initiator = None
if not ogrn_org_initiator:
ogrn_org_initiator = None
if not title_org_initiator and ogrn_org_initiator:
return Response({"ok": False, 'message': 'org.initiator: при передаче ogrn поле title обязательно'})
if title_org_initiator and not ogrn_org_initiator:
return Response({"ok": False, 'message': 'org.initiator: при передаче title поле ogrn обязательно'})
if ogrn_org_initiator and not ogrn_org_initiator.isdigit():
return Response({"ok": False, 'message': 'org.initiator.ogrn: в значении возможны только числа'})
if ogrn_org_initiator and len(ogrn_org_initiator) != 13:
return Response({"ok": False, 'message': 'org.initiator.ogrn: длина должна быть 13'})
if ogrn_org_initiator and not petrovna.validate_ogrn(ogrn_org_initiator):
return Response({"ok": False, 'message': 'org.initiator.ogrn: не прошёл валидацию'})
patient = body.get("patient", {})
enp = (patient.get("enp") or '').replace(' ', '')
if enp and (len(enp) != 16 or not enp.isdigit()):
return Response({"ok": False, 'message': 'Неверные данные полиса, должно быть 16 чисел'})
passport_serial = (patient.get("passportSerial") or '').replace(' ', '')
passport_number = (patient.get("passportNumber") or '').replace(' ', '')
snils = (patient.get("snils") or '').replace(' ', '').replace('-', '')
if not enp and (not passport_serial or not passport_number) and not snils:
return Response({"ok": False, 'message': 'При пустом patient.enp должно быть передано patient.snils или patient.passportSerial+patient.passportNumber'})
if passport_serial and len(passport_serial) != 4:
return Response({"ok": False, 'message': 'Длина patient.passportSerial должна быть 4'})
if passport_serial and not passport_serial.isdigit():
return Response({"ok": False, 'message': 'patient.passportSerial должен содержать только числа'})
if passport_number and len(passport_number) != 6:
return Response({"ok": False, 'message': 'Длина patient.passportNumber должна быть 6'})
if passport_number and not passport_number.isdigit():
return Response({"ok": False, 'message': 'patient.passportNumber должен содержать только числа'})
if snils and not petrovna.validate_snils(snils):
return Response({"ok": False, 'message': 'patient.snils: не прошёл валидацию'})
individual_data = patient.get("individual") or {}
if not enp and not individual_data:
return Response({"ok": False, 'message': 'При пустом patient.enp должно быть передано поле patient.individual'})
lastname = str(individual_data.get("lastname") or '')
firstname = str(individual_data.get('firstname') or '')
patronymic = str(individual_data.get('patronymic') or '')
birthdate = str(individual_data.get('birthdate') or '')
sex = str(individual_data.get('sex') or '').lower()
individual = None
if lastname and not firstname:
return Response({"ok": False, 'message': 'При передаче lastname должен быть передан и firstname'})
if firstname and not lastname:
return Response({"ok": False, 'message': 'При передаче firstname должен быть передан и lastname'})
if firstname and lastname and not birthdate:
return Response({"ok": False, 'message': 'При передаче firstname и lastname должно быть передано поле birthdate'})
if birthdate and (not re.fullmatch(r'\d{4}-\d\d-\d\d', birthdate) or birthdate[0] not in ['1', '2']):
return Response({"ok": False, 'message': 'birthdate должно соответствовать формату YYYY-MM-DD'})
if birthdate and sex not in ['м', 'ж']:
return Response({"ok": False, 'message': 'individual.sex должно быть "м" или "ж"'})
individual_status = "unknown"
if enp:
individuals = Individual.objects.filter(tfoms_enp=enp)
if not individuals.exists():
individuals = Individual.objects.filter(document__number=enp).filter(Q(document__document_type__title='Полис ОМС') | Q(document__document_type__title='ЕНП'))
individual = individuals.first()
individual_status = "local_enp"
if not individual:
tfoms_data = match_enp(enp)
if tfoms_data:
individuals = Individual.import_from_tfoms(tfoms_data, need_return_individual=True)
individual_status = "tfoms_match_enp"
individual = individuals.first()
if not individual and lastname:
tfoms_data = match_patient(lastname, firstname, patronymic, birthdate)
if tfoms_data:
individual_status = "tfoms_match_patient"
individual = Individual.import_from_tfoms(tfoms_data, need_return_individual=True)
if not individual and passport_serial:
individuals = Individual.objects.filter(document__serial=passport_serial, document__number=passport_number, document__document_type__title='Паспорт гражданина РФ')
individual = individuals.first()
individual_status = "passport"
if not individual and snils:
individuals = Individual.objects.filter(document__number=snils, document__document_type__title='СНИЛС')
individual = individuals.first()
individual_status = "snils"
if not individual and lastname:
individual = Individual.import_from_tfoms(
{
"family": lastname,
"given": firstname,
"patronymic": patronymic,
"gender": sex,
"birthdate": birthdate,
"enp": enp,
"passport_serial": passport_serial,
"passport_number": passport_number,
"snils": snils,
},
need_return_individual=True,
)
individual_status = "new_local"
if not individual:
return Response({"ok": False, 'message': 'Физлицо не найдено'})
card = Card.objects.filter(individual=individual, base__internal_type=True).first()
if not card:
card = Card.add_l2_card(individual)
if not card:
return Response({"ok": False, 'message': 'Карта не найдена'})
financing_source_title = body.get("financingSource", '')
financing_source = directions.IstochnikiFinansirovaniya.objects.filter(title__iexact=financing_source_title, base__internal_type=True).first()
if not financing_source:
return Response({"ok": False, 'message': 'Некорректный источник финансирования'})
results = body.get("results")
if not results or not isinstance(results, list):
return Response({"ok": False, 'message': 'Некорректное значение results'})
results = results[:40]
message = None
id_in_hospital = body.get("internalId", '')
if id_in_hospital is not None:
id_in_hospital = limit_str(id_in_hospital, 15)
try:
with transaction.atomic():
if old_pk and Napravleniya.objects.filter(pk=old_pk, hospital=hospital).exists():
direction = Napravleniya.objects.get(pk=old_pk)
direction.is_external = True
direction.istochnik_f = financing_source
direction.polis_who_give = card.polis.who_give if card.polis else None
direction.polis_n = card.polis.number if card.polis else None
direction.id_in_hospital = id_in_hospital
direction.title_org_initiator = title_org_initiator
direction.ogrn_org_initiator = ogrn_org_initiator
direction.save()
direction.issledovaniya_set.all().delete()
else:
direction = Napravleniya.objects.create(
client=card,
is_external=True,
istochnik_f=financing_source,
polis_who_give=card.polis.who_give if card.polis else None,
polis_n=card.polis.number if card.polis else None,
hospital=hospital,
id_in_hospital=id_in_hospital,
title_org_initiator=title_org_initiator,
ogrn_org_initiator=ogrn_org_initiator,
)
research_to_filter = defaultdict(lambda: False)
for r in results:
code_research = r.get("codeResearch", "unknown")
research = Researches.objects.filter(code=code_research).first()
if not research:
raise InvalidData(f'Исследование с кодом {code_research} не найдено')
                if research_to_filter[code_research]:
                    raise InvalidData(f'Исследование с кодом {code_research} отправлено повторно в одном направлении')
                research_to_filter[code_research] = True
tests = r.get("tests")
if not tests or not isinstance(tests, list):
raise InvalidData(f'Исследование {code_research} содержит некорректное поле tests')
comments = str(r.get("comments", "") or "") or None
time_confirmation = r.get("dateTimeConfirm")
if not time_confirmation or not valid_date(time_confirmation):
raise InvalidData(f'{code_research}: содержит некорректное поле dateTimeConfirm. Оно должно быть заполнено и соответствовать шаблону YYYY-MM-DD HH:MM')
time_get = str(r.get("dateTimeGet", "") or "") or None
                if time_get and not valid_date(time_get):
raise InvalidData(f'{code_research}: содержит некорректное поле dateTimeGet. Оно должно быть пустым или соответствовать шаблону YYYY-MM-DD HH:MM')
time_receive = str(r.get("dateTimeReceive", "") or "") or None
                if time_receive and not valid_date(time_receive):
raise InvalidData(f'{code_research}: содержит некорректное поле dateTimeReceive. Оно должно быть пустым или соответствовать шаблону YYYY-MM-DD HH:MM')
doc_confirm = str(r.get("docConfirm", "") or "") or None
if doc_confirm is not None:
doc_confirm = limit_str(doc_confirm, 64)
iss = directions.Issledovaniya.objects.create(
napravleniye=direction,
research=research,
lab_comment=comments,
time_confirmation=time_confirmation,
time_save=timezone.now(),
doc_confirmation_string=doc_confirm or f'Врач {hospital.short_title or hospital.title}',
)
tube = Tubes.objects.filter(title='Универсальная пробирка').first()
if not tube:
tube = Tubes.objects.create(title='Универсальная пробирка', color='#049372')
ft = ReleationsFT.objects.filter(tube=tube).first()
if not ft:
ft = ReleationsFT.objects.create(tube=tube)
tr = iss.tubes.create(type=ft)
tr.time_get = time_get
tr.time_recive = time_receive
tr.save(update_fields=['time_get', 'time_recive'])
tests_to_filter = defaultdict(lambda: False)
for t in tests[:30]:
fsli_code = t.get("idFsli", "unknown")
fraction = Fractions.objects.filter(fsli=fsli_code).first()
if not fraction:
raise InvalidData(f'В исследовании {code_research} не найден тест {fsli_code}')
                    if tests_to_filter[fsli_code]:
                        raise InvalidData(f'Тест с кодом {fsli_code} отправлен повторно в одном направлении в {code_research}')
                    tests_to_filter[fsli_code] = True
value = limit_str(t.get("valueString", "") or "", 500)
units = limit_str(str(t.get("units", "") or ""), 50)
reference_value = t.get("referenceValue") or None
reference_range = t.get("referenceRange") or None
if reference_value and not isinstance(reference_value, str):
raise InvalidData(f'{code_research} -> {fsli_code}: поле referenceValue должно быть строкой или null')
if reference_range and not isinstance(reference_range, dict):
raise InvalidData(f'{code_research} -> {fsli_code}: поле referenceRange должно быть объектом {{low, high}} или null')
if reference_range and ('low' not in reference_range or 'high' not in reference_range):
raise InvalidData(f'{code_research} -> {fsli_code}: поле referenceRange должно быть объектом с полями {{low, high}} или null')
ref_str = reference_value
if not ref_str and reference_range:
ref_str = f"{reference_range['low']} – {reference_range['high']}"
if ref_str:
ref_str = limit_str(ref_str.replace("\"", "'"), 120)
ref_str = f'{{"Все": "{ref_str}"}}'
directions.Result(
issledovaniye=iss,
fraction=fraction,
value=value,
units=units,
ref_f=ref_str,
ref_m=ref_str,
).save()
try:
Log.log(
str(direction.pk),
90000,
body={
"org": body.get("org"),
"patient": body.get("patient"),
"individualStatus": individual_status,
"financingSource": body.get("financingSource"),
"resultsCount": len(body.get("results")),
"results": body.get("results"),
},
)
except Exception as e:
logger.exception(e)
return Response({"ok": True, 'id': str(direction.pk)})
except InvalidData as e:
message = str(e)
except Exception as e:
logger.exception(e)
message = 'Серверная ошибка'
return Response({"ok": False, 'message': message})
@api_view(['POST'])
@authentication_classes([])
@permission_classes([])
def eds_get_user_data(request):
    token = request.META.get('HTTP_AUTHORIZATION') or ''
    token = token.replace('Bearer ', '')
    doc = DoctorProfile.objects.filter(eds_token=token).first()
    if not token or not doc:
        return Response({"ok": False})
return Response(
{
"ok": True,
"userData": {
"fio": doc.get_full_fio(),
"department": doc.podrazdeleniye.title if doc.podrazdeleniye else None,
},
}
)
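# Builds the payload for CDA document generation: protocol data for
# referrals, laboratory data for lab directions, plus patient identification.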
def get_cda_data(pk):
n: Napravleniya = Napravleniya.objects.get(pk=pk)
card = n.client
ind = n.client.individual
if check_type_research(pk) == "is_refferal":
data = get_json_protocol_data(pk)
elif check_type_research(pk) == "is_lab":
data = get_json_labortory_data(pk)
else:
data = {}
return {
"title": n.get_eds_title(),
"generatorName": n.get_eds_generator(),
"rawResponse": True,
"data": {
"oidMo": data.get("oidMo"),
"document": data,
"patient": {
'id': card.number,
'snils': card.get_data_individual()["snils"],
'name': {'family': ind.family, 'name': ind.name, 'patronymic': ind.patronymic},
'gender': ind.sex.lower(),
'birthdate': ind.birthday.strftime("%Y%m%d"),
},
},
}
@api_view(['POST'])
@authentication_classes([])
@permission_classes([])
def eds_get_cda_data(request):
    token = request.META.get('HTTP_AUTHORIZATION') or ''
    token = token.replace('Bearer ', '')
if not token or not DoctorProfile.objects.filter(eds_token=token).exists():
return Response({"ok": False})
body = json.loads(request.body)
pk = body.get("pk")
return Response(get_cda_data(pk))
@api_view(['POST'])
@authentication_classes([])
@permission_classes([])
def external_check_result(request):
    token = request.META.get('HTTP_AUTHORIZATION') or ''
    token = token.replace('Bearer ', '')
external_service = ExternalService.objects.filter(token=token).first()
if not token or not external_service:
return Response(
{
"ok": False,
"message": "Передан некорректный токен в заголовке HTTP_AUTHORIZATION",
},
status=403,
)
external_service: ExternalService = external_service
if not external_service.is_active:
return Response(
{
"ok": False,
"message": "Доступ отключен",
},
status=403,
)
if 'qr_check_result' not in external_service.rights:
return Response(
{
"ok": False,
"message": "Нет доступа",
},
status=403,
)
body = json.loads(request.body)
instance_id = body.get("instanceId")
if SettingManager.instance_id() != instance_id:
return Response(
{
"ok": False,
"message": "Некорректный instance_id",
}
)
pk = body.get("direction")
direction = Napravleniya.objects.filter(pk=pk).first()
if not direction:
return Response(
{
"ok": False,
"message": "Направление не найдено",
}
)
direction: Napravleniya
direction_token = body.get("directionToken")
if str(direction.qr_check_token) != direction_token:
return Response(
{
"ok": False,
"message": "Некорректный токен направления",
}
)
ind: Individual = direction.client.individual
patient = {
"family": f"{ind.family[0] if ind.family else ''}*****",
"name": f"{ind.name[0] if ind.name else ''}*****",
"patronymic": f"{ind.patronymic[0] if ind.patronymic else ''}*****",
"birthdate": str(ind.birthday.year),
}
results = []
i: directions.Issledovaniya
for i in direction.issledovaniya_set.all():
if not i.doc_confirmation:
continue
result = {
"title": i.research.title,
"datetime": strfdatetime(i.time_confirmation, "%d.%m.%Y %X"),
"data": [],
}
        # chained order_by() calls replace each other, so combine the keys:
        # sort by weight first, with pk as a deterministic tiebreaker
        fractions = Fractions.objects.filter(research=i.research).order_by("sort_weight", "pk")
f: Fractions
for f in fractions:
            r = directions.Result.objects.filter(issledovaniye=i, fraction=f).first()
            if not r:
                continue
result["data"].append(
{
"title": f.title,
"value": r.value,
}
)
results.append(result)
return Response(
{
"patient": patient,
"results": results,
}
)
@api_view(['POST'])
def get_protocol_result(request):
body = json.loads(request.body)
pk = body.get("pk")
n: Napravleniya = Napravleniya.objects.get(pk=pk)
card = n.client
ind = n.client.individual
if check_type_research(pk) == "is_refferal":
data = get_json_protocol_data(pk)
return Response(
{
"title": n.get_eds_title(),
"generatorName": n.get_eds_generator(),
"data": {
"oidMo": data["oidMo"],
"document": data,
"patient": {
'id': card.number,
'snils': card.get_data_individual()["snils"],
'name': {'family': ind.family, 'name': ind.name, 'patronymic': ind.patronymic},
'gender': ind.sex.lower(),
'birthdate': ind.birthday.strftime("%Y%m%d"),
},
"organization": data["organization"],
},
}
)
elif check_type_research(pk) == "is_lab":
data = get_json_labortory_data(pk)
return Response(
{
"generatorName": "Laboratory_min",
"data": {
"oidMo": data["oidMo"],
"document": data,
"patient": {
'id': card.number,
'snils': card.get_data_individual()["snils"],
'name': {'family': ind.family, 'name': ind.name, 'patronymic': ind.patronymic},
'gender': ind.sex.lower(),
'birthdate': ind.birthday.strftime("%Y%m%d"),
},
"organization": data["organization"],
},
}
)
return Response({})
@api_view(['POST', 'GET'])
def get_hosp_services(request):
services = []
r: Researches
for r in Researches.objects.filter(is_hospital=True):
services.append(
{
"pk": r.pk,
"title": r.get_title(),
}
)
return Response({"services": services})
@api_view(['GET'])
def mkb10(request):
return Response({"rows": mkb10_dict(request, True)})
@api_view(['POST', 'PUT'])
@parser_classes([MultiPartParser, FormParser, JSONParser])
@can_use_schedule_only
def hosp_record(request):
files = []
if request.method == 'PUT':
for kf in request.data:
if kf != 'document':
files.append(request.data[kf])
form = request.data['document']
else:
form = request.body
data = data_parse(
form,
{
'snils': 'str_strip',
'enp': 'str_strip',
'family': 'str_strip',
'name': 'str_strip',
'patronymic': 'str_strip',
'sex': 'str_strip',
'birthdate': 'str_strip',
'comment': 'str_strip',
'date': 'str_strip',
'service': int,
'phone': 'str_strip',
'diagnosis': 'str_strip',
},
)
if len(files) > LK_FILE_COUNT:
return Response({"ok": False, 'message': 'Слишком много файлов'})
for f in files:
if f.size > LK_FILE_SIZE_BYTES:
return Response({"ok": False, 'message': 'Файл слишком большой'})
if not check_type_file(file_in_memory=f):
return JsonResponse({
"ok": False,
"message": "Поддерживаются PDF и JPEG файлы",
})
snils: str = data[0]
enp: str = data[1]
family: str = data[2]
name: str = data[3]
patronymic: str = data[4]
sex: str = data[5].lower()
birthdate: str = data[6]
comment: str = data[7]
date: str = data[8]
service: int = data[9]
phone: str = data[10]
diagnosis: str = data[11]
if sex == 'm':
sex = 'м'
if sex == 'f':
sex = 'ж'
snils = ''.join(ch for ch in snils if ch.isdigit())
individual = None
if enp:
individuals = Individual.objects.filter(tfoms_enp=enp)
individual = individuals.first()
if not individual and snils:
individuals = Individual.objects.filter(document__number=snils, document__document_type__title='СНИЛС')
individual = individuals.first()
if not individual and family and name:
individual = Individual.import_from_tfoms(
{
"family": family,
"given": name,
"patronymic": patronymic,
"gender": sex,
"birthdate": birthdate,
"enp": enp,
"snils": snils,
},
need_return_individual=True,
)
if not individual:
return Response({"ok": False, 'message': 'Физлицо не найдено'})
card = Card.objects.filter(individual=individual, base__internal_type=True).first()
if not card:
card = Card.add_l2_card(individual)
if not card:
return Response({"ok": False, 'message': 'Карта не найдена'})
if SCHEDULE_AGE_LIMIT_LTE:
age = card.individual.age()
if age > SCHEDULE_AGE_LIMIT_LTE:
return Response({"ok": False, 'message': f'Пациент должен быть не старше {SCHEDULE_AGE_LIMIT_LTE} лет'})
hospital_research: Researches = Researches.objects.filter(pk=service, is_hospital=True).first()
if not hospital_research:
return Response({"ok": False, 'message': 'Услуга не найдена'})
has_free_slots = check_available_hospital_slot_before_save(hospital_research.pk, None, date)
if not has_free_slots:
return JsonResponse({"ok": False, "message": "Нет свободных слотов"})
hosp_department_id = hospital_research.podrazdeleniye.pk
with transaction.atomic():
plan_pk = PlanHospitalization.plan_hospitalization_save(
{
'card': card,
'research': hospital_research.pk,
'date': date,
'comment': comment[:256],
'phone': phone,
'action': 0,
'hospital_department_id': hosp_department_id,
'diagnos': diagnosis,
'files': files,
},
None
)
for f in files:
plan_files: PlanHospitalizationFiles = PlanHospitalizationFiles(plan_id=plan_pk)
plan_files.uploaded_file = f
plan_files.save()
y, m, d = date.split('-')
return Response({"ok": True, "message": f"Запись создана — {hospital_research.get_title()} {d}.{m}.{y}"})
@api_view(['POST'])
@can_use_schedule_only
def hosp_record_list(request):
data = data_parse(
request.body,
{
'snils': 'str_strip',
'enp': 'str_strip',
},
)
snils: str = data[0]
enp: str = data[1]
snils = ''.join(ch for ch in snils if ch.isdigit())
individual = None
if enp:
individuals = Individual.objects.filter(tfoms_enp=enp)
individual = individuals.first()
if not individual and snils:
individuals = Individual.objects.filter(document__number=snils, document__document_type__title='СНИЛС')
individual = individuals.first()
if not individual:
return Response({"rows": [], 'message': 'Физлицо не найдено'})
card = Card.objects.filter(individual=individual, base__internal_type=True).first()
if not card:
return Response({"rows": [], 'message': 'Карта не найдена'})
rows = []
plan: PlanHospitalization
for plan in PlanHospitalization.objects.filter(client=card, research__isnull=False, action=0).order_by('-exec_at'):
status_description = ""
if plan.work_status == 2:
status_description = plan.why_cancel
if plan.work_status == 3:
slot_plan = plan.slot_fact.plan
status_description = slot_plan.datetime.astimezone(pytz.timezone(settings.TIME_ZONE)).strftime('%d.%m.%Y %H:%M')
rows_files = []
row_file: PlanHospitalizationFiles
for row_file in PlanHospitalizationFiles.objects.filter(plan=plan).order_by('-created_at'):
rows_files.append({
'pk': row_file.pk,
'fileName': os.path.basename(row_file.uploaded_file.name) if row_file.uploaded_file else None,
})
messages_data = Messages.get_messages_by_plan_hosp(plan.pk, last=True)
rows.append({
"pk": plan.pk,
"service": plan.research.get_title(),
"date": plan.exec_at.strftime('%d.%m.%Y'),
"phone": plan.phone,
"diagnosis": plan.diagnos,
"comment": plan.comment,
"status": plan.get_work_status_display(),
"status_description": status_description,
"files": rows_files,
"messages": messages_data
})
return Response({"rows": rows})
@api_view(['POST'])
def get_all_messages_by_plan_id(request):
data = data_parse(request.body, {'pk': int})
pk: int = data[0]
messages = Messages.get_messages_by_plan_hosp(pk, last=False)
return Response({"rows": messages})
@api_view(['POST'])
def direction_records(request):
data = data_parse(
request.body,
{
'snils': 'str_strip',
'enp': 'str_strip',
'date_year': int
},
)
snils: str = data[0]
enp: str = data[1]
date_year: int = data[2]
card: Card = find_patient(snils, enp)
if not card:
return Response({"rows": [], 'message': 'Карта не найдена'})
d1 = try_strptime(f"{date_year}-01-01", formats=('%Y-%m-%d',))
d2 = try_strptime(f"{date_year}-12-31", formats=('%Y-%m-%d',))
start_date = datetime.datetime.combine(d1, datetime.time.min)
end_date = datetime.datetime.combine(d2, datetime.time.max)
rows = {}
collect_direction = direction_by_card(start_date, end_date, card.pk)
prev_direction = None
unique_direction = set([i.napravleniye_id for i in collect_direction])
not_confirm_direction = get_not_confirm_direction(list(unique_direction))
not_confirm_direction = [i[0] for i in not_confirm_direction]
confirm_direction = list(unique_direction - set(not_confirm_direction))
for dr in collect_direction:
if dr.napravleniye_id in confirm_direction:
status = 2
date_confirm = dr.date_confirm
elif dr.cancel:
date_confirm = ""
status = -1
else:
date_confirm = ""
status = 0
if dr.napravleniye_id != prev_direction:
rows[dr.napravleniye_id] = {"createdAt": dr.date_create, "services": [], "status": status, "confirmedAt": date_confirm}
        rows[dr.napravleniye_id]["services"].append(dr.research_title)
prev_direction = dr.napravleniye_id
category_directions = get_type_confirm_direction(tuple(confirm_direction))
lab_podr = get_lab_podr()
lab_podr = [i[0] for i in lab_podr]
count_paraclinic = 0
count_doc_refferal = 0
count_laboratory = 0
for dr in category_directions:
        if dr.is_doc_refferal:
            count_doc_refferal += 1
        elif dr.is_paraclinic:
            count_paraclinic += 1
elif dr.podrazdeleniye_id in lab_podr:
count_laboratory += 1
return Response({"rows": rows, "count_paraclinic": count_paraclinic, "count_doc_refferal": count_doc_refferal, "count_laboratory": count_laboratory})
@api_view(['POST'])
def directions_by_category_result_year(request):
request_data = json.loads(request.body)
mode = request_data.get('mode')
is_lab = request_data.get('isLab', mode == 'laboratory')
is_paraclinic = request_data.get('isParaclinic', mode == 'paraclinic')
is_doc_refferal = request_data.get('isDocReferral', mode == 'docReferral')
year = request_data['year']
card: Card = find_patient(request_data.get('snils'), request_data.get('enp'))
if not card:
return Response({"results": [], 'message': 'Карта не найдена'})
d1 = datetime.datetime.strptime(f'01.01.{year}', '%d.%m.%Y')
start_date = datetime.datetime.combine(d1, datetime.time.min)
d2 = datetime.datetime.strptime(f'31.12.{year}', '%d.%m.%Y')
end_date = datetime.datetime.combine(d2, datetime.time.max)
if not is_lab and not is_doc_refferal and not is_paraclinic:
return JsonResponse({"results": []})
if is_lab:
lab_podr = get_lab_podr()
lab_podr = [i[0] for i in lab_podr]
else:
lab_podr = [-1]
confirmed_directions = get_confirm_direction_patient_year(start_date, end_date, lab_podr, card.pk, is_lab, is_paraclinic, is_doc_refferal)
if not confirmed_directions:
return JsonResponse({"results": []})
directions = {}
for d in confirmed_directions:
if d.direction not in directions:
directions[d.direction] = {
'pk': d.direction,
'confirmedAt': d.ch_time_confirmation,
'services': [],
}
directions[d.direction]['services'].append(d.research_title)
return JsonResponse({"results": list(directions.values())})
@api_view(['POST'])
def results_by_direction(request):
request_data = json.loads(request.body)
mode = request_data.get('mode')
is_lab = request_data.get('isLab', mode == 'laboratory')
is_paraclinic = request_data.get('isParaclinic', mode == 'paraclinic')
is_doc_refferal = request_data.get('isDocReferral', mode == 'docReferral')
direction = request_data.get('pk')
directions = request_data.get('directions', [])
if not directions and direction:
directions = [direction]
objs_result = {}
if is_lab:
direction_result = get_laboratory_results_by_directions(directions)
for r in direction_result:
if r.direction not in objs_result:
objs_result[r.direction] = {'pk': r.direction, 'confirmedAt': r.date_confirm, 'services': {}}
if r.iss_id not in objs_result[r.direction]['services']:
objs_result[r.direction]['services'][r.iss_id] = {'title': r.research_title, 'fio': short_fio_dots(r.fio), 'confirmedAt': r.date_confirm, 'fractions': []}
objs_result[r.direction]['services'][r.iss_id]['fractions'].append({'title': r.fraction_title, 'value': r.value, 'units': r.units})
if is_paraclinic or is_doc_refferal:
results = desc_to_data(directions, force_all_fields=True)
for i in results:
direction_data = i['result'][0]["date"].split(' ')
if direction_data[1] not in objs_result:
objs_result[direction_data[1]] = {'pk': direction_data[1], 'confirmedAt': direction_data[0], 'services': {}}
if i['result'][0]["iss_id"] not in objs_result[direction_data[1]]['services']:
objs_result[direction_data[1]]['services'][i['result'][0]["iss_id"]] = {
'title': i['title_research'],
'fio': short_fio_dots(i['result'][0]["docConfirm"]),
'confirmedAt': direction_data[0],
'fractions': [],
}
values = values_as_structure_data(i['result'][0]["data"])
objs_result[direction_data[1]]['services'][i['result'][0]["iss_id"]]["fractions"].extend(values)
return JsonResponse({"results": list(objs_result.values())})
@api_view(['POST'])
@can_use_schedule_only
def check_employee(request):
data = json.loads(request.body)
snils = data.get('snils')
date_now = current_time(only_date=True)
doctor_profile = DoctorProfile.objects.filter(snils=snils, external_access=True, date_stop_external_access__gte=date_now).first()
if doctor_profile:
return Response({"ok": True})
return Response({"ok": False})
@api_view(['GET'])
@can_use_schedule_only
def hospitalization_plan_research(request):
return Response({"services": get_hospital_resource()})
@api_view(['POST'])
@can_use_schedule_only
def available_hospitalization_plan(request):
data = json.loads(request.body)
research_pk = data.get('research_pk')
resource_id = data.get('resource_id')
date_start = data.get('date_start')
date_end = data.get('date_end')
result, _ = get_available_hospital_plans(research_pk, resource_id, date_start, date_end)
return Response({"data": result})
@api_view(['POST'])
@can_use_schedule_only
def check_hosp_slot_before_save(request):
data = json.loads(request.body)
research_pk = data.get('research_pk')
resource_id = data.get('resource_id')
date = data.get('date')
result = check_available_hospital_slot_before_save(research_pk, resource_id, date)
return JsonResponse({"result": result})
@api_view(['POST'])
@can_use_schedule_only
def get_pdf_result(request):
data = json.loads(request.body)
pk = data.get('pk')
localclient = TC(enforce_csrf_checks=False)
addr = "/results/pdf"
params = {"pk": json.dumps([pk]), 'leftnone': '1', 'token': "8d63a9d6-c977-4c7b-a27c-64f9ba8086a7"}
result = localclient.get(addr, params).content
pdf_content = base64.b64encode(result).decode('utf-8')
return JsonResponse({"result": pdf_content})
@api_view(['POST'])
@can_use_schedule_only
def get_pdf_direction(request):
data = json.loads(request.body)
pk = data.get('pk')
localclient = TC(enforce_csrf_checks=False)
addr = "/directions/pdf"
params = {"napr_id": json.dumps([pk]), 'token': "8d63a9d6-c977-4c7b-a27c-64f9ba8086a7"}
result = localclient.get(addr, params).content
pdf_content = base64.b64encode(result).decode('utf-8')
return JsonResponse({"result": pdf_content})
@api_view(['POST'])
@can_use_schedule_only
def documents_lk(request):
return Response({"documents": get_can_created_patient()})
@api_view(['POST'])
@can_use_schedule_only
def details_document_lk(request):
data = data_parse(request.body, {'pk': int},)
pk: int = data[0]
response = get_researches_details(pk)
return Response(response)
@api_view(['POST'])
@can_use_schedule_only
def forms_lk(request):
response = {"forms": LK_FORMS}
return Response(response)
@api_view(['POST'])
@can_use_schedule_only
def pdf_form_lk(request):
data = data_parse(request.body, {'type_form': str, 'snils': str, 'enp': str, 'agent': {'snils': str, 'enp': str}}, )
type_form: str = data[0]
snils: str = data[1]
enp: str = data[2]
card: Card = find_patient(snils, enp)
if not card:
return Response({"results": [], 'message': 'Карта не найдена'})
f = import_string('forms.forms' + type_form[0:3] + '.form_' + type_form[4:6])
user = User.objects.get(pk=LK_USER)
result = f(
request_data={
"card_pk": card,
"user": user,
"hospital": user.doctorprofile.get_hospital(),
}
)
pdf_content = base64.b64encode(result).decode('utf-8')
return Response({"result": pdf_content})
@api_view(['POST', 'PUT'])
@parser_classes([MultiPartParser, FormParser, JSONParser])
@can_use_schedule_only
def add_file_hospital_plan(request):
file = request.data.get('file-add')
data = data_parse(request.data.get('document'), {'pk': int})
pk: int = data[0]
with transaction.atomic():
plan: PlanHospitalization = PlanHospitalization.objects.select_for_update().get(pk=pk)
if file.size > LK_FILE_SIZE_BYTES:
return JsonResponse({
"ok": False,
"message": "Файл слишком большой",
})
if PlanHospitalizationFiles.get_count_files_by_plan(plan) >= LK_FILE_COUNT:
return JsonResponse({
"ok": False,
"message": "Вы добавили слишком много файлов в одну заявку",
})
if not check_type_file(file_in_memory=file):
return JsonResponse({
"ok": False,
"message": "Поддерживаются PDF и JPEG файлы",
})
plan_files: PlanHospitalizationFiles = PlanHospitalizationFiles(plan=plan)
plan_files.uploaded_file = file
plan_files.save()
return Response({
"ok": True,
"message": "Файл добавлен",
})
@api_view(['POST'])
@can_use_schedule_only
def get_limit_download_files(request):
return Response({"lk_file_count": LK_FILE_COUNT, "lk_file_size_bytes": LK_FILE_SIZE_BYTES})
|
moodpulse/l2
|
integration_framework/views.py
|
Python
|
mit
| 76,926 | 0.002275 |
import unittest
import os
import sys
import shutil
sys.path.append('../lib/')
sys.path.append(os.path.dirname(os.path.realpath(__file__)))
import alchemy
from alchemy.schema import *
# Resolve these at import time so the tests also run under unittest
# discovery; originally they were assigned only in the __main__ block below.
config = alchemy.get_config()
session = alchemy.session
class TestAlchemy(unittest.TestCase):
def setUp(self):
# this basically resets our testing database
path = config.get('sqlite').get('path')
shutil.copyfile('{0}/alchemy.raw'.format(path), '{0}/test.db'.format(path))
def tearDown(self):
# we keep this to tidy up our database if it fails
session.close()
def test_raw_clean(self):
# add a Clean record to mark something against
asg0 = session.query(RawAssignee).limit(10)
asg1 = session.query(RawAssignee).limit(10).offset(10)
alchemy.match(asg0, session)
alchemy.match(asg1, session)
alchemy.match([asg0[0], asg1[0].assignee], session)
def test_match_all(self):
alchemy.match(session.query(RawAssignee), session)
def test_set_default(self):
# create two items
loc = session.query(RawLocation)
alchemy.match(loc, session)
alchemy.match(loc[0], session, {"city": u"Frisco", "state": u"Cali", "country": u"US", "longitude": 10.0, "latitude": 10.0})
self.assertEqual("Frisco, Cali, US", loc[0].location.address)
alchemy.match(loc[0], session, keepexisting=True)
self.assertEqual("Frisco, Cali, US", loc[0].location.address)
self.assertEqual(10.0, loc[0].location.latitude)
self.assertEqual(10.0, loc[0].location.longitude)
alchemy.match(loc[0], session)
self.assertEqual("Hong Kong, OH, US", loc[0].location.address)
self.assertEqual(10.0, loc[0].location.latitude)
self.assertEqual(10.0, loc[0].location.longitude)
alchemy.match(loc[0], session, {"city": u"Frisco"}, keepexisting=True)
self.assertEqual("Frisco, OH, US", loc[0].location.address)
self.assertEqual(10.0, loc[0].location.latitude)
self.assertEqual(10.0, loc[0].location.longitude)
def test_unmatch_asgloc(self):
loc = session.query(RawLocation).limit(20)
asg = session.query(RawAssignee).limit(20)
alchemy.match(asg, session)
alchemy.match(loc[0:5], session)
alchemy.match(loc[5:10], session)
alchemy.match(loc[10:15], session)
alchemy.match(loc[15:20], session)
clean = asg[0].assignee
alchemy.unmatch(asg[0], session)
self.assertEqual(None, asg[0].assignee)
self.assertEqual(19, len(clean.rawassignees))
self.assertEqual(19, len(clean.patents))
self.assertEqual(4, session.query(Location).count())
self.assertEqual(4, session.query(locationassignee).count())
clean = loc[0].location
self.assertEqual(5, len(clean.rawlocations))
alchemy.unmatch(loc[0], session)
self.assertEqual(4, len(clean.rawlocations))
alchemy.unmatch(loc[1], session)
self.assertEqual(3, len(clean.rawlocations))
alchemy.unmatch(loc[2:5], session)
self.assertEqual(None, loc[0].location)
self.assertEqual(3, session.query(Location).count())
self.assertEqual(3, session.query(locationassignee).count())
alchemy.unmatch(loc[5].location, session)
self.assertEqual(2, session.query(Location).count())
self.assertEqual(2, session.query(locationassignee).count())
alchemy.unmatch(asg[3:20], session)
alchemy.unmatch(loc[10].location, session)
self.assertEqual(1, session.query(Location).count())
self.assertEqual(0, session.query(locationassignee).count())
def test_unmatch_invloc(self):
loc = session.query(RawLocation).limit(20)
inv = session.query(RawInventor).limit(20)
alchemy.match(inv, session)
alchemy.match(loc[0:5], session)
alchemy.match(loc[5:10], session)
alchemy.match(loc[10:15], session)
alchemy.match(loc[15:20], session)
clean = inv[0].inventor
alchemy.unmatch(inv[0], session)
self.assertEqual(None, inv[0].inventor)
self.assertEqual(19, len(clean.rawinventors))
self.assertEqual(10, len(clean.patents))
self.assertEqual(4, session.query(Location).count())
self.assertEqual(4, session.query(locationinventor).count())
clean = loc[0].location
self.assertEqual(5, len(clean.rawlocations))
alchemy.unmatch(loc[0], session)
self.assertEqual(4, len(clean.rawlocations))
alchemy.unmatch(loc[1], session)
self.assertEqual(3, len(clean.rawlocations))
alchemy.unmatch(loc[2:5], session)
self.assertEqual(None, loc[0].location)
self.assertEqual(3, session.query(Location).count())
self.assertEqual(3, session.query(locationinventor).count())
clean = inv[5].inventor
alchemy.unmatch(inv[1], session)
self.assertEqual(None, inv[1].inventor)
self.assertEqual(18, len(clean.rawinventors))
# this patent is repeated
self.assertEqual(10, len(clean.patents))
alchemy.unmatch(inv[2], session)
self.assertEqual(None, inv[2].inventor)
self.assertEqual(17, len(clean.rawinventors))
self.assertEqual(9, len(clean.patents))
alchemy.unmatch(loc[5].location, session)
self.assertEqual(2, session.query(Location).count())
self.assertEqual(2, session.query(locationinventor).count())
alchemy.unmatch(inv[3:20], session)
alchemy.unmatch(loc[10].location, session)
self.assertEqual(1, session.query(Location).count())
self.assertEqual(0, session.query(locationinventor).count())
def test_unmatch_lawyer(self):
law = session.query(RawLawyer).limit(20)
alchemy.match(law, session)
alchemy.unmatch(law[0], session)
self.assertEqual(None, law[0].lawyer)
self.assertEqual(19, len(law[1].lawyer.rawlawyers))
self.assertEqual(14, len(law[1].lawyer.patents))
def test_assigneematch(self):
# blindly assume first 10 are the same
asg0 = session.query(RawAssignee).limit(10)
asg1 = session.query(RawAssignee).limit(10).offset(10)
asgs = session.query(Assignee)
alchemy.match(asg0, session)
alchemy.match(asg1, session)
# create two items
self.assertEqual(10, len(asg0[0].assignee.rawassignees))
self.assertEqual(10, len(asg1[0].assignee.rawassignees))
self.assertEqual(10, len(asg0[0].assignee.patents))
self.assertEqual(2, asgs.count())
self.assertEqual("CAFEPRESS.COM", asg0[0].assignee.organization)
# merge the assignees together
alchemy.match([asg0[0], asg1[0]], session)
self.assertEqual(20, len(asg0[0].assignee.rawassignees))
self.assertEqual(20, len(asg1[0].assignee.rawassignees))
self.assertEqual(20, len(asg0[0].assignee.patents))
self.assertEqual(1, asgs.count())
# override the default values provided
alchemy.match(asg0[0], session, {"organization": u"Kevin"})
self.assertEqual("Kevin", asg0[0].assignee.organization)
# determine the most common organization name
alchemy.match(session.query(RawAssignee).limit(40).all(), session)
self.assertEqual(40, len(asg1[0].assignee.rawassignees))
self.assertEqual("The Procter & Gamble Company", asg0[0].assignee.organization)
def test_inventormatch(self):
# blindly assume first 10 are the same
inv0 = session.query(RawInventor).limit(10)
inv1 = session.query(RawInventor).limit(10).offset(10)
invs = session.query(Inventor)
alchemy.match(inv0, session)
alchemy.match(inv1, session)
# create two items
self.assertEqual(10, len(inv0[0].inventor.rawinventors))
self.assertEqual(10, len(inv1[0].inventor.rawinventors))
self.assertEqual(2, invs.count())
self.assertEqual(6, len(inv0[0].inventor.patents))
self.assertEqual(5, len(inv1[0].inventor.patents))
self.assertEqual("David C. Mattison", inv0[0].inventor.name_full)
# merge the assignees together
alchemy.match([inv0[0], inv1[0]], session)
self.assertEqual(20, len(inv0[0].inventor.rawinventors))
self.assertEqual(20, len(inv1[0].inventor.rawinventors))
self.assertEqual(11, len(inv0[0].inventor.patents))
self.assertEqual(1, invs.count())
# override the default values provided
alchemy.match(inv0[0], session, {"name_first": u"Kevin", "name_last": u"Yu"})
self.assertEqual("Kevin Yu", inv0[0].inventor.name_full)
# determine the most common organization name
alchemy.match(session.query(RawInventor).all(), session)
self.assertEqual(137, len(inv1[0].inventor.rawinventors))
self.assertEqual("Robert Wang", inv0[0].inventor.name_full)
def test_lawyermatch(self):
# blindly assume first 10 are the same
law0 = session.query(RawLawyer).limit(10)
law1 = session.query(RawLawyer).limit(10).offset(10)
laws = session.query(Lawyer)
alchemy.match(law0, session)
alchemy.match(law1, session)
# create two items
self.assertEqual(10, len(law0[0].lawyer.rawlawyers))
self.assertEqual(10, len(law1[0].lawyer.rawlawyers))
self.assertEqual(2, laws.count())
self.assertEqual(7, len(law0[0].lawyer.patents))
self.assertEqual(9, len(law1[0].lawyer.patents))
self.assertEqual("Warner Norcross & Judd LLP", law0[0].lawyer.organization)
# merge the assignees together
alchemy.match([law0[0], law1[0]], session)
self.assertEqual(20, len(law0[0].lawyer.rawlawyers))
self.assertEqual(20, len(law1[0].lawyer.rawlawyers))
self.assertEqual(15, len(law0[0].lawyer.patents))
self.assertEqual(1, laws.count())
# override the default values provided
alchemy.match(law0[0], session, {"name_first": u"Devin", "name_last": u"Ko"})
self.assertEqual("Devin Ko", law0[0].lawyer.name_full)
# determine the most common organization name
alchemy.match(session.query(RawLawyer).all(), session)
self.assertEqual(57, len(law1[0].lawyer.rawlawyers))
self.assertEqual("Robert Robert Chuey", law0[0].lawyer.name_full)
def test_locationmatch(self):
# blindly assume first 10 are the same
loc0 = session.query(RawLocation).limit(10)
loc1 = session.query(RawLocation).limit(10).offset(10)
locs = session.query(Location)
alchemy.match(loc0, session)
alchemy.match(loc1, session)
# create two items
self.assertEqual(10, len(loc0[0].location.rawlocations))
self.assertEqual(10, len(loc1[0].location.rawlocations))
self.assertEqual(0, len(loc0[0].location.assignees))
self.assertEqual(0, len(loc0[0].location.inventors))
self.assertEqual(2, locs.count())
self.assertEqual("Hong Kong, MN, NL", loc0[0].location.address)
# merge the assignees together
alchemy.match([loc0[0], loc1[0]], session)
self.assertEqual(20, len(loc0[0].location.rawlocations))
self.assertEqual(20, len(loc1[0].location.rawlocations))
self.assertEqual(0, len(loc0[0].location.assignees))
self.assertEqual(0, len(loc0[0].location.inventors))
self.assertEqual(1, locs.count())
self.assertEqual("Hong Kong, MN, US", loc0[0].location.address)
self.assertEqual(None, loc0[0].location.latitude)
self.assertEqual(None, loc0[0].location.longitude)
# override the default values provided
alchemy.match(loc0[0], session, {"city": u"Frisco", "state": u"Cali", "country": u"US", "longitude": 10.0, "latitude": 10.0})
self.assertEqual("Frisco, Cali, US", loc0[0].location.address)
self.assertEqual(10.0, loc0[0].location.latitude)
self.assertEqual(10.0, loc0[0].location.longitude)
def test_assignee_location(self):
# insert an assignee first.
# then location. make sure links ok
asg = session.query(RawAssignee).limit(20)
loc = session.query(RawLocation).limit(40)
alchemy.match(asg[0:5], session)
alchemy.match(asg[5:10], session)
alchemy.match(asg[10:15], session)
alchemy.match(asg[15:20], session)
alchemy.match(loc[0:20], session)
alchemy.match(loc[20:40], session)
self.assertEqual(2, len(loc[19].location.assignees))
self.assertEqual(1, len(asg[4].assignee.locations))
self.assertEqual(2, len(asg[5].assignee.locations))
def test_inventor_location(self):
# insert an assignee first.
# then location. make sure links ok
inv = session.query(RawInventor).limit(20)
loc = session.query(RawLocation).limit(40)
alchemy.match(inv[0:5], session)
alchemy.match(inv[5:10], session)
alchemy.match(inv[10:15], session)
alchemy.match(inv[15:20], session)
alchemy.match(loc[0:20], session)
alchemy.match(loc[20:40], session)
self.assertEqual(1, len(inv[14].inventor.locations))
self.assertEqual(2, len(inv[15].inventor.locations))
self.assertEqual(4, len(loc[19].location.inventors))
self.assertEqual(1, len(loc[20].location.inventors))
def test_location_assignee(self):
asg = session.query(RawAssignee).limit(20)
loc = session.query(RawLocation).limit(40)
alchemy.match(loc[0:20], session)
alchemy.match(loc[20:40], session)
alchemy.match(asg[0:5], session)
alchemy.match(asg[5:10], session)
alchemy.match(asg[10:15], session)
alchemy.match(asg[15:20], session)
self.assertEqual(2, len(loc[19].location.assignees))
self.assertEqual(1, len(asg[4].assignee.locations))
self.assertEqual(2, len(asg[5].assignee.locations))
def test_location_inventor(self):
# insert an assignee first.
# then location. make sure links ok
inv = session.query(RawInventor).limit(20)
loc = session.query(RawLocation).limit(40)
alchemy.match(loc[0:20], session)
alchemy.match(loc[20:40], session)
alchemy.match(inv[0:5], session)
alchemy.match(inv[5:10], session)
alchemy.match(inv[10:15], session)
alchemy.match(inv[15:20], session)
self.assertEqual(1, len(inv[14].inventor.locations))
self.assertEqual(2, len(inv[15].inventor.locations))
self.assertEqual(4, len(loc[19].location.inventors))
self.assertEqual(1, len(loc[20].location.inventors))
if __name__ == '__main__':
    unittest.main()
|
yngcan/patentprocessor
|
test/test_alchemy.py
|
Python
|
bsd-2-clause
| 14,826 | 0.000742 |
# -*- coding: utf-8 -*-
from __future__ import with_statement
import base64
import datetime
import json
import os
from django import http
from django.conf import settings
from django.conf.urls import url
from django.contrib import admin
from django.core import urlresolvers
from django.core.cache import cache
from django.core.exceptions import ValidationError, ImproperlyConfigured
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.management import call_command
from django.forms.widgets import Media
from django.test.testcases import TestCase
from django.utils import timezone
from cms import api
from cms.constants import PLUGIN_MOVE_ACTION, PLUGIN_COPY_ACTION
from cms.exceptions import PluginAlreadyRegistered, PluginNotRegistered, DontUsePageAttributeWarning
from cms.models import Page, Placeholder
from cms.models.pluginmodel import CMSPlugin
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cms.sitemaps.cms_sitemap import CMSSitemap
from cms.test_utils.project.pluginapp.plugins.manytomany_rel.models import (
Article, Section, ArticlePluginModel,
FKModel,
M2MTargetModel)
from cms.test_utils.project.pluginapp.plugins.meta.cms_plugins import (
TestPlugin, TestPlugin2, TestPlugin3, TestPlugin4, TestPlugin5)
from cms.test_utils.project.pluginapp.plugins.validation.cms_plugins import (
NonExisitngRenderTemplate, NoRender, NoRenderButChildren, DynTemplate)
from cms.test_utils.testcases import (
CMSTestCase, URL_CMS_PAGE, URL_CMS_PLUGIN_MOVE, URL_CMS_PAGE_ADD,
URL_CMS_PLUGIN_ADD, URL_CMS_PLUGIN_EDIT, URL_CMS_PAGE_CHANGE,
URL_CMS_PLUGIN_REMOVE, URL_CMS_PAGE_PUBLISH)
from cms.test_utils.util.fuzzy_int import FuzzyInt
from cms.toolbar.toolbar import CMSToolbar
from cms.utils.conf import get_cms_setting
from cms.utils.copy_plugins import copy_plugins_to
from cms.utils.plugins import get_plugins_for_page, get_plugins
from djangocms_googlemap.models import GoogleMap
from djangocms_inherit.cms_plugins import InheritPagePlaceholderPlugin
from djangocms_file.models import File
from djangocms_inherit.models import InheritPagePlaceholder
from djangocms_link.forms import LinkForm
from djangocms_link.models import Link
from djangocms_picture.models import Picture
from djangocms_text_ckeditor.models import Text
from djangocms_text_ckeditor.utils import plugin_tags_to_id_list, plugin_to_tag
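# A minimal do-nothing plugin fixture used by the registration tests below;
# render_plugin=False keeps it out of the rendering pipeline.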
class DumbFixturePlugin(CMSPluginBase):
model = CMSPlugin
name = "Dumb Test Plugin. It does nothing."
render_template = ""
admin_preview = False
render_plugin = False
def render(self, context, instance, placeholder):
return context
class DumbFixturePluginWithUrls(DumbFixturePlugin):
name = DumbFixturePlugin.name + " With custom URLs."
render_plugin = False
def _test_view(self, request):
return http.HttpResponse("It works")
def get_plugin_urls(self):
return [
url(r'^testview/$', admin.site.admin_view(self._test_view), name='dumbfixtureplugin'),
]
plugin_pool.register_plugin(DumbFixturePluginWithUrls)
class PluginsTestBaseCase(CMSTestCase):
def setUp(self):
self.super_user = self._create_user("test", True, True)
self.slave = self._create_user("slave", True)
self.FIRST_LANG = settings.LANGUAGES[0][0]
self.SECOND_LANG = settings.LANGUAGES[1][0]
self._login_context = self.login_user_context(self.super_user)
self._login_context.__enter__()
def tearDown(self):
self._login_context.__exit__(None, None, None)
def approve_page(self, page):
response = self.client.get(URL_CMS_PAGE + "%d/approve/" % page.pk)
self.assertRedirects(response, URL_CMS_PAGE)
# reload page
return self.reload_page(page)
def get_request(self, *args, **kwargs):
request = super(PluginsTestBaseCase, self).get_request(*args, **kwargs)
request.placeholder_media = Media()
request.toolbar = CMSToolbar(request)
return request
def get_response_pk(self, response):
return int(response.content.decode('utf8').split("/edit-plugin/")[1].split("/")[0])
class PluginsTestCase(PluginsTestBaseCase):
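    # Helper: POST to the plugin-add endpoint and return the pk of the newly
    # created plugin, asserting that exactly one CMSPlugin now exists.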
def _create_text_plugin_on_page(self, page):
plugin_data = {
'plugin_type': "TextPlugin",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(CMSPlugin.objects.count(), 1)
self.assertEqual(response.status_code, 200)
created_plugin_id = self.get_response_pk(response)
self.assertEqual(created_plugin_id, CMSPlugin.objects.all()[0].pk)
return created_plugin_id
def _edit_text_plugin(self, plugin_id, text):
edit_url = "%s%s/" % (URL_CMS_PLUGIN_EDIT, plugin_id)
response = self.client.get(edit_url)
self.assertEqual(response.status_code, 200)
data = {
"body": text
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
txt = Text.objects.get(pk=plugin_id)
return txt
def test_add_edit_plugin(self):
"""
        Test that you can add and then edit a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
created_plugin_id = self._create_text_plugin_on_page(page)
# now edit the plugin
txt = self._edit_text_plugin(created_plugin_id, "Hello World")
self.assertEqual("Hello World", txt.body)
# edit body, but click cancel button
data = {
"body": "Hello World!!",
"_cancel": True,
}
edit_url = '%s%d/' % (URL_CMS_PLUGIN_EDIT, created_plugin_id)
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEqual("Hello World", txt.body)
def test_plugin_edit_marks_page_dirty(self):
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
self.assertEqual(response.status_code, 302)
page = Page.objects.all()[0]
response = self.client.post(URL_CMS_PAGE_PUBLISH % (page.pk, 'en'))
self.assertEqual(response.status_code, 302)
created_plugin_id = self._create_text_plugin_on_page(page)
page = Page.objects.all()[0]
self.assertEqual(page.is_dirty('en'), True)
response = self.client.post(URL_CMS_PAGE_PUBLISH % (page.pk, 'en'))
self.assertEqual(response.status_code, 302)
page = Page.objects.all()[0]
self.assertEqual(page.is_dirty('en'), False)
self._edit_text_plugin(created_plugin_id, "Hello World")
page = Page.objects.all()[0]
self.assertEqual(page.is_dirty('en'), True)
def test_plugin_order(self):
"""
Test that plugin position is saved after creation
"""
page_en = api.create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
# We check created objects and objects from the DB to be sure the position value
# has been saved correctly
text_plugin_1 = api.add_plugin(ph_en, "TextPlugin", "en", body="I'm the first")
text_plugin_2 = api.add_plugin(ph_en, "TextPlugin", "en", body="I'm the second")
db_plugin_1 = CMSPlugin.objects.get(pk=text_plugin_1.pk)
db_plugin_2 = CMSPlugin.objects.get(pk=text_plugin_2.pk)
with self.settings(CMS_PERMISSION=False):
self.assertEqual(text_plugin_1.position, 0)
self.assertEqual(db_plugin_1.position, 0)
self.assertEqual(text_plugin_2.position, 1)
self.assertEqual(db_plugin_2.position, 1)
## Finally we render the placeholder to test the actual content
rendered_placeholder = ph_en.render(self.get_context(page_en.get_absolute_url(), page=page_en), None)
self.assertEqual(rendered_placeholder, "I'm the firstI'm the second")
def test_plugin_order_alt(self):
"""
Test that plugin position is saved after creation
"""
draft_page = api.create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=False, in_navigation=True)
placeholder = draft_page.placeholders.get(slot="col_left")
# We check created objects and objects from the DB to be sure the position value
# has been saved correctly
text_plugin_2 = api.add_plugin(placeholder, "TextPlugin", "en", body="I'm the second")
text_plugin_3 = api.add_plugin(placeholder, "TextPlugin", "en", body="I'm the third")
# Publish to create a 'live' version
draft_page.publish('en')
draft_page = draft_page.reload()
placeholder = draft_page.placeholders.get(slot="col_left")
# Add a plugin and move it to the first position
text_plugin_1 = api.add_plugin(placeholder, "TextPlugin", "en", body="I'm the first")
data = {
'placeholder_id': placeholder.id,
'plugin_id': text_plugin_1.id,
'plugin_parent': '',
'plugin_language': 'en',
'plugin_order[]': [text_plugin_1.id, text_plugin_2.id, text_plugin_3.id],
}
self.client.post(URL_CMS_PLUGIN_MOVE, data)
draft_page.publish('en')
draft_page = draft_page.reload()
live_page = draft_page.get_public_object()
placeholder = draft_page.placeholders.get(slot="col_left")
live_placeholder = live_page.placeholders.get(slot="col_left")
with self.settings(CMS_PERMISSION=False):
self.assertEqual(CMSPlugin.objects.get(pk=text_plugin_1.pk).position, 0)
self.assertEqual(CMSPlugin.objects.get(pk=text_plugin_2.pk).position, 1)
self.assertEqual(CMSPlugin.objects.get(pk=text_plugin_3.pk).position, 2)
## Finally we render the placeholder to test the actual content
rendered_placeholder = placeholder.render(self.get_context(draft_page.get_absolute_url(), page=draft_page), None)
self.assertEqual(rendered_placeholder, "I'm the firstI'm the secondI'm the third")
rendered_live_placeholder = live_placeholder.render(self.get_context(live_page.get_absolute_url(), page=live_page), None)
self.assertEqual(rendered_live_placeholder, "I'm the firstI'm the secondI'm the third")
def test_plugin_breadcrumbs(self):
"""
Test the plugin breadcrumbs order
"""
draft_page = api.create_page("home", "col_two.html", "en",
slug="page1", published=False, in_navigation=True)
placeholder = draft_page.placeholders.get(slot="col_left")
columns = api.add_plugin(placeholder, "MultiColumnPlugin", "en")
column = api.add_plugin(placeholder, "ColumnPlugin", "en", target=columns, width='10%')
text_plugin = api.add_plugin(placeholder, "TextPlugin", "en", target=column, body="I'm the second")
text_breadcrumbs = text_plugin.get_breadcrumb()
self.assertEqual(len(columns.get_breadcrumb()), 1)
self.assertEqual(len(column.get_breadcrumb()), 2)
self.assertEqual(len(text_breadcrumbs), 3)
        self.assertEqual(text_breadcrumbs[0]['title'], columns.get_plugin_class().name)
        self.assertEqual(text_breadcrumbs[1]['title'], column.get_plugin_class().name)
        self.assertEqual(text_breadcrumbs[2]['title'], text_plugin.get_plugin_class().name)
        self.assertTrue('/edit-plugin/%s/' % columns.pk in text_breadcrumbs[0]['url'])
        self.assertTrue('/edit-plugin/%s/' % column.pk in text_breadcrumbs[1]['url'])
        self.assertTrue('/edit-plugin/%s/' % text_plugin.pk in text_breadcrumbs[2]['url'])
def test_add_cancel_plugin(self):
"""
Test that you can cancel a new plugin before editing and
that the plugin is removed.
"""
# add a new text plugin
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
pk = CMSPlugin.objects.all()[0].pk
expected = {
"url": "/en/admin/cms/page/edit-plugin/%s/" % pk,
"breadcrumb": [
{
"url": "/en/admin/cms/page/edit-plugin/%s/" % pk,
"title": "Text"
}
],
'delete': '/en/admin/cms/page/delete-plugin/%s/' % pk
}
output = json.loads(response.content.decode('utf8'))
self.assertEqual(output, expected)
# now click cancel instead of editing
response = self.client.get(output['url'])
self.assertEqual(response.status_code, 200)
data = {
"body": "Hello World",
"_cancel": True,
}
response = self.client.post(output['url'], data)
self.assertEqual(response.status_code, 200)
self.assertEqual(0, Text.objects.count())
def test_extract_images_from_text(self):
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + "%s/" % CMSPlugin.objects.all()[0].pk
response = self.client.get(edit_url)
self.assertEqual(response.status_code, 200)
img_path = os.path.join(os.path.dirname(__file__), 'data', 'image.jpg')
with open(img_path, 'rb') as fobj:
img_data = base64.b64encode(fobj.read()).decode('utf-8')
body = """<p>
<img alt='' src='data:image/jpeg;base64,{data}' />
</p>""".format(data=img_data)
data = {
"body": body
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertTrue('id="plugin_obj_%s"' % (txt.pk + 1) in txt.body)
def test_add_text_plugin_empty_tag(self):
"""
        Test that a text plugin body containing an empty tag is saved unchanged
"""
# add a new text plugin
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + "%s/" % CMSPlugin.objects.all()[0].pk
response = self.client.get(edit_url)
self.assertEqual(response.status_code, 200)
data = {
"body": '<div class="someclass"></div><p>foo</p>'
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEqual('<div class="someclass"></div><p>foo</p>', txt.body)
def test_add_text_plugin_html_sanitizer(self):
"""
        Test that a script tag in a text plugin body is stored unchanged
"""
# add a new text plugin
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
plugin_data = {
'plugin_type': "TextPlugin",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + "%s/" % CMSPlugin.objects.all()[0].pk
response = self.client.get(edit_url)
self.assertEqual(response.status_code, 200)
data = {
"body": '<script>var bar="hacked"</script>'
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
txt = Text.objects.all()[0]
self.assertEqual('<script>var bar="hacked"</script>', txt.body)
def test_copy_plugins_method(self):
"""
Test that CMSPlugin copy does not have side effects
"""
# create some objects
page_en = api.create_page("CopyPluginTestPage (EN)", "nav_playground.html", "en")
page_de = api.create_page("CopyPluginTestPage (DE)", "nav_playground.html", "de")
ph_en = page_en.placeholders.get(slot="body")
ph_de = page_de.placeholders.get(slot="body")
# add the text plugin
text_plugin_en = api.add_plugin(ph_en, "TextPlugin", "en", body="Hello World")
self.assertEqual(text_plugin_en.pk, CMSPlugin.objects.all()[0].pk)
# add a *nested* link plugin
link_plugin_en = api.add_plugin(ph_en, "LinkPlugin", "en", target=text_plugin_en,
name="A Link", url="https://www.django-cms.org")
        # embed a reference to the link plugin inside the text body
text_plugin_en.body += plugin_to_tag(link_plugin_en)
text_plugin_en.save()
        # the add_plugin call above added a child, so the parent must be reloaded here.
text_plugin_en = self.reload(text_plugin_en)
# setup the plugins to copy
plugins = [text_plugin_en, link_plugin_en]
# save the old ids for check
old_ids = [plugin.pk for plugin in plugins]
new_plugins = []
plugins_ziplist = []
old_parent_cache = {}
# This is a stripped down version of cms.copy_plugins.copy_plugins_to
        # to test the copy process at a low level
for plugin in plugins:
new_plugins.append(plugin.copy_plugin(ph_de, 'de', old_parent_cache))
plugins_ziplist.append((new_plugins[-1], plugin))
for idx, plugin in enumerate(plugins):
inst, _ = new_plugins[idx].get_plugin_instance()
new_plugins[idx] = inst
new_plugins[idx].post_copy(plugin, plugins_ziplist)
for idx, plugin in enumerate(plugins):
# original plugin instance reference should stay unmodified
self.assertEqual(old_ids[idx], plugin.pk)
# new plugin instance should be different from the original
self.assertNotEqual(new_plugins[idx], plugin.pk)
# text plugins (both old and new) should contain a reference
# to the link plugins
if plugin.plugin_type == 'TextPlugin':
self.assertTrue('link.png' in plugin.body)
self.assertTrue('plugin_obj_%s' % plugin.get_children()[0].pk in plugin.body)
self.assertTrue('link.png' in new_plugins[idx].body)
self.assertTrue('plugin_obj_%s' % new_plugins[idx].get_children()[0].pk in new_plugins[idx].body)
def test_plugin_position(self):
page_en = api.create_page("CopyPluginTestPage (EN)", "nav_playground.html", "en")
placeholder = page_en.placeholders.get(slot="body") # ID 2
placeholder_right = page_en.placeholders.get(slot="right-column")
columns = api.add_plugin(placeholder, "MultiColumnPlugin", "en") # ID 1
column_1 = api.add_plugin(placeholder, "ColumnPlugin", "en", target=columns, width='10%') # ID 2
column_2 = api.add_plugin(placeholder, "ColumnPlugin", "en", target=columns, width='30%') # ID 3
first_text_plugin = api.add_plugin(placeholder, "TextPlugin", "en", target=column_1, body="I'm the first") # ID 4
text_plugin = api.add_plugin(placeholder, "TextPlugin", "en", target=column_1, body="I'm the second") # ID 5
returned_1 = copy_plugins_to([text_plugin], placeholder, 'en', column_1.pk) # ID 6
returned_2 = copy_plugins_to([text_plugin], placeholder_right, 'en') # ID 7
returned_3 = copy_plugins_to([text_plugin], placeholder, 'en', column_2.pk) # ID 8
# STATE AT THIS POINT:
# placeholder
# - columns
# - column_1
# - text_plugin "I'm the first" created here
# - text_plugin "I'm the second" created here
# - text_plugin "I'm the second" (returned_1) copied here
# - column_2
# - text_plugin "I'm the second" (returned_3) copied here
# placeholder_right
# - text_plugin "I'm the second" (returned_2) copied here
# First plugin in the plugin branch
self.assertEqual(first_text_plugin.position, 0)
# Second plugin in the plugin branch
self.assertEqual(text_plugin.position, 1)
# Added as third plugin in the same branch as the above
self.assertEqual(returned_1[0][0].position, 2)
# First plugin in a placeholder
self.assertEqual(returned_2[0][0].position, 0)
# First plugin nested in a plugin
self.assertEqual(returned_3[0][0].position, 0)
def test_copy_plugins(self):
"""
Test that copying plugins works as expected.
"""
# create some objects
page_en = api.create_page("CopyPluginTestPage (EN)", "nav_playground.html", "en")
page_de = api.create_page("CopyPluginTestPage (DE)", "nav_playground.html", "de")
ph_en = page_en.placeholders.get(slot="body")
ph_de = page_de.placeholders.get(slot="body")
# add the text plugin
text_plugin_en = api.add_plugin(ph_en, "TextPlugin", "en", body="Hello World")
self.assertEqual(text_plugin_en.pk, CMSPlugin.objects.all()[0].pk)
# add a *nested* link plugin
link_plugin_en = api.add_plugin(ph_en, "LinkPlugin", "en", target=text_plugin_en,
name="A Link", url="https://www.django-cms.org")
        # the add_plugin call above added a child, so the parent must be reloaded here.
text_plugin_en = self.reload(text_plugin_en)
# check the relations
self.assertEqual(text_plugin_en.get_children().count(), 1)
self.assertEqual(link_plugin_en.parent.pk, text_plugin_en.pk)
# just sanity check that so far everything went well
self.assertEqual(CMSPlugin.objects.count(), 2)
# copy the plugins to the german placeholder
copy_plugins_to(ph_en.get_plugins(), ph_de, 'de')
self.assertEqual(ph_de.cmsplugin_set.filter(parent=None).count(), 1)
text_plugin_de = ph_de.cmsplugin_set.get(parent=None).get_plugin_instance()[0]
self.assertEqual(text_plugin_de.get_children().count(), 1)
link_plugin_de = text_plugin_de.get_children().get().get_plugin_instance()[0]
# check we have twice as many plugins as before
self.assertEqual(CMSPlugin.objects.count(), 4)
# check language plugins
self.assertEqual(CMSPlugin.objects.filter(language='de').count(), 2)
self.assertEqual(CMSPlugin.objects.filter(language='en').count(), 2)
text_plugin_en = self.reload(text_plugin_en)
link_plugin_en = self.reload(link_plugin_en)
# check the relations in english didn't change
self.assertEqual(text_plugin_en.get_children().count(), 1)
self.assertEqual(link_plugin_en.parent.pk, text_plugin_en.pk)
self.assertEqual(link_plugin_de.name, link_plugin_en.name)
self.assertEqual(link_plugin_de.url, link_plugin_en.url)
self.assertEqual(text_plugin_de.body, text_plugin_en.body)
# test subplugin copy
copy_plugins_to([link_plugin_en], ph_de, 'de')
def test_deep_copy_plugins(self):
page_en = api.create_page("CopyPluginTestPage (EN)", "nav_playground.html", "en")
page_de = api.create_page("CopyPluginTestPage (DE)", "nav_playground.html", "de")
ph_en = page_en.placeholders.get(slot="body")
ph_de = page_de.placeholders.get(slot="body")
# add the text plugin
mcol1 = api.add_plugin(ph_en, "MultiColumnPlugin", "en", position="first-child")
mcol2 = api.add_plugin(ph_en, "MultiColumnPlugin", "en", position="first-child")
mcol1 = self.reload(mcol1)
col1 = api.add_plugin(ph_en, "ColumnPlugin", "en", position="first-child", target=mcol1)
mcol1 = self.reload(mcol1)
col2 = api.add_plugin(ph_en, "ColumnPlugin", "en", position="first-child", target=mcol1)
mcol2 = self.reload(mcol2)
col3 = api.add_plugin(ph_en, "ColumnPlugin", "en", position="first-child", target=mcol2)
mcol2 = self.reload(mcol2)
api.add_plugin(ph_en, "ColumnPlugin", "en", position="first-child", target=mcol2)
mcol1 = api.add_plugin(ph_de, "MultiColumnPlugin", "de", position="first-child")
# add a *nested* link plugin
mcol1 = self.reload(mcol1)
mcol2 = self.reload(mcol2)
col3 = self.reload(col3)
col2 = self.reload(col2)
col1 = self.reload(col1)
link_plugin_en = api.add_plugin(ph_en, "LinkPlugin", "en", target=col2,
name="A Link", url="https://www.django-cms.org")
mcol1 = self.reload(mcol1)
mcol2 = self.reload(mcol2)
col3 = self.reload(col3)
col2 = self.reload(col2)
col1 = self.reload(col1)
copy_plugins_to([col2, link_plugin_en], ph_de, 'de', mcol1.pk)
mcol1 = self.reload(mcol1)
mcol2 = self.reload(mcol2)
self.reload(col3)
self.reload(col2)
self.reload(col1)
self.reload(link_plugin_en)
mcol1 = self.reload(mcol1)
self.assertEqual(mcol1.get_descendants().count(), 2)
with self.assertNumQueries(FuzzyInt(0, 207)):
page_en.publish('en')
def test_plugin_validation(self):
self.assertRaises(ImproperlyConfigured, plugin_pool.register_plugin, NonExisitngRenderTemplate)
self.assertRaises(ImproperlyConfigured, plugin_pool.register_plugin, NoRender)
self.assertRaises(ImproperlyConfigured, plugin_pool.register_plugin, NoRenderButChildren)
plugin_pool.register_plugin(DynTemplate)
def test_remove_plugin_before_published(self):
"""
When removing a draft plugin we would expect the public copy of the plugin to also be removed
"""
# add a page
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
self.assertEqual(self.get_response_pk(response), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEqual(CMSPlugin.objects.all().count(), 1)
# delete the plugin
plugin_data = {
'plugin_id': self.get_response_pk(response)
}
remove_url = URL_CMS_PLUGIN_REMOVE + "%s/" % self.get_response_pk(response)
response = self.client.post(remove_url, plugin_data)
self.assertEqual(response.status_code, 302)
# there should be no plugins
self.assertEqual(0, CMSPlugin.objects.all().count())
def test_remove_plugin_after_published(self):
# add a page
api.create_page("home", "nav_playground.html", "en")
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
plugin_id = self.get_response_pk(response)
self.assertEqual(response.status_code, 200)
self.assertEqual(self.get_response_pk(response), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEqual(CMSPlugin.objects.all().count(), 1)
self.assertEqual(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count(), 1)
# publish page
response = self.client.post(URL_CMS_PAGE + "%d/en/publish/" % page.pk, {1: 1})
self.assertEqual(response.status_code, 302)
self.assertEqual(Page.objects.count(), 3)
# there should now be two plugins - 1 draft, 1 public
self.assertEqual(CMSPlugin.objects.all().count(), 2)
# delete the plugin
plugin_data = {
'plugin_id': plugin_id
}
remove_url = URL_CMS_PLUGIN_REMOVE + "%s/" % plugin_id
response = self.client.post(remove_url, plugin_data)
self.assertEqual(response.status_code, 302)
# there should be no plugins
self.assertEqual(CMSPlugin.objects.all().count(), 1)
self.assertEqual(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=False).count(), 1)
def test_remove_plugin_not_associated_to_page(self):
"""
        Test case for PlaceholderField: adding a plugin to a placeholder
        that is not attached to a page is rejected
"""
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# add a plugin
plugin_data = {
'plugin_type': "TextPlugin",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
self.assertEqual(self.get_response_pk(response), CMSPlugin.objects.all()[0].pk)
# there should be only 1 plugin
self.assertEqual(CMSPlugin.objects.all().count(), 1)
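        # create a standalone placeholder that is not attached to any page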
ph = Placeholder(slot="subplugin")
ph.save()
plugin_data = {
'plugin_type': "TextPlugin",
'language': settings.LANGUAGES[0][0],
'placeholder': ph.pk,
'parent': self.get_response_pk(response)
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
# no longer allowed for security reasons
self.assertEqual(response.status_code, 404)
def test_register_plugin_twice_should_raise(self):
number_of_plugins_before = len(plugin_pool.get_all_plugins())
        # The first time we register the plugin it should work
        plugin_pool.register_plugin(DumbFixturePlugin)
        # Let's add it a second time. We should catch an exception
raised = False
try:
plugin_pool.register_plugin(DumbFixturePlugin)
except PluginAlreadyRegistered:
raised = True
self.assertTrue(raised)
# Let's also unregister the plugin now, and assert it's not in the
# pool anymore
plugin_pool.unregister_plugin(DumbFixturePlugin)
# Let's make sure we have the same number of plugins as before:
number_of_plugins_after = len(plugin_pool.get_all_plugins())
self.assertEqual(number_of_plugins_before, number_of_plugins_after)
def test_unregister_non_existing_plugin_should_raise(self):
number_of_plugins_before = len(plugin_pool.get_all_plugins())
raised = False
try:
            # There should not be such a plugin registered if the other
            # tests don't leak plugins
plugin_pool.unregister_plugin(DumbFixturePlugin)
except PluginNotRegistered:
raised = True
self.assertTrue(raised)
# Let's count, to make sure we didn't remove a plugin accidentally.
number_of_plugins_after = len(plugin_pool.get_all_plugins())
self.assertEqual(number_of_plugins_before, number_of_plugins_after)
def test_inheritplugin_media(self):
"""
Test case for InheritPagePlaceholder
"""
inheritfrompage = api.create_page('page to inherit from',
'nav_playground.html',
'en')
body = inheritfrompage.placeholders.get(slot="body")
plugin = GoogleMap(
plugin_type='GoogleMapPlugin',
placeholder=body,
position=1,
language=settings.LANGUAGE_CODE,
address="Riedtlistrasse 16",
zipcode="8006",
city="Zurich",
)
plugin.add_root(instance=plugin)
inheritfrompage.publish('en')
page = api.create_page('inherit from page',
'nav_playground.html',
'en',
published=True)
inherited_body = page.placeholders.get(slot="body")
inherit_plugin = InheritPagePlaceholder(
plugin_type='InheritPagePlaceholderPlugin',
placeholder=inherited_body,
position=1,
language=settings.LANGUAGE_CODE,
from_page=inheritfrompage,
from_language=settings.LANGUAGE_CODE)
inherit_plugin.add_root(instance=inherit_plugin)
page.publish('en')
self.client.logout()
cache.clear()
response = self.client.get(page.get_absolute_url())
self.assertTrue(
            'https://maps-api-ssl.google.com/maps/api/js' in response.content.decode('utf8').replace("&amp;", "&"))
def test_inherit_plugin_with_empty_plugin(self):
inheritfrompage = api.create_page('page to inherit from',
'nav_playground.html',
'en', published=True)
body = inheritfrompage.placeholders.get(slot="body")
empty_plugin = CMSPlugin(
plugin_type='TextPlugin', # create an empty plugin
placeholder=body,
position=1,
language='en',
)
empty_plugin.add_root(instance=empty_plugin)
other_page = api.create_page('other page', 'nav_playground.html', 'en', published=True)
inherited_body = other_page.placeholders.get(slot="body")
api.add_plugin(inherited_body, InheritPagePlaceholderPlugin, 'en', position='last-child',
from_page=inheritfrompage, from_language='en')
api.add_plugin(inherited_body, "TextPlugin", "en", body="foobar")
        # this should not fail, even if there is an empty plugin
rendered = inherited_body.render(context=self.get_context(other_page.get_absolute_url(), page=other_page), width=200)
self.assertIn("foobar", rendered)
def test_render_textplugin(self):
# Setup
page = api.create_page("render test", "nav_playground.html", "en")
ph = page.placeholders.get(slot="body")
text_plugin = api.add_plugin(ph, "TextPlugin", "en", body="Hello World")
link_plugins = []
for i in range(0, 10):
text_plugin = Text.objects.get(pk=text_plugin.pk)
link_plugins.append(api.add_plugin(ph, "LinkPlugin", "en",
target=text_plugin,
name="A Link %d" % i,
url="http://django-cms.org"))
text_plugin.text.body += '<img src="/static/cms/img/icons/plugins/link.png" alt="Link - %s" id="plugin_obj_%d" title="Link - %s" />' % (
link_plugins[-1].name,
link_plugins[-1].pk,
link_plugins[-1].name,
)
text_plugin.save()
txt = text_plugin.text
ph = Placeholder.objects.get(pk=ph.pk)
txt.body = '\n'.join(['<img id="plugin_obj_%d" src=""/>' % l.cmsplugin_ptr_id for l in link_plugins])
txt.save()
text_plugin = self.reload(text_plugin)
with self.assertNumQueries(2):
rendered = text_plugin.render_plugin(placeholder=ph)
for i in range(0, 10):
self.assertTrue('A Link %d' % i in rendered)
def test_copy_textplugin(self):
"""
Test that copying of textplugins replaces references to copied plugins
"""
page = api.create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
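        # build the plugin tree by hand via treebeard's add_root/add_child
        # instead of going through api.add_plugin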
plugin_base = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=0,
language=self.FIRST_LANG)
plugin_base = plugin_base.add_root(instance=plugin_base)
plugin = Text(body='')
plugin_base.set_base_attr(plugin)
plugin.save()
plugin_ref_1_base = CMSPlugin(
plugin_type='EmptyPlugin',
placeholder=placeholder,
position=0,
language=self.FIRST_LANG)
plugin_ref_1_base = plugin_base.add_child(instance=plugin_ref_1_base)
plugin_ref_2_base = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin_ref_2_base = plugin_base.add_child(instance=plugin_ref_2_base)
plugin_ref_2 = Text(body='')
plugin_ref_2_base.set_base_attr(plugin_ref_2)
plugin_ref_2.save()
plugin.body = ' <img id="plugin_obj_%s" src=""/><img id="plugin_obj_%s" src=""/>' % (
str(plugin_ref_1_base.pk), str(plugin_ref_2.pk))
plugin.save()
page_data = self.get_new_page_data()
        # create 2nd language page
page_data.update({
'language': self.SECOND_LANG,
'title': "%s %s" % (page.get_title(), self.SECOND_LANG),
})
response = self.client.post(URL_CMS_PAGE_CHANGE % page.pk + "?language=%s" % self.SECOND_LANG, page_data)
self.assertRedirects(response, URL_CMS_PAGE)
self.assertEqual(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 3)
self.assertEqual(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 0)
self.assertEqual(CMSPlugin.objects.count(), 3)
self.assertEqual(Page.objects.all().count(), 1)
copy_data = {
'source_placeholder_id': placeholder.pk,
'target_placeholder_id': placeholder.pk,
'target_language': self.SECOND_LANG,
'source_language': self.FIRST_LANG,
}
response = self.client.post(URL_CMS_PAGE + "copy-plugins/", copy_data)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode('utf8').count('"position":'), 3)
# assert copy success
self.assertEqual(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 3)
self.assertEqual(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 3)
self.assertEqual(CMSPlugin.objects.count(), 6)
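        # the default queryset ordering follows the tree path, so the first
        # three entries are the originals and the last three the copies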
plugins = list(CMSPlugin.objects.all())
new_plugin = plugins[3].get_plugin_instance()[0]
idlist = sorted(plugin_tags_to_id_list(new_plugin.body))
expected = sorted([plugins[4].pk, plugins[5].pk])
self.assertEqual(idlist, expected)
def test_search_pages(self):
"""
Test search for pages
"""
page = api.create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
text = Text(body="hello", language="en", placeholder=placeholder, plugin_type="TextPlugin", position=1)
text.save()
page.publish('en')
pages = Page.objects.search("hi")
self.assertEqual(pages.count(), 0)
self.assertEqual(Page.objects.search("hello").count(),1)
def test_empty_plugin_is_not_ignored(self):
page = api.create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
plugin = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin.add_root(instance=plugin)
# this should not raise any errors, but just ignore the empty plugin
out = placeholder.render(self.get_context(), width=300)
self.assertFalse(len(out))
self.assertTrue(len(placeholder._plugins_cache))
    def test_defer_pickle(self):
page = api.create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
api.add_plugin(placeholder, "TextPlugin", 'en', body="Hello World")
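        # defer() produces a dynamically created model subclass; this checks
        # that such deferred instances can still be pickled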
plugins = Text.objects.all().defer('path')
import pickle
import io
a = io.BytesIO()
pickle.dump(plugins[0], a)
def test_empty_plugin_description(self):
page = api.create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
a = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG
)
self.assertEqual(a.get_short_description(), "<Empty>")
def test_page_attribute_warns(self):
page = api.create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
a = CMSPlugin(
plugin_type='TextPlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG
)
a.save()
def get_page(plugin):
return plugin.page
self.assertWarns(
DontUsePageAttributeWarning,
"Don't use the page attribute on CMSPlugins! CMSPlugins are not guaranteed to have a page associated with them!",
get_page, a
)
def test_set_translatable_content(self):
a = Text(body="hello")
self.assertTrue(a.set_translatable_content({'body': 'world'}))
b = Link(name="hello")
self.assertTrue(b.set_translatable_content({'name': 'world'}))
def test_editing_plugin_changes_page_modification_time_in_sitemap(self):
now = timezone.now()
one_day_ago = now - datetime.timedelta(days=1)
page = api.create_page("page", "nav_playground.html", "en", published=True)
title = page.get_title_obj('en')
page.creation_date = one_day_ago
page.changed_date = one_day_ago
plugin_id = self._create_text_plugin_on_page(page)
plugin = self._edit_text_plugin(plugin_id, "fnord")
actual_last_modification_time = CMSSitemap().lastmod(title)
actual_last_modification_time -= datetime.timedelta(microseconds=actual_last_modification_time.microsecond)
self.assertEqual(plugin.changed_date.date(), actual_last_modification_time.date())
def test_moving_plugin_to_different_placeholder(self):
plugin_pool.register_plugin(DumbFixturePlugin)
page = api.create_page("page", "nav_playground.html", "en", published=True)
plugin_data = {
'plugin_type': 'DumbFixturePlugin',
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot='body').pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
plugin_data['plugin_parent'] = self.get_response_pk(response)
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
post = {
'plugin_id': self.get_response_pk(response),
'placeholder_id': page.placeholders.get(slot='right-column').pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_MOVE, post)
self.assertEqual(response.status_code, 200)
from cms.utils.plugins import build_plugin_tree
build_plugin_tree(page.placeholders.get(slot='right-column').get_plugins_list())
plugin_pool.unregister_plugin(DumbFixturePlugin)
def test_get_plugins_for_page(self):
page_en = api.create_page("PluginOrderPage", "col_two.html", "en",
slug="page1", published=True, in_navigation=True)
ph_en = page_en.placeholders.get(slot="col_left")
text_plugin_1 = api.add_plugin(ph_en, "TextPlugin", "en", body="I'm inside an existing placeholder.")
# This placeholder is not in the template.
ph_en_not_used = page_en.placeholders.create(slot="not_used")
text_plugin_2 = api.add_plugin(ph_en_not_used, "TextPlugin", "en", body="I'm inside a non-existent placeholder.")
page_plugins = get_plugins_for_page(None, page_en, page_en.get_title_obj_attribute('language'))
db_text_plugin_1 = page_plugins.get(pk=text_plugin_1.pk)
self.assertRaises(CMSPlugin.DoesNotExist, page_plugins.get, pk=text_plugin_2.pk)
self.assertEqual(db_text_plugin_1.pk, text_plugin_1.pk)
def test_plugin_move_with_reload(self):
action_options = {
PLUGIN_MOVE_ACTION: {
'requires_reload': True
},
PLUGIN_COPY_ACTION: {
'requires_reload': True
},
}
non_reload_action_options = {
PLUGIN_MOVE_ACTION: {
'requires_reload': False
},
PLUGIN_COPY_ACTION: {
'requires_reload': False
},
}
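        # build two throw-away plugin classes whose action_options control
        # whether the frontend must reload after a move/copy action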
ReloadDrivenPlugin = type('ReloadDrivenPlugin', (CMSPluginBase,), dict(action_options=action_options, render_plugin=False))
NonReloadDrivenPlugin = type('NonReloadDrivenPlugin', (CMSPluginBase,), dict(action_options=non_reload_action_options, render_plugin=False))
plugin_pool.register_plugin(ReloadDrivenPlugin)
plugin_pool.register_plugin(NonReloadDrivenPlugin)
page = api.create_page("page", "nav_playground.html", "en", published=True)
source_placeholder = page.placeholders.get(slot='body')
target_placeholder = page.placeholders.get(slot='right-column')
reload_expected = {'reload': True}
no_reload_expected = {'reload': False}
plugin_1 = api.add_plugin(source_placeholder, ReloadDrivenPlugin, settings.LANGUAGES[0][0])
plugin_2 = api.add_plugin(source_placeholder, NonReloadDrivenPlugin, settings.LANGUAGES[0][0])
# Test Plugin reload == True on Move
post = {
'plugin_id': plugin_1.pk,
'placeholder_id': target_placeholder.pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_MOVE, post)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf8')), reload_expected)
# Test Plugin reload == False on Move
post = {
'plugin_id': plugin_2.pk,
'placeholder_id': target_placeholder.pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_MOVE, post)
self.assertEqual(response.status_code, 200)
self.assertEqual(json.loads(response.content.decode('utf8')), no_reload_expected)
plugin_pool.unregister_plugin(ReloadDrivenPlugin)
plugin_pool.unregister_plugin(NonReloadDrivenPlugin)
def test_plugin_copy_with_reload(self):
action_options = {
PLUGIN_MOVE_ACTION: {
'requires_reload': True
},
PLUGIN_COPY_ACTION: {
'requires_reload': True
},
}
non_reload_action_options = {
PLUGIN_MOVE_ACTION: {
'requires_reload': False
},
PLUGIN_COPY_ACTION: {
'requires_reload': False
},
}
ReloadDrivenPlugin = type('ReloadDrivenPlugin', (CMSPluginBase,), dict(action_options=action_options, render_plugin=False))
NonReloadDrivenPlugin = type('NonReloadDrivenPlugin', (CMSPluginBase,), dict(action_options=non_reload_action_options, render_plugin=False))
plugin_pool.register_plugin(ReloadDrivenPlugin)
plugin_pool.register_plugin(NonReloadDrivenPlugin)
page = api.create_page("page", "nav_playground.html", "en", published=True)
source_placeholder = page.placeholders.get(slot='body')
target_placeholder = page.placeholders.get(slot='right-column')
api.add_plugin(source_placeholder, ReloadDrivenPlugin, settings.LANGUAGES[0][0])
plugin_2 = api.add_plugin(source_placeholder, NonReloadDrivenPlugin, settings.LANGUAGES[0][0])
# Test Plugin reload == True on Copy
copy_data = {
'source_placeholder_id': source_placeholder.pk,
'target_placeholder_id': target_placeholder.pk,
'target_language': settings.LANGUAGES[0][0],
'source_language': settings.LANGUAGES[0][0],
}
response = self.client.post(URL_CMS_PAGE + "copy-plugins/", copy_data)
self.assertEqual(response.status_code, 200)
json_response = json.loads(response.content.decode('utf8'))
self.assertEqual(json_response['reload'], True)
# Test Plugin reload == False on Copy
copy_data = {
'source_placeholder_id': source_placeholder.pk,
'source_plugin_id': plugin_2.pk,
'target_placeholder_id': target_placeholder.pk,
'target_language': settings.LANGUAGES[0][0],
'source_language': settings.LANGUAGES[0][0],
}
response = self.client.post(URL_CMS_PAGE + "copy-plugins/", copy_data)
self.assertEqual(response.status_code, 200)
json_response = json.loads(response.content.decode('utf8'))
self.assertEqual(json_response['reload'], False)
plugin_pool.unregister_plugin(ReloadDrivenPlugin)
plugin_pool.unregister_plugin(NonReloadDrivenPlugin)
def test_custom_plugin_urls(self):
plugin_url = urlresolvers.reverse('admin:dumbfixtureplugin')
response = self.client.get(plugin_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b"It works")
def test_plugin_require_parent(self):
"""
Assert that a plugin marked as 'require_parent' is not listed
in the plugin pool when a placeholder is specified
"""
ParentRequiredPlugin = type('ParentRequiredPlugin', (CMSPluginBase,),
dict(require_parent=True, render_plugin=False))
plugin_pool.register_plugin(ParentRequiredPlugin)
page = api.create_page("page", "nav_playground.html", "en", published=True)
placeholder = page.placeholders.get(slot='body')
plugin_list = plugin_pool.get_all_plugins(placeholder=placeholder, page=page)
self.assertFalse(ParentRequiredPlugin in plugin_list)
plugin_pool.unregister_plugin(ParentRequiredPlugin)
def test_plugin_parent_classes(self):
"""
Assert that a plugin with a list of parent classes only appears in the
toolbar plugin struct for those given parent Plugins
"""
ParentClassesPlugin = type('ParentClassesPlugin', (CMSPluginBase,),
dict(parent_classes=['GenericParentPlugin'], render_plugin=False))
GenericParentPlugin = type('GenericParentPlugin', (CMSPluginBase,), {'render_plugin':False})
KidnapperPlugin = type('KidnapperPlugin', (CMSPluginBase,), {'render_plugin':False})
expected_struct = {'module': u'Generic',
'name': u'Parent Classes Plugin',
'value': 'ParentClassesPlugin'}
for plugin in [ParentClassesPlugin, GenericParentPlugin, KidnapperPlugin]:
plugin_pool.register_plugin(plugin)
page = api.create_page("page", "nav_playground.html", "en", published=True)
placeholder = page.placeholders.get(slot='body')
from cms.utils.placeholder import get_toolbar_plugin_struct
toolbar_struct = get_toolbar_plugin_struct([ParentClassesPlugin],
placeholder.slot,
page,
parent=GenericParentPlugin)
self.assertTrue(expected_struct in toolbar_struct)
toolbar_struct = get_toolbar_plugin_struct([ParentClassesPlugin],
placeholder.slot,
page,
parent=KidnapperPlugin)
self.assertFalse(expected_struct in toolbar_struct)
toolbar_struct = get_toolbar_plugin_struct([ParentClassesPlugin, GenericParentPlugin],
placeholder.slot,
page)
expected_struct = {'module': u'Generic',
'name': u'Generic Parent Plugin',
'value': 'GenericParentPlugin'}
self.assertTrue(expected_struct in toolbar_struct)
for plugin in [ParentClassesPlugin, GenericParentPlugin, KidnapperPlugin]:
plugin_pool.unregister_plugin(plugin)
def test_plugin_child_classes_from_settings(self):
page = api.create_page("page", "nav_playground.html", "en", published=True)
placeholder = page.placeholders.get(slot='body')
ChildClassesPlugin = type('ChildClassesPlugin', (CMSPluginBase,),
dict(child_classes=['TextPlugin'], render_template='allow_children_plugin.html'))
plugin_pool.register_plugin(ChildClassesPlugin)
plugin = api.add_plugin(placeholder, ChildClassesPlugin, settings.LANGUAGES[0][0])
plugin = plugin.get_plugin_class_instance()
## assert baseline
self.assertEqual(['TextPlugin'], plugin.get_child_classes(placeholder.slot, page))
CMS_PLACEHOLDER_CONF = {
'body': {
'child_classes': {
'ChildClassesPlugin': ['LinkPlugin', 'PicturePlugin'],
}
}
}
with self.settings(CMS_PLACEHOLDER_CONF=CMS_PLACEHOLDER_CONF):
self.assertEqual(['LinkPlugin', 'PicturePlugin'],
plugin.get_child_classes(placeholder.slot, page))
plugin_pool.unregister_plugin(ChildClassesPlugin)
def test_plugin_parent_classes_from_settings(self):
page = api.create_page("page", "nav_playground.html", "en", published=True)
placeholder = page.placeholders.get(slot='body')
ParentClassesPlugin = type('ParentClassesPlugin', (CMSPluginBase,),
dict(parent_classes=['TextPlugin'], render_plugin=False))
plugin_pool.register_plugin(ParentClassesPlugin)
plugin = api.add_plugin(placeholder, ParentClassesPlugin, settings.LANGUAGES[0][0])
plugin = plugin.get_plugin_class_instance()
## assert baseline
self.assertEqual(['TextPlugin'], plugin.get_parent_classes(placeholder.slot, page))
CMS_PLACEHOLDER_CONF = {
'body': {
'parent_classes': {
'ParentClassesPlugin': ['TestPlugin'],
}
}
}
with self.settings(CMS_PLACEHOLDER_CONF=CMS_PLACEHOLDER_CONF):
self.assertEqual(['TestPlugin'],
plugin.get_parent_classes(placeholder.slot, page))
plugin_pool.unregister_plugin(ParentClassesPlugin)
def test_plugin_translatable_content_getter_setter(self):
"""
        Test the get_translatable_content/set_translatable_content round trip
        on a text plugin
"""
# add a new text plugin
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
created_plugin_id = self._create_text_plugin_on_page(page)
# now edit the plugin
plugin = self._edit_text_plugin(created_plugin_id, "Hello World")
self.assertEqual("Hello World", plugin.body)
# see if the getter works
self.assertEqual({'body': "Hello World"}, plugin.get_translatable_content())
# change the content
self.assertEqual(True, plugin.set_translatable_content({'body': "It works!"}))
# check if it changed
self.assertEqual("It works!", plugin.body)
# double check through the getter
self.assertEqual({'body': "It works!"}, plugin.get_translatable_content())
def test_plugin_pool_register_returns_plugin_class(self):
@plugin_pool.register_plugin
class DecoratorTestPlugin(CMSPluginBase):
render_plugin = False
name = "Test Plugin"
self.assertIsNotNone(DecoratorTestPlugin)
class FileSystemPluginTests(PluginsTestBaseCase):
def setUp(self):
super(FileSystemPluginTests, self).setUp()
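        # symlink (link=True) static files into STATIC_ROOT so the file
        # plugin tests can resolve icon URLs; tearDown removes them again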
call_command('collectstatic', interactive=False, verbosity=0, link=True)
def tearDown(self):
for directory in [settings.STATIC_ROOT, settings.MEDIA_ROOT]:
for root, dirs, files in os.walk(directory, topdown=False):
                # We need to walk() the directory tree since rmdir() does not
                # allow removing non-empty directories...
for name in files:
# Start by killing all files we walked
os.remove(os.path.join(root, name))
for name in dirs:
# Now all directories we walked...
os.rmdir(os.path.join(root, name))
super(FileSystemPluginTests, self).tearDown()
def test_fileplugin_icon_uppercase(self):
page = api.create_page('testpage', 'nav_playground.html', 'en')
body = page.placeholders.get(slot="body")
plugin = File(
plugin_type='FilePlugin',
placeholder=body,
position=1,
language=settings.LANGUAGE_CODE,
)
plugin.file.save("UPPERCASE.JPG", SimpleUploadedFile("UPPERCASE.jpg", b"content"), False)
plugin.add_root(instance=plugin)
        self.assertNotEqual(plugin.get_icon_url().find('jpg'), -1)
class PluginManyToManyTestCase(PluginsTestBaseCase):
def setUp(self):
self.super_user = self._create_user("test", True, True)
self.slave = self._create_user("slave", True)
self._login_context = self.login_user_context(self.super_user)
self._login_context.__enter__()
# create 3 sections
self.sections = []
self.section_pks = []
for i in range(3):
section = Section.objects.create(name="section %s" % i)
self.sections.append(section)
self.section_pks.append(section.pk)
self.section_count = len(self.sections)
        # create 10 articles per section
for section in self.sections:
for j in range(10):
Article.objects.create(
title="article %s" % j,
section=section
)
self.FIRST_LANG = settings.LANGUAGES[0][0]
self.SECOND_LANG = settings.LANGUAGES[1][0]
def test_dynamic_plugin_template(self):
page_en = api.create_page("CopyPluginTestPage (EN)", "nav_playground.html", "en")
ph_en = page_en.placeholders.get(slot="body")
api.add_plugin(ph_en, "ArticleDynamicTemplatePlugin", "en", title="a title")
api.add_plugin(ph_en, "ArticleDynamicTemplatePlugin", "en", title="custom template")
request = self.get_request(path=page_en.get_absolute_url())
plugins = get_plugins(request, ph_en, page_en.template)
for plugin in plugins:
if plugin.title == 'custom template':
self.assertEqual(plugin.get_plugin_class_instance().get_render_template({}, plugin, ph_en), 'articles_custom.html')
self.assertTrue('Articles Custom template' in plugin.render_plugin({}, ph_en))
else:
self.assertEqual(plugin.get_plugin_class_instance().get_render_template({}, plugin, ph_en), 'articles.html')
self.assertFalse('Articles Custom template' in plugin.render_plugin({}, ph_en))
def test_add_plugin_with_m2m(self):
# add a new text plugin
self.assertEqual(ArticlePluginModel.objects.count(), 0)
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
page.publish('en')
placeholder = page.placeholders.get(slot="body")
plugin_data = {
'plugin_type': "ArticlePlugin",
'plugin_language': self.FIRST_LANG,
'plugin_parent': '',
'placeholder_id': placeholder.pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
pk = CMSPlugin.objects.all()[0].pk
expected = {
"url": "/en/admin/cms/page/edit-plugin/%s/" % pk,
"breadcrumb": [
{
"url": "/en/admin/cms/page/edit-plugin/%s/" % pk,
"title": "Articles"
}
],
'delete': '/en/admin/cms/page/delete-plugin/%s/' % pk
}
self.assertEqual(json.loads(response.content.decode('utf8')), expected)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + str(CMSPlugin.objects.all()[0].pk) + "/"
response = self.client.get(edit_url)
self.assertEqual(response.status_code, 200)
data = {
'title': "Articles Plugin 1",
"sections": self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
self.assertEqual(ArticlePluginModel.objects.count(), 1)
plugin = ArticlePluginModel.objects.all()[0]
self.assertEqual(self.section_count, plugin.sections.count())
response = self.client.get('/en/?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
self.assertEqual(response.status_code, 200)
self.assertEqual(plugin.sections.through._meta.db_table, 'manytomany_rel_articlepluginmodel_sections')
def test_add_plugin_with_m2m_and_publisher(self):
self.assertEqual(ArticlePluginModel.objects.count(), 0)
page_data = self.get_new_page_data()
response = self.client.post(URL_CMS_PAGE_ADD, page_data)
self.assertEqual(response.status_code, 302)
page = Page.objects.all()[0]
placeholder = page.placeholders.get(slot="body")
# add a plugin
plugin_data = {
'plugin_type': "ArticlePlugin",
'plugin_language': self.FIRST_LANG,
'plugin_parent': '',
'placeholder_id': placeholder.pk,
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
pk = CMSPlugin.objects.all()[0].pk
expected = {
"url": "/en/admin/cms/page/edit-plugin/%s/" % pk,
"breadcrumb": [
{
"url": "/en/admin/cms/page/edit-plugin/%s/" % pk,
"title": "Articles"
}
],
'delete': '/en/admin/cms/page/delete-plugin/%s/' % pk
}
self.assertEqual(json.loads(response.content.decode('utf8')), expected)
# there should be only 1 plugin
self.assertEqual(1, CMSPlugin.objects.all().count())
articles_plugin_pk = CMSPlugin.objects.all()[0].pk
self.assertEqual(articles_plugin_pk, CMSPlugin.objects.all()[0].pk)
# now edit the plugin
edit_url = URL_CMS_PLUGIN_EDIT + str(CMSPlugin.objects.all()[0].pk) + "/"
data = {
'title': "Articles Plugin 1",
'sections': self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
self.assertEqual(1, ArticlePluginModel.objects.count())
articles_plugin = ArticlePluginModel.objects.all()[0]
self.assertEqual(u'Articles Plugin 1', articles_plugin.title)
self.assertEqual(self.section_count, articles_plugin.sections.count())
# check publish box
page = api.publish_page(page, self.super_user, 'en')
# there should now be two plugins - 1 draft, 1 public
self.assertEqual(2, CMSPlugin.objects.all().count())
self.assertEqual(2, ArticlePluginModel.objects.all().count())
db_counts = [plugin.sections.count() for plugin in ArticlePluginModel.objects.all()]
expected = [self.section_count for i in range(len(db_counts))]
self.assertEqual(expected, db_counts)
def test_copy_plugin_with_m2m(self):
page = api.create_page("page", "nav_playground.html", "en")
placeholder = page.placeholders.get(slot='body')
plugin = ArticlePluginModel(
plugin_type='ArticlePlugin',
placeholder=placeholder,
position=1,
language=self.FIRST_LANG)
plugin.add_root(instance=plugin)
edit_url = URL_CMS_PLUGIN_EDIT + str(plugin.pk) + "/"
data = {
'title': "Articles Plugin 1",
"sections": self.section_pks
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
self.assertEqual(ArticlePluginModel.objects.count(), 1)
self.assertEqual(ArticlePluginModel.objects.all()[0].sections.count(), self.section_count)
page_data = self.get_new_page_data()
        # create 2nd language page
page_data.update({
'language': self.SECOND_LANG,
'title': "%s %s" % (page.get_title(), self.SECOND_LANG),
})
response = self.client.post(URL_CMS_PAGE_CHANGE % page.pk + "?language=%s" % self.SECOND_LANG, page_data)
self.assertRedirects(response, URL_CMS_PAGE)
self.assertEqual(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 1)
self.assertEqual(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 0)
self.assertEqual(CMSPlugin.objects.count(), 1)
self.assertEqual(Page.objects.all().count(), 1)
copy_data = {
'source_placeholder_id': placeholder.pk,
'target_placeholder_id': placeholder.pk,
'target_language': self.SECOND_LANG,
'source_language': self.FIRST_LANG,
}
response = self.client.post(URL_CMS_PAGE + "copy-plugins/", copy_data)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode('utf8').count('"position":'), 1)
# assert copy success
self.assertEqual(CMSPlugin.objects.filter(language=self.FIRST_LANG).count(), 1)
self.assertEqual(CMSPlugin.objects.filter(language=self.SECOND_LANG).count(), 1)
self.assertEqual(CMSPlugin.objects.count(), 2)
db_counts = [plgn.sections.count() for plgn in ArticlePluginModel.objects.all()]
expected = [self.section_count for _ in range(len(db_counts))]
self.assertEqual(expected, db_counts)
class PluginCopyRelationsTestCase(PluginsTestBaseCase):
"""Test the suggestions in the docs for copy_relations()"""
def setUp(self):
self.super_user = self._create_user("test", True, True)
self.FIRST_LANG = settings.LANGUAGES[0][0]
self._login_context = self.login_user_context(self.super_user)
self._login_context.__enter__()
page_data1 = self.get_new_page_data_dbfields()
page_data1['published'] = False
self.page1 = api.create_page(**page_data1)
page_data2 = self.get_new_page_data_dbfields()
page_data2['published'] = False
self.page2 = api.create_page(**page_data2)
self.placeholder1 = self.page1.placeholders.get(slot='body')
self.placeholder2 = self.page2.placeholders.get(slot='body')
def test_copy_fk_from_model(self):
plugin = api.add_plugin(
placeholder=self.placeholder1,
plugin_type="PluginWithFKFromModel",
language=self.FIRST_LANG,
)
FKModel.objects.create(fk_field=plugin)
old_public_count = FKModel.objects.filter(
fk_field__placeholder__page__publisher_is_draft=False
).count()
api.publish_page(
self.page1,
self.super_user,
self.FIRST_LANG
)
new_public_count = FKModel.objects.filter(
fk_field__placeholder__page__publisher_is_draft=False
).count()
self.assertEqual(
new_public_count,
old_public_count + 1
)
def test_copy_m2m_to_model(self):
plugin = api.add_plugin(
placeholder=self.placeholder1,
plugin_type="PluginWithM2MToModel",
language=self.FIRST_LANG,
)
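        # attach an m2m target so that publishing has a relation it must
        # copy over to the public version of the plugin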
m2m_target = M2MTargetModel.objects.create()
plugin.m2m_field.add(m2m_target)
old_public_count = M2MTargetModel.objects.filter(
pluginmodelwithm2mtomodel__placeholder__page__publisher_is_draft=False
).count()
api.publish_page(
self.page1,
self.super_user,
self.FIRST_LANG
)
new_public_count = M2MTargetModel.objects.filter(
pluginmodelwithm2mtomodel__placeholder__page__publisher_is_draft=False
).count()
self.assertEqual(
new_public_count,
old_public_count + 1
)
class PluginsMetaOptionsTests(TestCase):
''' TestCase set for ensuring that bugs like #992 are caught '''
    # these plugins are defined inline because, due to the nature of the
    # #992 ticket, we cannot import a single file containing all the
    # plugin variants: importing calls __new__, at which point the
    # error occurs.
def test_meta_options_as_defaults(self):
''' handling when a CMSPlugin meta options are computed defaults '''
# this plugin relies on the base CMSPlugin and Model classes to
# decide what the app_label and db_table should be
plugin = TestPlugin.model
self.assertEqual(plugin._meta.db_table, 'meta_testpluginmodel')
self.assertEqual(plugin._meta.app_label, 'meta')
def test_meta_options_as_declared_defaults(self):
''' handling when a CMSPlugin meta options are declared as per defaults '''
# here, we declare the db_table and app_label explicitly, but to the same
# values as would be computed, thus making sure it's not a problem to
# supply options.
plugin = TestPlugin2.model
self.assertEqual(plugin._meta.db_table, 'meta_testpluginmodel2')
self.assertEqual(plugin._meta.app_label, 'meta')
def test_meta_options_custom_app_label(self):
''' make sure customised meta options on CMSPlugins don't break things '''
plugin = TestPlugin3.model
self.assertEqual(plugin._meta.db_table, 'one_thing_testpluginmodel3')
self.assertEqual(plugin._meta.app_label, 'one_thing')
def test_meta_options_custom_db_table(self):
''' make sure custom database table names are OK. '''
plugin = TestPlugin4.model
self.assertEqual(plugin._meta.db_table, 'or_another_4')
self.assertEqual(plugin._meta.app_label, 'meta')
def test_meta_options_custom_both(self):
''' We should be able to customise app_label and db_table together '''
plugin = TestPlugin5.model
self.assertEqual(plugin._meta.db_table, 'or_another_5')
self.assertEqual(plugin._meta.app_label, 'one_thing')
class LinkPluginTestCase(PluginsTestBaseCase):
    def test_does_not_verify_existence_of_url(self):
form = LinkForm(
{'name': 'Linkname', 'url': 'http://www.nonexistant.test'})
self.assertTrue(form.is_valid())
    def test_opens_in_same_window_by_default(self):
        """Could not figure out how to render this plugin;
        checking only the values stored on the model."""
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test'})
link = form.save()
self.assertEqual(link.target, '')
def test_open_in_blank_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_blank'})
link = form.save()
self.assertEqual(link.target, '_blank')
def test_open_in_parent_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_parent'})
link = form.save()
self.assertEqual(link.target, '_parent')
def test_open_in_top_window(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': '_top'})
link = form.save()
self.assertEqual(link.target, '_top')
def test_open_in_nothing_else(self):
form = LinkForm({'name': 'Linkname',
'url': 'http://www.nonexistant.test', 'target': 'artificial'})
self.assertFalse(form.is_valid())
class NoDatabasePluginTests(TestCase):
def test_render_meta_is_unique(self):
text = Text()
link = Link()
self.assertNotEqual(id(text._render_meta), id(link._render_meta))
def test_render_meta_does_not_leak(self):
text = Text()
link = Link()
text._render_meta.text_enabled = False
link._render_meta.text_enabled = False
self.assertFalse(text._render_meta.text_enabled)
self.assertFalse(link._render_meta.text_enabled)
link._render_meta.text_enabled = True
self.assertFalse(text._render_meta.text_enabled)
self.assertTrue(link._render_meta.text_enabled)
def test_db_table_hack(self):
        # Plugin models have been moved out due to the Django 1.7 AppConfig
from cms.test_utils.project.bunch_of_plugins.models import TestPlugin1
self.assertEqual(TestPlugin1._meta.db_table, 'bunch_of_plugins_testplugin1')
def test_db_table_hack_with_mixin(self):
        # Plugin models have been moved out due to the Django 1.7 AppConfig
from cms.test_utils.project.bunch_of_plugins.models import TestPlugin2
self.assertEqual(TestPlugin2._meta.db_table, 'bunch_of_plugins_testplugin2')
def test_pickle(self):
text = Text()
text.__reduce__()
class PicturePluginTests(PluginsTestBaseCase):
def test_link_or_page(self):
"""Test a validator: you can enter a url or a page_link, but not both."""
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
picture = Picture(url="test")
# Note: don't call full_clean as it will check ALL fields - including
# the image, which we haven't defined. Call clean() instead which
# just validates the url and page_link fields.
picture.clean()
picture.page_link = page
picture.url = None
picture.clean()
picture.url = "test"
self.assertRaises(ValidationError, picture.clean)
class SimplePluginTests(TestCase):
def test_simple_naming(self):
class MyPlugin(CMSPluginBase):
render_template = 'base.html'
self.assertEqual(MyPlugin.name, 'My Plugin')
def test_simple_context(self):
class MyPlugin(CMSPluginBase):
render_template = 'base.html'
plugin = MyPlugin(ArticlePluginModel, admin.site)
context = {}
out_context = plugin.render(context, 1, 2)
self.assertEqual(out_context['instance'], 1)
self.assertEqual(out_context['placeholder'], 2)
self.assertIs(out_context, context)
class BrokenPluginTests(TestCase):
    def test_import_broken_plugin(self):
        """
        If there is an import error inside an existing cms_plugins file,
        the ImportError should be raised rather than silently swallowed -
        in contrast to the ImportError raised when the file
        'cms_plugins.py' does not exist, which is suppressed.
        """
new_apps = ['cms.test_utils.project.brokenpluginapp']
with self.settings(INSTALLED_APPS=new_apps):
plugin_pool.discovered = False
self.assertRaises(ImportError, plugin_pool.discover_plugins)
class MTIPluginsTestCase(PluginsTestBaseCase):
def test_add_edit_plugin(self):
        """
        Test that we can instantiate and use an MTI plugin
        """
        from cms.test_utils.project.mti_pluginapp.models import TestPluginBetaModel
# Create a page
page_data = self.get_new_page_data()
self.client.post(URL_CMS_PAGE_ADD, page_data)
page = Page.objects.all()[0]
# Add the MTI plugin
plugin_data = {
'plugin_type': "TestPluginBeta",
'plugin_language': settings.LANGUAGES[0][0],
'placeholder_id': page.placeholders.get(slot="body").pk,
'plugin_parent': '',
}
response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data)
self.assertEqual(response.status_code, 200)
plugin_id = self.get_response_pk(response)
self.assertEqual(plugin_id, CMSPlugin.objects.all()[0].pk)
# Test we can open the change form for the MTI plugin
edit_url = "%s%s/" % (URL_CMS_PLUGIN_EDIT, plugin_id)
response = self.client.get(edit_url)
self.assertEqual(response.status_code, 200)
# Edit the MTI plugin
data = {
"alpha": "ALPHA",
"beta": "BETA"
}
response = self.client.post(edit_url, data)
self.assertEqual(response.status_code, 200)
# Test that the change was properly stored in the DB
plugin_model = TestPluginBetaModel.objects.all()[0]
self.assertEqual("ALPHA", plugin_model.alpha)
self.assertEqual("BETA", plugin_model.beta)
|
Venturi/cms
|
env/lib/python2.7/site-packages/cms/tests/test_plugins.py
|
Python
|
gpl-2.0
| 79,253 | 0.002801 |
# -*- coding: utf-8 -*-
"""
Unit tests for embargo app admin forms.
"""
from __future__ import absolute_import
import six
# Explicitly import the cache from ConfigurationModel so we can reset it after each test
from config_models.models import cache
from django.test import TestCase
from opaque_keys.edx.locator import CourseLocator
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from ..forms import IPFilterForm, RestrictedCourseForm
from ..models import IPFilter
class RestrictedCourseFormTest(ModuleStoreTestCase):
"""Test the course form properly validates course IDs"""
def test_save_valid_data(self):
course = CourseFactory.create()
data = {
'course_key': six.text_type(course.id),
'enroll_msg_key': 'default',
'access_msg_key': 'default'
}
form = RestrictedCourseForm(data=data)
self.assertTrue(form.is_valid())
def test_invalid_course_key(self):
# Invalid format for the course key
form = RestrictedCourseForm(data={'course_key': 'not/valid'})
self._assert_course_field_error(form)
def test_course_not_found(self):
course_key = CourseLocator(org='test', course='test', run='test')
form = RestrictedCourseForm(data={'course_key': course_key})
self._assert_course_field_error(form)
    def _assert_course_field_error(self, form):
        """
        Assert that the form fails validation with an error on the
        course_key field and cannot be saved.
        """
self.assertFalse(form.is_valid())
msg = 'COURSE NOT FOUND'
self.assertIn(msg, form._errors['course_key'][0]) # pylint: disable=protected-access
with self.assertRaisesRegexp(
ValueError, "The RestrictedCourse could not be created because the data didn't validate."
):
form.save()
class IPFilterFormTest(TestCase):
"""Test form for adding [black|white]list IP addresses"""
def tearDown(self):
super(IPFilterFormTest, self).tearDown()
# Explicitly clear ConfigurationModel's cache so tests have a clear cache
# and don't interfere with each other
cache.clear()
def test_add_valid_ips(self):
# test adding valid ip addresses
# should be able to do both ipv4 and ipv6
# spacing should not matter
form_data = {
'whitelist': u'127.0.0.1, 2003:dead:beef:4dad:23:46:bb:101, 1.1.0.1/32, 1.0.0.0/24',
'blacklist': u' 18.244.1.5 , 2002:c0a8:101::42, 18.36.22.1, 1.0.0.0/16'
}
form = IPFilterForm(data=form_data)
self.assertTrue(form.is_valid())
form.save()
whitelist = IPFilter.current().whitelist_ips
blacklist = IPFilter.current().blacklist_ips
for addr in u'127.0.0.1, 2003:dead:beef:4dad:23:46:bb:101'.split(','):
self.assertIn(addr.strip(), whitelist)
for addr in u'18.244.1.5, 2002:c0a8:101::42, 18.36.22.1'.split(','):
self.assertIn(addr.strip(), blacklist)
# Network tests
# ips not in whitelist network
for addr in [u'1.1.0.2', u'1.0.1.0']:
self.assertNotIn(addr.strip(), whitelist)
# ips in whitelist network
for addr in [u'1.1.0.1', u'1.0.0.100']:
self.assertIn(addr.strip(), whitelist)
# ips not in blacklist network
for addr in [u'2.0.0.0', u'1.1.0.0']:
self.assertNotIn(addr.strip(), blacklist)
# ips in blacklist network
for addr in [u'1.0.100.0', u'1.0.0.10']:
self.assertIn(addr.strip(), blacklist)
# Test clearing by adding an empty list is OK too
form_data = {
'whitelist': '',
'blacklist': ''
}
form = IPFilterForm(data=form_data)
self.assertTrue(form.is_valid())
form.save()
self.assertEqual(len(IPFilter.current().whitelist), 0)
self.assertEqual(len(IPFilter.current().blacklist), 0)
def test_add_invalid_ips(self):
# test adding invalid ip addresses
form_data = {
'whitelist': u'.0.0.1, :dead:beef:::, 1.0.0.0/55',
'blacklist': u' 18.244.* , 999999:c0a8:101::42, 1.0.0.0/'
}
form = IPFilterForm(data=form_data)
self.assertFalse(form.is_valid())
if six.PY2:
wmsg = "Invalid IP Address(es): [u'.0.0.1', u':dead:beef:::', u'1.0.0.0/55']" \
" Please fix the error(s) and try again."
else:
wmsg = "Invalid IP Address(es): ['.0.0.1', ':dead:beef:::', '1.0.0.0/55']" \
" Please fix the error(s) and try again."
        self.assertEqual(wmsg, form._errors['whitelist'][0])  # pylint: disable=protected-access
if six.PY2:
bmsg = "Invalid IP Address(es): [u'18.244.*', u'999999:c0a8:101::42', u'1.0.0.0/']" \
" Please fix the error(s) and try again."
else:
bmsg = "Invalid IP Address(es): ['18.244.*', '999999:c0a8:101::42', '1.0.0.0/']" \
" Please fix the error(s) and try again."
        self.assertEqual(bmsg, form._errors['blacklist'][0])  # pylint: disable=protected-access
with self.assertRaisesRegexp(ValueError, "The IPFilter could not be created because the data didn't validate."):
form.save()
|
ESOedX/edx-platform
|
openedx/core/djangoapps/embargo/tests/test_forms.py
|
Python
|
agpl-3.0
| 5,361 | 0.002425 |
"""Implementation of allocation API.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
from treadmill import discovery
from treadmill import context
_LOGGER = logging.getLogger(__name__)
class API:
"""Treadmill Local REST api."""
def __init__(self):
def _get(hostname):
"""Get hostname nodeinfo endpoint info."""
_LOGGER.info('Redirect: %s', hostname)
discovery_iter = discovery.iterator(
context.GLOBAL.zk.conn,
'root.%s' % hostname, 'nodeinfo', False
)
for (_app, hostport) in discovery_iter:
if not hostport:
continue
_LOGGER.info('Found: %s - %s', hostname, hostport)
return hostport
_LOGGER.info('nodeinfo not found: %s', hostname)
return None
self.get = _get
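# Illustrative usage only (not part of the original module): a minimal
# sketch, assuming context.GLOBAL.zk has already been initialized by the
# surrounding Treadmill process and that 'myhost' is a hypothetical node.
#
#     api = API()
#     hostport = api.get('myhost')
#     if hostport:
#         _LOGGER.info('nodeinfo endpoint: %s', hostport)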
|
Morgan-Stanley/treadmill
|
lib/python/treadmill/api/nodeinfo.py
|
Python
|
apache-2.0
| 1,004 | 0 |
#----------------------------------------------------------------------
#This utility sets up the python configuration files so as to
#allow Python to find files in a specified directory, regardless
#of what directory the user is working from. This is typically
#used to create a directory where the user will put resources shared
#by many Python scripts, such as courseware modules
#
#----------------------------------------------------------------------
#Usage:
# (1) Put a copy of this file (setpath.py) in the directory
# you want to share
#
# (2) Execute setpath.py, either by opening it and running it
#        in Canopy, or from the command line by changing directory
#        to the directory you want to share and then typing
#             python setpath.py
# If you run it by opening it in the Canopy editor you need to
# select the directory popup menu item that tells Canopy to
# change the working directory to the Editor directory.
# in Canopy, the working directory always appears at the upper
# right corner of the Python interpreter window.
#
#----------------------------------------------------------------------
#Notes:
#
# This will create a startup file which will properly
# initialize ipython (whether used directly or via Enthought
# Canopy) to find your files, and will do that regardless
# of your operating system.
#
# If you are using a Linux or Mac OSX operating system, it
# will also edit your .cshrc and .bash_profile shell startup
# scripts to set the environment variable PYTHONPATH so that
#    any version of the python interpreter started from the
#    command line (i.e. whether ipython or python) will find
#    the shared files. This feature will not work on
#    Windows operating systems, so Windows users should either
#    start up python by clicking on the Canopy app, or
#    start ipython from the command line. It is possible
# to set the PYTHONPATH environment variable in Windows,
# but this script does not yet implement that feature.
#
# Note that it is also possible to manually set up a temporary
# shared path (for example /home/MyModules) in a given script
# by executing the lines:
#
# import sys
#            sys.path.append('/home/MyModules')
#
#    where you would replace '/home/MyModules' with the
# actual full path to the directory you want on your own
# system
#----------------------------------------------------------------------
import os,glob,platform
#Utility function to return an acceptable filename for the
#startup file
def makeFileName(startupDir):
files = glob.glob(os.path.join(startupDir,'*.py'))
#Make a startup filename that doesn't already exist
for i in range(10000):
if i<100:
fname = '%02d-startup.py'%i
else:
fname ='%04d-startup.py'%i
fname = os.path.join(startupDir,fname)
        if fname not in files: break
return fname
#
#--------Main program starts here
#
#Get current path
curPath = os.getcwd()
#Get home directory
home = os.path.expanduser('~')
#
#If this is a Linux or Mac OS X system, edit the
#shell initialization files to set the PYTHONPATH environment
#variable
if ( (platform.system()=='Darwin') or ('inux' in platform.system())):
#We are on a Linux or Mac system. Edit Shell startup files
    print('This is a Linux or Mac system. Adding path to shell startup scripts')
#
#csh script: (Note, should also do this for .tcshrc if it exists)
cshFile = os.path.join(home,'.cshrc')
    print('csh family -- Editing ' + cshFile)
#Make backup copy of file
os.system('cp %s %s'%(cshFile,cshFile+'.setPathBackup'))
#Append line to set PYTHONPATH
outfile = open(cshFile,'a')
outfile.write('#Line added by setPath.py. Original in %s\n'%(cshFile+'.setPathBackup'))
#Note: the double quotes allow paths to contain spaces
outfile.write('setenv PYTHONPATH \"%s:$PYTHONPATH\"\n'%curPath)
outfile.close()
#
#bash script (ToDo: also edit .profile, for sh users)
bashFile = os.path.join(home,'.bash_profile')
    print('sh family -- Editing ' + bashFile)
#Make backup copy of file
os.system('cp %s %s'%(bashFile,bashFile+'.setPathBackup'))
#Append line to set PYTHONPATH
outfile = open(bashFile,'a')
outfile.write('#Line added by setPath.py. Original in %s\n'%(bashFile+'.setPathBackup'))
#Note: the double quotes allow paths to contain spaces
outfile.write('export PYTHONPATH=\"%s:$PYTHONPATH\"\n'%curPath)
outfile.close()
#
#
#Set paths for ipython startup. This takes care of starting up ipython from
#double-clicking the Canopy app on any operating system
#
profilepath = os.path.join(home,'.ipython/profile_default/startup')
if os.path.isdir(profilepath):
fname = makeFileName(profilepath)
else:
    print("Could not find .ipython startup directory. Exiting.")
exit(1)
#
#Write the startup file
contents = 'import sys \nsys.path.append(\'%s\')\n'%curPath
outfile = open(fname,'w')
outfile.write(contents)
outfile.close()
|
CommonClimate/teaching_notebooks
|
GEOL351/CoursewareModules/setpath.py
|
Python
|
mit
| 5,073 | 0.013996 |
import subprocess
import os
class CommandRunner:
HOST_LIST_TO_RUN_LOCAL = ["localhost", "127.0.0.1"]
def __init__(self, local_hostname, logger):
logger.debug("Creating CommandRunner with Args - local_hostname: {local_hostname}, logger: {logger}".format(**locals()))
self.local_hostname = local_hostname
self.logger = logger
# returns: is_successful, output
def run_command(self, host, base_command):
self.logger.debug("Running Command: " + str(base_command))
if host == self.local_hostname or host in self.HOST_LIST_TO_RUN_LOCAL:
return self._run_local_command(base_command)
else:
return self._run_ssh_command(host, base_command)
    # This starts the process as a child process, meaning that if the
    # scheduler_failover_controller fails, the child process fails with it
    # (unless the command is run via systemctl).
def _run_local_command(self, base_command):
self.logger.debug("Running command as Local command")
output = os.popen(base_command).read()
if output:
output = output.split("\n")
self.logger.debug("Run Command output: " + str(output))
return True, output
def _run_ssh_command(self, host, base_command):
self.logger.debug("Running command as SSH command")
if base_command.startswith("sudo"):
command_split = ["ssh", "-tt", host, base_command]
else:
command_split = ["ssh", host, base_command]
return self._run_split_command(
command_split=command_split
)
def _run_split_command(self, command_split):
self.logger.debug("Running command_split: " + str(command_split))
is_successful = True
output = []
try:
process = subprocess.Popen(command_split, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
process.wait()
if process.stderr is not None:
stderr_output = process.stderr.readlines()
if stderr_output and len(stderr_output) > 0:
output += [err.decode() if isinstance(err, bytes) else err for err in stderr_output]
self.logger.debug("Run Command stderr output: " + str(stderr_output))
if process.stdout is not None:
output += [out.decode() if isinstance(out, bytes) else out for out in process.stdout.readlines()]
if process.returncode != 0:
                self.logger.warning("Process returned code '" + str(process.returncode) + "'")
is_successful = False
except Exception as e:
is_successful = False
output = str(e)
self.logger.debug("Run Command output: " + str(output))
return is_successful, output
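# Illustrative usage only (not part of the original module): a minimal
# sketch, assuming a standard logging.Logger; the remote hostname below is
# hypothetical.
#
#     import logging
#     logging.basicConfig(level=logging.DEBUG)
#     runner = CommandRunner("localhost", logging.getLogger(__name__))
#     ok, out = runner.run_command("localhost", "echo hello")  # runs locally
#     ok, out = runner.run_command("worker-1", "echo hello")   # runs via ssh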
|
teamclairvoyant/airflow-scheduler-failover-controller
|
scheduler_failover_controller/command_runner/command_runner.py
|
Python
|
apache-2.0
| 2,797 | 0.002503 |
import logging
import ibmsecurity.utilities.tools
logger = logging.getLogger(__name__)
module_uri = "/isam/felb/configuration/services/"
requires_modules = None
requires_versions = None
requires_model = "Appliance"
def add(isamAppliance, service_name, name, value, check_mode=False, force=False):
"""
Creates a service attribute
"""
check_value, warnings = _check(isamAppliance, service_name, name)
if force is True or check_value is False:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnings=warnings)
else:
return isamAppliance.invoke_post("Creating a service attribute",
"{0}{1}/attributes".format(module_uri, service_name),
{
"name": name,
"value": value
}, requires_version=requires_versions,
requires_modules=requires_modules, requires_model=requires_model)
else:
return isamAppliance.create_return_object(warnings=warnings)
def delete(isamAppliance, service_name, attribute_name, check_mode=False, force=False):
"""
deletes a service level attribute
"""
check_value, warnings = _check(isamAppliance, service_name, attribute_name)
if force is True or check_value is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnings=warnings)
else:
return isamAppliance.invoke_delete("Deleting a service attribute",
"{0}{1}/attributes/{2}".format(module_uri, service_name,
attribute_name),
requires_version=requires_versions,
requires_modules=requires_modules, requires_model=requires_model)
else:
return isamAppliance.create_return_object(warnings=warnings)
def get(isamAppliance, service_name, attribute_name):
"""
Retrieving a service attribute
"""
return isamAppliance.invoke_get("Retrieving a service attribute",
"{0}{1}/attributes/{2}".format(module_uri, service_name, attribute_name),
requires_version=requires_versions,
requires_modules=requires_modules,
requires_model=requires_model)
def get_all(isamAppliance, service_name):
"""
Retrieving service attribute names
"""
return isamAppliance.invoke_get("Retrieving service attribute names",
"{0}{1}/attributes?includeAllValues=true".format(module_uri, service_name),
requires_version=requires_versions,
requires_modules=requires_modules,
requires_model=requires_model)
def update(isamAppliance, service_name, attribute_name, attribute_value, check_mode=False, force=False):
"""
Updating a service attribute
"""
check_value, warnings = _check_add(isamAppliance, service_name, attribute_name, attribute_value)
if force is True or check_value is True:
if check_mode is True:
return isamAppliance.create_return_object(changed=True, warnings=warnings)
else:
return isamAppliance.invoke_put("Updating a service attribute",
"{0}{1}/attributes/{2}".format(module_uri, service_name, attribute_name),
{
"value": attribute_value
},
requires_modules=requires_modules, requires_version=requires_versions,
requires_model=requires_model)
else:
return isamAppliance.create_return_object(warnings=warnings)
def set(isamAppliance, service_name, attribute_name, attribute_value, check_mode=False, force=False):
"""
Determines if add or update is called
"""
check_value, warnings = _check(isamAppliance, service_name, attribute_name)
if check_value is False:
return add(isamAppliance, service_name, attribute_name, attribute_value, check_mode, force)
else:
return update(isamAppliance, service_name, attribute_name, attribute_value, check_mode, force)
def compare(isamAppliance1, service_name1, isamAppliance2, service_name2):
"""
Compare configuration between two appliances
"""
ret_obj1 = get_all(isamAppliance1, service_name1)
ret_obj2 = get_all(isamAppliance2, service_name2)
return ibmsecurity.utilities.tools.json_compare(ret_obj1, ret_obj2, deleted_keys=[])
def _check_add(isamAppliance, service_name, name, value):
"""
idempotency test for add function
"""
check_obj = {}
warnings = ""
    # check whether the attribute exists under the service name; return True if it does not
try:
check_obj = get(isamAppliance, service_name, name)
warnings = check_obj['warnings']
    except Exception:
return True, warnings
if 'value' in check_obj['data']:
if check_obj['data']['value'] != value:
return True, warnings
else:
return False, warnings
else:
return False, warnings
def _check(isamAppliance, service_name, attribute_name):
"""
Checks to see if attribute exists
"""
warnings = ""
try:
check_obj = get(isamAppliance, service_name, attribute_name)
warnings = check_obj['warnings']
    except Exception:
return False, warnings
if check_obj['data'] == {}:
return False, warnings
return True, warnings
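# Illustrative usage only (not part of the original module): a minimal
# sketch of the idempotent set() flow, assuming an ISAMAppliance object has
# been constructed elsewhere; the service and attribute names are
# hypothetical.
#
#     ret = set(isamAppliance, service_name='myservice',
#               attribute_name='rise', attribute_value='2')
#     # set() delegates to add() when the attribute does not exist and to
#     # update() when it exists with a different value; otherwise the
#     # returned object reports no change.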
|
IBM-Security/ibmsecurity
|
ibmsecurity/isam/base/network/felb/services/advanced_tuning.py
|
Python
|
apache-2.0
| 6,046 | 0.004135 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from py4j.protocol import Py4JJavaError
from pyspark import keyword_only
from pyspark.testing.utils import PySparkTestCase
class KeywordOnlyTests(unittest.TestCase):
class Wrapped(object):
@keyword_only
def set(self, x=None, y=None):
if "x" in self._input_kwargs:
self._x = self._input_kwargs["x"]
if "y" in self._input_kwargs:
self._y = self._input_kwargs["y"]
return x, y
def test_keywords(self):
w = self.Wrapped()
x, y = w.set(y=1)
self.assertEqual(y, 1)
self.assertEqual(y, w._y)
self.assertIsNone(x)
self.assertFalse(hasattr(w, "_x"))
def test_non_keywords(self):
w = self.Wrapped()
self.assertRaises(TypeError, lambda: w.set(0, y=1))
def test_kwarg_ownership(self):
# test _input_kwargs is owned by each class instance and not a shared static variable
class Setter(object):
@keyword_only
def set(self, x=None, other=None, other_x=None):
if "other" in self._input_kwargs:
self._input_kwargs["other"].set(x=self._input_kwargs["other_x"])
self._x = self._input_kwargs["x"]
a = Setter()
b = Setter()
a.set(x=1, other=b, other_x=2)
self.assertEqual(a._x, 1)
self.assertEqual(b._x, 2)
class UtilTests(PySparkTestCase):
def test_py4j_exception_message(self):
from pyspark.util import _exception_message
with self.assertRaises(Py4JJavaError) as context:
# This attempts java.lang.String(null) which throws an NPE.
self.sc._jvm.java.lang.String(None)
self.assertTrue('NullPointerException' in _exception_message(context.exception))
def test_parsing_version_string(self):
from pyspark.util import VersionUtils
self.assertRaises(ValueError, lambda: VersionUtils.majorMinorVersion("abced"))
if __name__ == "__main__":
from pyspark.tests.test_util import *
try:
import xmlrunner
testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
except ImportError:
testRunner = None
unittest.main(testRunner=testRunner, verbosity=2)
|
WindCanDie/spark
|
python/pyspark/tests/test_util.py
|
Python
|
apache-2.0
| 3,052 | 0.001311 |
# -*- coding: utf-8 -*-
from harpia.model.connectionmodel import ConnectionModel as ConnectionModel
from harpia.system import System as System
class DiagramModel(object):
# ----------------------------------------------------------------------
def __init__(self):
        self.last_id = 1  # first block is n1; incremented for each new block
self.blocks = {} # GUI blocks
self.connectors = []
self.zoom = 1.0 # pixels per unit
self.file_name = "Untitled"
self.modified = False
self.language = None
self.undo_stack = []
self.redo_stack = []
# ----------------------------------------------------------------------
@property
def patch_name(self):
return self.file_name.split("/").pop()
# ----------------------------------------------------------------------
|
llgoncalves/harpia
|
harpia/model/diagrammodel.py
|
Python
|
gpl-2.0
| 854 | 0.001171 |
# -*- coding: UTF-8 -*-
# Copyright 2019-2020 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
from django.db import models
from lino_xl.lib.ledger.choicelists import VoucherStates
from lino.api import dd, _
class OrderStates(VoucherStates):
pass
add = OrderStates.add_item
add('10', _("Waiting"), 'draft', is_editable=True)
add('20', _("Active"), 'active', is_editable=True)
add('30', _("Urgent"), 'urgent', is_editable=True)
add('40', _("Done"), 'registered')
add('50', _("Cancelled"), 'cancelled')
OrderStates.draft.add_transition(required_states="active urgent registered cancelled")
OrderStates.active.add_transition(required_states="draft urgent registered cancelled")
OrderStates.urgent.add_transition(required_states="draft active registered cancelled")
OrderStates.registered.add_transition(required_states="draft active urgent cancelled")
OrderStates.cancelled.add_transition(required_states="draft active urgent registered")
|
lino-framework/xl
|
lino_xl/lib/orders/choicelists.py
|
Python
|
bsd-2-clause
| 992 | 0.006048 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for running legacy optimizer code with DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy
from tensorflow.contrib.distribute.python import combinations
from tensorflow.contrib.distribute.python.single_loss_example import batchnorm_example
from tensorflow.contrib.distribute.python.single_loss_example import minimize_loss_example
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.distribute import reduce_util
from tensorflow.python.eager import context
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.layers import core
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.ops.losses import losses_impl
class MinimizeLossStepTest(test.TestCase, parameterized.TestCase):
def _get_iterator(self, ds):
if context.executing_eagerly():
iterator = ds.make_one_shot_iterator()
else:
iterator = ds.make_initializable_iterator()
self.evaluate(iterator.initializer)
return iterator
@combinations.generate(
combinations.times(
combinations.distributions_and_v1_optimizers(),
combinations.combine(mode=["graph"], use_callable_loss=[True, False])
+ combinations.combine(mode=["eager"], use_callable_loss=[True])) +
combinations.combine(
distribution=[combinations.tpu_strategy],
optimizer_fn=combinations.optimizers_v1,
mode=["graph"],
use_callable_loss=[True, False]))
def testTrainNetwork(self, distribution, optimizer_fn, use_callable_loss):
with distribution.scope():
model_fn, dataset_fn, layer = minimize_loss_example(
optimizer_fn, use_bias=True, use_callable_loss=use_callable_loss)
def step_fn(ctx, inputs):
del ctx # Unused
return distribution.group(
distribution.call_for_each_replica(model_fn, args=(inputs,)))
iterator = self._get_iterator(distribution.distribute_dataset(dataset_fn))
def run_step():
return distribution.run_steps_on_dataset(
step_fn, iterator, iterations=2).run_op
self.evaluate(distribution.initialize())
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
weights, biases = [], []
for _ in range(5):
run_step()
weights.append(self.evaluate(layer.kernel))
biases.append(self.evaluate(layer.bias))
self.evaluate(distribution.finalize())
error = abs(numpy.add(numpy.squeeze(weights), numpy.squeeze(biases)) - 1)
is_not_increasing = all(y <= x for x, y in zip(error, error[1:]))
self.assertTrue(is_not_increasing)
@combinations.generate(
combinations.times(
combinations.distributions_and_v1_optimizers(),
combinations.combine(mode=["graph"], use_callable_loss=[True, False])
+ combinations.combine(mode=["eager"], use_callable_loss=[True])))
def testTrainNetworkByCallForEachReplica(self, distribution, optimizer_fn,
use_callable_loss):
with distribution.scope():
model_fn, dataset_fn, layer = minimize_loss_example(
optimizer_fn, use_bias=True, use_callable_loss=use_callable_loss)
iterator = self._get_iterator(distribution.distribute_dataset(dataset_fn))
def run_step():
return distribution.group(
distribution.call_for_each_replica(
model_fn, args=(iterator.get_next(),)))
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
weights, biases = [], []
for _ in range(10):
run_step()
weights.append(self.evaluate(layer.kernel))
biases.append(self.evaluate(layer.bias))
error = abs(numpy.add(numpy.squeeze(weights), numpy.squeeze(biases)) - 1)
is_not_increasing = all(y <= x for x, y in zip(error, error[1:]))
self.assertTrue(is_not_increasing)
@combinations.generate(
combinations.times(
combinations.distributions_and_v1_optimizers() +
combinations.distributions_and_v2_optimizers(),
combinations.combine(mode=["graph", "eager"])) +
combinations.combine(
distribution=[combinations.tpu_strategy],
optimizer_fn=combinations.optimizers_v1+combinations.optimizers_v2,
mode=["graph"]))
def testOptimizerInsideModelFn(self, distribution, optimizer_fn):
created_variables = []
trainable_variables = []
def appending_creator(next_creator, *args, **kwargs):
v = next_creator(*args, **kwargs)
created_variables.append(v.name)
if "trainable" in kwargs and kwargs["trainable"]:
trainable_variables.append(v.name)
return v
# Creator scope needs to be set before it's used inside
# `distribution.scope`.
with variable_scope.variable_creator_scope(
appending_creator), distribution.scope():
model_fn, dataset_fn, layer = minimize_loss_example(
optimizer_fn,
use_bias=True,
use_callable_loss=True,
create_optimizer_inside_model_fn=True)
def step_fn(ctx, inputs):
del ctx # Unused
return distribution.group(
distribution.call_for_each_replica(model_fn, args=(inputs,)))
iterator = self._get_iterator(distribution.distribute_dataset(dataset_fn))
def run_step():
return distribution.run_steps_on_dataset(
step_fn, iterator, iterations=1).run_op
self.evaluate(distribution.initialize())
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
run_step()
self.evaluate(distribution.finalize())
def get_expected_variables(optimizer_fn, num_parameter_devices):
variables_map = {
"GradientDescent": ["dense/kernel", "dense/bias"],
"Adagrad": [
"dense/kernel/Adagrad", "dense/kernel",
"dense/bias/Adagrad", "dense/bias"
]
}
variables = variables_map[optimizer_fn().get_name()]
variables.extend([
v + "/replica_{}".format(replica)
for v in variables
for replica in range(1, num_parameter_devices)
])
return set([v + ":0" for v in variables])
self.assertEqual(
get_expected_variables(optimizer_fn,
len(distribution.parameter_devices)),
set(created_variables))
@combinations.generate(
combinations.times(
combinations.combine(momentum=[0.8, 0.9, 0.99], renorm=[False, True]),
combinations.times(
combinations.distributions_and_v1_optimizers(),
combinations.combine(
mode=["graph", "eager"],
# TODO(isaprykin): Allow False here. Currently subsequent
# replicas will re-execute UPDATE_OPS of previous replicas.
update_ops_in_cross_replica_mode=[True])) +
combinations.combine(
distribution=[combinations.tpu_strategy],
optimizer_fn=combinations.optimizers_v1,
mode=["graph"],
update_ops_in_cross_replica_mode=[False])))
def testTrainNetworkWithBatchNorm(self, distribution, optimizer_fn, momentum,
renorm, update_ops_in_cross_replica_mode):
"""Verifies that moving mean updates are reduced across replicas."""
with distribution.scope():
num_replicas = distribution.num_replicas_in_sync
model_fn, dataset_fn, batchnorm = batchnorm_example(
optimizer_fn,
batch_per_epoch=num_replicas,
momentum=momentum,
renorm=renorm,
update_ops_in_replica_mode=not update_ops_in_cross_replica_mode)
def step_fn(ctx, inputs):
del ctx # Unused
fetches = distribution.unwrap(
distribution.call_for_each_replica(model_fn, args=(inputs,)))
if update_ops_in_cross_replica_mode:
fetches += tuple(ops.get_collection(ops.GraphKeys.UPDATE_OPS))
return control_flow_ops.group(fetches)
iterator = self._get_iterator(distribution.distribute_dataset(dataset_fn))
def run_step():
return distribution.run_steps_on_dataset(
step_fn, iterator, iterations=1).run_op
self.evaluate(distribution.initialize())
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
expected_moving_means = [0.] * 8
def averaged_batch_mean(i):
# Each batch has shape [16, 8] where the ith element in jth list is
# (8 * j + i + replica_id * 100). So the batch mean in each replica is
# (60 + i + replica_id * 100). So here comes its batch mean over all
# replicas:
return 60. + i + (num_replicas - 1.) / 2. * 100.
for _ in range(10):
run_step()
moving_means = self.evaluate(batchnorm.moving_mean)
# We make sure that the moving_mean is updated as if the sample mean is
# calculated over all replicas.
for i, expected_moving_mean in enumerate(expected_moving_means):
expected_moving_means[i] -= ((
expected_moving_mean - averaged_batch_mean(i)) * (1.0 - momentum))
self.assertNear(expected_moving_means[i], moving_means[i], 0.0001)
self.evaluate(distribution.finalize())
@combinations.generate(
combinations.times(
combinations.combine(
optimizer_fn=[
combinations.gradient_descent_optimizer_v1_fn,
combinations.gradient_descent_optimizer_v2_fn
],
loss_reduction=[
losses_impl.Reduction.SUM, losses_impl.Reduction.MEAN,
losses_impl.Reduction.SUM_OVER_BATCH_SIZE,
losses_impl.Reduction.SUM_OVER_NONZERO_WEIGHTS
]),
combinations.times(
combinations.combine(
distribution=[
combinations.one_device_strategy,
combinations.mirrored_strategy_with_gpu_and_cpu,
combinations.mirrored_strategy_with_two_gpus,
combinations.core_mirrored_strategy_with_gpu_and_cpu,
combinations.core_mirrored_strategy_with_two_gpus
]),
combinations.combine(
mode=["graph"], use_callable_loss=[True, False]) +
combinations.combine(mode=["eager"], use_callable_loss=[True])) +
combinations.combine(
distribution=[combinations.tpu_strategy],
mode=["graph"],
use_callable_loss=[True, False])))
def testMeanVsSum(self, distribution, optimizer_fn, loss_reduction,
use_callable_loss):
with distribution.scope():
all_vars = []
def model_fn(inputs):
x, y = inputs
def loss_fn():
# Use fixed initialization to make the steps deterministic.
w = variable_scope.get_variable("w", initializer=[[2.]])
all_vars.append(w)
predict = math_ops.matmul(x, w)
return losses_impl.mean_squared_error(
y, predict, reduction=loss_reduction)
optimizer = optimizer_fn() # GradientDescent with 0.2 learning rate
if use_callable_loss:
return optimizer.minimize(loss_fn)
else:
return optimizer.minimize(loss_fn())
def dataset_fn():
features = dataset_ops.Dataset.from_tensors([[2.], [7.]])
labels = dataset_ops.Dataset.from_tensors([[6.], [21.]])
return dataset_ops.Dataset.zip((features, labels)).repeat()
def step_fn(ctx, inputs):
del ctx # Unused
return distribution.group(
distribution.call_for_each_replica(model_fn, args=(inputs,)))
iterator = self._get_iterator(distribution.distribute_dataset(dataset_fn))
def run_step():
return distribution.run_steps_on_dataset(
step_fn, iterator, iterations=1).run_op
self.evaluate(distribution.initialize())
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
run_step()
v = all_vars[0]
self.assertTrue(all(v is vi for vi in all_vars[1:]))
weight = numpy.squeeze(self.evaluate(v))
# Our model is:
# predict = x * w
# loss = (predict - y)^2
# dloss/dpredict = 2*(predict - y)
# dloss/dw = 2 * x^T @ (predict - y)
# For our batch size of 2, assuming sum loss reduction:
# x = [2, 7]
# y = [6, 21]
# w_initial = 2
# predict = [4, 14]
# predict - y = [-2, -7]
    # dloss/dw = 2 * <[2, 7], [-2, -7]> = 2 * (-4 - 49) = -106
# So unreplicated the update to w with lr=0.2 is -0.2 * -106 = 21.2
# with sum loss reduction, or 10.6 with mean.
if loss_reduction == losses_impl.Reduction.SUM:
# Note that the "distribution.num_replicas_in_sync" factor will go away
# once we split the input across replicas, instead of pulling a complete
# batch of input per replica.
self.assertNear(weight, 2 + 21.2 * distribution.num_replicas_in_sync,
0.0001)
else:
# One of the mean loss reductions.
self.assertNear(weight, 2 + 10.6, 0.0001)
self.evaluate(distribution.finalize())
@combinations.generate(
combinations.times(
combinations.distributions_and_v1_optimizers(),
combinations.combine(mode=["graph", "eager"]),
combinations.combine(is_tpu=[False])) +
combinations.combine(
distribution=[combinations.tpu_strategy],
optimizer_fn=combinations.optimizers_v1,
mode=["graph"],
is_tpu=[True]))
def testRunStepsWithOutputContext(self, distribution, optimizer_fn, is_tpu):
with distribution.scope():
def dataset_fn():
dataset = dataset_ops.Dataset.from_tensors([[1.]]).repeat()
# TODO(priyag): batch with drop_remainder=True causes shapes to be
# fully defined for TPU. Remove this when XLA supports dynamic shapes.
return dataset.batch(batch_size=1, drop_remainder=True)
optimizer = optimizer_fn()
layer = core.Dense(1, use_bias=True)
key1 = "foo"
value1 = "bar"
def model_fn(output_context, x):
"""A very simple model written by the user."""
def loss_fn():
y = array_ops.reshape(layer(x), []) - constant_op.constant(1.)
return y * y
train_op = optimizer.minimize(loss_fn)
loss = loss_fn()
output_context.set_last_step_output(
name="replica_loss_reduced",
output=loss,
reduce_op=reduce_util.ReduceOp.MEAN)
output_context.set_non_tensor_output(key1, value1)
return (train_op, loss)
def step_fn(output_context, inputs):
(train_op, loss) = distribution.call_for_each_replica(
model_fn, args=(output_context, inputs))
output_context.set_last_step_output(
name="cross_replica_loss_reduced",
output=loss,
reduce_op=reduce_util.ReduceOp.MEAN)
output_context.set_last_step_output(
name="cross_replica_loss_not_reduced",
output=loss)
return distribution.group(train_op)
iterator = self._get_iterator(distribution.distribute_dataset(dataset_fn))
def run_step():
initial_loss = lambda: constant_op.constant(1e7)
# Initial values corresponding to reduced losses are just single
# tensors. But for non reduced losses, we need to have initial
# values that are of the same structure as non reduced losses. In
# MirroredStrategy, this will be a list of losses, in TPUStrategy
# it will be single tensor. Using `broadcast` followed by `unwrap`
# gives us the desired initial value structure.
initial_loop_values = {
"replica_loss_reduced": initial_loss(),
"cross_replica_loss_reduced": initial_loss(),
"cross_replica_loss_not_reduced":
distribution.unwrap(distribution.broadcast(initial_loss()))
}
ctx = distribution.run_steps_on_dataset(
step_fn, iterator, iterations=2,
initial_loop_values=initial_loop_values)
self.assertEqual({key1: (value1,)}, ctx.non_tensor_outputs)
self._verify_loss_output(
initial_loss(),
loss_output=ctx.last_step_outputs["replica_loss_reduced"],
reduced=True, distribution=distribution)
self._verify_loss_output(
initial_loss(),
loss_output=ctx.last_step_outputs["cross_replica_loss_reduced"],
reduced=True, distribution=distribution)
self._verify_loss_output(
initial_loss(),
loss_output=ctx.last_step_outputs["cross_replica_loss_not_reduced"],
reduced=False, distribution=distribution)
return (ctx.run_op, ctx.last_step_outputs["replica_loss_reduced"])
self.evaluate(distribution.initialize())
if not context.executing_eagerly():
with self.cached_session() as sess:
run_step = sess.make_callable(run_step())
self.evaluate(variables_lib.global_variables_initializer())
weights, biases, losses = [], [], []
for _ in range(5):
_, loss = run_step()
losses.append(loss)
weights.append(self.evaluate(layer.kernel))
biases.append(self.evaluate(layer.bias))
self.evaluate(distribution.finalize())
loss_is_not_increasing = all(y <= x for x, y in zip(losses, losses[1:]))
self.assertTrue(loss_is_not_increasing)
error = abs(
numpy.add(numpy.squeeze(weights), numpy.squeeze(biases)) - 1)
error_is_not_increasing = all(y <= x for x, y in zip(error, error[1:]))
self.assertTrue(error_is_not_increasing)
def _verify_loss_output(self, initial_loss, loss_output, reduced,
distribution):
if not reduced:
self.assertLen(distribution.unwrap(loss_output),
distribution.num_replicas_in_sync)
loss_tensor = distribution.reduce(reduce_util.ReduceOp.MEAN, loss_output)
else:
unwrapped_output = distribution.unwrap(loss_output)
self.assertLen(unwrapped_output, 1)
loss_tensor = unwrapped_output[0]
self.assertEqual(initial_loss.dtype, loss_tensor.dtype)
self.assertEqual(initial_loss.shape, loss_tensor.shape)
if __name__ == "__main__":
test.main()
|
hfp/tensorflow-xsmm
|
tensorflow/contrib/distribute/python/minimize_loss_test.py
|
Python
|
apache-2.0
| 20,274 | 0.007695 |
from django.conf import settings
from django.utils.translation import gettext_lazy as _
ASSIGNMENT_ANY = 0
ASSIGNMENT_MATCH = 1
ASSIGNMENT_EXCEPT = 2
ASSIGNMENT_CHOICES = (
(ASSIGNMENT_ANY, _("any")),
(ASSIGNMENT_MATCH, _("matches")),
(ASSIGNMENT_EXCEPT, _("don't match")),
)
DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON = getattr(
settings, "DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON", True
)
AUTH_USER_MODEL = getattr(settings, "AUTH_USER_MODEL", "auth.User")
DJANGO_ADMIN_SSO_OAUTH_CLIENT_ID = getattr(
settings, "DJANGO_ADMIN_SSO_OAUTH_CLIENT_ID", None
)
DJANGO_ADMIN_SSO_OAUTH_CLIENT_SECRET = getattr(
settings, "DJANGO_ADMIN_SSO_OAUTH_CLIENT_SECRET", None
)
DJANGO_ADMIN_SSO_AUTH_URI = getattr(
settings, "DJANGO_ADMIN_SSO_AUTH_URI", "https://accounts.google.com/o/oauth2/auth"
)
DJANGO_ADMIN_SSO_TOKEN_URI = getattr(
settings, "DJANGO_ADMIN_SSO_TOKEN_URI", "https://accounts.google.com/o/oauth2/token"
)
DJANGO_ADMIN_SSO_REVOKE_URI = getattr(
settings,
"DJANGO_ADMIN_SSO_REVOKE_URI",
"https://accounts.google.com/o/oauth2/revoke",
)
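# Illustrative only (not part of the original module): a sketch of how a
# project might override these defaults in its own settings.py; the values
# are placeholders.
#
#     DJANGO_ADMIN_SSO_ADD_LOGIN_BUTTON = True
#     DJANGO_ADMIN_SSO_OAUTH_CLIENT_ID = "example-client-id"
#     DJANGO_ADMIN_SSO_OAUTH_CLIENT_SECRET = "example-client-secret"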
|
matthiask/django-admin-sso
|
admin_sso/default_settings.py
|
Python
|
bsd-3-clause
| 1,071 | 0.001867 |
#!/usr/bin/env python
#
# Copyright 2005,2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
import audio_alsa
class qa_alsa (gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_000_nop (self):
"""Just see if we can import the module...
They may not have ALSA drivers, etc. Don't try to run anything"""
pass
if __name__ == '__main__':
gr_unittest.main ()
|
GREO/GNU-Radio
|
gr-audio-alsa/src/qa_alsa.py
|
Python
|
gpl-3.0
| 1,240 | 0.010484 |
# coding=utf-8
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from app import app
if __name__ == "__main__":
http_server = HTTPServer(WSGIContainer(app))
http_server.listen(5000)
IOLoop.instance().start()
|
levythu/swift-layerC
|
inapi/httpd.py
|
Python
|
gpl-2.0
| 287 | 0 |
"""distutils.command.sdist
Implements the Distutils 'sdist' command (create a source distribution)."""
import os
import sys
from glob import glob
from warnings import warn
from distutils.core import Command
from distutils import dir_util, dep_util, file_util, archive_util
from distutils.text_file import TextFile
from distutils.errors import *
from distutils.filelist import FileList
from distutils import log
from distutils.util import convert_path
def show_formats():
"""Print all possible values for the 'formats' option (used by
the "--help-formats" command-line option).
"""
from distutils.fancy_getopt import FancyGetopt
from distutils.archive_util import ARCHIVE_FORMATS
formats = []
for format in ARCHIVE_FORMATS.keys():
formats.append(("formats=" + format, None,
ARCHIVE_FORMATS[format][2]))
formats.sort()
FancyGetopt(formats).print_help(
"List of available source distribution formats:")
class sdist(Command):
description = "create a source distribution (tarball, zip file, etc.)"
def checking_metadata(self):
"""Callable used for the check sub-command.
Placed here so user_options can view it"""
return self.metadata_check
user_options = [
('template=', 't',
"name of manifest template file [default: MANIFEST.in]"),
('manifest=', 'm',
"name of manifest file [default: MANIFEST]"),
('use-defaults', None,
"include the default file set in the manifest "
"[default; disable with --no-defaults]"),
('no-defaults', None,
"don't include the default file set"),
('prune', None,
"specifically exclude files/directories that should not be "
"distributed (build tree, RCS/CVS dirs, etc.) "
"[default; disable with --no-prune]"),
('no-prune', None,
"don't automatically exclude anything"),
('manifest-only', 'o',
"just regenerate the manifest and then stop "
"(implies --force-manifest)"),
('force-manifest', 'f',
"forcibly regenerate the manifest and carry on as usual. "
"Deprecated: now the manifest is always regenerated."),
('formats=', None,
"formats for source distribution (comma-separated list)"),
('keep-temp', 'k',
"keep the distribution tree around after creating " +
"archive file(s)"),
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
('metadata-check', None,
"Ensure that all required elements of meta-data "
"are supplied. Warn if any missing. [default]"),
]
boolean_options = ['use-defaults', 'prune',
'manifest-only', 'force-manifest',
'keep-temp', 'metadata-check']
help_options = [
('help-formats', None,
"list available distribution formats", show_formats),
]
negative_opt = {'no-defaults': 'use-defaults',
'no-prune': 'prune' }
default_format = {'posix': 'gztar',
'nt': 'zip' }
sub_commands = [('check', checking_metadata)]
def initialize_options(self):
# 'template' and 'manifest' are, respectively, the names of
# the manifest template and manifest file.
self.template = None
self.manifest = None
# 'use_defaults': if true, we will include the default file set
# in the manifest
self.use_defaults = 1
self.prune = 1
self.manifest_only = 0
self.force_manifest = 0
self.formats = None
self.keep_temp = 0
self.dist_dir = None
self.archive_files = None
self.metadata_check = 1
def finalize_options(self):
if self.manifest is None:
self.manifest = "MANIFEST"
if self.template is None:
self.template = "MANIFEST.in"
self.ensure_string_list('formats')
if self.formats is None:
try:
self.formats = [self.default_format[os.name]]
except KeyError:
raise DistutilsPlatformError(
"don't know how to create source distributions "
"on platform %s" % os.name)
bad_format = archive_util.check_archive_formats(self.formats)
if bad_format:
raise DistutilsOptionError(
"unknown archive format '%s'" % bad_format)
if self.dist_dir is None:
self.dist_dir = "dist"
def run(self):
# 'filelist' contains the list of files that will make up the
# manifest
self.filelist = FileList()
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
# Do whatever it takes to get the list of files to process
# (process the manifest template, read an existing manifest,
# whatever). File list is accumulated in 'self.filelist'.
self.get_file_list()
# If user just wanted us to regenerate the manifest, stop now.
if self.manifest_only:
return
# Otherwise, go ahead and create the source distribution tarball,
# or zipfile, or whatever.
self.make_distribution()
def check_metadata(self):
"""Deprecated API."""
warn("distutils.command.sdist.check_metadata is deprecated, \
use the check command instead", PendingDeprecationWarning)
check = self.distribution.get_command_obj('check')
check.ensure_finalized()
check.run()
def get_file_list(self):
"""Figure out the list of files to include in the source
distribution, and put it in 'self.filelist'. This might involve
reading the manifest template (and writing the manifest), or just
reading the manifest, or just using the default file set -- it all
depends on the user's options.
"""
# new behavior when using a template:
# the file list is recalculated every time because
# even if MANIFEST.in or setup.py are not changed
# the user might have added some files in the tree that
# need to be included.
#
# This makes --force the default and only behavior with templates.
template_exists = os.path.isfile(self.template)
if not template_exists and self._manifest_is_not_generated():
self.read_manifest()
self.filelist.sort()
self.filelist.remove_duplicates()
return
if not template_exists:
self.warn(("manifest template '%s' does not exist " +
"(using default file list)") %
self.template)
self.filelist.findall()
if self.use_defaults:
self.add_defaults()
if template_exists:
self.read_template()
if self.prune:
self.prune_file_list()
self.filelist.sort()
self.filelist.remove_duplicates()
self.write_manifest()
def add_defaults(self):
"""Add all the default files to self.filelist:
- README or README.txt
- setup.py
- test/test*.py
- all pure Python modules mentioned in setup script
- all files pointed by package_data (build_py)
- all files defined in data_files.
- all files defined as scripts.
- all C sources listed as part of extensions or C libraries
in the setup script (doesn't catch C headers!)
Warns if (README or README.txt) or setup.py are missing; everything
else is optional.
"""
standards = [('README', 'README.txt'), self.distribution.script_name]
for fn in standards:
if isinstance(fn, tuple):
alts = fn
got_it = False
for fn in alts:
if os.path.exists(fn):
got_it = True
self.filelist.append(fn)
break
if not got_it:
self.warn("standard file not found: should have one of " +
', '.join(alts))
else:
if os.path.exists(fn):
self.filelist.append(fn)
else:
self.warn("standard file '%s' not found" % fn)
optional = ['test/test*.py', 'setup.cfg']
for pattern in optional:
files = filter(os.path.isfile, glob(pattern))
self.filelist.extend(files)
# build_py is used to get:
# - python modules
# - files defined in package_data
build_py = self.get_finalized_command('build_py')
# getting python files
if self.distribution.has_pure_modules():
self.filelist.extend(build_py.get_source_files())
# getting package_data files
# (computed in build_py.data_files by build_py.finalize_options)
for pkg, src_dir, build_dir, filenames in build_py.data_files:
for filename in filenames:
self.filelist.append(os.path.join(src_dir, filename))
# getting distribution.data_files
if self.distribution.has_data_files():
for item in self.distribution.data_files:
if isinstance(item, str): # plain file
item = convert_path(item)
if os.path.isfile(item):
self.filelist.append(item)
else: # a (dirname, filenames) tuple
dirname, filenames = item
for f in filenames:
f = convert_path(f)
if os.path.isfile(f):
self.filelist.append(f)
if self.distribution.has_ext_modules():
build_ext = self.get_finalized_command('build_ext')
self.filelist.extend(build_ext.get_source_files())
if self.distribution.has_c_libraries():
build_clib = self.get_finalized_command('build_clib')
self.filelist.extend(build_clib.get_source_files())
if self.distribution.has_scripts():
build_scripts = self.get_finalized_command('build_scripts')
self.filelist.extend(build_scripts.get_source_files())
def read_template(self):
"""Read and parse manifest template file named by self.template.
(usually "MANIFEST.in") The parsing and processing is done by
'self.filelist', which updates itself accordingly.
"""
log.info("reading manifest template '%s'", self.template)
template = TextFile(self.template, strip_comments=1, skip_blanks=1,
join_lines=1, lstrip_ws=1, rstrip_ws=1,
collapse_join=1)
try:
while True:
line = template.readline()
if line is None: # end of file
break
try:
self.filelist.process_template_line(line)
# the call above can raise a DistutilsTemplateError for
# malformed lines, or a ValueError from the lower-level
# convert_path function
except (DistutilsTemplateError, ValueError) as msg:
self.warn("%s, line %d: %s" % (template.filename,
template.current_line,
msg))
finally:
template.close()
def prune_file_list(self):
"""Prune off branches that might slip into the file list as created
by 'read_template()', but really don't belong there:
* the build tree (typically "build")
* the release tree itself (only an issue if we ran "sdist"
previously with --keep-temp, or it aborted)
* any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
"""
build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname()
self.filelist.exclude_pattern(None, prefix=build.build_base)
self.filelist.exclude_pattern(None, prefix=base_dir)
if sys.platform == 'win32':
seps = r'/|\\'
else:
seps = '/'
vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr',
'_darcs']
vcs_ptrn = r'(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
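        # On POSIX this expands to r'(^|/)(RCS|CVS|\.svn|\.hg|\.git|\.bzr|_darcs)(/).*',
        # i.e. any path with a VCS metadata directory anywhere in it.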
self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
def write_manifest(self):
"""Write the file list in 'self.filelist' (presumably as filled in
by 'add_defaults()' and 'read_template()') to the manifest file
named by 'self.manifest'.
"""
if self._manifest_is_not_generated():
log.info("not writing to manually maintained "
"manifest file '%s'" % self.manifest)
return
content = self.filelist.files[:]
content.insert(0, '# file GENERATED by distutils, do NOT edit')
self.execute(file_util.write_file, (self.manifest, content),
"writing manifest file '%s'" % self.manifest)
def _manifest_is_not_generated(self):
# check for special comment used in 3.1.3 and higher
if not os.path.isfile(self.manifest):
return False
fp = open(self.manifest)
try:
first_line = fp.readline()
finally:
fp.close()
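        # write_manifest() always puts the sentinel comment on the first line,
        # so any other first line means the manifest is maintained by hand.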
return first_line != '# file GENERATED by distutils, do NOT edit\n'
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to
fill in 'self.filelist', the list of files to include in the source
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
manifest = open(self.manifest)
for line in manifest:
# ignore comments and blank lines
line = line.strip()
if line.startswith('#') or not line:
continue
self.filelist.append(line)
manifest.close()
def make_release_tree(self, base_dir, files):
"""Create the directory tree that will become the source
distribution archive. All directories implied by the filenames in
'files' are created under 'base_dir', and then we hard link or copy
(if hard linking is unavailable) those files into place.
Essentially, this duplicates the developer's source tree, but in a
directory named after the distribution, containing only the files
to be distributed.
"""
# Create all the directories under 'base_dir' necessary to
# put 'files' there; the 'mkpath()' is just so we don't die
# if the manifest happens to be empty.
self.mkpath(base_dir)
dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
# And walk over the list of files, either making a hard link (if
# os.link exists) to each one that doesn't already exist in its
# corresponding location under 'base_dir', or copying each file
# that's out-of-date in 'base_dir'. (Usually, all files will be
# out-of-date, because by default we blow away 'base_dir' when
# we're done making the distribution archives.)
if hasattr(os, 'link'): # can make hard links on this system
link = 'hard'
msg = "making hard links in %s..." % base_dir
else: # nope, have to copy
link = None
msg = "copying files to %s..." % base_dir
if not files:
log.warn("no files to distribute -- empty manifest?")
else:
log.info(msg)
for file in files:
if not os.path.isfile(file):
log.warn("'%s' not a regular file -- skipping" % file)
else:
dest = os.path.join(base_dir, file)
self.copy_file(file, dest, link=link)
self.distribution.metadata.write_pkg_info(base_dir)
def make_distribution(self):
"""Create the source distribution(s). First, we create the release
tree with 'make_release_tree()'; then, we create all required
archive files (according to 'self.formats') from the release tree.
Finally, we clean up by blowing away the release tree (unless
'self.keep_temp' is true). The list of archive files created is
stored so it can be retrieved later by 'get_archive_files()'.
"""
# Don't warn about missing meta-data here -- should be (and is!)
# done elsewhere.
base_dir = self.distribution.get_fullname()
base_name = os.path.join(self.dist_dir, base_dir)
self.make_release_tree(base_dir, self.filelist.files)
archive_files = [] # remember names of files we create
# tar archive must be created last to avoid overwrite and remove
if 'tar' in self.formats:
self.formats.append(self.formats.pop(self.formats.index('tar')))
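            # e.g. ['tar', 'gztar', 'zip'] becomes ['gztar', 'zip', 'tar']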
for fmt in self.formats:
file = self.make_archive(base_name, fmt, base_dir=base_dir)
archive_files.append(file)
self.distribution.dist_files.append(('sdist', '', file))
self.archive_files = archive_files
if not self.keep_temp:
dir_util.remove_tree(base_dir, dry_run=self.dry_run)
def get_archive_files(self):
"""Return the list of archive files created when the command
was run, or None if the command hasn't run yet.
"""
return self.archive_files
| timm/timmnix | pypy3-v5.5.0-linux64/lib-python/3/distutils/command/sdist.py | Python | mit | 17,891 | 0.000391 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import re
import waffle
from django.conf import settings
from django.shortcuts import redirect
class SoftLaunchMiddleware(object):
def __init__(self):
self.redirect_url = getattr(settings, 'SOFT_LAUNCH_REDIRECT_URL', '/')
regexes = getattr(settings, 'SOFT_LAUNCH_REGEXES', [])
self.regexes = [re.compile(r) for r in regexes]
def process_view(self, request, view_func, view_args, view_kwargs):
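        # Users with the 'full_access' waffle flag bypass the gate; everyone
        # else may only visit the redirect target or paths matching one of
        # the SOFT_LAUNCH_REGEXES patterns.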
if waffle.flag_is_active(request, 'full_access'):
return None
allowed = ((request.path == self.redirect_url) or
any(r.match(request.path) for r in self.regexes))
if not allowed:
return redirect(self.redirect_url)
| RickMohr/nyc-trees | src/nyc_trees/nyc_trees/middleware.py | Python | apache-2.0 | 846 | 0 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from abc import ABCMeta, abstractmethod
from django.core.files import File
from six import with_metaclass
from django.utils.module_loading import import_string
from rest_framework_tus import signals
from .settings import TUS_SAVE_HANDLER_CLASS
class AbstractUploadSaveHandler(with_metaclass(ABCMeta, object)):
def __init__(self, upload):
self.upload = upload
@abstractmethod
def handle_save(self):
pass
def run(self):
# Trigger state change
self.upload.start_saving()
self.upload.save()
# Initialize saving
self.handle_save()
def finish(self):
# Trigger signal
signals.saved.send(sender=self.__class__, instance=self)
# Finish
self.upload.finish()
self.upload.save()
class DefaultSaveHandler(AbstractUploadSaveHandler):
destination_file_field = 'uploaded_file'
def handle_save(self):
# Save temporary field to file field
file_field = getattr(self.upload, self.destination_file_field)
file_field.save(self.upload.filename, File(open(self.upload.temporary_file_path)))
# Finish upload
self.finish()
def get_save_handler(import_path=None):
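    # Resolve a dotted import path to a handler class; when no path is given,
    # fall back to the class named by the TUS_SAVE_HANDLER_CLASS setting.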
return import_string(import_path or TUS_SAVE_HANDLER_CLASS)
| dirkmoors/drf-tus | rest_framework_tus/storage.py | Python | mit | 1,346 | 0.000743 |
#!/usr/bin/env python3
# Copyright (c) 2018-2019 TurboCoin
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the scantxoutset rpc call."""
from test_framework.test_framework import TurbocoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
from decimal import Decimal
import shutil
import os
def descriptors(out):
return sorted(u['desc'] for u in out['unspents'])
class ScantxoutsetTest(TurbocoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(110)
addr_P2SH_SEGWIT = self.nodes[0].getnewaddress("", "p2sh-segwit")
pubk1 = self.nodes[0].getaddressinfo(addr_P2SH_SEGWIT)['pubkey']
addr_LEGACY = self.nodes[0].getnewaddress("", "legacy")
pubk2 = self.nodes[0].getaddressinfo(addr_LEGACY)['pubkey']
addr_BECH32 = self.nodes[0].getnewaddress("", "bech32")
pubk3 = self.nodes[0].getaddressinfo(addr_BECH32)['pubkey']
self.nodes[0].sendtoaddress(addr_P2SH_SEGWIT, 0.001)
self.nodes[0].sendtoaddress(addr_LEGACY, 0.002)
self.nodes[0].sendtoaddress(addr_BECH32, 0.004)
#send to child keys of tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK
self.nodes[0].sendtoaddress("mkHV1C6JLheLoUSSZYk7x3FH5tnx9bu7yc", 0.008) # (m/0'/0'/0')
self.nodes[0].sendtoaddress("mipUSRmJAj2KrjSvsPQtnP8ynUon7FhpCR", 0.016) # (m/0'/0'/1')
self.nodes[0].sendtoaddress("n37dAGe6Mq1HGM9t4b6rFEEsDGq7Fcgfqg", 0.032) # (m/0'/0'/1500')
self.nodes[0].sendtoaddress("mqS9Rpg8nNLAzxFExsgFLCnzHBsoQ3PRM6", 0.064) # (m/0'/0'/0)
self.nodes[0].sendtoaddress("mnTg5gVWr3rbhHaKjJv7EEEc76ZqHgSj4S", 0.128) # (m/0'/0'/1)
self.nodes[0].sendtoaddress("mketCd6B9U9Uee1iCsppDJJBHfvi6U6ukC", 0.256) # (m/0'/0'/1500)
self.nodes[0].sendtoaddress("mj8zFzrbBcdaWXowCQ1oPZ4qioBVzLzAp7", 0.512) # (m/1/1/0')
self.nodes[0].sendtoaddress("mfnKpKQEftniaoE1iXuMMePQU3PUpcNisA", 1.024) # (m/1/1/1')
self.nodes[0].sendtoaddress("mou6cB1kaP1nNJM1sryW6YRwnd4shTbXYQ", 2.048) # (m/1/1/1500')
self.nodes[0].sendtoaddress("mtfUoUax9L4tzXARpw1oTGxWyoogp52KhJ", 4.096) # (m/1/1/0)
self.nodes[0].sendtoaddress("mxp7w7j8S1Aq6L8StS2PqVvtt4HGxXEvdy", 8.192) # (m/1/1/1)
self.nodes[0].sendtoaddress("mpQ8rokAhp1TAtJQR6F6TaUmjAWkAWYYBq", 16.384) # (m/1/1/1500)
self.nodes[0].generate(1)
self.log.info("Stop node, remove wallet, mine again some blocks...")
self.stop_node(0)
shutil.rmtree(os.path.join(self.nodes[0].datadir, "regtest", 'wallets'))
self.start_node(0)
self.nodes[0].generate(110)
self.restart_node(0, ['-nowallet'])
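        # The wallet is gone now; everything below must be answered purely
        # from the UTXO set via scantxoutset.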
self.log.info("Test if we have found the non HD unspent outputs.")
assert_equal(self.nodes[0].scantxoutset("start", [ "pkh(" + pubk1 + ")", "pkh(" + pubk2 + ")", "pkh(" + pubk3 + ")"])['total_amount'], Decimal("0.002"))
assert_equal(self.nodes[0].scantxoutset("start", [ "wpkh(" + pubk1 + ")", "wpkh(" + pubk2 + ")", "wpkh(" + pubk3 + ")"])['total_amount'], Decimal("0.004"))
assert_equal(self.nodes[0].scantxoutset("start", [ "sh(wpkh(" + pubk1 + "))", "sh(wpkh(" + pubk2 + "))", "sh(wpkh(" + pubk3 + "))"])['total_amount'], Decimal("0.001"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(" + pubk1 + ")", "combo(" + pubk2 + ")", "combo(" + pubk3 + ")"])['total_amount'], Decimal("0.007"))
assert_equal(self.nodes[0].scantxoutset("start", [ "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")", "addr(" + addr_BECH32 + ")"])['total_amount'], Decimal("0.007"))
assert_equal(self.nodes[0].scantxoutset("start", [ "addr(" + addr_P2SH_SEGWIT + ")", "addr(" + addr_LEGACY + ")", "combo(" + pubk3 + ")"])['total_amount'], Decimal("0.007"))
self.log.info("Test range validation.")
assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": -1}])
assert_raises_rpc_error(-8, "Range should be greater or equal than 0", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": [-1, 10]}])
assert_raises_rpc_error(-8, "End of range is too high", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": [(2 << 31 + 1) - 1000000, (2 << 31 + 1)]}])
assert_raises_rpc_error(-8, "Range specified as [begin,end] must not have begin after end", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": [2, 1]}])
assert_raises_rpc_error(-8, "Range is too large", self.nodes[0].scantxoutset, "start", [ {"desc": "desc", "range": [0, 1000001]}])
self.log.info("Test extended key derivation.")
# Run various scans, and verify that the sum of the amounts of the matches corresponds to the expected subset.
# Note that all amounts in the UTXO set are powers of 2 multiplied by 0.001 TURBO, so each amounts uniquely identifies a subset.
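        # For example, a reported total of 0.024 can only be 0.008 + 0.016,
        # i.e. exactly the m/0'/0'/0' and m/0'/0'/1' outputs.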
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/0h)"])['total_amount'], Decimal("0.008"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/1h)"])['total_amount'], Decimal("0.016"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500')"])['total_amount'], Decimal("0.032"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0h/0)"])['total_amount'], Decimal("0.064"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/1)"])['total_amount'], Decimal("0.128"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/1500)"])['total_amount'], Decimal("0.256"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*h)", "range": 1499}])['total_amount'], Decimal("0.024"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0'/*h)", "range": 1500}])['total_amount'], Decimal("0.056"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])['total_amount'], Decimal("0.192"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0'/0h/*)", "range": 1500}])['total_amount'], Decimal("0.448"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0')"])['total_amount'], Decimal("0.512"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1')"])['total_amount'], Decimal("1.024"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500h)"])['total_amount'], Decimal("2.048"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])['total_amount'], Decimal("4.096"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1)"])['total_amount'], Decimal("8.192"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/1500)"])['total_amount'], Decimal("16.384"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/0)"])['total_amount'], Decimal("4.096"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo([abcdef88/1/2'/3/4h]tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1)"])['total_amount'], Decimal("8.192"))
assert_equal(self.nodes[0].scantxoutset("start", [ "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/1500)"])['total_amount'], Decimal("16.384"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1499}])['total_amount'], Decimal("1.536"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*')", "range": 1500}])['total_amount'], Decimal("3.584"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1499}])['total_amount'], Decimal("12.288"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])['total_amount'], Decimal("28.672"))
assert_equal(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": [1500,1500]}])['total_amount'], Decimal("16.384"))
# Test the reported descriptors for a few matches
assert_equal(descriptors(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/0'/*)", "range": 1499}])), ["pkh([0c5f9a1e/0'/0'/0]026dbd8b2315f296d36e6b6920b1579ca75569464875c7ebe869b536a7d9503c8c)#dzxw429x", "pkh([0c5f9a1e/0'/0'/1]033e6f25d76c00bedb3a8993c7d5739ee806397f0529b1b31dda31ef890f19a60c)#43rvceed"])
assert_equal(descriptors(self.nodes[0].scantxoutset("start", [ "combo(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/1/1/0)"])), ["pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8"])
assert_equal(descriptors(self.nodes[0].scantxoutset("start", [ {"desc": "combo(tpubD6NzVbkrYhZ4WaWSyoBvQwbpLkojyoTZPRsgXELWz3Popb3qkjcJyJUGLnL4qHHoQvao8ESaAstxYSnhyswJ76uZPStJRJCTKvosUCJZL5B/1/1/*)", "range": 1500}])), ['pkh([0c5f9a1e/1/1/0]03e1c5b6e650966971d7e71ef2674f80222752740fc1dfd63bbbd220d2da9bd0fb)#cxmct4w8', 'pkh([0c5f9a1e/1/1/1500]03832901c250025da2aebae2bfb38d5c703a57ab66ad477f9c578bfbcd78abca6f)#vchwd07g', 'pkh([0c5f9a1e/1/1/1]030d820fc9e8211c4169be8530efbc632775d8286167afd178caaf1089b77daba7)#z2t3ypsa'])
if __name__ == '__main__':
ScantxoutsetTest().main()
| Phonemetra/TurboCoin | test/functional/rpc_scantxoutset.py | Python | mit | 12,820 | 0.008892 |
"""
Examples of Wavelets
--------------------
Figure 10.9
Wavelets for several values of wavelet parameters Q and f0. Solid lines show
the real part and dashed lines show the imaginary part (see eq. 10.16).
"""
# Author: Jake VanderPlas
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
import numpy as np
from matplotlib import pyplot as plt
from astroML.fourier import FT_continuous, IFT_continuous, sinegauss
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX. This may
# result in an error if LaTeX is not installed on your system. In that case,
# you can set usetex to False.
from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)
#------------------------------------------------------------
# Set up the wavelets
t0 = 0
t = np.linspace(-0.4, 0.4, 10000)
f0 = np.array([5, 5, 10, 10])
Q = np.array([1, 0.5, 1, 0.5])
# compute wavelets all at once
W = sinegauss(t, t0, f0[:, None], Q[:, None])
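# f0[:, None] and Q[:, None] broadcast against the 1-D time array, so W has
# shape (4, 10000): one row of complex wavelet samples per (f0, Q) pair.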
#------------------------------------------------------------
# Plot the wavelets
fig = plt.figure(figsize=(5, 3.75))
fig.subplots_adjust(hspace=0.05, wspace=0.05)
# in each panel, plot and label a different wavelet
for i in range(4):
ax = fig.add_subplot(221 + i)
ax.plot(t, W[i].real, '-k')
ax.plot(t, W[i].imag, '--k')
ax.text(0.04, 0.95, "$f_0 = %i$\n$Q = %.1f$" % (f0[i], Q[i]),
ha='left', va='top', transform=ax.transAxes)
ax.set_ylim(-1.2, 1.2)
ax.set_xlim(-0.35, 0.35)
ax.xaxis.set_major_locator(plt.MultipleLocator(0.2))
if i in (0, 1):
ax.xaxis.set_major_formatter(plt.NullFormatter())
else:
ax.set_xlabel('$t$')
if i in (1, 3):
ax.yaxis.set_major_formatter(plt.NullFormatter())
else:
ax.set_ylabel('$w(t)$')
plt.show()
| eramirem/astroML | book_figures/chapter10/fig_wavelets.py | Python | bsd-2-clause | 2,201 | 0.001363 |
from .inverse import RandomInverseModel
from .sciopt import BFGSInverseModel, COBYLAInverseModel
from .nn import NNInverseModel
from .wnn import WeightedNNInverseModel, ESWNNInverseModel
from .cmamodel import CMAESInverseModel
from .jacobian import JacobianInverseModel
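# Convenience re-exports: callers can import any inverse model directly from
# this package, e.g.
#   from explauto.sensorimotor_model.inverse import NNInverseModel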
| jgrizou/explauto | explauto/sensorimotor_model/inverse/__init__.py | Python | gpl-3.0 | 284 | 0.014085 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('depot', '0002_lineitem'),
]
operations = [
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=50)),
('address', models.TextField()),
('email', models.EmailField(max_length=75)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='lineitem',
name='order',
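            # One-off default, used only to backfill existing rows during this
            # migration (hence preserve_default=False below).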
field=models.ForeignKey(default=datetime.date(2014, 9, 30), to='depot.Order'),
preserve_default=False,
),
]
| kwailamchan/programming-languages | python/django/artdepot/artdepot/depot/migrations/0003_auto_20140930_2137.py | Python | mit | 942 | 0.002123 |
# Plotting performance of string_subst_.py scripts
# bar chart of relative comparison with variances as error bars
import numpy as np
import matplotlib.pyplot as plt
performance = [10.3882388499416,1,10.3212281215746]
variance = [0.790435196936213,0,0.827207394592818]
scripts = ['string_subst_1.py', 'string_subst_2.py', 'string_subst_3.py']
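# string_subst_2.py is the baseline (relative performance 1, variance 0);
# the dashed line at y=1 below marks it on the chart.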
x_pos = np.arange(len(scripts))
plt.bar(x_pos, performance, yerr=variance, align='center', alpha=0.5)
plt.xticks(x_pos, scripts)
plt.axhline(y=1, linestyle='--', color='black')
plt.ylim([0,12])
plt.ylabel('rel. performance gain')
plt.title('String substitution - Speed improvements')
#plt.show()
plt.savefig('PNGs/string_subst_bar.png')
| pswaminathan/python_efficiency_tweaks | plots/plot_string_subst_bar.py | Python | gpl-3.0 | 686 | 0.008746 |
from datetime import datetime, timedelta
from django import http
from django.conf import settings
from django.core.exceptions import PermissionDenied
import mock
import pytest
from olympia.amo.tests import BaseTestCase, TestCase
from olympia.amo import decorators, get_user, set_user
from olympia.amo.urlresolvers import reverse
from olympia.users.models import UserProfile
pytestmark = pytest.mark.django_db
def test_post_required():
def func(request):
return mock.sentinel.response
g = decorators.post_required(func)
request = mock.Mock()
request.method = 'GET'
assert isinstance(g(request), http.HttpResponseNotAllowed)
request.method = 'POST'
assert g(request) == mock.sentinel.response
def test_json_view():
"""Turns a Python object into a response."""
def func(request):
return {'x': 1}
response = decorators.json_view(func)(mock.Mock())
assert isinstance(response, http.HttpResponse)
assert response.content == '{"x": 1}'
assert response['Content-Type'] == 'application/json'
assert response.status_code == 200
def test_json_view_normal_response():
"""Normal responses get passed through."""
expected = http.HttpResponseForbidden()
def func(request):
return expected
response = decorators.json_view(func)(mock.Mock())
assert expected is response
assert response['Content-Type'] == 'text/html; charset=utf-8'
def test_json_view_error():
"""json_view.error returns 400 responses."""
response = decorators.json_view.error({'msg': 'error'})
assert isinstance(response, http.HttpResponseBadRequest)
assert response.content == '{"msg": "error"}'
assert response['Content-Type'] == 'application/json'
def test_json_view_status():
def func(request):
return {'x': 1}
response = decorators.json_view(func, status_code=202)(mock.Mock())
assert response.status_code == 202
def test_json_view_response_status():
response = decorators.json_response({'msg': 'error'}, status_code=202)
assert response.content == '{"msg": "error"}'
assert response['Content-Type'] == 'application/json'
assert response.status_code == 202
class TestTaskUser(TestCase):
fixtures = ['base/users']
def test_set_task_user(self):
@decorators.set_task_user
def some_func():
return get_user()
set_user(UserProfile.objects.get(username='regularuser'))
assert get_user().pk == 999
assert some_func().pk == int(settings.TASK_USER_ID)
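        # The task user was only swapped in for the duration of the call;
        # the previously set user must be restored afterwards.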
assert get_user().pk == 999
class TestLoginRequired(BaseTestCase):
def setUp(self):
super(TestLoginRequired, self).setUp()
self.f = mock.Mock()
self.f.__name__ = 'function'
self.request = mock.Mock()
self.request.user.is_authenticated.return_value = False
self.request.get_full_path.return_value = 'path'
def test_normal(self):
func = decorators.login_required(self.f)
response = func(self.request)
assert not self.f.called
assert response.status_code == 302
assert response['Location'] == (
'%s?to=%s' % (reverse('users.login'), 'path'))
def test_no_redirect(self):
func = decorators.login_required(self.f, redirect=False)
response = func(self.request)
assert not self.f.called
assert response.status_code == 401
def test_decorator_syntax(self):
# @login_required(redirect=False)
func = decorators.login_required(redirect=False)(self.f)
response = func(self.request)
assert not self.f.called
assert response.status_code == 401
def test_no_redirect_success(self):
func = decorators.login_required(redirect=False)(self.f)
self.request.user.is_authenticated.return_value = True
func(self.request)
assert self.f.called
class TestSetModifiedOn(TestCase):
fixtures = ['base/users']
@decorators.set_modified_on
def some_method(self, worked):
return worked
def test_set_modified_on(self):
users = list(UserProfile.objects.all()[:3])
self.some_method(True, set_modified_on=users)
for user in users:
assert UserProfile.objects.get(pk=user.pk).modified.date() == (
datetime.today().date())
def test_not_set_modified_on(self):
yesterday = datetime.today() - timedelta(days=1)
qs = UserProfile.objects.all()
qs.update(modified=yesterday)
users = list(qs[:3])
self.some_method(False, set_modified_on=users)
for user in users:
date = UserProfile.objects.get(pk=user.pk).modified.date()
assert date < datetime.today().date()
class TestPermissionRequired(TestCase):
def setUp(self):
super(TestPermissionRequired, self).setUp()
self.f = mock.Mock()
self.f.__name__ = 'function'
self.request = mock.Mock()
@mock.patch('olympia.access.acl.action_allowed')
def test_permission_not_allowed(self, action_allowed):
action_allowed.return_value = False
func = decorators.permission_required('', '')(self.f)
with self.assertRaises(PermissionDenied):
func(self.request)
@mock.patch('olympia.access.acl.action_allowed')
def test_permission_allowed(self, action_allowed):
action_allowed.return_value = True
func = decorators.permission_required('', '')(self.f)
func(self.request)
assert self.f.called
@mock.patch('olympia.access.acl.action_allowed')
def test_permission_allowed_correctly(self, action_allowed):
func = decorators.permission_required('Admin', '%')(self.f)
func(self.request)
action_allowed.assert_called_with(self.request, 'Admin', '%')
| andymckay/addons-server | src/olympia/amo/tests/test_decorators.py | Python | bsd-3-clause | 5,822 | 0 |