repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (string, 19 distinct values) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (string, 15 distinct values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 distinct value)
---|---|---|---|---|---|---|---|---|---|---
joosthoeks/jhBacktest | jhbacktest/strategy/strategy.py | 1 | 22478 |
from jhbacktest.data import *
import jhbacktest.stats as jhstats
from tabulate import tabulate
import termcolor as tc
class Strategy(object):
def __init__(self, df, slippage=0):
self.__df = df
__data = Data()
self.__df_np = __data.df2numpy(self.__df)
self.__df_index = -1
self.__long_signal = False
self.__short_signal = False
self.__long_pos = False
self.__short_pos = False
self.__bar_in_market = 0
self.__bar_up_count = 0
self.__bar_down_count = 0
self.__slippage = slippage
self.__buy_and_hold_pos_start = 0
self.__buy_and_hold_pos_end = 0
self.__benchmark_start = 0
self.__benchmark_list = []
self.__pos_long_dict = {}
self.__pos_short_dict = {}
self.__long_pos_start = 0
self.__long_pos_end = 0
self.__long_pos_count = 0
self.__long_pos_open = 0
self.__long_pos_close = 0
self.__short_pos_start = 0
self.__short_pos_end = 0
self.__short_pos_count = 0
self.__short_pos_open = 0
self.__short_pos_close = 0
self.__last_trade_bar_index = 0
self.__total_values_list = []
self.__profit_values_list = []
self.__loss_values_list = []
self.__bar_drawdown_list = []
self.__position_drawdown_list = []
self.__equity_curve = 0
self.__equity_curve_list = []
self.__open_equity_curve = 0
self.__open_equity_curve_list = []
def get_df(self):
return self.__df
def get_df_np(self, price='Close', index=None):
if index is None:
return self.__df_np[price]
return self.__df_np[price][index]
def get_total_values_list(self):
return self.__total_values_list
def set_buy_and_hold(self, bar):
if self.__buy_and_hold_pos_start == 0:
self.__buy_and_hold_pos_start = bar['Open']
self.__buy_and_hold_pos_end = bar['Close']
def get_result_buy_and_hold_absolute(self):
result = self.__buy_and_hold_pos_end - self.__buy_and_hold_pos_start
result = self.add_slippage(result)
return result
def set_benchmark_list(self, bar):
benchmark = 0
if self.__benchmark_start == 0:
self.__benchmark_start = bar['Open']
else:
benchmark = bar['Open'] - self.__benchmark_start
self.__benchmark_list.append(benchmark)
def get_benchmark_list(self):
return self.__benchmark_list
def set_pos_long(self, pos):
self.__pos_long_dict[self.get_df_index()] = pos
def get_pos_long(self, key):
return self.__pos_long_dict[key]
def set_pos_short(self, pos):
self.__pos_short_dict[self.get_df_index()] = pos
def get_pos_short(self, key):
return self.__pos_short_dict[key]
def get_long_pos_start(self):
return self.__long_pos_start
def get_long_pos_end(self):
return self.__long_pos_end
def get_long_pos_count(self):
return self.__long_pos_count
def get_long_pos_open(self):
return self.__long_pos_open
def set_long_pos_open(self, price):
self.__long_pos_open = price
def get_long_pos_close(self):
return self.__long_pos_close
def set_long_pos_close(self, price):
self.__long_pos_close = price
def get_short_pos_start(self):
return self.__short_pos_start
def get_short_pos_end(self):
return self.__short_pos_end
def get_short_pos_count(self):
return self.__short_pos_count
def get_short_pos_open(self):
return self.__short_pos_open
def set_short_pos_open(self, price):
self.__short_pos_open = price
def get_short_pos_close(self):
return self.__short_pos_close
def set_short_pos_close(self, price):
self.__short_pos_close = price
def get_last_trade_bar_index(self):
return self.__last_trade_bar_index
def set_last_trade_bar_index(self, index):
self.__last_trade_bar_index = index
def get_df_index(self):
return self.__df_index
def set_df_index(self):
self.__df_index += 1
def get_bars_total(self):
return self.__df_index + 1
def get_bars_in_market(self):
return self.__bar_in_market
def set_bars_in_market(self):
if self.__long_pos or self.__short_pos:
self.__bar_in_market += 1
def get_procent_in_market(self):
return (float(self.get_bars_in_market()) / self.get_bars_total() * 100)
def set_bar_up_count(self, bar):
if bar['Open'] < bar['Close']:
self.__bar_up_count += 1
def get_bar_up_count(self):
return self.__bar_up_count
def set_bar_down_count(self, bar):
if bar['Open'] > bar['Close']:
self.__bar_down_count += 1
def get_bar_down_count(self):
return self.__bar_down_count
def get_max_bar_drawdown_absolute(self):
if len(self.__bar_drawdown_list) == 0:
return 0
return max(self.__bar_drawdown_list) * -1
def get_max_position_drawdown_absolute(self):
if len(self.__position_drawdown_list) == 0:
return 0
return max(self.__position_drawdown_list) * -1
def set_drawdown(self, bar):
if self.__long_pos:
bar_drawdown = (bar['Open'] - bar['Low'])
position_drawdown = (self.__long_pos_start - bar['Low'])
elif self.__short_pos:
bar_drawdown = (bar['High'] - bar['Open'])
position_drawdown = (bar['High'] - self.__short_pos_start)
else:
bar_drawdown = 0
position_drawdown = 0
self.__bar_drawdown_list.append(bar_drawdown)
self.__position_drawdown_list.append(position_drawdown)
def get_max_consecutive_loss_count(self):
loss_count = 0
loss_count_list = []
for value in self.__total_values_list:
if value < 0:
loss_count += 1
loss_count_list.append(loss_count)
else:
loss_count = 0
if len(loss_count_list) == 0:
return 0
return max(loss_count_list)
def get_max_consecutive_loss_absolute(self):
loss_value = 0
loss_value_list = []
for value in self.__total_values_list:
if value < 0:
loss_value += value
loss_value_list.append(loss_value)
else:
loss_value = 0
if len(loss_value_list) == 0:
return 0
return min(loss_value_list)
def get_max_consecutive_profit_count(self):
profit_count = 0
profit_count_list = []
for value in self.__total_values_list:
if value > 0:
profit_count += 1
profit_count_list.append(profit_count)
else:
profit_count = 0
if len(profit_count_list) == 0:
return 0
return max(profit_count_list)
def get_max_consecutive_profit_absolute(self):
profit_value = 0
profit_value_list = []
for value in self.__total_values_list:
if value > 0:
profit_value += value
profit_value_list.append(profit_value)
else:
profit_value = 0
if len(profit_value_list) == 0:
return 0
return max(profit_value_list)
def set_equity_curve(self, bar):
"""
set equity curve
"""
result = 0
if self.__long_pos_start != 0 and self.__long_pos_end != 0:
result = self.__long_pos_end - self.__long_pos_start
result = self.add_slippage(result)
self.__total_values_list.append(result)
if result > 0:
self.__profit_values_list.append(result)
else:
self.__loss_values_list.append(result)
self.__long_pos_start = 0
self.__long_pos_end = 0
self.__long_pos_open = 0
self.__long_pos_close = 0
if self.__short_pos_start != 0 and self.__short_pos_end != 0:
result = self.__short_pos_start - self.__short_pos_end
result = self.add_slippage(result)
self.__total_values_list.append(result)
if result > 0:
self.__profit_values_list.append(result)
else:
self.__loss_values_list.append(result)
self.__short_pos_start = 0
self.__short_pos_end = 0
self.__short_pos_open = 0
self.__short_pos_close = 0
self.__equity_curve += result
self.__equity_curve_list.append(self.__equity_curve)
def get_equity_curve_list(self):
return self.__equity_curve_list
# TODO
# problem with open equity curve:
def set_open_equity_curve(self, bar):
"""
set open equity curve
"""
open_price = bar['Open']
price = bar['Close']
last_price = self.__df['Close'][self.get_df_index() - 1]
open_result = 0
open_result_long = 0
if self.get_pos_long(self.get_df_index()) == 'start':
open_result_long = price - open_price
if self.get_pos_long(self.get_df_index()) == 'end':
open_result_long = last_price - open_price
if self.get_pos_long(self.get_df_index()) == 'in':
open_result_long = price - last_price
open_result_short = 0
if self.get_pos_short(self.get_df_index()) == 'start':
open_result_short = open_price - price
if self.get_pos_short(self.get_df_index()) == 'end':
open_result_short = open_price - last_price
if self.get_pos_short(self.get_df_index()) == 'in':
open_result_short = last_price - price
open_result = open_result_long + open_result_short
self.__open_equity_curve += open_result
self.__open_equity_curve_list.append(self.__open_equity_curve)
def get_open_equity_curve_list(self):
return self.__open_equity_curve_list
def add_slippage(self, result):
if self.__slippage > 0:
result = result - (self.__slippage * 2)
return result
def get_format_str(self, value):
return ('%.8f' % round(value, 8))
def enter_long_signal(self, bar, print_output):
self.__long_signal = True
if print_output:
print ('%s ################################################### ENTER LONG SIGNAL ############################' % bar['datetime'])
def exit_long_signal(self, bar, print_output):
self.__long_signal = False
if print_output:
print ('%s ################################################### EXIT LONG SIGNAL #############################' % bar['datetime'])
def enter_short_signal(self, bar, print_output):
self.__short_signal = True
if print_output:
print ('%s ################################################### ENTER SHORT SIGNAL ###########################' % bar['datetime'])
def exit_short_signal(self, bar, print_output):
self.__short_signal = False
if print_output:
print ('%s ################################################### EXIT SHORT SIGNAL ############################' % bar['datetime'])
def enter_long_pos(self, bar, print_output):
self.__long_pos = True
if self.__long_pos_open > 0:
self.__long_pos_start = self.__long_pos_open
else:
self.__long_pos_start = bar['Open']
self.__long_pos_count += 1
self.set_pos_long('enter')
self.set_last_trade_bar_index(self.get_df_index())
if print_output:
print ('%s ################################################### ENTER LONG AT %s #########################' % \
(bar['datetime'], self.get_format_str(self.__long_pos_start)))
def exit_long_pos(self, bar, print_output):
self.__long_pos = False
if self.__long_pos_close > 0:
self.__long_pos_end = self.__long_pos_close
else:
self.__long_pos_end = bar['Open']
self.set_pos_long('exit')
self.set_last_trade_bar_index(0)
profit = self.__long_pos_end - self.__long_pos_start
color = 'red'
if profit > 0:
color = 'green'
if print_output:
print ('%s ################################################### EXIT LONG AT %s ########################## PROFIT: %s' % \
(bar['datetime'], self.get_format_str(self.__long_pos_end), tc.colored(profit, color)))
def enter_short_pos(self, bar, print_output):
self.__short_pos = True
if self.__short_pos_open > 0:
self.__short_pos_start = self.__short_pos_open
else:
self.__short_pos_start = bar['Open']
self.__short_pos_count += 1
self.set_pos_short('enter')
self.set_last_trade_bar_index(self.get_df_index())
if print_output:
print ('%s ################################################### ENTER SHORT AT %s ########################' % \
(bar['datetime'], self.get_format_str(self.__short_pos_start)))
def exit_short_pos(self, bar, print_output):
self.__short_pos = False
if self.__short_pos_close > 0:
self.__short_pos_end = self.__short_pos_close
else:
self.__short_pos_end = bar['Open']
self.set_pos_short('exit')
self.set_last_trade_bar_index(0)
profit = self.__short_pos_start - self.__short_pos_end
color = 'red'
if profit > 0:
color = 'green'
if print_output:
print ('%s ################################################### EXIT SHORT AT %s ######################### PROFIT: %s' % \
(bar['datetime'], self.get_format_str(self.__short_pos_end), tc.colored(profit, color)))
def check_do_long_pos(self, bar, print_output):
# check and do long position:
if self.__long_signal:
if self.__long_pos is False:
self.enter_long_pos(bar, print_output)
else:
if self.__long_pos:
self.exit_long_pos(bar, print_output)
def check_do_short_pos(self, bar, print_output):
# check and do short position:
if self.__short_signal:
if self.__short_pos is False:
self.enter_short_pos(bar, print_output)
else:
if self.__short_pos:
self.exit_short_pos(bar, print_output)
def on_bars(self, bar, print_output):
# current bar:
if print_output:
print (('%s Open: %s High: %s Low: %s Close: %s Volume: %s indicator: %s' % \
(
bar['datetime'],
self.get_format_str(bar['Open']),
self.get_format_str(bar['High']),
self.get_format_str(bar['Low']),
self.get_format_str(bar['Close']),
bar['Volume'],
# self.get_format_str(indicator)
self.get_color(self._indicator[self.get_df_index()])
)))
def check_do_long_signal(self, bar, print_output):
# check and do long signal:
if self.__long_signal:
if self.check_exit_long_signal(bar):
self.exit_long_signal(bar, print_output)
else:
if self.check_enter_long_signal(bar):
self.enter_long_signal(bar, print_output)
def check_do_short_signal(self, bar, print_output):
# check and do short signal:
if self.__short_signal:
if self.check_exit_short_signal(bar):
self.exit_short_signal(bar, print_output)
else:
if self.check_enter_short_signal(bar):
self.enter_short_signal(bar, print_output)
def run(self, print_output=True):
i = 0
while i < len(self.__df['Close']):
bar = {}
bar['datetime'] = self.__df['datetime'][i]
bar['Open'] = self.__df['Open'][i]
bar['High'] = self.__df['High'][i]
bar['Low'] = self.__df['Low'][i]
bar['Close'] = self.__df['Close'][i]
bar['Volume'] = self.__df['Volume'][i]
self.set_df_index()
self.check_do_long_pos(bar, print_output)
self.check_do_short_pos(bar, print_output)
self.set_pos_long('out')
if self.__long_pos:
self.set_pos_long('in')
self.set_pos_short('out')
if self.__short_pos:
self.set_pos_short('in')
self.on_bars(bar, print_output)
self.check_do_long_signal(bar, print_output)
self.check_do_short_signal(bar, print_output)
self.set_buy_and_hold(bar)
self.set_benchmark_list(bar)
self.set_equity_curve(bar)
self.set_open_equity_curve(bar)
self.set_bars_in_market()
self.set_bar_up_count(bar)
self.set_bar_down_count(bar)
self.set_drawdown(bar)
i += 1
def get_analysis(self):
headers = [
'Total trades %s' % jhstats.get_count(self.__total_values_list), 'Absolute',
'Profit trades %s' % jhstats.get_count(self.__profit_values_list), 'Absolute',
'Loss trades %s' % jhstats.get_count(self.__loss_values_list), 'Absolute'
]
table = [
[
'Total result', self.get_color(jhstats.get_absolute(self.__total_values_list)),
'Total result', self.get_color(jhstats.get_absolute(self.__profit_values_list)),
'Total result', self.get_color(jhstats.get_absolute(self.__loss_values_list))
],
[
'Max result', self.get_color(jhstats.get_max_absolute(self.__total_values_list)),
'Max result', self.get_color(jhstats.get_max_absolute(self.__profit_values_list)),
'Max result', self.get_color(jhstats.get_max_absolute(self.__loss_values_list))
],
[
'Min result', self.get_color(jhstats.get_min_absolute(self.__total_values_list)),
'Min result', self.get_color(jhstats.get_min_absolute(self.__profit_values_list)),
'Min result', self.get_color(jhstats.get_min_absolute(self.__loss_values_list))
],
[
'Mean', self.get_color(jhstats.get_mean_absolute(self.__total_values_list)),
'Mean', self.get_color(jhstats.get_mean_absolute(self.__profit_values_list)),
'Mean', self.get_color(jhstats.get_mean_absolute(self.__loss_values_list))
],
[
'Median', self.get_color(jhstats.get_median_absolute(self.__total_values_list)),
'Median', self.get_color(jhstats.get_median_absolute(self.__profit_values_list)),
'Median', self.get_color(jhstats.get_median_absolute(self.__loss_values_list))
],
[
'Variance', self.get_color(jhstats.get_variance_absolute(self.__total_values_list)),
'Variance', self.get_color(jhstats.get_variance_absolute(self.__profit_values_list)),
'Variance', self.get_color(jhstats.get_variance_absolute(self.__loss_values_list))
],
[
'Std Dev', self.get_color(jhstats.get_std_dev_absolute(self.__total_values_list)),
'Std Dev', self.get_color(jhstats.get_std_dev_absolute(self.__profit_values_list)),
'Std Dev', self.get_color(jhstats.get_std_dev_absolute(self.__loss_values_list))
]
]
print (tabulate(table, headers, tablefmt='grid', floatfmt='.4f'))
headers = ['Description', 'Value']
table = [
['Hitrate %', self.get_color(jhstats.get_hitrate(self.__total_values_list, self.__profit_values_list), 50)],
['Profit Loss Ratio', self.get_color(jhstats.get_profit_loss_ratio(jhstats.get_mean_absolute(self.__profit_values_list), jhstats.get_mean_absolute(self.__loss_values_list)), 1)],
['Expected Value', self.get_color(jhstats.get_expected_value(jhstats.get_hitrate(self.__total_values_list, self.__profit_values_list), jhstats.get_mean_absolute(self.__profit_values_list), jhstats.get_mean_absolute(self.__loss_values_list)))],
['Probability of Ruin (POR) (table of Lucas & LeBeau)', jhstats.get_por_lucas_and_lebeau(jhstats.get_hitrate(self.__total_values_list, self.__profit_values_list), jhstats.get_mean_absolute(self.__profit_values_list), jhstats.get_mean_absolute(self.__loss_values_list))],
['Total bars', self.get_bars_total()],
['Up bars', self.get_bar_up_count()],
['Down bars', self.get_bar_down_count()],
['In market bars', self.get_bars_in_market()],
['In market %', self.get_procent_in_market()],
['Long positions', self.get_long_pos_count()],
['Short positions', self.get_short_pos_count()],
['Result (buy & hold) absolute', self.get_color(self.get_result_buy_and_hold_absolute())]
]
print (tabulate(table, headers, tablefmt='grid', floatfmt='.4f'))
headers = ['Description', 'Value', 'Description', 'Value', 'Description', 'Value']
table = [
[
'Consecutive profit count', self.get_max_consecutive_profit_count(),
'Consecutive loss count', self.get_max_consecutive_loss_count(),
'Max bar drawdown absolute', self.get_color(self.get_max_bar_drawdown_absolute())
],
[
'Consecutive profit absolute', self.get_color(self.get_max_consecutive_profit_absolute()),
'Consecutive loss absolute', self.get_color(self.get_max_consecutive_loss_absolute()),
'Max position drawdown absolute', self.get_color(self.get_max_position_drawdown_absolute())
]
]
print (tabulate(table, headers, tablefmt='grid', floatfmt='.4f'))
def get_color(self, value, split=0):
color = 'red'
if value > split:
color = 'green'
return tc.colored(value, color)
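# --- Illustrative usage sketch (not part of the original file) ---
# Strategy leaves the four check_* hooks to subclasses; run() calls them on
# every bar. The toy rule below and the CSV file name are assumptions made
# purely for illustration, not part of jhBacktest itself.
class ExampleLongOnlyStrategy(Strategy):
    def check_enter_long_signal(self, bar):
        return bar['Close'] > bar['Open']   # toy rule: enter after an up bar
    def check_exit_long_signal(self, bar):
        return bar['Close'] < bar['Open']   # toy rule: exit after a down bar
    def check_enter_short_signal(self, bar):
        return False                        # long-only in this sketch
    def check_exit_short_signal(self, bar):
        return False
# Expected input: a DataFrame-like object with integer-indexed 'datetime',
# 'Open', 'High', 'Low', 'Close' and 'Volume' columns, e.g.:
#   df = pandas.read_csv('ohlcv.csv')                # hypothetical file
#   s = ExampleLongOnlyStrategy(df, slippage=0.01)
#   s.run(print_output=False)
#   s.get_analysis()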
| gpl-3.0 | -576,245,011,368,361,200 | 37.423932 | 282 | 0.533989 | false |
tosaka2/tacotron | models/tacotron.py | 1 | 4014 |
from util import textinput
from util.infolog import log
import chainer
from chainer import Variable
import chainer.functions as F
import chainer.links as L
import numpy as np
from .modules import get_encoder_cbhg, get_decoder_cbhg, PreNet, Attention
from hparams import hparams as hp
def sequence_embed(embed, xs):
x_len = [len(x) for x in xs]
x_section = np.cumsum(x_len[:-1])
ex = embed(F.concat(xs, axis=0))
exs = F.split_axis(ex, x_section, 0)
return exs
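# --- Illustrative usage sketch (not part of the original file) ---
# sequence_embed embeds a batch of variable-length ID sequences with a single
# EmbedID call and then splits the result back per sequence. The vocabulary
# size and the sample IDs below are arbitrary assumptions.
def _example_sequence_embed():
    embed = L.EmbedID(100, 16)
    xs = [np.array([1, 5, 2], dtype=np.int32), np.array([7, 3], dtype=np.int32)]
    exs = sequence_embed(embed, xs)
    # exs[0].shape == (3, 16), exs[1].shape == (2, 16)
    return exs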
class Tacotron(chainer.Chain):
def __init__(self, n_source_vocab, n_units):
super(Tacotron, self).__init__()
with self.init_scope():
self.embed = L.EmbedID(n_source_vocab, n_units)
self.encoder_prenet = PreNet()
self.decoder_prenet = PreNet()
self.encoder_cbhg = get_encoder_cbhg()
self.decoder_cbhg = get_decoder_cbhg()
# TODO: change to NStepGRU
self.attention_rnn = L.GRU(hp.num_mels * hp.outputs_per_step, 256)
self.attention = Attention(256)
self.decoder_rnn1 = L.GRU(512, 256)
self.decoder_rnn2 = L.GRU(256, 256)
self.calc_mel = L.Linear(256, hp.num_mels * hp.outputs_per_step)
# compute the training loss from the input
def __call__(self, input, t):
mel_t, lin_t = t # uncertain
# Embeddings
embed_output = sequence_embed(self.embed, input) # [N, T_in, n_units]
# Encoder
encoder_prenet_output = self.encoder_prenet(embed_output) # [N, T_in, 128] # uncertain
encoder_outputs = self.encoder_cbhg(encoder_prenet_output) # [N, T_in, 256]
# Decoder
e = Variable(np.zeros(mel_t.shape))
out_mels = Variable(np.array([]))
for i in range(mel_t.shape[1]):
if i > 0:
e = mel_t[:,i - 1] # uncertain
prenet_output = self.decoder_prenet()
_, rnn_output = self.attention_rnn(e) # [N, 256]
# Attention
context_vector = self.attention() # [N, 256]
decoder_input = F.concat((rnn_output, context_vector)) # [N, 512]
decoder_rnn1_output = self.decoder_rnn1(decoder_input) # [N, 256]
decoder_rnn2_output = self.decoder_rnn2(decoder_rnn1_output) # [N, 256]
decoder_output = decoder_rnn1_output + decoder_rnn2_output # [N, 256]
mel = self.calc_mel(decoder_output) # [N, r * num_mels]
out_mels = F.concat((out_mels, mel))
out_lins = self.decoder_cbhg(out_mels)
# L1 loss
mel_loss = F.mean_absolute_error(mel_t, out_mels)
lin_loss = F.mean_absolute_error(lin_t, out_lins)
# TODO: calculate loss from griffin_lim
return mel_loss + lin_loss
spectrogram = Variable(np.array([]))
# synthesize spectrogram
def output(self, input, max_length=100):
with chainer.no_backprop_mode():
with chainer.using_config('train', False):
# Embeddings
embed_output = sequence_embed(self.embed, input) # [N, T_in, n_units]
# Encoder
encoder_prenet_output = self.encoder_prenet(embed_output) # [N, T_in, 128] # uncertain
encoder_outputs = self.encoder_cbhg(encoder_prenet_output) # [N, T_in, 256]
# Decoder
e = Variable(np.zeros(mel_t.shape))
out_mels = Variable(np.array([]))
for i in range(max_length): #TODO: loop for output length (until A becomes 0 vector)
if i > 0:
e = mel_t[:,i - 1] # uncertain
prenet_output = self.decoder_prenet()
_, rnn_output = self.attention_rnn(h) # [N, 256]
# Attention
context_vector = self.attention() # [N, 256]
decoder_input = F.concat((rnn_output, context_vector)) # [N, 512]
decoder_rnn1_output = self.decoder_rnn1(decoder_input) # [N, 256]
decoder_rnn2_output = self.decoder_rnn2(decoder_rnn1_output) # [N, 256]
decoder_output = decoder_rnn1_output + decoder_rnn2_output # [N, 256]
mel = self.calc_mel(decoder_output) # [N, r * num_mels]
out_mels = F.concat((out_mels, mel))
out_lins = self.decoder_cbhg(out_mels)
self.spectrogram = out_lins # BAD
return out_lins
| mit | 2,407,114,685,599,975,400 | 33.756522 | 94 | 0.616366 | false |
huntxu/neutron | neutron/tests/unit/common/test_rpc.py | 1 | 19542 |
# Copyright 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import fixtures
import mock
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_messaging import conffixture as messaging_conffixture
from oslo_messaging.rpc import dispatcher
import testtools
from neutron.common import rpc
from neutron.tests import base
CONF = cfg.CONF
CONF.import_opt('state_path', 'neutron.conf.common')
class RPCFixture(fixtures.Fixture):
def _setUp(self):
self.trans = copy.copy(rpc.TRANSPORT)
self.noti_trans = copy.copy(rpc.NOTIFICATION_TRANSPORT)
self.noti = copy.copy(rpc.NOTIFIER)
self.addCleanup(self._reset_everything)
def _reset_everything(self):
rpc.TRANSPORT = self.trans
rpc.NOTIFICATION_TRANSPORT = self.noti_trans
rpc.NOTIFIER = self.noti
class TestRPC(base.DietTestCase):
def setUp(self):
super(TestRPC, self).setUp()
self.useFixture(RPCFixture())
@mock.patch.object(rpc, 'RequestContextSerializer')
@mock.patch.object(messaging, 'get_rpc_transport')
@mock.patch.object(messaging, 'get_notification_transport')
@mock.patch.object(messaging, 'Notifier')
def test_init(self, mock_not, mock_noti_trans, mock_trans, mock_ser):
notifier = mock.Mock()
transport = mock.Mock()
noti_transport = mock.Mock()
serializer = mock.Mock()
conf = mock.Mock()
mock_trans.return_value = transport
mock_noti_trans.return_value = noti_transport
mock_ser.return_value = serializer
mock_not.return_value = notifier
rpc.init(conf, rpc_ext_mods=['foo'])
expected_mods = list(set(['foo'] + rpc._DFT_EXMODS))
mock_trans.assert_called_once_with(
conf, allowed_remote_exmods=expected_mods)
mock_noti_trans.assert_called_once_with(
conf, allowed_remote_exmods=expected_mods)
mock_not.assert_called_once_with(noti_transport,
serializer=serializer)
self.assertIsNotNone(rpc.TRANSPORT)
self.assertIsNotNone(rpc.NOTIFICATION_TRANSPORT)
self.assertIsNotNone(rpc.NOTIFIER)
def test_cleanup_transport_null(self):
rpc.NOTIFIER = mock.Mock()
rpc.NOTIFICATION_TRANSPORT = mock.Mock()
self.assertRaises(AssertionError, rpc.cleanup)
def test_cleanup_notification_transport_null(self):
rpc.TRANSPORT = mock.Mock()
rpc.NOTIFIER = mock.Mock()
self.assertRaises(AssertionError, rpc.cleanup)
def test_cleanup_notifier_null(self):
rpc.TRANSPORT = mock.Mock()
rpc.NOTIFICATION_TRANSPORT = mock.Mock()
self.assertRaises(AssertionError, rpc.cleanup)
def test_cleanup(self):
rpc.NOTIFIER = mock.Mock()
rpc.NOTIFICATION_TRANSPORT = mock.Mock()
rpc.TRANSPORT = mock.Mock()
trans_cleanup = mock.Mock()
not_trans_cleanup = mock.Mock()
rpc.TRANSPORT.cleanup = trans_cleanup
rpc.NOTIFICATION_TRANSPORT.cleanup = not_trans_cleanup
rpc.cleanup()
trans_cleanup.assert_called_once_with()
not_trans_cleanup.assert_called_once_with()
self.assertIsNone(rpc.TRANSPORT)
self.assertIsNone(rpc.NOTIFICATION_TRANSPORT)
self.assertIsNone(rpc.NOTIFIER)
@mock.patch.object(rpc, 'RequestContextSerializer')
@mock.patch.object(rpc, 'BackingOffClient')
def test_get_client(self, mock_client, mock_ser):
rpc.TRANSPORT = mock.Mock()
tgt = mock.Mock()
ser = mock.Mock()
mock_client.return_value = 'client'
mock_ser.return_value = ser
client = rpc.get_client(tgt, version_cap='1.0', serializer='foo')
mock_ser.assert_called_once_with('foo')
mock_client.assert_called_once_with(rpc.TRANSPORT,
tgt, version_cap='1.0',
serializer=ser)
self.assertEqual('client', client)
@mock.patch.object(rpc, 'RequestContextSerializer')
@mock.patch.object(messaging, 'get_rpc_server')
def test_get_server(self, mock_get, mock_ser):
rpc.TRANSPORT = mock.Mock()
ser = mock.Mock()
tgt = mock.Mock()
ends = mock.Mock()
mock_ser.return_value = ser
mock_get.return_value = 'server'
server = rpc.get_server(tgt, ends, serializer='foo')
mock_ser.assert_called_once_with('foo')
access_policy = dispatcher.DefaultRPCAccessPolicy
mock_get.assert_called_once_with(rpc.TRANSPORT, tgt, ends,
'eventlet', ser,
access_policy=access_policy)
self.assertEqual('server', server)
def test_get_notifier(self):
rpc.NOTIFIER = mock.Mock()
mock_prep = mock.Mock()
mock_prep.return_value = 'notifier'
rpc.NOTIFIER.prepare = mock_prep
notifier = rpc.get_notifier('service', publisher_id='foo')
mock_prep.assert_called_once_with(publisher_id='foo')
self.assertEqual('notifier', notifier)
def test_get_notifier_null_publisher(self):
rpc.NOTIFIER = mock.Mock()
mock_prep = mock.Mock()
mock_prep.return_value = 'notifier'
rpc.NOTIFIER.prepare = mock_prep
notifier = rpc.get_notifier('service', host='bar')
mock_prep.assert_called_once_with(publisher_id='service.bar')
self.assertEqual('notifier', notifier)
class TestRequestContextSerializer(base.DietTestCase):
def setUp(self):
super(TestRequestContextSerializer, self).setUp()
self.mock_base = mock.Mock()
self.ser = rpc.RequestContextSerializer(self.mock_base)
self.ser_null = rpc.RequestContextSerializer(None)
def test_serialize_entity(self):
self.mock_base.serialize_entity.return_value = 'foo'
ser_ent = self.ser.serialize_entity('context', 'entity')
self.mock_base.serialize_entity.assert_called_once_with('context',
'entity')
self.assertEqual('foo', ser_ent)
def test_deserialize_entity(self):
self.mock_base.deserialize_entity.return_value = 'foo'
deser_ent = self.ser.deserialize_entity('context', 'entity')
self.mock_base.deserialize_entity.assert_called_once_with('context',
'entity')
self.assertEqual('foo', deser_ent)
def test_deserialize_entity_null_base(self):
deser_ent = self.ser_null.deserialize_entity('context', 'entity')
self.assertEqual('entity', deser_ent)
def test_serialize_context(self):
context = mock.Mock()
self.ser.serialize_context(context)
context.to_dict.assert_called_once_with()
def test_deserialize_context(self):
context_dict = {'foo': 'bar',
'user_id': 1,
'tenant_id': 1,
'is_admin': True}
c = self.ser.deserialize_context(context_dict)
self.assertEqual(1, c.user_id)
self.assertEqual(1, c.project_id)
def test_deserialize_context_no_user_id(self):
context_dict = {'foo': 'bar',
'user': 1,
'tenant_id': 1,
'is_admin': True}
c = self.ser.deserialize_context(context_dict)
self.assertEqual(1, c.user_id)
self.assertEqual(1, c.project_id)
def test_deserialize_context_no_tenant_id(self):
context_dict = {'foo': 'bar',
'user_id': 1,
'project_id': 1,
'is_admin': True}
c = self.ser.deserialize_context(context_dict)
self.assertEqual(1, c.user_id)
self.assertEqual(1, c.project_id)
def test_deserialize_context_no_ids(self):
context_dict = {'foo': 'bar', 'is_admin': True}
c = self.ser.deserialize_context(context_dict)
self.assertIsNone(c.user_id)
self.assertIsNone(c.project_id)
class ServiceTestCase(base.DietTestCase):
# the class cannot be based on BaseTestCase since it mocks rpc.Connection
def setUp(self):
super(ServiceTestCase, self).setUp()
self.host = 'foo'
self.topic = 'neutron-agent'
self.target_mock = mock.patch('oslo_messaging.Target')
self.target_mock.start()
self.messaging_conf = messaging_conffixture.ConfFixture(CONF)
self.messaging_conf.transport_url = 'fake://'
self.messaging_conf.response_timeout = 0
self.useFixture(self.messaging_conf)
self.addCleanup(rpc.cleanup)
rpc.init(CONF)
def test_operations(self):
with mock.patch('oslo_messaging.get_rpc_server') as get_rpc_server:
rpc_server = get_rpc_server.return_value
service = rpc.Service(self.host, self.topic)
service.start()
rpc_server.start.assert_called_once_with()
service.stop()
rpc_server.stop.assert_called_once_with()
rpc_server.wait.assert_called_once_with()
class TimeoutTestCase(base.DietTestCase):
def setUp(self):
super(TimeoutTestCase, self).setUp()
self.messaging_conf = messaging_conffixture.ConfFixture(CONF)
self.messaging_conf.transport_url = 'fake://'
self.messaging_conf.response_timeout = 0
self.useFixture(self.messaging_conf)
self.addCleanup(rpc.cleanup)
rpc.init(CONF)
rpc.TRANSPORT = mock.MagicMock()
rpc.TRANSPORT._send.side_effect = messaging.MessagingTimeout
target = messaging.Target(version='1.0', topic='testing')
self.client = rpc.get_client(target)
self.call_context = mock.Mock()
self.sleep = mock.patch('time.sleep').start()
rpc.TRANSPORT.conf.rpc_response_timeout = 10
def test_timeout_unaffected_when_explicitly_set(self):
rpc.TRANSPORT.conf.rpc_response_timeout = 5
ctx = self.client.prepare(topic='sandwiches', timeout=77)
with testtools.ExpectedException(messaging.MessagingTimeout):
ctx.call(self.call_context, 'create_pb_and_j')
# ensure that the timeout was not increased and the back-off sleep
# wasn't called
self.assertEqual(
5,
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['create_pb_and_j'])
self.assertFalse(self.sleep.called)
def test_timeout_store_defaults(self):
# any method should default to the configured timeout
self.assertEqual(
rpc.TRANSPORT.conf.rpc_response_timeout,
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'])
self.assertEqual(
rpc.TRANSPORT.conf.rpc_response_timeout,
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_2'])
# a change to an existing should not affect new or existing ones
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_2'] = 7000
self.assertEqual(
rpc.TRANSPORT.conf.rpc_response_timeout,
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'])
self.assertEqual(
rpc.TRANSPORT.conf.rpc_response_timeout,
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_3'])
def test_method_timeout_sleep(self):
rpc.TRANSPORT.conf.rpc_response_timeout = 2
for i in range(100):
with testtools.ExpectedException(messaging.MessagingTimeout):
self.client.call(self.call_context, 'method_1')
# sleep value should always be between 0 and configured timeout
self.assertGreaterEqual(self.sleep.call_args_list[0][0][0], 0)
self.assertLessEqual(self.sleep.call_args_list[0][0][0], 2)
self.sleep.reset_mock()
def test_method_timeout_increases_on_timeout_exception(self):
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'] = 1
for i in range(5):
with testtools.ExpectedException(messaging.MessagingTimeout):
self.client.call(self.call_context, 'method_1')
# we only care to check the timeouts sent to the transport
timeouts = [call[1]['timeout']
for call in rpc.TRANSPORT._send.call_args_list]
self.assertEqual([1, 2, 4, 8, 16], timeouts)
def test_method_timeout_10x_config_ceiling(self):
rpc.TRANSPORT.conf.rpc_response_timeout = 10
# 5 doublings should max out at the 10xdefault ceiling
for i in range(5):
with testtools.ExpectedException(messaging.MessagingTimeout):
self.client.call(self.call_context, 'method_1')
self.assertEqual(
10 * rpc.TRANSPORT.conf.rpc_response_timeout,
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'])
with testtools.ExpectedException(messaging.MessagingTimeout):
self.client.call(self.call_context, 'method_1')
self.assertEqual(
10 * rpc.TRANSPORT.conf.rpc_response_timeout,
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'])
def test_timeout_unchanged_on_other_exception(self):
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'] = 1
rpc.TRANSPORT._send.side_effect = ValueError
with testtools.ExpectedException(ValueError):
self.client.call(self.call_context, 'method_1')
rpc.TRANSPORT._send.side_effect = messaging.MessagingTimeout
with testtools.ExpectedException(messaging.MessagingTimeout):
self.client.call(self.call_context, 'method_1')
timeouts = [call[1]['timeout']
for call in rpc.TRANSPORT._send.call_args_list]
self.assertEqual([1, 1], timeouts)
def test_timeouts_for_methods_tracked_independently(self):
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'] = 1
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_2'] = 1
for method in ('method_1', 'method_1', 'method_2',
'method_1', 'method_2'):
with testtools.ExpectedException(messaging.MessagingTimeout):
self.client.call(self.call_context, method)
timeouts = [call[1]['timeout']
for call in rpc.TRANSPORT._send.call_args_list]
self.assertEqual([1, 2, 1, 4, 2], timeouts)
def test_timeouts_for_namespaces_tracked_independently(self):
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['ns1.method'] = 1
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['ns2.method'] = 1
for ns in ('ns1', 'ns2'):
self.client.target.namespace = ns
for i in range(4):
with testtools.ExpectedException(messaging.MessagingTimeout):
self.client.call(self.call_context, 'method')
timeouts = [call[1]['timeout']
for call in rpc.TRANSPORT._send.call_args_list]
self.assertEqual([1, 2, 4, 8, 1, 2, 4, 8], timeouts)
def test_method_timeout_increases_with_prepare(self):
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'] = 1
ctx = self.client.prepare(version='1.4')
with testtools.ExpectedException(messaging.MessagingTimeout):
ctx.call(self.call_context, 'method_1')
with testtools.ExpectedException(messaging.MessagingTimeout):
ctx.call(self.call_context, 'method_1')
# we only care to check the timeouts sent to the transport
timeouts = [call[1]['timeout']
for call in rpc.TRANSPORT._send.call_args_list]
self.assertEqual([1, 2], timeouts)
def test_set_max_timeout_caps_all_methods(self):
rpc.TRANSPORT.conf.rpc_response_timeout = 300
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'] = 100
rpc.BackingOffClient.set_max_timeout(50)
# both explicitly tracked
self.assertEqual(
50, rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'])
# as well as new methods
self.assertEqual(
50, rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_2'])
def test_set_max_timeout_retains_lower_timeouts(self):
rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'] = 10
rpc.BackingOffClient.set_max_timeout(50)
self.assertEqual(
10, rpc._BackingOffContextWrapper._METHOD_TIMEOUTS['method_1'])
def test_set_max_timeout_overrides_default_timeout(self):
rpc.TRANSPORT.conf.rpc_response_timeout = 10
self.assertEqual(
10 * 10, rpc._BackingOffContextWrapper.get_max_timeout())
rpc._BackingOffContextWrapper.set_max_timeout(10)
self.assertEqual(10, rpc._BackingOffContextWrapper.get_max_timeout())
class CastExceptionTestCase(base.DietTestCase):
def setUp(self):
super(CastExceptionTestCase, self).setUp()
self.messaging_conf = messaging_conffixture.ConfFixture(CONF)
self.messaging_conf.transport_url = 'fake://'
self.messaging_conf.response_timeout = 0
self.useFixture(self.messaging_conf)
self.addCleanup(rpc.cleanup)
rpc.init(CONF)
rpc.TRANSPORT = mock.MagicMock()
rpc.TRANSPORT._send.side_effect = Exception
target = messaging.Target(version='1.0', topic='testing')
self.client = rpc.get_client(target)
self.cast_context = mock.Mock()
def test_cast_catches_exception(self):
self.client.cast(self.cast_context, 'method_1')
class TestConnection(base.DietTestCase):
def setUp(self):
super(TestConnection, self).setUp()
self.conn = rpc.Connection()
@mock.patch.object(messaging, 'Target')
@mock.patch.object(cfg, 'CONF')
@mock.patch.object(rpc, 'get_server')
def test_create_consumer(self, mock_get, mock_cfg, mock_tgt):
mock_cfg.host = 'foo'
server = mock.Mock()
target = mock.Mock()
mock_get.return_value = server
mock_tgt.return_value = target
self.conn.create_consumer('topic', 'endpoints', fanout=True)
mock_tgt.assert_called_once_with(topic='topic', server='foo',
fanout=True)
mock_get.assert_called_once_with(target, 'endpoints')
self.assertEqual([server], self.conn.servers)
def test_consume_in_threads(self):
self.conn.servers = [mock.Mock(), mock.Mock()]
servs = self.conn.consume_in_threads()
for serv in self.conn.servers:
serv.start.assert_called_once_with()
self.assertEqual(servs, self.conn.servers)
def test_close(self):
self.conn.servers = [mock.Mock(), mock.Mock()]
self.conn.close()
for serv in self.conn.servers:
serv.stop.assert_called_once_with()
serv.wait.assert_called_once_with()
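# --- Illustrative sketch (not part of the original test module) ---
# The TimeoutTestCase assertions above ([1, 2, 4, 8, 16], capped at ten times
# the configured default) describe a per-method exponential back-off. The
# helper below reproduces only that arithmetic; it is a simplification for
# illustration, not neutron's actual _BackingOffContextWrapper code.
def _sketch_backoff_timeouts(start, attempts, default_timeout):
    ceiling = 10 * default_timeout
    timeout, seen = start, []
    for _ in range(attempts):
        seen.append(timeout)
        timeout = min(timeout * 2, ceiling)
    return seen
# _sketch_backoff_timeouts(1, 5, 10)  -> [1, 2, 4, 8, 16]
# _sketch_backoff_timeouts(64, 4, 10) -> [64, 100, 100, 100]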
| apache-2.0 | 6,218,864,329,141,855,000 | 38.162325 | 78 | 0.631 | false |
MaxIV-KitsControls/tango-facadedevice | facadedevice/device.py | 1 | 12477 |
"""Provide the facade device class and metaclass."""
# Imports
import time
import collections
# Graph imports
from facadedevice.graph import triplet, Graph, INVALID
# Exception imports
from facadedevice.exception import to_dev_failed, context
# Utils imports
from facadedevice.utils import EnhancedDevice, aggregate_qualities
from facadedevice.utils import get_default_attribute_value
# Object imports
from facadedevice.objects import class_object, local_attribute
# Tango imports
from tango.server import command
from tango import DevFailed, DevState, EventData, EventType, DispLevel
# Proxy metaclass
class FacadeMeta(type(EnhancedDevice)):
"""Metaclass for Facade device."""
def __new__(metacls, name, bases, dct):
# Class attribute
dct["_class_dict"] = class_dict = {}
# Inheritance
for base in reversed(bases):
try:
base_class_dict = base._class_dict
except AttributeError:
continue
# Copy _class_dict from the bases
for key, obj in base_class_dict.items():
# Allow to remove facade objects by setting them to None
if key not in dct:
class_dict[key] = obj
# Process class objects
for key, value in list(dct.items()):
if isinstance(value, class_object):
class_dict[key] = value
value.update_class(key, dct)
# Create device class
return type(EnhancedDevice).__new__(metacls, name, bases, dct)
# Metaclassing manually for python compatibility
_Facade = FacadeMeta('_Facade', (EnhancedDevice,), {})
# Facade device
class Facade(_Facade):
"""Base class for facade devices.
It supports the following objects:
- `facadedevice.local_attribute`_
- `facadedevice.logical_attribute`_
- `facadedevice.proxy_attribute`_
- `facadedevice.combined_attribute`_
- `facadedevice.state_attribute`_
- `facadedevice.proxy_command`_
It also provides a few helpers:
- `self.graph`: acts as a `<key, node>` dictionary
- `self.get_combined_results`: return the subresults of a combined
attribute
The `init_device` method shouldn't be overridden. It performs specific
exception handling. Instead, override `safe_init_device` if you have to
add some extra logic. Don't forget to call the parent method since it
performs a few useful steps:
- load device properties
- configure and build the graph
- run the connection routine
It also provides an expert command called `GetInfo` that displays useful
information such as:
- the connection status
- the list of all event subscriptions
- the exception history
"""
# Reasons to ignore for errors in events
reasons_to_ignore = ["API_PollThreadOutOfSync"]
# Properties
@property
def graph(self):
return self._graph
# Helper
def get_combined_results(self, name):
"""Return the subresults of a given combined attribute.
It produces an ordered dictionary of <attribute_name, triplet>.
"""
subnodes = self.graph.subnodes(name)
return collections.OrderedDict(
(node.remote_attr, node.result()) for node in subnodes)
def _get_default_value(self, attr):
dtype = attr.get_data_type()
dformat = attr.get_data_format()
return get_default_attribute_value(dformat, dtype)
def _emulate_subcommand(self, result, *args):
if args or result is None:
raise ValueError('This proxy command is disabled')
return result
# Initialization
def safe_init_device(self):
"""Initialize the device."""
# Init data structures
self._graph = Graph()
self._subcommand_dict = {}
# Get properties
with context('getting', 'properties'):
super(Facade, self).safe_init_device()
# Configure
for value in self._class_dict.values():
with context('configuring', value):
value.configure(self)
# Build graph
with context('building', self._graph):
self._graph.build()
# Connect
for value in self._class_dict.values():
with context('connecting', value):
value.connect(self)
# Event subscription
def _subscribe_for_node(self, attr, node):
try:
self.subscribe_event(
attr,
EventType.CHANGE_EVENT,
lambda event: self._on_node_event(node, event))
except DevFailed:
try:
self.subscribe_event(
attr,
EventType.PERIODIC_EVENT,
lambda event: self._on_node_event(node, event))
except DevFailed:
msg = "Can't subscribe to event for attribute {}"
self.info_stream(msg.format(attr))
raise
else:
msg = "Subscribed to periodic event for attribute {}"
self.info_stream(msg.format(attr))
return EventType.PERIODIC_EVENT
else:
msg = "Subscribed to change event for attribute {}"
self.info_stream(msg.format(attr))
return EventType.CHANGE_EVENT
# Event callback
def _on_node_event(self, node, event):
"""Handle node events."""
# Ignore the event if not a data event
if not isinstance(event, EventData):
msg = "Received an unexpected event for {}"
self.error_stream(msg.format(node))
return
# Format attribute name
attr_name = '/'.join(event.attr_name.split('/')[-4:])
# Ignore the event if it contains an error
if event.errors:
exc = DevFailed(*event.errors)
reason = exc.args[0].reason
msg = "Received an event from {} that contains errors"
self.ignore_exception(exc, msg=msg.format(attr_name))
if reason not in self.reasons_to_ignore:
node.set_exception(exc)
return
# Info stream
msg = "Received a valid event from {} for {}."
self.info_stream(msg.format(attr_name, node))
# Save
value = triplet.from_attr_value(event.attr_value)
node.set_result(value)
# Client requests
def _read_from_node(self, node, attr=None):
"""Used when reading an attribute"""
if node.result() is None:
return
value, stamp, quality = node.result()
if attr:
if value is None:
value = self._get_default_value(attr)
attr.set_value_date_quality(value, stamp, quality)
return value, stamp, quality
def _write_to_node(self, node, value):
"""Used when writing a local attribute"""
node.set_result(triplet(value))
def _run_proxy_command(self, key, value):
"""Used when writing a proxy attribute"""
return self._run_proxy_command_context(
key, lambda subcommand, value: subcommand(value), value)
def _run_proxy_command_context(self, key, ctx, *values):
"""Used when running a proxy command"""
# Run subcommand in context
subcommand = self._subcommand_dict[key]
return ctx(subcommand, *values)
# Controlled callbacks
def _run_callback(self, ctx, func, node):
"""Contexualize different node callbacks."""
try:
with context(ctx, node):
func(node)
except Exception as exc:
self.ignore_exception(exc)
def _standard_aggregation(self, node, func, *nodes):
"""Contextualize aggregation and propagate errors automatically."""
# Forward first exception
for subnode in nodes:
if subnode.exception() is not None:
with context("updating", node):
raise subnode.exception()
# Shortcut for empty nodes
results = [subnode.result() for subnode in nodes]
if any(result is None for result in results):
return
# Extract values
values, stamps, qualities = zip(*results)
# Invalid quality
if any(quality == INVALID for quality in qualities):
return triplet(None, max(stamps), INVALID)
# Run function
try:
with context("updating", node):
result = func(*values)
except Exception as exc:
self.ignore_exception(exc)
raise exc
# Return triplet
if isinstance(result, triplet):
return result
# Create triplet
quality = aggregate_qualities(qualities)
return triplet(result, max(stamps), quality)
def _custom_aggregation(self, node, func, *nodes):
"""Contextualize aggregation."""
# Run function
try:
with context("updating", node):
result = func(*nodes)
except Exception as exc:
self.ignore_exception(exc)
raise exc
# Return result
if not isinstance(result, triplet):
result = triplet(result)
return result
# Dedicated callbacks
def _set_state_from_node(self, node):
# Forward exception
if node.exception() is not None:
self.register_exception(node.exception())
return
# Empty node
if node.result() is None:
self.set_state(DevState.UNKNOWN)
self.set_status("The state is currently not available.")
return
# Unpack triplet
value, stamp, quality = node.result()
# Invalid value
if value is None:
value = (
DevState.FAULT,
"The state cannot be computed. Some values are invalid.")
# Unpack value
try:
state, status = value
except (TypeError, ValueError):
state = value
status = "The device is in {} state.".format(value)
# Set state and status
try:
with context('setting', 'state and status'):
self.set_state(state, stamp, quality)
self.set_status(status, stamp, quality)
# Exception while setting the state
except Exception as exc:
self.register_exception(exc)
def _push_event_for_node(self, node):
attr = getattr(self, node.name)
# Exception
if node.exception() is not None:
exception = to_dev_failed(node.exception())
self.push_change_event(node.name, exception)
self.push_archive_event(node.name, exception)
# Log the pushing of exceptions
msg = 'Pushing an exception for attribute {}'
self.debug_exception(exception, msg.format(node.name))
# Empty result
elif node.result() is None:
pass
# Triplet result
else:
value, stamp, quality = node.result()
if value is None:
value = self._get_default_value(attr)
self.push_change_event(node.name, value, stamp, quality)
self.push_archive_event(node.name, value, stamp, quality)
# Clean up
def delete_device(self):
# Reset graph
try:
self._graph.reset()
except Exception as exc:
msg = "Error while resetting the graph"
self.ignore_exception(exc, msg)
# Unsubscribe all
super(Facade, self).delete_device()
# Timed Facade
class TimedFacade(Facade):
"""Similar to the `facadedevice.Facade` base class with time handling.
In particular, it adds:
- the `UpdateTime` polled command, used trigger updates periodically
- the `Time` local attribute, a float updated at every tick
- the `on_time` method, a callback that runs at every tick
"""
def init_device(self):
super(TimedFacade, self).init_device()
self.UpdateTime()
Time = local_attribute(dtype=float)
@Time.notify
def _on_time(self, node):
self.on_time(node.result().value)
def on_time(self, value):
pass
@command(
polling_period=1000,
display_level=DispLevel.EXPERT)
def UpdateTime(self):
t = time.time()
result = triplet(t, t)
self.graph['Time'].set_result(result)
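# --- Illustrative usage sketch (not part of the original module) ---
# A minimal device built on the classes above. It only uses constructs that
# appear in this file (local_attribute, its notify hook, TimedFacade.on_time);
# the attribute name and the logging message are arbitrary examples.
class ExampleTimedDevice(TimedFacade):

    Setpoint = local_attribute(dtype=float)

    @Setpoint.notify
    def _on_setpoint(self, node):
        # called whenever the local attribute receives a new value
        self.info_stream("Setpoint is now %s" % (node.result().value,))

    def on_time(self, value):
        # called on every UpdateTime tick with the current timestamp
        pass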
| gpl-3.0 | 1,596,889,208,874,801,400 | 31.662304 | 76 | 0.59213 | false |
palrogg/foundations-homework | Data_and_databases/Homework6_api.py | 1 | 1394 |
from flask import Flask, request, jsonify
import pg8000, decimal
import json
app = Flask(__name__)
def get_data(_type, _sort):
conn = pg8000.connect(user="paul", database="mondial")
cursor = conn.cursor()
sort_result = ''
if _sort == 'name':
sort_result = 'ORDER BY name'
elif _sort in ('area', 'elevation'):
sort_result = 'ORDER BY ' + _sort + ' DESC'
if _type:
sql = "SELECT name, elevation, area, type FROM lake WHERE type = %s " + sort_result
cursor.execute(sql, [str(_type)])
else:
sql = "SELECT name, elevation, area, type FROM lake " + sort_result
cursor.execute(sql)
# nested function
def get_int(value):
if isinstance(value, decimal.Decimal):
return int(value)
else:
return None
query_result = []
for row in cursor.fetchall():
item = {
"name": row[0],
"elevation": get_int(row[1]),
"area": get_int(row[2]),
"type": row[3]
}
query_result.append(item)
return query_result
@app.route("/lakes")
def getLakes():
lake_type = request.args.get('type', '')
sort_by = request.args.get('sort', '')
if sort_by and sort_by not in ('name', 'elevation', 'area'):
sort_by = 'name'
result = get_data(lake_type, sort_by)
return jsonify(result)
app.run()
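# --- Illustrative request sketch (not part of the original file) ---
# With the server running on Flask's default address (127.0.0.1:5000, an
# assumption), /lakes can be queried like this; the 'type' value below is
# hypothetical and depends on what the mondial database actually contains.
#   import requests
#   resp = requests.get('http://127.0.0.1:5000/lakes',
#                       params={'type': 'salt', 'sort': 'area'})
#   print(resp.json()[:3])   # largest lakes of that type first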
| mit | -3,189,071,382,793,536,000 | 23.892857 | 91 | 0.558824 | false |
skelneko/-CuriousActorCritic | Environment.py | 1 | 2876 |
from Config import Config
import gym
import PIL.Image
from collections import deque
import numpy as np
class Environment(object):
def __init__(self, game="MsPacman-v0"):
self.screen_h = Config.SCREEN_H
self.screen_w = Config.SCREEN_W
self.screen_shape = Config.SCREEN_SHAPE
self.frame_per_row = Config.FRAME_PER_ROW
self.frame_buffer = None
self.action_space = 9
# meta
self.total_episode_run = 0
self.steps_in_episode = 0
self.max_steps_in_episode = 0
self.env = gym.make(game)
self.reset()
def init_frame_buffer(self):
# initialize history
if self.frame_buffer:
self.frame_buffer.clear()
else:
self.frame_buffer = deque()
for i in range(0, self.frame_per_row):
self.frame_buffer.append(self.get_screen(reduced=True)) # always full
def reset(self):
self.max_steps_in_episode = max(self.max_steps_in_episode, self.steps_in_episode)
self.current_screen = self.env.reset() # current_screen always align with ORIGINAL SETTING
self.init_frame_buffer()
self.current_reward = 0
self.done_flag = False
self.info = None
self.total_episode_run += 1
self.steps_in_episode = 0
def step(self, action):
self.current_screen, r, self.done_flag, self.info = self.env.step(action)
self.current_reward = r
self.frame_buffer.popleft()
self.frame_buffer.append(self.get_screen(reduced=True))
self.steps_in_episode += 1
def render(self):
self.env.render()
### GETs ###
def get_environment(self):
return self.env
def get_screen(self, reduced=True):
if reduced:
grayscale = self.rgb2gray(self.current_screen)
return self.resizeScreen(grayscale, self.screen_shape)
else:
return self.current_screen
def get_reward(self):
return self.current_reward
def get_done_flag(self):
return self.done_flag
def get_info(self):
return self.info
def get_action_space(self):
return self.action_space
def get_frame_buffer(self):
return self.frame_buffer
# return full list of frame_buffer in {W, H, Channel} shape
def get_history(self):
return np.transpose(self.frame_buffer, (1,2,0))
def get_max_steps(self):
return self.max_steps_in_episode
### Utilities ###
def rgb2gray(self, rgb):
r, g, b = rgb[:,:,0], rgb[:,:,1], rgb[:,:,2]
gray = 0.2989 * r + 0.5870 * g + 0.1140 * b
return gray
def resizeScreen(self, state, shape):
img = PIL.Image.fromarray(state, mode=None)
img = img.resize(shape, PIL.Image.LANCZOS)
arr = list(img.getdata())
return np.reshape(arr, shape)
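# --- Illustrative usage sketch (not part of the original file) ---
# Drives the wrapper for a few random steps; the step count and the use of
# gym's action_space sampler are arbitrary choices made for illustration.
def _example_rollout(num_steps=10):
    env = Environment("MsPacman-v0")
    for _ in range(num_steps):
        action = env.get_environment().action_space.sample()
        env.step(action)
        if env.get_done_flag():
            env.reset()
    # stacked grayscale frames with channels last, one per buffered frame
    return env.get_history()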
| gpl-3.0 | 434,799,176,446,478,660 | 27.196078 | 102 | 0.599096 | false |
shawnchin/checkfort | checkfort/files.py | 1 | 2041 |
import os
import re
import sys
from checkfort.exceptions import *
from checkfort.logging import p_warn, p_verbose
default_extensions = ("h", "f", "F",
"f90", "F90", "f95", "F95",
"f03", "F03", "f08", "F08", "h",)
class InputFileReader(object):
def __init__(self, input_file):
with open(input_file) as f:
lines = (re.split(r'[#!]', x, 1)[0].strip() for x in f)
self.entries = list(x for x in lines if x)
def get_entries(self):
return self.entries
class FileList(object):
def __init__(self, entries=None, extensions=default_extensions):
self.files = []
# build regex from extension list
if not extensions:
raise CheckfortException("Invalid extensions list - " + extensions)
self.extensions = extensions
self.re_extensions = re.compile(".*\.(%s)$" % "|".join(extensions))
if entries:
self.add_files(entries)
def add_files(self, entries):
if isinstance(entries, basestring):
self._add(entries)
else:
for entry in entries:
self._add(entry)
def _check_and_add(self, filename):
if not os.path.exists(filename):
if os.path.islink(filename):
p_warn("Warning: ignoring broken sym link - (%s)" % filename)
return
else:
raise CheckfortException("Invalid path - " + filename)
assert(not os.path.isdir(filename))
self.files.append(os.path.relpath(filename))
def _add(self, entry):
if os.path.isdir(entry):
self._search_dir(entry)
else:
self._check_and_add(entry)
def _search_dir(self, directory):
p_verbose(" - Searching for files in %s" % directory)
for root, dirs, files in os.walk(os.path.relpath(directory)):
for f in files:
if self.re_extensions.match(f):
self._check_and_add(os.path.join(root, f))
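# --- Illustrative usage sketch (not part of the original module) ---
# Typical flow: read an input list (one path per line, '#'/'!' comments
# stripped), then expand directories into Fortran source files. The input
# file name below is hypothetical.
def _example_collect(input_file="sources.txt"):
    entries = InputFileReader(input_file).get_entries()
    return FileList(entries).files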
| bsd-3-clause | 2,146,750,796,814,318,800 | 31.919355 | 79 | 0.55757 | false |
phgupta/Building-Analytics | building-analytics/TS_Util_Clean_Data.py | 1 | 15125 |
# -*- coding: utf-8 -*-
"""
@author : Armando Casillas <armcasillas@ucdavis.edu>
@author : Marco Pritoni <marco.pritoni@gmail.com>
Created on Wed Jul 26 2017
Update Aug 08 2017
"""
from __future__ import division
import pandas as pd
import os
import sys
import requests as req
import json
import numpy as np
import datetime
import pytz
from pandas import rolling_median
from matplotlib import style
import matplotlib
class TS_Util(object):
########################################################################
## simple load file section - eventually replace this with CSV_Importer
def _set_TS_index(self, data):
'''
Parameters
----------
Returns
-------
'''
# set index
data.index = pd.to_datetime(data.index)
# format types to numeric
for col in data.columns:
data[col] = pd.to_numeric(data[col], errors="coerce")
return data
def load_TS(self, fileName, folder):
'''
Parameters
----------
Returns
-------
'''
path = os.path.join(folder, fileName)
data = pd.read_csv(path, index_col=0)
data = self._set_TS_index(data)
return data
########################################################################
## time correction for time zones - eventually replace this with CSV_Importer
def _utc_to_local(self, data, local_zone="America/Los_Angeles"):
'''
Function takes in pandas dataframe and adjusts index according to timezone in which is requested by user
Parameters
----------
data: Dataframe
pandas dataframe of json timeseries response from server
local_zone: string
pytz.timezone string of specified local timezone to change index to
Returns
-------
data: Dataframe
Pandas dataframe with timestamp index adjusted for local timezone
'''
data.index = data.index.tz_localize(pytz.utc).tz_convert(
local_zone) # accounts for localtime shift
# Gets rid of extra offset information so can compare with csv data
data.index = data.index.tz_localize(None)
return data
def _local_to_utc(self, timestamp, local_zone="America/Los_Angeles"):
'''
Parameters
----------
# Change timestamp request time to reflect request in terms of local time relative to utc - working as of 5/5/17 ( Should test more )
# remove and add to TS_Util and import
Returns
-------
'''
timestamp_new = pd.to_datetime(
timestamp, infer_datetime_format=True, errors='coerce')
timestamp_new = timestamp_new.tz_localize(
local_zone).tz_convert(pytz.utc)
timestamp_new = timestamp_new.strftime('%Y-%m-%d %H:%M:%S')
return timestamp_new
########################################################################
## remove start and end NaN: Note issue with multi-column df
def remove_start_NaN(self, data, var=None):
'''
Parameters
----------
Returns
-------
'''
if var: # limit to one or some variables
start_ok_data = data[var].first_valid_index()
else:
start_ok_data = data.first_valid_index()
data = data.loc[start_ok_data:, :]
return data
def remove_end_NaN(self, data, var=None):
'''
Parameters
----------
Returns
-------
'''
if var: # limit to one or some variables
end_ok_data = data[var].last_valid_index()
else:
end_ok_data = data.last_valid_index()
data = data.loc[:end_ok_data, :]
return data
########################################################################
## Missing data section
def _find_missing_return_frame(self, data):
'''
Function takes in pandas dataframe and find missing values in each column
Parameters
----------
data: Dataframe
Returns
-------
data: Dataframe
'''
return data.isnull()
def _find_missing(self, data, return_bool=False):
if return_bool == False: # this returns the full table with True where the condition is true
data = self._find_missing_return_frame(data)
return data
elif return_bool == "any": # this returns a bool selector if any of the column is True
bool_sel = self._find_missing_return_frame(data).any(axis=1)
return bool_sel
elif return_bool == "all": # this returns a bool selector if all of the column are True
bool_sel = self._find_missing_return_frame(data).all(axis=1)
return bool_sel
else:
print("error in multi_col_how input")
return
def display_missing(self, data, return_bool="any"):
'''
Parameters
----------
Returns
-------
'''
if return_bool == "any":
bool_sel = self._find_missing(data,return_bool="any")
elif return_bool == "all":
bool_sel = self._find_missing(data,return_bool="all")
return data[bool_sel]
def count_missing(self, data, output="number"):
'''
Parameters
----------
how = "number" or "percent"
Returns
-------
'''
count = self._find_missing(data,return_bool=False).sum()
if output == "number":
return count
elif output == "percent":
return ((count / (data.shape[0])) * 100)
def remove_missing(self, data, return_bool="any"):
'''
Parameters
----------
Returns
-------
'''
if return_bool == "any":
bool_sel = self._find_missing(data,return_bool="any")
elif return_bool == "all":
bool_sel = self._find_missing(data,return_bool="all")
return data[~bool_sel]
########################################################################
## Out of Bound section
def _find_outOfBound(self, data, lowBound, highBound):
'''
Parameters
----------
Returns
-------
'''
data = ((data < lowBound) | (data > highBound))
return data
def display_outOfBound(self, data, lowBound, highBound):
'''
Parameters
----------
Returns
-------
'''
data = data[self._find_outOfBound(
data, lowBound, highBound).any(axis=1)]
return data
def count_outOfBound(self, data, lowBound, highBound, output):
'''
Parameters
----------
Returns
-------
'''
count = self._find_outOfBound(data, lowBound, highBound).sum()
if output == "number":
return count
elif output == "percent":
return count / (data.shape[0]) * 1.0 * 100
def remove_outOfBound(self, data, lowBound, highBound):
'''
Parameters
----------
Returns
-------
'''
data = data[~self._find_outOfBound(
data, lowBound, highBound).any(axis=1)]
return data
########################################################################
## Outliers section
def _calc_outliers_bounds(self, data, method, coeff, window):
        '''
        Parameters
        ----------
        method: string
            "std", "rstd", "rmedian", "iqr" or "qtl"
        coeff: numeric
            multiplier for the std/IQR based methods, or absolute threshold for "rmedian"
        window: int
            window size used by the rolling methods
        Returns
        -------
        lowBound, highBound
            lower and upper bounds used to flag outliers
        '''
if method == "std":
lowBound = (data.mean(axis=0) - coeff * data.std(axis=0)).values[0]
highBound = (data.mean(axis=0) + coeff * data.std(axis=0)).values[0]
elif method == "rstd":
            rl_mean = data.rolling(window=window).mean()
            rl_std = data.rolling(window=window).std().fillna(method='bfill').fillna(method='ffill')
lowBound = rl_mean - coeff * rl_std
highBound = rl_mean + coeff * rl_std
elif method == "rmedian":
rl_med = data.rolling(window=window, center=True).median().fillna(
method='bfill').fillna(method='ffill')
lowBound = rl_med - coeff
highBound = rl_med + coeff
elif method == "iqr": # coeff is multip for std and IQR or threshold for rolling median
Q1 = data.quantile(.25) # coeff is multip for std or % of quartile
Q3 = data.quantile(.75)
IQR = Q3 - Q1
lowBound = Q1 - coeff * IQR
highBound = Q3 + coeff * IQR
elif method == "qtl":
lowBound = data.quantile(.005)
highBound = data.quantile(.995)
else:
print ("method chosen does not exist")
lowBound = None
highBound = None
return lowBound, highBound
def display_outliers(self, data, method, coeff, window=10):
'''
Parameters
----------
Returns
-------
'''
lowBound, highBound = self._calc_outliers_bounds(
data, method, coeff, window)
data = self.display_outOfBound(data, lowBound, highBound)
return data
def count_outliers(self, data, method, coeff, output, window=10):
'''
Parameters
----------
Returns
-------
'''
lowBound, highBound = self._calc_outliers_bounds(
data, method, coeff, window)
count = self.count_outOfBound(data, lowBound, highBound, output=output)
return count
def remove_outliers(self, data, method, coeff, window=10):
        '''
        Removes the rows flagged as outliers by the chosen method
        Parameters
        ----------
        method, coeff, window: see _calc_outliers_bounds
        Returns
        -------
        data: Dataframe
            dataframe with outlier rows removed
        '''
lowBound, highBound = self._calc_outliers_bounds(
data, method, coeff, window)
data = self.remove_outOfBound(data, lowBound, highBound)
return data
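    # Illustrative call (thresholds are arbitrary examples, not recommendations), assuming `util` is a
    # TS_Util instance and `df` a loaded dataframe: drop points further than 3 rolling standard
    # deviations from a 12-sample rolling mean:
    #   clean = util.remove_outliers(df, method="rstd", coeff=3, window=12)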
########################################################################
## If condition section
def _find_equal_to_values(self, data, val):
'''
Parameters
----------
Returns
-------
'''
#print(val)
bool_sel = (data == val)
return bool_sel
def _find_greater_than_values(self, data, val):
'''
Parameters
----------
Returns
-------
'''
bool_sel = (data > val)
return bool_sel
def _find_less_than_values(self, data, val):
'''
Parameters
----------
Returns
-------
'''
bool_sel = (data < val)
return bool_sel
def _find_greater_than_or_equal_to_values(self, data, val):
'''
Parameters
----------
Returns
-------
'''
bool_sel = (data >= val)
return bool_sel
def _find_less_than_or_equal_to_values(self, data, val):
'''
Parameters
----------
Returns
-------
'''
bool_sel = (data <= val)
return bool_sel
def _find_different_from_values(self, data, val):
'''
Parameters
----------
Returns
-------
'''
bool_sel = ~(data == val)
return bool_sel
def count_if(self, data, condition, val, output="number"):
"""
condition = "equal", "below", "above"
val = value to compare against
how = "number" or "percent"
"""
if condition == "=":
count = self._find_equal_to_values(data,val).sum()
elif condition == ">":
count = self._find_greater_than_values(data,val).sum()
elif condition == "<":
count = self._find_less_than_values(data,val).sum()
elif condition == ">=":
count = self._find_greater_than_or_equal_to_values(data,val).sum()
elif condition == "<=":
count = self._find_less_than_or_equal_to_values(data,val).sum()
elif condition == "!=":
count = self._find_different_from_values(data,val).sum()
if output == "number":
return count
elif output == "percent":
return count/data.shape[0]*1.0*100
return count
########################################################################
## Missing Data Events section
def get_start_events(self, data, var = "T_ctrl [oF]"): # create list of start events
'''
Parameters
----------
Returns
-------
'''
start_event = (data[var].isnull()) & ~(data[var].shift().isnull()) # find NaN start event
start = data[start_event].index.tolist() # selector for these events
if np.isnan(data.loc[data.index[0],var]): # if the first record is NaN
start = [data.index[0]] + start # add first record as starting time for first NaN event
else:
start = start
return start
def get_end_events(self, data, var = "T_ctrl [oF]"): # create list of end events
'''
Parameters
----------
Returns
-------
'''
end_events = ~(data[var].isnull()) & (data[var].shift().isnull()) # find NaN end events
end = data[end_events].index.tolist() # selector for these events
if ~np.isnan(data.loc[data.index[0],var]): # if first record is not NaN
end.remove(end[0]) # remove the endpoint ()
if np.isnan(data.loc[data.index[-1],var]): # if the last record is NaN
end = end + [data.index[-1]] # add last record as ending time for first NaN event
else:
end = end
return end
    def create_event_table(self, data, var): # create dataframe of start-end-length for current house/tstat
'''
Parameters
----------
Returns
-------
'''
# remove initial and final missing data
        data = self.remove_start_NaN(data, var)
        data = self.remove_end_NaN(data, var)
# create list of start events
start = self.get_start_events(data, var)
# create list of end events
end = self.get_end_events(data, var)
# merge lists into dataframe and calc length
events = pd.DataFrame.from_items([("start",start), ("end",end )])
events["length_min"] = (events["end"] - events["start"]).dt.total_seconds()/60 # note: this needs datetime index
#print events
events.set_index("start",inplace=True)
return events
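if __name__ == "__main__":
    # Minimal usage sketch; the csv name and folder below are placeholders (not files shipped with this
    # module), and the numbers printed depend entirely on the data, so treat this as an illustration only.
    util = TS_Util()
    df = util.load_TS("meter_readings.csv", "./data")
    print(util.count_missing(df, output="percent"))
    df = util.remove_missing(df)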
|
mit
| 3,792,750,501,190,625,300 | 23.756137 | 141 | 0.484959 | false |
bharatmooc/configuration-1
|
util/vpc-tools/vpc-tools.py
|
1
|
4340
|
"""VPC Tools.
Usage:
vpc-tools.py ssh-config (vpc <vpc_id> | stack-name <stack_name>) identity-file <identity_file> user <user> [(config-file <config_file>)] [(strict-host-check <strict_host_check>)]
vpc-tools.py (-h --help)
vpc-tools.py (-v --version)
Options:
-h --help Show this screen.
-v --version Show version.
"""
import boto
from docopt import docopt
from vpcutil import vpc_for_stack_name
from vpcutil import stack_name_for_vpc
from collections import defaultdict
VERSION="vpc tools 0.1"
DEFAULT_USER="ubuntu"
DEFAULT_HOST_CHECK="ask"
JUMPBOX_CONFIG = """
Host {jump_box}
HostName {ip}
IdentityFile {identity_file}
ForwardAgent yes
User {user}
StrictHostKeyChecking {strict_host_check}
"""
HOST_CONFIG = """
# Instance ID: {instance_id}
Host {name}
ProxyCommand ssh {config_file} -W %h:%p {jump_box}
HostName {ip}
IdentityFile {identity_file}
ForwardAgent yes
User {user}
StrictHostKeyChecking {strict_host_check}
"""
def dispatch(args):
if args.get("ssh-config"):
_ssh_config(args)
def _ssh_config(args):
if args.get("vpc"):
vpc_id = args.get("<vpc_id>")
stack_name = stack_name_for_vpc(vpc_id)
elif args.get("stack-name"):
stack_name = args.get("<stack_name>")
vpc_id = vpc_for_stack_name(stack_name)
else:
raise Exception("No vpc_id or stack_name provided.")
vpc = boto.connect_vpc()
identity_file = args.get("<identity_file>")
user = args.get("<user>")
config_file = args.get("<config_file>")
strict_host_check = args.get("<strict_host_check>")
if not user:
user = DEFAULT_USER
if not strict_host_check:
strict_host_check = DEFAULT_HOST_CHECK
if config_file:
config_file = "-F {}".format(config_file)
else:
config_file = ""
jump_box = "{stack_name}-jumpbox".format(stack_name=stack_name)
friendly = "{stack_name}-{logical_id}-{instance_number}"
id_type_counter = defaultdict(int)
reservations = vpc.get_all_instances(filters={'vpc-id' : vpc_id})
for reservation in reservations:
for instance in reservation.instances:
if 'play' in instance.tags:
logical_id = instance.tags['play']
elif 'role' in instance.tags:
# deprecated, use "play" instead
logical_id = instance.tags['role']
elif 'group' in instance.tags:
logical_id = instance.tags['group']
elif 'aws:cloudformation:logical-id' in instance.tags:
logical_id = instance.tags['aws:cloudformation:logical-id']
else:
continue
instance_number = id_type_counter[logical_id]
id_type_counter[logical_id] += 1
if logical_id == "BastionHost" or logical_id == 'bastion':
print JUMPBOX_CONFIG.format(
jump_box=jump_box,
ip=instance.ip_address,
user=user,
identity_file=identity_file,
strict_host_check=strict_host_check)
# Print host config even for the bastion box because that is how
# ansible accesses it.
print HOST_CONFIG.format(
name=instance.private_ip_address,
jump_box=jump_box,
ip=instance.private_ip_address,
user=user,
identity_file=identity_file,
config_file=config_file,
strict_host_check=strict_host_check,
instance_id=instance.id)
#duplicating for convenience with ansible
name = friendly.format(stack_name=stack_name,
logical_id=logical_id,
instance_number=instance_number)
print HOST_CONFIG.format(
name=name,
jump_box=jump_box,
ip=instance.private_ip_address,
user=user,
identity_file=identity_file,
config_file=config_file,
strict_host_check=strict_host_check,
instance_id=instance.id)
if __name__ == '__main__':
args = docopt(__doc__, version=VERSION)
dispatch(args)
|
agpl-3.0
| 3,703,358,002,025,198,600 | 30.223022 | 182 | 0.571429 | false |
samnjugu/python-gmail
|
Tools.py
|
1
|
3354
|
#!/usr/bin/python
import smtplib, imaplib, email, email.header
# Utility class to handle IMAP and SMTP
# IMAP is used to fetch emails while SMTP is used to send emails
class Gmail(object):
def __init__(self, email, password):
        self.email = email  # keep the sender address; send_onetime_email and send_email_to rely on it
        self.server = 'smtp.gmail.com'
        self.port = 587
session = smtplib.SMTP(self.server, self.port)
session.ehlo()
session.starttls()
session.ehlo
session.login(email, password)
self.session = session
#This method composes an email before sending
def send_onetime_email(self,subject,body):
sender = self.email
        receiver = 'user' # shows up in the To field, not used for sending
        receivers = ['someuser@gmail.com'] # actual recipient list used for sending
msg = ("From: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s" %(sender, receiver, subject, body))
self.session.sendmail(sender, receivers, msg)
self.session.close()
#This method takes a msg which is an email already composed and doesn't close session
def send_email_to(self,receivers,msg):
self.session.sendmail(self.email, receivers, msg)
def gmail_close(self):
self.session.close()
class ImapGmail(object):
def __init__(self, email, password):
self.server = 'imap.gmail.com'
self.session = imaplib.IMAP4_SSL(self.server)
self.session.login(email, password)
self.email_data_li = []
def folder_select(self, folder):
self.session.select(folder)
def folder_search(self,querys):
if querys:#only proceed if we have querys
for query in querys:#loop through all the querys
rv, data = self.session.search(None, query)
if rv != 'OK':
print "No messages found!"
return
for num in data[0].split():
rv, data = self.session.fetch(num, '(RFC822)')#get the whole email
if rv != 'OK':
print "ERROR getting message", num
return
self.email_data_li.append(data)
def imap_close(self):
try:
self.session.close()
except imaplib.IMAP4.error:
print 'Close called without calling select'
finally:
self.session.logout()
def messages_print_header(self):
if self.email_data_li:
for data in self.email_data_li:
msg = email.message_from_string(data[0][1])
decode = email.header.decode_header(msg['Subject'])[0]
subject = unicode(decode[0])
print 'Message Subject: %s' % (subject)
print 'Date:', msg['Date']
def list_mailboxes(self):
rv, mailboxes = self.session.list()
if rv == 'OK':
print "Mailboxes:"
print mailboxes #prints list of all mailboxes
def fwd_emails(self, user, passw, to_addr):
if self.email_data_li:
gm = Gmail(user, passw)
for data in self.email_data_li:
# create a Message instance from the email data
message = email.message_from_string(data[0][1])
# replace headers
message.replace_header("From", user)
message.replace_header("To", ', '.join(to_addr))#convert list to string
gm.send_email_to(to_addr, message.as_string())
print "Done Sending Emails"
gm.gmail_close()
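if __name__ == '__main__':
    # Hedged usage sketch: the address and app password below are placeholders, not part of the original module.
    gm = Gmail('someuser@gmail.com', 'app-password')
    gm.send_onetime_email('Test subject', 'Hello from python-gmail')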
|
gpl-2.0
| 5,599,144,041,551,790,000 | 34.305263 | 91 | 0.59511 | false |
jantman/awslimitchecker
|
awslimitchecker/tests/test_connectable.py
|
1
|
13876
|
"""
awslimitchecker/tests/test_connectable.py
The latest version of this package is available at:
<https://github.com/jantman/awslimitchecker>
################################################################################
Copyright 2015-2018 Jason Antman <jason@jasonantman.com>
This file is part of awslimitchecker, also known as awslimitchecker.
awslimitchecker is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
awslimitchecker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with awslimitchecker. If not, see <http://www.gnu.org/licenses/>.
The Copyright and Authors attributions contained herein may not be removed or
otherwise altered, except to add the Author attribution of a contributor to
this work. (Additional Terms pursuant to Section 7b of the AGPL v3)
################################################################################
While not legally required, I sincerely request that anyone who finds
bugs please submit them at <https://github.com/jantman/awslimitchecker> or
to me via email, and that you send any contributions or improvements
either as a pull request on GitHub, or to me via email.
################################################################################
AUTHORS:
Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com>
################################################################################
"""
from awslimitchecker.connectable import Connectable, ConnectableCredentials
from datetime import datetime
import sys
import os
# https://code.google.com/p/mock/issues/detail?id=249
# py>=3.4 should use unittest.mock not the mock package on pypi
if (
sys.version_info[0] < 3 or
sys.version_info[0] == 3 and sys.version_info[1] < 4
):
from mock import patch, call, Mock, PropertyMock
else:
from unittest.mock import patch, call, Mock, PropertyMock
pbm = 'awslimitchecker.connectable'
pb = '%s.Connectable' % pbm
class ConnectableTester(Connectable):
"""example class to test Connectable"""
service_name = 'connectable_tester'
def __init__(self, account_id=None, account_role=None, region=None,
external_id=None, mfa_serial_number=None, mfa_token=None,
profile_name=None):
self.account_id = account_id
self.account_role = account_role
self.region = region
self.conn = None
self.resource_conn = None
self.external_id = external_id
self.mfa_serial_number = mfa_serial_number
self.mfa_token = mfa_token
self.profile_name = profile_name
class TestMaxRetriesConfig(object):
@patch.dict(
os.environ,
{'BOTO_MAX_RETRIES_myapi': '10'},
clear=True
)
def test_happy_path(self):
mock_conf = Mock()
cls = ConnectableTester()
cls.api_name = 'myapi'
with patch('%s.Config' % pbm) as m_conf:
m_conf.return_value = mock_conf
res = cls._max_retries_config
assert res == mock_conf
assert m_conf.mock_calls == [call(retries={'max_attempts': 10})]
@patch.dict(
os.environ,
{},
clear=True
)
def test_env_var_not_set(self):
mock_conf = Mock()
cls = ConnectableTester()
cls.api_name = 'myapi'
with patch('%s.Config' % pbm) as m_conf:
m_conf.return_value = mock_conf
res = cls._max_retries_config
assert res is None
assert m_conf.mock_calls == []
@patch.dict(
os.environ,
{'BOTO_MAX_RETRIES_myapi': 'hello'},
clear=True
)
def test_cant_parse_int(self):
mock_conf = Mock()
cls = ConnectableTester()
cls.api_name = 'myapi'
with patch('%s.Config' % pbm) as m_conf:
m_conf.return_value = mock_conf
res = cls._max_retries_config
assert res is None
assert m_conf.mock_calls == []
class Test_Connectable(object):
def test_connect(self):
mock_conn = Mock()
mock_cc = Mock()
type(mock_cc).region_name = 'myregion'
type(mock_conn)._client_config = mock_cc
cls = ConnectableTester()
cls.api_name = 'myapi'
kwargs = {'foo': 'fooval', 'bar': 'barval'}
with patch('%s._boto3_connection_kwargs' % pb,
new_callable=PropertyMock, create=True) as mock_kwargs:
mock_kwargs.return_value = kwargs
with patch('%s.logger' % pbm) as mock_logger:
with patch('%s.boto3.client' % pbm) as mock_client:
with patch(
'%s._max_retries_config' % pb, new_callable=PropertyMock
) as m_mrc:
m_mrc.return_value = None
mock_client.return_value = mock_conn
cls.connect()
assert mock_kwargs.mock_calls == [call()]
assert mock_logger.mock_calls == [
call.info("Connected to %s in region %s",
'myapi',
'myregion')
]
assert mock_client.mock_calls == [
call(
'myapi',
foo='fooval',
bar='barval'
)
]
assert m_mrc.mock_calls == [call()]
assert cls.conn == mock_client.return_value
def test_connect_with_retries(self):
mock_conn = Mock()
mock_cc = Mock()
type(mock_cc).region_name = 'myregion'
type(mock_conn)._client_config = mock_cc
cls = ConnectableTester()
cls.api_name = 'myapi'
kwargs = {'foo': 'fooval', 'bar': 'barval'}
mock_conf = Mock()
with patch('%s._boto3_connection_kwargs' % pb,
new_callable=PropertyMock, create=True) as mock_kwargs:
mock_kwargs.return_value = kwargs
with patch('%s.logger' % pbm) as mock_logger:
with patch('%s.boto3.client' % pbm) as mock_client:
with patch(
'%s._max_retries_config' % pb, new_callable=PropertyMock
) as m_mrc:
m_mrc.return_value = mock_conf
mock_client.return_value = mock_conn
cls.connect()
assert mock_kwargs.mock_calls == [call()]
assert mock_logger.mock_calls == [
call.info("Connected to %s in region %s",
'myapi',
'myregion')
]
assert mock_client.mock_calls == [
call(
'myapi',
foo='fooval',
bar='barval',
config=mock_conf
)
]
assert m_mrc.mock_calls == [call(), call()]
assert cls.conn == mock_client.return_value
def test_connect_again(self):
mock_conn = Mock()
mock_cc = Mock()
type(mock_cc).region_name = 'myregion'
type(mock_conn)._client_config = mock_cc
cls = ConnectableTester()
cls.conn = mock_conn
cls.api_name = 'myapi'
kwargs = {'foo': 'fooval', 'bar': 'barval'}
with patch('%s._boto3_connection_kwargs' % pb,
new_callable=PropertyMock, create=True) as mock_kwargs:
mock_kwargs.return_value = kwargs
with patch('%s.logger' % pbm) as mock_logger:
with patch('%s.boto3.client' % pbm) as mock_client:
with patch(
'%s._max_retries_config' % pb,
new_callable=PropertyMock
) as m_mrc:
m_mrc.return_value = None
mock_client.return_value = mock_conn
cls.connect()
assert mock_kwargs.mock_calls == []
assert mock_logger.mock_calls == []
assert mock_client.mock_calls == []
assert m_mrc.mock_calls == []
assert cls.conn == mock_conn
def test_connect_resource(self):
mock_conn = Mock()
mock_meta = Mock()
mock_client = Mock()
mock_cc = Mock()
type(mock_cc).region_name = 'myregion'
type(mock_client)._client_config = mock_cc
type(mock_meta).client = mock_client
type(mock_conn).meta = mock_meta
cls = ConnectableTester()
cls.api_name = 'myapi'
kwargs = {'foo': 'fooval', 'bar': 'barval'}
with patch('%s._boto3_connection_kwargs' % pb,
new_callable=PropertyMock, create=True) as mock_kwargs:
mock_kwargs.return_value = kwargs
with patch('%s.logger' % pbm) as mock_logger:
with patch('%s.boto3.resource' % pbm) as mock_resource:
with patch(
'%s._max_retries_config' % pb,
new_callable=PropertyMock
) as m_mrc:
m_mrc.return_value = None
mock_resource.return_value = mock_conn
cls.connect_resource()
assert mock_kwargs.mock_calls == [call()]
assert mock_logger.mock_calls == [
call.info("Connected to %s (resource) in region %s",
'myapi',
'myregion')
]
assert mock_resource.mock_calls == [
call(
'myapi',
foo='fooval',
bar='barval'
)
]
assert m_mrc.mock_calls == [call()]
assert cls.resource_conn == mock_resource.return_value
def test_connect_resource_with_max_retries(self):
mock_conn = Mock()
mock_meta = Mock()
mock_client = Mock()
mock_cc = Mock()
type(mock_cc).region_name = 'myregion'
type(mock_client)._client_config = mock_cc
type(mock_meta).client = mock_client
type(mock_conn).meta = mock_meta
cls = ConnectableTester()
cls.api_name = 'myapi'
kwargs = {'foo': 'fooval', 'bar': 'barval'}
mock_conf = Mock()
with patch('%s._boto3_connection_kwargs' % pb,
new_callable=PropertyMock, create=True) as mock_kwargs:
mock_kwargs.return_value = kwargs
with patch('%s.logger' % pbm) as mock_logger:
with patch('%s.boto3.resource' % pbm) as mock_resource:
with patch(
'%s._max_retries_config' % pb,
new_callable=PropertyMock
) as m_mrc:
m_mrc.return_value = mock_conf
mock_resource.return_value = mock_conn
cls.connect_resource()
assert mock_kwargs.mock_calls == [call()]
assert mock_logger.mock_calls == [
call.info("Connected to %s (resource) in region %s",
'myapi',
'myregion')
]
assert mock_resource.mock_calls == [
call(
'myapi',
foo='fooval',
bar='barval',
config=mock_conf
)
]
assert m_mrc.mock_calls == [call(), call()]
assert cls.resource_conn == mock_resource.return_value
def test_connect_resource_again(self):
mock_conn = Mock()
mock_meta = Mock()
mock_client = Mock()
mock_cc = Mock()
type(mock_cc).region_name = 'myregion'
type(mock_client)._client_config = mock_cc
type(mock_meta).client = mock_client
type(mock_conn).meta = mock_meta
cls = ConnectableTester()
cls.api_name = 'myapi'
cls.resource_conn = mock_conn
kwargs = {'foo': 'fooval', 'bar': 'barval'}
with patch('%s._boto3_connection_kwargs' % pb,
new_callable=PropertyMock, create=True) as mock_kwargs:
mock_kwargs.return_value = kwargs
with patch('%s.logger' % pbm) as mock_logger:
with patch('%s.boto3.resource' % pbm) as mock_resource:
with patch(
'%s._max_retries_config' % pb,
new_callable=PropertyMock
) as m_mrc:
m_mrc.return_value = None
mock_resource.return_value = mock_conn
cls.connect_resource()
assert mock_kwargs.mock_calls == []
assert mock_logger.mock_calls == []
assert mock_resource.mock_calls == []
assert m_mrc.mock_calls == []
assert cls.resource_conn == mock_conn
class TestConnectableCredentials(object):
def test_connectable_credentials(self):
result = {
'Credentials': {
'AccessKeyId': 'akid',
'SecretAccessKey': 'secret',
'SessionToken': 'token',
'Expiration': datetime(2015, 1, 1)
},
'AssumedRoleUser': {
'AssumedRoleId': 'roleid',
'Arn': 'arn'
},
'PackedPolicySize': 123
}
c = ConnectableCredentials(result)
assert c.access_key == 'akid'
assert c.secret_key == 'secret'
assert c.session_token == 'token'
assert c.expiration == datetime(2015, 1, 1)
assert c.assumed_role_id == 'roleid'
assert c.assumed_role_arn == 'arn'
|
agpl-3.0
| -885,974,330,035,529,600 | 35.806366 | 80 | 0.528106 | false |
cjaymes/pyscap
|
src/scap/model/oval_5/defs/windows/MetabaseObjectElement.py
|
1
|
1144
|
# Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.model.oval_5.defs.windows.ObjectType import ObjectType
logger = logging.getLogger(__name__)
class MetabaseObjectElement(ObjectType):
MODEL_MAP = {
'tag_name': 'metabase_object',
'elements': [
{'tag_name': 'key', 'class': 'scap.model.oval_5.defs.EntityObjectType', 'min': 0},
{'tag_name': 'id', 'class': 'scap.model.oval_5.defs.EntityObjectType', 'nillable': True, 'min': 0},
],
}
|
gpl-3.0
| -1,829,821,459,543,834,000 | 37.133333 | 111 | 0.700175 | false |
osroom/osroom
|
apps/core/logger/logger_server.py
|
1
|
4431
|
#!/usr/bin/env python
# -*-coding:utf-8-*-
# @Time : 2017/11/1 ~ 2019/9/1
# @Author : Allen Woo
import json
import os
import socket
import time
import logging
from logging.handlers import TimedRotatingFileHandler
from apps.configs.sys_config import LOG_PATH, SOCKET_PORT
from apps.utils.osr_async.osr_async import async_process
class LoggerClientUDP:
def __init__(self,
set_level=logging.INFO,
logfile="{}/{}.log".format(LOG_PATH, time.time()),
get_log_name='logger',
formatter='%(asctime)s %(levelname)s %(message)s',
**kwargs
):
self.sk = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.set = {
"kwargs": {
"set_level": set_level,
"logfile": logfile,
"get_log_name": get_log_name,
"formatter": formatter
}
}
self.sp = "__+++++udposrlog+++++__"
def debug(self, log, **kwargs):
self.set["log_level"] = "debug"
self.send_log(log=log)
def info(self, log, **kwargs):
self.set["log_level"] = "info"
self.send_log(log=log)
def warning(self, log, **kwargs):
self.set["log_level"] = "warning"
self.send_log(log=log)
def error(self, log, **kwargs):
self.set["log_level"] = "error"
self.send_log(log=log)
def critical(self, log, **kwargs):
self.set["log_level"] = "critical"
self.send_log(log=log)
def exception(self, log, **kwargs):
self.set["log_level"] = "error"
self.send_log(log=log)
def send_log(self, log):
set = json.dumps(self.set)
log = "{}{}{}".format(set, self.sp, log)
self.sk.sendto(log.encode(), ("127.0.0.1", SOCKET_PORT))
def __del__(self):
self.sk.close()
class LogServerUDP:
def init_app(self):
self.sk = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
self.sk.bind(("127.0.0.1", SOCKET_PORT))
self.failed = False
print(" * [pid:{}] Logger Server...".format(os.getpid()))
except:
self.failed = True
return False
self.loggers = {}
self.self_logfile = "{}/logger_server.log".format(LOG_PATH)
self.get_log_name = "logger_server"
self.kwargs = {
"set_level": logging.DEBUG,
"logfile": self.self_logfile,
"get_log_name": self.get_log_name,
"formatter": '%(asctime)s %(levelname)s %(message)s'
}
self.set_logger(self.kwargs)
self.sp = "__+++++udposrlog+++++__"
return True
def set_logger(self, kwargs):
set_level = kwargs.get("set_level")
logfile = kwargs.get("logfile")
get_log_name = kwargs.get("get_log_name")
formatter = kwargs.get("formatter")
if not os.path.exists(os.path.split(logfile)[0]):
os.makedirs(os.path.split(logfile)[0])
# According to the time
file_handler = TimedRotatingFileHandler(logfile, "midnight", 1, 7)
file_handler.suffix = "%Y-%m-%d"
# According to the size
# file_handler = RotatingFileHandler(filename, maxBytes=10*1024*1024, backupCount=3)
file_handler.setLevel(set_level)
_formatter = logging.Formatter(formatter)
file_handler.setFormatter(_formatter)
logging.getLogger(get_log_name).addHandler(file_handler)
logging.getLogger(get_log_name).setLevel(logging.INFO)
logg = logging.getLogger(get_log_name)
self.loggers[get_log_name] = {
"critical": logg.critical,
"error": logg.error,
"warning": logg.warning,
"info": logg.info,
"debug": logg.debug,
}
return self.loggers[get_log_name]
@async_process()
def log_server(self):
if self.failed:
return
while True:
data = self.sk.recv(10240)
data = data.decode()
data = data.split(self.sp)
sets = data[0]
log = data[1]
sets = json.loads(sets)
kwargs = sets["kwargs"]
logg = self.loggers.get(kwargs["get_log_name"])
if not logg:
logg = self.set_logger(kwargs)
logg[sets["log_level"]](log)
def __del__(self):
self.sk.close()
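if __name__ == "__main__":
    # Illustrative wiring of the two classes; in the real application the server is started by the web
    # process, so this block is only a sketch of the local UDP round trip.
    server = LogServerUDP()
    if server.init_app():
        server.log_server()
        LoggerClientUDP(get_log_name="demo").info("logger server round-trip test")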
|
bsd-2-clause
| -8,607,789,152,920,088,000 | 30.877698 | 92 | 0.541864 | false |
baoboa/pyqt5
|
examples/itemviews/frozencolumn/frozencolumn.py
|
1
|
7103
|
#!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2017 Hans-Peter Jansen <hpj@urpla.net>
## Copyright (C) 2016 The Qt Company Ltd.
##
## This file is part of the examples of the Qt Toolkit.
##
## $QT_BEGIN_LICENSE:BSD$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https:#www.qt.io/terms-conditions. For further
## information use the contact form at https:#www.qt.io/contact-us.
##
## BSD License Usage
## Alternatively, you may use self file under the terms of the BSD license
## as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, self list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, self list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of The Qt Company Ltd nor the names of its
## contributors may be used to endorse or promote products derived
## from self software without specific prior written permission.
##
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
##
## $QT_END_LICENSE$
##
#############################################################################
from PyQt5.QtCore import QFile, QFileInfo, Qt
from PyQt5.QtGui import QStandardItem, QStandardItemModel
from PyQt5.QtWidgets import QApplication, QHeaderView, QTableView
class FreezeTableWidget(QTableView):
def __init__(self, model):
super(FreezeTableWidget, self).__init__()
self.setModel(model)
self.frozenTableView = QTableView(self)
self.init()
self.horizontalHeader().sectionResized.connect(self.updateSectionWidth)
self.verticalHeader().sectionResized.connect(self.updateSectionHeight)
self.frozenTableView.verticalScrollBar().valueChanged.connect(
self.verticalScrollBar().setValue)
self.verticalScrollBar().valueChanged.connect(
self.frozenTableView.verticalScrollBar().setValue)
def init(self):
self.frozenTableView.setModel(self.model())
self.frozenTableView.setFocusPolicy(Qt.NoFocus)
self.frozenTableView.verticalHeader().hide()
self.frozenTableView.horizontalHeader().setSectionResizeMode(
QHeaderView.Fixed)
self.viewport().stackUnder(self.frozenTableView)
self.frozenTableView.setStyleSheet('''
QTableView { border: none;
background-color: #8EDE21;
selection-background-color: #999;
}''') # for demo purposes
self.frozenTableView.setSelectionModel(self.selectionModel())
for col in range(1, self.model().columnCount()):
self.frozenTableView.setColumnHidden(col, True)
self.frozenTableView.setColumnWidth(0, self.columnWidth(0))
self.frozenTableView.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.frozenTableView.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.frozenTableView.show()
self.updateFrozenTableGeometry()
self.setHorizontalScrollMode(self.ScrollPerPixel)
self.setVerticalScrollMode(self.ScrollPerPixel)
self.frozenTableView.setVerticalScrollMode(self.ScrollPerPixel)
def updateSectionWidth(self, logicalIndex, oldSize, newSize):
        if logicalIndex == 0:
self.frozenTableView.setColumnWidth(0, newSize)
self.updateFrozenTableGeometry()
def updateSectionHeight(self, logicalIndex, oldSize, newSize):
self.frozenTableView.setRowHeight(logicalIndex, newSize)
def resizeEvent(self, event):
super(FreezeTableWidget, self).resizeEvent(event)
self.updateFrozenTableGeometry()
def moveCursor(self, cursorAction, modifiers):
current = super(FreezeTableWidget, self).moveCursor(cursorAction, modifiers)
if (cursorAction == self.MoveLeft and
                current.column() > 0 and
self.visualRect(current).topLeft().x() <
self.frozenTableView.columnWidth(0)):
newValue = (self.horizontalScrollBar().value() +
self.visualRect(current).topLeft().x() -
self.frozenTableView.columnWidth(0))
self.horizontalScrollBar().setValue(newValue)
return current
def scrollTo(self, index, hint):
if index.column() > 0:
super(FreezeTableWidget, self).scrollTo(index, hint)
def updateFrozenTableGeometry(self):
self.frozenTableView.setGeometry(
self.verticalHeader().width() + self.frameWidth(),
self.frameWidth(), self.columnWidth(0),
self.viewport().height() + self.horizontalHeader().height())
def main(args):
def split_and_strip(s, splitter):
        return [field.strip() for field in s.split(splitter)]
app = QApplication(args)
model = QStandardItemModel()
file = QFile(QFileInfo(__file__).absolutePath() + '/grades.txt')
if file.open(QFile.ReadOnly):
line = file.readLine(200).decode('utf-8')
header = split_and_strip(line, ',')
model.setHorizontalHeaderLabels(header)
row = 0
while file.canReadLine():
line = file.readLine(200).decode('utf-8')
if not line.startswith('#') and ',' in line:
fields = split_and_strip(line, ',')
for col, field in enumerate(fields):
newItem = QStandardItem(field)
model.setItem(row, col, newItem)
row += 1
file.close()
tableView = FreezeTableWidget(model)
tableView.setWindowTitle("Frozen Column Example")
tableView.resize(560, 680)
tableView.show()
return app.exec_()
if __name__ == '__main__':
import sys
main(sys.argv)
|
gpl-3.0
| 1,153,572,706,326,094,600 | 42.576687 | 84 | 0.669435 | false |
olhoneles/olhoneles
|
montanha/forms.py
|
1
|
1722
|
# -*- coding: utf-8 -*-
#
# Copyright (©) 2013 Marcelo Jorge Vieira <metal@alucinados.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django import forms
from parsley.decorators import parsleyfy
from captcha.fields import ReCaptchaField
@parsleyfy
class ContactUsForm(forms.Form):
name = forms.CharField(
label="Nome",
required=True,
widget=forms.TextInput(
attrs={"placeholder": "Seu nome",
"data-trigger": "change",
"class": "span5"}))
email = forms.EmailField(
label="Email",
required=True,
widget=forms.TextInput(
attrs={"placeholder": "Seu email",
"data-trigger": "change",
"class": "span5"}))
message = forms.CharField(
label="Mensagem",
required=True,
widget=forms.Textarea(
attrs={"placeholder": "Sua mensagem",
"data-trigger": "change",
"class": "span5",
'rows': 5}))
captcha = ReCaptchaField(attrs={'theme': 'clean'})
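# Hedged usage sketch (view and template names are hypothetical, not part of this module):
#   def contact_us(request):
#       form = ContactUsForm(request.POST or None)
#       if request.method == "POST" and form.is_valid():
#           pass  # e.g. email form.cleaned_data to the maintainers
#       return render(request, "contact_us.html", {"form": form})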
|
agpl-3.0
| 3,365,229,986,152,435,700 | 32.745098 | 75 | 0.624637 | false |
jiteshjha/remote-screen-control
|
pyxhook.py
|
1
|
15967
|
#!/usr/bin/python
#
# pyxhook -- an extension to emulate some of the PyHook library on linux.
#
# Copyright (C) 2008 Tim Alexander <dragonfyre13@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Thanks to Alex Badea <vamposdecampos@gmail.com> for writing the Record
# demo for the xlib libraries. It helped me immensely working with these
# in this library.
#
# Thanks to the python-xlib team. This wouldn't have been possible without
# your code.
#
# This requires:
# at least python-xlib 1.4
# xwindows must have the "record" extension present, and active.
#
# This file has now been somewhat extensively modified by
# Daniel Folkinshteyn <nanotube@users.sf.net>
# So if there are any bugs, they are probably my fault. :)
import sys
import os
import re
import time
import threading
from Xlib import X, XK, display, error
from Xlib.ext import record
from Xlib.protocol import rq
#######################################################################
########################START CLASS DEF################################
#######################################################################
class HookManager(threading.Thread):
"""This is the main class. Instantiate it, and you can hand it KeyDown and KeyUp (functions in your own code) which execute to parse the pyxhookkeyevent class that is returned.
This simply takes these two values for now:
KeyDown = The function to execute when a key is pressed, if it returns anything. It hands the function an argument that is the pyxhookkeyevent class.
KeyUp = The function to execute when a key is released, if it returns anything. It hands the function an argument that is the pyxhookkeyevent class.
"""
def __init__(self):
threading.Thread.__init__(self)
self.finished = threading.Event()
# Give these some initial values
self.mouse_position_x = 0
self.mouse_position_y = 0
self.ison = {"shift":False, "caps":False}
# Compile our regex statements.
self.isshift = re.compile('^Shift')
self.iscaps = re.compile('^Caps_Lock')
self.shiftablechar = re.compile('^[a-z0-9]$|^minus$|^equal$|^bracketleft$|^bracketright$|^semicolon$|^backslash$|^apostrophe$|^comma$|^period$|^slash$|^grave$')
self.logrelease = re.compile('.*')
self.isspace = re.compile('^space$')
# Assign default function actions (do nothing).
self.KeyDown = lambda x: True
self.KeyUp = lambda x: True
self.MouseAllButtonsDown = lambda x: True
self.MouseAllButtonsUp = lambda x: True
self.MouseMovement = lambda x: True
self.contextEventMask = [X.KeyPress,X.MotionNotify]
# Hook to our display.
self.local_dpy = display.Display()
self.record_dpy = display.Display()
def run(self):
# Check if the extension is present
if not self.record_dpy.has_extension("RECORD"):
#print("RECORD extension not found")
sys.exit(1)
r = self.record_dpy.record_get_version(0, 0)
#print("RECORD extension version %d.%d" % (r.major_version, r.minor_version))
# Create a recording context; we only want key and mouse events
self.ctx = self.record_dpy.record_create_context(
0,
[record.AllClients],
[{
'core_requests': (0, 0),
'core_replies': (0, 0),
'ext_requests': (0, 0, 0, 0),
'ext_replies': (0, 0, 0, 0),
'delivered_events': (0, 0),
'device_events': tuple(self.contextEventMask), #(X.KeyPress, X.ButtonPress),
'errors': (0, 0),
'client_started': False,
'client_died': False,
}])
# Enable the context; this only returns after a call to record_disable_context,
# while calling the callback function in the meantime
self.record_dpy.record_enable_context(self.ctx, self.processevents)
# Finally free the context
self.record_dpy.record_free_context(self.ctx)
def cancel(self):
self.finished.set()
self.local_dpy.record_disable_context(self.ctx)
self.local_dpy.flush()
def printevent(self, event):
print(event)
def HookKeyboard(self):
pass
# We don't need to do anything here anymore, since the default mask
# is now set to contain X.KeyPress
#self.contextEventMask[0] = X.KeyPress
def HookMouse(self):
pass
# We don't need to do anything here anymore, since the default mask
# is now set to contain X.MotionNotify
# need mouse motion to track pointer position, since ButtonPress events
# don't carry that info.
#self.contextEventMask[1] = X.MotionNotify
def processevents(self, reply):
if reply.category != record.FromServer:
return
if reply.client_swapped:
print("bleh")
return
if not len(reply.data) or ord(str(reply.data[0])) < 2:
# not an event
return
data = reply.data
while len(data):
event, data = rq.EventField(None).parse_binary_value(data, self.record_dpy.display, None, None)
if event.type == X.KeyPress:
hookevent = self.keypressevent(event)
self.KeyDown(hookevent)
elif event.type == X.KeyRelease:
hookevent = self.keyreleaseevent(event)
self.KeyUp(hookevent)
elif event.type == X.ButtonPress:
hookevent = self.buttonpressevent(event)
self.MouseAllButtonsDown(hookevent)
elif event.type == X.ButtonRelease:
hookevent = self.buttonreleaseevent(event)
self.MouseAllButtonsUp(hookevent)
elif event.type == X.MotionNotify:
# use mouse moves to record mouse position, since press and release events
# do not give mouse position info (event.root_x and event.root_y have
# bogus info).
hookevent = self.mousemoveevent(event)
self.MouseMovement(hookevent)
#print "processing events...", event.type
def keypressevent(self, event):
matchto = self.lookup_keysym(self.local_dpy.keycode_to_keysym(event.detail, 0))
if self.shiftablechar.match(self.lookup_keysym(self.local_dpy.keycode_to_keysym(event.detail, 0))): ## This is a character that can be typed.
if self.ison["shift"] == False:
keysym = self.local_dpy.keycode_to_keysym(event.detail, 0)
return self.makekeyhookevent(keysym, event)
else:
keysym = self.local_dpy.keycode_to_keysym(event.detail, 1)
return self.makekeyhookevent(keysym, event)
else: ## Not a typable character.
keysym = self.local_dpy.keycode_to_keysym(event.detail, 0)
if self.isshift.match(matchto):
self.ison["shift"] = self.ison["shift"] + 1
elif self.iscaps.match(matchto):
if self.ison["caps"] == False:
self.ison["shift"] = self.ison["shift"] + 1
self.ison["caps"] = True
if self.ison["caps"] == True:
self.ison["shift"] = self.ison["shift"] - 1
self.ison["caps"] = False
return self.makekeyhookevent(keysym, event)
def keyreleaseevent(self, event):
if self.shiftablechar.match(self.lookup_keysym(self.local_dpy.keycode_to_keysym(event.detail, 0))):
if self.ison["shift"] == False:
keysym = self.local_dpy.keycode_to_keysym(event.detail, 0)
else:
keysym = self.local_dpy.keycode_to_keysym(event.detail, 1)
else:
keysym = self.local_dpy.keycode_to_keysym(event.detail, 0)
matchto = self.lookup_keysym(keysym)
if self.isshift.match(matchto):
self.ison["shift"] = self.ison["shift"] - 1
return self.makekeyhookevent(keysym, event)
def buttonpressevent(self, event):
#self.clickx = self.rootx
#self.clicky = self.rooty
return self.makemousehookevent(event)
def buttonreleaseevent(self, event):
#if (self.clickx == self.rootx) and (self.clicky == self.rooty):
##print "ButtonClick " + str(event.detail) + " x=" + str(self.rootx) + " y=" + str(self.rooty)
#if (event.detail == 1) or (event.detail == 2) or (event.detail == 3):
#self.captureclick()
#else:
#pass
return self.makemousehookevent(event)
# sys.stdout.write("ButtonDown " + str(event.detail) + " x=" + str(self.clickx) + " y=" + str(self.clicky) + "\n")
# sys.stdout.write("ButtonUp " + str(event.detail) + " x=" + str(self.rootx) + " y=" + str(self.rooty) + "\n")
#sys.stdout.flush()
def mousemoveevent(self, event):
self.mouse_position_x = event.root_x
self.mouse_position_y = event.root_y
return self.makemousehookevent(event)
# need the following because XK.keysym_to_string() only does printable chars
# rather than being the correct inverse of XK.string_to_keysym()
def lookup_keysym(self, keysym):
for name in dir(XK):
if name.startswith("XK_") and getattr(XK, name) == keysym:
return name.lstrip("XK_")
return "[%d]" % keysym
def asciivalue(self, keysym):
asciinum = XK.string_to_keysym(self.lookup_keysym(keysym))
return asciinum % 256
def makekeyhookevent(self, keysym, event):
storewm = self.xwindowinfo()
if event.type == X.KeyPress:
MessageName = "key down"
elif event.type == X.KeyRelease:
MessageName = "key up"
return pyxhookkeyevent(storewm["handle"], storewm["name"], storewm["class"], self.lookup_keysym(keysym), self.asciivalue(keysym), False, event.detail, MessageName)
def makemousehookevent(self, event):
storewm = self.xwindowinfo()
if event.detail == 1:
MessageName = "mouse left "
elif event.detail == 3:
MessageName = "mouse right "
elif event.detail == 2:
MessageName = "mouse middle "
elif event.detail == 5:
MessageName = "mouse wheel down "
elif event.detail == 4:
MessageName = "mouse wheel up "
else:
MessageName = "mouse " + str(event.detail) + " "
if event.type == X.ButtonPress:
MessageName = MessageName + "down"
elif event.type == X.ButtonRelease:
MessageName = MessageName + "up"
else:
MessageName = "mouse moved"
return pyxhookmouseevent(storewm["handle"], storewm["name"], storewm["class"], (self.mouse_position_x, self.mouse_position_y), MessageName)
def xwindowinfo(self):
try:
windowvar = self.local_dpy.get_input_focus().focus
wmname = windowvar.get_wm_name()
wmclass = windowvar.get_wm_class()
wmhandle = str(windowvar)[20:30]
except:
## This is to keep things running smoothly. It almost never happens, but still...
return {"name":None, "class":None, "handle":None}
if (wmname == None) and (wmclass == None):
try:
windowvar = windowvar.query_tree().parent
wmname = windowvar.get_wm_name()
wmclass = windowvar.get_wm_class()
wmhandle = str(windowvar)[20:30]
except:
## This is to keep things running smoothly. It almost never happens, but still...
return {"name":None, "class":None, "handle":None}
if wmclass == None:
return {"name":wmname, "class":wmclass, "handle":wmhandle}
else:
return {"name":wmname, "class":wmclass[0], "handle":wmhandle}
class pyxhookkeyevent:
"""This is the class that is returned with each key event.f
It simply creates the variables below in the class.
Window = The handle of the window.
WindowName = The name of the window.
WindowProcName = The backend process for the window.
Key = The key pressed, shifted to the correct caps value.
Ascii = An ascii representation of the key. It returns 0 if the ascii value is not between 31 and 256.
KeyID = This is just False for now. Under windows, it is the Virtual Key Code, but that's a windows-only thing.
ScanCode = Please don't use this. It differs for pretty much every type of keyboard. X11 abstracts this information anyway.
MessageName = "key down", "key up".
"""
def __init__(self, Window, WindowName, WindowProcName, Key, Ascii, KeyID, ScanCode, MessageName):
self.Window = Window
self.WindowName = WindowName
self.WindowProcName = WindowProcName
self.Key = Key
self.Ascii = Ascii
self.KeyID = KeyID
self.ScanCode = ScanCode
self.MessageName = MessageName
def __str__(self):
return "Window Handle: " + str(self.Window) + "\nWindow Name: " + str(self.WindowName) + "\nWindow's Process Name: " + str(self.WindowProcName) + "\nKey Pressed: " + str(self.Key) + "\nAscii Value: " + str(self.Ascii) + "\nKeyID: " + str(self.KeyID) + "\nScanCode: " + str(self.ScanCode) + "\nMessageName: " + str(self.MessageName) + "\n"
class pyxhookmouseevent:
"""This is the class that is returned with each key event.f
It simply creates the variables below in the class.
Window = The handle of the window.
WindowName = The name of the window.
WindowProcName = The backend process for the window.
Position = 2-tuple (x,y) coordinates of the mouse click
MessageName = "mouse left|right|middle down", "mouse left|right|middle up".
"""
def __init__(self, Window, WindowName, WindowProcName, Position, MessageName):
self.Window = Window
self.WindowName = WindowName
self.WindowProcName = WindowProcName
self.Position = Position
self.MessageName = MessageName
def __str__(self):
return "Window Handle: " + str(self.Window) + "\nWindow Name: " + str(self.WindowName) + "\nWindow's Process Name: " + str(self.WindowProcName) + "\nPosition: " + str(self.Position) + "\nMessageName: " + str(self.MessageName) + "\n"
#######################################################################
#########################END CLASS DEF#################################
#######################################################################
if __name__ == '__main__':
hm = HookManager()
hm.HookKeyboard()
hm.HookMouse()
hm.KeyDown = hm.printevent
hm.KeyUp = hm.printevent
hm.MouseAllButtonsDown = hm.printevent
hm.MouseAllButtonsUp = hm.printevent
hm.MouseMovement = hm.printevent
hm.start()
time.sleep(10)
hm.cancel()
|
mit
| 6,552,510,022,520,829,000 | 43.352778 | 346 | 0.591032 | false |
f4exb/zfs-mania
|
zfile.py
|
1
|
4902
|
#!/usr/bin/env python
"""
Find details about a directory or plain file in a ZFS dataset using zdb utility
"""
import sys, os, traceback
import re
from optparse import OptionParser
# ======================================================================
class Error(Exception):
"""Base class for exceptions in this module."""
pass
# ======================================================================
class InputError(Error):
"""Exception raised for errors in the input.
Attributes:
msg -- explanation of the error
"""
def __init__(self, msg):
self.msg = msg
# ======================================================================
def getInputOptions():
    parser = OptionParser(usage="usage: %%prog options\n\n%s" % __doc__)
parser.add_option("-i", "--input-dump-file", dest="ifile", help="Input dump file (does not use zdb) - required if not -d option", metavar="IFILE", type="string")
parser.add_option("-d", "--dataset", dest="dataset", help="ZFS dataset", metavar="DATASET", type="string")
parser.add_option("-o", "--output-dump-file", dest="ofile", help="Output dump file (result of zdb) - required if -d option", metavar="OFILE", type="string")
parser.add_option("-n", "--numbers", dest="numbers", help="Match object numbers (comma separated list)", metavar="NUMBERS", type="string")
parser.add_option("-t", "--types", dest="types", help="Match object types (comma separated list)", metavar="TYPES", type="string")
parser.add_option("-p", "--path", dest="re_path", help="Regular expression to match file path with", metavar="PATH", type="string")
#parser.add_option("-k", "--mail-ok", dest="mail_ok", help="send informative mail also if result is OK", metavar="MAIL_OK", action="store_true", default=False)
(options, args) = parser.parse_args()
options.types_list = None
options.numbers_list = None
if options.dataset is None:
if options.ifile is None:
raise InputError("You must specify input dump file when dataset is not specified")
else:
if options.ofile is None:
raise InputError("You must specify output dump file when dataset is specified")
if options.types is not None:
options.types_list = options.types.split(',')
if options.numbers is not None:
options.numbers_list = options.numbers.split(',')
return options
# ======================================================================
def dump_zdb_output(options):
os.popen("sudo zdb -dddd %s > %s" % (options.dataset, options.ofile))
return options.ofile
# ======================================================================
def select_zdb_data(options, ifile):
zdump = open(ifile, 'r')
objstr = ""
outstr = ""
objhdr = False
objselect = False
for zline in zdump:
iline = zline.strip("\r\n")
if iline.lstrip().split(' ')[0] == "Object":
objhdr = True
if objselect and len(objstr) > 0:
outstr += objstr
objselect = False
objstr = (iline + '\n')
continue
else:
objstr += (iline + '\n')
if objhdr:
objhdr = False
if options.numbers_list is not None:
for objnum in options.numbers_list:
if iline.lstrip().split(' ')[0] == objnum:
objselect = True
break
continue
if not objselect and options.types_list is not None:
for ztype in options.types_list:
if ztype in iline:
objselect = True
break
continue
if not objselect and options.numbers_list is None and options.types_list is None:
pathLineRE = re.compile(r'^\s+path\s+(\S+)')
pathLineMatch = pathLineRE.match(iline)
if pathLineMatch:
path = pathLineMatch.group(1)
pathRE = re.compile(r'%s' % options.re_path)
if pathRE.match(path):
objselect = True
if objselect:
outstr += objstr
return outstr
# ======================================================================
def main():
try:
options = getInputOptions()
if options.dataset is None:
ifile = options.ifile
else:
ifile = dump_zdb_output(options)
output = select_zdb_data(options, ifile)
print output[:-1]
except KeyboardInterrupt:
return 1
except InputError as e:
print >> sys.stderr, e.msg
return 2
# ======================================================================
if __name__ == "__main__":
rc = main()
exit(rc)
|
mit
| -4,963,215,025,282,937,000 | 34.781022 | 165 | 0.509384 | false |
staticfloat/julia-buildbot
|
master/coverage.py
|
1
|
7918
|
###############################################################################
# Define everything needed to do per-commit coverage testing on Linux
###############################################################################
import os
run_coverage_cmd = """
using Pkg
Pkg.activate("CoverageBase")
using CoverageBase
CoverageBase.runtests(CoverageBase.testnames())
"""
analyse_and_submit_cov_cmd = '''
using Pkg
Pkg.activate("CoverageBase")
using Coverage, CoverageBase
# Process code-coverage files
results = Coverage.LCOV.readfolder(raw"%(prop:juliadir)s/LCOV")
# remove test/ files
filter!(results) do c
!occursin("test/", c.filename)
end
# turn absolute paths into relative, and add base/ to relative paths
CoverageBase.fixpath!(results)
results = Coverage.merge_coverage_counts(results)
# pretty-print what we have got
sort!(results, by=c->c.filename)
for r in results
cov, tot = get_summary(r)
@info "Got coverage data for $(r.filename): $cov/$tot"
end
# keep only files in stdlib/ and base/
let prefixes = (joinpath("base", ""),
joinpath("stdlib", ""))
filter!(results) do c
any(p -> startswith(c.filename, p), prefixes)
end
end
# try to find these files, remove those that are not present
CoverageBase.readsource!(results)
filter!(results) do c
if isempty(c.source)
@info "File $(c.filename) not found"
false
else
true
end
end
# add in any other files we discover
# todo: extend Glob.jl to support these patterns (base/**/*.jl and stdlib/*/src/**/*.jl (except test/))
# todo: consider also or instead looking at the Base._included_files list
allfiles_base = sort!(split(readchomp(Cmd(`find base -name '*.jl'`, dir=CoverageBase.fixabspath(""))), '\n'))
allfiles_stdlib = sort!(map(x -> "stdlib/" * x[3:end],
split(readchomp(Cmd(`find . -name '*.jl' ! -path '*/test/*' ! -path '*/docs/*'`, dir=CoverageBase.fixabspath("stdlib/"))), '\n')))
allfiles = map(fn -> Coverage.FileCoverage(fn, read(CoverageBase.fixabspath(fn), String), Coverage.FileCoverage[]),
[allfiles_base; allfiles_stdlib])
results = Coverage.merge_coverage_counts(results, allfiles)
length(results) == length(allfiles) || @warn "Got coverage for an unexpected file:" symdiff=symdiff(map(x -> x.filename, allfiles), map(x -> x.filename, results))
# drop vendored files
# todo: find a more general way to do this, as this may become hard to maintain
let prefixes = (joinpath("stdlib", "Pkg", ""),
joinpath("stdlib", "Statistics", ""))
filter!(results) do c
all(p -> !startswith(c.filename, p), prefixes)
end
end
# attempt to improve accuracy of the results
foreach(Coverage.amend_coverage_from_src!, results)
# Create git_info for codecov
git_info = Any[
:branch => Base.GIT_VERSION_INFO.branch,
:commit => Base.GIT_VERSION_INFO.commit,
:token => ENV["CODECOV_REPO_TOKEN"],
]
# Submit to codecov
Codecov.submit_generic(results; git_info...)
# Create git_info for Coveralls
git_info = Dict(
"branch" => Base.GIT_VERSION_INFO.branch,
"remotes" => [
Dict(
"name" => "origin",
"url" => "https://github.com/JuliaLang/julia.git"
)
],
"head" => Dict(
"id" => Base.GIT_VERSION_INFO.commit,
"message" => raw"""%(prop:commitmessage)s""",
"committer_name" => raw"""%(prop:commitname)s""",
"committer_email" => raw"""%(prop:commitemail)s""",
"author_name" => raw"""%(prop:authorname)s""",
"author_email" => raw"""%(prop:authoremail)s""",
)
)
# Submit to Coveralls
Coveralls.submit_local(results, git_info)
'''
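# A note on the %(prop:...)s placeholders used in the two command strings
# above: they are not Python string formatting. They are buildbot property
# references and are only substituted when the string is wrapped in
# util.Interpolate() inside a build step, as done further below. A minimal
# sketch of that mechanism (the step name and command are illustrative only):
#
#     steps.ShellCommand(
#         name="list coverage files",
#         command=["/bin/sh", "-c",
#                  util.Interpolate("ls %(prop:juliadir)s/LCOV")])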
# Steps to download a linux tarball, extract it, run coverage on it, and upload coverage stats
julia_coverage_factory = util.BuildFactory()
julia_coverage_factory.useProgress = True
julia_coverage_factory.addSteps([
# Clean the place out from previous runs
steps.ShellCommand(
name="clean it out",
command=["/bin/sh", "-c", "rm -rf *"]
),
# Download the appropriate tarball and extract it
steps.ShellCommand(
name="download/extract tarball",
command=["/bin/sh", "-c", util.Interpolate("curl -L %(prop:download_url)s | tar zx")],
),
# Find Julia directory (so we don't have to know the shortcommit)
steps.SetPropertyFromCommand(
name="Find Julia executable",
command=["/bin/sh", "-c", "echo julia-*"],
property="juliadir"
),
# Update packages
steps.ShellCommand(
name="Update packages",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"), "-e", "import Pkg; Pkg.update(); Pkg.build()"],
),
# Install Coverage, CoverageBase
steps.ShellCommand(
name="Install Coverage and checkout latest master",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"), "-e", "import Pkg; Pkg.add(Pkg.PackageSpec(name=\"Coverage\", rev=\"master\"))"],
),
steps.ShellCommand(
name="Install CoverageBase and checkout latest master",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"), "-e", "import Pkg; Pkg.add(Pkg.PackageSpec(url=\"https://github.com/JuliaCI/CoverageBase.jl\", rev=\"master\"))"],
),
# Test CoverageBase to make sure everything's on the up-and-up
steps.ShellCommand(
name="Test CoverageBase.jl",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"), "-e", "import Pkg; Pkg.test(\"CoverageBase\")"],
haltOnFailure=True,
),
# Run Julia, gathering coverage statistics
steps.MakeDirectory(dir=util.Interpolate("build/%(prop:juliadir)s/LCOV")),
steps.ShellCommand(
name="Run tests",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"),
"--sysimage-native-code=no", util.Interpolate("--code-coverage=%(prop:juliadir)s/LCOV/cov-%%p.info"),
"-e", run_coverage_cmd],
# Fail out if 60 minutes have gone by with nothing printed to stdout
timeout=60*60,
# Kill everything if the overall job has taken more than 10 hours
maxTime=60*60*10,
# Give the process 10 seconds to print out the current backtraces when being killed
sigtermTime=10,
),
    # Submit the results!
steps.ShellCommand(
name="Gather test results and Submit",
command=[util.Interpolate("%(prop:juliadir)s/bin/julia"), "-e", util.Interpolate(analyse_and_submit_cov_cmd)],
env={'COVERALLS_TOKEN':COVERALLS_REPO_TOKEN, 'CODECOV_REPO_TOKEN':CODECOV_REPO_TOKEN},
logEnviron=False,
),
])
# Add a dependent scheduler for running coverage after we build tarballs
julia_coverage_builders = ["coverage_linux64"]
julia_coverage_scheduler = schedulers.Triggerable(name="Julia Coverage Testing", builderNames=julia_coverage_builders)
c['schedulers'].append(julia_coverage_scheduler)
c['schedulers'].append(schedulers.ForceScheduler(
name="force_coverage",
label="Force coverage build",
builderNames=julia_coverage_builders,
reason=util.FixedParameter(name="reason", default=""),
codebases=[
util.CodebaseParameter(
"",
name="",
branch=util.FixedParameter(name="branch", default=""),
revision=util.FixedParameter(name="revision", default=""),
repository=util.FixedParameter(name="repository", default=""),
project=util.FixedParameter(name="project", default="Coverage"),
)
],
properties=[
util.StringParameter(
name="download_url",
size=60,
default="https://julialangnightlies-s3.julialang.org/bin/linux/x64/julia-latest-linux64.tar.gz"
),
]
))
# Add coverage builders
c['builders'].append(util.BuilderConfig(
name="coverage_linux64",
workernames=["tabularasa_" + x for x in builder_mapping["linux64"]],
tags=["Coverage"],
factory=julia_coverage_factory
))
|
mit
| -6,598,851,599,382,044,000 | 37.813725 | 180 | 0.638292 | false |
fortnight/Grammar_Game
|
Grammar_Game.py
|
1
|
13707
|
#!/usr/bin/python
import sys
import pygame
from Buttons import Buttons
from GameButton import GameButton
from MenuButton import MenuButton
from gi.repository import Gtk
class Grammar_Game:
def __init__(self):
# Set up a clock for managing the frame rate.
self.clock = pygame.time.Clock()
self.x = -100
self.y = 100
self.vx = 10
self.vy = 0
self.cur_game = None
self.cur_question = 'Question1'
self.space_height = 300
self.space_width = 1200
self.paused = True
self.direction = 1
self.bttnTestRect = pygame.Rect(self.x, self.y, 200, 100)
self.bttnTest = Buttons(self.bttnTestRect.x, self.bttnTestRect.y, self.bttnTestRect, 200, 100)
self.bttnlist = []
self.trophyList = []
self.window = "MainMenu"
def set_TrophyList(self, bttnList):
self.trophyList = bttnList
def add_to_TrophyList(self, button):
if self.trophyList == []:
self.trophyList = [button]
elif self.trophyList != []:
self.trophyList.append(button)
def flush_TrophyList(self):
self.trophyList = []
def set_ButtonList(self, bttnList):
self.bttnlist = bttnList
def add_to_ButtonList(self, button):
if self.bttnlist == []:
self.bttnlist = [button]
elif self.bttnlist != []:
self.bttnlist.append(button)
def flush_ButtonList(self):
self.bttnlist = []
def Game_question(self, screen, gamefile):
self.Title_Text(screen, gamefile.readline()[:-1])
self.Button_Text(screen, gamefile.readline()[:-1], self.bttnlist[0])
self.Button_Text(screen, gamefile.readline()[:-1], self.bttnlist[1])
self.Button_Text(screen, gamefile.readline()[:-1], self.bttnlist[2])
self.Button_Text(screen, gamefile.readline()[:-1], self.bttnlist[3])
#self.curline += 5
def Game_Screen(self, screen):
if self.cur_question == 'Trophy':
self.add_to_TrophyList(self.cur_game)
self.flush_ButtonList()
screen.fill((255, 255, 255)) # 255 for white
answer_A = pygame.draw.rect(screen, (255, 0, 0), (000, 300, 1400, 100))
answer_B = pygame.draw.rect(screen, (0, 255, 0), (000, 400, 1400, 100))
answer_C = pygame.draw.rect(screen, (255, 0, 0), (000, 500, 1400, 100))
answer_D = pygame.draw.rect(screen, (0, 255, 0), (000, 600, 1400, 100))
quit_rect = pygame.draw.rect(screen, (0, 0, 255), (000, 000, 100, 100))
bttnA = GameButton(answer_A.x, answer_A.y, answer_A, answer_A.width, answer_A.height, True)
bttnB = GameButton(answer_B.x, answer_B.y, answer_B, answer_B.width, answer_B.height, False)
bttnC = GameButton(answer_C.x, answer_C.y, answer_C, answer_C.width, answer_C.height, False)
bttnD = GameButton(answer_D.x, answer_D.y, answer_D, answer_D.width, answer_D.height, False)
bttnQuit = MenuButton(quit_rect.x, quit_rect.y, quit_rect, quit_rect.width, quit_rect.height, "GAMES")
self.add_to_ButtonList(bttnA)
self.add_to_ButtonList(bttnB)
self.add_to_ButtonList(bttnC)
self.add_to_ButtonList(bttnD)
self.add_to_ButtonList(bttnQuit)
self.Button_Text(screen, "Quit", bttnQuit)
gamefile = file(self.cur_game + '/' + self.cur_question + '.txt')
self.Game_question(screen, gamefile)
def Game_Menu(self, screen):
self.flush_ButtonList()
screen.fill((255, 255, 255)) # 255 for white
RP = pygame.draw.rect(screen, (0, 255, 0), (565, 290, 463, 111))
PP = pygame.draw.rect(screen, (255, 0, 0), (565, 416, 463, 111))
CS = pygame.draw.rect(screen, (255, 255, 0), (565, 542, 463, 111))
quit_rect = pygame.draw.rect(screen, (0, 0, 255), (535, 668, 463, 111))
bttnA = MenuButton(RP.x, RP.y, RP, RP.width, RP.height, "GAME_RP")
bttnB = MenuButton(PP.x, PP.y, PP, PP.width, PP.height, "GAME_PP")
bttnC = MenuButton(CS.x, CS.y, CS, CS.width, CS.height, "GAME_CS")
bttnQuit = MenuButton(quit_rect.x, quit_rect.y, quit_rect, quit_rect.width, quit_rect.height, "MainMenu")
self.add_to_ButtonList(bttnA)
self.add_to_ButtonList(bttnB)
self.add_to_ButtonList(bttnC)
self.add_to_ButtonList(bttnQuit)
TS = pygame.image.load(file('images/gamescreen.jpg.png'))
screen.blit(TS, (0, 0))
#self.Title_Text(screen, "Main Menu")
#self.Button_Text(screen, "Games", bttnA)
#self.Button_Text(screen, "Trophy Case", bttnB)
#self.Button_Text(screen, "Credits", bttnC)
#self.Button_Text(screen, "Exit", bttnD)
def Main_Menu(self, screen):
self.flush_ButtonList()
screen.fill((255, 255, 255)) # 255 for white
Game = pygame.draw.rect(screen, (0, 255, 0), (565, 290, 463, 111))
TrophyCase = pygame.draw.rect(screen, (255, 0, 0), (565, 416, 463, 111))
Credits = pygame.draw.rect(screen, (255, 255, 0), (565, 542, 463, 111))
Quit = pygame.draw.rect(screen, (0, 0, 255), (565, 668, 463, 111))
NinjaBear = pygame.draw.rect(screen, (255, 255, 255), (1300, 000, 100, 100))
bttnA = MenuButton(Game.x, Game.y, Game, Game.width, Game.height, "GAMES")
bttnB = MenuButton(TrophyCase.x, TrophyCase.y, TrophyCase, TrophyCase.width, TrophyCase.height, "TC")
bttnC = MenuButton(Credits.x, Credits.y, Credits, Credits.width, Credits.height, "CR")
bttnD = MenuButton(Quit.x, Quit.y, Quit, Quit.width, Quit.height, "RECTIFY")
bttnE = MenuButton(NinjaBear.x, NinjaBear.y, NinjaBear, NinjaBear.width, NinjaBear.height, "NB")
self.add_to_ButtonList(bttnA)
self.add_to_ButtonList(bttnB)
self.add_to_ButtonList(bttnC)
self.add_to_ButtonList(bttnD)
self.add_to_ButtonList(bttnE)
self.Title_Text(screen, "Main Menu")
self.Button_Text(screen, "Games", bttnA)
self.Button_Text(screen, "Trophy Case", bttnB)
self.Button_Text(screen, "Credits", bttnC)
self.Button_Text(screen, "Exit", bttnD)
TS = pygame.image.load(file('images/titlescreen.jpg.png'))
screen.blit(TS, (0, 0))
def Trophy_Case(self, screen):
self.flush_ButtonList()
quit_rect = pygame.draw.rect(screen, (0, 0, 255), (000, 000, 100, 100))
bttnQuit = MenuButton(quit_rect.x, quit_rect.y, quit_rect, quit_rect.width, quit_rect.height, "MainMenu")
self.add_to_ButtonList(bttnQuit)
self.Title_Text(screen, "Trophy Case")
self.Button_Text(screen, "Quit", bttnQuit)
for trophy in self.trophyList:
if trophy == 'games/RP':
Banana = pygame.image.load(file('images/banana.png'))
screen.blit(Banana, (100, 300))
if trophy == 'games/PP':
Watermelon = pygame.image.load(file('images/watermelon.png'))
screen.blit(Watermelon, (300, 500))
if trophy == 'games/CS':
Strawberry = pygame.image.load(file('images/Strawberry.png'))
screen.blit(Strawberry, (700, 300))
def Credits(self, screen):
self.flush_ButtonList()
screen.fill((255, 255, 255)) # 255 for white
quit_rect = pygame.draw.rect(screen, (0, 0, 255), (000, 000, 100, 100))
bttnQuit = MenuButton(quit_rect.x, quit_rect.y, quit_rect, quit_rect.width, quit_rect.height, "MainMenu")
self.add_to_ButtonList(bttnQuit)
self.Title_Text(screen, "Credits")
self.Text_Text(screen, "Graphics by Kaffys Code by Fortnight")
self.Button_Text(screen, "Quit", bttnQuit)
def Ninja_Bear(self, screen):
self.flush_ButtonList()
screen.fill((255, 255, 255)) # 255 for white
quit_rect = pygame.draw.rect(screen, (0, 0, 255), (000, 000, 100, 100))
bttnQuit = MenuButton(quit_rect.x, quit_rect.y, quit_rect, quit_rect.width, quit_rect.height, "MainMenu")
self.add_to_ButtonList(bttnQuit)
NB = pygame.image.load(file('images/Ninja-bear.png'))
center_x = (1400-159)/2
center_y = (400-228)/2
screen.blit(NB, (center_x, center_y + 300))
self.Title_Text(screen, "Ninja Bear")
self.Button_Text(screen, "Quit", bttnQuit)
# Load text somewhat in the upper middle of the screen
def Title_Text(self, screen, text):
Font = pygame.font.SysFont("monospace", 64)
Title = Font.render(text, False, (0, 0, 0))
center_x = (self.space_width - Title.get_rect().width)/2
center_y = (self.space_height - Title.get_rect().height)/2
screen.blit(Title, (center_x, center_y))
def Text_Text(self, screen, text):
Font = pygame.font.SysFont("monospace", 40)
Title = Font.render(text, False, (0, 0, 0))
center_x = (self.space_width - Title.get_rect().width)/2
center_y = (900 - Title.get_rect().height)/2
screen.blit(Title, (center_x, center_y))
def Button_Text(self, screen, text, button):
Font = pygame.font.SysFont("monospace", 40)
Title = Font.render(text, False, (0, 0, 0))
center_x = (button.size[0] - Title.get_rect().width)/2
center_y = (button.size[1] - Title.get_rect().height)/2
screen.blit(Title, (button.x + center_x, button.y + center_y))
def set_paused(self, paused):
self.paused = paused
    # Test handler for button events: toggles the paused state.
def EVENT_TEST_BUTTON(self):
if self.paused:
self.set_paused(False)
elif self.paused == False:
self.set_paused(True)
# Called to save the state of the game to the Journal.
def write_file(self, file_path):
pass
# Called to load the state of the game from the Journal.
def read_file(self, file_path):
pass
def Set_Screen(self, screen):
screen.fill((255, 255, 255)) # 255 for white
if self.window == "MainMenu":
self.cur_question = 'Question1'
self.Main_Menu(screen)
if self.window == "GAMES":
self.cur_question = 'Question1'
self.Game_Menu(screen)
if self.window == "CR":
self.Credits(screen)
if self.window == "TC":
self.Trophy_Case(screen)
if self.window == "NB":
self.Ninja_Bear(screen)
if self.window == "GAME_RP":
self.cur_game = 'games/RP'
self.Game_Screen(screen)
if self.window == "GAME_PP":
self.cur_game = 'games/PP'
self.Game_Screen(screen)
if self.window == "GAME_CS":
self.cur_game = 'games/CS'
self.Game_Screen(screen)
if self.window == "RECTIFY":
sys.exit()
def Next_Question(self):
if self.cur_question == 'Question5':
self.cur_question = 'Trophy'
if self.cur_question == 'Question4':
self.cur_question = 'Question5'
if self.cur_question == 'Question3':
self.cur_question = 'Question4'
if self.cur_question == 'Question2':
self.cur_question = 'Question3'
if self.cur_question == 'Question1':
self.cur_question = 'Question2'
def GameAnswer(self, screen, answer):
if answer:
self.Next_Question()
# The main game loop.
def run(self):
self.running = True
screen = pygame.display.get_surface()
while self.running:
# Pump GTK messages.
while Gtk.events_pending():
Gtk.main_iteration()
# Pump PyGame messages.
for event in pygame.event.get():
if event.type == pygame.QUIT:
return
elif event.type == pygame.VIDEORESIZE:
pygame.display.set_mode(event.size, pygame.RESIZABLE)
elif event.type == pygame.MOUSEBUTTONDOWN:
#print(self.bttnlist)
for bttn in self.bttnlist:
result = bttn.EVENT_CLICK()
if result != False:
if result[0]:
self.window = result[1]
elif result[0] == False:
self.GameAnswer(screen, result[1])
# Move the ball
# if not self.paused:
# self.x += self.vx * self.direction
# if self.direction == 1 and self.x > screen.get_width() + 100:
# self.x = -100
# elif self.direction == -1 and self.x < -100:
# self.x = screen.get_width() + 100
#
# self.y += self.vy
# if self.y > screen.get_height() - 100:
# self.y = screen.get_height() - 100
# self.vy = -self.vy
#
# self.vy += 5
# Clear Display
screen.fill((255, 255, 255)) # 255 for white
self.Set_Screen(screen)
#draw a rectangle
#pygame.draw.rect(screen, (255, 0, 0), (000, 300, 1400, 100))
# Draw the ball
#pygame.draw.circle(screen, (255, 0, 0), (self.x, self.y), 100)
# Flip Display
pygame.display.flip()
# Try to stay at 30 FPS
self.clock.tick(30)
# def EVENT_CLICK():
# pass
# This function is called when the game is run directly from the command line:
# ./TestGame.py
def main():
pygame.init()
pygame.display.set_mode((0, 0), pygame.RESIZABLE)
game = Grammar_Game()
game.run()
if __name__ == '__main__':
main()
|
gpl-3.0
| -7,798,434,488,820,568,000 | 40.286145 | 113 | 0.571898 | false |
tensorflow/moonlight
|
moonlight/score/reader_test.py
|
1
|
19072
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the OMR score reader."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
import librosa
from protobuf import music_pb2
from moonlight import conversions
from moonlight.protobuf import musicscore_pb2
from moonlight.score import reader
# pylint: disable=invalid-name
Glyph = musicscore_pb2.Glyph
Note = music_pb2.NoteSequence.Note
Point = musicscore_pb2.Point
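# Conventions assumed by the test cases below (inferred from the expected
# NoteSequences, not from the reader implementation itself): y_position
# counts staff steps relative to the staff center line, so with a treble
# clef y_position=0 is B4 and y_position=-6 is C4, while with a bass clef
# y_position=0 is D3. A filled notehead lasts one time unit unless beams
# shorten it; empty and whole noteheads last two and four units.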
class ReaderTest(absltest.TestCase):
def testTreble_simple(self):
staff = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=50), Point(x=100, y=50)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=0),
])
notes = conversions.page_to_notesequence(reader.ScoreReader().read_page(
musicscore_pb2.Page(system=[musicscore_pb2.StaffSystem(
staff=[staff])])))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
Note(pitch=librosa.note_to_midi('B4'), start_time=0, end_time=1)
]))
def testBass_simple(self):
staff = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=50), Point(x=100, y=50)],
glyph=[
Glyph(
type=Glyph.CLEF_BASS,
x=1,
y_position=reader.BASS_CLEF_EXPECTED_Y),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=0),
])
notes = conversions.page_to_notesequence(reader.ScoreReader().read_page(
musicscore_pb2.Page(system=[musicscore_pb2.StaffSystem(
staff=[staff])])))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
Note(pitch=librosa.note_to_midi('D3'), start_time=0, end_time=1)
]))
def testTreble_accidentals(self):
staff_1 = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=50), Point(x=100, y=50)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=-6),
Glyph(type=Glyph.FLAT, x=16, y_position=-4),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=20, y_position=-4),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=30, y_position=-2),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=40, y_position=-4),
])
staff_2 = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=150), Point(x=100, y=150)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=-6),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=20, y_position=-4),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=30, y_position=-2),
Glyph(type=Glyph.SHARP, x=35, y_position=-2),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=40, y_position=-2),
Glyph(type=Glyph.NATURAL, x=45, y_position=-2),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=50, y_position=-2),
])
notes = conversions.page_to_notesequence(reader.ScoreReader().read_page(
musicscore_pb2.Page(system=[
musicscore_pb2.StaffSystem(staff=[staff_1]),
musicscore_pb2.StaffSystem(staff=[staff_2])
])))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
# First staff.
Note(pitch=librosa.note_to_midi('C4'), start_time=0, end_time=1),
Note(pitch=librosa.note_to_midi('Eb4'), start_time=1, end_time=2),
Note(pitch=librosa.note_to_midi('G4'), start_time=2, end_time=3),
Note(pitch=librosa.note_to_midi('Eb4'), start_time=3, end_time=4),
# Second staff.
Note(pitch=librosa.note_to_midi('C4'), start_time=4, end_time=5),
Note(pitch=librosa.note_to_midi('E4'), start_time=5, end_time=6),
Note(pitch=librosa.note_to_midi('G4'), start_time=6, end_time=7),
Note(pitch=librosa.note_to_midi('G#4'), start_time=7, end_time=8),
Note(pitch=librosa.note_to_midi('G4'), start_time=8, end_time=9),
]))
def testChords(self):
stem_1 = musicscore_pb2.LineSegment(
start=Point(x=20, y=10), end=Point(x=20, y=70))
stem_2 = musicscore_pb2.LineSegment(
start=Point(x=50, y=10), end=Point(x=50, y=70))
staff = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=50), Point(x=100, y=50)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
# Chord of 2 notes.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=-4, stem=stem_1),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=-1, stem=stem_1),
# Note not attached to a stem.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=30, y_position=3),
# Chord of 3 notes.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=40, y_position=0, stem=stem_2),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=60, y_position=2, stem=stem_2),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=60, y_position=4, stem=stem_2),
])
notes = conversions.page_to_notesequence(reader.ScoreReader().read_page(
musicscore_pb2.Page(system=[musicscore_pb2.StaffSystem(
staff=[staff])])))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
# First chord.
Note(pitch=librosa.note_to_midi('E4'), start_time=0, end_time=1),
Note(pitch=librosa.note_to_midi('A4'), start_time=0, end_time=1),
# Note without a stem.
Note(pitch=librosa.note_to_midi('E5'), start_time=1, end_time=2),
# Second chord.
Note(pitch=librosa.note_to_midi('B4'), start_time=2, end_time=3),
Note(pitch=librosa.note_to_midi('D5'), start_time=2, end_time=3),
Note(pitch=librosa.note_to_midi('F5'), start_time=2, end_time=3),
]))
def testBeams(self):
beam_1 = musicscore_pb2.LineSegment(
start=Point(x=10, y=20), end=Point(x=40, y=20))
beam_2 = musicscore_pb2.LineSegment(
start=Point(x=70, y=40), end=Point(x=90, y=40))
beam_3 = musicscore_pb2.LineSegment(
start=Point(x=70, y=60), end=Point(x=90, y=60))
staff = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=50), Point(x=100, y=50)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
# 2 eighth notes.
Glyph(
type=Glyph.NOTEHEAD_FILLED, x=10, y_position=-4, beam=[beam_1]),
Glyph(
type=Glyph.NOTEHEAD_FILLED, x=40, y_position=-1, beam=[beam_1]),
# 1 quarter note.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=50, y_position=0),
# 2 sixteenth notes.
Glyph(
type=Glyph.NOTEHEAD_FILLED,
x=60,
y_position=-2,
beam=[beam_2, beam_3]),
Glyph(
type=Glyph.NOTEHEAD_FILLED,
x=90,
y_position=2,
beam=[beam_2, beam_3]),
])
notes = conversions.page_to_notesequence(reader.ScoreReader().read_page(
musicscore_pb2.Page(system=[musicscore_pb2.StaffSystem(
staff=[staff])])))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
Note(pitch=librosa.note_to_midi('E4'), start_time=0, end_time=0.5),
Note(pitch=librosa.note_to_midi('A4'), start_time=0.5, end_time=1),
Note(pitch=librosa.note_to_midi('B4'), start_time=1, end_time=2),
Note(pitch=librosa.note_to_midi('G4'), start_time=2, end_time=2.25),
Note(
pitch=librosa.note_to_midi('D5'), start_time=2.25,
end_time=2.5),
]))
def testAllNoteheadTypes(self):
staff = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=50), Point(x=100, y=50)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=-6),
Glyph(type=Glyph.NOTEHEAD_EMPTY, x=10, y_position=-6),
Glyph(type=Glyph.NOTEHEAD_WHOLE, x=10, y_position=-6),
])
notes = conversions.page_to_notesequence(reader.ScoreReader().read_page(
musicscore_pb2.Page(system=[musicscore_pb2.StaffSystem(
staff=[staff])])))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
Note(pitch=librosa.note_to_midi('C4'), start_time=0, end_time=1),
Note(pitch=librosa.note_to_midi('C4'), start_time=1, end_time=3),
Note(pitch=librosa.note_to_midi('C4'), start_time=3, end_time=7),
]))
def testStaffSystems(self):
# 2 staff systems on separate pages, each with 2 staves, and no bars.
system_1_staff_1 = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=50), Point(x=100, y=50)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=-6),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=50, y_position=-2),
])
system_1_staff_2 = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=150), Point(x=100, y=150)],
glyph=[
Glyph(
type=Glyph.CLEF_BASS,
x=2,
y_position=reader.BASS_CLEF_EXPECTED_Y),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=0),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=40, y_position=2),
# Played after the second note in the first staff, although it is to
# the left of it.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=45, y_position=4),
])
system_2_staff_1 = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=250), Point(x=100, y=250)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
Glyph(type=Glyph.REST_QUARTER, x=20, y_position=0),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=50, y_position=-2),
])
system_2_staff_2 = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=250), Point(x=100, y=250)],
glyph=[
Glyph(
type=Glyph.CLEF_BASS,
x=2,
y_position=reader.BASS_CLEF_EXPECTED_Y),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=10, y_position=0),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=40, y_position=2),
])
notes = conversions.score_to_notesequence(reader.ScoreReader()(
musicscore_pb2.Score(page=[
musicscore_pb2.Page(system=[
musicscore_pb2.StaffSystem(
staff=[system_1_staff_1, system_1_staff_2]),
]),
musicscore_pb2.Page(system=[
musicscore_pb2.StaffSystem(
staff=[system_2_staff_1, system_2_staff_2]),
]),
]),))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
# System 1, staff 1.
Note(pitch=librosa.note_to_midi('C4'), start_time=0, end_time=1),
Note(pitch=librosa.note_to_midi('G4'), start_time=1, end_time=2),
# System 1, staff 2.
Note(pitch=librosa.note_to_midi('D3'), start_time=0, end_time=1),
Note(pitch=librosa.note_to_midi('F3'), start_time=1, end_time=2),
Note(pitch=librosa.note_to_midi('A3'), start_time=2, end_time=3),
# System 2, staff 1.
# Quarter rest.
Note(pitch=librosa.note_to_midi('G4'), start_time=4, end_time=5),
# System 2, staff 2.
Note(pitch=librosa.note_to_midi('D3'), start_time=3, end_time=4),
Note(pitch=librosa.note_to_midi('F3'), start_time=4, end_time=5),
]))
def testMeasures(self):
# 2 staves in the same staff system with multiple bars.
staff_1 = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=50), Point(x=300, y=50)],
glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=1,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
# Key signature.
Glyph(type=Glyph.SHARP, x=10, y_position=+4),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=20, y_position=-2),
# Accidental.
Glyph(type=Glyph.FLAT, x=40, y_position=-1),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=50, y_position=-1),
# Second bar.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=120, y_position=0),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=180, y_position=+4),
# Third bar.
# Accidental not propagated to this note.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=220, y_position=-1),
])
staff_2 = musicscore_pb2.Staff(
staffline_distance=10,
center_line=[Point(x=0, y=150), Point(x=300, y=150)],
glyph=[
Glyph(
type=Glyph.CLEF_BASS,
x=1,
y_position=reader.BASS_CLEF_EXPECTED_Y),
# Key signature.
Glyph(type=Glyph.FLAT, x=15, y_position=-2),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=20, y_position=-2),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=50, y_position=+2),
# Second bar.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=150, y_position=-2),
# Third bar.
Glyph(type=Glyph.REST_QUARTER, x=220, y_position=0),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=280, y_position=-2),
])
staff_system = musicscore_pb2.StaffSystem(
staff=[staff_1, staff_2],
bar=[_bar(0), _bar(100), _bar(200),
_bar(300)])
notes = conversions.page_to_notesequence(reader.ScoreReader().read_page(
musicscore_pb2.Page(system=[staff_system])))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
# Staff 1, bar 1.
Note(pitch=librosa.note_to_midi('G4'), start_time=0, end_time=1),
Note(pitch=librosa.note_to_midi('Ab4'), start_time=1, end_time=2),
# Staff 1, bar 2.
Note(pitch=librosa.note_to_midi('B4'), start_time=2, end_time=3),
Note(pitch=librosa.note_to_midi('F#5'), start_time=3, end_time=4),
# Staff 1, bar 3.
Note(pitch=librosa.note_to_midi('A4'), start_time=4, end_time=5),
# Staff 2, bar 1.
Note(pitch=librosa.note_to_midi('Bb2'), start_time=0, end_time=1),
Note(pitch=librosa.note_to_midi('F3'), start_time=1, end_time=2),
# Staff 2, bar 2.
Note(pitch=librosa.note_to_midi('Bb2'), start_time=2, end_time=3),
# Staff 2, bar 3.
Note(pitch=librosa.note_to_midi('Bb2'), start_time=5, end_time=6),
]))
def testKeySignatures(self):
# One staff per system, two systems.
staff_1 = musicscore_pb2.Staff(glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=5,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
# D major key signature.
Glyph(type=Glyph.SHARP, x=15, y_position=+4),
Glyph(type=Glyph.SHARP, x=25, y_position=+1),
# Accidental which cannot be interpreted as part of the key
# signature.
Glyph(type=Glyph.SHARP, x=35, y_position=+2),
Glyph(type=Glyph.NOTEHEAD_FILLED, x=45, y_position=+2), # D#5
Glyph(type=Glyph.NOTEHEAD_EMPTY, x=55, y_position=+1), # C#5
Glyph(type=Glyph.NOTEHEAD_FILLED, x=65, y_position=-3), # F#4
# New measure. The key signature should be retained.
Glyph(type=Glyph.NOTEHEAD_EMPTY, x=105, y_position=-3), # F#4
Glyph(type=Glyph.NOTEHEAD_FILLED, x=125, y_position=+1), # C#5
# Accidental is not retained.
Glyph(type=Glyph.NOTEHEAD_FILLED, x=145, y_position=+2), # D5
])
staff_2 = musicscore_pb2.Staff(glyph=[
Glyph(
type=Glyph.CLEF_TREBLE,
x=5,
y_position=reader.TREBLE_CLEF_EXPECTED_Y),
# No key signature on this line. No accidentals.
Glyph(type=Glyph.NOTEHEAD_EMPTY, x=25, y_position=-3), # F4
Glyph(type=Glyph.NOTEHEAD_EMPTY, x=45, y_position=+1), # C5
])
notes = conversions.page_to_notesequence(reader.ScoreReader().read_page(
musicscore_pb2.Page(system=[
musicscore_pb2.StaffSystem(
staff=[staff_1], bar=[_bar(0), _bar(100),
_bar(200)]),
musicscore_pb2.StaffSystem(staff=[staff_2]),
])))
self.assertEqual(
notes,
music_pb2.NoteSequence(notes=[
# First measure.
Note(pitch=librosa.note_to_midi('D#5'), start_time=0, end_time=1),
Note(pitch=librosa.note_to_midi('C#5'), start_time=1, end_time=3),
Note(pitch=librosa.note_to_midi('F#4'), start_time=3, end_time=4),
# Second measure.
Note(pitch=librosa.note_to_midi('F#4'), start_time=4, end_time=6),
Note(pitch=librosa.note_to_midi('C#5'), start_time=6, end_time=7),
Note(pitch=librosa.note_to_midi('D5'), start_time=7, end_time=8),
# Third measure on a new line, with no key signature.
Note(pitch=librosa.note_to_midi('F4'), start_time=8, end_time=10),
Note(pitch=librosa.note_to_midi('C5'), start_time=10, end_time=12),
]))
def _bar(x):
return musicscore_pb2.StaffSystem.Bar(
x=x, type=musicscore_pb2.StaffSystem.Bar.STANDARD_BAR)
if __name__ == '__main__':
absltest.main()
|
apache-2.0
| 5,633,878,991,176,298,000 | 40.641921 | 80 | 0.566328 | false |
psiwczak/openstack
|
nova/utils.py
|
1
|
53103
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import contextlib
import datetime
import errno
import functools
import hashlib
import inspect
import itertools
import json
import os
import pyclbr
import random
import re
import shlex
import shutil
import socket
import struct
import sys
import tempfile
import threading
import time
import types
import uuid
import warnings
from xml.sax import saxutils
from eventlet import corolocal
from eventlet import event
from eventlet import greenthread
from eventlet import semaphore
from eventlet.green import subprocess
import iso8601
import lockfile
import netaddr
from nova import exception
from nova import flags
from nova import log as logging
from nova.openstack.common import cfg
from nova.openstack.common import excutils
from nova.openstack.common import importutils
LOG = logging.getLogger(__name__)
ISO_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
FLAGS = flags.FLAGS
FLAGS.register_opt(
cfg.BoolOpt('disable_process_locking', default=False,
help='Whether to disable inter-process locks'))
def vpn_ping(address, port, timeout=0.05, session_id=None):
"""Sends a vpn negotiation packet and returns the server session.
Returns False on a failure. Basic packet structure is below.
Client packet (14 bytes)::
0 1 8 9 13
+-+--------+-----+
|x| cli_id |?????|
+-+--------+-----+
x = packet identifier 0x38
cli_id = 64 bit identifier
? = unknown, probably flags/padding
Server packet (26 bytes)::
     0 1 8 9 13 14 21 22 25
+-+--------+-----+--------+----+
|x| srv_id |?????| cli_id |????|
+-+--------+-----+--------+----+
x = packet identifier 0x40
cli_id = 64 bit identifier
? = unknown, probably flags/padding
bit 9 was 1 and the rest were 0 in testing
"""
if session_id is None:
session_id = random.randint(0, 0xffffffffffffffff)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
data = struct.pack('!BQxxxxx', 0x38, session_id)
sock.sendto(data, (address, port))
sock.settimeout(timeout)
try:
received = sock.recv(2048)
except socket.timeout:
return False
finally:
sock.close()
fmt = '!BQxxxxxQxxxx'
if len(received) != struct.calcsize(fmt):
print struct.calcsize(fmt)
return False
(identifier, server_sess, client_sess) = struct.unpack(fmt, received)
if identifier == 0x40 and client_sess == session_id:
return server_sess
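# Illustrative only -- nothing in this module calls vpn_ping directly.
# A sketch of probing an endpoint (the address and port are placeholders):
#
#     session = vpn_ping('10.0.0.1', 1194, timeout=0.5)
#     if session:
#         LOG.debug(_('endpoint responded with server session %x'), session)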
def fetchfile(url, target):
LOG.debug(_('Fetching %s') % url)
execute('curl', '--fail', url, '-o', target)
def execute(*cmd, **kwargs):
"""Helper method to execute command with optional retry.
If you add a run_as_root=True command, don't forget to add the
corresponding filter to nova.rootwrap !
:param cmd: Passed to subprocess.Popen.
:param process_input: Send to opened process.
:param check_exit_code: Single bool, int, or list of allowed exit
codes. Defaults to [0]. Raise
exception.ProcessExecutionError unless
program exits with one of these code.
:param delay_on_retry: True | False. Defaults to True. If set to
True, wait a short amount of time
before retrying.
:param attempts: How many times to retry cmd.
:param run_as_root: True | False. Defaults to False. If set to True,
the command is prefixed by the command specified
in the root_helper FLAG.
:raises exception.NovaException: on receiving unknown arguments
:raises exception.ProcessExecutionError:
:returns: a tuple, (stdout, stderr) from the spawned process, or None if
the command fails.
"""
process_input = kwargs.pop('process_input', None)
check_exit_code = kwargs.pop('check_exit_code', [0])
ignore_exit_code = False
if isinstance(check_exit_code, bool):
ignore_exit_code = not check_exit_code
check_exit_code = [0]
elif isinstance(check_exit_code, int):
check_exit_code = [check_exit_code]
delay_on_retry = kwargs.pop('delay_on_retry', True)
attempts = kwargs.pop('attempts', 1)
run_as_root = kwargs.pop('run_as_root', False)
shell = kwargs.pop('shell', False)
if len(kwargs):
raise exception.NovaException(_('Got unknown keyword args '
'to utils.execute: %r') % kwargs)
if run_as_root:
cmd = shlex.split(FLAGS.root_helper) + list(cmd)
cmd = map(str, cmd)
while attempts > 0:
attempts -= 1
try:
LOG.debug(_('Running cmd (subprocess): %s'), ' '.join(cmd))
_PIPE = subprocess.PIPE # pylint: disable=E1101
obj = subprocess.Popen(cmd,
stdin=_PIPE,
stdout=_PIPE,
stderr=_PIPE,
close_fds=True,
shell=shell)
result = None
if process_input is not None:
result = obj.communicate(process_input)
else:
result = obj.communicate()
obj.stdin.close() # pylint: disable=E1101
_returncode = obj.returncode # pylint: disable=E1101
if _returncode:
LOG.debug(_('Result was %s') % _returncode)
if not ignore_exit_code and _returncode not in check_exit_code:
(stdout, stderr) = result
raise exception.ProcessExecutionError(
exit_code=_returncode,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
return result
except exception.ProcessExecutionError:
if not attempts:
raise
else:
LOG.debug(_('%r failed. Retrying.'), cmd)
if delay_on_retry:
greenthread.sleep(random.randint(20, 200) / 100.0)
finally:
# NOTE(termie): this appears to be necessary to let the subprocess
# call clean something up in between calls, without
# it two execute calls in a row hangs the second one
greenthread.sleep(0)
def trycmd(*args, **kwargs):
"""
A wrapper around execute() to more easily handle warnings and errors.
Returns an (out, err) tuple of strings containing the output of
the command's stdout and stderr. If 'err' is not empty then the
command can be considered to have failed.
:discard_warnings True | False. Defaults to False. If set to True,
then for succeeding commands, stderr is cleared
"""
discard_warnings = kwargs.pop('discard_warnings', False)
try:
out, err = execute(*args, **kwargs)
failed = False
except exception.ProcessExecutionError, exn:
out, err = '', str(exn)
LOG.debug(err)
failed = True
if not failed and discard_warnings and err:
# Handle commands that output to stderr but otherwise succeed
LOG.debug(err)
err = ''
return out, err
def ssh_execute(ssh, cmd, process_input=None,
addl_env=None, check_exit_code=True):
LOG.debug(_('Running cmd (SSH): %s'), ' '.join(cmd))
if addl_env:
raise exception.NovaException(_('Environment not supported over SSH'))
if process_input:
# This is (probably) fixable if we need it...
msg = _('process_input not supported over SSH')
raise exception.NovaException(msg)
stdin_stream, stdout_stream, stderr_stream = ssh.exec_command(cmd)
channel = stdout_stream.channel
#stdin.write('process_input would go here')
#stdin.flush()
# NOTE(justinsb): This seems suspicious...
# ...other SSH clients have buffering issues with this approach
stdout = stdout_stream.read()
stderr = stderr_stream.read()
stdin_stream.close()
exit_status = channel.recv_exit_status()
# exit_status == -1 if no exit code was returned
if exit_status != -1:
LOG.debug(_('Result was %s') % exit_status)
if check_exit_code and exit_status != 0:
raise exception.ProcessExecutionError(exit_code=exit_status,
stdout=stdout,
stderr=stderr,
cmd=' '.join(cmd))
return (stdout, stderr)
def novadir():
import nova
return os.path.abspath(nova.__file__).split('nova/__init__.py')[0]
def default_flagfile(filename='nova.conf', args=None):
if args is None:
args = sys.argv
for arg in args:
if arg.find('flagfile') != -1:
return arg[arg.index('flagfile') + len('flagfile') + 1:]
else:
if not os.path.isabs(filename):
# turn relative filename into an absolute path
script_dir = os.path.dirname(inspect.stack()[-1][1])
filename = os.path.abspath(os.path.join(script_dir, filename))
if not os.path.exists(filename):
filename = "./nova.conf"
if not os.path.exists(filename):
filename = '/etc/nova/nova.conf'
if os.path.exists(filename):
flagfile = '--flagfile=%s' % filename
args.insert(1, flagfile)
return filename
def debug(arg):
LOG.debug(_('debug in callback: %s'), arg)
return arg
def generate_uid(topic, size=8):
characters = '01234567890abcdefghijklmnopqrstuvwxyz'
choices = [random.choice(characters) for _x in xrange(size)]
return '%s-%s' % (topic, ''.join(choices))
# Default symbols to use for passwords. Avoids visually confusing characters.
# ~6 bits per symbol
DEFAULT_PASSWORD_SYMBOLS = ('23456789', # Removed: 0,1
'ABCDEFGHJKLMNPQRSTUVWXYZ', # Removed: I, O
'abcdefghijkmnopqrstuvwxyz') # Removed: l
# ~5 bits per symbol
EASIER_PASSWORD_SYMBOLS = ('23456789', # Removed: 0, 1
'ABCDEFGHJKLMNPQRSTUVWXYZ') # Removed: I, O
def last_completed_audit_period(unit=None):
"""This method gives you the most recently *completed* audit period.
arguments:
units: string, one of 'hour', 'day', 'month', 'year'
Periods normally begin at the beginning (UTC) of the
period unit (So a 'day' period begins at midnight UTC,
a 'month' unit on the 1st, a 'year' on Jan, 1)
unit string may be appended with an optional offset
like so: 'day@18' This will begin the period at 18:00
UTC. 'month@15' starts a monthly period on the 15th,
and year@3 begins a yearly one on March 1st.
returns: 2 tuple of datetimes (begin, end)
The begin timestamp of this audit period is the same as the
end of the previous."""
if not unit:
unit = FLAGS.instance_usage_audit_period
offset = 0
if '@' in unit:
unit, offset = unit.split("@", 1)
offset = int(offset)
rightnow = utcnow()
if unit not in ('month', 'day', 'year', 'hour'):
raise ValueError('Time period must be hour, day, month or year')
if unit == 'month':
if offset == 0:
offset = 1
end = datetime.datetime(day=offset,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
year = rightnow.year
if 1 >= rightnow.month:
year -= 1
month = 12 + (rightnow.month - 1)
else:
month = rightnow.month - 1
end = datetime.datetime(day=offset,
month=month,
year=year)
year = end.year
if 1 >= end.month:
year -= 1
month = 12 + (end.month - 1)
else:
month = end.month - 1
begin = datetime.datetime(day=offset, month=month, year=year)
elif unit == 'year':
if offset == 0:
offset = 1
end = datetime.datetime(day=1, month=offset, year=rightnow.year)
if end >= rightnow:
end = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 2)
else:
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
elif unit == 'day':
end = datetime.datetime(hour=offset,
day=rightnow.day,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
end = end - datetime.timedelta(days=1)
begin = end - datetime.timedelta(days=1)
elif unit == 'hour':
end = rightnow.replace(minute=offset, second=0, microsecond=0)
if end >= rightnow:
end = end - datetime.timedelta(hours=1)
begin = end - datetime.timedelta(hours=1)
return (begin, end)
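# Worked example of the offset syntax described in the docstring above
# (dates are illustrative): with unit='day@18' and a current UTC time of
# 2012-03-10 12:00, the 18:00 boundary on the 10th has not been reached
# yet, so the most recently completed period is
# (2012-03-08 18:00, 2012-03-09 18:00).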
def generate_password(length=20, symbolgroups=DEFAULT_PASSWORD_SYMBOLS):
"""Generate a random password from the supplied symbol groups.
At least one symbol from each group will be included. Unpredictable
results if length is less than the number of symbol groups.
Believed to be reasonably secure (with a reasonable password length!)
"""
r = random.SystemRandom()
# NOTE(jerdfelt): Some password policies require at least one character
# from each group of symbols, so start off with one random character
# from each symbol group
password = [r.choice(s) for s in symbolgroups]
# If length < len(symbolgroups), the leading characters will only
# be from the first length groups. Try our best to not be predictable
# by shuffling and then truncating.
r.shuffle(password)
password = password[:length]
length -= len(password)
# then fill with random characters from all symbol groups
symbols = ''.join(symbolgroups)
password.extend([r.choice(symbols) for _i in xrange(length)])
# finally shuffle to ensure first x characters aren't from a
# predictable group
r.shuffle(password)
return ''.join(password)
def last_octet(address):
return int(address.split('.')[-1])
def get_my_linklocal(interface):
try:
if_str = execute('ip', '-f', 'inet6', '-o', 'addr', 'show', interface)
        condition = r'\s+inet6\s+([0-9a-f:]+)/\d+\s+scope\s+link'
links = [re.search(condition, x) for x in if_str[0].split('\n')]
address = [w.group(1) for w in links if w is not None]
if address[0] is not None:
return address[0]
else:
msg = _('Link Local address is not found.:%s') % if_str
raise exception.NovaException(msg)
except Exception as ex:
msg = _("Couldn't get Link Local IP of %(interface)s"
" :%(ex)s") % locals()
raise exception.NovaException(msg)
def utcnow():
"""Overridable version of utils.utcnow."""
if utcnow.override_time:
return utcnow.override_time
return datetime.datetime.utcnow()
utcnow.override_time = None
def is_older_than(before, seconds):
"""Return True if before is older than seconds."""
return utcnow() - before > datetime.timedelta(seconds=seconds)
def utcnow_ts():
"""Timestamp version of our utcnow function."""
return time.mktime(utcnow().timetuple())
def set_time_override(override_time=datetime.datetime.utcnow()):
"""Override utils.utcnow to return a constant time."""
utcnow.override_time = override_time
def advance_time_delta(timedelta):
"""Advance overriden time using a datetime.timedelta."""
assert(not utcnow.override_time is None)
utcnow.override_time += timedelta
def advance_time_seconds(seconds):
"""Advance overriden time by seconds."""
advance_time_delta(datetime.timedelta(0, seconds))
def clear_time_override():
"""Remove the overridden time."""
utcnow.override_time = None
def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
"""Returns formatted utcnow."""
if not at:
at = utcnow()
return at.strftime(fmt)
def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
"""Turn a formatted time back into a datetime."""
return datetime.datetime.strptime(timestr, fmt)
def isotime(at=None):
"""Stringify time in ISO 8601 format"""
if not at:
at = datetime.datetime.utcnow()
str = at.strftime(ISO_TIME_FORMAT)
tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
str += ('Z' if tz == 'UTC' else tz)
return str
def parse_isotime(timestr):
"""Turn an iso formatted time back into a datetime."""
try:
return iso8601.parse_date(timestr)
except (iso8601.ParseError, TypeError) as e:
raise ValueError(e.message)
def normalize_time(timestamp):
"""Normalize time in arbitrary timezone to UTC"""
offset = timestamp.utcoffset()
return timestamp.replace(tzinfo=None) - offset if offset else timestamp
def parse_mailmap(mailmap='.mailmap'):
mapping = {}
if os.path.exists(mailmap):
fp = open(mailmap, 'r')
for l in fp:
l = l.strip()
if not l.startswith('#') and ' ' in l:
canonical_email, alias = l.split(' ')
mapping[alias.lower()] = canonical_email.lower()
return mapping
def str_dict_replace(s, mapping):
for s1, s2 in mapping.iteritems():
s = s.replace(s1, s2)
return s
class LazyPluggable(object):
"""A pluggable backend loaded lazily based on some value."""
def __init__(self, pivot, **backends):
self.__backends = backends
self.__pivot = pivot
self.__backend = None
def __get_backend(self):
if not self.__backend:
backend_name = FLAGS[self.__pivot]
if backend_name not in self.__backends:
msg = _('Invalid backend: %s') % backend_name
raise exception.NovaException(msg)
backend = self.__backends[backend_name]
if isinstance(backend, tuple):
name = backend[0]
fromlist = backend[1]
else:
name = backend
fromlist = backend
self.__backend = __import__(name, None, None, fromlist)
LOG.debug(_('backend %s'), self.__backend)
return self.__backend
def __getattr__(self, key):
backend = self.__get_backend()
return getattr(backend, key)
class LoopingCallDone(Exception):
"""Exception to break out and stop a LoopingCall.
The poll-function passed to LoopingCall can raise this exception to
break out of the loop normally. This is somewhat analogous to
StopIteration.
An optional return-value can be included as the argument to the exception;
this return-value will be returned by LoopingCall.wait()
"""
def __init__(self, retvalue=True):
""":param retvalue: Value that LoopingCall.wait() should return."""
self.retvalue = retvalue
class LoopingCall(object):
def __init__(self, f=None, *args, **kw):
self.args = args
self.kw = kw
self.f = f
self._running = False
def start(self, interval, initial_delay=None):
self._running = True
done = event.Event()
def _inner():
if initial_delay:
greenthread.sleep(initial_delay)
try:
while self._running:
self.f(*self.args, **self.kw)
if not self._running:
break
greenthread.sleep(interval)
except LoopingCallDone, e:
self.stop()
done.send(e.retvalue)
except Exception:
LOG.exception(_('in looping call'))
done.send_exception(*sys.exc_info())
return
else:
done.send(True)
self.done = done
greenthread.spawn(_inner)
return self.done
def stop(self):
self._running = False
def wait(self):
return self.done.wait()
def xhtml_escape(value):
"""Escapes a string so it is valid within XML or XHTML.
"""
return saxutils.escape(value, {'"': '"', "'": '''})
def utf8(value):
"""Try to turn a string into utf-8 if possible.
Code is directly from the utf8 function in
http://github.com/facebook/tornado/blob/master/tornado/escape.py
"""
if isinstance(value, unicode):
return value.encode('utf-8')
assert isinstance(value, str)
return value
def to_primitive(value, convert_instances=False, level=0):
"""Convert a complex object into primitives.
Handy for JSON serialization. We can optionally handle instances,
but since this is a recursive function, we could have cyclical
data structures.
To handle cyclical data structures we could track the actual objects
visited in a set, but not all objects are hashable. Instead we just
track the depth of the object inspections and don't go too deep.
Therefore, convert_instances=True is lossy ... be aware.
"""
nasty = [inspect.ismodule, inspect.isclass, inspect.ismethod,
inspect.isfunction, inspect.isgeneratorfunction,
inspect.isgenerator, inspect.istraceback, inspect.isframe,
inspect.iscode, inspect.isbuiltin, inspect.isroutine,
inspect.isabstract]
for test in nasty:
if test(value):
return unicode(value)
# value of itertools.count doesn't get caught by inspects
# above and results in infinite loop when list(value) is called.
if type(value) == itertools.count:
return unicode(value)
# FIXME(vish): Workaround for LP bug 852095. Without this workaround,
# tests that raise an exception in a mocked method that
# has a @wrap_exception with a notifier will fail. If
# we up the dependency to 0.5.4 (when it is released) we
# can remove this workaround.
if getattr(value, '__module__', None) == 'mox':
return 'mock'
if level > 3:
return '?'
# The try block may not be necessary after the class check above,
# but just in case ...
try:
if isinstance(value, (list, tuple)):
o = []
for v in value:
o.append(to_primitive(v, convert_instances=convert_instances,
level=level))
return o
elif isinstance(value, dict):
o = {}
for k, v in value.iteritems():
o[k] = to_primitive(v, convert_instances=convert_instances,
level=level)
return o
elif isinstance(value, datetime.datetime):
return str(value)
elif hasattr(value, 'iteritems'):
return to_primitive(dict(value.iteritems()),
convert_instances=convert_instances,
level=level)
elif hasattr(value, '__iter__'):
            return to_primitive(list(value),
                                convert_instances=convert_instances,
                                level=level)
elif convert_instances and hasattr(value, '__dict__'):
# Likely an instance of something. Watch for cycles.
# Ignore class member vars.
return to_primitive(value.__dict__,
convert_instances=convert_instances,
level=level + 1)
else:
return value
except TypeError, e:
# Class objects are tricky since they may define something like
# __iter__ defined but it isn't callable as list().
return unicode(value)
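# Rough doctest-style sketch of the conversion above (datetimes become
# strings, tuples become lists, plain dicts pass through):
#
#     >>> to_primitive({'a': [datetime.datetime(2012, 1, 1), ('b', 'c')]})
#     {'a': ['2012-01-01 00:00:00', ['b', 'c']]}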
def dumps(value):
try:
return json.dumps(value)
except TypeError:
pass
return json.dumps(to_primitive(value))
def loads(s):
return json.loads(s)
try:
import anyjson
except ImportError:
pass
else:
anyjson._modules.append(("nova.utils", "dumps", TypeError,
"loads", ValueError))
anyjson.force_implementation("nova.utils")
class GreenLockFile(lockfile.FileLock):
"""Implementation of lockfile that allows for a lock per greenthread.
    Simply re-implements lockfile.LockBase's init with an additional suffix
    on the unique name: the greenthread identifier.
"""
def __init__(self, path, threaded=True):
self.path = path
self.lock_file = os.path.abspath(path) + ".lock"
self.hostname = socket.gethostname()
self.pid = os.getpid()
if threaded:
t = threading.current_thread()
# Thread objects in Python 2.4 and earlier do not have ident
# attrs. Worm around that.
ident = getattr(t, "ident", hash(t)) or hash(t)
gident = corolocal.get_ident()
self.tname = "-%x-%x" % (ident & 0xffffffff, gident & 0xffffffff)
else:
self.tname = ""
dirname = os.path.dirname(self.lock_file)
self.unique_name = os.path.join(dirname,
"%s%s.%s" % (self.hostname,
self.tname,
self.pid))
_semaphores = {}
def synchronized(name, external=False):
"""Synchronization decorator.
Decorating a method like so::
@synchronized('mylock')
def foo(self, *args):
...
    ensures that only one thread will execute the foo method at a time.
Different methods can share the same lock::
@synchronized('mylock')
def foo(self, *args):
...
@synchronized('mylock')
def bar(self, *args):
...
This way only one of either foo or bar can be executing at a time.
The external keyword argument denotes whether this lock should work across
    multiple processes. This means that if two different workers both run a
    method decorated with @synchronized('mylock', external=True), only one
of them will execute at a time.
Important limitation: you can only have one external lock running per
thread at a time. For example the following will fail:
@utils.synchronized('testlock1', external=True)
def outer_lock():
@utils.synchronized('testlock2', external=True)
def inner_lock():
pass
inner_lock()
outer_lock()
"""
def wrap(f):
@functools.wraps(f)
def inner(*args, **kwargs):
# NOTE(soren): If we ever go natively threaded, this will be racy.
# See http://stackoverflow.com/questions/5390569/dyn
# amically-allocating-and-destroying-mutexes
if name not in _semaphores:
_semaphores[name] = semaphore.Semaphore()
sem = _semaphores[name]
LOG.debug(_('Attempting to grab semaphore "%(lock)s" for method '
'"%(method)s"...'), {'lock': name,
'method': f.__name__})
with sem:
LOG.debug(_('Got semaphore "%(lock)s" for method '
'"%(method)s"...'), {'lock': name,
'method': f.__name__})
if external and not FLAGS.disable_process_locking:
LOG.debug(_('Attempting to grab file lock "%(lock)s" for '
'method "%(method)s"...'),
{'lock': name, 'method': f.__name__})
lock_file_path = os.path.join(FLAGS.lock_path,
'nova-%s' % name)
lock = GreenLockFile(lock_file_path)
with lock:
LOG.debug(_('Got file lock "%(lock)s" for '
'method "%(method)s"...'),
{'lock': name, 'method': f.__name__})
retval = f(*args, **kwargs)
else:
retval = f(*args, **kwargs)
# If no-one else is waiting for it, delete it.
# See note about possible raciness above.
if not sem.balance < 1:
del _semaphores[name]
return retval
return inner
return wrap
def cleanup_file_locks():
"""clean up stale locks left behind by process failures
The lockfile module, used by @synchronized, can leave stale lockfiles
behind after process failure. These locks can cause process hangs
at startup, when a process deadlocks on a lock which will never
be unlocked.
Intended to be called at service startup.
"""
# NOTE(mikeyp) this routine incorporates some internal knowledge
# from the lockfile module, and this logic really
# should be part of that module.
#
# cleanup logic:
# 1) look for the lockfile modules's 'sentinel' files, of the form
# hostname.[thread-.*]-pid, extract the pid.
# if pid doesn't match a running process, delete the file since
# it's from a dead process.
# 2) check for the actual lockfiles. if lockfile exists with linkcount
# of 1, it's bogus, so delete it. A link count >= 2 indicates that
# there are probably sentinels still linked to it from active
# processes. This check isn't perfect, but there is no way to
# reliably tell which sentinels refer to which lock in the
# lockfile implementation.
if FLAGS.disable_process_locking:
return
hostname = socket.gethostname()
sentinel_re = hostname + r'\..*-(\d+$)'
lockfile_re = r'nova-.*\.lock'
files = os.listdir(FLAGS.lock_path)
# cleanup sentinels
for filename in files:
match = re.match(sentinel_re, filename)
if match is None:
continue
pid = match.group(1)
LOG.debug(_('Found sentinel %(filename)s for pid %(pid)s'),
{'filename': filename, 'pid': pid})
try:
os.kill(int(pid), 0)
except OSError, e:
# PID wasn't found
delete_if_exists(os.path.join(FLAGS.lock_path, filename))
LOG.debug(_('Cleaned sentinel %(filename)s for pid %(pid)s'),
{'filename': filename, 'pid': pid})
# cleanup lock files
for filename in files:
match = re.match(lockfile_re, filename)
if match is None:
continue
try:
stat_info = os.stat(os.path.join(FLAGS.lock_path, filename))
except OSError as e:
if e.errno == errno.ENOENT:
continue
else:
raise
LOG.debug(_('Found lockfile %(file)s with link count %(count)d'),
{'file': filename, 'count': stat_info.st_nlink})
if stat_info.st_nlink == 1:
delete_if_exists(os.path.join(FLAGS.lock_path, filename))
LOG.debug(_('Cleaned lockfile %(file)s with link count %(count)d'),
{'file': filename, 'count': stat_info.st_nlink})
def delete_if_exists(pathname):
"""delete a file, but ignore file not found error"""
try:
os.unlink(pathname)
except OSError as e:
if e.errno == errno.ENOENT:
return
else:
raise
def get_from_path(items, path):
"""Returns a list of items matching the specified path.
Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each item
in items, looks up items[prop1][prop2][prop3]. Like XPath, if any of the
intermediate results are lists it will treat each list item individually.
    A 'None' in items or in any child expression is ignored; this function
    will not raise because of a None anywhere in items, and the returned
    list will contain no None values.
"""
if path is None:
raise exception.NovaException('Invalid mini_xpath')
(first_token, sep, remainder) = path.partition('/')
if first_token == '':
raise exception.NovaException('Invalid mini_xpath')
results = []
if items is None:
return results
if not isinstance(items, list):
# Wrap single objects in a list
items = [items]
for item in items:
if item is None:
continue
get_method = getattr(item, 'get', None)
if get_method is None:
continue
child = get_method(first_token)
if child is None:
continue
if isinstance(child, list):
# Flatten intermediate lists
for x in child:
results.append(x)
else:
results.append(child)
if not sep:
# No more tokens
return results
else:
return get_from_path(results, remainder)
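def _example_get_from_path():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module): shows how an XPath-like expression walks nested dicts/lists
    # and silently skips None values.
    items = [{'a': {'b': 1}}, {'a': [{'b': 2}, {'b': 3}]}, {'a': None}]
    assert get_from_path(items, 'a/b') == [1, 2, 3]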
def flatten_dict(dict_, flattened=None):
"""Recursively flatten a nested dictionary."""
flattened = flattened or {}
for key, value in dict_.iteritems():
if hasattr(value, 'iteritems'):
flatten_dict(value, flattened)
else:
flattened[key] = value
return flattened
def partition_dict(dict_, keys):
"""Return two dicts, one with `keys` the other with everything else."""
intersection = {}
difference = {}
for key, value in dict_.iteritems():
if key in keys:
intersection[key] = value
else:
difference[key] = value
return intersection, difference
def map_dict_keys(dict_, key_map):
"""Return a dict in which the dictionaries keys are mapped to new keys."""
mapped = {}
for key, value in dict_.iteritems():
mapped_key = key_map[key] if key in key_map else key
mapped[mapped_key] = value
return mapped
def subset_dict(dict_, keys):
"""Return a dict that only contains a subset of keys."""
subset = partition_dict(dict_, keys)[0]
return subset
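def _example_dict_helpers():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module) for the small dict helpers defined above.
    d = {'a': 1, 'b': 2, 'c': 3}
    assert flatten_dict({'x': {'y': 9}}) == {'y': 9}
    assert partition_dict(d, ['a']) == ({'a': 1}, {'b': 2, 'c': 3})
    assert map_dict_keys(d, {'a': 'alpha'}) == {'alpha': 1, 'b': 2, 'c': 3}
    assert subset_dict(d, ['b', 'c']) == {'b': 2, 'c': 3}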
def check_isinstance(obj, cls):
"""Checks that obj is of type cls, and lets PyLint infer types."""
if isinstance(obj, cls):
return obj
raise Exception(_('Expected object of type: %s') % (str(cls)))
def parse_server_string(server_str):
"""
    Parses the given server_string and returns a (host, port) tuple.
    If the string is not a combination of host and port, the port element
    is an empty string. If the input is an invalid expression, a tuple of
    empty strings is returned.
"""
try:
# First of all, exclude pure IPv6 address (w/o port).
if netaddr.valid_ipv6(server_str):
return (server_str, '')
# Next, check if this is IPv6 address with a port number combination.
if server_str.find("]:") != -1:
(address, port) = server_str.replace('[', '', 1).split(']:')
return (address, port)
# Third, check if this is a combination of an address and a port
if server_str.find(':') == -1:
return (server_str, '')
# This must be a combination of an address and a port
(address, port) = server_str.split(':')
return (address, port)
except Exception:
LOG.error(_('Invalid server_string: %s'), server_str)
return ('', '')
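def _example_parse_server_string():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module): expected (host, port) results for the input shapes above.
    assert parse_server_string('192.168.1.1:8080') == ('192.168.1.1', '8080')
    assert parse_server_string('[::1]:80') == ('::1', '80')
    assert parse_server_string('::1') == ('::1', '')
    assert parse_server_string('hostname') == ('hostname', '')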
def gen_uuid():
return uuid.uuid4()
def is_uuid_like(val):
"""For our purposes, a UUID is a string in canonical form:
aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa
"""
try:
uuid.UUID(val)
return True
except (TypeError, ValueError, AttributeError):
return False
def bool_from_str(val):
"""Convert a string representation of a bool into a bool value"""
if not val:
return False
try:
return True if int(val) else False
except ValueError:
return val.lower() == 'true'
def is_valid_ipv4(address):
"""valid the address strictly as per format xxx.xxx.xxx.xxx.
where xxx is a value between 0 and 255.
"""
parts = address.split(".")
if len(parts) != 4:
return False
for item in parts:
try:
if not 0 <= int(item) <= 255:
return False
except ValueError:
return False
return True
def is_valid_cidr(address):
"""Check if the provided ipv4 or ipv6 address is a valid
CIDR address or not"""
try:
# Validate the correct CIDR Address
netaddr.IPNetwork(address)
except netaddr.core.AddrFormatError:
return False
except UnboundLocalError:
# NOTE(MotoKen): work around bug in netaddr 0.7.5 (see detail in
# https://github.com/drkjam/netaddr/issues/2)
return False
    # The prior validation only partially verifies the /xx part;
    # verify it fully here.
ip_segment = address.split('/')
if (len(ip_segment) <= 1 or
ip_segment[1] == ''):
return False
return True
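def _example_address_validation():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module) for the two validators above.
    assert is_valid_ipv4('10.0.0.1')
    assert not is_valid_ipv4('10.0.0.256')
    assert is_valid_cidr('10.0.0.0/24')
    assert not is_valid_cidr('10.0.0.0')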
def monkey_patch():
""" If the Flags.monkey_patch set as True,
this function patches a decorator
for all functions in specified modules.
You can set decorators for each modules
using FLAGS.monkey_patch_modules.
The format is "Module path:Decorator function".
Example: 'nova.api.ec2.cloud:nova.notifier.api.notify_decorator'
Parameters of the decorator is as follows.
(See nova.notifier.api.notify_decorator)
name - name of the function
function - object of the function
"""
    # If FLAGS.monkey_patch is not True, this function does nothing.
if not FLAGS.monkey_patch:
return
# Get list of modules and decorators
for module_and_decorator in FLAGS.monkey_patch_modules:
module, decorator_name = module_and_decorator.split(':')
# import decorator function
decorator = importutils.import_class(decorator_name)
__import__(module)
# Retrieve module information using pyclbr
module_data = pyclbr.readmodule_ex(module)
for key in module_data.keys():
# set the decorator for the class methods
if isinstance(module_data[key], pyclbr.Class):
clz = importutils.import_class("%s.%s" % (module, key))
for method, func in inspect.getmembers(clz, inspect.ismethod):
setattr(clz, method,
decorator("%s.%s.%s" % (module, key, method), func))
# set the decorator for the function
if isinstance(module_data[key], pyclbr.Function):
func = importutils.import_class("%s.%s" % (module, key))
setattr(sys.modules[module], key,
decorator("%s.%s" % (module, key), func))
def convert_to_list_dict(lst, label):
"""Convert a value or list into a list of dicts"""
if not lst:
return None
if not isinstance(lst, list):
lst = [lst]
return [{label: x} for x in lst]
def timefunc(func):
"""Decorator that logs how long a particular function took to execute"""
@functools.wraps(func)
def inner(*args, **kwargs):
start_time = time.time()
try:
return func(*args, **kwargs)
finally:
total_time = time.time() - start_time
LOG.debug(_("timefunc: '%(name)s' took %(total_time).2f secs") %
dict(name=func.__name__, total_time=total_time))
return inner
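# Illustrative usage sketch (added for clarity; not part of the original
# module): timefunc is an ordinary decorator, so wrapping any callable makes
# its wall-clock duration show up in the debug log when it is invoked.
@timefunc
def _example_timed_noop():  # pragma: no cover
    return None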
def generate_glance_url():
"""Generate the URL to glance."""
# TODO(jk0): This will eventually need to take SSL into consideration
# when supported in glance.
return "http://%s:%d" % (FLAGS.glance_host, FLAGS.glance_port)
def generate_image_url(image_ref):
"""Generate a image URL from an image_ref."""
return "%s/images/%s" % (generate_glance_url(), image_ref)
@contextlib.contextmanager
def logging_error(message):
"""Catches exception, write message to the log, re-raise.
This is a common refinement of save_and_reraise that writes a specific
message to the log.
"""
try:
yield
except Exception as error:
with excutils.save_and_reraise_exception():
LOG.exception(message)
@contextlib.contextmanager
def remove_path_on_error(path):
"""Protect code that wants to operate on PATH atomically.
Any exception will cause PATH to be removed.
"""
try:
yield
except Exception:
with excutils.save_and_reraise_exception():
delete_if_exists(path)
def make_dev_path(dev, partition=None, base='/dev'):
"""Return a path to a particular device.
>>> make_dev_path('xvdc')
/dev/xvdc
>>> make_dev_path('xvdc', 1)
/dev/xvdc1
"""
path = os.path.join(base, dev)
if partition:
path += str(partition)
return path
def total_seconds(td):
"""Local total_seconds implementation for compatibility with python 2.6"""
if hasattr(td, 'total_seconds'):
return td.total_seconds()
else:
return ((td.days * 86400 + td.seconds) * 10 ** 6 +
td.microseconds) / 10.0 ** 6
def sanitize_hostname(hostname):
"""Return a hostname which conforms to RFC-952 and RFC-1123 specs."""
if isinstance(hostname, unicode):
hostname = hostname.encode('latin-1', 'ignore')
hostname = re.sub('[ _]', '-', hostname)
hostname = re.sub('[^\w.-]+', '', hostname)
hostname = hostname.lower()
hostname = hostname.strip('.-')
return hostname
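def _example_sanitize_hostname():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module): spaces and underscores become dashes, disallowed characters
    # are dropped, and the result is lowercased and trimmed.
    assert sanitize_hostname(u'My Host_01') == 'my-host-01'
    assert sanitize_hostname(u'.web server!.') == 'web-server'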
def read_cached_file(filename, cache_info, reload_func=None):
"""Read from a file if it has been modified.
:param cache_info: dictionary to hold opaque cache.
:param reload_func: optional function to be called with data when
file is reloaded due to a modification.
:returns: data from file
"""
mtime = os.path.getmtime(filename)
if not cache_info or mtime != cache_info.get('mtime'):
LOG.debug(_("Reloading cached file %s") % filename)
with open(filename) as fap:
cache_info['data'] = fap.read()
cache_info['mtime'] = mtime
if reload_func:
reload_func(cache_info['data'])
return cache_info['data']
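def _example_read_cached_file():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module): the caller owns the cache_info dict and passes the same one
    # on every call; the file is re-read only when its mtime changes. The
    # path below is a hypothetical placeholder.
    cache = {}
    first = read_cached_file('/etc/nova/policy.json', cache)
    again = read_cached_file('/etc/nova/policy.json', cache)  # cache hit
    return first, again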
def hash_file(file_like_object):
"""Generate a hash for the contents of a file."""
checksum = hashlib.sha1()
any(map(checksum.update, iter(lambda: file_like_object.read(32768), '')))
return checksum.hexdigest()
@contextlib.contextmanager
def temporary_mutation(obj, **kwargs):
"""Temporarily set the attr on a particular object to a given value then
revert when finished.
One use of this is to temporarily set the read_deleted flag on a context
object:
with temporary_mutation(context, read_deleted="yes"):
do_something_that_needed_deleted_objects()
"""
NOT_PRESENT = object()
old_values = {}
for attr, new_value in kwargs.items():
old_values[attr] = getattr(obj, attr, NOT_PRESENT)
setattr(obj, attr, new_value)
try:
yield
finally:
for attr, old_value in old_values.items():
if old_value is NOT_PRESENT:
del obj[attr]
else:
setattr(obj, attr, old_value)
def warn_deprecated_class(cls, msg):
"""
Issues a warning to indicate that the given class is deprecated.
If a message is given, it is appended to the deprecation warning.
"""
fullname = '%s.%s' % (cls.__module__, cls.__name__)
if msg:
fullmsg = _("Class %(fullname)s is deprecated: %(msg)s")
else:
fullmsg = _("Class %(fullname)s is deprecated")
# Issue the warning
warnings.warn(fullmsg % locals(), DeprecationWarning, stacklevel=3)
def warn_deprecated_function(func, msg):
"""
Issues a warning to indicate that the given function is
deprecated. If a message is given, it is appended to the
deprecation warning.
"""
name = func.__name__
# Find the function's definition
sourcefile = inspect.getsourcefile(func)
# Find the line number, if possible
if inspect.ismethod(func):
code = func.im_func.func_code
else:
code = func.func_code
lineno = getattr(code, 'co_firstlineno', None)
if lineno is None:
location = sourcefile
else:
location = "%s:%d" % (sourcefile, lineno)
# Build up the message
if msg:
fullmsg = _("Function %(name)s in %(location)s is deprecated: %(msg)s")
else:
fullmsg = _("Function %(name)s in %(location)s is deprecated")
# Issue the warning
warnings.warn(fullmsg % locals(), DeprecationWarning, stacklevel=3)
def _stubout(klass, message):
"""
    Scans a class and generates wrapping stubs for __new__() and every
    class and static method, then installs those stubs on the class so
    that each call emits a deprecation warning.
"""
overrides = {}
def makestub_class(name, func):
"""
Create a stub for wrapping class methods.
"""
def stub(cls, *args, **kwargs):
warn_deprecated_class(klass, message)
return func(*args, **kwargs)
# Overwrite the stub's name
stub.__name__ = name
stub.func_name = name
return classmethod(stub)
def makestub_static(name, func):
"""
Create a stub for wrapping static methods.
"""
def stub(*args, **kwargs):
warn_deprecated_class(klass, message)
return func(*args, **kwargs)
# Overwrite the stub's name
stub.__name__ = name
stub.func_name = name
return staticmethod(stub)
for name, kind, _klass, _obj in inspect.classify_class_attrs(klass):
# We're only interested in __new__(), class methods, and
# static methods...
if (name != '__new__' and
kind not in ('class method', 'static method')):
continue
# Get the function...
func = getattr(klass, name)
# Override it in the class
if kind == 'class method':
stub = makestub_class(name, func)
elif kind == 'static method' or name == '__new__':
stub = makestub_static(name, func)
# Save it in the overrides dictionary...
overrides[name] = stub
# Apply the overrides
for name, stub in overrides.items():
setattr(klass, name, stub)
def deprecated(message=''):
"""
Marks a function, class, or method as being deprecated. For
functions and methods, emits a warning each time the function or
    method is called. For classes, the class is stubbed in place so that
    a warning is emitted each time the class is instantiated, or each
    time any class or static method is called.
If a message is passed to the decorator, that message will be
appended to the emitted warning. This may be used to suggest an
alternate way of achieving the desired effect, or to explain why
the function, class, or method is deprecated.
"""
def decorator(f_or_c):
# Make sure we can deprecate it...
if not callable(f_or_c) or isinstance(f_or_c, types.ClassType):
warnings.warn("Cannot mark object %r as deprecated" % f_or_c,
DeprecationWarning, stacklevel=2)
return f_or_c
# If we're deprecating a class, create a subclass of it and
# stub out all the class and static methods
if inspect.isclass(f_or_c):
klass = f_or_c
_stubout(klass, message)
return klass
# OK, it's a function; use a traditional wrapper...
func = f_or_c
@functools.wraps(func)
def wrapper(*args, **kwargs):
warn_deprecated_function(func, message)
return func(*args, **kwargs)
return wrapper
return decorator
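# Illustrative usage sketch (added for clarity; not part of the original
# module): the helper below is hypothetical and exists only to show the
# decorator; calling it emits a DeprecationWarning carrying the message.
@deprecated('use a newer helper instead')
def _example_deprecated_helper():  # pragma: no cover
    return None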
def _showwarning(message, category, filename, lineno, file=None, line=None):
"""
Redirect warnings into logging.
"""
fmtmsg = warnings.formatwarning(message, category, filename, lineno, line)
LOG.warning(fmtmsg)
# Install our warnings handler
warnings.showwarning = _showwarning
def service_is_up(service):
"""Check whether a service is up based on last heartbeat."""
last_heartbeat = service['updated_at'] or service['created_at']
# Timestamps in DB are UTC.
elapsed = total_seconds(utcnow() - last_heartbeat)
return abs(elapsed) <= FLAGS.service_down_time
def generate_mac_address():
"""Generate an Ethernet MAC address."""
# NOTE(vish): We would prefer to use 0xfe here to ensure that linux
# bridge mac addresses don't change, but it appears to
# conflict with libvirt, so we use the next highest octet
# that has the unicast and locally administered bits set
# properly: 0xfa.
# Discussion: https://bugs.launchpad.net/nova/+bug/921838
mac = [0xfa, 0x16, 0x3e,
random.randint(0x00, 0x7f),
random.randint(0x00, 0xff),
random.randint(0x00, 0xff)]
return ':'.join(map(lambda x: "%02x" % x, mac))
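def _example_generate_mac_address():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module): generated addresses always carry the fa:16:3e prefix chosen
    # above and consist of six colon-separated octets.
    mac = generate_mac_address()
    assert mac.startswith('fa:16:3e:')
    assert len(mac.split(':')) == 6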
def read_file_as_root(file_path):
"""Secure helper to read file as root."""
try:
out, _err = execute('cat', file_path, run_as_root=True)
return out
except exception.ProcessExecutionError:
raise exception.FileNotFound(file_path=file_path)
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
"""Temporarily chown a path.
    :param owner_uid: UID of temporary owner (defaults to current user)
"""
if owner_uid is None:
owner_uid = os.getuid()
orig_uid = os.stat(path).st_uid
if orig_uid != owner_uid:
execute('chown', owner_uid, path, run_as_root=True)
try:
yield
finally:
if orig_uid != owner_uid:
execute('chown', orig_uid, path, run_as_root=True)
@contextlib.contextmanager
def tempdir(**kwargs):
tmpdir = tempfile.mkdtemp(**kwargs)
try:
yield tmpdir
finally:
try:
shutil.rmtree(tmpdir)
except OSError, e:
LOG.error(_('Could not remove tmpdir: %s'), str(e))
def strcmp_const_time(s1, s2):
"""Constant-time string comparison.
    :param s1: the first string
    :param s2: the second string
:return: True if the strings are equal.
This function takes two strings and compares them. It is intended to be
used when doing a comparison for authentication purposes to help guard
against timing attacks.
"""
if len(s1) != len(s2):
return False
result = 0
for (a, b) in zip(s1, s2):
result |= ord(a) ^ ord(b)
return result == 0
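def _example_strcmp_const_time():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module): equal strings compare True, and the comparison time depends
    # on the length only, not on where the first difference occurs.
    assert strcmp_const_time('s3cret', 's3cret')
    assert not strcmp_const_time('s3cret', 's3cre7')
    assert not strcmp_const_time('short', 'longer-string')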
class UndoManager(object):
"""Provides a mechanism to facilitate rolling back a series of actions
when an exception is raised.
"""
def __init__(self):
self.undo_stack = []
def undo_with(self, undo_func):
self.undo_stack.append(undo_func)
def _rollback(self):
for undo_func in reversed(self.undo_stack):
undo_func()
def rollback_and_reraise(self, msg=None, **kwargs):
"""Rollback a series of actions then re-raise the exception.
.. note:: (sirp) This should only be called within an
exception handler.
"""
with excutils.save_and_reraise_exception():
if msg:
LOG.exception(msg, **kwargs)
self._rollback()
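def _example_undo_manager():  # pragma: no cover
    # Illustrative usage sketch (added for clarity; not part of the original
    # module): register undo callbacks as work progresses, then roll back in
    # reverse order when something fails.
    undo = UndoManager()
    created = []
    try:
        created.append('volume')
        undo.undo_with(lambda: created.remove('volume'))
        raise RuntimeError('simulated failure')
    except RuntimeError:
        undo._rollback()
    assert created == []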
|
apache-2.0
| -3,467,027,477,747,099,000 | 31.439218 | 79 | 0.58718 | false |
michaelbrooks/twitter-feels
|
twitter_feels/settings/prod.py
|
1
|
1165
|
"""
Production settings.
Based on https://github.com/rdegges/django-skel/blob/master/project_name/settings/prod.py
"""
from os import environ
from common import *
# Below are things we might need to deal with later
########## EMAIL CONFIGURATION
########## DATABASE CONFIGURATION
########## CACHE CONFIGURATION
########## STORAGE CONFIGURATION
########## REDIS CONFIGURATION
########## SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = environ.get('SECRET_KEY', SECRET_KEY)
########## END SECRET CONFIGURATION
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
# Just to make totally sure...
DEBUG = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## COMPRESSION CONFIGURATION
# See: http://django_compressor.readthedocs.org/en/latest/settings/#django.conf.settings.COMPRESS_ENABLED
COMPRESS_ENABLED = True
########## END COMPRESSION CONFIGURATION
# The hosts that we are allowed to serve as
ALLOWED_HOSTS = environ.get("ALLOWED_HOSTS", 'localhost').split(',')
|
mit
| 6,534,998,766,569,324,000 | 30.513514 | 105 | 0.706438 | false |
telminov/django-park-keeper
|
parkkeeper/forms.py
|
1
|
2880
|
# coding: utf-8
import json
from django import forms
from parkkeeper import models
from croniter import croniter
class BaseSchedule(forms.ModelForm):
def clean_options_json(self):
options_json = self.cleaned_data.get('options_json')
if options_json:
try:
options = json.loads(options_json)
except ValueError:
raise forms.ValidationError('Incorrect JSON')
if type(options) is not dict:
raise forms.ValidationError('Options must be JavaScript object.')
return options_json
def clean_cron(self):
cron = self.cleaned_data.get('cron')
if cron:
try:
croniter(cron)
except Exception:
                raise forms.ValidationError('Incorrect cron schedule')
return cron
def clean(self):
all_hosts = set(self.cleaned_data['hosts'].all())
for group in self.cleaned_data['groups'].all():
all_hosts.update(group.hosts.all())
not_exists_host_credentials = set()
for credential_type in self.cleaned_data['credential_types'].all():
for host in all_hosts:
qs = models.HostCredential.objects.filter(host=host, credential__type=credential_type)
if not qs.exists():
not_exists_host_credentials.add((host, credential_type))
if not_exists_host_credentials:
msg = 'Needs credential types for hosts: '
msg += '. '.join(map(lambda h_ct_args: '%s - %s' % h_ct_args, not_exists_host_credentials))
self.add_error('hosts', msg)
self.add_error('groups', msg)
self.add_error('credential_types', msg)
if not (self.cleaned_data['count'] or self.cleaned_data['interval'] or self.cleaned_data['time_units']) \
and not self.cleaned_data['cron']:
msg = 'You have to specify or period, or cron-style schedule'
for field in ('count', 'interval', 'time_units', 'cron'):
self.add_error(field, msg)
return self.cleaned_data
class MonitSchedule(forms.ModelForm):
class Meta:
model = models.MonitSchedule
fields = '__all__'
class WorkSchedule(forms.ModelForm):
class Meta:
model = models.WorkSchedule
fields = '__all__'
class Credential(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput(), required=False)
class Meta:
model = models.Credential
fields = ('name', 'type', 'username',)
def clean_password(self):
password = self.cleaned_data.get('password')
if password:
self.instance.set_password(password, save=False)
elif not self.instance.id:
raise forms.ValidationError('Password is required for new credential instance.')
return password
|
mit
| 5,198,988,995,579,362,000 | 33.285714 | 113 | 0.601736 | false |
WillisXChen/django-oscar
|
oscar/lib/python2.7/site-packages/behave/runner.py
|
1
|
24139
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import contextlib
import os.path
import StringIO
import sys
import traceback
import warnings
import weakref
from behave import matchers
from behave.step_registry import setup_step_decorators
from behave.formatter import formatters
from behave.configuration import ConfigError
from behave.log_capture import LoggingCapture
from behave.runner_util import \
collect_feature_locations, parse_features
class ContextMaskWarning(UserWarning):
'''Raised if a context variable is being overwritten in some situations.
    If the variable was originally set by user code then this will be raised if
    *behave* overwrites the value.
    If the variable was originally set by *behave* then this will be raised if
    user code overwrites the value.
'''
pass
class Context(object):
'''Hold contextual information during the running of tests.
This object is a place to store information related to the tests you're
running. You may add arbitrary attributes to it of whatever value you need.
During the running of your tests the object will have additional layers of
namespace added and removed automatically. There is a "root" namespace and
additional namespaces for features and scenarios.
Certain names are used by *behave*; be wary of using them yourself as
*behave* may overwrite the value you set. These names are:
.. attribute:: feature
This is set when we start testing a new feature and holds a
:class:`~behave.model.Feature`. It will not be present outside of a
feature (i.e. within the scope of the environment before_all and
after_all).
.. attribute:: scenario
This is set when we start testing a new scenario (including the
individual scenarios of a scenario outline) and holds a
:class:`~behave.model.Scenario`. It will not be present outside of the
scope of a scenario.
.. attribute:: tags
The current set of active tags (as a Python set containing instances of
:class:`~behave.model.Tag` which are basically just glorified strings)
combined from the feature and scenario. This attribute will not be
present outside of a feature scope.
.. attribute:: aborted
This is set to true in the root namespace when the user aborts a test run
(:exc:`KeyboardInterrupt` exception). Initially: False.
.. attribute:: failed
This is set to true in the root namespace as soon as a step fails.
Initially: False.
.. attribute:: table
This is set at the step level and holds any :class:`~behave.model.Table`
associated with the step.
.. attribute:: text
This is set at the step level and holds any multiline text associated
with the step.
.. attribute:: config
The configuration of *behave* as determined by configuration files and
command-line options. The attributes of this object are the same as the
    `configuration file section names`_.
.. attribute:: active_outline
This is set for each scenario in a scenario outline and references the
:class:`~behave.model.Row` that is active for the current scenario. It is
present mostly for debugging, but may be useful otherwise.
.. attribute:: log_capture
If logging capture is enabled then this attribute contains the captured
logging as an instance of :class:`~behave.log_capture.LoggingCapture`.
It is not present if logging is not being captured.
.. attribute:: stdout_capture
If stdout capture is enabled then this attribute contains the captured
output as a StringIO instance. It is not present if stdout is not being
captured.
.. attribute:: stderr_capture
If stderr capture is enabled then this attribute contains the captured
output as a StringIO instance. It is not present if stderr is not being
captured.
    If an attempt is made by user code to overwrite one of these variables, or
    indeed by *behave* to overwrite a user-set variable, then a
:class:`behave.runner.ContextMaskWarning` warning will be raised.
You may use the "in" operator to test whether a certain value has been set
on the context, for example:
'feature' in context
checks whether there is a "feature" value in the context.
Values may be deleted from the context using "del" but only at the level
they are set. You can't delete a value set by a feature at a scenario level
but you can delete a value set for a scenario in that scenario.
    .. _`configuration file section names`: behave.html#configuration-files
'''
BEHAVE = 'behave'
USER = 'user'
def __init__(self, runner):
self._runner = weakref.proxy(runner)
self._config = runner.config
d = self._root = {
'aborted': False,
'failed': False,
'config': self._config,
'active_outline': None,
}
self._stack = [d]
self._record = {}
self._origin = {}
self._mode = self.BEHAVE
self.feature = None
def _push(self):
self._stack.insert(0, {})
def _pop(self):
self._stack.pop(0)
@contextlib.contextmanager
def user_mode(self):
try:
self._mode = self.USER
yield
finally:
# -- NOTE: Otherwise skipped if AssertionError/Exception is raised.
self._mode = self.BEHAVE
def _set_root_attribute(self, attr, value):
for frame in self.__dict__['_stack']:
if frame is self.__dict__['_root']:
continue
if attr in frame:
record = self.__dict__['_record'][attr]
params = {
'attr': attr,
'filename': record[0],
'line': record[1],
'function': record[3],
}
self._emit_warning(attr, params)
self.__dict__['_root'][attr] = value
if attr not in self._origin:
self._origin[attr] = self._mode
def _emit_warning(self, attr, params):
msg = ''
if self._mode is self.BEHAVE and self._origin[attr] is not self.BEHAVE:
msg = "behave runner is masking context attribute '%(attr)s' " \
"orignally set in %(function)s (%(filename)s:%(line)s)"
elif self._mode is self.USER:
if self._origin[attr] is not self.USER:
msg = "user code is masking context attribute '%(attr)s' " \
"orignally set by behave"
elif self._config.verbose:
msg = "user code is masking context attribute " \
"'%(attr)s'; see the tutorial for what this means"
if msg:
msg = msg % params
warnings.warn(msg, ContextMaskWarning, stacklevel=3)
def _dump(self):
for level, frame in enumerate(self._stack):
print 'Level %d' % level
print repr(frame)
def __getattr__(self, attr):
if attr[0] == '_':
return self.__dict__[attr]
for frame in self._stack:
if attr in frame:
return frame[attr]
msg = "'{0}' object has no attribute '{1}'"
msg = msg.format(self.__class__.__name__, attr)
raise AttributeError(msg)
def __setattr__(self, attr, value):
if attr[0] == '_':
self.__dict__[attr] = value
return
for frame in self._stack[1:]:
if attr in frame:
record = self._record[attr]
params = {
'attr': attr,
'filename': record[0],
'line': record[1],
'function': record[3],
}
self._emit_warning(attr, params)
stack_frame = traceback.extract_stack(limit=2)[0]
self._record[attr] = stack_frame
frame = self._stack[0]
frame[attr] = value
if attr not in self._origin:
self._origin[attr] = self._mode
def __delattr__(self, attr):
frame = self._stack[0]
if attr in frame:
del frame[attr]
del self._record[attr]
else:
msg = "'{0}' object has no attribute '{1}' at the current level"
msg = msg.format(self.__class__.__name__, attr)
raise AttributeError(msg)
def __contains__(self, attr):
if attr[0] == '_':
return attr in self.__dict__
for frame in self._stack:
if attr in frame:
return True
return False
def execute_steps(self, steps_text):
'''The steps identified in the "steps" text string will be parsed and
executed in turn just as though they were defined in a feature file.
If the execute_steps call fails (either through error or failure
assertion) then the step invoking it will fail.
ValueError will be raised if this is invoked outside a feature context.
Returns boolean False if the steps are not parseable, True otherwise.
'''
assert isinstance(steps_text, unicode), "Steps must be unicode."
if not self.feature:
raise ValueError('execute_steps() called outside of feature')
# -- PREPARE: Save original context data for current step.
# Needed if step definition that called this method uses .table/.text
original_table = getattr(self, "table", None)
original_text = getattr(self, "text", None)
self.feature.parser.variant = 'steps'
steps = self.feature.parser.parse_steps(steps_text)
for step in steps:
passed = step.run(self._runner, quiet=True, capture=False)
if not passed:
# -- ISSUE #96: Provide more substep info to diagnose problem.
step_line = u"%s %s" % (step.keyword, step.name)
message = "%s SUB-STEP: %s" % (step.status.upper(), step_line)
if step.error_message:
message += "\nSubstep info: %s" % step.error_message
assert False, message
# -- FINALLY: Restore original context data for current step.
self.table = original_table
self.text = original_text
return True
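# Illustrative usage sketch (added for clarity; not part of behave itself):
# inside a step implementation, context.execute_steps() runs nested steps
# written as a unicode Gherkin snippet. The step texts below are placeholders.
#
#     @when(u'I log in as an admin')
#     def step_impl(context):
#         context.execute_steps(u'''
#             When I open the login page
#             And I submit valid admin credentials
#         ''')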
def exec_file(filename, globals={}, locals=None):
if locals is None:
locals = globals
locals['__file__'] = filename
if sys.version_info[0] == 3:
with open(filename) as f:
# -- FIX issue #80: exec(f.read(), globals, locals)
filename2 = os.path.relpath(filename, os.getcwd())
code = compile(f.read(), filename2, 'exec')
exec(code, globals, locals)
else:
execfile(filename, globals, locals)
def path_getrootdir(path):
"""
Extract rootdir from path in a platform independent way.
POSIX-PATH EXAMPLE:
rootdir = path_getrootdir("/foo/bar/one.feature")
assert rootdir == "/"
WINDOWS-PATH EXAMPLE:
rootdir = path_getrootdir("D:\\foo\\bar\\one.feature")
assert rootdir == r"D:\"
"""
drive, _ = os.path.splitdrive(path)
if drive:
# -- WINDOWS:
return drive + os.path.sep
# -- POSIX:
return os.path.sep
class PathManager(object):
"""
Context manager to add paths to sys.path (python search path) within a scope
"""
def __init__(self, paths=None):
self.initial_paths = paths or []
self.paths = None
def __enter__(self):
self.paths = list(self.initial_paths)
sys.path = self.paths + sys.path
def __exit__(self, *crap):
for path in self.paths:
sys.path.remove(path)
self.paths = None
def add(self, path):
if self.paths is None:
# -- CALLED OUTSIDE OF CONTEXT:
self.initial_paths.append(path)
else:
sys.path.insert(0, path)
self.paths.append(path)
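# Illustrative usage sketch (added for clarity; not part of behave itself):
# PathManager can be primed with paths up front and then used as a context
# manager, so imports of step modules resolve only while it is active.
#
#     manager = PathManager(['features/steps'])
#     with manager:
#         import my_steps  # hypothetical module, found via the added path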
class ModelRunner(object):
"""
Test runner for a behave model (features).
Provides the core functionality of a test runner and
the functional API needed by model elements.
.. attribute:: aborted
This is set to true when the user aborts a test run
(:exc:`KeyboardInterrupt` exception). Initially: False.
Stored as derived attribute in :attr:`Context.aborted`.
"""
def __init__(self, config, features=None):
self.config = config
self.features = features or []
self.hooks = {}
self.formatters = []
self.undefined_steps = []
self.context = None
self.feature = None
self.stdout_capture = None
self.stderr_capture = None
self.log_capture = None
self.old_stdout = None
self.old_stderr = None
# @property
def _get_aborted(self):
value = False
if self.context:
value = self.context.aborted
return value
# @aborted.setter
def _set_aborted(self, value):
assert self.context
self.context._set_root_attribute('aborted', bool(value))
aborted = property(_get_aborted, _set_aborted,
doc="Indicates that test run is aborted by the user.")
def run_hook(self, name, context, *args):
if not self.config.dry_run and (name in self.hooks):
# try:
with context.user_mode():
self.hooks[name](context, *args)
# except KeyboardInterrupt:
# self.aborted = True
# if name not in ("before_all", "after_all"):
# raise
def setup_capture(self):
if not self.context:
self.context = Context(self)
if self.config.stdout_capture:
self.stdout_capture = StringIO.StringIO()
self.context.stdout_capture = self.stdout_capture
if self.config.stderr_capture:
self.stderr_capture = StringIO.StringIO()
self.context.stderr_capture = self.stderr_capture
if self.config.log_capture:
self.log_capture = LoggingCapture(self.config)
self.log_capture.inveigle()
self.context.log_capture = self.log_capture
def start_capture(self):
if self.config.stdout_capture:
# -- REPLACE ONLY: In non-capturing mode.
if not self.old_stdout:
self.old_stdout = sys.stdout
sys.stdout = self.stdout_capture
assert sys.stdout is self.stdout_capture
if self.config.stderr_capture:
# -- REPLACE ONLY: In non-capturing mode.
if not self.old_stderr:
self.old_stderr = sys.stderr
sys.stderr = self.stderr_capture
assert sys.stderr is self.stderr_capture
def stop_capture(self):
if self.config.stdout_capture:
# -- RESTORE ONLY: In capturing mode.
if self.old_stdout:
sys.stdout = self.old_stdout
self.old_stdout = None
assert sys.stdout is not self.stdout_capture
if self.config.stderr_capture:
# -- RESTORE ONLY: In capturing mode.
if self.old_stderr:
sys.stderr = self.old_stderr
self.old_stderr = None
assert sys.stderr is not self.stderr_capture
def teardown_capture(self):
if self.config.log_capture:
self.log_capture.abandon()
def run_model(self, features=None):
if not self.context:
self.context = Context(self)
if features is None:
features = self.features
# -- ENSURE: context.execute_steps() works in weird cases (hooks, ...)
context = self.context
self.setup_capture()
self.run_hook('before_all', context)
run_feature = not self.aborted
failed_count = 0
undefined_steps_initial_size = len(self.undefined_steps)
for feature in features:
if run_feature:
try:
self.feature = feature
for formatter in self.formatters:
formatter.uri(feature.filename)
failed = feature.run(self)
if failed:
failed_count += 1
if self.config.stop or self.aborted:
# -- FAIL-EARLY: After first failure.
run_feature = False
except KeyboardInterrupt:
self.aborted = True
failed_count += 1
run_feature = False
# -- ALWAYS: Report run/not-run feature to reporters.
# REQUIRED-FOR: Summary to keep track of untested features.
for reporter in self.config.reporters:
reporter.feature(feature)
# -- AFTER-ALL:
if self.aborted:
print "\nABORTED: By user."
for formatter in self.formatters:
formatter.close()
self.run_hook('after_all', self.context)
for reporter in self.config.reporters:
reporter.end()
# if self.aborted:
# print "\nABORTED: By user."
failed = ((failed_count > 0) or self.aborted or
(len(self.undefined_steps) > undefined_steps_initial_size))
return failed
def run(self):
"""
Implements the run method by running the model.
"""
self.context = Context(self)
return self.run_model()
class Runner(ModelRunner):
"""
Standard test runner for behave:
* setup paths
* loads environment hooks
* loads step definitions
* select feature files, parses them and creates model (elements)
"""
def __init__(self, config):
super(Runner, self).__init__(config)
self.path_manager = PathManager()
self.base_dir = None
def setup_paths(self):
if self.config.paths:
if self.config.verbose:
print 'Supplied path:', \
', '.join('"%s"' % path for path in self.config.paths)
first_path = self.config.paths[0]
if hasattr(first_path, "filename"):
# -- BETTER: isinstance(first_path, FileLocation):
first_path = first_path.filename
base_dir = first_path
if base_dir.startswith('@'):
# -- USE: behave @features.txt
base_dir = base_dir[1:]
file_locations = self.feature_locations()
if file_locations:
base_dir = os.path.dirname(file_locations[0].filename)
base_dir = os.path.abspath(base_dir)
# supplied path might be to a feature file
if os.path.isfile(base_dir):
if self.config.verbose:
print 'Primary path is to a file so using its directory'
base_dir = os.path.dirname(base_dir)
else:
if self.config.verbose:
print 'Using default path "./features"'
base_dir = os.path.abspath('features')
# Get the root. This is not guaranteed to be '/' because Windows.
root_dir = path_getrootdir(base_dir)
new_base_dir = base_dir
while True:
if self.config.verbose:
print 'Trying base directory:', new_base_dir
if os.path.isdir(os.path.join(new_base_dir, 'steps')):
break
if os.path.isfile(os.path.join(new_base_dir, 'environment.py')):
break
if new_base_dir == root_dir:
break
new_base_dir = os.path.dirname(new_base_dir)
if new_base_dir == root_dir:
if self.config.verbose:
if not self.config.paths:
print 'ERROR: Could not find "steps" directory. Please '\
'specify where to find your features.'
else:
print 'ERROR: Could not find "steps" directory in your '\
'specified path "%s"' % base_dir
raise ConfigError('No steps directory in "%s"' % base_dir)
base_dir = new_base_dir
self.config.base_dir = base_dir
for dirpath, dirnames, filenames in os.walk(base_dir):
if [fn for fn in filenames if fn.endswith('.feature')]:
break
else:
if self.config.verbose:
if not self.config.paths:
print 'ERROR: Could not find any "<name>.feature" files. '\
'Please specify where to find your features.'
else:
print 'ERROR: Could not find any "<name>.feature" files '\
'in your specified path "%s"' % base_dir
raise ConfigError('No feature files in "%s"' % base_dir)
self.base_dir = base_dir
self.path_manager.add(base_dir)
if not self.config.paths:
self.config.paths = [base_dir]
if base_dir != os.getcwd():
self.path_manager.add(os.getcwd())
def before_all_default_hook(self, context):
"""
Default implementation for :func:`before_all()` hook.
Setup the logging subsystem based on the configuration data.
"""
context.config.setup_logging()
def load_hooks(self, filename='environment.py'):
hooks_path = os.path.join(self.base_dir, filename)
if os.path.exists(hooks_path):
exec_file(hooks_path, self.hooks)
if 'before_all' not in self.hooks:
self.hooks['before_all'] = self.before_all_default_hook
def load_step_definitions(self, extra_step_paths=[]):
step_globals = {
'use_step_matcher': matchers.use_step_matcher,
'step_matcher': matchers.step_matcher, # -- DEPRECATING
}
setup_step_decorators(step_globals)
# -- Allow steps to import other stuff from the steps dir
# NOTE: Default matcher can be overridden in "environment.py" hook.
steps_dir = os.path.join(self.base_dir, 'steps')
paths = [steps_dir] + list(extra_step_paths)
with PathManager(paths):
default_matcher = matchers.current_matcher
for path in paths:
for name in sorted(os.listdir(path)):
if name.endswith('.py'):
# -- LOAD STEP DEFINITION:
# Reset to default matcher after each step-definition.
# A step-definition may change the matcher 0..N times.
# ENSURE: Each step definition has clean globals.
step_module_globals = step_globals.copy()
exec_file(os.path.join(path, name), step_module_globals)
matchers.current_matcher = default_matcher
def feature_locations(self):
return collect_feature_locations(self.config.paths)
def run(self):
with self.path_manager:
self.setup_paths()
return self.run_with_paths()
def run_with_paths(self):
self.context = Context(self)
self.load_hooks()
self.load_step_definitions()
# -- ENSURE: context.execute_steps() works in weird cases (hooks, ...)
# self.setup_capture()
# self.run_hook('before_all', self.context)
# -- STEP: Parse all feature files (by using their file location).
feature_locations = [ filename for filename in self.feature_locations()
if not self.config.exclude(filename) ]
features = parse_features(feature_locations, language=self.config.lang)
self.features.extend(features)
# -- STEP: Run all features.
stream_openers = self.config.outputs
self.formatters = formatters.get_formatter(self.config, stream_openers)
return self.run_model()
|
bsd-3-clause
| 2,834,012,232,175,173,600 | 34.290936 | 80 | 0.577944 | false |
eyolfson/site-eyl
|
django_blog/hooks.py
|
1
|
1907
|
# Copyright 2014 Jon Eyolfson
#
# This file is distributed under the GPLv3 license
import datetime
import logging
import markdown
import os
import pygit2
from django_blog.models import Post
from django_gitolite.utils import home_dir
logger = logging.getLogger('django_blog')
def update_blog(push):
repo_path = push.repo.path
if repo_path != 'jon/site-eyl-blog':
return
git_repo = pygit2.Repository(
os.path.join(home_dir(), 'repositories', '{}.git'.format(repo_path))
)
# TODO: There is no diff if this is the first commit
for patch in git_repo.diff(push.old_rev, push.new_rev):
delta = patch.delta
if delta.status == pygit2.GIT_DELTA_ADDED:
pass
elif delta.status == pygit2.GIT_DELTA_MODIFIED:
if delta.old_file.path != delta.new_file.path:
print("Old and new file paths do not match")
continue
elif delta.status == pygit2.GIT_DELTA_DELETED:
            # splitext drops the '.md' suffix; rstrip('.md') would strip chars
            slug = os.path.splitext(delta.old_file.path)[0]
post = Post.objects.get(slug=slug)
post.delete()
continue
else:
print("Unhandled status '{}'".format(delta.status))
continue
file_path = delta.new_file.path
        # splitext drops the '.md' suffix; rstrip('.md') would strip chars
        slug = os.path.splitext(file_path)[0]
markdown_content = git_repo[delta.new_file.id].data.decode()
md = markdown.Markdown(extensions=['headerid(level=2, forceid=False)',
'meta',
'tables'],
output_format='html5')
content = md.convert(markdown_content)
title = md.Meta['title'][0]
post, created = Post.objects.get_or_create(
slug=slug,
defaults={'date': datetime.date.today(),}
)
post.content = content
post.title = title
post.save()
|
gpl-3.0
| -1,440,978,568,700,513,800 | 33.672727 | 78 | 0.571054 | false |
AngelValera/bares-y-tapas-DAI
|
proyecto_DAI/urls.py
|
1
|
1515
|
"""proyecto_DAI URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, patterns, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
# Create a new class that redirects the user to the index page, if successful at logging in
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('app.urls')),
]
if not settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += staticfiles_urlpatterns()
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
)
if settings.DEBUG:
urlpatterns += patterns(
'django.views.static',
(r'^media/(?P<path>.*)',
'serve',
{'document_root': settings.MEDIA_ROOT}), )
|
mit
| -4,793,245,582,486,517,000 | 35.95122 | 105 | 0.691089 | false |
gschizas/praw
|
praw/models/reddit/wikipage.py
|
1
|
10000
|
"""Provide the WikiPage class."""
from typing import Any, Dict, Generator, Optional, TypeVar, Union
from ...const import API_PATH
from ...util.cache import cachedproperty
from ..listing.generator import ListingGenerator
from .base import RedditBase
from .redditor import Redditor
_WikiPage = TypeVar("_WikiPage")
Reddit = TypeVar("Reddit")
Subreddit = TypeVar("Subreddit")
class WikiPageModeration:
"""Provides a set of moderation functions for a WikiPage.
For example, to add ``spez`` as an editor on the wikipage ``praw_test``
try:
.. code-block:: python
reddit.subreddit('test').wiki['praw_test'].mod.add('spez')
"""
def __init__(self, wikipage: _WikiPage):
"""Create a WikiPageModeration instance.
:param wikipage: The wikipage to moderate.
"""
self.wikipage = wikipage
def add(self, redditor: Redditor):
"""Add an editor to this WikiPage.
:param redditor: A redditor name (e.g., ``'spez'``) or
:class:`~.Redditor` instance.
To add ``'spez'`` as an editor on the wikipage ``'praw_test'`` try:
.. code-block:: python
reddit.subreddit('test').wiki['praw_test'].mod.add('spez')
"""
data = {"page": self.wikipage.name, "username": str(redditor)}
url = API_PATH["wiki_page_editor"].format(
subreddit=self.wikipage.subreddit, method="add"
)
self.wikipage._reddit.post(url, data=data)
def remove(self, redditor: Redditor):
"""Remove an editor from this WikiPage.
:param redditor: A redditor name (e.g., ``'spez'``) or
:class:`~.Redditor` instance.
To remove ``'spez'`` as an editor on the wikipage ``'praw_test'`` try:
.. code-block:: python
reddit.subreddit('test').wiki['praw_test'].mod.remove('spez')
"""
data = {"page": self.wikipage.name, "username": str(redditor)}
url = API_PATH["wiki_page_editor"].format(
subreddit=self.wikipage.subreddit, method="del"
)
self.wikipage._reddit.post(url, data=data)
def settings(self) -> Dict[str, Any]:
"""Return the settings for this WikiPage."""
url = API_PATH["wiki_page_settings"].format(
subreddit=self.wikipage.subreddit, page=self.wikipage.name
)
return self.wikipage._reddit.get(url)["data"]
def update(
self, listed: bool, permlevel: int, **other_settings: Any
) -> Dict[str, Any]:
"""Update the settings for this WikiPage.
:param listed: (boolean) Show this page on page list.
:param permlevel: (int) Who can edit this page? (0) use subreddit wiki
permissions, (1) only approved wiki contributors for this page may
edit (see :meth:`.WikiPageModeration.add`), (2) only mods may edit
and view
:param other_settings: Additional keyword arguments to pass.
:returns: The updated WikiPage settings.
To set the wikipage ``'praw_test'`` in ``'/r/test'`` to mod only and
disable it from showing in the page list, try:
.. code-block:: python
reddit.subreddit('test').wiki['praw_test'].mod.update(listed=False,
permlevel=2)
"""
other_settings.update({"listed": listed, "permlevel": permlevel})
url = API_PATH["wiki_page_settings"].format(
subreddit=self.wikipage.subreddit, page=self.wikipage.name
)
return self.wikipage._reddit.post(url, data=other_settings)["data"]
class WikiPage(RedditBase):
"""An individual WikiPage object.
**Typical Attributes**
This table describes attributes that typically belong to objects of this
class. Since attributes are dynamically provided (see
:ref:`determine-available-attributes-of-an-object`), there is not a
guarantee that these attributes will always be present, nor is this list
necessarily comprehensive.
======================= ===================================================
Attribute Description
======================= ===================================================
``content_html`` The contents of the wiki page, as HTML.
``content_md`` The contents of the wiki page, as Markdown.
``may_revise`` A ``bool`` representing whether or not the
authenticated user may edit the wiki page.
``name`` The name of the wiki page.
``revision_by`` The :class:`.Redditor` who authored this
revision of the wiki page.
``revision_date`` The time of this revision, in `Unix Time`_.
``subreddit`` The :class:`.Subreddit` this wiki page belongs to.
======================= ===================================================
.. _Unix Time: https://en.wikipedia.org/wiki/Unix_time
"""
__hash__ = RedditBase.__hash__
@staticmethod
def _revision_generator(subreddit, url, generator_kwargs):
for revision in ListingGenerator(
subreddit._reddit, url, **generator_kwargs
):
if revision["author"] is not None:
revision["author"] = Redditor(
subreddit._reddit, _data=revision["author"]["data"]
)
revision["page"] = WikiPage(
subreddit._reddit, subreddit, revision["page"], revision["id"]
)
yield revision
@cachedproperty
def mod(self) -> WikiPageModeration:
"""Provide an instance of :class:`.WikiPageModeration`.
For example, to add ``spez`` as an editor on the wikipage ``praw_test``
try:
.. code-block:: python
reddit.subreddit('test').wiki['praw_test'].mod.add('spez')
"""
return WikiPageModeration(self)
def __eq__(self, other: _WikiPage):
"""Return whether the other instance equals the current."""
return (
isinstance(other, self.__class__)
and str(self).lower() == str(other).lower()
)
def __init__(
self,
reddit: Reddit,
subreddit: Subreddit,
name: str,
revision: Optional[str] = None,
_data: Optional[Dict[str, Any]] = None,
):
"""Construct an instance of the WikiPage object.
:param revision: A specific revision ID to fetch. By default, fetches
the most recent revision.
"""
self.name = name
self._revision = revision
self.subreddit = subreddit
super().__init__(reddit, _data=_data)
def __repr__(self) -> str:
"""Return an object initialization representation of the instance."""
return "{}(subreddit={!r}, name={!r})".format(
self.__class__.__name__, self.subreddit, self.name
)
def __str__(self) -> str:
"""Return a string representation of the instance."""
return "{}/{}".format(self.subreddit, self.name)
def _fetch_info(self):
return (
"wiki_page",
{"subreddit": self.subreddit, "page": self.name},
{"v": self._revision} if self._revision else None,
)
def _fetch_data(self):
name, fields, params = self._fetch_info()
path = API_PATH[name].format(**fields)
return self._reddit.request("GET", path, params)
def _fetch(self):
data = self._fetch_data()
data = data["data"]
if data["revision_by"] is not None:
data["revision_by"] = Redditor(
self._reddit, _data=data["revision_by"]["data"]
)
self.__dict__.update(data)
self._fetched = True
def edit(
self, content: str, reason: Optional[str] = None, **other_settings: Any
):
"""Edit this WikiPage's contents.
:param content: The updated Markdown content of the page.
:param reason: (Optional) The reason for the revision.
:param other_settings: Additional keyword arguments to pass.
For example, to replace the first wiki page of ``r/test`` with the
phrase ``test wiki page``:
.. code-block:: python
page = next(iter(reddit.subreddit('test').wiki))
page.edit(content='test wiki page')
"""
other_settings.update(
{"content": content, "page": self.name, "reason": reason}
)
self._reddit.post(
API_PATH["wiki_edit"].format(subreddit=self.subreddit),
data=other_settings,
)
def revision(self, revision: str):
"""Return a specific version of this page by revision ID.
To view revision ``[ID]`` of ``'praw_test'`` in ``'/r/test'``:
.. code-block:: python
page = reddit.subreddit('test').wiki['praw_test'].revision('[ID]')
"""
return WikiPage(
self.subreddit._reddit, self.subreddit, self.name, revision
)
def revisions(
self, **generator_kwargs: Union[str, int, Dict[str, str]]
) -> Generator[_WikiPage, None, None]:
"""Return a :class:`.ListingGenerator` for page revisions.
Additional keyword arguments are passed in the initialization of
:class:`.ListingGenerator`.
To view the wiki revisions for ``'praw_test'`` in ``'/r/test'`` try:
.. code-block:: python
for item in reddit.subreddit('test').wiki['praw_test'].revisions():
print(item)
To get :class:`.WikiPage` objects for each revision:
.. code-block:: python
for item in reddit.subreddit('test').wiki['praw_test'].revisions():
print(item['page'])
"""
url = API_PATH["wiki_page_revisions"].format(
subreddit=self.subreddit, page=self.name
)
return self._revision_generator(self.subreddit, url, generator_kwargs)
|
bsd-2-clause
| 416,485,246,232,412,200 | 33.482759 | 79 | 0.5675 | false |
tokenly/bitsplit-server
|
util/entity.py
|
1
|
1359
|
"""
ENTITY
"""
class Entity(object):
"""
ENTITY
A wrapped for a data dictionary. Allows interface with data, but also
allows extending to allow methods to manipulate data.
"""
def __init__(self, collection, data=None):
if not data:
data = {}
super(Entity, self).__setattr__('data', data)
super(Entity, self).__setattr__('collection', collection)
def get(self, key, default=None):
""" Get an attribute from the data dictionary, with a fall-back. """
return self.data.get(key, default)
def __eq__(self, other):
""" Is this entity equal to another? """
return other and self.id == other.id
def __neq__(self, other):
""" Is this entity inequal to another? """
return not self.__eq__(other)
def __getattr__(self, attr):
""" Get a data dictionary attribute. """
if attr in self.data:
return self.data[attr]
else:
return None
def __getitem__(self, attr):
""" Override of __getattr__ """
return self.__getattr__(attr)
def __setattr__(self, attr, value):
""" Set a data dictionary attribute. """
self.data[attr] = value
def __setitem__(self, attr, value):
""" Override of __setattr__ """
return self.__setattr__(attr, value)
|
gpl-2.0
| -3,386,305,237,740,637,000 | 27.3125 | 76 | 0.555556 | false |
futurepr0n/Books-solutions
|
Python-For-Everyone-Horstmann/Chapter6-Lists/P6.30.py
|
1
|
1303
|
# Write a function
# def mergeSorted(a, b)
# that merges two sorted lists, producing a new sorted list. Keep an index into each list,
# indicating how much of it has been processed already. Each time, append the small-
# est unprocessed element from either list, then advance the index. For example, if a is
# 1 4 9 16
# and b is
# 4 7 9 9 11
# then mergeSorted returns a new list containing the values
# 1 4 4 7 9 9 9 11 16
# FUNCTIONS
def mergeSorted(listA, listB):
i, j, k = 0, 0, 0
outputList = [ ]
while i < len(listA) and j < len(listB):
if listA[i] < listB[j]:
outputList.append(listA[i])
i += 1
else:
outputList.append(listB[j])
j += 1
k += 1
if i < len(listA):
while i < len(listA):
outputList.append(listA[i])
i += 1
k += 1
elif j < len(listB):
while j < len(listB):
outputList.append(listB[j])
j += 1
k += 1
return outputList
# main
def main():
exampleListA = [ 1, 4, 9, 16 ]
exampleListB = [ 4, 7, 9, 9, 11 ]
print(exampleListA)
print(exampleListB)
print("After merge sort")
print(mergeSorted(exampleListA, exampleListB))
# PROGRAM RUN
main()
|
mit
| -8,833,145,592,245,625,000 | 23.603774 | 90 | 0.558711 | false |
googleinterns/contextual-query-rewrites
|
utils/test_translate_utils.py
|
1
|
1527
|
import unittest
import translate_utils
import os
import shutil
class TestRawData(unittest.TestCase):
def test_translate_text(self):
file = open("dummy_file.txt", 'w')
file.write(" Ram \n")
file.close()
translate_utils.translate_data("dummy_file.txt", "dummy_output.txt")
data = open("dummy_output.txt", 'r').readlines()
print(data)
expected = ['राम\n']
self.assertEqual(data, expected)
def test_translate_text_batch(self):
file = open("dummy_file.txt", 'w')
file.write(" Ram \n")
file.close()
translate_utils.translate_data("dummy_file.txt",
"dummy_output.txt",
threshold=0)
data = open("dummy_output.txt", 'r').readlines()
print(data)
expected = ['राम\n']
self.assertEqual(data, expected)
def test_translate_text_dir(self):
        shutil.rmtree('./test_dummy_dir', ignore_errors=True)  # tolerate missing dir on first run
os.mkdir('./test_dummy_dir')
file = open("./test_dummy_dir/dummy_file.txt", 'w')
file.write(" Ram \n")
file.close()
translate_utils.translate_data("./test_dummy_dir",
"dummy_output.txt",
threshold=0)
data = open("dummy_output.txt", 'r').readlines()
print(data)
expected = ['राम\n']
self.assertEqual(data, expected)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| 2,116,747,818,563,910,000 | 28.019231 | 76 | 0.5222 | false |
yro/veda_worker
|
tests/test_intake.py
|
1
|
2088
|
import os
import shutil
import sys
import unittest
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from veda_worker.__init__ import VedaWorker
from veda_worker.abstractions import Video, Encode
from veda_worker.config import WorkerSetup
"""
file intake test
"""
class TestIntake(unittest.TestCase):
def setUp(self):
self.WS = WorkerSetup()
if os.path.exists(self.WS.instance_yaml):
self.WS.run()
self.settings = self.WS.settings_dict
self.encode_profile = 'desktop_mp4'
self.veda_id = 'XXXXXXXX2016-V00TEST'
self.jobid = 'xx4xx'
self.VW = VedaWorker(
veda_id=self.veda_id,
encode_profile=self.encode_profile,
jobid=self.jobid
)
def test_intake(self):
if not os.path.exists(self.WS.instance_yaml):
self.assertTrue(True)
return None
# copied from __init__
self.VW.VideoObject = Video(
veda_id=self.VW.veda_id
)
self.VW.VideoObject.activate()
self.assertTrue(self.VW.VideoObject.valid)
self.VW.settings = self.settings
self.VW._engine_intake()
print self.VW.VideoObject
self.assertTrue(self.VW.VideoObject.valid)
self.assertTrue(
os.path.exists(
os.path.join(
self.VW.workdir,
self.VW.source_file
)
)
)
self.assertTrue(self.VW.VideoObject.valid)
@unittest.skip("not implemented")
def tearDown(self):
if self.jobid is not None:
shutil.rmtree(self.VW.workdir)
else:
os.remove(
os.path.join(
self.VW.workdir,
self.VW.output_file
)
)
os.remove(
os.path.join(
self.VW.workdir,
self.VW.source_file
)
)
def main():
unittest.main()
if __name__ == '__main__':
sys.exit(main())
|
gpl-3.0
| 4,976,633,350,473,498,000 | 23.857143 | 76 | 0.533525 | false |
russelldavies/envparse
|
tests/test_casts.py
|
1
|
4940
|
# -*- coding: utf-8 -*-
import pytest
from envparse import Env, env, ConfigurationError, urlparse
env_vars = dict(
BLANK='',
STR='foo',
INT='42',
FLOAT='33.3',
BOOL_TRUE='1',
BOOL_FALSE='0',
PROXIED='{{STR}}',
LIST_STR='foo,bar',
LIST_STR_WITH_SPACES=' foo, bar',
LIST_INT='1,2,3',
LIST_INT_WITH_SPACES=' 1, 2,3',
DICT_STR='key1=val1, key2=val2',
DICT_INT='key1=1, key2=2',
JSON='{"foo": "bar", "baz": [1, 2, 3]}',
URL='https://example.com/path?query=1',
)
@pytest.fixture(autouse=True, params=['environ', 'envfile'])
def environ(monkeypatch, request):
"""Setup environment with sample variables."""
if request.param == 'environ':
for key, val in env_vars.items():
monkeypatch.setenv(key, val)
elif request.param == 'envfile':
env.read_envfile('tests/envfile')
# Helper function
def assert_type_value(cast, expected, result):
assert cast == type(result)
assert expected == result
def test_var_not_present():
with pytest.raises(ConfigurationError):
env('NOT_PRESENT')
def test_var_not_present_with_default():
default_val = 'default val'
    assert default_val == env('NOT_PRESENT', default=default_val)
def test_default_none():
assert_type_value(type(None), None, env('NOT_PRESENT', default=None))
def test_implicit_nonbuiltin_type():
with pytest.raises(AttributeError):
env.foo('FOO')
def test_str():
expected = str(env_vars['STR'])
assert_type_value(str, expected, env('STR'))
assert_type_value(str, expected, env.str('STR'))
def test_int():
expected = int(env_vars['INT'])
assert_type_value(int, expected, env('INT', cast=int))
assert_type_value(int, expected, env.int('INT'))
def test_float():
expected = float(env_vars['FLOAT'])
assert_type_value(float, expected, env.float('FLOAT'))
def test_bool():
assert_type_value(bool, True, env.bool('BOOL_TRUE'))
assert_type_value(bool, False, env.bool('BOOL_FALSE'))
def test_list():
list_str = ['foo', 'bar']
assert_type_value(list, list_str, env('LIST_STR', cast=list))
assert_type_value(list, list_str, env.list('LIST_STR'))
assert_type_value(list, list_str, env.list('LIST_STR_WITH_SPACES'))
list_int = [1, 2, 3]
assert_type_value(list, list_int, env('LIST_INT', cast=list,
subcast=int))
assert_type_value(list, list_int, env.list('LIST_INT', subcast=int))
assert_type_value(list, list_int, env.list('LIST_INT_WITH_SPACES',
subcast=int))
assert_type_value(list, [], env.list('BLANK', subcast=int))
def test_dict():
dict_str = dict(key1='val1', key2='val2')
assert_type_value(dict, dict_str, env.dict('DICT_STR'))
assert_type_value(dict, dict_str, env('DICT_STR', cast=dict))
dict_int = dict(key1=1, key2=2)
assert_type_value(dict, dict_int, env('DICT_INT', cast=dict,
subcast=int))
assert_type_value(dict, dict_int, env.dict('DICT_INT', subcast=int))
assert_type_value(dict, {}, env.dict('BLANK'))
def test_json():
expected = {'foo': 'bar', 'baz': [1, 2, 3]}
assert_type_value(dict, expected, env.json('JSON'))
def test_url():
url = urlparse.urlparse('https://example.com/path?query=1')
assert_type_value(url.__class__, url, env.url('URL'))
def proxied_value():
assert_type_value(str, 'bar', env('PROXIED'))
def test_preprocessor():
assert_type_value(str, 'FOO', env('STR', preprocessor=lambda
v: v.upper()))
def test_postprocessor(monkeypatch):
"""
Test a postprocessor which turns a redis url into a Django compatible
cache url.
"""
redis_url = 'redis://:redispass@127.0.0.1:6379/0'
monkeypatch.setenv('redis_url', redis_url)
expected = {'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': '127.0.0.1:6379:0',
'OPTIONS': {'PASSWORD': 'redispass'}}
def django_redis(url):
return {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': '{}:{}:{}'.format(url.hostname, url.port, url.path.strip('/')),
'OPTIONS': {'PASSWORD': url.password}}
assert_type_value(dict, expected, env.url('redis_url',
postprocessor=django_redis))
def test_schema():
env = Env(STR=str, STR_DEFAULT=dict(cast=str, default='default'),
INT=int, LIST_STR=list, LIST_INT=dict(cast=list, subcast=int))
assert_type_value(str, 'foo', env('STR'))
assert_type_value(str, 'default', env('STR_DEFAULT'))
assert_type_value(int, 42, env('INT'))
assert_type_value(list, ['foo', 'bar'], env('LIST_STR'))
assert_type_value(list, [1, 2, 3], env('LIST_INT'))
# Overrides
assert_type_value(str, '42', env('INT', cast=str))
assert_type_value(str, 'manual_default', env('STR_DEFAULT',
default='manual_default'))
|
mit
| 3,754,972,878,410,318,300 | 30.069182 | 87 | 0.608097 | false |
Melnick/Cnake
|
core/functions.py
|
1
|
1074
|
import os
import shutil
from unicurses import *
from appdirs import *
SEPARATOR = (os.name in ['ce', 'nt', 'dos']) and '\\' or '/';
def add_vector(a, b):
return (a[0] + b[0], a[1] + b[1]);
def w_log( path, attr = "a+", string = '' ):
dir_path = user_log_dir("cnake");
path = "{}{}{}".format(dir_path, SEPARATOR, path);
if (not os.path.isdir( dir_path )):
os.makedirs( dir_path );
f = open(path, attr);
f.write(string);
f.close();
def rm_log_dir():
path = user_log_dir("cnake");
# try 0
# for i in os.walk(path):
# # print(i)
# for j in i[2]:
# file = "{}{}{}".format(i[0], SEPARATOR, j);
# if os.path.isfile(file):
# os.unlink(file);
# # print(file)
# try 1
# if (os.path.isdir( path )):
# shutil.rmtree( '{0}{1}..{1}..{1}'.format(path, SEPARATOR) );
# try 2
path = "{}{}".format(path[:path.find('cnake'):], "cnake");
if (os.path.isdir( path )):
shutil.rmtree( path );
def add_win( win ):
return newwin(win['h'], win['w'], win["start_y"], win["start_x"]);
def del_win( win ):
wclear(win);
wrefresh(win)
delwin(win);
|
mit
| 7,789,643,010,407,323,000 | 18.545455 | 67 | 0.557728 | false |
kapy2010/treeherder
|
tests/seta/test_job_priorities.py
|
1
|
1445
|
import datetime
import pytest
from mock import patch
from treeherder.seta.job_priorities import (SetaError,
seta_job_scheduling)
@pytest.mark.django_db()
@patch('treeherder.seta.job_priorities.SETAJobPriorities._validate_request', return_value=None)
@patch('treeherder.seta.job_priorities.list_runnable_jobs')
def test_gecko_decision_task(runnable_jobs_list, validate_request,
test_repository, runnable_jobs_data,
all_job_priorities_stored):
'''
When the Gecko decision task calls SETA it will return all jobs that are less likely to catch
a regression (low value jobs).
'''
runnable_jobs_list.return_value = runnable_jobs_data
jobs = seta_job_scheduling(project=test_repository.name,
build_system_type='taskcluster')
assert len(jobs['jobtypes'][str(datetime.date.today())]) == 1
def test_gecko_decision_task_invalid_repo():
'''
When the Gecko decision task calls SETA it will return all jobs that are less likely to catch
a regression (low value jobs).
'''
with pytest.raises(SetaError) as exception_info:
seta_job_scheduling(project='mozilla-repo-x', build_system_type='taskcluster')
assert exception_info.value.message == "The specified project repo 'mozilla-repo-x' " \
"is not supported by SETA."
|
mpl-2.0
| -69,469,573,831,768,330 | 40.285714 | 97 | 0.649135 | false |
jgerigmeyer/jquery-django-superformset
|
demo/demo/settings.py
|
1
|
2111
|
"""
Django settings for demo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'h@&4y-z01hm3wksscqi#o_cmfjm^a&$m0m&jq(o=#8r3x#ldr7'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'demo',
'demo.todos',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'demo.urls'
WSGI_APPLICATION = 'demo.wsgi.application'
TEMPLATE_DIRS = os.path.join(BASE_DIR, 'templates')
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'US/Eastern'
USE_I18N = True
USE_L10N = False
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
|
bsd-3-clause
| -306,492,205,986,182,200 | 23.264368 | 71 | 0.717196 | false |
xupingmao/xnote
|
handlers/fs/fs_tools.py
|
1
|
1725
|
# -*- coding:utf-8 -*-
# @author xupingmao <578749341@qq.com>
# @since 2020/12/08 01:46:35
# @modified 2021/07/04 17:09:54
# -*- coding:utf-8 -*-
# @since 2018-11-22 00:46:26
import os
import re
import math
import time
import web
import xconfig
import xutils
import xauth
import xmanager
import xtables
import random
from xutils import cacheutil
from xutils.htmlutil import *
from xutils import dbutil
from xtemplate import BasePlugin
HEADER = """
<!-- 插件头部 -->
<div class="card">
<div class="grid-title btn-line-height">
<span>{{plugin.title}}</span>
<div class="float-right">
<a class="btn btn-default" href="/fs_list">收藏夹</a>
<a class="btn btn-default" href="/fs_tools">工具</a>
</div>
</div>
</div>
{% include plugin/header/plugin_category.html %}
"""
HTML = '''
<div class="card">
{% for note in notes %}
<a class="list-link" href="{{note.url}}">
<span>{{note.title}}</span>
<div class="float-right">
{% if note.visit_cnt != None %}
<i class="fa fa-eye-o"></i>
<span class="plugin-right-span">热度: {{note.visit_cnt}}</span>
{% end %}
<i class="fa fa-chevron-right"></i>
</div>
</a>
{% end %}
</div>
'''
class Main(BasePlugin):
title = u"文件工具"
category = "dir"
rows = 0
editable = False
def handle(self, input):
user = xauth.current_name()
notes = xmanager.find_plugins("dir")
xmanager.add_visit_log(user, "/fs_tools")
self.writeheader(HEADER, plugin = self, plugin_category = "dir")
self.writetemplate(HTML, notes = notes)
xurls = (
r"/fs_tools", Main
)
|
gpl-3.0
| 5,460,477,757,280,500,000 | 21.905405 | 77 | 0.578171 | false |
fxia22/pointGAN
|
show_ae.py
|
1
|
1680
|
from __future__ import print_function
from show3d_balls import *
import argparse
import os
import random
import numpy as np
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torchvision.utils as vutils
from torch.autograd import Variable
from datasets import PartDataset
from pointnet import PointGen, PointGenC, PointNetAE
import torch.nn.functional as F
import matplotlib.pyplot as plt
#showpoints(np.random.randn(2500,3), c1 = np.random.uniform(0,1,size = (2500)))
parser = argparse.ArgumentParser()
parser.add_argument('--model', type=str, default = '', help='model path')
opt = parser.parse_args()
print (opt)
ae = PointNetAE(num_points = 2048)
ae.load_state_dict(torch.load(opt.model))
dataset = PartDataset(root = 'shapenetcore_partanno_segmentation_benchmark_v0', class_choice = ['Chair'], classification = True, npoints = 2048)
dataloader = torch.utils.data.DataLoader(dataset, batch_size=64,
shuffle=True, num_workers=1)
ae.cuda()
i,data = enumerate(dataloader, 0).next()
points, _ = data
points = Variable(points)
bs = points.size()[0]
points = points.transpose(2,1)
points = points.cuda()
gen = ae(points)
point_np = gen.transpose(2,1).cpu().data.numpy()
#showpoints(points.transpose(2,1).cpu().data.numpy())
showpoints(point_np)
#sim_noise = Variable(torch.randn(1000, 100))
#points = gen(sim_noise)
#point_np = points.transpose(2,1).data.numpy()
#print(point_np.shape)
#np.savez('gan.npz', points = point_np)
|
mit
| -7,822,991,694,803,677,000 | 26.096774 | 144 | 0.727381 | false |
dguerri/git-upstream-old
|
git_upstream/tests/base.py
|
1
|
5187
|
# Copyright 2010-2011 OpenStack Foundation
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import tempfile
import fixtures
import git
import testtools
LOREM_IPSUM = """\
Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy
nibh euismod tincidunt ut laoreet dolore magna aliquam erat volutpat. Ut wisi
enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit lobortis
nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure dolor in
hendrerit in vulputate velit esse molestie consequat, vel illum dolore eu
feugiat nulla facilisis at vero eros et accumsan et iusto odio dignissim qui
blandit praesent luptatum zzril delenit augue duis dolore te feugait nulla
facilisi.
Ut wisi enim ad minim veniam, quis nostrud exerci tation ullamcorper suscipit
lobortis nisl ut aliquip ex ea commodo consequat. Duis autem vel eum iriure
dolor in hendrerit in vulputate velit esse molestie consequat, vel illum
dolore eu feugiat nulla facilisis at vero eros et accumsan et iusto odio
dignissim qui blandit praesent luptatum zzril delenit augue duis dolore te
feugait nulla facilisi. Lorem ipsum dolor sit amet, consectetuer adipiscing
elit, sed diam nonummy nibh euismod tincidunt ut laoreet dolore magna aliquam
erat volutpat.
Duis autem vel eum iriure dolor in hendrerit in vulputate velit esse molestie
consequat, vel illum dolore eu feugiat nulla facilisis at vero eros et
accumsan et iusto odio dignissim qui blandit praesent luptatum zzril delenit
augue duis dolore te feugait nulla facilisi. Lorem ipsum dolor sit amet,
consectetuer adipiscing elit, sed diam nonummy nibh euismod tincidunt ut
laoreet dolore magna aliquam erat volutpat. Ut wisi enim ad minim veniam, quis
nostrud exerci tation ullamcorper suscipit lobortis nisl ut aliquip ex ea
commodo consequat."""
class DiveDir(fixtures.Fixture):
"""Dive into given directory and return back on cleanup.
:ivar path: The target directory.
"""
def __init__(self, path):
self.path = path
def setUp(self):
super(DiveDir, self).setUp()
self.addCleanup(os.chdir, os.getcwd())
os.chdir(self.path)
class GitRepo(fixtures.Fixture):
"""Create an empty git repo in which to operate."""
def __init__(self):
self.repo = None
self.path = ''
self._file_list = set()
def setUp(self):
super(GitRepo, self).setUp()
tempdir = fixtures.TempDir()
self.addCleanup(tempdir.cleanUp)
tempdir.setUp()
self.path = os.path.join(tempdir.path, 'git')
os.mkdir(self.path)
g = git.Git(self.path)
g.init()
self.repo = git.Repo(self.path)
self.repo.git.config('user.email', 'user@example.com')
self.repo.git.config('user.name', 'Example User')
self._create_file_commit()
def _create_file(self, contents=None):
if not contents:
contents = LOREM_IPSUM
# always want to ensure the files added to the repo are unique no
# matter which branch they are added to, as otherwise there may
# be conflicts caused by replaying local changes and performing
# merges
while True:
tmpfile = tempfile.NamedTemporaryFile(dir=self.repo.working_dir,
delete=False)
if tmpfile.name not in self._file_list:
self._file_list.add(tmpfile.name)
break
tmpfile.close()
            os.remove(tmpfile.name)
tmpfile.write(contents)
tmpfile.close()
return tmpfile.name
def _create_file_commit(self, change_id=None):
filename = self._create_file()
self.repo.git.add(filename)
message = "Adding %s" % os.path.basename(filename)
if change_id:
message = message + "\n\nChange-Id: %s" % change_id
self.repo.git.commit(m=message)
def add_commits(self, num=1, ref="HEAD", change_ids=None):
"""Create the given number of commits using generated files"""
if ref != "HEAD":
self.repo.git.checkout(ref)
        change_ids = change_ids or []
        num = max(num, len(change_ids))
        ids = list(change_ids) + [None] * (num - len(change_ids))
for x in range(num):
self._create_file_commit(ids[x])
class BaseTestCase(testtools.TestCase):
"""Base Test Case for all tests."""
def setUp(self):
super(BaseTestCase, self).setUp()
self.testrepo = self.useFixture(GitRepo())
repo_path = self.testrepo.path
self.useFixture(DiveDir(repo_path))
self.repo = self.testrepo.repo
|
apache-2.0
| 6,074,584,528,144,438,000 | 36.316547 | 78 | 0.685753 | false |
OTL/genrb
|
scripts/genmsg_rb.py
|
1
|
2011
|
#! /usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2012, Takashi Ogura
#
# based on
#
# Copyright (c) 2008, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Revision $Id:$
"""
ROS message source code generation for Ruby
Converts ROS .msg files in a package into Ruby source code implementations.
"""
import os
import sys
import genrb.generator
import genrb.genrb_main
if __name__ == "__main__":
genrb.genrb_main.genmain(sys.argv, 'genmsg_rb.py', genrb.generator.MsgGenerator())
|
bsd-3-clause
| 3,096,508,180,594,001,400 | 37.673077 | 86 | 0.761313 | false |
google-research/google-research
|
rl_repr/tests/train_eval_offline_test.py
|
1
|
1293
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Makes sure that batch_rl/train_eval_offline.py runs without error."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import flags
import tensorflow.compat.v2 as tf
from rl_repr.batch_rl import train_eval_offline
class TrainEvalOfflineTest(tf.test.TestCase):
def test_train_eval_offline(self):
flags.FLAGS.embed_learner = 'acl'
flags.FLAGS.state_embed_dim = 64
flags.FLAGS.embed_pretraining_steps = 10
flags.FLAGS.bc_pretraining_steps = 10
flags.FLAGS.eval_interval = 10
flags.FLAGS.num_updates = 30
train_eval_offline.main(None)
if __name__ == '__main__':
tf.test.main()
|
apache-2.0
| -7,335,390,548,113,482,000 | 31.325 | 74 | 0.738592 | false |
mentionllc/django-invitation
|
invitation/urls.py
|
1
|
1346
|
from django.conf import settings
from django.conf.urls import *
from django.views.generic import TemplateView
if getattr(settings, 'INVITATION_USE_ALLAUTH', False):
from allauth.account.forms import BaseSignupForm as RegistrationFormTermsOfService
reg_backend = 'allauth.account.auth_backends.AuthenticationBackend'
else:
from registration.forms import RegistrationFormTermsOfService
reg_backend = 'registration.backends.default.DefaultBackend'
from invitation.views import invite, invited, register, send_bulk_invitations, token
urlpatterns = patterns('',
url(r'^invite/complete/$',
TemplateView.as_view(template_name='invitation/invitation_complete.html'),
name='invitation_complete'),
url(r'^invite/$',
invite,
name='invitation_invite'),
url(r'^invite/bulk/$',
send_bulk_invitations,
name='invitation_invite_bulk'),
url(r'^invited/(?P<invitation_key>\w+)&(?P<invitation_recipient>\S+@\S+)?/$',
invited,
name='invitation_invited'),
url(r'^register/$',
register,
{ 'backend': reg_backend },
name='registration_register'),
url(r'^token/(?P<key>\w+)/$',
token,
name='invitation_token'),
)
|
bsd-3-clause
| -9,210,309,487,171,473,000 | 37.457143 | 90 | 0.621842 | false |
eugeneai/DME
|
src/icc/dme/fd/Exploration2.py
|
1
|
1877
|
from Numeric import *
from mutils import *
from biomass import *
from free import *
from spelper import *
from logging import *
def open1(name, mode):
f=open(name, mode)
f.write("from mutils import *\nfrom Numeric import *\n")
return f
next_data={
"low":(0,"middle"),
"middle":(0,"high"),
"high":(1,"low"),
}
def get_next_descr(prev):
answer=[]
p=list(prev)
p.reverse()
carry=1
for d in p:
if carry:
(carry, next)=next_data[d]
else:
next=d
answer.append(next)
if carry:
#print "No more scenarios"
raise RuntimeError, "no more scenarios"
answer.reverse()
return tuple(answer)
def get_name(name, descr):
return name+"_".join(list(descr))
def get_first_scen(name):
descr=("low","low","low","low")
return (get_name(name,descr),descr)
def get_next_scen(name,prev_descr):
descr=get_next_descr(prev_descr)
return (get_name(name,descr), descr)
LAST=50
def biomass (name):
name="biomass_"+name
val=globals()[name]
print val.V[LAST],
def free(name):
name="free_"+name
val=globals()[name]
print val.S[LAST],
def spel_per(name):
name="spelper_"+name
val=globals()[name]
print val.S[LAST],
def logging(name):
name="logging_"+name
val=globals()[name]
#print val
v=val.V
vv=v.logged_101 + v.logged_102 + v.logged_103 + v.logged_104+v.logged_125 + v.logged_124
s=0
for vi in vv:
s+=vi
print s,
def do_etwas(name, descr):
print name, # the variable part of the variable
biomass (name)
free(name)
spel_per(name)
logging(name)
print
def main():
(name, descr)=get_first_scen('')
while 1:
do_etwas(name, descr)
try:
(name, descr)=get_next_scen('', descr)
except RuntimeError:
return
if __name__=="__main__":
import sys
f=open("_result.txt","w")
stdout=sys.stdout
sys.stdout=f
main()
print
print "Legend:"
print "Senario_name V_biomass S_free S_spel_per Sum_logging"
sys.stdout=stdout
f.close()
|
gpl-3.0
| -9,175,346,103,418,292,000 | 16.707547 | 89 | 0.664891 | false |
SoftwareKing/zstack-woodpecker
|
zstackwoodpecker/zstackwoodpecker/operations/tag_operations.py
|
1
|
1832
|
'''
Tag related operations
@author: Youyk
'''
import apibinding.api_actions as api_actions
import zstackwoodpecker.test_util as test_util
import account_operations
import apibinding.inventory as inventory
def create_system_tag(resourceType, resourceUuid, tag, session_uuid=None):
action = api_actions.CreateSystemTagAction()
action.timeout = 30000
action.resourceType = resourceType
action.resourceUuid = resourceUuid
action.tag = tag
evt = account_operations.execute_action_with_session(action, session_uuid)
test_util.action_logger('Create System Tag [uuid:] %s for [tag:] %s' % \
(evt.inventory.uuid, tag))
return evt.inventory
def create_user_tag(resourceType, resourceUuid, tag, session_uuid=None):
action = api_actions.CreateUserTagAction()
action.timeout = 30000
action.resourceType = resourceType
action.resourceUuid = resourceUuid
action.tag = tag
evt = account_operations.execute_action_with_session(action, session_uuid)
test_util.action_logger('Create User Tag [uuid:] %s for [tag:] %s' % \
(evt.inventory.uuid, tag))
return evt.inventory
def delete_tag(tag_uuid, session_uuid=None):
action = api_actions.DeleteTagAction()
action.uuid = tag_uuid
action.timeout = 30000
test_util.action_logger('Delete Tag [uuid:] %s' % tag_uuid)
evt = account_operations.execute_action_with_session(action, session_uuid)
return evt
def update_system_tag(tag_uuid, tag, session_uuid = None):
action = api_actions.UpdateSystemTagAction()
action.uuid = tag_uuid
action.tag = tag
test_util.action_logger('Update Tag [uuid:] %s to %s' % (tag_uuid, tag))
evt = account_operations.execute_action_with_session(action, session_uuid)
return evt.inventory
|
apache-2.0
| 8,610,459,213,184,401,000 | 35.387755 | 78 | 0.694323 | false |
xiangke/pycopia
|
aid/pycopia/words.py
|
1
|
1795
|
#!/usr/bin/python2.4
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
#
# $Id$
#
# Copyright (C) 1999-2006 Keith Dart <keith@kdart.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
"""
fetch words from the system word list, using regular expressions.
"""
import re
from pycopia import UserFile
WORDFILES = ["/usr/share/dict/words", "/usr/dict/words"]
def get_wordfile():
for fn in WORDFILES:
try:
wordfile = UserFile.open(fn, "r")
except IOError:
pass
else:
return wordfile
raise ValueError, "cannot find file of words."
def get_random_word():
"""Return a randomly selected word from dict file."""
from pycopia import sysrandom
fo = get_wordfile()
try:
point = sysrandom.randrange(fo.size)
fo.seek(point)
c = fo.read(1)
while c != '\n' and fo.tell() > 0:
fo.seek(-2, 1)
c = fo.read(1)
word = fo.readline().strip()
finally:
fo.close()
return word
def get_word_list(patt, wordfile=None):
"""Yield words matching pattern (like grep)."""
if not wordfile:
wordfile = get_wordfile()
wre = re.compile(patt)
for line in wordfile:
test = line.strip()
if wre.match(test):
yield test
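# Example usage (a sketch; assumes a system word list such as
# /usr/share/dict/words is available):
#   print get_random_word()
#   for word in get_word_list(r"^zym"):
#       print word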
|
lgpl-2.1
| 6,614,251,274,612,924,000 | 25.791045 | 71 | 0.627855 | false |
aleksandar-mitrevski/robot_simulation
|
localisation/scripts/motion_model.py
|
1
|
2619
|
from math import cos, sin, atan2, exp, sqrt, pi
from random import uniform
from copy import copy
from pose import Pose
from velocity import Velocity
from filter_parameters import MotionModelNoiseParameters
class MotionModel(object):
'''Defines a utility for sampling a motion model.
Author -- Aleksandar Mitrevski
'''
def __init__(self, noise_params):
self.noise_params = copy(noise_params)
def sample_motion_model(self, pose, motion_command):
'''Samples the robot's motion model given a pose and a motion command.
The algorithm follows the pseudo code of 'sample_motion_model_odometry' in 'Probabilistic Robotics', chapter 5.
'''
delta_x = 0.
delta_y = 0.
delta_heading = 0.
if abs(motion_command.linear_x) > 0:
delta_x = motion_command.linear_x * cos(pose.heading)
delta_y = motion_command.linear_x * sin(pose.heading)
else:
delta_heading = motion_command.angular
ideal_new_pose = Pose(pose.x + delta_x, pose.y + delta_y, pose.heading + delta_heading)
delta_rot1 = atan2(ideal_new_pose.y - pose.y, ideal_new_pose.x - pose.x) - pose.heading
delta_trans = sqrt((ideal_new_pose.x - pose.x) * (ideal_new_pose.x - pose.x) + (ideal_new_pose.y - pose.y) * (ideal_new_pose.y - pose.y))
delta_rot2 = ideal_new_pose.heading - pose.heading - delta_rot1
delta_rot1_prime = delta_rot1 + self.sample_normal_noise(self.noise_params.alpha1 * delta_rot1 * delta_rot1 + self.noise_params.alpha2 * delta_trans * delta_trans)
delta_trans_prime = delta_trans + self.sample_normal_noise(self.noise_params.alpha3 * delta_trans * delta_trans + self.noise_params.alpha4 * delta_rot1 * delta_rot1 + self.noise_params.alpha4 * delta_rot2 * delta_rot2)
delta_rot2_prime = delta_rot2 + self.sample_normal_noise(self.noise_params.alpha1 * delta_rot2 * delta_rot2 + self.noise_params.alpha2 * delta_trans * delta_trans)
new_pose = Pose()
new_pose.x = pose.x - delta_trans_prime * cos(pose.heading + delta_rot1_prime)
new_pose.y = pose.y - delta_trans_prime * sin(pose.heading + delta_rot1_prime)
new_pose.heading = pose.heading + delta_rot1_prime + delta_rot2_prime
return new_pose
def sample_normal_noise(self, variance):
'''Returns a sample from a zero-mean normal distribution with the given variance.
'''
sample_sum = 0.
std_dev = sqrt(variance)
for i in xrange(12):
sample_sum = sample_sum + uniform(-std_dev, std_dev)
return 0.5 * sample_sum
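# Example usage (a sketch; the MotionModelNoiseParameters constructor is
# assumed to accept the alpha1..alpha4 terms referenced above, and Velocity
# is assumed to expose linear_x/angular as used by sample_motion_model):
#   noise = MotionModelNoiseParameters(alpha1=0.01, alpha2=0.01,
#                                      alpha3=0.01, alpha4=0.01)
#   model = MotionModel(noise)
#   new_pose = model.sample_motion_model(Pose(0.0, 0.0, 0.0),
#                                        Velocity(linear_x=0.5, angular=0.0))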
|
mit
| 7,306,093,108,900,116,000 | 45.767857 | 226 | 0.654066 | false |
TexasLAN/texaslan.org
|
texaslan/users/migrations/0007_auto_20170115_1721.py
|
1
|
1682
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-01-15 23:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0006_auto_20160822_0840'),
]
operations = [
migrations.AddField(
model_name='user',
name='concentration',
field=models.CharField(blank=True, choices=[('CS', 'Computer Science'), ('D', 'Design'), ('B', 'Business'),
('EE', 'Electrical Engineering'), ('M', 'Math'),
('O', 'Other')], max_length=3, null=True),
),
migrations.AddField(
model_name='user',
name='gender',
field=models.CharField(blank=True, choices=[('M', 'Male'), ('F', 'Female'), ('N', 'Non - binary'),
('P', 'Prefer not to answer')], max_length=1, null=True),
),
migrations.AlterField(
model_name='user',
name='full_name',
field=models.CharField(max_length=255),
),
migrations.AlterField(
model_name='user',
name='graduation_date',
field=models.DateField(),
),
migrations.AlterField(
model_name='user',
name='nick_name',
field=models.CharField(max_length=255),
),
migrations.AlterField(
model_name='user',
name='resume',
field=models.FileField(blank=True, null=True, upload_to='resumes'),
),
]
|
mit
| 3,448,541,350,329,890,300 | 34.787234 | 119 | 0.483353 | false |
apdjustino/DRCOG_Urbansim
|
src/opus_gui/main/controllers/opus_gui_configuration.py
|
1
|
6695
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2010-2011 University of California, Berkeley, 2005-2009 University of Washington
# See opus_core/LICENSE
import os
from lxml.etree import ElementTree, SubElement
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QSplashScreen, QPixmap
from opus_core.misc import directory_path_from_opus_path
from opus_core import paths
class OpusGuiConfiguration(object):
def __init__(self):
self.application_title = 'URBANSIM' # correct default value?
self.xml_node = None
self.xml_filename = None
self.splash_screen = None
self.fonts = {'tabs': 10,
'menu': 10,
'general': 10}
self.load_latest_on_start = False
self.latest_project_filename = ''
self.load_latest_tab_on_start = True
self.latest_tab_index = ''
def load(self, filename = None, create_if_missing = True):
'''
Load the gui configuration from default directory
@param filename: Filename (String) of the XML to load. If its None, the
default gui configuration is loaded
@param create_if_missing (bool): Flag to create the destinated file if
it's missing.
'''
# Open the GUI configuration file
if filename is None:
# Try the users default configuration file
usr_conf_dir = paths.OPUS_SETTINGS_PATH
if not os.path.exists(usr_conf_dir):
os.mkdir(usr_conf_dir)
filename = os.path.join(usr_conf_dir, 'gui_config.xml')
if not os.path.exists(filename):
# Didn't have a custom gui-configuration -- copy the default one
# into the location before loading it
print('Warning -- did not find GUI configuration file %s.'%
filename)
if not create_if_missing:
print('Not loading any GUI configuration file')
return
print('Copying the default GUI configuration to %s'% filename)
default_config_dir = directory_path_from_opus_path('opus_gui.main')
default_config_filename = os.path.join(default_config_dir,
'default_gui_configuration.xml')
# open the file and write it to the destination
try:
gui_config_file = open(default_config_filename)
user_config_file = open(filename, 'w')
user_config_file.write(''.join(gui_config_file.readlines()))
user_config_file.close()
gui_config_file.close()
# Clean up namespace
del user_config_file, gui_config_file
except IOError, ex:
print('Failed to copy default configuration to %s.\n'
'-- Error:%s\n'
'!- Not loading any GUI configuration file.\n'%
(filename, ex))
return
root = ElementTree(file=filename)
self.xml_node = root
self.xml_filename = filename
# GUI Setting -- Splash screen
node = root.find('startup_options/splash_logo')
if node is None:
# TODO Use a default ?
self.splash_screen = QSplashScreen(QPixmap())
self.splash_screen.showMessage('URBANSIM')
else:
# Load user splash
dir_ = directory_path_from_opus_path('opus_gui.main.views.Images')
splash_abs_filename = os.path.join(dir_, node.text)
splash_pix = QPixmap(splash_abs_filename)
splash_pix = splash_pix.scaled(780,328, Qt.KeepAspectRatio)
self.splash_screen = QSplashScreen(splash_pix)
# GUI Setting -- Application Title
node = root.find('application_options/application_title')
if node is not None:
self.application_title = node.text
# GUI Setting -- Font sizes
for group, node_name in [('menu', 'menu_font_size'),
('tabs', 'main_tabs_font_size'),
('general', 'general_text_font_size')]:
try:
node = root.find('font_settings/%s' %node_name)
self.fonts[group] = int(node.text)
except ValueError:
print 'Could not set font %s to "%s"' %(group, node.text or '')
# GUI Setting -- Previous projects
node = root.find('project_history/previous_project')
if node is not None:
self.latest_project_filename = node.text
node = root.find('project_history/open_latest_project_on_start')
if node is not None:
self.load_latest_on_start = (node.text == "True")
node = root.find('project_history/previous_tab')
if node is not None:
self.latest_tab_index = node.text
node = root.find('project_history/open_latest_tab_on_start')
if node is not None:
self.load_latest_tab_on_start = (node.text == "True")
def save(self):
''' Save the GUI configuration file to disk'''
if self.xml_node is None:
print('Warning -- Tried to save a GUI configuration that is not '
'loaded')
return
# Update font settings
font_settings_node = self.xml_node.find('font_settings')
pairs = [('menu', 'menu_font_size'),
('tabs', 'main_tabs_font_size'),
('general', 'general_text_font_size')]
for group, node_name in pairs:
font_settings_node.find(node_name).text = str(self.fonts[group])
# Update latest project history
proj_hist_node = self.xml_node.find('project_history')
open_latest_node = proj_hist_node.find('open_latest_project_on_start')
prevproj_node = proj_hist_node.find('previous_project')
open_latest_tab_node = proj_hist_node.find('open_latest_tab_on_start')
if open_latest_tab_node is None:
open_latest_tab_node = SubElement(proj_hist_node, 'open_latest_tab_on_start')
prevtab_node = proj_hist_node.find('previous_tab')
if prevtab_node is None:
prevtab_node = SubElement(proj_hist_node, 'previous_tab')
# Ensure that the value is 'True' or 'False'
open_latest_node.text = self.load_latest_on_start and 'True' or 'False'
prevproj_node.text = self.latest_project_filename
open_latest_tab_node.text = self.load_latest_tab_on_start and 'True' or 'False'
prevtab_node.text = self.latest_tab_index
# Write config to disk
self.xml_node.write(self.xml_filename)
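# Example usage (a sketch based only on the methods defined above):
#   config = OpusGuiConfiguration()
#   config.load()               # loads or creates the per-user gui_config.xml
#   config.fonts['menu'] = 11
#   config.save()               # writes the updated settings back to disk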
|
agpl-3.0
| -3,937,016,562,168,257,000 | 42.474026 | 96 | 0.582823 | false |
ftl/dxpad
|
tests/test.py
|
1
|
8396
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import os
import unittest
sys.path.insert(0, os.path.abspath('..'))
import dxpad._location as _location
import dxpad._grid as _grid
import dxpad._notepad as _notepad
import dxpad._callinfo as _callinfo
class TestLatLon(unittest.TestCase):
def test_neighbours_somewhere_in_the_middle(self):
lat_lon = _location.LatLon(48, 10)
self.assertEqual( lat_lon.neighbours(),
set([
_location.LatLon(47, 8),
_location.LatLon(47, 10),
_location.LatLon(47, 12),
_location.LatLon(48, 8),
_location.LatLon(48, 12),
_location.LatLon(49, 8),
_location.LatLon(49, 10),
_location.LatLon(49, 12)
])
)
def test_neighbours_left_end(self):
lat_lon = _location.LatLon(48, -180)
self.assertEqual( lat_lon.neighbours(),
set([
_location.LatLon(47, 178),
_location.LatLon(47, -180),
_location.LatLon(47, -178),
_location.LatLon(48, 178),
_location.LatLon(48, -178),
_location.LatLon(49, 178),
_location.LatLon(49, -180),
_location.LatLon(49, -178)
])
)
def test_neighbours_right_end(self):
lat_lon = _location.LatLon(48, 180)
self.assertEqual( lat_lon.neighbours(),
set([
_location.LatLon(47, 178),
_location.LatLon(47, 180),
_location.LatLon(47, -178),
_location.LatLon(48, 178),
_location.LatLon(48, -178),
_location.LatLon(49, 178),
_location.LatLon(49, 180),
_location.LatLon(49, -178)
])
)
def test_neighbours_top_end(self):
lat_lon = _location.LatLon(90, 10)
self.assertEqual( lat_lon.neighbours(),
set([
_location.LatLon(89, 8),
_location.LatLon(89, 10),
_location.LatLon(89, 12),
_location.LatLon(90, 8),
_location.LatLon(90, 12),
])
)
def test_neighbours_bottom_end(self):
lat_lon = _location.LatLon(-90, 10)
self.assertEqual( lat_lon.neighbours(),
set([
_location.LatLon(-89, 8),
_location.LatLon(-89, 10),
_location.LatLon(-89, 12),
_location.LatLon(-90, 8),
_location.LatLon(-90, 12),
])
)
def test_neighbours_center(self):
lat_lon = _location.LatLon(0, 0)
self.assertEqual( lat_lon.neighbours(),
set([
_location.LatLon(-1, -2),
_location.LatLon(-1, 0),
_location.LatLon(-1, 2),
_location.LatLon(0, -2),
_location.LatLon(0, 2),
_location.LatLon(1, -2),
_location.LatLon(1, 0),
_location.LatLon(1, 2)
])
)
def test_hash_int(self):
hashes = set()
conflicts = []
for lat in range(-90, 91):
for lon in range(-180, 180):
lat_lon = _location.LatLon(lat, lon)
h = hash(lat_lon)
                if h in hashes:
                    print("conflict {}".format(lat_lon))
                    conflicts.append(lat_lon)
                else:
                    hashes.add(h)
self.assertEqual(
len(conflicts), 0,
"{} conflicting {}".format(len(hashes), len(conflicts)))
class TestNotedQsos(unittest.TestCase):
def setUp(self):
self.qsos = _notepad.NotedQsos()
def assertInQso(self, line):
self.assertTrue(self.qsos.is_in_qso(line))
def assertNotInQso(self, line):
self.assertFalse(self.qsos.is_in_qso(line))
def assertQsosInOrder(self):
for i in range(0, len(self.qsos) - 1):
self.assertTrue(self.qsos[i].end < self.qsos[i + 1].start)
def test_insert_single_qso_should_indicate_in_qso(self):
self.assertNotInQso(10)
self.qsos.insert_qso(10)
self.assertNotInQso(9)
self.assertInQso(10)
self.assertNotInQso(11)
def test_insert_two_qsos_should_insert_in_order(self):
for i in range(20, 10, -2):
self.qsos.insert_qso(i)
self.assertEqual(5, len(self.qsos))
self.assertQsosInOrder()
def test_remove_qso(self):
for i in range(20, 10, -2):
self.qsos.insert_qso(i)
self.qsos.remove_qso(16)
self.assertEqual(4, len(self.qsos))
def test_insert_qso_twice_should_ignore_second_attempt(self):
self.qsos.insert_qso(10)
self.qsos.insert_qso(10)
self.assertEqual(1, len(self.qsos))
def test_get_qso_for_line(self):
self.qsos.insert_qso(10)
self.assertEqual(None, self.qsos.get_qso(9))
self.assertEqual(10, self.qsos.get_qso(10).start)
self.assertEqual(None, self.qsos.get_qso(11))
def test_move_qso_start_up(self):
self.qsos.insert_qso(10)
self.qsos.move_qso_start(8)
qso = self.qsos.get_qso(8)
self.assertEqual(8, qso.start)
self.assertEqual(10, qso.end)
self.assertEqual(qso, self.qsos.get_qso(9))
self.assertEqual(qso, self.qsos.get_qso(10))
def test_move_qso_start_down(self):
self.qsos.insert_qso(10)
self.qsos.move_qso_start(5)
self.qsos.move_qso_start(8)
qso = self.qsos.get_qso(8)
self.assertEqual(8, qso.start)
self.assertEqual(10, qso.end)
self.assertNotInQso(5)
self.assertNotInQso(6)
self.assertNotInQso(7)
def test_insert_qso_by_moving_start_after_last_qso(self):
self.qsos.insert_qso(10)
self.qsos.move_qso_start(12)
self.assertEqual(2, len(self.qsos))
self.assertNotEqual(self.qsos.get_qso(10), self.qsos.get_qso(12))
def test_move_qso_end_down(self):
self.qsos.insert_qso(10)
self.qsos.move_qso_end(12)
qso = self.qsos.get_qso(12)
self.assertEqual(10, qso.start)
self.assertEqual(12, qso.end)
self.assertEqual(qso, self.qsos.get_qso(10))
self.assertEqual(qso, self.qsos.get_qso(11))
class TestCall(unittest.TestCase):
def assertCall(self, call, prefix, base_call, suffix, working_condition):
self.assertEqual(prefix, call.prefix)
self.assertEqual(base_call, call.base_call)
self.assertEqual(suffix, call.suffix)
self.assertEqual(working_condition, call.working_condition)
def test_find_all(self):
calls = _callinfo.Call.find_all(
"DL3NEY W1AW 9A1AA EA6/DJ9MH VE3/DL1NEO/9 DL3NY/HA2 DF2NK/p "
"VK7/DK6MP/9/p",
lambda m: m.group())
self.assertCountEqual(
["DL3NEY", "W1AW", "9A1AA", "EA6/DJ9MH", "VE3/DL1NEO/9",
"DL3NY/HA2", "DF2NK/p", "VK7/DK6MP/9/p"],
calls)
def test_base_call(self):
self.assertCall(
_callinfo.Call("DL3NEY"), None, "DL3NEY", None, None)
def test_base_call_with_prefix(self):
self.assertCall(
_callinfo.Call("EA6/DL3NEY"), "EA6", "DL3NEY", None, None)
def test_base_call_with_working_condition(self):
self.assertCall(
_callinfo.Call("DL3NEY/p"), None, "DL3NEY", None, "P")
def test_base_call_with_suffix(self):
self.assertCall(
_callinfo.Call("DL3NEY/KP4"), None, "DL3NEY", "KP4", None)
def test_base_call_with_suffix_and_working_condition(self):
self.assertCall(
_callinfo.Call("DL3NEY/KP4/MM"), None, "DL3NEY", "KP4", "MM")
def test_base_call_with_prefix_and_working_condition(self):
self.assertCall(
_callinfo.Call("EA8/DL3NEY/MM"), "EA8", "DL3NEY", None, "MM")
def test_base_call_with_prefix_and_suffix(self):
self.assertCall(
_callinfo.Call("WB3/DL3NEY/8"), "WB3", "DL3NEY", "8", None)
def test_base_call_with_prefix_and_suffix_and_working_condition(self):
self.assertCall(
_callinfo.Call("WB3/DL3NEY/8/p"), "WB3", "DL3NEY", "8", "P")
if __name__ == '__main__': unittest.main()
|
mit
| -5,978,264,942,486,913,000 | 32.31746 | 77 | 0.54657 | false |
starkdee/courseware
|
userprofile/views.py
|
1
|
1274
|
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.forms.models import model_to_dict
from userprofile.models import UserProfile
from userprofile.forms import EditUserForm, EditUserProfileForm
@login_required
def profile(request):
if request.method == 'POST':
user_form = EditUserForm(request.POST, instance=request.user)
user_profile = UserProfile.objects.get(user=request.user)
user_profile_form = EditUserProfileForm(request.POST, instance=user_profile)
if user_form.is_valid() and user_profile_form.is_valid():
user_form.save()
user_profile = user_profile_form.save(commit=False)
if 'picture' in request.FILES:
user_profile.picture = request.FILES['picture']
user_profile.save()
return redirect('profile:profile')
else:
user_form = EditUserForm(instance=request.user)
user_profile = UserProfile.objects.get(user=request.user)
user_profile_form = EditUserProfileForm(instance=user_profile)
return render(request, 'userprofile/profile.html', {'user_form': user_form, 'user_profile_form': user_profile_form})
|
mit
| 7,111,900,879,933,584,000 | 37.636364 | 124 | 0.704082 | false |
AllYarnsAreBeautiful/knittingpattern
|
knittingpattern/test/test_dump_json.py
|
1
|
1049
|
from pytest import fixture
from unittest.mock import MagicMock
from knittingpattern.Dumper import JSONDumper
import json
from knittingpattern.ParsingSpecification import ParsingSpecification
@fixture
def obj():
return ["123", 123]
@fixture
def dumper(obj):
def dump():
return obj
return JSONDumper(dump)
@fixture
def parser():
return MagicMock()
def test_dump_object(dumper, obj):
assert dumper.object() == obj
def test_dump_string(dumper, obj):
assert dumper.string() == json.dumps(obj)
def test_dump_to_temporary_file(dumper, obj):
temp_path = dumper.temporary_path()
with open(temp_path) as file:
obj2 = json.load(file)
assert obj2 == obj
def test_dump_to_knitting_pattern(dumper, parser, obj):
spec = ParsingSpecification(new_parser=parser)
dumper.knitting_pattern(spec)
parser.assert_called_with(spec)
parser(spec).knitting_pattern_set.assert_called_with(obj)
def test_string_representation(dumper):
string = repr(dumper)
assert "JSONDumper" in string
|
lgpl-3.0
| -1,891,693,397,049,427,200 | 20.408163 | 69 | 0.714967 | false |
orlenko/bccf
|
src/bccf/migrations/0020_auto__add_field_userprofile_membership_order_free.py
|
1
|
45167
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'UserProfile.membership_order_free'
db.add_column(u'bccf_userprofile', 'membership_order_free',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='free_order', null=True, to=orm['shop.Order']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'UserProfile.membership_order_free'
db.delete_column(u'bccf_userprofile', 'membership_order_free_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'bccf.article': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Article'},
'attached_document': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.bccfbabypage': {
'Meta': {'ordering': "('order',)", 'object_name': 'BCCFBabyPage', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'bccf.bccfchildpage': {
'Meta': {'ordering': "('-created',)", 'object_name': 'BCCFChildPage'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'bccf_topic': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['bccf.BCCFTopic']", 'null': 'True', 'blank': 'True'}),
u'comments_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'gparent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.BCCFPage']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'page_for': ('django.db.models.fields.CharField', [], {'default': "'parent'", 'max_length': '13', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.BCCFChildPage']", 'null': 'True', 'blank': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'rating_average': ('django.db.models.fields.FloatField', [], {'default': '0'}),
u'rating_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'rating_sum': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'bccf.bccfgenericpage': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'BCCFGenericPage', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'show_comments': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_rating': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'show_resources': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
u'bccf.bccfpage': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'BCCFPage', '_ormbases': [u'pages.Page']},
'carousel_color': ('django.db.models.fields.CharField', [], {'default': "'dgreen-list'", 'max_length': '11'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'marquee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.PageMarquee']", 'null': 'True', 'blank': 'True'}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'bccf.bccftopic': {
'Meta': {'object_name': 'BCCFTopic'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'carousel_color': ('django.db.models.fields.CharField', [], {'default': "'dgreen-list'", 'max_length': '11'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.PageMarquee']", 'null': 'True', 'blank': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'bccf.blog': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Blog', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'})
},
u'bccf.campaign': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Campaign'},
'approve': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'approved_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'by_user': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'campaigns'", 'null': 'True', 'to': u"orm['auth.User']"})
},
u'bccf.downloadableform': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'DownloadableForm'},
'attached_document': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.event': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Event', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'date_end': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'full': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'location_city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'location_postal_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'location_street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'location_street2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'max_seats': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'null': 'True', 'blank': 'True'}),
'price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'program': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'program'", 'null': 'True', 'to': u"orm['bccf.Program']"}),
'provider': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'events'", 'null': 'True', 'to': u"orm['auth.User']"}),
'survey_after': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'survey_after'", 'null': 'True', 'to': u"orm['builder.FormPublished']"}),
'survey_before': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'survey_before'", 'null': 'True', 'to': u"orm['builder.FormPublished']"})
},
u'bccf.eventregistration': {
'Meta': {'object_name': 'EventRegistration'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'event_registration'", 'to': u"orm['bccf.Event']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'passed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'registration_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'bccf.footermarquee': {
'Meta': {'object_name': 'FooterMarquee'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'bccf.footermarqueeslide': {
'Meta': {'object_name': 'FooterMarqueeSlide'},
'caption': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['bccf.FooterMarquee']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'bccf.homemarquee': {
'Meta': {'object_name': 'HomeMarquee'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'bccf.homemarqueeslide': {
'Meta': {'object_name': 'HomeMarqueeSlide'},
'caption': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'linkLabel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['bccf.HomeMarquee']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'bccf.magazine': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Magazine'},
'attached_document': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.pagemarquee': {
'Meta': {'object_name': 'PageMarquee'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'bccf.pagemarqueeslide': {
'Meta': {'object_name': 'PageMarqueeSlide'},
'caption': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'linkLabel': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['bccf.PageMarquee']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'null': 'True', 'blank': 'True'})
},
u'bccf.podcast': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Podcast', '_ormbases': [u'bccf.BCCFChildPage']},
'attached_audio': ('mezzanine.core.fields.FileField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.program': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Program', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'user_added': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'bccf.programrequest': {
'Meta': {'ordering': "('-created',)", 'object_name': 'ProgramRequest'},
'accept': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'accepted_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'comment': ('mezzanine.core.fields.RichTextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'program_requests'", 'to': u"orm['auth.User']"})
},
u'bccf.settings': {
'Meta': {'object_name': 'Settings'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'bccf.tipsheet': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'TipSheet'},
'attached_document': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'})
},
u'bccf.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'account_number': ('django.db.models.fields.CharField', [], {'max_length': '12', 'null': 'True', 'blank': 'True'}),
'accreditation': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['bccf.Program']", 'null': 'True', 'blank': 'True'}),
'autosubscribe': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'avatar': ('bccf.fields.MyImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'facebook': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'male'", 'max_length': '6', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_mailing_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_forum_moderator': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'job_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '10', 'blank': 'True'}),
'linkedin': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'membership_level': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'membership_order': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order'", 'null': 'True', 'to': u"orm['shop.Order']"}),
'membership_order_free': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'free_order'", 'null': 'True', 'to': u"orm['shop.Order']"}),
'membership_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'members'", 'null': 'True', 'to': u"orm['bccf.UserProfile']"}),
'phone_mobile': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'phone_primary': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'phone_work': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'photo': ('bccf.fields.MyImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pinterest': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'post_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'province': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'region': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'requested_cancellation': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'show_in_list': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'show_signatures': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'signature': ('django.db.models.fields.TextField', [], {'max_length': '1024', 'blank': 'True'}),
'signature_html': ('django.db.models.fields.TextField', [], {'max_length': '1054', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'street_2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'street_3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'time_zone': ('django.db.models.fields.FloatField', [], {'default': '3.0'}),
'twitter': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': u"orm['auth.User']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'youtube': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'bccf.video': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Video', '_ormbases': [u'bccf.BCCFChildPage']},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Product']", 'null': 'True', 'blank': 'True'}),
'video_url': ('embed_video.fields.EmbedVideoField', [], {'default': "''", 'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
u'builder.formpublished': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'FormPublished'},
u'bccfchildpage_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['bccf.BCCFChildPage']", 'unique': 'True', 'primary_key': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'form_structure': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['builder.FormStructure']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'builder.formstructure': {
'Meta': {'object_name': 'FormStructure'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'structure': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "'Form Structure'", 'max_length': '100'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'JSON'", 'max_length': '4'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'pages.page': {
'Meta': {'ordering': "(u'titles',)", 'object_name': 'Page'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_menus': ('mezzanine.pages.fields.MenusField', [], {'default': '(1, 2, 3)', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'children'", 'null': 'True', 'to': u"orm['pages.Page']"}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'shop.category': {
'Meta': {'ordering': "(u'_order',)", 'object_name': 'Category', '_ormbases': [u'pages.Page']},
'carousel_color': ('django.db.models.fields.CharField', [], {'default': "'dgreen-list'", 'max_length': '11'}),
'combined': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'featured_image': ('mezzanine.core.fields.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'marquee': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bccf.PageMarquee']", 'null': 'True', 'blank': 'True'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'product_options'", 'blank': 'True', 'to': u"orm['shop.ProductOption']"}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'}),
'price_max': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'price_min': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['shop.Product']", 'symmetrical': 'False', 'blank': 'True'}),
'sale': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['shop.Sale']", 'null': 'True', 'blank': 'True'})
},
u'shop.order': {
'Meta': {'ordering': "('-id',)", 'object_name': 'Order'},
'additional_instructions': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'billing_detail_city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'billing_detail_first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'billing_detail_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'billing_detail_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'billing_detail_street': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'discount_code': ('cartridge.shop.fields.DiscountCodeField', [], {'max_length': '20', 'blank': 'True'}),
'discount_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'payment_method': ('django.db.models.fields.CharField', [], {'default': "'paypal'", 'max_length': '6'}),
'shipping_detail_city': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_country': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_first_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_last_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_phone': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'shipping_detail_postcode': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'shipping_detail_state': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_detail_street': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'shipping_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'shipping_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'tax_total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'tax_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'total': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'transaction_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'shop.product': {
'Meta': {'object_name': 'Product'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'available': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['shop.Category']", 'symmetrical': 'False', 'blank': 'True'}),
u'comments_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'content': ('mezzanine.core.fields.RichTextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'num_in_stock': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'rating_average': ('django.db.models.fields.FloatField', [], {'default': '0'}),
u'rating_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'rating_sum': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_products_rel_+'", 'blank': 'True', 'to': u"orm['shop.Product']"}),
'sale_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sale_id': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'sale_price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'sale_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'sku': ('cartridge.shop.fields.SKUField', [], {'max_length': '20', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'unit_price': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'upsell_products': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'upsell_products_rel_+'", 'blank': 'True', 'to': u"orm['shop.Product']"})
},
u'shop.productoption': {
'Meta': {'object_name': 'ProductOption'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('cartridge.shop.fields.OptionField', [], {'max_length': '50', 'null': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {})
},
u'shop.sale': {
'Meta': {'object_name': 'Sale'},
'active': ('django.db.models.fields.BooleanField', [], {}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'sale_related'", 'blank': 'True', 'to': u"orm['shop.Category']"}),
'discount_deduct': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'discount_exact': ('cartridge.shop.fields.MoneyField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'discount_percent': ('cartridge.shop.fields.PercentageField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'products': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['shop.Product']", 'symmetrical': 'False', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'valid_from': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'valid_to': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'sites.site': {
'Meta': {'ordering': "(u'domain',)", 'object_name': 'Site', 'db_table': "u'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['bccf']
|
unlicense
| -3,563,531,983,299,049,000 | 92.709544 | 195 | 0.549251 | false |
aurelieladier/openturns
|
python/test/t_KrigingAlgorithm_std.py
|
1
|
2901
|
#! /usr/bin/env python
from __future__ import print_function
from openturns import *
from openturns.testing import *
TESTPREAMBLE()
# Test 1
sampleSize = 6
dimension = 1
f = NumericalMathFunction(['x0'], ['y'], ['x0 * sin(x0)'])
X = NumericalSample(sampleSize, dimension)
X2 = NumericalSample(sampleSize, dimension)
for i in range(sampleSize):
X[i, 0] = 3.0 + i
X2[i, 0] = 2.5 + i
X[0, 0] = 1.0
X[1, 0] = 3.0
X2[0, 0] = 2.0
X2[1, 0] = 4.0
Y = f(X)
Y2 = f(X2)
# create algorithm
basis = ConstantBasisFactory(dimension).build()
covarianceModel = SquaredExponential([1e-05], [4.11749])
algo = KrigingAlgorithm(X, Y, basis, covarianceModel)
algo.run()
# perform an evaluation
result = algo.getResult()
print("X=", X)
print("f(X)=", Y)
assert_almost_equal(result.getMetaModel()(X), Y)
assert_almost_equal(result.getResiduals(), [1.32804e-07], 1e-3, 1e-3)
assert_almost_equal(result.getRelativeErrors(), [5.20873e-21])
# Kriging variance is 0 on learning points
var = result.getConditionalCovariance(X)
# assert_almost_equal could not be applied to matrices
# application to NumericalPoint
covariancePoint = NumericalPoint(var.getImplementation())
theoreticalVariance = NumericalPoint(sampleSize * sampleSize)
assert_almost_equal(covariancePoint, theoreticalVariance, 8.95e-7, 8.95e-7)
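# Illustrative sketch, not part of the original test: away from the learning
# points the conditional variance should be strictly positive. Assuming the
# same OpenTURNS API used above, such a check could read:
# Xnew = NumericalSample([[2.2]])
# varNew = result.getConditionalCovariance(Xnew)
# assert NumericalPoint(varNew.getImplementation())[0] > 0.0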
# Test 2
# Kriging use case
spatialDimension = 2
# Learning data
levels = [8, 5]
box = Box(levels)
inputSample = box.generate()
# Scale each direction
inputSample *= 10.0
model = NumericalMathFunction(['x', 'y'], ['z'], ['cos(0.5*x) + sin(y)'])
outputSample = model(inputSample)
# Validation
sampleSize = 10
inputValidSample = ComposedDistribution(
2 * [Uniform(0, 10.0)]).getSample(sampleSize)
outputValidSample = model(inputValidSample)
# 2) Definition of exponential model
# The parameters have been calibrated using TNC optimization
# and AbsoluteExponential models
covarianceModel = SquaredExponential([1.98824, 0.924731], [3.15352])
# 3) Basis definition
basisCollection = BasisCollection(
1, ConstantBasisFactory(spatialDimension).build())
# Kriging algorithm
algo = KrigingAlgorithm(inputSample, outputSample,
basisCollection, covarianceModel)
algo.run()
result = algo.getResult()
# Get meta model
metaModel = result.getMetaModel()
outData = metaModel(inputValidSample)
# 4) Errors
# Interpolation
assert_almost_equal(outputSample, metaModel(inputSample), 3.0e-5, 3.0e-5)
# 5) Kriging variance is 0 on learning points
var = result.getConditionalCovariance(inputSample)
# assert_almost_equal could not be applied to matrices
# application to NumericalPoint
covariancePoint = NumericalPoint(var.getImplementation())
theoreticalVariance = NumericalPoint(covariancePoint.getSize(), 0.0)
assert_almost_equal(covariancePoint, theoreticalVariance, 7e-7, 7e-7)
# Estimation
assert_almost_equal(outputValidSample, metaModel(
inputValidSample), 1.e-1, 1e-1)
|
lgpl-3.0
| -6,366,065,284,722,497,000 | 25.372727 | 74 | 0.736987 | false |
bicephale/OctoPrint
|
src/octoprint/plugins/softwareupdate/version_checks/github_commit.py
|
1
|
1955
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import requests
import logging
from ..exceptions import ConfigurationInvalid
BRANCH_HEAD_URL = "https://api.github.com/repos/{user}/{repo}/git/refs/heads/{branch}"
logger = logging.getLogger("octoprint.plugins.softwareupdate.version_checks.github_commit")
def _get_latest_commit(user, repo, branch):
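	# Returns the sha of the newest commit on the given branch via the GitHub
	# refs API, or None if the request fails or the payload lacks the sha.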
r = requests.get(BRANCH_HEAD_URL.format(user=user, repo=repo, branch=branch))
from . import log_github_ratelimit
log_github_ratelimit(logger, r)
	if r.status_code != requests.codes.ok:
return None
reference = r.json()
	if "object" not in reference or "sha" not in reference["object"]:
return None
return reference["object"]["sha"]
def get_latest(target, check):
user = check.get("user")
repo = check.get("repo")
if user is None or repo is None:
raise ConfigurationInvalid("Update configuration for {} of type github_commit needs user and repo set and not None".format(target))
branch = "master"
if "branch" in check and check["branch"] is not None:
branch = check["branch"]
current = None
if "current" in check:
current = check["current"]
remote_commit = _get_latest_commit(check["user"], check["repo"], branch)
information = dict(
local=dict(name="Commit {commit}".format(commit=current if current is not None else "unknown"), value=current),
remote=dict(name="Commit {commit}".format(commit=remote_commit if remote_commit is not None else "unknown"), value=remote_commit)
)
is_current = (current is not None and current == remote_commit) or remote_commit is None
logger.debug("Target: %s, local: %s, remote: %s" % (target, current, remote_commit))
return information, is_current
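# Hedged usage sketch, not part of the original module: the check dict mirrors
# the keys read above (user, repo, optional branch and current). The repo and
# sha values below are illustrative placeholders only.
# info, is_current = get_latest("octoprint", {"user": "foosel",
#                                             "repo": "OctoPrint",
#                                             "branch": "devel",
#                                             "current": "0123abc"})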
|
agpl-3.0
| -7,031,101,314,761,969,000 | 32.101695 | 133 | 0.725038 | false |
jantman/RPyMostat-sensor
|
rpymostat_sensor/version.py
|
1
|
1930
|
"""
The latest version of this package is available at:
<http://github.com/jantman/RPyMostat-sensor>
##################################################################################
Copyright 2016 Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com>
This file is part of RPyMostat-sensor, also known as RPyMostat-sensor.
RPyMostat-sensor is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
RPyMostat-sensor is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with RPyMostat-sensor. If not, see <http://www.gnu.org/licenses/>.
The Copyright and Authors attributions contained herein may not be removed or
otherwise altered, except to add the Author attribution of a contributor to
this work. (Additional Terms pursuant to Section 7b of the AGPL v3)
##################################################################################
While not legally required, I sincerely request that anyone who finds
bugs please submit them at <https://github.com/jantman/RPyMostat-sensor> or
to me via email, and that you send any contributions or improvements
either as a pull request on GitHub, or to me via email.
##################################################################################
AUTHORS:
Jason Antman <jason@jasonantman.com> <http://www.jasonantman.com>
##################################################################################
"""
VERSION = '0.1.0'
PROJECT_URL = 'https://github.com/jantman/RPyMostat-sensor'
|
agpl-3.0
| 7,328,817,709,505,778,000 | 48.487179 | 82 | 0.635233 | false |
kreatorkodi/repository.torrentbr
|
plugin.video.youtube/resources/lib/youtube_plugin/youtube/client/__config__.py
|
1
|
10980
|
# -*- coding: utf-8 -*-
from base64 import b64decode
from hashlib import md5
from ...kodion.json_store import APIKeyStore, LoginTokenStore
from ...kodion import Context as __Context
from ... import key_sets
DEFAULT_SWITCH = 1
__context = __Context(plugin_id='plugin.video.youtube')
__settings = __context.get_settings()
class APICheck(object):
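    # Reconciles the personal API key set stored in the add-on settings with
    # the JSON key store, migrates legacy access/refresh tokens into the
    # access manager store, and triggers a sign-out when the key set changes.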
def __init__(self, context, settings):
self._context = context
self._settings = settings
self._ui = context.get_ui()
self._api_jstore = APIKeyStore()
self._json_api = self._api_jstore.get_data()
self._am_jstore = LoginTokenStore()
self._json_am = self._am_jstore.get_data()
self.changed = False
self._on_init()
def _on_init(self):
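        # Synchronise personal keys between the settings and the JSON store,
        # migrate tokens that older versions kept in settings, and sign the
        # user out if the effective key set hash differs from the stored one.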
self._json_api = self._api_jstore.get_data()
j_key = self._json_api['keys']['personal'].get('api_key', '')
j_id = self._json_api['keys']['personal'].get('client_id', '')
j_secret = self._json_api['keys']['personal'].get('client_secret', '')
original_key = self._settings.get_string('youtube.api.key')
original_id = self._settings.get_string('youtube.api.id')
original_secret = self._settings.get_string('youtube.api.secret')
if original_key and original_id and original_secret:
own_key, own_id, own_secret = self._strip_api_keys(original_key, original_id, original_secret)
if own_key and own_id and own_secret:
if (original_key != own_key) or (original_id != own_id) or (original_secret != own_secret):
self._settings.set_string('youtube.api.key', own_key)
self._settings.set_string('youtube.api.id', own_id)
self._settings.set_string('youtube.api.secret', own_secret)
if (j_key != own_key) or (j_id != own_id) or (j_secret != own_secret):
self._json_api['keys']['personal'] = {'api_key': own_key, 'client_id': own_id, 'client_secret': own_secret}
self._api_jstore.save(self._json_api)
self._json_api = self._api_jstore.get_data()
j_key = self._json_api['keys']['personal'].get('api_key', '')
j_id = self._json_api['keys']['personal'].get('client_id', '')
j_secret = self._json_api['keys']['personal'].get('client_secret', '')
        if (not original_key or not original_id or not original_secret) and (j_key and j_secret and j_id):
self._settings.set_string('youtube.api.key', j_key)
self._settings.set_string('youtube.api.id', j_id)
self._settings.set_string('youtube.api.secret', j_secret)
self._settings.set_bool('youtube.api.enable', True)
switch = self.get_current_switch()
user = self.get_current_user()
access_token = self._settings.get_string('kodion.access_token', '')
refresh_token = self._settings.get_string('kodion.refresh_token', '')
token_expires = self._settings.get_int('kodion.access_token.expires', -1)
last_hash = self._settings.get_string('youtube.api.last.hash', '')
if not self._json_am['access_manager']['users'].get(user, {}).get('access_token') or \
not self._json_am['access_manager']['users'].get(user, {}).get('refresh_token'):
if access_token and refresh_token:
self._json_am['access_manager']['users'][user]['access_token'] = access_token
self._json_am['access_manager']['users'][user]['refresh_token'] = refresh_token
self._json_am['access_manager']['users'][user]['token_expires'] = token_expires
if switch == 'own':
own_key_hash = self._get_key_set_hash('own')
if last_hash == self._get_key_set_hash('own', True) or \
last_hash == own_key_hash:
self._json_am['access_manager']['users'][user]['last_key_hash'] = own_key_hash
self._am_jstore.save(self._json_am)
if access_token or refresh_token or last_hash:
self._settings.set_string('kodion.access_token', '')
self._settings.set_string('kodion.refresh_token', '')
self._settings.set_int('kodion.access_token.expires', -1)
self._settings.set_string('youtube.api.last.hash', '')
updated_hash = self._api_keys_changed(switch)
if updated_hash:
self._context.log_warning('User: |%s| Switching API key set to |%s|' % (user, switch))
self._json_am['access_manager']['users'][user]['last_key_hash'] = updated_hash
self._am_jstore.save(self._json_am)
self._context.log_debug('API key set changed: Signing out')
self._context.execute('RunPlugin(plugin://plugin.video.youtube/sign/out/?confirmed=true)')
else:
self._context.log_debug('User: |%s| Using API key set: |%s|' % (user, switch))
def get_current_switch(self):
return 'own' if self.has_own_api_keys() else self._settings.get_string('youtube.api.key.switch', str(DEFAULT_SWITCH))
def get_current_user(self):
self._json_am = self._am_jstore.get_data()
return self._json_am['access_manager'].get('current_user', '0')
def has_own_api_keys(self):
self._json_api = self._api_jstore.get_data()
own_key = self._json_api['keys']['personal']['api_key']
own_id = self._json_api['keys']['personal']['client_id']
own_secret = self._json_api['keys']['personal']['client_secret']
        return bool(own_key and own_id and own_secret and
                    self._settings.get_string('youtube.api.enable') == 'true')
def get_api_keys(self, switch):
self._json_api = self._api_jstore.get_data()
if not switch or (switch == 'own' and not self.has_own_api_keys()):
switch = '1'
if switch == 'youtube-tv':
            api_key = b64decode(key_sets['youtube-tv']['key']).decode('utf-8')
client_id = b64decode(key_sets['youtube-tv']['id']).decode('utf-8') + u'.apps.googleusercontent.com'
client_secret = b64decode(key_sets['youtube-tv']['secret']).decode('utf-8')
elif switch == 'developer':
self._json_api = self._api_jstore.get_data()
return self._json_api['keys']['developer']
elif switch == 'own':
api_key = self._json_api['keys']['personal']['api_key']
client_id = self._json_api['keys']['personal']['client_id'] + u'.apps.googleusercontent.com'
client_secret = self._json_api['keys']['personal']['client_secret']
else:
api_key = b64decode(key_sets['provided'][switch]['key']).decode('utf-8')
client_id = b64decode(key_sets['provided'][switch]['id']).decode('utf-8') + u'.apps.googleusercontent.com'
client_secret = b64decode(key_sets['provided'][switch]['secret']).decode('utf-8')
return api_key, client_id, client_secret
def _api_keys_changed(self, switch):
self._json_am = self._am_jstore.get_data()
if not switch or (switch == 'own' and not self.has_own_api_keys()):
switch = '1'
user = self.get_current_user()
last_set_hash = self._json_am['access_manager']['users'].get(user, {}).get('last_key_hash', '')
current_set_hash = self._get_key_set_hash(switch)
if last_set_hash != current_set_hash:
self.changed = True
return current_set_hash
else:
self.changed = False
return None
def _get_key_set_hash(self, switch, old=False):
if not switch or (switch == 'own' and not self.has_own_api_keys()):
switch = '1'
api_key, client_id, client_secret = self.get_api_keys(switch)
if old and switch == 'own':
client_id = client_id.replace(u'.apps.googleusercontent.com', u'')
m = md5()
m.update(api_key.encode('utf-8'))
m.update(client_id.encode('utf-8'))
m.update(client_secret.encode('utf-8'))
return m.hexdigest()
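        # Illustrative note, not from the original source: the three update()
        # calls above are equivalent to hashing the plain concatenation, e.g.
        # with made-up placeholder credentials:
        # md5((u'AIzaKey' + u'1234-abc.apps.googleusercontent.com' +
        #      u's3cr3t').encode('utf-8')).hexdigest()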
def _strip_api_keys(self, api_key, client_id, client_secret):
stripped_key = ''.join(api_key.split())
stripped_id = ''.join(client_id.replace('.apps.googleusercontent.com', '').split())
stripped_secret = ''.join(client_secret.split())
if api_key != stripped_key:
if stripped_key not in api_key:
self._context.log_debug('Personal API setting: |Key| Skipped: potentially mangled by stripping')
return_key = api_key
else:
self._context.log_debug('Personal API setting: |Key| had whitespace removed')
return_key = stripped_key
else:
return_key = api_key
if client_id != stripped_id:
if stripped_id not in client_id:
self._context.log_debug('Personal API setting: |Id| Skipped: potentially mangled by stripping')
return_id = client_id
else:
googleusercontent = ''
if '.apps.googleusercontent.com' in client_id:
googleusercontent = ' and .apps.googleusercontent.com'
self._context.log_debug('Personal API setting: |Id| had whitespace%s removed' % googleusercontent)
return_id = stripped_id
else:
return_id = client_id
if client_secret != stripped_secret:
if stripped_secret not in client_secret:
self._context.log_debug('Personal API setting: |Secret| Skipped: potentially mangled by stripping')
return_secret = client_secret
else:
self._context.log_debug('Personal API setting: |Secret| had whitespace removed')
return_secret = stripped_secret
else:
return_secret = client_secret
return return_key, return_id, return_secret
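        # Illustrative example, not from the original source, with made-up
        # values carrying only leading/trailing whitespace:
        # _strip_api_keys(' AIzaKey ', 'my-id.apps.googleusercontent.com', ' s3cr3t ')
        # returns ('AIzaKey', 'my-id', 's3cr3t'); inputs whose stripped form is
        # no longer a substring of the original are passed through unchanged.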
notification_data = {'use_httpd': (__settings.use_dash_videos() and
__settings.use_dash()) or
(__settings.api_config_page()),
'httpd_port': __settings.httpd_port(),
'whitelist': __settings.httpd_whitelist(),
'httpd_address': __settings.httpd_listen()
}
__context.send_notification('check_settings', notification_data)
_api_check = APICheck(__context, __settings)
keys_changed = _api_check.changed
current_user = _api_check.get_current_user()
api = dict()
youtube_tv = dict()
_current_switch = _api_check.get_current_switch()
api['key'], api['id'], api['secret'] = _api_check.get_api_keys(_current_switch)
youtube_tv['key'], youtube_tv['id'], youtube_tv['secret'] = _api_check.get_api_keys('youtube-tv')
developer_keys = _api_check.get_api_keys('developer')
|
gpl-2.0
| 1,809,156,470,379,663,000 | 47.157895 | 127 | 0.576321 | false |
ChameleonCloud/horizon
|
openstack_dashboard/test/unit/api/test_neutron.py
|
1
|
76571
|
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
import netaddr
from neutronclient.common import exceptions as neutron_exc
from oslo_utils import uuidutils
import six
from django.test.utils import override_settings
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.test import helpers as test
class NeutronApiTests(test.APIMockTestCase):
@mock.patch.object(api.neutron, 'neutronclient')
def test_network_list(self, mock_neutronclient):
networks = {'networks': self.api_networks.list()}
subnets = {'subnets': self.api_subnets.list()}
neutronclient = mock_neutronclient.return_value
neutronclient.list_networks.return_value = networks
neutronclient.list_subnets.return_value = subnets
ret_val = api.neutron.network_list(self.request)
for n in ret_val:
self.assertIsInstance(n, api.neutron.Network)
neutronclient.list_networks.assert_called_once_with()
neutronclient.list_subnets.assert_called_once_with()
@override_settings(OPENSTACK_NEUTRON_NETWORK={
'enable_auto_allocated_network': True})
@test.create_mocks({api.neutron: ('network_list',
'subnet_list')})
def _test_network_list_for_tenant(
self, include_external,
filter_params, should_called, **extra_kwargs):
"""Convenient method to test network_list_for_tenant.
:param include_external: Passed to network_list_for_tenant.
:param filter_params: Filters passed to network_list_for_tenant
        :param should_called: names of the network_list lookups expected to
        be called. Valid values are non_shared, shared, and external.
"""
filter_params = filter_params or {}
all_networks = self.networks.list()
tenant_id = '1'
tenant_networks = [n for n in all_networks
if n['tenant_id'] == tenant_id]
shared_networks = [n for n in all_networks if n['shared']]
external_networks = [n for n in all_networks if n['router:external']]
return_values = []
expected_calls = []
if 'non_shared' in should_called:
params = filter_params.copy()
params['shared'] = False
return_values.append(tenant_networks)
expected_calls.append(
mock.call(test.IsHttpRequest(), tenant_id=tenant_id, **params),
)
if 'shared' in should_called:
params = filter_params.copy()
params['shared'] = True
return_values.append(shared_networks)
expected_calls.append(
mock.call(test.IsHttpRequest(), **params),
)
if 'external' in should_called:
params = filter_params.copy()
params['router:external'] = True
return_values.append(external_networks)
expected_calls.append(
mock.call(test.IsHttpRequest(), **params),
)
self.mock_network_list.side_effect = return_values
extra_kwargs.update(filter_params)
ret_val = api.neutron.network_list_for_tenant(
self.request, tenant_id,
include_external=include_external,
**extra_kwargs)
expected = []
if 'non_shared' in should_called:
expected += tenant_networks
if 'shared' in should_called:
expected += shared_networks
if 'external' in should_called and include_external:
expected += external_networks
self.assertEqual(set(n.id for n in expected),
set(n.id for n in ret_val))
self.mock_network_list.assert_has_calls(expected_calls)
# Ensure all three types of networks are not empty. This is required
# to check 'pre_auto_allocate' network is not included.
self.assertTrue(tenant_networks)
self.assertTrue(shared_networks)
self.assertTrue(external_networks)
self.assertNotIn(api.neutron.AUTO_ALLOCATE_ID,
[n.id for n in ret_val])
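        # For illustration (not part of the original tests): the helper above
        # is exercised by calls such as
        #   self._test_network_list_for_tenant(
        #       include_external=True, filter_params={'shared': True},
        #       should_called=['shared', 'external'])
        # which assert that only the shared and external network_list()
        # lookups run and that their combined results form the return value.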
def test_network_list_for_tenant(self):
self._test_network_list_for_tenant(
include_external=False, filter_params=None,
should_called=['non_shared', 'shared'])
def test_network_list_for_tenant_with_external(self):
self._test_network_list_for_tenant(
include_external=True, filter_params=None,
should_called=['non_shared', 'shared', 'external'])
def test_network_list_for_tenant_with_filters_shared_false_wo_incext(self):
self._test_network_list_for_tenant(
include_external=False, filter_params={'shared': True},
should_called=['shared'])
def test_network_list_for_tenant_with_filters_shared_true_w_incext(self):
self._test_network_list_for_tenant(
include_external=True, filter_params={'shared': True},
should_called=['shared', 'external'])
def test_network_list_for_tenant_with_filters_ext_false_wo_incext(self):
self._test_network_list_for_tenant(
include_external=False, filter_params={'router:external': False},
should_called=['non_shared', 'shared'])
def test_network_list_for_tenant_with_filters_ext_true_wo_incext(self):
self._test_network_list_for_tenant(
include_external=False, filter_params={'router:external': True},
should_called=['non_shared', 'shared'])
def test_network_list_for_tenant_with_filters_ext_false_w_incext(self):
self._test_network_list_for_tenant(
include_external=True, filter_params={'router:external': False},
should_called=['non_shared', 'shared'])
def test_network_list_for_tenant_with_filters_ext_true_w_incext(self):
self._test_network_list_for_tenant(
include_external=True, filter_params={'router:external': True},
should_called=['non_shared', 'shared', 'external'])
def test_network_list_for_tenant_with_filters_both_shared_ext(self):
# To check 'shared' filter is specified in network_list
# to look up external networks.
self._test_network_list_for_tenant(
include_external=True,
filter_params={'router:external': True, 'shared': True},
should_called=['shared', 'external'])
def test_network_list_for_tenant_with_other_filters(self):
# To check filter parameters other than shared and
# router:external are passed as expected.
self._test_network_list_for_tenant(
include_external=True,
filter_params={'router:external': True, 'shared': False,
'foo': 'bar'},
should_called=['non_shared', 'external'])
def test_network_list_for_tenant_no_pre_auto_allocate_if_net_exists(self):
self._test_network_list_for_tenant(
include_external=True, filter_params=None,
should_called=['non_shared', 'shared', 'external'],
include_pre_auto_allocate=True)
@override_settings(OPENSTACK_NEUTRON_NETWORK={
'enable_auto_allocated_network': True})
@test.create_mocks({api.neutron: ['network_list',
'is_extension_supported'],
api.nova: ['is_feature_available']})
def test_network_list_for_tenant_with_pre_auto_allocate(self):
tenant_id = '1'
self.mock_network_list.return_value = []
self.mock_is_extension_supported.return_value = True
self.mock_is_feature_available.return_value = True
ret_val = api.neutron.network_list_for_tenant(
self.request, tenant_id, include_pre_auto_allocate=True)
self.assertEqual(1, len(ret_val))
self.assertIsInstance(ret_val[0], api.neutron.PreAutoAllocateNetwork)
self.assertEqual(api.neutron.AUTO_ALLOCATE_ID, ret_val[0].id)
self.assertEqual(2, self.mock_network_list.call_count)
self.mock_network_list.assert_has_calls([
mock.call(test.IsHttpRequest(), tenant_id=tenant_id,
shared=False),
mock.call(test.IsHttpRequest(), shared=True),
])
self.mock_is_extension_supported.assert_called_once_with(
test.IsHttpRequest(), 'auto-allocated-topology')
self.mock_is_feature_available.assert_called_once_with(
test.IsHttpRequest(),
('instance_description', 'auto_allocated_network'))
@test.create_mocks({api.neutron: ['network_list']})
def test_network_list_for_tenant_no_pre_auto_allocate_if_disabled(self):
tenant_id = '1'
self.mock_network_list.return_value = []
ret_val = api.neutron.network_list_for_tenant(
self.request, tenant_id, include_pre_auto_allocate=True)
self.assertEqual(0, len(ret_val))
self.assertEqual(2, self.mock_network_list.call_count)
self.mock_network_list.assert_has_calls([
mock.call(test.IsHttpRequest(), tenant_id=tenant_id,
shared=False),
mock.call(test.IsHttpRequest(), shared=True),
])
@mock.patch.object(api.neutron, 'neutronclient')
def test_network_get(self, mock_neutronclient):
network = {'network': self.api_networks.first()}
subnet = {'subnet': self.api_subnets.first()}
subnetv6 = {'subnet': self.api_subnets.list()[1]}
network_id = self.api_networks.first()['id']
subnet_id = self.api_networks.first()['subnets'][0]
subnetv6_id = self.api_networks.first()['subnets'][1]
neutronclient = mock_neutronclient.return_value
neutronclient.show_network.return_value = network
neutronclient.show_subnet.side_effect = [subnet, subnetv6]
ret_val = api.neutron.network_get(self.request, network_id)
self.assertIsInstance(ret_val, api.neutron.Network)
self.assertEqual(2, len(ret_val['subnets']))
self.assertIsInstance(ret_val['subnets'][0], api.neutron.Subnet)
neutronclient.show_network.assert_called_once_with(network_id)
neutronclient.show_subnet.assert_has_calls([
mock.call(subnet_id),
mock.call(subnetv6_id),
])
@mock.patch.object(api.neutron, 'neutronclient')
def test_network_get_with_subnet_get_notfound(self, mock_neutronclient):
network = {'network': self.api_networks.first()}
network_id = self.api_networks.first()['id']
subnet_id = self.api_networks.first()['subnets'][0]
neutronclient = mock_neutronclient.return_value
neutronclient.show_network.return_value = network
neutronclient.show_subnet.side_effect = neutron_exc.NotFound
ret_val = api.neutron.network_get(self.request, network_id)
self.assertIsInstance(ret_val, api.neutron.Network)
self.assertEqual(2, len(ret_val['subnets']))
self.assertNotIsInstance(ret_val['subnets'][0], api.neutron.Subnet)
self.assertIsInstance(ret_val['subnets'][0], str)
neutronclient.show_network.assert_called_once_with(network_id)
neutronclient.show_subnet.assert_called_once_with(subnet_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_network_create(self, mock_neutronclient):
network = {'network': self.api_networks.first()}
form_data = {'network': {'name': 'net1',
'tenant_id': self.request.user.project_id}}
neutronclient = mock_neutronclient.return_value
neutronclient.create_network.return_value = network
ret_val = api.neutron.network_create(self.request, name='net1')
self.assertIsInstance(ret_val, api.neutron.Network)
neutronclient.create_network.assert_called_once_with(body=form_data)
@mock.patch.object(api.neutron, 'neutronclient')
def test_network_update(self, mock_neutronclient):
network = {'network': self.api_networks.first()}
network_id = self.api_networks.first()['id']
neutronclient = mock_neutronclient.return_value
form_data = {'network': {'name': 'net1'}}
neutronclient.update_network.return_value = network
ret_val = api.neutron.network_update(self.request, network_id,
name='net1')
self.assertIsInstance(ret_val, api.neutron.Network)
neutronclient.update_network.assert_called_once_with(network_id,
body=form_data)
@mock.patch.object(api.neutron, 'neutronclient')
def test_network_delete(self, mock_neutronclient):
network_id = self.api_networks.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.delete_network.return_value = None
api.neutron.network_delete(self.request, network_id)
neutronclient.delete_network.assert_called_once_with(network_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_get_network_ip_availability(self, mock_neutronclient):
network = {'network': self.api_networks.first()}
mock_ip_availability = self.ip_availability.get()
neutronclient = mock_neutronclient.return_value
neutronclient.show_network_ip_availability.return_value = \
mock_ip_availability
ret_val = api.neutron.show_network_ip_availability(self.request,
network)
self.assertIsInstance(ret_val, dict)
neutronclient.show_network_ip_availability.assert_called_once_with(
network)
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnet_network_ip_availability(self, mock_neutronclient):
network = {'network': self.api_networks.first()}
mock_ip_availability = self.ip_availability.get()
neutronclient = mock_neutronclient.return_value
neutronclient.show_network_ip_availability.return_value = \
mock_ip_availability
ip_availability = api.neutron. \
show_network_ip_availability(self.request, network)
availabilities = ip_availability.get("network_ip_availability",
{})
ret_val = availabilities.get("subnet_ip_availability", [])
self.assertIsInstance(ret_val, list)
neutronclient.show_network_ip_availability.assert_called_once_with(
network)
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnet_list(self, mock_neutronclient):
subnets = {'subnets': self.api_subnets.list()}
neutronclient = mock_neutronclient.return_value
neutronclient.list_subnets.return_value = subnets
ret_val = api.neutron.subnet_list(self.request)
for n in ret_val:
self.assertIsInstance(n, api.neutron.Subnet)
neutronclient.list_subnets.assert_called_once_with()
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnet_get(self, mock_neutronclient):
subnet = {'subnet': self.api_subnets.first()}
subnet_id = self.api_subnets.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.show_subnet.return_value = subnet
ret_val = api.neutron.subnet_get(self.request, subnet_id)
self.assertIsInstance(ret_val, api.neutron.Subnet)
neutronclient.show_subnet.assert_called_once_with(subnet_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnet_create(self, mock_neutronclient):
subnet_data = self.api_subnets.first()
params = {'network_id': subnet_data['network_id'],
'tenant_id': subnet_data['tenant_id'],
'name': subnet_data['name'],
'cidr': subnet_data['cidr'],
'ip_version': subnet_data['ip_version'],
'gateway_ip': subnet_data['gateway_ip']}
neutronclient = mock_neutronclient.return_value
neutronclient.create_subnet.return_value = {'subnet': subnet_data}
ret_val = api.neutron.subnet_create(self.request, **params)
self.assertIsInstance(ret_val, api.neutron.Subnet)
neutronclient.create_subnet.assert_called_once_with(
body={'subnet': params})
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnet_update(self, mock_neutronclient):
subnet_data = self.api_subnets.first()
subnet_id = subnet_data['id']
params = {'name': subnet_data['name'],
'gateway_ip': subnet_data['gateway_ip']}
neutronclient = mock_neutronclient.return_value
neutronclient.update_subnet.return_value = {'subnet': subnet_data}
ret_val = api.neutron.subnet_update(self.request, subnet_id, **params)
self.assertIsInstance(ret_val, api.neutron.Subnet)
neutronclient.update_subnet.assert_called_once_with(
subnet_id, body={'subnet': params})
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnet_delete(self, mock_neutronclient):
subnet_id = self.api_subnets.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.delete_subnet.return_value = None
api.neutron.subnet_delete(self.request, subnet_id)
neutronclient.delete_subnet.assert_called_once_with(subnet_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnetpool_list(self, mock_neutronclient):
subnetpools = {'subnetpools': self.api_subnetpools.list()}
neutronclient = mock_neutronclient.return_value
neutronclient.list_subnetpools.return_value = subnetpools
ret_val = api.neutron.subnetpool_list(self.request)
for n in ret_val:
self.assertIsInstance(n, api.neutron.SubnetPool)
neutronclient.list_subnetpools.assert_called_once_with()
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnetpool_get(self, mock_neutronclient):
subnetpool = {'subnetpool': self.api_subnetpools.first()}
subnetpool_id = self.api_subnetpools.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.show_subnetpool.return_value = subnetpool
ret_val = api.neutron.subnetpool_get(self.request, subnetpool_id)
self.assertIsInstance(ret_val, api.neutron.SubnetPool)
neutronclient.show_subnetpool.assert_called_once_with(subnetpool_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnetpool_create(self, mock_neutronclient):
subnetpool_data = self.api_subnetpools.first()
params = {'name': subnetpool_data['name'],
'prefixes': subnetpool_data['prefixes'],
'tenant_id': subnetpool_data['tenant_id']}
neutronclient = mock_neutronclient.return_value
neutronclient.create_subnetpool.return_value = {'subnetpool':
subnetpool_data}
ret_val = api.neutron.subnetpool_create(self.request, **params)
self.assertIsInstance(ret_val, api.neutron.SubnetPool)
neutronclient.create_subnetpool.assert_called_once_with(
body={'subnetpool': params})
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnetpool_update(self, mock_neutronclient):
subnetpool_data = self.api_subnetpools.first()
subnetpool_id = subnetpool_data['id']
params = {'name': subnetpool_data['name'],
'prefixes': subnetpool_data['prefixes']}
neutronclient = mock_neutronclient.return_value
neutronclient.update_subnetpool.return_value = {'subnetpool':
subnetpool_data}
ret_val = api.neutron.subnetpool_update(self.request, subnetpool_id,
**params)
self.assertIsInstance(ret_val, api.neutron.SubnetPool)
neutronclient.update_subnetpool.assert_called_once_with(
subnetpool_id, body={'subnetpool': params})
@mock.patch.object(api.neutron, 'neutronclient')
def test_subnetpool_delete(self, mock_neutronclient):
subnetpool_id = self.api_subnetpools.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.delete_subnetpool.return_value = None
api.neutron.subnetpool_delete(self.request, subnetpool_id)
neutronclient.delete_subnetpool.assert_called_once_with(subnetpool_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_port_list(self, mock_neutronclient):
ports = {'ports': self.api_ports.list()}
neutronclient = mock_neutronclient.return_value
neutronclient.list_ports.return_value = ports
ret_val = api.neutron.port_list(self.request)
for p in ret_val:
self.assertIsInstance(p, api.neutron.Port)
neutronclient.list_ports.assert_called_once_with()
@mock.patch.object(api.neutron, 'is_extension_supported')
@mock.patch.object(api.neutron, 'neutronclient')
def test_port_list_with_trunk_types(
self, mock_neutronclient, mock_is_extension_supported):
ports = self.api_tp_ports.list()
trunks = self.api_tp_trunks.list()
# list_extensions is decorated with memoized_with_request, so
# neutronclient() is not called. We need to mock it separately.
mock_is_extension_supported.return_value = True # trunk
neutronclient = mock_neutronclient.return_value
neutronclient.list_ports.return_value = {'ports': ports}
neutronclient.list_trunks.return_value = {'trunks': trunks}
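        # Classify the fixture ports into the expected sets: trunk parent
        # ports, trunk subports, and the remaining "normal" ports.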
expected_parent_port_ids = set()
expected_subport_ids = set()
for trunk in trunks:
expected_parent_port_ids.add(trunk['port_id'])
expected_subport_ids |= set([p['port_id'] for p
in trunk['sub_ports']])
expected_normal_port_ids = ({p['id'] for p in ports} -
expected_parent_port_ids -
expected_subport_ids)
ret_val = api.neutron.port_list_with_trunk_types(self.request)
self.assertEqual(len(ports), len(ret_val))
parent_port_ids = {p.id for p in ret_val
if isinstance(p, api.neutron.PortTrunkParent)}
subport_ids = {p.id for p in ret_val
if isinstance(p, api.neutron.PortTrunkSubport)}
normal_port_ids = ({p.id for p in ret_val} -
parent_port_ids - subport_ids)
self.assertEqual(expected_parent_port_ids, parent_port_ids)
self.assertEqual(expected_subport_ids, subport_ids)
self.assertEqual(expected_normal_port_ids, normal_port_ids)
mock_is_extension_supported.assert_called_once_with(
test.IsHttpRequest(), 'trunk')
neutronclient.list_ports.assert_called_once_with()
neutronclient.list_trunks.assert_called_once_with()
@mock.patch.object(api.neutron, 'is_extension_supported')
@mock.patch.object(api.neutron, 'neutronclient')
def test_port_list_with_trunk_types_without_trunk_extension(
self, mock_neutronclient, mock_is_extension_supported):
ports = self.api_tp_ports.list()
# list_extensions is decorated with memoized_with_request,
        # the simplest way is to mock it directly.
mock_is_extension_supported.return_value = False # trunk
neutronclient = mock_neutronclient.return_value
neutronclient.list_ports.return_value = {'ports': ports}
ret_val = api.neutron.port_list_with_trunk_types(self.request)
self.assertEqual(len(ports), len(ret_val))
self.assertEqual(set(p['id'] for p in ports),
set(p.id for p in ret_val))
# When trunk extension is disabled, all returned values should be
# instances of Port class.
self.assertTrue(all(isinstance(p, api.neutron.Port) for p in ret_val))
mock_is_extension_supported.assert_called_once_with(
test.IsHttpRequest(), 'trunk')
neutronclient.list_ports.assert_called_once_with()
@mock.patch.object(api.neutron, 'neutronclient')
def test_port_get(self, mock_neutronclient):
port = {'port': self.api_ports.first()}
port_id = self.api_ports.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.show_port.return_value = port
ret_val = api.neutron.port_get(self.request, port_id)
self.assertIsInstance(ret_val, api.neutron.Port)
neutronclient.show_port.assert_called_once_with(port_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_port_create(self, mock_neutronclient):
port = {'port': self.api_ports.first()}
params = {'network_id': port['port']['network_id'],
'tenant_id': port['port']['tenant_id'],
'name': port['port']['name'],
'device_id': port['port']['device_id']}
neutronclient = mock_neutronclient.return_value
neutronclient.create_port.return_value = port
ret_val = api.neutron.port_create(self.request, **params)
self.assertIsInstance(ret_val, api.neutron.Port)
self.assertEqual(api.neutron.Port(port['port']).id, ret_val.id)
neutronclient.create_port.assert_called_once_with(
body={'port': params})
@mock.patch.object(api.neutron, 'neutronclient')
def test_port_update(self, mock_neutronclient):
port_data = self.api_ports.first()
port_id = port_data['id']
params = {'name': port_data['name'],
'device_id': port_data['device_id']}
neutronclient = mock_neutronclient.return_value
neutronclient.update_port.return_value = {'port': port_data}
ret_val = api.neutron.port_update(self.request, port_id, **params)
self.assertIsInstance(ret_val, api.neutron.Port)
self.assertEqual(api.neutron.Port(port_data).id, ret_val.id)
neutronclient.update_port.assert_called_once_with(
port_id, body={'port': params})
@mock.patch.object(api.neutron, 'neutronclient')
def test_port_delete(self, mock_neutronclient):
port_id = self.api_ports.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.delete_port.return_value = None
api.neutron.port_delete(self.request, port_id)
neutronclient.delete_port.assert_called_once_with(port_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_trunk_list(self, mock_neutronclient):
trunks = {'trunks': self.api_trunks.list()}
neutron_client = mock_neutronclient.return_value
neutron_client.list_trunks.return_value = trunks
ret_val = api.neutron.trunk_list(self.request)
for t in ret_val:
self.assertIsInstance(t, api.neutron.Trunk)
neutron_client.list_trunks.assert_called_once_with()
@mock.patch.object(api.neutron, 'neutronclient')
def test_trunk_show(self, mock_neutronclient):
trunk = {'trunk': self.api_trunks.first()}
trunk_id = self.api_trunks.first()['id']
neutron_client = mock_neutronclient.return_value
neutron_client.show_trunk.return_value = trunk
ret_val = api.neutron.trunk_show(self.request, trunk_id)
self.assertIsInstance(ret_val, api.neutron.Trunk)
neutron_client.show_trunk.assert_called_once_with(trunk_id)
def test_trunk_object(self):
trunk = self.api_trunks.first().copy()
obj = api.neutron.Trunk(trunk)
self.assertEqual(0, obj.subport_count)
trunk_dict = obj.to_dict()
self.assertIsInstance(trunk_dict, dict)
self.assertEqual(trunk['name'], trunk_dict['name_or_id'])
self.assertEqual(0, trunk_dict['subport_count'])
trunk['name'] = '' # to test name_or_id
trunk['sub_ports'] = [uuidutils.generate_uuid() for i in range(2)]
obj = api.neutron.Trunk(trunk)
self.assertEqual(2, obj.subport_count)
trunk_dict = obj.to_dict()
self.assertEqual(obj.name_or_id, trunk_dict['name_or_id'])
self.assertEqual(2, trunk_dict['subport_count'])
@mock.patch.object(api.neutron, 'neutronclient')
def test_trunk_create(self, mock_neutronclient):
trunk = {'trunk': self.api_trunks.first()}
params = {'name': trunk['trunk']['name'],
'port_id': trunk['trunk']['port_id'],
'project_id': trunk['trunk']['project_id']}
neutronclient = mock_neutronclient.return_value
neutronclient.create_trunk.return_value = trunk
ret_val = api.neutron.trunk_create(self.request, **params)
self.assertIsInstance(ret_val, api.neutron.Trunk)
self.assertEqual(api.neutron.Trunk(trunk['trunk']).id, ret_val.id)
neutronclient.create_trunk.assert_called_once_with(
body={'trunk': params})
@mock.patch.object(api.neutron, 'neutronclient')
def test_trunk_delete(self, mock_neutronclient):
trunk_id = self.api_trunks.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.delete_trunk.return_value = None
api.neutron.trunk_delete(self.request, trunk_id)
neutronclient.delete_trunk.assert_called_once_with(trunk_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_trunk_update_details(self, mock_neutronclient):
trunk_data = self.api_trunks.first()
trunk_id = trunk_data['id']
old_trunk = {'name': trunk_data['name'],
'description': trunk_data['description'],
'id': trunk_data['id'],
'port_id': trunk_data['port_id'],
'admin_state_up': trunk_data['admin_state_up']}
new_trunk = {'name': 'foo',
'description': trunk_data['description'],
'id': trunk_data['id'],
'port_id': trunk_data['port_id'],
'admin_state_up': trunk_data['admin_state_up']}
neutronclient = mock_neutronclient.return_value
neutronclient.update_trunk.return_value = {'trunk': new_trunk}
ret_val = api.neutron.trunk_update(self.request, trunk_id,
old_trunk, new_trunk)
self.assertIsInstance(ret_val, api.neutron.Trunk)
self.assertEqual(api.neutron.Trunk(trunk_data).id, ret_val.id)
self.assertEqual(ret_val.name, new_trunk['name'])
neutronclient.update_trunk.assert_called_once_with(
trunk_id, body={'trunk': {'name': 'foo'}})
@mock.patch.object(api.neutron, 'neutronclient')
def test_trunk_update_add_subports(self, mock_neutronclient):
trunk_data = self.api_trunks.first()
trunk_id = trunk_data['id']
old_trunk = {'name': trunk_data['name'],
'description': trunk_data['description'],
'id': trunk_data['id'],
'port_id': trunk_data['port_id'],
'sub_ports': trunk_data['sub_ports'],
'admin_state_up': trunk_data['admin_state_up']}
new_trunk = {'name': trunk_data['name'],
'description': trunk_data['description'],
'id': trunk_data['id'],
'port_id': trunk_data['port_id'],
'sub_ports': [
{'port_id': 1,
'segmentation_id': 100,
'segmentation_type': 'vlan'}],
'admin_state_up': trunk_data['admin_state_up']}
neutronclient = mock_neutronclient.return_value
neutronclient.trunk_add_subports.return_value = {'trunk': new_trunk}
ret_val = api.neutron.trunk_update(self.request, trunk_id,
old_trunk, new_trunk)
self.assertIsInstance(ret_val, api.neutron.Trunk)
self.assertEqual(api.neutron.Trunk(trunk_data).id, ret_val.trunk['id'])
self.assertEqual(ret_val.trunk['sub_ports'], new_trunk['sub_ports'])
neutronclient.trunk_add_subports.assert_called_once_with(
trunk_id,
body={'sub_ports': [{'port_id': 1, 'segmentation_id': 100,
'segmentation_type': 'vlan'}]}
)
@mock.patch.object(api.neutron, 'neutronclient')
def test_trunk_update_remove_subports(self, mock_neutronclient):
trunk_data = self.api_trunks.first()
trunk_id = trunk_data['id']
old_trunk = {'name': trunk_data['name'],
'description': trunk_data['description'],
'id': trunk_data['id'],
'port_id': trunk_data['port_id'],
'sub_ports': [
{'port_id': 1,
'segmentation_id': 100,
'segmentation_type': 'vlan'}],
'admin_state_up': trunk_data['admin_state_up']}
new_trunk = {'name': trunk_data['name'],
'description': trunk_data['description'],
'id': trunk_data['id'],
'port_id': trunk_data['port_id'],
'sub_ports': [],
'admin_state_up': trunk_data['admin_state_up']}
neutronclient = mock_neutronclient.return_value
neutronclient.trunk_remove_subports.return_value = {'trunk': new_trunk}
ret_val = api.neutron.trunk_update(self.request, trunk_id,
old_trunk, new_trunk)
self.assertIsInstance(ret_val, api.neutron.Trunk)
self.assertEqual(api.neutron.Trunk(trunk_data).id, ret_val.trunk['id'])
self.assertEqual(ret_val.trunk['sub_ports'], new_trunk['sub_ports'])
neutronclient.trunk_remove_subports.assert_called_once_with(
trunk_id,
body={'sub_ports': [{'port_id':
old_trunk['sub_ports'][0]['port_id']}]}
)
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_list(self, mock_neutronclient):
routers = {'routers': self.api_routers.list()}
neutronclient = mock_neutronclient.return_value
neutronclient.list_routers.return_value = routers
ret_val = api.neutron.router_list(self.request)
for n in ret_val:
self.assertIsInstance(n, api.neutron.Router)
neutronclient.list_routers.assert_called_once_with()
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_get(self, mock_neutronclient):
router = {'router': self.api_routers.first()}
router_id = self.api_routers.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.show_router.return_value = router
ret_val = api.neutron.router_get(self.request, router_id)
self.assertIsInstance(ret_val, api.neutron.Router)
neutronclient.show_router.assert_called_once_with(router_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_create(self, mock_neutronclient):
router = {'router': self.api_routers.first()}
neutronclient = mock_neutronclient.return_value
form_data = {'router': {'name': 'router1',
'tenant_id': self.request.user.project_id}}
neutronclient.create_router.return_value = router
ret_val = api.neutron.router_create(self.request, name='router1')
self.assertIsInstance(ret_val, api.neutron.Router)
neutronclient.create_router.assert_called_once_with(body=form_data)
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_delete(self, mock_neutronclient):
router_id = self.api_routers.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.delete_router.return_value = None
api.neutron.router_delete(self.request, router_id)
neutronclient.delete_router.assert_called_once_with(router_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_add_interface(self, mock_neutronclient):
subnet_id = self.api_subnets.first()['id']
router_id = self.api_routers.first()['id']
neutronclient = mock_neutronclient.return_value
form_data = {'subnet_id': subnet_id}
neutronclient.add_interface_router.return_value = None
api.neutron.router_add_interface(
self.request, router_id, subnet_id=subnet_id)
neutronclient.add_interface_router.assert_called_once_with(router_id,
form_data)
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_remove_interface(self, mock_neutronclient):
router_id = self.api_routers.first()['id']
fake_port = self.api_ports.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.remove_interface_router.return_value = None
api.neutron.router_remove_interface(
self.request, router_id, port_id=fake_port)
neutronclient.remove_interface_router.assert_called_once_with(
router_id, {'port_id': fake_port})
# Mocking neutronclient() does not work because api.neutron.list_extensions
# is decorated with memoized_with_request, so we need to mock
# neutronclient.v2_0.client directly.
@mock.patch('neutronclient.v2_0.client.Client.list_extensions')
def test_is_extension_supported(self, mock_list_extensions):
extensions = self.api_extensions.list()
mock_list_extensions.return_value = {'extensions': extensions}
self.assertTrue(
api.neutron.is_extension_supported(self.request, 'quotas'))
self.assertFalse(
api.neutron.is_extension_supported(self.request, 'doesntexist'))
mock_list_extensions.assert_called_once_with()
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_static_route_list(self, mock_neutronclient):
router = {'router': self.api_routers_with_routes.first()}
router_id = self.api_routers_with_routes.first()['id']
neutronclient = mock_neutronclient.return_value
neutronclient.show_router.return_value = router
ret_val = api.neutron.router_static_route_list(self.request, router_id)
self.assertIsInstance(ret_val[0], api.neutron.RouterStaticRoute)
neutronclient.show_router.assert_called_once_with(router_id)
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_static_route_remove(self, mock_neutronclient):
router = {'router': self.api_routers_with_routes.first()}
router_id = self.api_routers_with_routes.first()['id']
post_router = copy.deepcopy(router)
route = api.neutron.RouterStaticRoute(post_router['router']
['routes'].pop())
neutronclient = mock_neutronclient.return_value
neutronclient.show_router.return_value = router
neutronclient.update_router.return_value = post_router
api.neutron.router_static_route_remove(self.request,
router_id, route.id)
neutronclient.show_router.assert_called_once_with(router_id)
body = {'router': {'routes': post_router['router']['routes']}}
neutronclient.update_router.assert_called_once_with(
router_id, body=body)
@mock.patch.object(api.neutron, 'neutronclient')
def test_router_static_route_add(self, mock_neutronclient):
router = {'router': self.api_routers_with_routes.first()}
router_id = self.api_routers_with_routes.first()['id']
post_router = copy.deepcopy(router)
route = {'nexthop': '10.0.0.5', 'destination': '40.0.1.0/24'}
post_router['router']['routes'].insert(0, route)
body = {'router': {'routes': post_router['router']['routes']}}
neutronclient = mock_neutronclient.return_value
neutronclient.show_router.return_value = router
neutronclient.update_router.return_value = post_router
api.neutron.router_static_route_add(self.request, router_id, route)
neutronclient.show_router.assert_called_once_with(router_id)
neutronclient.update_router.assert_called_once_with(router_id,
body=body)
# NOTE(amotoki): "dvr" permission tests check most of
# get_feature_permission features.
# These tests are not specific to "dvr" extension.
# Please be careful if you drop "dvr" extension in future.
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_distributed_router':
True},
POLICY_CHECK_FUNCTION=None)
@test.create_mocks({api.neutron: ('is_extension_supported',)})
def _test_get_dvr_permission_dvr_supported(self, dvr_enabled):
self.mock_is_extension_supported.return_value = dvr_enabled
self.assertEqual(dvr_enabled,
api.neutron.get_feature_permission(self.request,
'dvr', 'get'))
self.mock_is_extension_supported.assert_called_once_with(
self.request, 'dvr')
def test_get_dvr_permission_dvr_supported(self):
self._test_get_dvr_permission_dvr_supported(dvr_enabled=True)
def test_get_dvr_permission_dvr_not_supported(self):
self._test_get_dvr_permission_dvr_supported(dvr_enabled=False)
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_distributed_router':
True},
POLICY_CHECK_FUNCTION='openstack_auth.policy.check')
@test.create_mocks({api.neutron: ('is_extension_supported',),
policy: ('check',)})
def _test_get_dvr_permission_with_policy_check(self, policy_check_allowed,
operation):
if operation == "create":
role = (("network", "create_router:distributed"),)
elif operation == "get":
role = (("network", "get_router:distributed"),)
self.mock_check.return_value = policy_check_allowed
self.mock_is_extension_supported.return_value = policy_check_allowed
self.assertEqual(policy_check_allowed,
api.neutron.get_feature_permission(self.request,
'dvr', operation))
self.mock_check.assert_called_once_with(role, self.request)
if policy_check_allowed:
self.mock_is_extension_supported.assert_called_once_with(
self.request, 'dvr')
else:
self.mock_is_extension_supported.assert_not_called()
def test_get_dvr_permission_with_policy_check_allowed(self):
self._test_get_dvr_permission_with_policy_check(True, "get")
def test_get_dvr_permission_with_policy_check_disallowed(self):
self._test_get_dvr_permission_with_policy_check(False, "get")
def test_get_dvr_permission_create_with_policy_check_allowed(self):
self._test_get_dvr_permission_with_policy_check(True, "create")
def test_get_dvr_permission_create_with_policy_check_disallowed(self):
self._test_get_dvr_permission_with_policy_check(False, "create")
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_distributed_router':
False})
def test_get_dvr_permission_dvr_disabled_by_config(self):
self.assertFalse(api.neutron.get_feature_permission(self.request,
'dvr', 'get'))
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_distributed_router':
True},
POLICY_CHECK_FUNCTION='openstack_auth.policy.check')
def test_get_dvr_permission_dvr_unsupported_operation(self):
self.assertRaises(ValueError,
api.neutron.get_feature_permission,
self.request, 'dvr', 'unSupported')
@override_settings(OPENSTACK_NEUTRON_NETWORK={})
def test_get_dvr_permission_dvr_default_config(self):
self.assertFalse(api.neutron.get_feature_permission(self.request,
'dvr', 'get'))
@override_settings(OPENSTACK_NEUTRON_NETWORK={})
def test_get_dvr_permission_router_ha_default_config(self):
self.assertFalse(api.neutron.get_feature_permission(self.request,
'l3-ha', 'get'))
    # NOTE(amotoki): Most of get_feature_permission is covered by the "dvr"
    # checks above. The l3-ha check only covers l3-ha specific code.
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_ha_router': True},
POLICY_CHECK_FUNCTION='openstack_auth.policy.check')
@test.create_mocks({api.neutron: ('is_extension_supported',),
policy: ('check',)})
def _test_get_router_ha_permission_with_policy_check(self, ha_enabled):
role = (("network", "create_router:ha"),)
self.mock_check.return_value = True
self.mock_is_extension_supported.return_value = ha_enabled
self.assertEqual(ha_enabled,
api.neutron.get_feature_permission(self.request,
'l3-ha', 'create'))
self.mock_check.assert_called_once_with(role, self.request)
self.mock_is_extension_supported.assert_called_once_with(self.request,
'l3-ha')
def test_get_router_ha_permission_with_l3_ha_extension(self):
self._test_get_router_ha_permission_with_policy_check(True)
def test_get_router_ha_permission_without_l3_ha_extension(self):
self._test_get_router_ha_permission_with_policy_check(False)
@mock.patch.object(api.neutron, 'neutronclient')
def test_list_resources_with_long_filters(self, mock_neutronclient):
        # In this test, port_list is called with an id=[10 port IDs]
        # filter, which generates a URI roughly 40*10 characters long.
        # Each port ID becomes "id=<UUID>&" in the URI, i.e. about
        # 40 chars (len(UUID) = 36).
        # With an excess length of 220, 400-220=180 chars can be sent
        # in the first request.
        # As a result, three API calls with 4, 4 and 2 port IDs
        # are expected.
ports = [{'id': uuidutils.generate_uuid(),
'name': 'port%s' % i,
'admin_state_up': True}
for i in range(10)]
port_ids = tuple([port['id'] for port in ports])
neutronclient = mock_neutronclient.return_value
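        # The first list_ports call raises RequestURITooLong; subsequent
        # calls return the ports in chunks of at most four.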
uri_len_exc = neutron_exc.RequestURITooLong(excess=220)
list_ports_retval = [uri_len_exc]
for i in range(0, 10, 4):
list_ports_retval.append({'ports': ports[i:i + 4]})
neutronclient.list_ports.side_effect = list_ports_retval
ret_val = api.neutron.list_resources_with_long_filters(
api.neutron.port_list, 'id', tuple(port_ids),
request=self.request)
self.assertEqual(10, len(ret_val))
self.assertEqual(port_ids, tuple([p.id for p in ret_val]))
expected_calls = []
expected_calls.append(mock.call(id=tuple(port_ids)))
for i in range(0, 10, 4):
expected_calls.append(mock.call(id=tuple(port_ids[i:i + 4])))
neutronclient.list_ports.assert_has_calls(expected_calls)
@mock.patch.object(api.neutron, 'neutronclient')
def test_qos_policies_list(self, mock_neutronclient):
exp_policies = self.qos_policies.list()
api_qos_policies = {'policies': self.api_qos_policies.list()}
neutronclient = mock_neutronclient.return_value
neutronclient.list_qos_policies.return_value = api_qos_policies
ret_val = api.neutron.policy_list(self.request)
self.assertEqual(len(ret_val), len(exp_policies))
self.assertIsInstance(ret_val[0], api.neutron.QoSPolicy)
self.assertEqual(exp_policies[0].name, ret_val[0].name)
neutronclient.list_qos_policies.assert_called_once_with()
@mock.patch.object(api.neutron, 'neutronclient')
def test_qos_policy_create(self, mock_neutronclient):
qos_policy = self.api_qos_policies.first()
post_data = {'policy': {'name': qos_policy['name']}}
neutronclient = mock_neutronclient.return_value
neutronclient.create_qos_policy.return_value = {'policy': qos_policy}
ret_val = api.neutron.policy_create(self.request,
name=qos_policy['name'])
self.assertIsInstance(ret_val, api.neutron.QoSPolicy)
self.assertEqual(qos_policy['name'], ret_val.name)
neutronclient.create_qos_policy.assert_called_once_with(body=post_data)
class NeutronApiSecurityGroupTests(test.APIMockTestCase):
def setUp(self):
super(NeutronApiSecurityGroupTests, self).setUp()
neutronclient = mock.patch.object(api.neutron, 'neutronclient').start()
self.qclient = neutronclient.return_value
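        # Map of security group ID -> name, used when comparing rules to
        # resolve the expected remote group name.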
self.sg_dict = dict([(sg['id'], sg['name']) for sg
in self.api_security_groups.list()])
def _cmp_sg_rule(self, exprule, retrule):
self.assertEqual(exprule['id'], retrule.id)
self.assertEqual(exprule['security_group_id'],
retrule.parent_group_id)
self.assertEqual(exprule['direction'],
retrule.direction)
self.assertEqual(exprule['ethertype'],
retrule.ethertype)
self.assertEqual(exprule['port_range_min'],
retrule.from_port)
self.assertEqual(exprule['port_range_max'],
                         retrule.to_port)
if (exprule['remote_ip_prefix'] is None and
exprule['remote_group_id'] is None):
expcidr = ('::/0' if exprule['ethertype'] == 'IPv6'
else '0.0.0.0/0')
else:
expcidr = exprule['remote_ip_prefix']
self.assertEqual(expcidr, retrule.ip_range.get('cidr'))
self.assertEqual(self.sg_dict.get(exprule['remote_group_id']),
retrule.group.get('name'))
def _cmp_sg(self, exp_sg, ret_sg):
self.assertEqual(exp_sg['id'], ret_sg.id)
self.assertEqual(exp_sg['name'], ret_sg.name)
        # When a SG has no rules, the neutron API response does not contain
        # a 'security_group_rules' field, so the .get() method needs to be used.
exp_rules = exp_sg.get('security_group_rules', [])
self.assertEqual(len(exp_rules), len(ret_sg.rules))
for (exprule, retrule) in six.moves.zip(exp_rules, ret_sg.rules):
self._cmp_sg_rule(exprule, retrule)
def _test_security_group_list(self, **params):
sgs = self.api_security_groups.list()
q_params = {'tenant_id': self.request.user.tenant_id}
# if tenant_id is specified, the passed tenant_id should be sent.
q_params.update(params)
# use deepcopy to ensure self.api_security_groups is not modified.
self.qclient.list_security_groups.return_value = {'security_groups':
copy.deepcopy(sgs)}
rets = api.neutron.security_group_list(self.request, **params)
self.assertEqual(len(sgs), len(rets))
for (exp, ret) in six.moves.zip(sgs, rets):
self._cmp_sg(exp, ret)
self.qclient.list_security_groups.assert_called_once_with(**q_params)
def test_security_group_list(self):
self._test_security_group_list()
def test_security_group_list_with_params(self):
self._test_security_group_list(name='sg1')
def test_security_group_list_with_tenant_id(self):
self._test_security_group_list(tenant_id='tenant1', name='sg1')
def test_security_group_get(self):
secgroup = self.api_security_groups.first()
sg_ids = set([secgroup['id']] +
[rule['remote_group_id'] for rule
in secgroup['security_group_rules']
if rule['remote_group_id']])
related_sgs = [sg for sg in self.api_security_groups.list()
if sg['id'] in sg_ids]
# use deepcopy to ensure self.api_security_groups is not modified.
self.qclient.show_security_group.return_value = \
{'security_group': copy.deepcopy(secgroup)}
self.qclient.list_security_groups.return_value = \
{'security_groups': related_sgs}
ret = api.neutron.security_group_get(self.request, secgroup['id'])
self._cmp_sg(secgroup, ret)
self.qclient.show_security_group.assert_called_once_with(
secgroup['id'])
self.qclient.list_security_groups.assert_called_once_with(
id=sg_ids, fields=['id', 'name'])
def test_security_group_create(self):
secgroup = self.api_security_groups.list()[1]
body = {'security_group':
{'name': secgroup['name'],
'description': secgroup['description'],
'tenant_id': self.request.user.project_id}}
self.qclient.create_security_group.return_value = \
{'security_group': copy.deepcopy(secgroup)}
ret = api.neutron.security_group_create(self.request, secgroup['name'],
secgroup['description'])
self._cmp_sg(secgroup, ret)
self.qclient.create_security_group.assert_called_once_with(body)
def test_security_group_update(self):
secgroup = self.api_security_groups.list()[1]
secgroup = copy.deepcopy(secgroup)
secgroup['name'] = 'newname'
secgroup['description'] = 'new description'
body = {'security_group':
{'name': secgroup['name'],
'description': secgroup['description']}}
self.qclient.update_security_group.return_value = {'security_group':
secgroup}
ret = api.neutron.security_group_update(self.request,
secgroup['id'],
secgroup['name'],
secgroup['description'])
self._cmp_sg(secgroup, ret)
self.qclient.update_security_group.assert_called_once_with(
secgroup['id'], body)
def test_security_group_delete(self):
secgroup = self.api_security_groups.first()
self.qclient.delete_security_group.return_value = None
api.neutron.security_group_delete(self.request, secgroup['id'])
self.qclient.delete_security_group.assert_called_once_with(
secgroup['id'])
def test_security_group_rule_create(self):
self._test_security_group_rule_create(with_desc=True)
def test_security_group_rule_create_without_desc(self):
self._test_security_group_rule_create(with_desc=False)
def test_security_group_rule_create_with_custom_protocol(self):
self._test_security_group_rule_create(custom_ip_proto=True)
def _test_security_group_rule_create(self, with_desc=False,
custom_ip_proto=False):
if custom_ip_proto:
sg_rule = [r for r in self.api_security_group_rules.list()
if r['protocol'] == '99'][0]
else:
sg_rule = [r for r in self.api_security_group_rules.list()
if r['protocol'] == 'tcp' and r['remote_ip_prefix']][0]
sg_id = sg_rule['security_group_id']
secgroup = [sg for sg in self.api_security_groups.list()
if sg['id'] == sg_id][0]
post_rule = copy.deepcopy(sg_rule)
del post_rule['id']
del post_rule['tenant_id']
if not with_desc:
del post_rule['description']
post_body = {'security_group_rule': post_rule}
self.qclient.create_security_group_rule.return_value = \
{'security_group_rule': copy.deepcopy(sg_rule)}
self.qclient.list_security_groups.return_value = \
{'security_groups': [copy.deepcopy(secgroup)]}
if with_desc:
description = sg_rule['description']
else:
description = None
ret = api.neutron.security_group_rule_create(
self.request, sg_rule['security_group_id'],
sg_rule['direction'], sg_rule['ethertype'], sg_rule['protocol'],
sg_rule['port_range_min'], sg_rule['port_range_max'],
sg_rule['remote_ip_prefix'], sg_rule['remote_group_id'],
description)
self._cmp_sg_rule(sg_rule, ret)
self.qclient.create_security_group_rule.assert_called_once_with(
post_body)
self.qclient.list_security_groups.assert_called_once_with(
id=set([sg_id]), fields=['id', 'name'])
def test_security_group_rule_delete(self):
sg_rule = self.api_security_group_rules.first()
self.qclient.delete_security_group_rule.return_value = None
api.neutron.security_group_rule_delete(self.request, sg_rule['id'])
self.qclient.delete_security_group_rule.assert_called_once_with(
sg_rule['id'])
def _get_instance(self, cur_sg_ids):
instance_port = [p for p in self.api_ports.list()
if p['device_owner'].startswith('compute:')][0]
instance_id = instance_port['device_id']
# Emulate an instance with two ports
instance_ports = []
for _i in range(2):
p = copy.deepcopy(instance_port)
p['id'] = uuidutils.generate_uuid()
p['security_groups'] = cur_sg_ids
instance_ports.append(p)
return (instance_id, instance_ports)
def test_server_security_groups(self):
cur_sg_ids = [sg['id'] for sg in self.api_security_groups.list()[:2]]
instance_id, instance_ports = self._get_instance(cur_sg_ids)
self.qclient.list_ports.return_value = {'ports': instance_ports}
secgroups = copy.deepcopy(self.api_security_groups.list())
self.qclient.list_security_groups.return_value = \
{'security_groups': secgroups}
api.neutron.server_security_groups(self.request, instance_id)
self.qclient.list_ports.assert_called_once_with(device_id=instance_id)
self.qclient.list_security_groups.assert_called_once_with(
id=set(cur_sg_ids))
def test_server_update_security_groups(self):
cur_sg_ids = [self.api_security_groups.first()['id']]
new_sg_ids = [sg['id'] for sg in self.api_security_groups.list()[:2]]
instance_id, instance_ports = self._get_instance(cur_sg_ids)
self.qclient.list_ports.return_value = {'ports': instance_ports}
self.qclient.update_port.side_effect = \
[{'port': p} for p in instance_ports]
api.neutron.server_update_security_groups(
self.request, instance_id, new_sg_ids)
self.qclient.list_ports.assert_called_once_with(device_id=instance_id)
body = {'port': {'security_groups': new_sg_ids}}
expected_calls = [mock.call(p['id'], body=body)
for p in instance_ports]
self.qclient.update_port.assert_has_calls(expected_calls)
class NeutronApiFloatingIpTests(test.APIMockTestCase):
def setUp(self):
super(NeutronApiFloatingIpTests, self).setUp()
neutronclient = mock.patch.object(api.neutron, 'neutronclient').start()
self.qclient = neutronclient.return_value
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_router': True})
def test_floating_ip_supported(self):
self.assertTrue(api.neutron.floating_ip_supported(self.request))
@override_settings(OPENSTACK_NEUTRON_NETWORK={'enable_router': False})
def test_floating_ip_supported_false(self):
self.assertFalse(api.neutron.floating_ip_supported(self.request))
def test_floating_ip_pools_list(self):
search_opts = {'router:external': True}
ext_nets = [n for n in self.api_networks.list()
if n['router:external']]
self.qclient.list_networks.return_value = {'networks': ext_nets}
rets = api.neutron.floating_ip_pools_list(self.request)
for attr in ['id', 'name']:
self.assertEqual([p[attr] for p in ext_nets],
[getattr(p, attr) for p in rets])
self.qclient.list_networks.assert_called_once_with(**search_opts)
def test_floating_ip_list(self):
fips = self.api_floating_ips.list()
filters = {'tenant_id': self.request.user.tenant_id}
self.qclient.list_floatingips.return_value = {'floatingips': fips}
self.qclient.list_ports.return_value = {'ports': self.api_ports.list()}
rets = api.neutron.tenant_floating_ip_list(self.request)
assoc_port = self.api_ports.list()[1]
self.assertEqual(len(fips), len(rets))
for ret, exp in zip(rets, fips):
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
self.assertEqual(exp[attr], getattr(ret, attr))
if exp['port_id']:
dev_id = assoc_port['device_id'] if exp['port_id'] else None
self.assertEqual(dev_id, ret.instance_id)
self.assertEqual('compute', ret.instance_type)
else:
self.assertIsNone(ret.instance_id)
self.assertIsNone(ret.instance_type)
self.qclient.list_floatingips.assert_called_once_with(**filters)
self.qclient.list_ports.assert_called_once_with(**filters)
def test_floating_ip_list_all_tenants(self):
fips = self.api_floating_ips.list()
self.qclient.list_floatingips.return_value = {'floatingips': fips}
self.qclient.list_ports.return_value = {'ports': self.api_ports.list()}
fip_manager = api.neutron.FloatingIpManager(self.request)
rets = fip_manager.list(all_tenants=True)
assoc_port = self.api_ports.list()[1]
self.assertEqual(len(fips), len(rets))
for ret, exp in zip(rets, fips):
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
self.assertEqual(getattr(ret, attr), exp[attr])
if exp['port_id']:
dev_id = assoc_port['device_id'] if exp['port_id'] else None
self.assertEqual(dev_id, ret.instance_id)
self.assertEqual('compute', ret.instance_type)
else:
self.assertIsNone(ret.instance_id)
self.assertIsNone(ret.instance_type)
self.qclient.list_floatingips.assert_called_once_with()
self.qclient.list_ports.assert_called_once_with()
def _test_floating_ip_get_associated(self, assoc_port, exp_instance_type):
fip = self.api_floating_ips.list()[1]
self.qclient.show_floatingip.return_value = {'floatingip': fip}
self.qclient.show_port.return_value = {'port': assoc_port}
ret = api.neutron.tenant_floating_ip_get(self.request, fip['id'])
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
self.assertEqual(fip[attr], getattr(ret, attr))
self.assertEqual(assoc_port['device_id'], ret.instance_id)
self.assertEqual(exp_instance_type, ret.instance_type)
self.qclient.show_floatingip.assert_called_once_with(fip['id'])
self.qclient.show_port.assert_called_once_with(assoc_port['id'])
def test_floating_ip_get_associated(self):
assoc_port = self.api_ports.list()[1]
self._test_floating_ip_get_associated(assoc_port, 'compute')
def test_floating_ip_get_associated_with_loadbalancer_vip(self):
assoc_port = copy.deepcopy(self.api_ports.list()[1])
assoc_port['device_owner'] = 'neutron:LOADBALANCER'
assoc_port['device_id'] = uuidutils.generate_uuid()
assoc_port['name'] = 'vip-' + uuidutils.generate_uuid()
self._test_floating_ip_get_associated(assoc_port, 'loadbalancer')
def test_floating_ip_get_unassociated(self):
fip = self.api_floating_ips.list()[0]
self.qclient.show_floatingip.return_value = {'floatingip': fip}
ret = api.neutron.tenant_floating_ip_get(self.request, fip['id'])
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
self.assertEqual(fip[attr], getattr(ret, attr))
self.assertIsNone(ret.instance_id)
self.assertIsNone(ret.instance_type)
self.qclient.show_floatingip.assert_called_once_with(fip['id'])
def test_floating_ip_allocate(self):
ext_nets = [n for n in self.api_networks.list()
if n['router:external']]
ext_net = ext_nets[0]
fip = self.api_floating_ips.first()
self.qclient.create_floatingip.return_value = {'floatingip': fip}
ret = api.neutron.tenant_floating_ip_allocate(self.request,
ext_net['id'])
for attr in ['id', 'ip', 'pool', 'fixed_ip', 'port_id']:
self.assertEqual(fip[attr], getattr(ret, attr))
self.assertIsNone(ret.instance_id)
self.assertIsNone(ret.instance_type)
self.qclient.create_floatingip.assert_called_once_with(
{'floatingip': {'floating_network_id': ext_net['id'],
'tenant_id': self.request.user.project_id}})
def test_floating_ip_release(self):
fip = self.api_floating_ips.first()
self.qclient.delete_floatingip.return_value = None
api.neutron.tenant_floating_ip_release(self.request, fip['id'])
self.qclient.delete_floatingip.assert_called_once_with(fip['id'])
def test_floating_ip_associate(self):
fip = self.api_floating_ips.list()[1]
assoc_port = self.api_ports.list()[1]
ip_address = assoc_port['fixed_ips'][0]['ip_address']
target_id = '%s_%s' % (assoc_port['id'], ip_address)
params = {'port_id': assoc_port['id'],
'fixed_ip_address': ip_address}
self.qclient.update_floatingip.return_value = None
api.neutron.floating_ip_associate(self.request, fip['id'], target_id)
self.qclient.update_floatingip.assert_called_once_with(
fip['id'], {'floatingip': params})
def test_floating_ip_disassociate(self):
fip = self.api_floating_ips.list()[1]
self.qclient.update_floatingip.return_value = None
api.neutron.floating_ip_disassociate(self.request, fip['id'])
self.qclient.update_floatingip.assert_called_once_with(
fip['id'], {'floatingip': {'port_id': None}})
def _get_target_id(self, port, ip=None, index=0):
param = {'id': port['id'],
'addr': ip or port['fixed_ips'][index]['ip_address']}
return '%(id)s_%(addr)s' % param
def _get_target_name(self, port, ip=None):
param = {'svrid': port['device_id'],
'addr': ip or port['fixed_ips'][0]['ip_address']}
return 'server_%(svrid)s: %(addr)s' % param
@override_settings(
OPENSTACK_NEUTRON_NETWORK={
'enable_fip_topology_check': True,
}
)
@mock.patch.object(api._nova, 'novaclient')
def test_floating_ip_target_list(self, mock_novaclient):
ports = self.api_ports.list()
        # The port on the first subnet is connected to a router
        # attached to an external network in neutron_data.
subnet_id = self.subnets.first().id
shared_nets = [n for n in self.api_networks.list() if n['shared']]
shared_subnet_ids = [s for n in shared_nets for s in n['subnets']]
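        # Build the expected (target id, target name) pairs: skip ports owned
        # by network services and non-IPv4 addresses, and keep only ports on
        # the router-connected or shared subnets.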
target_ports = []
for p in ports:
if p['device_owner'].startswith('network:'):
continue
port_subnets = [ip['subnet_id'] for ip in p['fixed_ips']]
if not (subnet_id in port_subnets or
(set(shared_subnet_ids) & set(port_subnets))):
continue
for ip in p['fixed_ips']:
if netaddr.IPAddress(ip['ip_address']).version != 4:
continue
target_ports.append((
self._get_target_id(p, ip['ip_address']),
self._get_target_name(p, ip['ip_address'])))
filters = {'tenant_id': self.request.user.tenant_id}
self.qclient.list_ports.return_value = {'ports': ports}
servers = self.servers.list()
novaclient = mock_novaclient.return_value
ver = mock.Mock(min_version='2.1', version='2.45')
novaclient.versions.get_current.return_value = ver
novaclient.servers.list.return_value = servers
ext_nets = [n for n in self.api_networks.list()
if n['router:external']]
list_networks_retvals = [{'networks': ext_nets},
{'networks': shared_nets}]
self.qclient.list_networks.side_effect = list_networks_retvals
self.qclient.list_routers.return_value = {'routers':
self.api_routers.list()}
shared_subs = [s for s in self.api_subnets.list()
if s['id'] in shared_subnet_ids]
self.qclient.list_subnets.return_value = {'subnets': shared_subs}
rets = api.neutron.floating_ip_target_list(self.request)
self.assertEqual(len(target_ports), len(rets))
for ret, exp in zip(rets, target_ports):
pid, ip_address = ret.id.split('_', 1)
            self.assertEqual(4, netaddr.IPAddress(ip_address).version)
self.assertEqual(exp[0], ret.id)
self.assertEqual(exp[1], ret.name)
self.qclient.list_ports.assert_called_once_with(**filters)
novaclient.versions.get_current.assert_called_once_with()
novaclient.servers.list.assert_called_once_with(
False, {'project_id': self.request.user.tenant_id})
self.qclient.list_networks.assert_has_calls([
mock.call(**{'router:external': True}),
mock.call(shared=True),
])
self.qclient.list_routers.assert_called_once_with()
self.qclient.list_subnets.assert_called_once_with()
@mock.patch.object(api._nova, 'novaclient')
def _test_target_floating_ip_port_by_instance(self, server, ports,
candidates, mock_novaclient):
# list_ports and list_networks are called multiple times,
# we prepare a list for return values.
list_ports_retvals = []
self.qclient.list_ports.side_effect = list_ports_retvals
list_nets_retvals = []
self.qclient.list_networks.side_effect = list_nets_retvals
# _target_ports_by_instance()
list_ports_retvals.append({'ports': candidates})
# _get_reachable_subnets()
ext_nets = [n for n in self.api_networks.list()
if n['router:external']]
list_nets_retvals.append({'networks': ext_nets})
self.qclient.list_routers.side_effect = [{'routers':
self.api_routers.list()}]
rinfs = [p for p in ports
if p['device_owner'] in api.neutron.ROUTER_INTERFACE_OWNERS]
list_ports_retvals.append({'ports': rinfs})
shared_nets = [n for n in self.api_networks.list() if n['shared']]
list_nets_retvals.append({'networks': shared_nets})
shared_subnet_ids = [s for n in shared_nets for s in n['subnets']]
shared_subs = [s for s in self.api_subnets.list()
if s['id'] in shared_subnet_ids]
self.qclient.list_subnets.side_effect = [{'subnets': shared_subs}]
# _get_server_name()
novaclient = mock_novaclient.return_value
ver = mock.Mock(min_version='2.1', version='2.45')
novaclient.versions.get_current.return_value = ver
novaclient.servers.get.return_value = server
ret_val = api.neutron.floating_ip_target_list_by_instance(self.request,
server.id)
self.qclient.list_ports.assert_has_calls([
mock.call(device_id=server.id),
mock.call(device_owner=api.neutron.ROUTER_INTERFACE_OWNERS),
])
self.qclient.list_networks.assert_has_calls([
mock.call(**{'router:external': True}),
mock.call(shared=True),
])
self.qclient.list_routers.assert_called_once_with()
self.qclient.list_subnets.assert_called_once_with()
novaclient.versions.get_current.assert_called_once_with()
novaclient.servers.get.assert_called_once_with(server.id)
return ret_val
def test_target_floating_ip_port_by_instance(self):
server = self.servers.first()
ports = self.api_ports.list()
candidates = [p for p in ports if p['device_id'] == server.id]
ret = self._test_target_floating_ip_port_by_instance(server, ports,
candidates)
self.assertEqual(1, len(ret))
ret_val = ret[0]
self.assertEqual(self._get_target_id(candidates[0]), ret_val.id)
self.assertEqual(candidates[0]['id'], ret_val.port_id)
self.assertEqual(candidates[0]['device_id'], ret_val.instance_id)
def test_target_floating_ip_port_by_instance_with_ipv6(self):
server = self.servers.first()
ports = self.api_ports.list()
candidates = [p for p in ports if p['device_id'] == server.id]
# Move the IPv6 entry first
fixed_ips = candidates[0]['fixed_ips']
candidates[0]['fixed_ips'] = [fixed_ips[1], fixed_ips[0]]
# Check the first IP address is IPv6
first_ip = candidates[0]['fixed_ips'][0]['ip_address']
self.assertEqual(6, netaddr.IPAddress(first_ip).version)
ret = self._test_target_floating_ip_port_by_instance(server, ports,
candidates)
self.assertEqual(1, len(ret))
ret_val = ret[0]
self.assertEqual(self._get_target_id(candidates[0], index=1),
ret_val.id)
self.assertEqual(candidates[0]['id'], ret_val.port_id)
self.assertEqual(candidates[0]['device_id'], ret_val.instance_id)
def _get_preloaded_targets(self):
return [
api.neutron.FloatingIpTarget(
api.neutron.Port({'name': 'name11', 'id': 'id11',
'device_id': 'id-vm1'}),
'192.168.1.1', 'vm1'),
api.neutron.FloatingIpTarget(
api.neutron.Port({'name': 'name21', 'id': 'id21',
'device_id': 'id-vm2'}),
'172.16.1.1', 'vm2'),
api.neutron.FloatingIpTarget(
api.neutron.Port({'name': 'name22', 'id': 'id22',
'device_id': 'id-vm2'}),
'10.11.12.13', 'vm3'),
]
def test_target_floating_ip_port_by_instance_with_preloaded_target(self):
target_list = self._get_preloaded_targets()
ret = api.neutron.floating_ip_target_list_by_instance(
self.request, 'id-vm2', target_list)
self.assertEqual(['id21', 'id22'], [r.port_id for r in ret])
|
apache-2.0
| -2,582,173,321,856,126,000 | 44.094817 | 79 | 0.608755 | false |
awsdocs/aws-doc-sdk-examples
|
python/example_code/iotthingsgraph/ms.py
|
1
|
6258
|
#
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# snippet-sourcedescription:[ms.py demonstrates how to mock an AWS IoT Things Graph motion sensor device in an AWS IoT Greengrass group.]
# snippet-service:[iotthingsgraph]
# snippet-keyword:[Python]
# snippet-sourcesyntax:[python]
# snippet-keyword:[AWS IoT Things Graph]
# snippet-keyword:[Code Sample]
# snippet-sourcetype:[full-example]
# snippet-sourcedate:[2019-07-25]
# snippet-sourceauthor:[AWS]
# snippet-start:[iotthingsgraph.python.ms.complete]
import os
import sys
import time
import uuid
import json
import logging
import argparse
from AWSIoTPythonSDK.core.greengrass.discovery.providers import DiscoveryInfoProvider
from AWSIoTPythonSDK.core.protocol.connection.cores import ProgressiveBackOffCore
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
from AWSIoTPythonSDK.exception.AWSIoTExceptions import DiscoveryInvalidRequestException
AllowedActions = ['both', 'publish', 'subscribe']
# General message notification callback
def customOnMessage(message):
print('Received message on topic %s: %s\n' % (message.topic, message.payload))
MAX_DISCOVERY_RETRIES = 10
GROUP_CA_PATH = "./groupCA/"
# Read in command-line parameters
parser = argparse.ArgumentParser()
parser.add_argument("-e", "--endpoint", action="store", required=True, dest="host", help="Your AWS IoT custom endpoint")
parser.add_argument("-r", "--rootCA", action="store", required=True, dest="rootCAPath", help="Root CA file path")
parser.add_argument("-c", "--cert", action="store", dest="certificatePath", help="Certificate file path")
parser.add_argument("-k", "--key", action="store", dest="privateKeyPath", help="Private key file path")
parser.add_argument("-n", "--thingName", action="store", dest="thingName", default="Bot", help="Targeted thing name")
args = parser.parse_args()
host = args.host
rootCAPath = args.rootCAPath
certificatePath = args.certificatePath
privateKeyPath = args.privateKeyPath
clientId = args.thingName
thingName = args.thingName
if not args.certificatePath or not args.privateKeyPath:
parser.error("Missing credentials for authentication.")
exit(2)
# Configure logging
logger = logging.getLogger("AWSIoTPythonSDK.core")
logger.setLevel(logging.DEBUG)
streamHandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
streamHandler.setFormatter(formatter)
logger.addHandler(streamHandler)
# Progressive back off core
backOffCore = ProgressiveBackOffCore()
# Discover GGCs
discoveryInfoProvider = DiscoveryInfoProvider()
discoveryInfoProvider.configureEndpoint(host)
discoveryInfoProvider.configureCredentials(rootCAPath, certificatePath, privateKeyPath)
discoveryInfoProvider.configureTimeout(10) # 10 sec
retryCount = MAX_DISCOVERY_RETRIES
discovered = False
groupCA = None
coreInfo = None
while retryCount != 0:
try:
discoveryInfo = discoveryInfoProvider.discover(thingName)
caList = discoveryInfo.getAllCas()
coreList = discoveryInfo.getAllCores()
# We only pick the first ca and core info
groupId, ca = caList[0]
coreInfo = coreList[0]
print("Discovered GGC: %s from Group: %s" % (coreInfo.coreThingArn, groupId))
print("Now we persist the connectivity/identity information...")
groupCA = GROUP_CA_PATH + groupId + "_CA_" + str(uuid.uuid4()) + ".crt"
if not os.path.exists(GROUP_CA_PATH):
os.makedirs(GROUP_CA_PATH)
groupCAFile = open(groupCA, "w")
groupCAFile.write(ca)
groupCAFile.close()
discovered = True
print("Now proceed to the connecting flow...")
break
except DiscoveryInvalidRequestException as e:
print("Invalid discovery request detected!")
print("Type: %s" % str(type(e)))
print("Error message: %s" % e.message)
print("Stopping...")
break
except Exception as e:
print("Error in discovery!")
print("Type: %s" % str(type(e)))
print("Error message: %s" % e)
retryCount -= 1
print("\n%d/%d retries left\n" % (retryCount, MAX_DISCOVERY_RETRIES))
print("Backing off...\n")
backOffCore.backOff()
if not discovered:
print("Discovery failed after %d retries. Exiting...\n" % (MAX_DISCOVERY_RETRIES))
sys.exit(-1)
# Iterate through all connection options for the core and use the first successful one
myAWSIoTMQTTClient = AWSIoTMQTTClient(clientId)
myAWSIoTMQTTClient.configureCredentials(groupCA, privateKeyPath, certificatePath)
myAWSIoTMQTTClient.onMessage = customOnMessage
connected = False
for connectivityInfo in coreInfo.connectivityInfoList:
currentHost = connectivityInfo.host
currentPort = connectivityInfo.port
print("Trying to connect to core at %s:%d" % (currentHost, currentPort))
myAWSIoTMQTTClient.configureEndpoint(currentHost, currentPort)
try:
myAWSIoTMQTTClient.connect()
connected = True
break
except Exception as e:
print("Error in connect!")
print("Type: %s" % str(type(e)))
print("Error message: %s" % e)
if not connected:
print("Cannot connect to core %s. Exiting..." % coreInfo.coreThingArn)
sys.exit(-2)
# Successfully connected to the core
#if args.mode == 'both' or args.mode == 'subscribe':
# myAWSIoTMQTTClient.subscribe(topic, 0, None)
#time.sleep(2)
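# Publish a mock motion-detection message to the <thingName>/motion topic
# every 10 seconds.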
loopCount = 0
while True:
message = {}
message['isMotionDetected'] = True
messageJson = json.dumps(message)
myAWSIoTMQTTClient.publish(thingName + "/motion", messageJson, 0)
print('Published topic %s: %s\n' % (thingName + "/motion", messageJson))
loopCount += 1
time.sleep(10)
# snippet-end:[iotthingsgraph.python.ms.complete]
|
apache-2.0
| 7,958,414,060,473,398,000 | 35.389535 | 137 | 0.721477 | false |
BU-PyCon/Meeting-2
|
Programs/PyPlot.py
|
1
|
18763
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.patches import *
import pdb
print("""
MatPlotLib Advanced Tutorial
----------------------------
This is a tutorial covering the features and usage of the matplotlib package
in more detail. In truth, no single tutorial can cover all the features that
exist in the matplotlib package since it is extremely expansive. This tutorial
will cover as much material as possible to let you know of the features that
are available to you when plotting data.
Some Notes:
1) A few parts of this program use pdb to pause the program and allow the
user to try making things for themselves. Use c to continue with the
program.
2) This program uses plt for the reference name of pyplot. All pyplot methods
should be preceded with the qualifier plt such as plt.show().
3) For the best results, run this program with ipython. Regular python may
dislike plotting during program execution.
""")
pause = input("Press [Enter] to continue...")
print('\n'*100)
print("""
###
## pyplot
###
Within the matplotlib package, the main module you want to be using is the
pyplot module. It is generally imported as
import matplotlib.pyplot as plt
The pyplot module has many useful functions that you can call directly, and
we will go over them one by one. For reference, some useful methods are shown
below.
>>> plt.close() # Closes the current figure. Optional arguments
include passing in a figure, figure number,
or the string 'all' which closes all figures.
>>> plt.draw() # Forces the figure to be redrawn. Useful if it
was been updated after it was last shown or
drawn.
>>> plt.gca() # Returns the currently active axes object
>>> plt.gcf() # Returns the currently active figure object
>>> plt.show() # Shows the latest figure. By default, matplotlib
pauses and waits for the window to be closed
before continuing. This feature can be turned
off with the keyword block = False.
>>> plt.savefig('title.png') # Saves the figure to a file. The file type is
automatically determined by the extension.
Supported formats include png, pdf, ps, eps,
and svg. This has the keywords dpi which
specifies the resolution of the output and
bbox_inches which, when set to 'tight' reduces
any extra white space in the saved file.
>>> plt.subplots_adjust() # Allows for adjusting parameters of the layout
such as the horizontal space (hspace) or width
space (wspace) between plots, as well as the
left, right, top, and bottom padding.
""")
pause = input("Press [Enter] to continue...")
print('\n'*100)
print("""
###
## Components of a Plot
###
At its core, matplotlib is nothing more than a graphics package. Every
component of a plot is just a particular "Artist", all drawn on top of
each other to make a nice looking plot.
The beauty of pyplot is the degree of customization that you can have.
You have control over every individual component of this plot and you can
change each of them individually. To do this properly, we will focus on
using the object oriented feature of matplotlib.
Before we talk about how to work with all these features, we need to know
what they are. A window should have just popped up that you can examine.
This window shows all the various components of a figure and the names that
pyplot uses for them. This figure contains the following components
-> Figure The main part of the plot which everything is shown on. This
encompasses the entire area of the window, excluding the toolbar.
-> Axes A single plot, added to the figure. This can have many sets of
data added to it along with other components such as legends.
Axes can even sit on top of other axes, but importantly, they
are still a component of figure, not the axes they may sit inside
of.
-> Axis Note the difference here! This is an axIs not an axEs. This
component is a single axis on the axes and defines how the data
           is plotted. An axes, by default, has two axises, the x and y
(unless you're plotting in 3D in which case it has a z). You can
add more axises though. Each axis has various components such as
the spine, tick labels, major ticks, and minor ticks.
-> Spine Each axis has various components. One of them is the spine. This
is the actual black line at the border of the plots that the
tick marks are drawn on. Each default axis has 2 spines. For the
x axis, it has the top and bottom spine and likewise the y axis
has the right and left.
-> Legend Each axes can have a legend added to it. The legend can have lines
on it, one for each curve labeled.
""")
x = np.arange(0,4*np.pi+np.pi/8,np.pi/8)
y1 = np.sin(x)
y2 = np.cos(x)
fig, (ax1, ax2) = plt.subplots(2, figsize = (10,7))
fig.canvas.set_window_title('Pyplot Figure Components')
plt.subplots_adjust(hspace = 0.4)
plt.suptitle('Figure title', fontsize = 20)
#Create subplot 1
ax1.plot(x, y1, '-dr', label = '$sin(x)$')
ax1.plot(x, np.array([0]*len(x)), '--k')
ax1.set_xlim([0,4*np.pi])
ax1.set_title('Axes 1 Title')
ax1.set_xlabel('Axes 1 x-axis label')
ax1.set_ylabel('Axes 1 y-axis label')
ax1.legend(loc = 'best')
#Create subplot 2
ax2.plot(x, y2, ':og', label = '$cos(x)$')
ax2.plot(x, np.array([0]*len(x)), '-k')
ax2.set_xlim([0,4*np.pi])
ax2.set_title('Axes 2 Title')
ax2.set_xlabel('Axes 2 x-axis label')
ax2.set_ylabel('Axes 2 y-axis label')
ax2.legend(loc = 'best')
#Add artists
ax = fig.add_axes([0,0,1,1])
ax.xaxis.set_visible(False)
ax.yaxis.set_visible(False)
ax.set_zorder(0)
ax.set_axis_bgcolor((0, 0, 0, 0))
ax.add_patch(Rectangle((0.01,0.01),0.98,0.98, fill = False, lw = 2, ec = 'b', transform=ax.transAxes))
ax.annotate('Figure', (0.02,0.02), textcoords = 'axes fraction',
fontsize = 20, color = 'b', transform=ax.transAxes)
ax.add_patch(Rectangle((0.04,0.5),0.9,0.44, fill = False, lw = 2, ec = 'g', transform=ax.transAxes))
ax.annotate('Axes', (0.05,0.52), textcoords = 'axes fraction',
fontsize = 20, color = 'g', transform=ax.transAxes)
ax.add_patch(Rectangle((0.11,0.08),0.03,0.38, fill = False, lw = 2, ec = 'r', transform=ax.transAxes))
ax.annotate('Axis', (0.045,0.4), textcoords = 'axes fraction',
fontsize = 20, color = 'r', transform=ax.transAxes)
ax.add_patch(Rectangle((0.11,0.08),0.8,0.04, fill = False, lw = 2, ec = 'r', transform=ax.transAxes))
ax.annotate('Axis', (0.85,0.04), textcoords = 'axes fraction',
fontsize = 20, color = 'r')
ax.annotate('Spine', (0.8,0.43), xytext = (0.8,0.35), xycoords = 'axes fraction',
color = (1,0.5,0), fontsize = 20,
textcoords = 'axes fraction', horizontalalignment='left',
arrowprops=dict(arrowstyle = '-|>', fc=(1,0.5,0)))
ax.annotate('', (0.9,0.32), xytext = (0.84,0.34), xycoords = 'axes fraction',
arrowprops=dict(arrowstyle = '-|>', fc=(1,0.5,0)))
plt.show(block = False)
plt.pause(0.01)
pause = input('Press [Enter] to continue...')
print('\n'*100)
print("""
###
## Objects in matplotlib
###
The above-mentioned components of a figure (along with a few others) are
all representable as objects. These objects are stored in a variable which
maintains the state of that object and also has functions the object can
call to change its state. Let's look at how we can use these objects to
create a new figure.
""")
pause = input('Press [Enter] to continue...')
print('\n'*100)
print("""
###
## Creating a New Figure
###
There are multiple ways to create a new figure. Probably the simplest is
>>> fig = plt.figure(1, figsize = (5,5), tight_layout = True)
The 1 in this case is an ID for the figure (much like the logical unit
number in IDL). The keywords figsize and tight_layout are optional. The
former sets the physical size of the figure and the second tells the layout
manager to make the plots as close as possible. The state of the figure is
stored in the fig variable which knows entirely about this new figure.
Calling this figure method tells matplotlib that any subsequent plotting
commands should apply to this figure. We can switch to plotting on a new
figure by calling the figure command for another figure (or even switch
back to an old figure). Another method for creating figures is the following
>>> fig, ax = plt.subplots()
This method is much more powerful, but these features will be discussed in
the next section. For reference here are a set of methods and their
functionality that the figure object can call
>>> fig.add_subplot(111) # Adds a subplot at the specified position
>>> fig.clear() # Clears the figure's axes
>>> fig.suptitle('Title') # Adds a title to the figure
Many of the methods listed above as pyplot methods, such as subplots_adjust or
draw, can be applied to a specific figure as well.
""")
pause = input('Press [Enter] to continue...')
print('\n'*100)
print("""
###
## Creating a New Axes
###
Once you have a figure, it's time to add some Axeses to it. As mentioned
before, matplotlib supports using objects. If you've properly created your
figure, it will have been stored into an object. You can now call the method
add_subplot.
>>> ax = fig.add_subplot(1,1,1)
The order of these parameters is add_subplot(rows, columns, plotNo), where
plotNo is the number of the plot, starting at 1 in the upper left and counting
left to right then top to bottom. If all values are less than 10, an equivalent
procedure is to do
>>> ax = fig.add_subplot(111)
Note how this function has created and returned an axes object which we have
stored into the variable ax. There is another method which creates the figure
and an axes at the same time
>>> fig, (ax1, ax2) = plt.subplots(nrows = 2, ncols = 1, figsize = (8,8))
The figure is stored into the first variable and the axes are stored into
the second variable, which is a tuple of axes.
You can also call plt.subplot(), which acts like add_subplot() but adds
an axes to the currently active figure (determined by the last one referenced).
For more control over your axes positioning, you can specify the exact position
and extent of an axes with the subplot2grid function.
>>> ax = plt.subplot2grid((2,3),(1,0), colspan = 2, rowspan = 1)
This tells figure that there will be a grid of 2 x 3 plots (2 rows, 3 cols) and
this creates a new plot at the position (1,0) (second row, first column) with a
column span of 2 and a row span of 1. If you really want to specify the exact
location, try the add_axes method.
>>> ax = fig.add_axes([0.5, 0.5, 0.3, 0.3])
This tells the figure to put the lower left corner of the axes at the position
(0.5, 0.5) (as fractions of the figure size) and have it span a width and height
of (0.3, 0.3). This is useful for putting plots inside plots. Try this out for
yourself!
""")
pdb.set_trace()
print('\n'*100)
print("""
###
## Plotting to Axes
###
There are many types of plots that can be put on an axes. Below are some simple
examples.
>>> ax.plot() # Simple scatter/line plot
>>> ax.bar() # Vertical bar plot
>>> ax.barh()     # Horizontal bar plot
>>> ax.boxplot()  # Box and whisker plot
>>> ax.contour() # Contour plot of lines
>>> ax.contourf() # Filled contour plot
>>> ax.errorbar() # Scatter/line plot with errorbars
>>> ax.fill() # Scatter/line plot which is filled below the curve
>>> ax.hist() # A histogram of the input data
>>> ax.loglog() # Scatter/line plot that is logarithmic on both axes
>>> ax.pie() # Pie chart
>>> ax.polar() # Polar plot
>>> ax.quiver() # 2D field of arrows
>>> ax.semilogx() # Scatter/line plot with logarithmic x and linear y.
>>> ax.semilogy() # Equivalent to semilogx, but now y is logarithmic
>>> ax.streamplot()# A streamline plot of a vector flow
>>> ax.step() # A step plot
Feel free to try out some of these. You may have to look up the proper
procedures online.
""")
pdb.set_trace()
print('\n'*100)
print("""
###
## Axes Methods
###
Aside from the many plots, there are many useful methods to adjust the
properties of the axes
>>> ax.add_patch() # Adds a 'patch' which is an artist like arrows or circles
>>> ax.annotate() # Adds a string with an arrow to the axes
>>> ax.axhspan() # Adds a horizontal bar across the plot
>>> ax.axvspan() # Adds a vertical bar across the plot
>>> ax.arrow() # Adds an arrow
>>> ax.cla() # Clears the axes
>>> plt.colorbar()      # Adds a colorbar (pyplot/figure-level; axes have no colorbar method)
>>> ax.grid() # Turns on grid lines, keywords include which (major or
minor) and axis (both, x, or y).
>>> ax.legend() # Legend added to the plot
>>> ax.minorticks_on() # Turns on minor tick marks
>>> plt.set_cmap()      # Sets the default color map (pyplot-level; axes have no set_cmap method)
>>> ax.set_title() # Sets the title of the axes
>>> ax.set_xlabel() # Sets the x label of the axes
>>> ax.set_xlim() # Sets the x limits of the axes
>>> ax.set_xscale() # Sets the scale, either linear, log, or symlog
>>> ax.set_xticklabels()# A list of strings to use for the tick labels
>>> ax.set_xticks()     # Sets the values of tick marks with a list
## The above x axis specific functions have analogous y axis functions
>>> ax.text() # Adds a string to the axes
>>> ax.tick_params() # Changes tick and tick label appearances
Try playing with these various features after creating a figure and axes.
""")
pdb.set_trace()
print('\n'*100)
print("""
###
## Axis and Spines
###
Just as you can work with the specific axes on a figure, you can also work with
specific axises and spines on your axes. These can be extracted and stored in
their own variables, but it is generally easier to refer to them as the
components of the axes object. They are accessed in the following way.
>>> ax.xaxis
>>> ax.yaxis
>>> ax.spines['top']              # spines is a dict with components 'top', 'bottom',
'left', and 'right'.
These components of the axes have the following useful methods.
>>> ax.xaxis.set_major_formatter()# Sets how the tick marks are formatted
>>> ax.xaxis.set_major_locator() # Sets location of tick marks (see locators)
## The above major methods have analogous minor methods
>>> ax.xaxis.set_ticklabels() # Set to empty list to turn off labels
>>> ax.xaxis.set_ticks_position() # Change tick position to only 'top', 'left, etc.
## The above xaxis methods have analogous yaxis methods
>>> ax.spines['top'].set_color() # Sets the color of the spine
>>> ax.spines['top'].set_position()# Changes position of the spine
>>> ax.spines['top'].set_visible()# Shows or hides the spine (pass True/False)
## The above spine methods have analogous methods for 'bottom', 'left', and
'right'
Feel free to play with these properties as well.
""")
pdb.set_trace()
print('\n'*100)
print("""
###
## Higher Degrees of Customization
###
We could choose to go even further down the ladder than axis and spines. It is
possible to get the tickmark objects from an axis (via get_major_ticks()) and
change properties on a tickmark-by-tickmark basis. However, it is no longer
instructive to continue showing methods and ways of doing this as it can always
be looked up. For extreme control over every component of plotting, it is sometimes
useful to use the rcParams variable. This should be imported as
from matplotlib import rcParams
You can then refer to any component of the figure by referencing the dict's
keyword, and setting the value. Common examples include
>>> rcParams['lines.linewidth'] = 2 # Sets linewidths to be 2 by default
>>> rcParams['lines.color'] = 'r' # Sets line colors to be red by default
There are hundreds of parameters that can be set, all of which can be seen by
going to http://matplotlib.org/users/customizing.html.
""")
pause = input('Press [Enter] to continue...')
print('\n'*100)
print("""
###
## Animations
###
This will only introduce the idea of animations. To actually produce saved
animations in the form of mp4 or some similar format requires installing third
party programs such as ffmpeg. However, matplotlib comes with an animation
package imported as matplotlib.animation. It has tools to allow you to
continually update a plot such that it is animated, and it can also save the
animation. Below is the code for a very simple animation plot.
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
fig, ax = plt.subplots()
ax.set_xlim([0,2*np.pi])
x = np.arange(0, 2*np.pi, 0.01) # x-array
line, = ax.plot(x, np.sin(x)) # The comma after line makes it a tuple
#Init only required for blitting to give a clean slate.
def init():
line.set_ydata(np.ma.array(x, mask=True))
return line,
def animate(i):
line.set_ydata(np.sin(x+i/10.0)) # update the data
return line,
#blit=True means only redraw the components which have updated. This is
#faster than redrawing everything.
ani = animation.FuncAnimation(fig, animate, init_func=init,
interval=25, blit=True)
plt.show()
""")
fig, ax = plt.subplots()
ax.set_xlim([0,2*np.pi])
x = np.arange(0, 2*np.pi, 0.01) # x-array
line, = ax.plot(x, np.sin(x)) # The comma after line makes it a tuple
#Init only required for blitting to give a clean slate.
def init():
line.set_ydata(np.ma.array(x, mask=True))
return line,
def animate(i):
line.set_ydata(np.sin(x+i/10.0)) # update the data
return line,
#blit=True means only redraw the components which have updated. This is
#faster than redrawing everything.
ani = animation.FuncAnimation(fig, animate, init_func=init,
interval=25, blit=True)
plt.show(block = False)
print('Done...')
|
mit
| 6,750,708,883,860,443,000 | 37.752119 | 102 | 0.65933 | false |
fake-name/ReadableWebProxy
|
WebMirror/management/rss_parser_funcs/feed_parse_extractJingletranslationsWordpressCom.py
|
1
|
1391
|
def extractJingletranslationsWordpressCom(item):
'''
Parser for 'jingletranslations.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('Breaking Off the Engagement… Bring it on!', 'Breaking Off the Engagement… Bring it on!', 'translated'),
('I Favor the Villainess', 'I Favor the Villainess', 'translated'),
('City of Slumber', 'City of Slumber', 'translated'),
('Villainess’s Sweet Everyday', 'Villainess\'s Sweet Everyday', 'translated'),
('Outaishihi ni Nante Naritakunai!!', 'Outaishihi ni Nante Naritakunai!!', 'translated'),
('First Love × First Love', 'First Love × First Love', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
bsd-3-clause
| -4,616,527,478,748,570,000 | 52.230769 | 134 | 0.506869 | false |
oliora/couched
|
setup.py
|
1
|
1040
|
from setuptools import setup
import sys
if sys.version_info < (2, 7):
raise Exception("Couched needs Python 2.7 or above")
LONG_DESCRIPTION = open('README.txt').read()
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Database :: Front-Ends',
]
setup(
name='Couched',
version='0.1.0',
author='Andrey Upadyshev',
author_email='oliora@gmail.com',
packages=['couched'],
url='https://github.com/oliora/couched/',
license='MIT',
description='CouchDB related command-line utils.',
long_description=LONG_DESCRIPTION,
classifiers=CLASSIFIERS,
platforms=['any'],
entry_points={
'console_scripts': [
'couch-load = couched.load:main',
'couch-save = couched.save:main',
],
},
install_requires=[
"couchdb >= 0.7.0",
],
zip_safe = True,
)
|
mit
| 2,050,784,413,894,405,000 | 23.209302 | 56 | 0.6 | false |
tmrowco/electricitymap
|
parsers/lib/validation.py
|
1
|
5884
|
#!/usr/bin/env python3
"""Centralised validation function for all parsers."""
from logging import getLogger
def has_value_for_key(datapoint, key, logger):
"""checks that the key exists in datapoint and that the corresponding value
is not None"""
if datapoint['production'].get(key, None) is None:
logger.warning("Required generation type {} is missing from {}".format(
key, datapoint['zoneKey']), extra={'key': datapoint['zoneKey']})
return None
return True
def check_expected_range(datapoint, value, expected_range, logger, key=None):
low, high = min(expected_range), max(expected_range)
if not (low <= value <= high):
key_str = 'for key `{}`'.format(key) if key else ''
logger.warning("{} reported total of {:.2f}MW falls outside range "
"of {} {}".format(datapoint['zoneKey'], value,
expected_range, key_str),
extra={'key': datapoint['zoneKey']})
return
return True
def validate(datapoint, logger=getLogger(__name__), **kwargs):
"""
Validates a production datapoint based on given constraints.
If the datapoint is found to be invalid then None is returned.
Arguments
---------
logger
datapoint: a production datapoint. See examples
optional keyword arguments
remove_negative: bool
Changes negative production values to None.
Defaults to False.
required: list
Generation types that must be present.
For example ['gas', 'hydro']
If any of these types are None the datapoint will be invalidated.
Defaults to an empty list.
floor: float
Checks production sum is above floor value.
If this is not the case the datapoint is invalidated.
Defaults to None
expected_range: tuple or dict
Checks production total against expected range.
Tuple is in form (low threshold, high threshold), e.g. (1800, 12000).
If a dict, it should be in the form
{
'nuclear': (low, high),
'coal': (low, high),
}
All keys will be required.
If the total is outside this range the datapoint will be invalidated.
Defaults to None.
Examples
--------
>>> test_datapoint = {
>>> 'zoneKey': 'FR',
>>> 'datetime': '2017-01-01T00:00:00Z',
>>> 'production': {
>>> 'biomass': 50.0,
>>> 'coal': 478.0,
>>> 'gas': 902.7,
>>> 'hydro': 190.1,
>>> 'nuclear': None,
>>> 'oil': 0.0,
>>> 'solar': 20.0,
>>> 'wind': 40.0,
>>> 'geothermal': 0.0,
>>> 'unknown': 6.0
>>> },
>>> 'storage': {
>>> 'hydro': -10.0,
>>> },
>>> 'source': 'mysource.com'
>>> }
>>> validate(datapoint, required=['gas'], expected_range=(100, 2000))
datapoint
>>> validate(datapoint, required=['not_a_production_type'])
None
>>> validate(datapoint, required=['gas'],
>>> expected_range={'solar': (0, 1000), 'wind': (100, 2000)})
datapoint
"""
remove_negative = kwargs.pop('remove_negative', False)
required = kwargs.pop('required', [])
floor = kwargs.pop('floor', False)
expected_range = kwargs.pop('expected_range', None)
if kwargs:
raise TypeError('Unexpected **kwargs: %r' % kwargs)
generation = datapoint['production']
storage = datapoint.get('storage', {})
if remove_negative:
for key, val in generation.items():
if val is not None and -5.0 < val < 0.0:
logger.warning("{} returned {:.2f}, setting to None".format(
key, val), extra={'key': datapoint['zoneKey']})
generation[key] = None
if required:
for item in required:
if not has_value_for_key(datapoint, item, logger):
return
if floor:
# when adding power to the system, storage key is negative
total = (sum(v for k, v in generation.items() if v is not None)
- sum(v for k, v in storage.items() if v is not None))
if total < floor:
logger.warning("{} reported total of {}MW does not meet {}MW floor"
" value".format(datapoint['zoneKey'], total, floor),
extra={'key': datapoint['zoneKey']})
return
if expected_range:
if isinstance(expected_range, dict):
for key, range_ in expected_range.items():
if not has_value_for_key(datapoint, key, logger):
return
if not check_expected_range(datapoint, generation[key], range_,
logger, key=key):
return
else:
# when adding power to the system, storage key is negative
total = (sum(v for k, v in generation.items() if v is not None)
- sum(v for k, v in storage.items() if v is not None))
if not check_expected_range(datapoint, total, expected_range,
logger):
return
return datapoint
test_datapoint = {
'zoneKey': 'FR',
'datetime': '2017-01-01T00:00:00Z',
'production': {
'biomass': 50.0,
'coal': 478.0,
'gas': 902.7,
'hydro': 190.1,
'nuclear': None,
'oil': 0.0,
'solar': 20.0,
'wind': 40.0,
'geothermal': -1.0,
'unknown': 6.0
},
'storage': {
'hydro': -10.0,
},
'source': 'mysource.com'
}
if __name__ == '__main__':
print(validate(test_datapoint, required=['gas'],
expected_range=(100, 2000), remove_negative=True))
|
gpl-3.0
| 7,181,092,218,174,970,000 | 33.611765 | 79 | 0.533651 | false |
gurpinars/hash-generator
|
hashGenerator.py
|
1
|
3997
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'hashGenerator.ui'
#
# Created: Tue Dec 18 00:25:17 2012
# by: PyQt4 UI code generator 4.9.3
#
# WARNING! All changes made in this file will be lost!
#
# Author:M. Sami GÜRPINAR
# Email :sami.gurpinar@gmail.com
#
from PyQt4 import QtCore, QtGui
from generator import Generator
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(280, 338)
self.comboBox = QtGui.QComboBox(Form)
self.comboBox.setGeometry(QtCore.QRect(11, 20, 78, 27))
self.comboBox.setObjectName(_fromUtf8("comboBox"))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.comboBox.addItem(_fromUtf8(""))
self.pushButton = QtGui.QPushButton(Form)
self.pushButton.setGeometry(QtCore.QRect(172, 300, 98, 27))
self.pushButton.setObjectName(_fromUtf8("pushButton"))
QtCore.QObject.connect(self.pushButton,QtCore.SIGNAL("clicked()"),self.clickedButton)
self.widget = QtGui.QWidget(Form)
self.widget.setGeometry(QtCore.QRect(11, 61, 258, 227))
self.widget.setObjectName(_fromUtf8("widget"))
self.verticalLayout = QtGui.QVBoxLayout(self.widget)
self.verticalLayout.setMargin(0)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.lineEdit = QtGui.QLineEdit(self.widget)
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.verticalLayout.addWidget(self.lineEdit)
self.textEdit = QtGui.QTextEdit(self.widget)
self.textEdit.setObjectName(_fromUtf8("textEdit"))
self.verticalLayout.addWidget(self.textEdit)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
self.generator = Generator(self)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Hash Generator", None, QtGui.QApplication.UnicodeUTF8))
self.comboBox.setItemText(0, QtGui.QApplication.translate("Form", "MD5", None, QtGui.QApplication.UnicodeUTF8))
self.comboBox.setItemText(1, QtGui.QApplication.translate("Form", "SHA", None, QtGui.QApplication.UnicodeUTF8))
self.comboBox.setItemText(2, QtGui.QApplication.translate("Form", "SHA224", None, QtGui.QApplication.UnicodeUTF8))
self.comboBox.setItemText(3, QtGui.QApplication.translate("Form", "SHA256", None, QtGui.QApplication.UnicodeUTF8))
self.comboBox.setItemText(4, QtGui.QApplication.translate("Form", "SHA384", None, QtGui.QApplication.UnicodeUTF8))
self.comboBox.setItemText(5, QtGui.QApplication.translate("Form", "SHA512", None, QtGui.QApplication.UnicodeUTF8))
self.pushButton.setText(QtGui.QApplication.translate("Form", "Generate", None, QtGui.QApplication.UnicodeUTF8))
def clickedButton(self):
if self.comboBox.currentText() == "MD5":
self.generator.md5(self.lineEdit.text().toUtf8())
elif self.comboBox.currentText() == "SHA":
self.generator.sha(self.lineEdit.text().toUtf8())
elif self.comboBox.currentText() == "SHA224":
self.generator.sha224(self.lineEdit.text().toUtf8())
elif self.comboBox.currentText() == "SHA256":
self.generator.sha256(self.lineEdit.text().toUtf8())
elif self.comboBox.currentText() == "SHA384":
self.generator.sha384(self.lineEdit.text().toUtf8())
else:
self.generator.sha512(self.lineEdit.text().toUtf8())
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Form = QtGui.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
Form.show()
sys.exit(app.exec_())
|
gpl-3.0
| -2,423,430,719,406,215,700 | 43.898876 | 122 | 0.676677 | false |
lipixun/pytest
|
rabbitmq/geventmultimsg/client.py
|
1
|
2058
|
#!/usr/bin/env python
# encoding=utf8
# The gevent test client
import sys
reload(sys)
sys.setdefaultencoding('utf8')
import gevent
from gevent import monkey
monkey.patch_all()
from haigha.connections.rabbit_connection import RabbitConnection
from haigha.message import Message
class Client(object):
"""The RPC Client
"""
def __init__(self, host, port, vhost, user, password):
"""Create a new Server
"""
self._conn = RabbitConnection(transport = 'gevent', host = host, port = port, vhost = vhost, user = user, password = password)
gevent.spawn(self.loop)
self._channel = self._conn.channel()
def loop(self):
"""The loop
"""
while self._conn:
self._conn.read_frames()
gevent.sleep()
def call(self):
"""The call method
"""
self._channel.basic.publish(Message('A test body'), '', 'test_gevent')
if __name__ == '__main__':
from argparse import ArgumentParser
def getArguments():
"""Get arguments
"""
parser = ArgumentParser(description = 'Gevent test client')
parser.add_argument('--host', dest = 'host', required = True, help = 'The host')
parser.add_argument('--port', dest = 'port', default = 5672, type = int, help = 'The port')
parser.add_argument('--vhost', dest = 'vhost', default = '/test', help = 'The virtual host')
parser.add_argument('--user', dest = 'user', default = 'test', help = 'The user name')
parser.add_argument('--password', dest = 'password', default = 'test', help = 'The password')
# Done
return parser.parse_args()
def main():
"""The main entry
"""
args = getArguments()
# Create the server
client = Client(args.host, args.port, args.vhost, args.user, args.password)
for i in range(0, 50):
print 'Send message'
client.call()
gevent.sleep(0.1)
# Done
print 'Done'
gevent.sleep(1000)
main()
|
gpl-2.0
| -590,878,538,824,319,700 | 28.826087 | 134 | 0.580661 | false |
RylanGotto/web-dash
|
websterton/settings.py
|
1
|
1382
|
# -*- coding: utf-8 -*-
import os
os_env = os.environ
class Config(object):
SECRET_KEY = os_env.get('WEBSTERTON_SECRET', 'secret-key') # TODO: Change me
APP_DIR = os.path.abspath(os.path.dirname(__file__)) # This directory
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
BCRYPT_LOG_ROUNDS = 13
ASSETS_DEBUG = False
DEBUG_TB_ENABLED = False # Disable Debug toolbar
DEBUG_TB_INTERCEPT_REDIRECTS = False
CACHE_TYPE = 'simple' # Can be "memcached", "redis", etc.
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/example' # TODO: Change me
DEBUG_TB_ENABLED = False # Disable Debug toolbar
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
DB_NAME = 'dev.db'
# Put the db file in project root
DB_PATH = os.path.join(Config.PROJECT_ROOT, DB_NAME)
SQLALCHEMY_DATABASE_URI = 'sqlite:///{0}'.format(DB_PATH)
DEBUG_TB_ENABLED = True
ASSETS_DEBUG = True # Don't bundle/minify static assets
CACHE_TYPE = 'simple' # Can be "memcached", "redis", etc.
class TestConfig(Config):
TESTING = True
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'sqlite://'
BCRYPT_LOG_ROUNDS = 1 # For faster tests
WTF_CSRF_ENABLED = False # Allows form testing
|
bsd-3-clause
| -2,148,368,910,370,387,200 | 31.139535 | 81 | 0.652677 | false |
google/capirca
|
tests/lib/paloaltofw_test.py
|
1
|
32418
|
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit test for Palo Alto Firewalls acl rendering module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
from capirca.lib import aclgenerator
from capirca.lib import nacaddr
from capirca.lib import naming
from capirca.lib import paloaltofw
from capirca.lib import policy
import mock
GOOD_HEADER_1 = """
header {
comment:: "This is a test acl with a comment"
target:: paloalto from-zone trust to-zone untrust
}
"""
GOOD_HEADER_2 = """
header {
comment:: "This is a test acl with a comment"
target:: paloalto from-zone all to-zone all
}
"""
GOOD_HEADER_INET6 = """
header {
comment:: "This is a test acl with a comment"
target:: paloalto from-zone trust to-zone untrust inet6
}
"""
GOOD_HEADER_MIXED = """
header {
comment:: "This is a test acl with a comment"
target:: paloalto from-zone trust to-zone untrust mixed
}
"""
BAD_HEADER_1 = """
header {
comment:: "This header has two address families"
target:: paloalto from-zone trust to-zone untrust inet6 mixed
}
"""
GRE_PROTO_TERM = """
term test-gre-protocol {
comment:: "allow GRE protocol to FOOBAR"
destination-address:: FOOBAR
protocol:: gre
action:: accept
}
"""
GOOD_TERM_1 = """
term good-term-1 {
comment:: "This header is very very very very very very very very very very very very very very very very very very very very large"
destination-address:: FOOBAR
destination-port:: SMTP
protocol:: tcp
action:: accept
}
"""
GOOD_TERM_2 = """
term good-term-4 {
destination-address:: SOME_HOST
protocol:: tcp
pan-application:: ssl http
action:: accept
}
"""
GOOD_TERM_3 = """
term only-pan-app {
pan-application:: ssl
action:: accept
}
"""
GOOD_TERM_4_STATELESS_REPLY = """
term good-term-stateless-reply {
comment:: "ThisIsAStatelessReply"
destination-address:: SOME_HOST
protocol:: tcp
pan-application:: ssl http
action:: accept
}
"""
SVC_TERM_1 = """
term ssh-term-1 {
comment:: "Allow SSH"
destination-address:: FOOBAR
destination-port:: SSH
protocol:: tcp
action:: accept
}
term smtp-term-1 {
comment:: "Allow SMTP"
destination-address:: FOOBAR
destination-port:: SMTP
protocol:: tcp
action:: accept
}
"""
SVC_TERM_2 = """
term smtp-term-1 {
comment:: "Allow SMTP"
destination-address:: FOOBAR
destination-port:: SMTP
protocol:: tcp
action:: accept
}
"""
TCP_ESTABLISHED_TERM = """
term tcp-established {
destination-address:: SOME_HOST
protocol:: tcp
option:: tcp-established
action:: accept
}
"""
UDP_ESTABLISHED_TERM = """
term udp-established-term {
destination-address:: SOME_HOST
protocol:: udp
option:: established
action:: accept
}
"""
UNSUPPORTED_OPTION_TERM = """
term unsupported-option-term {
destination-address:: SOME_HOST
protocol:: udp
option:: inactive
action:: accept
}
"""
EXPIRED_TERM_1 = """
term expired_test {
expiration:: 2000-1-1
action:: deny
}
"""
EXPIRING_TERM = """
term is_expiring {
expiration:: %s
action:: accept
}
"""
ICMP_TYPE_TERM_1 = """
term test-icmp {
protocol:: icmp
icmp-type:: echo-request echo-reply unreachable
action:: accept
}
"""
ICMPV6_ONLY_TERM = """
term test-icmpv6-only {
protocol:: icmpv6
action:: accept
}
"""
ICMPV6_TYPE_TERM = """
term test-icmpv6-types {
protocol:: icmpv6
icmp-type:: echo-request echo-reply destination-unreachable
action:: accept
}
"""
BAD_ICMPV6_TYPE_TERM = """
term test-icmp {
protocol:: icmpv6
icmp-type:: echo-request echo-reply unreachable
action:: accept
comment:: "This is incorrect because unreachable is not an icmpv6-type."
}
"""
BAD_ICMP_TERM_1 = """
term test-icmp-type {
icmp-type:: echo-request echo-reply
action:: accept
}
"""
ICMP_ONLY_TERM_1 = """
term test-icmp-only {
protocol:: icmp
action:: accept
}
"""
MULTIPLE_PROTOCOLS_TERM = """
term multi-proto {
protocol:: tcp udp icmp
action:: accept
}
"""
DEFAULT_TERM_1 = """
term default-term-1 {
action:: deny
}
"""
TIMEOUT_TERM = """
term timeout-term {
protocol:: icmp
icmp-type:: echo-request
timeout:: 77
action:: accept
}
"""
LOGGING_DISABLED = """
term test-disabled-log {
comment:: "Testing disabling logging for tcp."
protocol:: tcp
logging:: disable
action:: accept
}
"""
LOGGING_BOTH_TERM = """
term test-log-both {
comment:: "Testing enabling log-both for tcp."
protocol:: tcp
logging:: log-both
action:: accept
}
"""
LOGGING_TRUE_KEYWORD = """
term test-true-log {
comment:: "Testing enabling logging for udp with true keyword."
protocol:: udp
logging:: true
action:: accept
}
"""
LOGGING_PYTRUE_KEYWORD = """
term test-pytrue-log {
comment:: "Testing enabling logging for udp with True keyword."
protocol:: udp
logging:: True
action:: accept
}
"""
LOGGING_SYSLOG_KEYWORD = """
term test-syslog-log {
comment:: "Testing enabling logging for udp with syslog keyword."
protocol:: udp
logging:: syslog
action:: accept
}
"""
LOGGING_LOCAL_KEYWORD = """
term test-local-log {
comment:: "Testing enabling logging for udp with local keyword."
protocol:: udp
logging:: local
action:: accept
}
"""
ACTION_ACCEPT_TERM = """
term test-accept-action {
comment:: "Testing accept action for tcp."
protocol:: tcp
action:: accept
}
"""
ACTION_COUNT_TERM = """
term test-count-action {
comment:: "Testing unsupported count action for tcp."
protocol:: tcp
action:: count
}
"""
ACTION_NEXT_TERM = """
term test-next-action {
comment:: "Testing unsupported next action for tcp."
protocol:: tcp
action:: next
}
"""
ACTION_DENY_TERM = """
term test-deny-action {
comment:: "Testing deny action for tcp."
protocol:: tcp
action:: deny
}
"""
ACTION_REJECT_TERM = """
term test-reject-action {
comment:: "Testing reject action for tcp."
protocol:: tcp
action:: reject
}
"""
ACTION_RESET_TERM = """
term test-reset-action {
comment:: "Testing reset action for tcp."
protocol:: tcp
action:: reject-with-tcp-rst
}
"""
HEADER_COMMENTS = """
header {
comment:: "comment 1"
comment:: "comment 2"
target:: paloalto from-zone trust to-zone untrust
}
term policy-1 {
pan-application:: ssh
action:: accept
}
term policy-2 {
pan-application:: web-browsing
action:: accept
}
header {
comment:: "comment 3"
target:: paloalto from-zone trust to-zone dmz
}
term policy-3 {
pan-application:: web-browsing
action:: accept
}
header {
# no comment
target:: paloalto from-zone trust to-zone dmz-2
}
term policy-4 {
pan-application:: web-browsing
action:: accept
}
"""
ZONE_LEN_ERROR = """
header {
target:: paloalto from-zone %s to-zone %s
}
term policy {
pan-application:: web-browsing
action:: accept
}
"""
SUPPORTED_TOKENS = frozenset({
'action',
'comment',
'destination_address',
'destination_address_exclude',
'destination_port',
'expiration',
'icmp_type',
'logging',
'name',
'option',
'owner',
'platform',
'protocol',
'source_address',
'source_address_exclude',
'source_port',
'stateless_reply',
'timeout',
'pan_application',
'translated',
})
SUPPORTED_SUB_TOKENS = {
'action': {'accept', 'deny', 'reject', 'reject-with-tcp-rst'},
'option': {'established', 'tcp-established'},
'icmp_type': {
'alternate-address',
'certification-path-advertisement',
'certification-path-solicitation',
'conversion-error',
'destination-unreachable',
'echo-reply',
'echo-request',
'mobile-redirect',
'home-agent-address-discovery-reply',
'home-agent-address-discovery-request',
'icmp-node-information-query',
'icmp-node-information-response',
'information-request',
'inverse-neighbor-discovery-advertisement',
'inverse-neighbor-discovery-solicitation',
'mask-reply',
'mask-request',
'information-reply',
'mobile-prefix-advertisement',
'mobile-prefix-solicitation',
'multicast-listener-done',
'multicast-listener-query',
'multicast-listener-report',
'multicast-router-advertisement',
'multicast-router-solicitation',
'multicast-router-termination',
'neighbor-advertisement',
'neighbor-solicit',
'packet-too-big',
'parameter-problem',
'redirect',
'redirect-message',
'router-advertisement',
'router-renumbering',
'router-solicit',
'router-solicitation',
'source-quench',
'time-exceeded',
'timestamp-reply',
'timestamp-request',
'unreachable',
'version-2-multicast-listener-report',
},
}
# Print a info message when a term is set to expire in that many weeks.
# This is normally passed from command line.
EXP_INFO = 2
_IPSET = [nacaddr.IP('10.0.0.0/8'), nacaddr.IP('2001:4860:8000::/33')]
_IPSET2 = [nacaddr.IP('10.23.0.0/22'), nacaddr.IP('10.23.0.6/23', strict=False)]
_IPSET3 = [nacaddr.IP('10.23.0.0/23')]
PATH_VSYS = "./devices/entry[@name='localhost.localdomain']/vsys/entry[@name='vsys1']"
PATH_RULES = PATH_VSYS + '/rulebase/security/rules'
PATH_TAG = PATH_VSYS + '/tag'
PATH_SERVICE = PATH_VSYS + '/service'
class PaloAltoFWTest(unittest.TestCase):
def setUp(self):
super(PaloAltoFWTest, self).setUp()
self.naming = mock.create_autospec(naming.Naming)
def testTermAndFilterName(self):
self.naming.GetNetAddr.return_value = _IPSET
self.naming.GetServiceByProto.return_value = ['25']
paloalto = paloaltofw.PaloAltoFW(
policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_1, self.naming), EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES + "/entry[@name='good-term-1']")
self.assertIsNotNone(x, output)
self.naming.GetNetAddr.assert_called_once_with('FOOBAR')
self.naming.GetServiceByProto.assert_called_once_with('SMTP', 'tcp')
def testServiceMap(self):
definitions = naming.Naming()
definitions._ParseLine('SSH = 22/tcp', 'services')
definitions._ParseLine('SMTP = 25/tcp', 'services')
definitions._ParseLine('FOOBAR = 10.0.0.0/8', 'networks')
definitions._ParseLine(' 2001:4860:8000::/33', 'networks')
pol1 = paloaltofw.PaloAltoFW(
policy.ParsePolicy(GOOD_HEADER_1 + SVC_TERM_1, definitions), EXP_INFO)
self.assertEqual(
pol1.service_map.entries, {
((), ('22',), 'tcp'): {
'name': 'service-ssh-term-1-tcp'
},
((), ('25',), 'tcp'): {
'name': 'service-smtp-term-1-tcp'
}
}, pol1.service_map.entries)
pol2 = paloaltofw.PaloAltoFW(
policy.ParsePolicy(GOOD_HEADER_1 + SVC_TERM_2, definitions), EXP_INFO)
# The expectation is that there will be a single port mapped.
self.assertEqual(
pol2.service_map.entries, {
((), ('25',), 'tcp'): {
'name': 'service-smtp-term-1-tcp'
}
}, pol2.service_map.entries)
def testDefaultDeny(self):
paloalto = paloaltofw.PaloAltoFW(
policy.ParsePolicy(GOOD_HEADER_1 + DEFAULT_TERM_1, self.naming),
EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES +
"/entry[@name='default-term-1']/action")
self.assertIsNotNone(x, output)
self.assertEqual(x.text, 'deny', output)
def testIcmpTypes(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + ICMP_TYPE_TERM_1, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES +
"/entry[@name='test-icmp']/application")
self.assertIsNotNone(x, output)
members = []
for node in x:
self.assertEqual(node.tag, 'member', output)
members.append(node.text)
self.assertCountEqual(
['icmp-echo-reply', 'icmp-echo-request', 'icmp-unreachable'], members,
output)
def testIcmpV6Types(self):
pol = policy.ParsePolicy(GOOD_HEADER_MIXED + ICMPV6_TYPE_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES +
"/entry[@name='test-icmpv6-types']/application")
self.assertIsNotNone(x, output)
members = []
for node in x:
self.assertEqual(node.tag, 'member', output)
members.append(node.text)
self.assertCountEqual([
'icmp6-echo-reply', 'icmp6-echo-request',
'icmp6-destination-unreachable'
], members, output)
def testBadICMP(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + BAD_ICMP_TERM_1, self.naming)
self.assertRaises(paloaltofw.UnsupportedFilterError, paloaltofw.PaloAltoFW,
pol, EXP_INFO)
def testBadICMPv6Type(self):
pol = policy.ParsePolicy(GOOD_HEADER_MIXED + BAD_ICMPV6_TYPE_TERM,
self.naming)
self.assertRaises(paloaltofw.PaloAltoFWBadIcmpTypeError,
paloaltofw.PaloAltoFW, pol, EXP_INFO)
def testICMPProtocolOnly(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + ICMP_ONLY_TERM_1, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES +
"/entry[@name='test-icmp-only']/application")
self.assertIsNotNone(x, output)
members = []
for node in x:
self.assertEqual(node.tag, 'member', output)
members.append(node.text)
self.assertEqual(['icmp'], members, output)
def testICMPv6ProtocolOnly(self):
pol = policy.ParsePolicy(GOOD_HEADER_INET6 + ICMPV6_ONLY_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES +
"/entry[@name='test-icmpv6-only']/application")
self.assertIsNotNone(x, output)
members = []
for node in x:
self.assertEqual(node.tag, 'member', output)
members.append(node.text)
self.assertEqual(['ipv6-icmp'], members, output)
def testSkipStatelessReply(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_4_STATELESS_REPLY,
self.naming)
# Add stateless_reply to terms, there is no current way to include it in the
# term definition.
_, terms = pol.filters[0]
for term in terms:
term.stateless_reply = True
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES +
"/entry[@name='good-term-stateless-reply']")
self.assertIsNone(x, output)
def testSkipEstablished(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + TCP_ESTABLISHED_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES + "/entry[@name='tcp-established']")
self.assertIsNone(x, output)
pol = policy.ParsePolicy(GOOD_HEADER_1 + UDP_ESTABLISHED_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES +
"/entry[@name='udp-established-term']")
self.assertIsNone(x, output)
def testUnsupportedOptions(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + UNSUPPORTED_OPTION_TERM,
self.naming)
self.assertRaises(aclgenerator.UnsupportedFilterError,
paloaltofw.PaloAltoFW, pol, EXP_INFO)
def testBuildTokens(self):
self.naming.GetServiceByProto.side_effect = [['25'], ['26']]
pol1 = paloaltofw.PaloAltoFW(
policy.ParsePolicy(GOOD_HEADER_1 + GOOD_TERM_2, self.naming), EXP_INFO)
st, sst = pol1._BuildTokens()
self.assertEqual(st, SUPPORTED_TOKENS)
self.assertEqual(sst, SUPPORTED_SUB_TOKENS)
def testLoggingBoth(self):
paloalto = paloaltofw.PaloAltoFW(
policy.ParsePolicy(GOOD_HEADER_1 + LOGGING_BOTH_TERM, self.naming),
EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='test-log-both']/log-start")
self.assertEqual(x, 'yes', output)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='test-log-both']/log-end")
self.assertEqual(x, 'yes', output)
def testDisableLogging(self):
paloalto = paloaltofw.PaloAltoFW(
policy.ParsePolicy(GOOD_HEADER_1 + LOGGING_DISABLED, self.naming),
EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='test-disabled-log']/log-start")
self.assertEqual(x, 'no', output)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='test-disabled-log']/log-end")
self.assertEqual(x, 'no', output)
def testLogging(self):
for term in [
LOGGING_SYSLOG_KEYWORD, LOGGING_LOCAL_KEYWORD, LOGGING_PYTRUE_KEYWORD,
LOGGING_TRUE_KEYWORD
]:
paloalto = paloaltofw.PaloAltoFW(
policy.ParsePolicy(GOOD_HEADER_1 + term, self.naming), EXP_INFO)
output = str(paloalto)
# we don't have term name so match all elements with attribute
# name at the entry level
x = paloalto.config.findall(PATH_RULES + '/entry[@name]/log-start')
self.assertEqual(len(x), 0, output)
x = paloalto.config.findall(PATH_RULES + '/entry[@name]/log-end')
self.assertEqual(len(x), 1, output)
self.assertEqual(x[0].text, 'yes', output)
def testAcceptAction(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + ACTION_ACCEPT_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='test-accept-action']/action")
self.assertEqual(x, 'allow', output)
def testDenyAction(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + ACTION_DENY_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='test-deny-action']/action")
self.assertEqual(x, 'deny', output)
def testRejectAction(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + ACTION_REJECT_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='test-reject-action']/action")
self.assertEqual(x, 'reset-client', output)
def testResetAction(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + ACTION_RESET_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='test-reset-action']/action")
self.assertEqual(x, 'reset-client', output)
def testCountAction(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + ACTION_COUNT_TERM, self.naming)
self.assertRaises(aclgenerator.UnsupportedFilterError,
paloaltofw.PaloAltoFW, pol, EXP_INFO)
def testNextAction(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + ACTION_NEXT_TERM, self.naming)
self.assertRaises(aclgenerator.UnsupportedFilterError,
paloaltofw.PaloAltoFW, pol, EXP_INFO)
def testGreProtoTerm(self):
pol = policy.ParsePolicy(GOOD_HEADER_1 + GRE_PROTO_TERM, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES +
"/entry[@name='test-gre-protocol']/application")
self.assertIsNotNone(x, output)
self.assertEqual(len(x), 1, output)
self.assertEqual(x[0].tag, 'member', output)
self.assertEqual(x[0].text, 'gre', output)
def testHeaderComments(self):
pol = policy.ParsePolicy(HEADER_COMMENTS, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
tag = 'trust_untrust_policy-comment-1'
x = paloalto.config.find(PATH_TAG + "/entry[@name='%s']/comments" % tag)
self.assertIsNotNone(x, output)
self.assertEqual(x.text, 'comment 1 comment 2', output)
x = paloalto.config.find(PATH_RULES + "/entry[@name='policy-2']/tag")
self.assertIsNotNone(x, output)
self.assertEqual(len(x), 1, output)
self.assertEqual(x[0].tag, 'member', output)
self.assertEqual(x[0].text, tag, output)
tag = 'trust_dmz_policy-comment-2'
x = paloalto.config.find(PATH_TAG + "/entry[@name='%s']/comments" % tag)
self.assertIsNotNone(x, output)
self.assertEqual(x.text, 'comment 3', output)
x = paloalto.config.find(PATH_RULES + "/entry[@name='policy-3']/tag")
self.assertIsNotNone(x, output)
self.assertEqual(len(x), 1, output)
self.assertEqual(x[0].tag, 'member', output)
self.assertEqual(x[0].text, tag, output)
x = paloalto.config.find(PATH_RULES + "/entry[@name='policy-4']/tag")
self.assertIsNone(x, output)
def testZoneLen(self):
ZONE_MAX_LEN = 'Z' * 31
ZONE_TOO_LONG = 'Z' * 32
# from
pol = policy.ParsePolicy(ZONE_LEN_ERROR % (ZONE_MAX_LEN, 'dmz'),
self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='policy']/from/member")
self.assertEqual(x, ZONE_MAX_LEN, output)
pol = policy.ParsePolicy(ZONE_LEN_ERROR % (ZONE_TOO_LONG, 'dmz'),
self.naming)
self.assertRaisesRegex(paloaltofw.PaloAltoFWNameTooLongError,
'^Source zone must be 31 characters max',
paloaltofw.PaloAltoFW, pol, EXP_INFO)
# to
pol = policy.ParsePolicy(ZONE_LEN_ERROR % ('dmz', ZONE_MAX_LEN),
self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='policy']/to/member")
self.assertEqual(x, ZONE_MAX_LEN, output)
pol = policy.ParsePolicy(ZONE_LEN_ERROR % ('dmz', ZONE_TOO_LONG),
self.naming)
self.assertRaisesRegex(paloaltofw.PaloAltoFWNameTooLongError,
'^Destination zone must be 31 characters max',
paloaltofw.PaloAltoFW, pol, EXP_INFO)
def test_ZonesRequired(self):
BAD_HEADERS = [
'header{target::paloalto}',
'header{target::paloalto from-zone x}',
'header{target::paloalto x x to-zone x}',
]
msg = ('^Palo Alto Firewall filter arguments '
'must specify from-zone and to-zone[.]$')
for header in BAD_HEADERS:
pol = policy.ParsePolicy(header + GOOD_TERM_3, self.naming)
self.assertRaisesRegex(paloaltofw.UnsupportedFilterError, msg,
paloaltofw.PaloAltoFW, pol, EXP_INFO)
def test_LongComments(self):
POL = """
header {
comment:: "%s"
target:: paloalto from-zone trust to-zone untrust
}
term rule-1 {
comment:: "%s"
pan-application:: ssl
action:: accept
}"""
# get maximum lengths
pol = policy.ParsePolicy(POL % ('C', 'C'), self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
MAX_TAG_COMMENTS_LENGTH = paloalto._MAX_TAG_COMMENTS_LENGTH
MAX_RULE_DESCRIPTION_LENGTH = paloalto._MAX_RULE_DESCRIPTION_LENGTH
tag = 'trust_untrust_policy-comment-1'
# maximum length
pol = policy.ParsePolicy(
POL %
('C' * MAX_TAG_COMMENTS_LENGTH, 'C' * MAX_RULE_DESCRIPTION_LENGTH),
self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_TAG + "/entry[@name='%s']/comments" % tag)
self.assertEqual(x, 'C' * MAX_TAG_COMMENTS_LENGTH, output)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='rule-1']/description")
self.assertEqual(x, 'C' * MAX_RULE_DESCRIPTION_LENGTH, output)
# maximum length + 1
pol = policy.ParsePolicy(
POL % ('C' * (MAX_TAG_COMMENTS_LENGTH + 1), 'C' *
(MAX_RULE_DESCRIPTION_LENGTH + 1)), self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
# verify warning
with self.assertLogs(level='WARN') as log:
output = str(paloalto)
self.assertEqual(len(log.output), 2, log.output)
self.assertIn('comments exceeds maximum length', log.output[0])
self.assertIn('description exceeds maximum length', log.output[1])
x = paloalto.config.findtext(PATH_TAG + "/entry[@name='%s']/comments" % tag)
self.assertEqual(x, 'C' * MAX_TAG_COMMENTS_LENGTH, output)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='rule-1']/description")
self.assertEqual(x, 'C' * MAX_RULE_DESCRIPTION_LENGTH, output)
def testTermLen(self):
TERM = """
term %s {
pan-application:: ssl
action:: accept
}
"""
# get maximum length
pol = policy.ParsePolicy(GOOD_HEADER_1 + TERM % 'T', self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
TERM_MAX_LENGTH = paloalto._TERM_MAX_LENGTH
# maximum length
term = 'T' * TERM_MAX_LENGTH
pol = policy.ParsePolicy(GOOD_HEADER_1 + TERM % term, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.find(PATH_RULES + "/entry[@name='%s']" % term)
self.assertIsNotNone(x, output)
# maximum length + 1
term = 'T' * (TERM_MAX_LENGTH + 1)
pol = policy.ParsePolicy(GOOD_HEADER_1 + TERM % term, self.naming)
regex = '^Term .+ is too long[.] Limit is %d characters' % TERM_MAX_LENGTH
self.assertRaisesRegex(aclgenerator.TermNameTooLongError, regex,
paloaltofw.PaloAltoFW, pol, EXP_INFO)
def testPanApplication(self):
POL1 = """
header {
target:: paloalto from-zone trust to-zone untrust
}
term rule-1 {
action:: accept
}"""
POL2 = """
header {
target:: paloalto from-zone trust to-zone untrust
}
term rule-1 {
pan-application:: %s
action:: accept
}"""
APPS = [
{'app1'},
{'app1', 'app2'},
{'app1', 'app2', 'app3'},
]
pol = policy.ParsePolicy(POL1, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findtext(PATH_RULES +
"/entry[@name='rule-1']/application/member")
self.assertEqual(x, 'any', output)
for i, app in enumerate(APPS):
pol = policy.ParsePolicy(POL2 % ' '.join(app), self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findall(PATH_RULES +
"/entry[@name='rule-1']/application/member")
apps = {elem.text for elem in x}
self.assertEqual(APPS[i], apps, output)
def testPanPorts(self):
POL = """
header {
target:: paloalto from-zone trust to-zone untrust
}
term rule-1 {
%s
action:: accept
}"""
T = """
protocol:: udp
destination-port:: NTP
"""
definitions = naming.Naming()
definitions._ParseLine('NTP = 123/tcp 123/udp', 'services')
definitions._ParseLine('DNS = 53/tcp 53/udp', 'services')
pol = policy.ParsePolicy(POL % T, definitions)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
name = "service-rule-1-udp"
path = "/entry[@name='%s']/protocol/udp/port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertEqual(x, "123", output)
path = "/entry[@name='%s']/protocol/udp/source-port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertIsNone(x, output)
T = """
protocol:: udp
source-port:: NTP
"""
pol = policy.ParsePolicy(POL % T, definitions)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
name = "service-rule-1-udp"
path = "/entry[@name='%s']/protocol/udp/port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertEqual(x, "0-65535", output)
path = "/entry[@name='%s']/protocol/udp/source-port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertEqual(x, "123", output)
T = """
protocol:: tcp
source-port:: NTP
destination-port:: NTP DNS
"""
pol = policy.ParsePolicy(POL % T, definitions)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
name = "service-rule-1-tcp"
path = "/entry[@name='%s']/protocol/tcp/port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertEqual(x, "53,123", output)
path = "/entry[@name='%s']/protocol/tcp/source-port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertEqual(x, "123", output)
T = """
protocol:: tcp
"""
pol = policy.ParsePolicy(POL % T, definitions)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
name = "any-tcp"
path = "/entry[@name='%s']/protocol/tcp/port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertEqual(x, "0-65535", output)
path = "/entry[@name='%s']/protocol/tcp/source-port" % name
x = paloalto.config.find(PATH_SERVICE + path)
self.assertIsNone(x, output)
T = """
protocol:: tcp udp
"""
pol = policy.ParsePolicy(POL % T, definitions)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
name = "any-tcp"
path = "/entry[@name='%s']/protocol/tcp/port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertEqual(x, "0-65535", output)
name = "any-udp"
path = "/entry[@name='%s']/protocol/udp/port" % name
x = paloalto.config.findtext(PATH_SERVICE + path)
self.assertEqual(x, "0-65535", output)
x = paloalto.config.findall(PATH_RULES +
"/entry[@name='rule-1']/service/member")
services = {elem.text for elem in x}
self.assertEqual({"any-tcp", "any-udp"}, services, output)
def testPortLessNonPort(self):
POL = """
header {
target:: paloalto from-zone trust to-zone untrust
}
term rule-1 {
%s
action:: accept
}"""
T = """
protocol:: udp icmp
"""
pol = policy.ParsePolicy(POL % T, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findall(PATH_RULES +
"/entry[@name='rule-1-1']/service/member")
self.assertTrue(len(x) > 0, output)
services = {elem.text for elem in x}
self.assertEqual({"any-udp"}, services, output)
x = paloalto.config.findall(PATH_RULES +
"/entry[@name='rule-1-2']/application/member")
self.assertTrue(len(x) > 0, output)
applications = {elem.text for elem in x}
self.assertEqual({"icmp"}, applications, output)
T = """
protocol:: udp tcp icmp gre
"""
pol = policy.ParsePolicy(POL % T, self.naming)
paloalto = paloaltofw.PaloAltoFW(pol, EXP_INFO)
output = str(paloalto)
x = paloalto.config.findall(PATH_RULES +
"/entry[@name='rule-1-1']/service/member")
self.assertTrue(len(x) > 0, output)
services = {elem.text for elem in x}
self.assertEqual({"any-udp", "any-tcp"}, services, output)
x = paloalto.config.findall(PATH_RULES +
"/entry[@name='rule-1-2']/application/member")
self.assertTrue(len(x) > 0, output)
applications = {elem.text for elem in x}
self.assertEqual({"icmp", "gre"}, applications, output)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| -7,762,802,518,295,838,000 | 28.768595 | 134 | 0.635511 | false |
Villoid/PynamoDB
|
pynamodb/tests/test_model.py
|
1
|
80527
|
"""
Test model API
"""
import random
import json
import copy
from datetime import datetime
import six
from botocore.client import ClientError
from pynamodb.compat import CompatTestCase as TestCase
from pynamodb.tests.deep_eq import deep_eq
from pynamodb.throttle import Throttle
from pynamodb.connection.util import pythonic
from pynamodb.exceptions import TableError
from pynamodb.types import RANGE
from pynamodb.constants import (
ITEM, STRING_SHORT, ALL, KEYS_ONLY, INCLUDE, REQUEST_ITEMS, UNPROCESSED_KEYS, ITEM_COUNT,
RESPONSES, KEYS, ITEMS, LAST_EVALUATED_KEY, EXCLUSIVE_START_KEY, ATTRIBUTES, BINARY_SHORT
)
from pynamodb.models import Model
from pynamodb.indexes import (
GlobalSecondaryIndex, LocalSecondaryIndex, AllProjection,
IncludeProjection, KeysOnlyProjection, Index
)
from pynamodb.attributes import (
UnicodeAttribute, NumberAttribute, BinaryAttribute, UTCDateTimeAttribute,
UnicodeSetAttribute, NumberSetAttribute, BinarySetAttribute)
from pynamodb.tests.data import (
MODEL_TABLE_DATA, GET_MODEL_ITEM_DATA, SIMPLE_MODEL_TABLE_DATA,
BATCH_GET_ITEMS, SIMPLE_BATCH_GET_ITEMS, COMPLEX_TABLE_DATA,
COMPLEX_ITEM_DATA, INDEX_TABLE_DATA, LOCAL_INDEX_TABLE_DATA,
CUSTOM_ATTR_NAME_INDEX_TABLE_DATA, CUSTOM_ATTR_NAME_ITEM_DATA,
BINARY_ATTR_DATA, SERIALIZED_TABLE_DATA
)
if six.PY3:
from unittest.mock import patch, MagicMock
else:
from mock import patch, MagicMock
PATCH_METHOD = 'pynamodb.connection.Connection._make_api_call'
class GamePlayerOpponentIndex(LocalSecondaryIndex):
class Meta:
read_capacity_units = 1
write_capacity_units = 1
table_name = "GamePlayerOpponentIndex"
host = "http://localhost:8000"
projection = AllProjection()
player_id = UnicodeAttribute(hash_key=True)
winner_id = UnicodeAttribute(range_key=True)
class GameOpponentTimeIndex(GlobalSecondaryIndex):
class Meta:
read_capacity_units = 1
write_capacity_units = 1
table_name = "GameOpponentTimeIndex"
host = "http://localhost:8000"
projection = AllProjection()
winner_id = UnicodeAttribute(hash_key=True)
created_time = UnicodeAttribute(range_key=True)
class GameModel(Model):
class Meta:
read_capacity_units = 1
write_capacity_units = 1
table_name = "GameModel"
host = "http://localhost:8000"
player_id = UnicodeAttribute(hash_key=True)
created_time = UTCDateTimeAttribute(range_key=True)
winner_id = UnicodeAttribute()
loser_id = UnicodeAttribute(null=True)
player_opponent_index = GamePlayerOpponentIndex()
opponent_time_index = GameOpponentTimeIndex()
class OldStyleModel(Model):
_table_name = 'IndexedModel'
user_name = UnicodeAttribute(hash_key=True)
class EmailIndex(GlobalSecondaryIndex):
"""
A global secondary index for email addresses
"""
class Meta:
index_name = 'custom_idx_name'
read_capacity_units = 2
write_capacity_units = 1
projection = AllProjection()
email = UnicodeAttribute(hash_key=True)
alt_numbers = NumberSetAttribute(range_key=True, attr_name='numbers')
class LocalEmailIndex(LocalSecondaryIndex):
"""
    A local secondary index for email addresses
"""
class Meta:
read_capacity_units = 2
write_capacity_units = 1
projection = AllProjection()
email = UnicodeAttribute(hash_key=True)
numbers = NumberSetAttribute(range_key=True)
class NonKeyAttrIndex(LocalSecondaryIndex):
class Meta:
index_name = "non_key_idx"
read_capacity_units = 2
write_capacity_units = 1
projection = IncludeProjection(non_attr_keys=['numbers'])
email = UnicodeAttribute(hash_key=True)
numbers = NumberSetAttribute(range_key=True)
class IndexedModel(Model):
"""
A model with an index
"""
class Meta:
table_name = 'IndexedModel'
user_name = UnicodeAttribute(hash_key=True)
email = UnicodeAttribute()
email_index = EmailIndex()
include_index = NonKeyAttrIndex()
numbers = NumberSetAttribute()
aliases = UnicodeSetAttribute()
icons = BinarySetAttribute()
class LocalIndexedModel(Model):
"""
A model with an index
"""
class Meta:
table_name = 'LocalIndexedModel'
user_name = UnicodeAttribute(hash_key=True)
email = UnicodeAttribute()
email_index = LocalEmailIndex()
numbers = NumberSetAttribute()
aliases = UnicodeSetAttribute()
icons = BinarySetAttribute()
class SimpleUserModel(Model):
"""
A hash key only model
"""
class Meta:
table_name = 'SimpleModel'
user_name = UnicodeAttribute(hash_key=True)
email = UnicodeAttribute()
numbers = NumberSetAttribute()
custom_aliases = UnicodeSetAttribute(attr_name='aliases')
icons = BinarySetAttribute()
views = NumberAttribute(null=True)
class ThrottledUserModel(Model):
"""
A testing model
"""
class Meta:
table_name = 'UserModel'
user_name = UnicodeAttribute(hash_key=True)
user_id = UnicodeAttribute(range_key=True)
throttle = Throttle('50')
class CustomAttrIndex(LocalSecondaryIndex):
class Meta:
read_capacity_units = 2
write_capacity_units = 1
projection = AllProjection()
overidden_uid = UnicodeAttribute(hash_key=True, attr_name='user_id')
class CustomAttrNameModel(Model):
"""
A testing model
"""
class Meta:
table_name = 'CustomAttrModel'
overidden_user_name = UnicodeAttribute(hash_key=True, attr_name='user_name')
overidden_user_id = UnicodeAttribute(range_key=True, attr_name='user_id')
overidden_attr = UnicodeAttribute(attr_name='foo_attr', null=True)
uid_index = CustomAttrIndex()
class UserModel(Model):
"""
A testing model
"""
class Meta:
table_name = 'UserModel'
read_capacity_units = 25
write_capacity_units = 25
custom_user_name = UnicodeAttribute(hash_key=True, attr_name='user_name')
user_id = UnicodeAttribute(range_key=True)
picture = BinaryAttribute(null=True)
zip_code = NumberAttribute(null=True)
email = UnicodeAttribute(default='needs_email')
callable_field = NumberAttribute(default=lambda: 42)
class HostSpecificModel(Model):
"""
A testing model
"""
class Meta:
host = 'http://localhost'
table_name = 'RegionSpecificModel'
user_name = UnicodeAttribute(hash_key=True)
user_id = UnicodeAttribute(range_key=True)
class RegionSpecificModel(Model):
"""
A testing model
"""
class Meta:
region = 'us-west-1'
table_name = 'RegionSpecificModel'
user_name = UnicodeAttribute(hash_key=True)
user_id = UnicodeAttribute(range_key=True)
class ComplexKeyModel(Model):
"""
This model has a key that must be serialized/deserialized properly
"""
class Meta:
table_name = 'ComplexKey'
name = UnicodeAttribute(hash_key=True)
date_created = UTCDateTimeAttribute(default=datetime.utcnow)
class ModelTestCase(TestCase):
"""
Tests for the models API
"""
def assert_dict_lists_equal(self, list1, list2):
"""
Compares two lists of dictionaries
"""
for d1_item in list1:
found = False
for d2_item in list2:
if d2_item.items() == d1_item.items():
found = True
if not found:
if six.PY3:
# TODO WTF python2?
raise AssertionError("Values not equal: {0} {1}".format(d1_item, list2))
if len(list1) != len(list2):
raise AssertionError("Values not equal: {0} {1}".format(list1, list2))
def test_create_model(self):
"""
Model.create_table
"""
self.maxDiff = None
scope_args = {'count': 0}
def fake_dynamodb(*args):
kwargs = args[1]
if kwargs == {'TableName': UserModel.Meta.table_name}:
if scope_args['count'] == 0:
return {}
else:
return MODEL_TABLE_DATA
else:
return {}
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db):
with patch("pynamodb.connection.TableConnection.describe_table") as req:
req.return_value = None
with self.assertRaises(TableError):
UserModel.create_table(read_capacity_units=2, write_capacity_units=2, wait=True)
with patch(PATCH_METHOD, new=fake_db) as req:
UserModel.create_table(read_capacity_units=2, write_capacity_units=2)
# Test for default region
self.assertEqual(UserModel.Meta.region, 'us-east-1')
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
UserModel.create_table(read_capacity_units=2, write_capacity_units=2)
# The default region is us-east-1
self.assertEqual(UserModel._connection.connection.region, 'us-east-1')
# A table with a specified region
self.assertEqual(RegionSpecificModel.Meta.region, 'us-west-1')
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
RegionSpecificModel.create_table(read_capacity_units=2, write_capacity_units=2)
self.assertEqual(RegionSpecificModel._connection.connection.region, 'us-west-1')
# A table with a specified host
self.assertEqual(HostSpecificModel.Meta.host, 'http://localhost')
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
HostSpecificModel.create_table(read_capacity_units=2, write_capacity_units=2)
self.assertEqual(HostSpecificModel._connection.connection.host, 'http://localhost')
# A table with a specified capacity
self.assertEqual(UserModel.Meta.read_capacity_units, 25)
self.assertEqual(UserModel.Meta.write_capacity_units, 25)
UserModel._connection = None
def fake_wait(*obj, **kwargs):
if scope_args['count'] == 0:
scope_args['count'] += 1
raise ClientError({'Error': {'Code': 'ResourceNotFoundException', 'Message': 'Not Found'}},
"DescribeTable")
elif scope_args['count'] == 1 or scope_args['count'] == 2:
data = copy.deepcopy(MODEL_TABLE_DATA)
data['Table']['TableStatus'] = 'Creating'
scope_args['count'] += 1
return data
else:
return MODEL_TABLE_DATA
mock_wait = MagicMock()
mock_wait.side_effect = fake_wait
scope_args = {'count': 0}
with patch(PATCH_METHOD, new=mock_wait) as req:
UserModel.create_table(wait=True)
params = {
'AttributeDefinitions': [
{
'AttributeName': 'user_name',
'AttributeType': 'S'
},
{
'AttributeName': 'user_id',
'AttributeType': 'S'
}
],
'KeySchema': [
{
'AttributeName': 'user_name',
'KeyType': 'HASH'
},
{
'AttributeName': 'user_id',
'KeyType': 'RANGE'
}
],
'ProvisionedThroughput': {
'ReadCapacityUnits': 25, 'WriteCapacityUnits': 25
},
'TableName': 'UserModel'
}
actual = req.call_args_list[1][0][1]
self.assertEquals(sorted(actual.keys()), sorted(params.keys()))
self.assertEquals(actual['TableName'], params['TableName'])
self.assertEquals(actual['ProvisionedThroughput'], params['ProvisionedThroughput'])
self.assert_dict_lists_equal(sorted(actual['KeySchema'], key=lambda x: x['AttributeName']),
                                         sorted(params['KeySchema'], key=lambda x: x['AttributeName']))
# These come in random order
self.assert_dict_lists_equal(sorted(actual['AttributeDefinitions'], key=lambda x: x['AttributeName']),
sorted(params['AttributeDefinitions'], key=lambda x: x['AttributeName']))
def bad_server(*args):
if scope_args['count'] == 0:
scope_args['count'] += 1
return {}
elif scope_args['count'] == 1 or scope_args['count'] == 2:
return {}
bad_mock_server = MagicMock()
bad_mock_server.side_effect = bad_server
scope_args = {'count': 0}
with patch(PATCH_METHOD, new=bad_mock_server) as req:
self.assertRaises(
TableError,
UserModel.create_table,
read_capacity_units=2,
write_capacity_units=2,
wait=True
)
def test_model_attrs(self):
"""
Model()
"""
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
item = UserModel('foo', 'bar')
self.assertEqual(item.email, 'needs_email')
self.assertEqual(item.callable_field, 42)
self.assertEqual(
repr(item), '{0}<{1}, {2}>'.format(UserModel.Meta.table_name, item.custom_user_name, item.user_id)
)
self.assertEqual(repr(UserModel._get_meta_data()), 'MetaTable<{0}>'.format('Thread'))
with patch(PATCH_METHOD) as req:
req.return_value = SIMPLE_MODEL_TABLE_DATA
item = SimpleUserModel('foo')
self.assertEqual(repr(item), '{0}<{1}>'.format(SimpleUserModel.Meta.table_name, item.user_name))
self.assertRaises(ValueError, item.save)
self.assertRaises(ValueError, UserModel.from_raw_data, None)
with patch(PATCH_METHOD) as req:
req.return_value = CUSTOM_ATTR_NAME_INDEX_TABLE_DATA
item = CustomAttrNameModel('foo', 'bar', overidden_attr='test')
self.assertEqual(item.overidden_attr, 'test')
self.assertTrue(not hasattr(item, 'foo_attr'))
def test_overidden_defaults(self):
"""
Custom attribute names
"""
schema = CustomAttrNameModel._get_schema()
correct_schema = {
'KeySchema': [
{'key_type': 'HASH', 'attribute_name': 'user_name'},
{'key_type': 'RANGE', 'attribute_name': 'user_id'}
],
'AttributeDefinitions': [
{'attribute_type': 'S', 'attribute_name': 'user_name'},
{'attribute_type': 'S', 'attribute_name': 'user_id'}
]
}
self.assert_dict_lists_equal(correct_schema['KeySchema'], schema['key_schema'])
self.assert_dict_lists_equal(correct_schema['AttributeDefinitions'], schema['attribute_definitions'])
def test_refresh(self):
"""
Model.refresh
"""
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
item = UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = {}
self.assertRaises(item.DoesNotExist, item.refresh)
with patch(PATCH_METHOD) as req:
req.return_value = GET_MODEL_ITEM_DATA
item.refresh()
self.assertEqual(
item.user_name,
GET_MODEL_ITEM_DATA.get(ITEM).get('user_name').get(STRING_SHORT))
def test_complex_key(self):
"""
Model with complex key
"""
with patch(PATCH_METHOD) as req:
req.return_value = COMPLEX_TABLE_DATA
item = ComplexKeyModel('test')
with patch(PATCH_METHOD) as req:
req.return_value = COMPLEX_ITEM_DATA
item.refresh()
def test_delete(self):
"""
Model.delete
"""
UserModel._meta_table = None
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
item = UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = None
item.delete()
params = {
'Key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
args = req.call_args[0][1]
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = None
item.delete(user_id='bar')
params = {
'Key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'Expected': {
'user_id': {
'Value': {'S': 'bar'},
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
args = req.call_args[0][1]
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = None
item.delete(user_id='bar')
params = {
'Key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'Expected': {
'user_id': {
'Value': {'S': 'bar'},
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
args = req.call_args[0][1]
self.assertEqual(args, params)
with patch(PATCH_METHOD) as req:
req.return_value = None
item.delete(user_id='bar', email__contains='@', conditional_operator='AND')
params = {
'Key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'Expected': {
'email': {
'AttributeValueList': [
{'S': '@'}
],
'ComparisonOperator': 'CONTAINS'
},
'user_id': {
'Value': {
'S': 'bar'
}
}
},
'ConditionalOperator': 'AND',
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
args = req.call_args[0][1]
deep_eq(args, params, _assert=True)
def test_update_item(self):
"""
Model.update_item
"""
with patch(PATCH_METHOD) as req:
req.return_value = SIMPLE_MODEL_TABLE_DATA
item = SimpleUserModel('foo', email='bar')
with patch(PATCH_METHOD) as req:
req.return_value = {}
item.save()
with patch(PATCH_METHOD) as req:
req.return_value = {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
self.assertRaises(ValueError, item.update_item, 'views', 10)
with patch(PATCH_METHOD) as req:
req.return_value = {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
item.update_item('views', 10, action='add')
args = req.call_args[0][1]
params = {
'TableName': 'SimpleModel',
'ReturnValues': 'ALL_NEW',
'Key': {
'user_name': {
'S': 'foo'
}
},
'AttributeUpdates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'ReturnConsumedCapacity': 'TOTAL'
}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
item.update_item('views', 10, action='add', user_name='foo', email__not_contains='@')
args = req.call_args[0][1]
params = {
'TableName': 'SimpleModel',
'ReturnValues': 'ALL_NEW',
'Key': {
'user_name': {
'S': 'foo'
}
},
'Expected': {
'user_name': {
'Value': {'S': 'foo'}
},
'email': {
'AttributeValueList': [
{'S': '@'}
],
'ComparisonOperator': 'NOT_CONTAINS'
},
},
'AttributeUpdates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'ReturnConsumedCapacity': 'TOTAL'
}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
item.update_item('views', 10, action='add', user_name__exists=False)
args = req.call_args[0][1]
params = {
'TableName': 'SimpleModel',
'ReturnValues': 'ALL_NEW',
'Key': {
'user_name': {
'S': 'foo'
}
},
'Expected': {
'user_name': {'Exists': False}
},
'AttributeUpdates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'ReturnConsumedCapacity': 'TOTAL'
}
deep_eq(args, params, _assert=True)
# Reproduces https://github.com/jlafon/PynamoDB/issues/59
with patch(PATCH_METHOD) as req:
user = UserModel("test_hash", "test_range")
req.return_value = {
ATTRIBUTES: {}
}
user.update_item('zip_code', 10, action='add')
args = req.call_args[0][1]
params = {
'AttributeUpdates': {
'zip_code': {'Action': 'ADD', 'Value': {'N': '10'}}
},
'TableName': 'UserModel',
'ReturnValues': 'ALL_NEW',
'Key': {
'user_id': {'S': u'test_range'},
'user_name': {'S': u'test_hash'}
},
'ReturnConsumedCapacity': 'TOTAL'}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
# Reproduces https://github.com/jlafon/PynamoDB/issues/34
item.email = None
item.update_item('views', 10, action='add')
args = req.call_args[0][1]
params = {
'TableName': 'SimpleModel',
'ReturnValues': 'ALL_NEW',
'Key': {
'user_name': {
'S': 'foo'
}
},
'AttributeUpdates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'ReturnConsumedCapacity': 'TOTAL'
}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {
ATTRIBUTES: {
}
}
item.email = None
item.update_item('views', action='delete')
args = req.call_args[0][1]
params = {
'TableName': 'SimpleModel',
'ReturnValues': 'ALL_NEW',
'Key': {
'user_name': {
'S': 'foo'
}
},
'AttributeUpdates': {
'views': {
'Action': 'DELETE',
}
},
'ReturnConsumedCapacity': 'TOTAL'
}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
item.update_item('views', 10, action='add', numbers__eq=[1, 2])
args = req.call_args[0][1]
params = {
'TableName': 'SimpleModel',
'ReturnValues': 'ALL_NEW',
'Key': {
'user_name': {
'S': 'foo'
}
},
'Expected': {
'numbers': {
'AttributeValueList': [
{'NS': ['1', '2']}
],
'ComparisonOperator': 'EQ'
},
},
'AttributeUpdates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'ReturnConsumedCapacity': 'TOTAL'
}
deep_eq(args, params, _assert=True)
# Reproduces https://github.com/jlafon/PynamoDB/issues/102
with patch(PATCH_METHOD) as req:
req.return_value = {
ATTRIBUTES: {
"views": {
"N": "10"
}
}
}
item.update_item('views', 10, action='add', email__in=['1@pynamo.db','2@pynamo.db'])
args = req.call_args[0][1]
params = {
'TableName': 'SimpleModel',
'ReturnValues': 'ALL_NEW',
'Key': {
'user_name': {
'S': 'foo'
}
},
'Expected': {
'email': {
'AttributeValueList': [
{'S': '1@pynamo.db'},
{'S': '2@pynamo.db'}
],
'ComparisonOperator': 'IN'
},
},
'AttributeUpdates': {
'views': {
'Action': 'ADD',
'Value': {
'N': '10'
}
}
},
'ReturnConsumedCapacity': 'TOTAL'
}
deep_eq(args, params, _assert=True)
def test_save(self):
"""
Model.save
"""
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
item = UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = {}
item.save()
args = req.call_args[0][1]
params = {
'Item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {}
item.save(email__exists=False)
args = req.call_args[0][1]
params = {
'Item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'Expected': {
'email': {
'Exists': False
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {}
item.save(email__exists=False, zip_code__null=False)
args = req.call_args[0][1]
params = {
'Item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'Expected': {
'email': {
'Exists': False
},
'zip_code': {
'ComparisonOperator': 'NOT_NULL'
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {}
item.save(user_name='bar', zip_code__null=True, email__contains='@', conditional_operator='OR')
args = req.call_args[0][1]
params = {
'Item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'ConditionalOperator': 'OR',
'Expected': {
'user_name': {
'Value': {'S': 'bar'}
},
'zip_code': {
'ComparisonOperator': 'NULL'
},
'email': {
'ComparisonOperator': 'CONTAINS',
'AttributeValueList': [
{'S': '@'}
]
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
deep_eq(args, params, _assert=True)
with patch(PATCH_METHOD) as req:
req.return_value = {}
item.save(custom_user_name='foo')
args = req.call_args[0][1]
params = {
'Item': {
'callable_field': {
'N': '42'
},
'email': {
'S': u'needs_email'
},
'user_id': {
'S': u'bar'
},
'user_name': {
'S': u'foo'
},
},
'Expected': {
'user_name': {
'Value': {'S': 'foo'}
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
deep_eq(args, params, _assert=True)
def test_filter_count(self):
"""
Model.count(**filters)
"""
with patch(PATCH_METHOD) as req:
req.return_value = {'Count': 10}
res = UserModel.count('foo')
self.assertEqual(res, 10)
args = req.call_args[0][1]
params = {
'KeyConditions': {
'user_name': {
'ComparisonOperator': 'EQ',
'AttributeValueList': [{'S': u'foo'}]
}
},
'TableName': 'UserModel',
'ReturnConsumedCapacity': 'TOTAL',
'Select': 'COUNT'
}
deep_eq(args, params, _assert=True)
def test_count(self):
"""
Model.count()
"""
def fake_dynamodb(*args, **kwargs):
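            # Every call returns the canned DescribeTable payload; count()
            # with no filters reads the table's ItemCount from it.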
return MODEL_TABLE_DATA
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as req:
res = UserModel.count()
self.assertEqual(res, 42)
args = req.call_args[0][1]
params = {'TableName': 'UserModel'}
self.assertEqual(args, params)
def test_index_count(self):
"""
Model.index.count()
"""
with patch(PATCH_METHOD) as req:
req.return_value = {'Count': 42}
res = CustomAttrNameModel.uid_index.count('foo', limit=2, user_name__begins_with='bar')
self.assertEqual(res, 42)
args = req.call_args[0][1]
params = {
'KeyConditions': {
'user_name': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': [{'S': u'bar'}]
},
'user_id': {
'ComparisonOperator': 'EQ',
'AttributeValueList': [{'S': u'foo'}]
}
},
'Limit': 2,
'IndexName': 'uid_index',
'TableName': 'CustomAttrModel',
'ReturnConsumedCapacity': 'TOTAL',
'Select': 'COUNT'
}
deep_eq(args, params, _assert=True)
def test_query(self):
"""
Model.query
"""
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__between=['id-1', 'id-3']):
queried.append(item._serialize().get(RANGE))
self.assertListEqual(
[item.get('user_id').get(STRING_SHORT) for item in items],
queried
)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__gt='id-1', user_id__le='id-2'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__lt='id-1'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__ge='id-1'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__le='id-1'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__eq='id-1'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query('foo', user_id__begins_with='id'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query('foo'):
queried.append(item._serialize())
self.assertTrue(len(queried) == len(items))
def fake_query(*args):
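            # Simulate paginated query responses: serve one item per call,
            # resuming after ExclusiveStartKey and setting LastEvaluatedKey.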
kwargs = args[1]
start_key = kwargs.get(EXCLUSIVE_START_KEY, None)
if start_key:
item_idx = 0
for query_item in BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name):
item_idx += 1
if query_item == start_key:
break
query_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[item_idx:item_idx + 1]
else:
query_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[:1]
data = {
ITEMS: query_items,
LAST_EVALUATED_KEY: query_items[-1] if len(query_items) else None
}
return data
mock_query = MagicMock()
mock_query.side_effect = fake_query
with patch(PATCH_METHOD, new=mock_query) as req:
for item in UserModel.query('foo'):
self.assertIsNotNone(item)
with patch(PATCH_METHOD) as req:
req.return_value = CUSTOM_ATTR_NAME_INDEX_TABLE_DATA
CustomAttrNameModel._get_meta_data()
with patch(PATCH_METHOD) as req:
req.return_value = {ITEMS: [CUSTOM_ATTR_NAME_ITEM_DATA.get(ITEM)]}
for item in CustomAttrNameModel.query('bar', overidden_user_name__eq='foo'):
self.assertIsNotNone(item)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query(
'foo',
user_id__begins_with='id',
email__contains='@',
picture__null=False,
zip_code__between=[2, 3]):
queried.append(item._serialize())
params = {
'KeyConditions': {
'user_id': {
'AttributeValueList': [
{'S': 'id'}
],
'ComparisonOperator': 'BEGINS_WITH'
},
'user_name': {
'AttributeValueList': [
{'S': 'foo'}
],
'ComparisonOperator': 'EQ'
}
},
'QueryFilter': {
'email': {
'AttributeValueList': [
{'S': '@'}
],
'ComparisonOperator': 'CONTAINS'
},
'zip_code': {
'ComparisonOperator': 'BETWEEN',
'AttributeValueList': [
{'N': '2'},
{'N': '3'}
]
},
'picture': {
'ComparisonOperator': 'NOT_NULL'
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
self.assertEqual(params, req.call_args[0][1])
self.assertTrue(len(queried) == len(items))
def test_scan_limit(self):
"""
Model.scan(limit)
"""
def fake_scan(*args):
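            # Return the whole result set in a single page so the limit
            # argument alone is what caps iteration at four items.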
scan_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)
data = {
ITEM_COUNT: len(scan_items),
ITEMS: scan_items,
}
return data
mock_scan = MagicMock()
mock_scan.side_effect = fake_scan
with patch(PATCH_METHOD, new=mock_scan) as req:
count = 0
for item in UserModel.scan(limit=4):
count += 1
self.assertIsNotNone(item)
self.assertEqual(count, 4)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in UserModel.query(
'foo',
user_id__begins_with='id',
email__contains='@',
picture__null=False,
zip_code__ge=2,
conditional_operator='AND'):
queried.append(item._serialize())
params = {
'KeyConditions': {
'user_id': {
'AttributeValueList': [
{'S': 'id'}
],
'ComparisonOperator': 'BEGINS_WITH'
},
'user_name': {
'AttributeValueList': [
{'S': 'foo'}
],
'ComparisonOperator': 'EQ'
}
},
'query_filter': {
'email': {
'AttributeValueList': [
{'S': '@'}
],
'ComparisonOperator': 'CONTAINS'
},
'zip_code': {
'ComparisonOperator': 'GE',
'AttributeValueList': [
{'N': '2'},
]
},
'picture': {
'ComparisonOperator': 'NOT_NULL'
}
},
'ConditionalOperator': 'AND',
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
for key in ('ConditionalOperator', 'ReturnConsumedCapacity', 'TableName'):
self.assertEqual(req.call_args[0][1][key], params[key])
for key in ('user_id', 'user_name'):
self.assertEqual(
req.call_args[0][1]['KeyConditions'][key],
params['KeyConditions'][key]
)
for key in ('email', 'zip_code', 'picture'):
self.assertEqual(
sorted(req.call_args[0][1]['QueryFilter'][key].items(), key=lambda x: x[0]),
sorted(params['query_filter'][key].items(), key=lambda x: x[0]),
)
self.assertTrue(len(queried) == len(items))
def test_scan(self):
"""
Model.scan
"""
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
scanned_items = []
for item in UserModel.scan():
scanned_items.append(item._serialize().get(RANGE))
self.assertListEqual(
[item.get('user_id').get(STRING_SHORT) for item in items],
scanned_items
)
def fake_scan(*args):
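            # Simulate paginated scan responses: one item per call, resuming
            # after ExclusiveStartKey and advertising a LastEvaluatedKey.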
kwargs = args[1]
start_key = kwargs.get(EXCLUSIVE_START_KEY, None)
if start_key:
item_idx = 0
for scan_item in BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name):
item_idx += 1
if scan_item == start_key:
break
scan_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[item_idx:item_idx + 1]
else:
scan_items = BATCH_GET_ITEMS.get(RESPONSES).get(UserModel.Meta.table_name)[:1]
data = {
ITEMS: scan_items,
LAST_EVALUATED_KEY: scan_items[-1] if len(scan_items) else None
}
return data
mock_scan = MagicMock()
mock_scan.side_effect = fake_scan
with patch(PATCH_METHOD, new=mock_scan) as req:
for item in UserModel.scan():
self.assertIsNotNone(item)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
for item in UserModel.scan(user_id__contains='tux', zip_code__null=False, email__null=True):
self.assertIsNotNone(item)
params = {
'ReturnConsumedCapacity': 'TOTAL',
'ScanFilter': {
'user_id': {
'AttributeValueList': [
{'S': 'tux'}
],
'ComparisonOperator': 'CONTAINS'
},
'zip_code': {
'ComparisonOperator': 'NOT_NULL'
},
'email': {
'ComparisonOperator': 'NULL'
}
},
'TableName': 'UserModel'
}
self.assertEquals(params, req.call_args[0][1])
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
for item in UserModel.scan(
user_id__contains='tux',
zip_code__null=False,
conditional_operator='OR',
email__null=True):
self.assertIsNotNone(item)
params = {
'ReturnConsumedCapacity': 'TOTAL',
'ScanFilter': {
'user_id': {
'AttributeValueList': [
{'S': 'tux'}
],
'ComparisonOperator': 'CONTAINS'
},
'zip_code': {
'ComparisonOperator': 'NOT_NULL'
},
'email': {
'ComparisonOperator': 'NULL'
},
},
'ConditionalOperator': 'OR',
'TableName': 'UserModel'
}
self.assertEquals(params, req.call_args[0][1])
def test_get(self):
"""
Model.get
"""
def fake_dynamodb(*args):
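            # Serve table metadata for DescribeTable calls and the stored
            # item payload when the GetItem key matches ('foo', 'bar').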
kwargs = args[1]
if kwargs == {'TableName': UserModel.Meta.table_name}:
return MODEL_TABLE_DATA
elif kwargs == {
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel',
'Key': {
'user_name': {'S': 'foo'},
'user_id': {'S': 'bar'}
},
'ConsistentRead': False}:
return GET_MODEL_ITEM_DATA
return MODEL_TABLE_DATA
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as req:
item = UserModel.get(
'foo',
'bar'
)
self.assertEqual(item._get_keys(), {'user_id': 'bar', 'user_name': 'foo'})
params = {
'ConsistentRead': False,
'Key': {
'user_id': {
'S': 'bar'
},
'user_name': {
'S': 'foo'
}
},
'ReturnConsumedCapacity': 'TOTAL',
'TableName': 'UserModel'
}
self.assertEqual(req.call_args[0][1], params)
item.zip_code = 88030
self.assertEqual(item.zip_code, 88030)
with patch(PATCH_METHOD) as req:
req.return_value = {}
self.assertRaises(UserModel.DoesNotExist, UserModel.get, 'foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = CUSTOM_ATTR_NAME_INDEX_TABLE_DATA
CustomAttrNameModel._get_meta_data()
with patch(PATCH_METHOD) as req:
req.return_value = {"ConsumedCapacity": {"CapacityUnits": 0.5, "TableName": "UserModel"}}
self.assertRaises(CustomAttrNameModel.DoesNotExist, CustomAttrNameModel.get, 'foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = {}
self.assertRaises(CustomAttrNameModel.DoesNotExist, CustomAttrNameModel.get, 'foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = CUSTOM_ATTR_NAME_ITEM_DATA
item = CustomAttrNameModel.get('foo', 'bar')
self.assertEqual(item.overidden_attr, CUSTOM_ATTR_NAME_ITEM_DATA['Item']['foo_attr']['S'])
self.assertEqual(item.overidden_user_name, CUSTOM_ATTR_NAME_ITEM_DATA['Item']['user_name']['S'])
self.assertEqual(item.overidden_user_id, CUSTOM_ATTR_NAME_ITEM_DATA['Item']['user_id']['S'])
def test_batch_get(self):
"""
Model.batch_get
"""
with patch(PATCH_METHOD) as req:
req.return_value = SIMPLE_MODEL_TABLE_DATA
SimpleUserModel('foo')
with patch(PATCH_METHOD) as req:
req.return_value = SIMPLE_BATCH_GET_ITEMS
item_keys = ['hash-{0}'.format(x) for x in range(10)]
for item in SimpleUserModel.batch_get(item_keys):
self.assertIsNotNone(item)
params = {
'ReturnConsumedCapacity': 'TOTAL',
'RequestItems': {
'SimpleModel': {
'Keys': [
{'user_name': {'S': 'hash-9'}},
{'user_name': {'S': 'hash-8'}},
{'user_name': {'S': 'hash-7'}},
{'user_name': {'S': 'hash-6'}},
{'user_name': {'S': 'hash-5'}},
{'user_name': {'S': 'hash-4'}},
{'user_name': {'S': 'hash-3'}},
{'user_name': {'S': 'hash-2'}},
{'user_name': {'S': 'hash-1'}},
{'user_name': {'S': 'hash-0'}}
]
}
}
}
self.assertEqual(params, req.call_args[0][1])
with patch(PATCH_METHOD) as req:
req.return_value = MODEL_TABLE_DATA
UserModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
item_keys = [('hash-{0}'.format(x), '{0}'.format(x)) for x in range(10)]
item_keys_copy = list(item_keys)
req.return_value = BATCH_GET_ITEMS
for item in UserModel.batch_get(item_keys):
self.assertIsNotNone(item)
self.assertEqual(item_keys, item_keys_copy)
params = {
'RequestItems': {
'UserModel': {
'Keys': [
{'user_name': {'S': 'hash-0'}, 'user_id': {'S': '0'}},
{'user_name': {'S': 'hash-1'}, 'user_id': {'S': '1'}},
{'user_name': {'S': 'hash-2'}, 'user_id': {'S': '2'}},
{'user_name': {'S': 'hash-3'}, 'user_id': {'S': '3'}},
{'user_name': {'S': 'hash-4'}, 'user_id': {'S': '4'}},
{'user_name': {'S': 'hash-5'}, 'user_id': {'S': '5'}},
{'user_name': {'S': 'hash-6'}, 'user_id': {'S': '6'}},
{'user_name': {'S': 'hash-7'}, 'user_id': {'S': '7'}},
{'user_name': {'S': 'hash-8'}, 'user_id': {'S': '8'}},
{'user_name': {'S': 'hash-9'}, 'user_id': {'S': '9'}}
]
}
}
}
args = req.call_args[0][1]
self.assertTrue('RequestItems' in params)
self.assertTrue('UserModel' in params['RequestItems'])
self.assertTrue('Keys' in params['RequestItems']['UserModel'])
self.assert_dict_lists_equal(
params['RequestItems']['UserModel']['Keys'],
args['RequestItems']['UserModel']['Keys'],
)
def fake_batch_get(*batch_args):
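            # Simulate BatchGetItem returning a single item per call and the
            # remaining keys as UnprocessedKeys, so batch_get retries them.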
kwargs = batch_args[1]
if REQUEST_ITEMS in kwargs:
batch_item = kwargs.get(REQUEST_ITEMS).get(UserModel.Meta.table_name).get(KEYS)[0]
batch_items = kwargs.get(REQUEST_ITEMS).get(UserModel.Meta.table_name).get(KEYS)[1:]
response = {
UNPROCESSED_KEYS: {
UserModel.Meta.table_name: {
KEYS: batch_items
}
},
RESPONSES: {
UserModel.Meta.table_name: [batch_item]
}
}
return response
return {}
batch_get_mock = MagicMock()
batch_get_mock.side_effect = fake_batch_get
with patch(PATCH_METHOD, new=batch_get_mock) as req:
item_keys = [('hash-{0}'.format(x), '{0}'.format(x)) for x in range(200)]
for item in UserModel.batch_get(item_keys):
self.assertIsNotNone(item)
def test_batch_write(self):
"""
Model.batch_write
"""
with patch(PATCH_METHOD) as req:
req.return_value = {}
with UserModel.batch_write(auto_commit=False) as batch:
pass
with UserModel.batch_write() as batch:
self.assertIsNone(batch.commit())
with self.assertRaises(ValueError):
with UserModel.batch_write(auto_commit=False) as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(26)]
for item in items:
batch.delete(item)
self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234'))
with UserModel.batch_write(auto_commit=False) as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(25)]
for item in items:
batch.delete(item)
self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234'))
with UserModel.batch_write(auto_commit=False) as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(25)]
for item in items:
batch.save(item)
self.assertRaises(ValueError, batch.save, UserModel('asdf', '1234'))
with UserModel.batch_write() as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(30)]
for item in items:
batch.delete(item)
with UserModel.batch_write() as batch:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(30)]
for item in items:
batch.save(item)
def fake_unprocessed_keys(*args, **kwargs):
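            # Simulate batch writes where everything after the first request
            # comes back unprocessed and must be resubmitted.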
if pythonic(REQUEST_ITEMS) in kwargs:
batch_items = kwargs.get(pythonic(REQUEST_ITEMS)).get(UserModel.Meta.table_name)[1:]
unprocessed = {
UNPROCESSED_KEYS: {
UserModel.Meta.table_name: batch_items
}
}
return unprocessed
return {}
batch_write_mock = MagicMock()
batch_write_mock.side_effect = fake_unprocessed_keys
with patch(PATCH_METHOD, new=batch_write_mock) as req:
items = [UserModel('hash-{0}'.format(x), '{0}'.format(x)) for x in range(500)]
for item in items:
batch.save(item)
def test_index_queries(self):
"""
Model.Index.Query
"""
with patch(PATCH_METHOD) as req:
req.return_value = CUSTOM_ATTR_NAME_INDEX_TABLE_DATA
CustomAttrNameModel._get_meta_data()
with patch(PATCH_METHOD) as req:
req.return_value = INDEX_TABLE_DATA
IndexedModel._get_connection().describe_table()
with patch(PATCH_METHOD) as req:
req.return_value = LOCAL_INDEX_TABLE_DATA
LocalIndexedModel._get_meta_data()
self.assertEqual(IndexedModel.include_index.Meta.index_name, "non_key_idx")
queried = []
with patch(PATCH_METHOD) as req:
with self.assertRaises(ValueError):
for item in IndexedModel.email_index.query('foo', user_id__between=['id-1', 'id-3']):
queried.append(item._serialize().get(RANGE))
with patch(PATCH_METHOD) as req:
with self.assertRaises(ValueError):
for item in IndexedModel.email_index.query('foo', user_name__startswith='foo'):
queried.append(item._serialize().get(RANGE))
with patch(PATCH_METHOD) as req:
with self.assertRaises(ValueError):
for item in IndexedModel.email_index.query('foo', name='foo'):
queried.append(item._serialize().get(RANGE))
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_name'] = {STRING_SHORT: 'id-{0}'.format(idx)}
item['email'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in IndexedModel.email_index.query('foo', limit=2, user_name__begins_with='bar'):
queried.append(item._serialize())
params = {
'KeyConditions': {
'user_name': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': [
{
'S': u'bar'
}
]
},
'email': {
'ComparisonOperator': 'EQ',
'AttributeValueList': [
{
'S': u'foo'
}
]
}
},
'IndexName': 'custom_idx_name',
'TableName': 'IndexedModel',
'ReturnConsumedCapacity': 'TOTAL',
'Limit': 2
}
self.assertEqual(req.call_args[0][1], params)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_name'] = {STRING_SHORT: 'id-{0}'.format(idx)}
item['email'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in LocalIndexedModel.email_index.query(
'foo',
limit=1,
user_name__begins_with='bar',
aliases__contains=1):
queried.append(item._serialize())
params = {
'KeyConditions': {
'user_name': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': [
{
'S': u'bar'
}
]
},
'email': {
'ComparisonOperator': 'EQ',
'AttributeValueList': [
{
'S': u'foo'
}
]
}
},
'QueryFilter': {
'aliases': {
'AttributeValueList': [
{
'SS': ['1']
}
],
'ComparisonOperator': 'CONTAINS'
}
},
'IndexName': 'email_index',
'TableName': 'LocalIndexedModel',
'ReturnConsumedCapacity': 'TOTAL',
'Limit': 1
}
self.assertEqual(req.call_args[0][1], params)
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_name'] = {STRING_SHORT: 'id-{0}'.format(idx)}
items.append(item)
req.return_value = {'Items': items}
queried = []
for item in CustomAttrNameModel.uid_index.query('foo', limit=2, user_name__begins_with='bar'):
queried.append(item._serialize())
params = {
'KeyConditions': {
'user_name': {
'ComparisonOperator': 'BEGINS_WITH',
'AttributeValueList': [
{
'S': u'bar'
}
]
},
'user_id': {
'ComparisonOperator': 'EQ',
'AttributeValueList': [
{
'S': u'foo'
}
]
}
},
'IndexName': 'uid_index',
'TableName': 'CustomAttrModel',
'ReturnConsumedCapacity': 'TOTAL',
'Limit': 2
}
self.assertEqual(req.call_args[0][1], params)
def test_multiple_indices_share_non_key_attribute(self):
"""
Models.Model
"""
scope_args = {'count': 0}
def fake_dynamodb(*args, **kwargs):
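            # The first DescribeTable call raises ResourceNotFoundException;
            # later calls succeed so create_table issues its CreateTable request.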
if scope_args['count'] == 0:
scope_args['count'] += 1
raise ClientError({'Error': {'Code': 'ResourceNotFoundException', 'Message': 'Not Found'}},
"DescribeTable")
return {}
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as req:
IndexedModel.create_table(read_capacity_units=2, write_capacity_units=2)
params = {
'AttributeDefinitions': [
{'AttributeName': 'email', 'AttributeType': 'S'},
{'AttributeName': 'numbers', 'AttributeType': 'NS'},
{'AttributeName': 'user_name', 'AttributeType': 'S'}
]
}
args = req.call_args[0][1]
self.assert_dict_lists_equal(args['AttributeDefinitions'], params['AttributeDefinitions'])
scope_args['count'] = 0
with patch(PATCH_METHOD, new=fake_db) as req:
GameModel.create_table()
params = {
'KeySchema': [
{'KeyType': 'HASH', 'AttributeName': 'player_id'},
{'KeyType': 'RANGE', 'AttributeName': 'created_time'}
],
'LocalSecondaryIndexes': [
{
'KeySchema': [
{'KeyType': 'HASH', 'AttributeName': 'player_id'},
{'KeyType': 'RANGE', 'AttributeName': 'winner_id'}
],
'IndexName': 'player_opponent_index',
'Projection': {'ProjectionType': 'ALL'}
}
],
'TableName': 'GameModel',
'ProvisionedThroughput': {'WriteCapacityUnits': 1, 'ReadCapacityUnits': 1},
'GlobalSecondaryIndexes': [
{
'ProvisionedThroughput': {'WriteCapacityUnits': 1, 'ReadCapacityUnits': 1},
'KeySchema': [
{'KeyType': 'HASH', 'AttributeName': 'winner_id'},
{'KeyType': 'RANGE', 'AttributeName': 'created_time'}
],
'IndexName': 'opponent_time_index',
'Projection': {'ProjectionType': 'ALL'}
}
],
'AttributeDefinitions': [
{'AttributeName': 'created_time', 'AttributeType': 'S'},
{'AttributeName': 'player_id', 'AttributeType': 'S'},
{'AttributeName': 'winner_id', 'AttributeType': 'S'}
]
}
args = req.call_args[0][1]
for key in ['KeySchema', 'AttributeDefinitions', 'LocalSecondaryIndexes', 'GlobalSecondaryIndexes']:
self.assert_dict_lists_equal(args[key], params[key])
def test_global_index(self):
"""
Models.GlobalSecondaryIndex
"""
self.assertIsNotNone(IndexedModel.email_index._hash_key_attribute())
self.assertEqual(IndexedModel.email_index.Meta.projection.projection_type, AllProjection.projection_type)
with patch(PATCH_METHOD) as req:
req.return_value = INDEX_TABLE_DATA
with self.assertRaises(ValueError):
IndexedModel('foo', 'bar')
IndexedModel._get_meta_data()
scope_args = {'count': 0}
def fake_dynamodb(*args, **kwargs):
if scope_args['count'] == 0:
scope_args['count'] += 1
raise ClientError({'Error': {'Code': 'ResourceNotFoundException', 'Message': 'Not Found'}},
"DescribeTable")
else:
return {}
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as req:
IndexedModel.create_table(read_capacity_units=2, write_capacity_units=2)
params = {
'AttributeDefinitions': [
{'attribute_name': 'email', 'attribute_type': 'S'},
{'attribute_name': 'numbers', 'attribute_type': 'NS'}
],
'KeySchema': [
{'AttributeName': 'numbers', 'KeyType': 'RANGE'},
{'AttributeName': 'email', 'KeyType': 'HASH'}
]
}
schema = IndexedModel.email_index._get_schema()
args = req.call_args[0][1]
self.assertEqual(
args['GlobalSecondaryIndexes'][0]['ProvisionedThroughput'],
{
'ReadCapacityUnits': 2,
'WriteCapacityUnits': 1
}
)
self.assert_dict_lists_equal(schema['key_schema'], params['KeySchema'])
self.assert_dict_lists_equal(schema['attribute_definitions'], params['AttributeDefinitions'])
def test_local_index(self):
"""
Models.LocalSecondaryIndex
"""
with self.assertRaises(ValueError):
with patch(PATCH_METHOD) as req:
req.return_value = LOCAL_INDEX_TABLE_DATA
# This table has no range key
LocalIndexedModel('foo', 'bar')
with patch(PATCH_METHOD) as req:
req.return_value = LOCAL_INDEX_TABLE_DATA
LocalIndexedModel('foo')
schema = IndexedModel._get_indexes()
expected = {
'local_secondary_indexes': [
{
'KeySchema': [
{'KeyType': 'HASH', 'AttributeName': 'email'},
{'KeyType': 'RANGE', 'AttributeName': 'numbers'}
],
'IndexName': 'include_index',
'projection': {
'ProjectionType': 'INCLUDE',
'NonKeyAttributes': ['numbers']
}
}
],
'global_secondary_indexes': [
{
'KeySchema': [
{'KeyType': 'HASH', 'AttributeName': 'email'},
{'KeyType': 'RANGE', 'AttributeName': 'numbers'}
],
'IndexName': 'email_index',
'projection': {'ProjectionType': 'ALL'},
'provisioned_throughput': {
'WriteCapacityUnits': 1,
'ReadCapacityUnits': 2
}
}
],
'attribute_definitions': [
{'attribute_type': 'S', 'attribute_name': 'email'},
{'attribute_type': 'NS', 'attribute_name': 'numbers'},
{'attribute_type': 'S', 'attribute_name': 'email'},
{'attribute_type': 'NS', 'attribute_name': 'numbers'}
]
}
self.assert_dict_lists_equal(
schema['attribute_definitions'],
expected['attribute_definitions']
)
self.assertEqual(schema['local_secondary_indexes'][0]['projection']['ProjectionType'], 'INCLUDE')
self.assertEqual(schema['local_secondary_indexes'][0]['projection']['NonKeyAttributes'], ['numbers'])
scope_args = {'count': 0}
def fake_dynamodb(*args, **kwargs):
if scope_args['count'] == 0:
scope_args['count'] += 1
raise ClientError({'Error': {'Code': 'ResourceNotFoundException', 'Message': 'Not Found'}},
"DescribeTable")
else:
return {}
fake_db = MagicMock()
fake_db.side_effect = fake_dynamodb
with patch(PATCH_METHOD, new=fake_db) as req:
LocalIndexedModel.create_table(read_capacity_units=2, write_capacity_units=2)
params = {
'AttributeDefinitions': [
{
'attribute_name': 'email', 'attribute_type': 'S'
},
{
'attribute_name': 'numbers',
'attribute_type': 'NS'
}
],
'KeySchema': [
{
'AttributeName': 'email', 'KeyType': 'HASH'
},
{
'AttributeName': 'numbers', 'KeyType': 'RANGE'
}
]
}
schema = LocalIndexedModel.email_index._get_schema()
args = req.call_args[0][1]
self.assert_dict_lists_equal(schema['attribute_definitions'], params['AttributeDefinitions'])
self.assert_dict_lists_equal(schema['key_schema'], params['KeySchema'])
self.assertTrue('ProvisionedThroughput' not in args['LocalSecondaryIndexes'][0])
def test_projections(self):
"""
Models.Projection
"""
projection = AllProjection()
self.assertEqual(projection.projection_type, ALL)
projection = KeysOnlyProjection()
self.assertEqual(projection.projection_type, KEYS_ONLY)
projection = IncludeProjection(non_attr_keys=['foo', 'bar'])
self.assertEqual(projection.projection_type, INCLUDE)
self.assertEqual(projection.non_key_attributes, ['foo', 'bar'])
self.assertRaises(ValueError, IncludeProjection, None)
with self.assertRaises(ValueError):
class BadIndex(Index):
pass
BadIndex()
with self.assertRaises(ValueError):
class BadIndex(Index):
class Meta:
pass
pass
BadIndex()
def test_throttle(self):
"""
Throttle.add_record
"""
throt = Throttle(30)
throt.add_record(None)
for i in range(10):
throt.add_record(1)
throt.throttle()
for i in range(2):
throt.add_record(50)
throt.throttle()
def test_old_style_model_exception(self):
"""
Display warning for pre v1.0 Models
"""
with self.assertRaises(AttributeError):
OldStyleModel._get_meta_data()
with self.assertRaises(AttributeError):
OldStyleModel.exists()
def test_dumps(self):
"""
Model.dumps
"""
with patch(PATCH_METHOD) as req:
items = []
for idx in range(10):
item = copy.copy(GET_MODEL_ITEM_DATA.get(ITEM))
item['user_id'] = {STRING_SHORT: 'id-{0}'.format(idx)}
item['email'] = {STRING_SHORT: 'email-{0}'.format(random.randint(0, 65536))}
item['picture'] = {BINARY_SHORT: BINARY_ATTR_DATA}
items.append(item)
req.return_value = {'Items': items}
content = UserModel.dumps()
serialized_items = json.loads(content)
for original, new_item in zip(items, serialized_items):
self.assertEqual(new_item[0], original['user_name'][STRING_SHORT])
self.assertEqual(new_item[1][pythonic(ATTRIBUTES)]['zip_code']['N'], original['zip_code']['N'])
self.assertEqual(new_item[1][pythonic(ATTRIBUTES)]['email']['S'], original['email']['S'])
self.assertEqual(new_item[1][pythonic(ATTRIBUTES)]['picture']['B'], original['picture']['B'])
def test_loads(self):
"""
Model.loads
"""
with patch(PATCH_METHOD) as req:
req.return_value = {}
UserModel.loads(json.dumps(SERIALIZED_TABLE_DATA))
args = {
'UserModel': [
{
'PutRequest': {
'Item': {
'user_id': {'S': u'id-0'},
'callable_field': {'N': '42'},
'user_name': {'S': u'foo'},
'email': {'S': u'email-7980'},
'picture': {
"B": "aGVsbG8sIHdvcmxk"
},
'zip_code': {'N': '88030'}
}
}
},
{
'PutRequest': {
'Item': {
'user_id': {'S': u'id-1'},
'callable_field': {'N': '42'},
'user_name': {'S': u'foo'},
'email': {'S': u'email-19770'},
'picture': {
"B": "aGVsbG8sIHdvcmxk"
},
'zip_code': {'N': '88030'}
}
}
}
]
}
self.assert_dict_lists_equal(req.call_args[0][1]['RequestItems']['UserModel'], args['UserModel'])
|
mit
| 284,875,441,389,348,600 | 35.013864 | 114 | 0.442237 | false |
cbertinato/pandas
|
pandas/tests/plotting/test_backend.py
|
1
|
1151
|
import pytest
import pandas
def test_matplotlib_backend_error():
msg = ('matplotlib is required for plotting when the default backend '
'"matplotlib" is selected.')
try:
import matplotlib # noqa
except ImportError:
with pytest.raises(ImportError, match=msg):
pandas.set_option('plotting.backend', 'matplotlib')
def test_backend_is_not_module():
msg = ('"not_an_existing_module" does not seem to be an installed module. '
'A pandas plotting backend must be a module that can be imported')
with pytest.raises(ValueError, match=msg):
pandas.set_option('plotting.backend', 'not_an_existing_module')
def test_backend_is_correct(monkeypatch):
monkeypatch.setattr('pandas.core.config_init.importlib.import_module',
lambda name: None)
pandas.set_option('plotting.backend', 'correct_backend')
assert pandas.get_option('plotting.backend') == 'correct_backend'
    # Restore backend for other tests (matplotlib may not be installed)
try:
pandas.set_option('plotting.backend', 'matplotlib')
except ImportError:
pass
|
bsd-3-clause
| 529,602,287,782,063,040 | 33.878788 | 79 | 0.674196 | false |
jaalto/httping
|
plot-json.py
|
2
|
1581
|
#! /usr/bin/python
import sys
import json
import os
import math
fin = sys.argv[1]
print "Loading from %s" % (fin)
fh = open(fin)
json_data = json.load(fh)
print "Number of rows: %d" % (len(json_data))
fdata = fin + ".dat"
print "Writing data to %s" % (fdata)
data_fh = open(fdata, "w")
host='?'
total=0
total_sd=0
n=0
avg=0
sd=0
minp = 999999999
maxp = -minp
for row in json_data:
if row['status'] == '1':
val = float(row['total_s'])
data_fh.write("%f %f\n" % (float(row['start_ts']), val))
host=row['host']
total += val
total_sd += val * val
n += 1
if val > maxp:
maxp = val
if val < minp:
minp = val
print "Rows ignored: %d" % (len(json_data) - n)
data_fh.close()
if n > 0:
avg = total / n
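    # population standard deviation: sqrt(mean(x^2) - mean(x)^2)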
sd = math.sqrt((total_sd / n) - math.pow(avg, 2.0))
print "Average ping time: %fs (%d pings)" % (avg, n)
print "Standard deviation: %fs" % (sd)
print "Minimum ping value: %fs" % (minp)
print "Maximum ping value: %fs" % (maxp)
fscript = fin + ".sh"
print "Writing script to %s" % (fscript)
fpng = fin + ".png"
script_fh = open(fscript, "w")
script_fh.write("#! /bin/sh\n\n")
script_fh.write("gnuplot <<EOF > " + fpng + "\n")
script_fh.write("set term png size 800,600 tiny\n")
script_fh.write("set autoscale\n")
script_fh.write("set timefmt \"%s\"\n")
script_fh.write("set xdata time\n")
script_fh.write("set format x \"%H:%M:%S\"\n")
script_fh.write("plot \"" + fdata + "\" using 1:2 with lines title \"" + host + "\"\n")
script_fh.write("EOF\n")
os.chmod(fscript, 0755)
script_fh.close()
print "Now invoke %s to generate %s" % (fscript, fpng)
|
gpl-2.0
| 7,975,171,851,592,360,000 | 19.269231 | 87 | 0.611006 | false |
mediafactory/yats
|
modules/yats/__init__.py
|
1
|
1499
|
# -*- coding: utf-8 -*-
from django.conf import settings
from sys import version_info
from yats.api import *
def update_permissions_after_migration(app,**kwargs):
"""
Update app permission just after every migration.
This is based on app django_extensions update_permissions management command.
"""
from django.db.models import get_app, get_models
from django.contrib.auth.management import create_permissions
create_permissions(get_app(app), get_models(), 2 if settings.DEBUG else 0)
version = '@version@'
if 'version' in version:
VERSION = ('a', 'b', 'c', '', 0)
else:
VERSION = version.split('.')
VERSION.append('')
VERSION.append(0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if VERSION[3] != 'final':
version = '%s %s %s' % (version, VERSION[3], VERSION[4])
return version
def get_python_version():
version = '%s.%s' % (version_info[0], version_info[1])
if version_info[2]:
version = '%s.%s' % (version, version_info[2])
if version_info[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
if version_info[3] != 'final':
version = '%s %s %s' % (version, version_info[3], version_info[4])
return version
def access_to_settings(request):
return {'SETTINGS': settings}
|
mit
| -2,771,371,696,905,066,500 | 29.591837 | 81 | 0.607071 | false |
L33tCh/afj-flask
|
manage.py
|
1
|
1644
|
# manage.py
import os
import unittest
import coverage
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from project.server import app, db, models
COV = coverage.coverage(
branch=True,
include='project/*',
omit=[
'project/tests/*',
'project/server/config.py',
'project/server/*/__init__.py'
]
)
COV.start()
migrate = Migrate(app, db)
manager = Manager(app)
# migrations
manager.add_command('db', MigrateCommand)
@manager.command
def test():
"""Runs the unit tests without test coverage."""
tests = unittest.TestLoader().discover('project/tests', pattern='test*.py')
result = unittest.TextTestRunner(verbosity=2).run(tests)
print('well')
if result.wasSuccessful():
print('nada')
return 0
print('hmm')
return 1
@manager.command
def cov():
"""Runs the unit tests with coverage."""
tests = unittest.TestLoader().discover('project/tests')
result = unittest.TextTestRunner(verbosity=2).run(tests)
if result.wasSuccessful():
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
basedir = os.path.abspath(os.path.dirname(__file__))
covdir = os.path.join(basedir, 'tmp/coverage')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
return 0
return 1
@manager.command
def create_db():
"""Creates the db tables."""
db.create_all()
@manager.command
def drop_db():
"""Drops the db tables."""
db.drop_all()
if __name__ == '__main__':
manager.run()
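# Typical invocations (illustrative; assumes the 'project.server' package and its
# database settings are configured):
#   python manage.py create_db    # create the database tables
#   python manage.py test         # run the unit tests without coverage
#   python manage.py cov          # run the unit tests and print a coverage report
#   python manage.py db migrate   # Flask-Migrate commands exposed under 'db'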
|
mit
| 2,866,174,267,722,228,700 | 20.363636 | 79 | 0.628954 | false |
dek-odoo/python-samples
|
python exercises/dek_program017.py
|
1
|
1356
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author : (DEK) Devendra Kavthekar
# program# : Name
# =>
# Write a program that computes the net amount of a bank account
# based a transaction log from console input. The transaction log
# format is shown as following:
# D 100
# W 200
# ...
# D means deposit while W means withdrawal.
# Suppose the following input is supplied to the program:
# D 300
# D 300
# W 200
# D 100
# Then, the output should be:
# 500
# Hints:
# In case of input data being supplied to the question, it should be
# assumed to be a console input.
def main():
bal = 0
print "\n************Press Enter to Exit()*******************\n"
print '\n\'CODE(D/W) Amount\' : '
while True:
print "Balance: ", bal
str = raw_input()
if not str: # If str is blank, exit
break
str = str.split()
command = [x for x in str]
wd = command[0]
amount = int(command[1])
if wd == 'd' or wd == 'D':
bal = bal + amount
elif wd == 'w' or wd == 'W':
if bal > 0 and bal >= amount:
bal = bal - amount
else:
print "\"can't withdraw More\", your balance is: %d" % (bal)
# break
else:
print "Invalid command"
print "Balance is ", bal
main()
# checked
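# Example session (illustrative), following the specification in the header
# comments above:
#   D 300
#   D 300
#   W 200
#   D 100
#   <blank line to exit>   -> last line printed: "Balance is  500"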
|
apache-2.0
| -1,340,673,859,671,175,200 | 24.509434 | 76 | 0.54068 | false |
srlobo/slutils
|
slutils/sqlalchemy/group_concat.py
|
1
|
1779
|
# -*- coding: utf-8 -*-
# Adds MySQL's GROUP_CONCAT functionality to SQLAlchemy
from sqlalchemy.ext import compiler
from sqlalchemy.sql import ColumnElement
from sqlalchemy.orm.attributes import InstrumentedAttribute
class group_concat(ColumnElement):
def __init__(self, col1, col2=None, separator=None):
if isinstance(col1, InstrumentedAttribute):
self.col1 = col1.property.columns[0]
else:
self.col1 = col1
if col2 is not None:
if isinstance(col2, InstrumentedAttribute):
self.col2 = col2.property.columns[0]
else:
self.col2 = col2
else:
self.col2 = self.col1
self.type = self.col1.type
self.separator = separator
@compiler.compiles(group_concat, 'mysql')
def compile_group_concat(element, compiler, **kw):
if element.separator:
return "GROUP_CONCAT(%s ORDER BY %s SEPARATOR '%s')" % (
compiler.process(element.col1),
compiler.process(element.col2),
element.separator,
)
else:
return "GROUP_CONCAT(%s ORDER BY %s)" % (
compiler.process(element.col1),
compiler.process(element.col2),
)
if __name__ == "__main__":
from sqlalchemy import MetaData, Table, Column, String, select
from sqlalchemy import create_engine
m = MetaData()
t = Table('t1', m, Column('foo', String), Column('bar', String))
print select([group_concat(t.c.foo, t.c.bar)]).\
compile(bind=create_engine('mysql://'))
print select([group_concat(t.c.foo)]).\
compile(bind=create_engine('mysql://'))
print select([group_concat(t.c.foo, separator="tocata")]).\
compile(bind=create_engine('mysql://'))
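    # Further illustrative compile (an addition, not in the original example set):
    # grouping t.foo per distinct t.bar with an explicit separator, to show
    # group_concat() used alongside GROUP BY in a core select.
    print select([t.c.bar, group_concat(t.c.foo, separator=' / ')]).\
        group_by(t.c.bar).\
        compile(bind=create_engine('mysql://'))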
|
gpl-2.0
| -7,678,533,400,014,553,000 | 29.655172 | 68 | 0.610236 | false |
mosquito/lumper
|
lumper/modes/worker.py
|
1
|
1481
|
#!/usr/bin/env python
# encoding: utf-8
from crew.worker import Listener, Context, NODE_UUID, UUID
from pika import PlainCredentials
from crew.worker import context
import logging
import docker
import docker.tls
import lumper.worker
def run(args):
log = logging.getLogger("main")
if args.docker_tls:
tls = docker.tls.TLSConfig(client_cert=(args.docker_client_cert, args.docker_client_key),
ca_cert=args.docker_ca_cert, assert_hostname=False)
else:
tls = False
docker_client = docker.Client(base_url=args.docker_url, tls=tls, timeout=300)
docker_client.verify = args.docker_tls_strict
try:
log.info('Testing docker connection: %s', args.docker_url)
docker_client.info()
Listener(
port=args.amqp_port,
host=args.amqp_address,
credentials=PlainCredentials(username=args.amqp_user, password=args.amqp_password) if args.amqp_user else None,
virtual_host=args.amqp_vhost,
handlers=context.handlers,
set_context=Context(
options=args,
node_uuid=NODE_UUID,
uuid=UUID,
heartbeat_counter=0,
docker=docker_client
)
).loop()
except Exception as e:
if logging.getLogger().level < logging.INFO:
log.exception(e)
else:
log.fatal("Exiting by fatal error: %s", e)
return 0
|
lgpl-3.0
| 2,745,685,323,294,813,000 | 30.510638 | 123 | 0.607022 | false |
stormandco/aiohttp_debugtoolbar
|
aiohttp_debugtoolbar/main.py
|
1
|
3654
|
import os
import jinja2
import aiohttp_jinja2
from . import views
from . import panels
from .middlewares import middleware
from .utils import APP_KEY, TEMPLATE_KEY, STATIC_ROUTE_NAME, hexlify, \
ToolbarStorage, ExceptionHistory
from .views import ExceptionDebugView
default_panel_names = [
panels.HeaderDebugPanel,
panels.PerformanceDebugPanel,
panels.RequestVarsDebugPanel,
panels.TracebackPanel,
panels.LoggingPanel,
]
default_global_panel_names = [
panels.RoutesDebugPanel,
panels.SettingsDebugPanel,
panels.MiddlewaresDebugPanel,
panels.VersionDebugPanel,
]
default_settings = {
'enabled': True,
'intercept_exc': 'debug', # display or debug or False
'intercept_redirects': True,
'panels': default_panel_names,
'extra_panels': [],
'global_panels': default_global_panel_names,
'extra_global_panels': [],
'hosts': ['127.0.0.1', '::1'],
'exclude_prefixes': [],
'button_style': '',
'max_request_history': 100,
'max_visible_requests': 10,
}
def setup(app, **kw):
config = {}
config.update(default_settings)
config.update(kw)
APP_ROOT = os.path.dirname(os.path.abspath(__file__))
app[APP_KEY] = {}
if middleware not in app.middlewares:
app.middlewares.append(middleware)
templates_app = os.path.join(APP_ROOT, 'templates')
templates_panels = os.path.join(APP_ROOT, 'panels/templates')
app[APP_KEY]['settings'] = config
loader = jinja2.FileSystemLoader([templates_app, templates_panels])
aiohttp_jinja2.setup(app, loader=loader, app_key=TEMPLATE_KEY)
static_location = os.path.join(APP_ROOT, 'static')
exc_handlers = ExceptionDebugView()
app.router.add_static('/_debugtoolbar/static', static_location,
name=STATIC_ROUTE_NAME)
app.router.add_route('GET', '/_debugtoolbar/source', exc_handlers.source,
name='debugtoolbar.source')
app.router.add_route('GET', '/_debugtoolbar/execute', exc_handlers.execute,
name='debugtoolbar.execute')
# app.router.add_route('GET', '/_debugtoolbar/console',
# exc_handlers.console,
# name='debugtoolbar.console')
app.router.add_route('GET', '/_debugtoolbar/exception',
exc_handlers.exception,
name='debugtoolbar.exception')
# TODO: fix when sql will be ported
# app.router.add_route('GET', '_debugtoolbar/sqlalchemy/sql_select',
# name='debugtoolbar.sql_select')
# app.router.add_route('GET', '_debugtoolbar/sqlalchemy/sql_explain',
# name='debugtoolbar.sql_explain')
app.router.add_route('GET', '/_debugtoolbar/sse', views.sse,
name='debugtoolbar.sse')
app.router.add_route('GET', '/_debugtoolbar/{request_id}',
views.request_view, name='debugtoolbar.request')
app.router.add_route('GET', '/_debugtoolbar', views.request_view,
name='debugtoolbar.main')
app.router.add_route('GET', '/_debugtoolbar', views.request_view,
name='debugtoolbar')
def settings_opt(name):
return app[APP_KEY]['settings'][name]
max_request_history = settings_opt('max_request_history')
app[APP_KEY]['request_history'] = ToolbarStorage(max_request_history)
app[APP_KEY]['exc_history'] = ExceptionHistory()
app[APP_KEY]['pdtb_token'] = hexlify(os.urandom(10))
intercept_exc = settings_opt('intercept_exc')
if intercept_exc:
app[APP_KEY]['exc_history'].eval_exc = intercept_exc == 'debug'
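if __name__ == '__main__':  # pragma: no cover
    # Minimal usage sketch (illustrative only): attach the toolbar to a bare
    # aiohttp application and serve it. The host list and the decision to run
    # directly from this module are assumptions for demonstration purposes.
    from aiohttp import web
    demo_app = web.Application()
    setup(demo_app, hosts=['127.0.0.1'], intercept_redirects=False)
    web.run_app(demo_app)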
|
apache-2.0
| -954,536,894,783,035,800 | 33.8 | 79 | 0.6341 | false |
jbd/treewatcher
|
setup.py
|
1
|
1411
|
#!/usr/bin/env python
# check Python's version
import sys
if sys.version < '2.5':
sys.stderr.write('This module requires at least Python 2.5\n')
sys.exit(1)
# import statements
import platform
from setuptools import setup, Command
# debug
DISTUTILS_DEBUG = False
# check linux platform
if not platform.system().startswith('Linux'):
sys.stderr.write("inotify is not available under %s\n" % platform)
sys.exit(1)
#class test(Command):
# description = 'run tests'
# user_options = []
#
# def initialize_options(self):
# pass
#
# def finalize_options(self):
# pass
#
# def run(self):
# from tests.run_all import main
# main()
classif = [
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: GPL v3',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Monitoring'
]
setup(
# cmdclass = {'test': test},
name = 'treewatcher',
version = '0.0.1',
description = 'Linux filesystem directory monitoring',
author = 'Jean-Baptiste Denis',
author_email = 'jeanbaptiste.denis@gmail.com',
license = 'GPL v3',
platforms = 'Linux',
classifiers = classif,
packages = ['treewatcher'],
install_requires = ["inotifyx>=0.1.1"]
)
|
gpl-3.0
| 1,692,870,964,562,512,400 | 22.131148 | 70 | 0.628632 | false |
borg-project/cargo
|
src/python/cargo/random.py
|
1
|
1157
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
from __future__ import absolute_import
import numpy
def get_random_random(random = numpy.random):
"""
Get a randomly-initialized PRNG.
"""
from numpy.random import RandomState
return RandomState(random.randint(numpy.iinfo(int).max))
def random_subsets(sequence, sizes, random = numpy.random):
"""
Return a series of non-intersecting random subsets of a sequence.
"""
sa = shuffled(sequence, random = random)
index = 0
subsets = []
for size in sizes:
assert len(sa) >= index + size
subsets.append(sa[index:index + size])
index += size
return subsets
def draw(p, random = numpy.random, normalize = True):
"""
Return an index selected according to array of probabilities C{p}.
Normalizes by default.
"""
if normalize:
p = p / numpy.sum(p)
((i,),) = numpy.nonzero(random.multinomial(1, p))
return i
def grab(sequence, random = numpy.random):
"""
Return a randomly-selected element from the sequence.
"""
return sequence[random.randint(len(sequence))]
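if __name__ == "__main__":
    # Illustrative usage sketch (not part of the original module): draw() picks
    # an index in proportion to the given (optionally unnormalized) weights and
    # grab() returns a uniformly chosen element.
    weights = numpy.array([1.0, 3.0, 6.0])
    print(draw(weights))            # index 2 is the most likely outcome
    print(grab(["a", "b", "c"]))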
|
mit
| -8,786,405,354,850,565,000 | 19.660714 | 70 | 0.633535 | false |
ADicksonLab/wepy
|
tasks/modules/docs.py
|
1
|
16705
|
from invoke import task
# from ..config import ()
import os
import os.path as osp
from pathlib import Path
import shutil as sh
from warnings import warn
## Paths for the different things
DOCS_TEST_DIR = "tests/test_docs/_tangled_docs"
DOCS_EXAMPLES_DIR = "tests/test_docs/_examples"
DOCS_TUTORIALS_DIR = "tests/test_docs/_tutorials"
DOCS_SPEC = {
'LANDING_PAGE' : "README.org",
'INFO_INDEX' : "info/README.org",
'QUICK_START' : "info/quick_start.org",
'INTRODUCTION' : "info/introduction.org",
'INSTALLATION' : "info/installation.org",
'USERS_GUIDE' : "info/users_guide.org",
'HOWTOS' : "info/howtos.org",
'REFERENCE' : "info/reference.org",
'TROUBLESHOOTING' : "info/troubleshooting.org",
'GLOSSARY' : "info/glossary.rst",
'BIBLIOGRAPHY' : "info/docs.bib",
'DEV_GUIDE' : "info/dev_guide.org",
'GENERAL' : "info/general_info.org",
'NEWS' : "info/news.org",
'CHANGELOG' : "info/changelog.org",
'EXAMPLES_DIR' : "info/examples",
'EXAMPLES_LISTING_INDEX' : "info/examples/README.org",
# Other examples must be in a directory in the EXAMPLES_DIR and have
# their own structure:
# potentially literate document with source code. If not literate then
# code should be in the EXAMPLE_SOURCE directory. This index should
# still exist and give instructions on how to use and run etc. tangled
# source will go in the EXAMPLE_TANGLE_SOURCE folder.
'EXAMPLE_INDEX' : "README.org",
'EXAMPLE_TASKS' : "tasks.py",
'EXAMPLE_BUILD' : "dodo.py",
# Source code for example that is not literately included in the
# README.org
'EXAMPLE_SOURCE' : "source",
# included in the source tree
'EXAMPLE_INPUT' : "input",
# values are automatically excluded from the source tree via
# .gitignore
'EXAMPLE_OUTPUT' : "_output",
# the directory that tangled source files will go, separate from the
# source dir, this folder will be ignored by VCS
'EXAMPLE_TANGLE_SOURCE' : "_tangle_source",
# the actual dir the env will be built into
'EXAMPLE_ENV' : "_env",
'TUTORIALS_DIR' : "info/tutorials",
'TUTORIALS_LISTING_INDEX' : "info/tutorials/README.org",
# Other tutorials must be in a directory in the TUTORIALS_DIR and have
# their own structure:
    # the main document for the tutorial can be any *one* of the
    # supported formats: org or Jupyter Notebook, listed in order of
    # precedence.
'TUTORIAL_INDEX' : (
"README.org",
"README.ipynb",
),
'TUTORIAL_TASKS' : "tasks.py",
'TUTORIAl_BUILD' : "dodo.py",
# Source code for tutorial that is not literately included in the
# README.org
'TUTORIAL_SOURCE' : "source",
# included in the source tree
'TUTORIAL_INPUT' : "input",
# values are automatically excluded from the source tree via
# .gitignore
'TUTORIAL_OUTPUT' : "_output",
# the directory that tangled source files will go, separate from the
# source dir, this folder will be ignored by VCS
'TUTORIAL_TANGLE_SOURCE' : "_tangle_source",
# the actual dir the env will be built into
'TUTORIAL_ENV' : "_env",
}
# here for reference potentially could be applied with an init function
GITIGNORE_LINES = [
"info/examples/*/_output",
"info/examples/*/_tangle_source",
"info/examples/*/_env",
"info/tutorials/*/_output",
"info/tutorials/*/_tangle_source",
"info/tutorials/*/_env",
]
# TODO: add a docs init task that generates all the files and adds to
# the gitignore.
def visit_docs():
"""Returns a list of all the doc pages with their relative paths to
the root of the project. Not including examples and tutorials
which are tested differently.
"""
# get the pages which are always there
page_keys = [
'LANDING_PAGE',
'INFO_INDEX',
'QUICK_START',
'INTRODUCTION',
'INSTALLATION',
'USERS_GUIDE',
'HOWTOS',
'REFERENCE',
'TROUBLESHOOTING',
'GLOSSARY',
'DEV_GUIDE',
'GENERAL',
'NEWS',
'CHANGELOG',
'EXAMPLES_LISTING_INDEX',
'TUTORIALS_LISTING_INDEX',
]
# dereference their paths
page_paths = [DOCS_SPEC[key] for key in page_keys]
return page_paths
def visit_examples():
"""Get the relative paths to all of the example dirs."""
# get the pages for the tutorials and examples
examples = [ex for ex in os.listdir(DOCS_SPEC['EXAMPLES_DIR'])
if (
ex != Path(DOCS_SPEC['EXAMPLES_LISTING_INDEX']).parts[-1] and
ex != '.keep' and
not ex.endswith("~")
)
]
example_dirs = [Path(DOCS_SPEC['EXAMPLES_DIR']) / example for example in examples]
return example_dirs
def visit_example_contents(example_dir):
    """Collect the index page for a single example directory."""
    example_pages = []
    example_index = Path(example_dir) / DOCS_SPEC['EXAMPLE_INDEX']
    if example_index.exists():
        example_pages.append(example_index)
    else:
        warn(f"No example index page for {example_dir}")
    return example_pages
def visit_tutorials():
"""Get the relative paths to all of the tutorial dirs."""
# get the pages for the tutorials and tutorials
tutorials = [tut for tut in os.listdir(DOCS_SPEC['TUTORIALS_DIR'])
if (
tut != Path(DOCS_SPEC['TUTORIALS_LISTING_INDEX']).parts[-1] and
tut != 'index.rst' and
tut != '.keep' and
not tut.endswith("~")
)
]
tutorial_dirs = [Path(DOCS_SPEC['TUTORIALS_DIR']) / tutorial for tutorial in tutorials]
return tutorial_dirs
def tangle_orgfile(cx, file_path):
"""Tangle the target file using emacs in batch mode. Implicitly dumps
things relative to the file."""
cx.run(f"emacs -Q --batch -l org {file_path} -f org-babel-tangle")
def tangle_jupyter(cx, file_path):
"""Tangle the target file using jupyter-nbconvert to a python
script. Implicitly dumps things relative to the file. Only can
make a single script from the notebook with the same name.
"""
cx.run(f"jupyter-nbconvert --to 'python' {file_path}")
@task
def list_docs(cx):
"""List paths relative to this context"""
print('\n'.join([str(Path(cx.cwd) / p) for p in visit_docs()]))
@task
def list_examples(cx):
"""List paths relative to this context"""
print('\n'.join([str(Path(cx.cwd) / ex) for ex in visit_examples()]))
@task
def list_tutorials(cx):
"""List paths relative to this context"""
print('\n'.join([str(Path(cx.cwd) / tut) for tut in visit_tutorials()]))
@task()
def clean_tangle(cx):
"""remove the tangle dirs"""
sh.rmtree(Path(cx.cwd) / DOCS_TEST_DIR,
ignore_errors=True)
sh.rmtree(Path(cx.cwd) / DOCS_EXAMPLES_DIR,
ignore_errors=True)
sh.rmtree(Path(cx.cwd) / DOCS_TUTORIALS_DIR,
ignore_errors=True)
@task(pre=[clean_tangle])
def tangle_pages(cx):
"""Tangle the docs into the docs testing directory."""
docs_test_dir = Path(cx.cwd) / DOCS_TEST_DIR
os.makedirs(
docs_test_dir,
exist_ok=True,
)
doc_pages = visit_docs()
for page_path in doc_pages:
page_path = Path(page_path)
page_name_parts = page_path.parts[0:-1] + (page_path.stem,)
page_name = Path(*page_name_parts)
page_type = page_path.suffix.strip('.')
page_tangle_dir = docs_test_dir / page_name
# make a directory for this file to have it's own tangle environment
os.makedirs(page_tangle_dir,
exist_ok=False)
# copy the page to its directory
target_orgfile = docs_test_dir / page_name / f"{page_name.stem}.{page_type}"
sh.copyfile(page_path,
target_orgfile)
# then tangle them
tangle_orgfile(cx, target_orgfile)
@task(pre=[clean_tangle])
def tangle_examples(cx):
examples_test_dir = Path(cx.cwd) / DOCS_EXAMPLES_DIR
os.makedirs(
examples_test_dir,
exist_ok=True,
)
for example_dir in visit_examples():
example = example_dir.stem
# ignore if there are any built files at the start location,
# need to build fresh for tests
sh.copytree(
example_dir,
examples_test_dir / example,
ignore=sh.ignore_patterns("_*"),
)
with cx.cd(str(examples_test_dir / example)):
cx.run("inv clean")
cx.run("inv tangle")
@task(pre=[clean_tangle])
def tangle_tutorials(cx):
tutorials_test_dir = Path(cx.cwd) / DOCS_TUTORIALS_DIR
os.makedirs(
tutorials_test_dir,
exist_ok=True,
)
for tutorial_dir in visit_tutorials():
tutorial = tutorial_dir.stem
# ignore if there are any built files at the start location,
# need to build fresh for tests
sh.copytree(
tutorial_dir,
tutorials_test_dir / tutorial,
ignore=sh.ignore_patterns("_*"),
)
with cx.cd(str(tutorials_test_dir / tutorial)):
cx.run("inv clean")
cx.run("inv tangle")
@task(pre=[clean_tangle, tangle_pages, tangle_examples, tangle_tutorials])
def tangle(cx):
"""Tangle the doc pages, examples, and tutorials into the docs testing
directories."""
pass
@task
def new_example(cx, name=None, template="org", env='venv_blank'):
"""Create a new example in the info/examples directory.
Can choose between the following templates:
- 'org' :: org mode notebook
Choose from the following env templates:
- None
- venv_blank
- venv_dev
- conda_blank
- conda_dev
"""
assert name is not None, "Must provide a name"
template_path = Path(f"templates/examples/{template}")
# check if the template exists
if not template_path.is_dir():
raise ValueError(
f"Unkown template {template}. Check the 'templates/examples' folder")
# check if the env exists
if env is not None:
env_tmpl_path = Path(f"templates/envs/{env}")
if not env_tmpl_path.is_dir():
raise ValueError(
f"Unkown env template {env}. Check the 'templates/envs' folder")
target_path = Path(f"info/examples/{name}")
if target_path.exists():
raise FileExistsError(f"Example with name {name} already exists. Not overwriting.")
# copy the template
cx.run(f"cp -r {template_path} {target_path}")
# copy the env
cx.run(f"cp -r {env_tmpl_path} {target_path / 'env'}")
print(f"New example created at: {target_path}")
@task
def new_tutorial(cx, name=None, template="org", env='venv_blank'):
"""Create a new tutorial in the info/tutorials directory.
Can choose between the following templates:
- 'org' :: org mode notebook
- 'jupyter' :: Jupyter notebook
Choose from the following env templates:
- None
- venv_blank
- venv_dev
- conda_blank
- conda_dev
"""
assert name is not None, "Must provide a name"
template_path = Path(f"templates/tutorials/{template}")
# check if the template exists
if not template_path.is_dir():
raise ValueError(
f"Unkown template {template}. Check the 'templates/tutorials' folder")
# check if the env exists
if env is not None:
env_tmpl_path = Path(f"templates/envs/{env}")
if not env_tmpl_path.is_dir():
raise ValueError(
f"Unkown env template {env}. Check the 'templates/envs' folder")
target_path = Path(f"info/tutorials/{name}")
if target_path.exists():
raise FileExistsError(f"Tutorial with name {name} already exists. Not overwriting.")
# copy the template
cx.run(f"cp -r {template_path} {target_path}")
# copy the env
cx.run(f"cp -r {env_tmpl_path} {target_path / 'env'}")
print(f"New tutorial created at: {target_path}")
@task
def test_example(cx,
name=None,
tag=None,
):
"""Test a specific doc example in the current virtual environment."""
if name is None:
examples = visit_examples()
else:
examples = [Path("info/examples") / name]
for example in examples:
path = example
assert path.exists() and path.is_dir(), \
f"Example {example.stem} doesn't exist at {path}"
# TODO: add support for reports and such
print("tag is ignored")
cx.run(f"pytest tests/test_docs/test_examples/test_{example.stem}.py",
warn=True)
@task
def test_examples_nox(cx,
name=None):
"""Test either a specific example when 'name' is given or all of them,
using the nox test matrix specified in the noxfile.py file for
'test_example' session.
"""
if name is None:
examples = [example.stem for example in visit_examples()]
else:
examples = [name]
for example in examples:
cx.run(f"nox -s test_example -- {example}",
warn=True)
@task
def test_tutorial(cx,
name=None,
tag=None,
):
"""Test a specific doc tutorial in the current virtual environment."""
if name is None:
tutorials = visit_tutorials()
else:
tutorials = [Path("info/tutorials") / name]
for tutorial in tutorials:
path = tutorial
assert path.exists() and path.is_dir(), \
f"Tutorial {tutorial} doesn't exist at {path}"
# TODO: add support for reports and such
print("tag is ignored")
cx.run(f"pytest tests/test_docs/test_tutorials/test_{tutorial.stem}.py",
warn=True)
@task
def test_tutorials_nox(cx,
name=None):
"""Test either a specific tutorial when 'name' is given or all of them,
using the nox test matrix specified in the noxfile.py file for
'test_tutorial' session.
"""
if name is None:
tutorials = [tutorial.stem for tutorial in visit_tutorials()]
else:
tutorials = [name]
for tutorial in tutorials:
cx.run(f"nox -s test_tutorial -- {tutorial}")
@task
def test_pages(cx, tag=None):
"""Test the doc pages in the current virtual environment."""
if tag is None:
cx.run("pytest tests/test_docs/test_pages",
warn=True)
else:
cx.run(f"pytest --html=reports/pytest/{tag}/docs/report.html tests/test_docs/test_pages",
warn=True)
@task
def test_pages_nox(cx, tag=None):
"""Test the doc pages in the nox test matrix session."""
cx.run(f"nox -s test_doc_pages")
@task
def pin_example(cx, name=None):
"""Pin the deps for an example or all of them if 'name' is None."""
if name is None:
examples = visit_examples()
else:
        examples = [Path(DOCS_SPEC['EXAMPLES_DIR']) / name]
print(examples)
for example in examples:
path = example / 'env'
assert path.exists() and path.is_dir(), \
f"Env for Example {example} doesn't exist"
cx.run(f"inv env.deps-pin-path -p {path}")
@task
def pin_tutorial(cx, name=None):
if name is None:
tutorials = visit_tutorials()
else:
        tutorials = [Path(DOCS_SPEC['TUTORIALS_DIR']) / name]
for tutorial in tutorials:
path = tutorial / 'env'
assert path.exists() and path.is_dir(), \
f"Env for Tutorial {tutorial} doesn't exist"
cx.run(f"inv env.deps-pin-path -p {path}")
@task
def env_example(cx, name=None):
"""Make a the example env in its local dir."""
if name is None:
        examples = [example.stem for example in visit_examples()]
else:
examples = [name]
for example in examples:
spec_path = Path(DOCS_SPEC['EXAMPLES_DIR']) / example / 'env'
env_path = Path(DOCS_SPEC['EXAMPLES_DIR']) / example / DOCS_SPEC['EXAMPLE_ENV']
assert spec_path.exists() and spec_path.is_dir(), \
f"Tutorial {example} doesn't exist"
cx.run(f"inv env.make-env -s {spec_path} -p {env_path}")
@task
def env_tutorial(cx, name=None):
"""Make a the tutorial env in its local dir."""
if name is None:
        tutorials = [tutorial.stem for tutorial in visit_tutorials()]
else:
tutorials = [name]
for tutorial in tutorials:
spec_path = Path(DOCS_SPEC['TUTORIALS_DIR']) / tutorial / 'env'
env_path = Path(DOCS_SPEC['TUTORIALS_DIR']) / tutorial / DOCS_SPEC['TUTORIAL_ENV']
assert spec_path.exists() and spec_path.is_dir(), \
f"Tutorial {tutorial} doesn't exist"
cx.run(f"inv env.make-env -s {spec_path} -p {env_path}")
|
mit
| -2,381,575,786,931,566,600 | 25.728 | 97 | 0.607603 | false |
mandli/multilayer-examples
|
2d/plane_wave/setrun.py
|
1
|
15949
|
"""
Module to set up run time parameters for Clawpack.
The values set in the function setrun are then written out to data files
that will be read in by the Fortran code.
"""
import numpy as numpy
import clawpack.geoclaw.multilayer.data as multilayer
import clawpack.geoclaw.surge.data as surge
import clawpack.geoclaw.topotools as tt
# Rotation transformations
def transform_c2p(x,y,x0,y0,theta):
return ((x+x0)*numpy.cos(theta) - (y+y0)*numpy.sin(theta),
(x+x0)*numpy.sin(theta) + (y+y0)*numpy.cos(theta))
def transform_p2c(x,y,x0,y0,theta):
return ( x*numpy.cos(theta) + y*numpy.sin(theta) - x0,
-x*numpy.sin(theta) + y*numpy.cos(theta) - y0)
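# Quick sanity sketch (illustrative only): transform_p2c undoes transform_c2p for
# the same offset (x0, y0) and rotation angle theta, e.g.
#   x_p, y_p = transform_c2p(0.3, 0.1, 0.15, 0.0, numpy.pi / 6.0)
#   x_c, y_c = transform_p2c(x_p, y_p, 0.15, 0.0, numpy.pi / 6.0)   # ~ (0.3, 0.1)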
#------------------------------
def setrun(claw_pkg='geoclaw'):
#------------------------------
"""
Define the parameters used for running Clawpack.
INPUT:
claw_pkg expected to be "geoclaw" for this setrun.
OUTPUT:
rundata - object of class ClawRunData
"""
from clawpack.clawutil import data
assert claw_pkg.lower() == 'geoclaw', "Expected claw_pkg = 'geoclaw'"
num_dim = 2
rundata = data.ClawRunData(claw_pkg, num_dim)
#------------------------------------------------------------------
# GeoClaw specific parameters:
#------------------------------------------------------------------
rundata = setgeo(rundata)
rundata.add_data(multilayer.MultilayerData(), 'multilayer_data')
set_multilayer(rundata)
rundata.add_data(surge.FrictionData(),'frictiondata')
set_friction(rundata)
rundata.add_data(surge.SurgeData(),'stormdata')
set_storm(rundata)
#------------------------------------------------------------------
# Standard Clawpack parameters to be written to claw.data:
# (or to amr2ez.data for AMR)
#------------------------------------------------------------------
clawdata = rundata.clawdata # initialized when rundata instantiated
# Set single grid parameters first.
# See below for AMR parameters.
# ---------------
# Spatial domain:
# ---------------
# Number of space dimensions:
clawdata.num_dim = num_dim
# Lower and upper edge of computational domain:
clawdata.lower[0] = -1 # west longitude
clawdata.upper[0] = 2.0 # east longitude
clawdata.lower[1] = -1.0 # south latitude
clawdata.upper[1] = 2.0 # north latitude
# Number of grid cells: Coarsest grid
clawdata.num_cells[0] = 150
clawdata.num_cells[1] = 150
# ---------------
# Size of system:
# ---------------
# Number of equations in the system:
clawdata.num_eqn = 6
# Number of auxiliary variables in the aux array (initialized in setaux)
clawdata.num_aux = 4 + rundata.multilayer_data.num_layers
if rundata.stormdata.storm_type > 0:
clawdata.num_aux += 3
# Index of aux array corresponding to capacity function, if there is one:
clawdata.capa_index = 0
# -------------
# Initial time:
# -------------
clawdata.t0 = 0.0
# Restart from checkpoint file of a previous run?
# Note: If restarting, you must also change the Makefile to set:
# RESTART = True
# If restarting, t0 above should be from original run, and the
# restart_file 'fort.chkNNNNN' specified below should be in
# the OUTDIR indicated in Makefile.
clawdata.restart = False # True to restart from prior results
clawdata.restart_file = 'fort.chk00036' # File to use for restart data
# -------------
# Output times:
#--------------
# Specify at what times the results should be written to fort.q files.
# Note that the time integration stops after the final output time.
# The solution at initial time t0 is always written in addition.
clawdata.output_style = 1
if clawdata.output_style==1:
# Output nout frames at equally spaced times up to tfinal:
clawdata.num_output_times = 40
clawdata.tfinal = 1.0
clawdata.output_t0 = True # output at initial (or restart) time?
elif clawdata.output_style == 2:
# Specify a list of output times.
clawdata.output_times = [0.5, 1.0]
elif clawdata.output_style == 3:
# Output every iout timesteps with a total of ntot time steps:
clawdata.output_step_interval = 1
clawdata.total_steps = 10
clawdata.output_t0 = True
clawdata.output_format = 'ascii' # 'ascii' or 'binary'
clawdata.output_q_components = 'all' # need all
clawdata.output_aux_components = 'all' # eta=h+B is in q
clawdata.output_aux_onlyonce = False # output aux arrays each frame
# ---------------------------------------------------
# Verbosity of messages to screen during integration:
# ---------------------------------------------------
# The current t, dt, and cfl will be printed every time step
# at AMR levels <= verbosity. Set verbosity = 0 for no printing.
# (E.g. verbosity == 2 means print only on levels 1 and 2.)
clawdata.verbosity = 1
# --------------
# Time stepping:
# --------------
# if dt_variable==1: variable time steps used based on cfl_desired,
# if dt_variable==0: fixed time steps dt = dt_initial will always be used.
clawdata.dt_variable = True
# Initial time step for variable dt.
# If dt_variable==0 then dt=dt_initial for all steps:
clawdata.dt_initial = 0.00225
# Max time step to be allowed if variable dt used:
clawdata.dt_max = 1e+99
# Desired Courant number if variable dt used, and max to allow without
# retaking step with a smaller dt:
clawdata.cfl_desired = 0.75
clawdata.cfl_max = 1.0
# clawdata.cfl_desired = 0.45
# clawdata.cfl_max = 0.5
# Maximum number of time steps to allow between output times:
clawdata.steps_max = 5000
# ------------------
# Method to be used:
# ------------------
# Order of accuracy: 1 => Godunov, 2 => Lax-Wendroff plus limiters
clawdata.order = 2
# Use dimensional splitting? (not yet available for AMR)
# 0 or 'unsplit' or none' ==> Unsplit
# 1 or 'increment' ==> corner transport of waves
# 2 or 'all' ==> corner transport of 2nd order corrections too
clawdata.dimensional_split = "unsplit"
# For unsplit method, transverse_waves can be
# 0 or 'none' ==> donor cell (only normal solver used)
# 1 or 'increment' ==> corner transport of waves
# 2 or 'all' ==> corner transport of 2nd order corrections too
clawdata.transverse_waves = 2
# Number of waves in the Riemann solution:
clawdata.num_waves = 6
# List of limiters to use for each wave family:
# Required: len(limiter) == num_waves
# Some options:
# 0 or 'none' ==> no limiter (Lax-Wendroff)
# 1 or 'minmod' ==> minmod
# 2 or 'superbee' ==> superbee
# 3 or 'mc' ==> MC limiter
# 4 or 'vanleer' ==> van Leer
clawdata.limiter = ['mc', 'mc', 'mc', 'mc', 'mc', 'mc']
clawdata.use_fwaves = True # True ==> use f-wave version of algorithms
# Source terms splitting:
# src_split == 0 or 'none' ==> no source term (src routine never called)
# src_split == 1 or 'godunov' ==> Godunov (1st order) splitting used,
# src_split == 2 or 'strang' ==> Strang (2nd order) splitting used, not recommended.
clawdata.source_split = 'godunov'
# --------------------
# Boundary conditions:
# --------------------
# Number of ghost cells (usually 2)
clawdata.num_ghost = 2
# Choice of BCs at xlower and xupper:
# 0 => user specified (must modify bcN.f to use this option)
# 1 => extrapolation (non-reflecting outflow)
# 2 => periodic (must specify this at both boundaries)
# 3 => solid wall for systems where q(2) is normal velocity
clawdata.bc_lower[0] = 'extrap'
clawdata.bc_upper[0] = 'extrap'
clawdata.bc_lower[1] = 'extrap'
clawdata.bc_upper[1] = 'extrap'
# --------------
# Checkpointing:
# --------------
# Specify when checkpoint files should be created that can be
# used to restart a computation.
clawdata.checkpt_style = 0
if clawdata.checkpt_style == 0:
# Do not checkpoint at all
pass
elif clawdata.checkpt_style == 1:
# Checkpoint only at tfinal.
pass
elif clawdata.checkpt_style == 2:
# Specify a list of checkpoint times.
clawdata.checkpt_times = [0.1,0.15]
elif clawdata.checkpt_style == 3:
# Checkpoint every checkpt_interval timesteps (on Level 1)
# and at the final time.
clawdata.checkpt_interval = 5
# ---------------
# AMR parameters:
# ---------------
amrdata = rundata.amrdata
# max number of refinement levels:
amrdata.amr_levels_max = 1
# List of refinement ratios at each level (length at least mxnest-1)
amrdata.refinement_ratios_x = [2,6]
amrdata.refinement_ratios_y = [2,6]
amrdata.refinement_ratios_t = [2,6]
# Specify type of each aux variable in amrdata.auxtype.
# This must be a list of length maux, each element of which is one of:
# 'center', 'capacity', 'xleft', or 'yleft' (see documentation).
amrdata.aux_type = ['center','center','yleft','center','center','center']
# Flag using refinement routine flag2refine rather than richardson error
amrdata.flag_richardson = False # use Richardson?
amrdata.flag2refine = True
# steps to take on each level L between regriddings of level L+1:
amrdata.regrid_interval = 3
# width of buffer zone around flagged points:
# (typically the same as regrid_interval so waves don't escape):
amrdata.regrid_buffer_width = 2
# clustering alg. cutoff for (# flagged pts) / (total # of cells refined)
# (closer to 1.0 => more small grids may be needed to cover flagged cells)
amrdata.clustering_cutoff = 0.700000
# print info about each regridding up to this level:
amrdata.verbosity_regrid = 0
# ----- For developers -----
# Toggle debugging print statements:
amrdata.dprint = False # print domain flags
amrdata.eprint = False # print err est flags
amrdata.edebug = False # even more err est flags
amrdata.gprint = False # grid bisection/clustering
amrdata.nprint = False # proper nesting output
amrdata.pprint = False # proj. of tagged points
amrdata.rprint = False # print regridding summary
amrdata.sprint = False # space/memory output
amrdata.tprint = True # time step reporting each level
amrdata.uprint = False # update/upbnd reporting
# More AMR parameters can be set -- see the defaults in pyclaw/data.py
# ---------------
# Regions:
# ---------------
rundata.regiondata.regions = []
# to specify regions of refinement append lines of the form
# [minlevel,maxlevel,t1,t2,x1,x2,y1,y2]
# ---------------
# Gauges:
# ---------------
rundata.gaugedata.gauges = []
# for gauges append lines of the form [gaugeno, x, y, t1, t2]
gauge_locations = [-0.1,0.0,0.1,0.2,0.3]
for (i,x_c) in enumerate(gauge_locations):
# y0 = (self.run_data.clawdata.yupper - self.run_data.clawdata.ylower) / 2.0
# x_p,y_p = transform_c2p(x_c,0.0,location[0],location[1],angle)
x_p = x_c * numpy.cos(0.0)
y_p = x_c * numpy.sin(0.0)
# print "+=====+"
# print x_c,0.0
# print x_p,y_p
if (rundata.clawdata.lower[0] < x_p < rundata.clawdata.upper[0] and
rundata.clawdata.lower[1] < y_p < rundata.clawdata.upper[1]):
rundata.gaugedata.gauges.append([i, x_p, y_p, 0.0, 1e10])
# print "Gauge %s: (%s,%s)" % (i,x_p,y_p)
# print "+=====+"
return rundata
# end of function setrun
# ----------------------
#-------------------
def setgeo(rundata):
#-------------------
"""
Set GeoClaw specific runtime parameters.
For documentation see ....
"""
try:
geo_data = rundata.geo_data
except:
print "*** Error, this rundata has no geo_data attribute"
raise AttributeError("Missing geo_data attribute")
# == Physics ==
geo_data.gravity = 9.81
geo_data.coordinate_system = 1
geo_data.earth_radius = 6367.5e3
# == Forcing Options
geo_data.coriolis_forcing = False
# == Algorithm and Initial Conditions ==
geo_data.sea_level = 0.0
geo_data.dry_tolerance = 1.e-3
geo_data.friction_forcing = True
geo_data.manning_coefficient = 0.025
geo_data.friction_depth = 1e6
# Refinement settings
refinement_data = rundata.refinement_data
refinement_data.variable_dt_refinement_ratios = True
refinement_data.wave_tolerance = 1.e-1
refinement_data.deep_depth = 1e2
refinement_data.max_level_deep = 3
# == settopo.data values ==
topo_data = rundata.topo_data
# for topography, append lines of the form
# [topotype, minlevel, maxlevel, t1, t2, fname]
topo_data.topofiles.append([2, 1, 5, 0.0, 1e10, 'topo.tt2'])
# == setdtopo.data values ==
dtopo_data = rundata.dtopo_data
# for moving topography, append lines of the form : (<= 1 allowed for now!)
# [topotype, minlevel,maxlevel,fname]
return rundata
# end of function setgeo
# ----------------------
def set_friction(rundata):
data = rundata.frictiondata
# Variable friction
data.variable_friction = False
def set_multilayer(rundata):
data = rundata.multilayer_data
# Physics parameters
data.num_layers = 2
data.rho = [0.9,1.0]
data.eta = [0.0,-0.6]
# Algorithm parameters
data.eigen_method = 2
data.inundation_method = 2
data.richardson_tolerance = 0.95
# data.wave_tolerance = [0.1,0.1]
# data.dry_limit = True
# Set special initial conditions for qinit
rundata.replace_data('qinit_data', multilayer.QinitMultilayerData())
rundata.qinit_data.qinit_type = 6
rundata.qinit_data.epsilon = 0.02
rundata.qinit_data.angle = 0.0
rundata.qinit_data.sigma = 0.02
rundata.qinit_data.wave_family = 4
rundata.qinit_data.init_location = [-0.1,0.0]
def set_storm(rundata):
# No storm
rundata.stormdata.storm_type = 0
def bathy_step(x, y, location=0.15, angle=0.0, left=-1.0, right=-0.2):
x_c,y_c = transform_p2c(x, y, location, 0.0, angle)
return ((x_c <= 0.0) * left
+ (x_c > 0.0) * right)
def write_topo_file(run_data, out_file, **kwargs):
# Make topography
topo_func = lambda x, y: bathy_step(x, y, **kwargs)
topo = tt.Topography(topo_func=topo_func)
topo.x = numpy.linspace(run_data.clawdata.lower[0],
run_data.clawdata.upper[0],
run_data.clawdata.num_cells[0] + 8)
topo.y = numpy.linspace(run_data.clawdata.lower[1],
run_data.clawdata.upper[1],
run_data.clawdata.num_cells[1] + 8)
topo.write(out_file)
# Write out simple bathy geometry file for communication to the plotting
with open("./bathy_geometry.data", 'w') as bathy_geometry_file:
if kwargs.has_key("location"):
location = kwargs['location']
else:
location = 0.15
if kwargs.has_key("angle"):
angle = kwargs['angle']
else:
angle = 0.0
bathy_geometry_file.write("%s\n%s" % (location, angle) )
if __name__ == '__main__':
# Set up run-time parameters and write all data files.
import sys
if len(sys.argv) == 2:
rundata = setrun(sys.argv[1])
else:
rundata = setrun()
rundata.write()
write_topo_file(rundata, 'topo.tt2')
|
mit
| 5,311,222,775,262,506,000 | 30.459566 | 92 | 0.589818 | false |
ovpn-to/oVPN.to-Client-Software
|
else/python/hooks.py
|
1
|
17289
|
# -*- coding: utf-8 -*-
#
# Hooks module for py2exe.
# Inspired by cx_freeze's hooks.py, which is:
#
# Copyright © 2007-2013, Anthony Tuininga.
# Copyright © 2001-2006, Computronix (Canada) Ltd., Edmonton, Alberta, Canada.
# All rights reserved.
#
import os, sys
# Exclude modules that the standard library imports (conditionally),
# but which are not present on windows.
#
# _memimporter can be excluded because it is built into the run-stub.
windows_excludes = """
_curses
_dummy_threading
_emx_link
_gestalt
_posixsubprocess
ce
clr
console
fcntl
grp
java
org
os2
posix
pwd
site
termios
vms_lib
_memimporter
""".split()
def init_finder(finder):
# what about renamed functions, like urllib.pathname2url?
#
# We should use ignore() for Python 2 names so that my py2to3
# importhook works. For modules that are not present on Windows,
# we should probably use excludes.append()
finder.excludes.extend(windows_excludes)
# python2 modules are ignored (but not excluded)
finder.ignore("BaseHTTPServer")
finder.ignore("ConfigParser")
finder.ignore("IronPython")
finder.ignore("SimpleHTTPServer")
finder.ignore("StringIO")
finder.ignore("__builtin__")
finder.ignore("_winreg")
finder.ignore("cPickle")
finder.ignore("cStringIO")
finder.ignore("commands")
finder.ignore("compiler")
finder.ignore("copy_reg")
finder.ignore("dummy_thread")
finder.ignore("future_builtins")
finder.ignore("htmlentitydefs")
finder.ignore("httplib")
finder.ignore("md5")
finder.ignore("new")
finder.ignore("thread")
finder.ignore("unittest2")
finder.ignore("urllib2")
finder.ignore("urlparse")
def hook_pycparser(finder, module):
"""pycparser needs lextab.py and yacctab.py which are not picked
up automatically. Make sure the complete package is included;
otherwise the exe-files may create yacctab.py and lextab.py when
they are run.
"""
finder.import_package_later("pycparser")
def hook_pycparser__build_tables(finder, module):
finder.ignore("lextab")
finder.ignore("yacctab")
finder.ignore("_ast_gen")
finder.ignore("c_ast")
def hook_pycparser_ply(finder, module):
finder.ignore("lex")
finder.ignore("ply")
def hook_OpenSSL(finder, module):
"""OpenSSL needs the cryptography package."""
finder.import_package_later("cryptography")
def hook_cffi_cparser(finder, module):
finder.ignore("cffi._pycparser")
def hook_cffi(finder, module):
# We need to patch two methods in the
# cffi.vengine_cpy.VCPythonEngine class so that cffi libraries
# work from within zip-files.
finder.add_bootcode("""
def patch_cffi():
def find_module(self, module_name, path, so_suffixes):
import sys
name = "%s.%s" % (self.verifier.ext_package, module_name)
try:
__import__(name)
except ImportError:
return None
self.__module = mod = sys.modules[name]
return mod.__file__
def load_library(self):
from cffi import ffiplatform
import sys
# XXX review all usages of 'self' here!
# import it as a new extension module
module = self.__module
#
# call loading_cpy_struct() to get the struct layout inferred by
# the C compiler
self._load(module, 'loading')
#
# the C code will need the <ctype> objects. Collect them in
# order in a list.
revmapping = dict([(value, key)
for (key, value) in self._typesdict.items()])
lst = [revmapping[i] for i in range(len(revmapping))]
lst = list(map(self.ffi._get_cached_btype, lst))
#
# build the FFILibrary class and instance and call _cffi_setup().
# this will set up some fields like '_cffi_types', and only then
# it will invoke the chained list of functions that will really
# build (notably) the constant objects, as <cdata> if they are
# pointers, and store them as attributes on the 'library' object.
class FFILibrary(object):
_cffi_python_module = module
_cffi_ffi = self.ffi
_cffi_dir = []
def __dir__(self):
return FFILibrary._cffi_dir + list(self.__dict__)
library = FFILibrary()
if module._cffi_setup(lst, ffiplatform.VerificationError, library):
import warnings
warnings.warn("reimporting %r might overwrite older definitions"
% (self.verifier.get_module_name()))
#
# finally, call the loaded_cpy_xxx() functions. This will perform
# the final adjustments, like copying the Python->C wrapper
# functions from the module to the 'library' object, and setting
# up the FFILibrary class with properties for the global C variables.
self._load(module, 'loaded', library=library)
module._cffi_original_ffi = self.ffi
module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions
return library
from cffi.vengine_cpy import VCPythonEngine
VCPythonEngine.find_module = find_module
VCPythonEngine.load_library = load_library
patch_cffi()
del patch_cffi
""")
def hook_multiprocessing(finder, module):
module.__globalnames__.add("AuthenticationError")
module.__globalnames__.add("BufferTooShort")
module.__globalnames__.add("Manager")
module.__globalnames__.add("TimeoutError")
module.__globalnames__.add("cpu_count")
module.__globalnames__.add("current_process")
module.__globalnames__.add("get_context")
module.__globalnames__.add("get_start_method")
module.__globalnames__.add("set_start_method")
module.__globalnames__.add("JoinableQueue")
module.__globalnames__.add("Lock")
module.__globalnames__.add("Process")
module.__globalnames__.add("Queue")
module.__globalnames__.add("freeze_support")
def import_psutil(finder, module):
"""Exclude stuff for other operating systems."""
finder.excludes.append("_psutil_bsd")
finder.excludes.append("_psutil_linux")
finder.excludes.append("_psutil_osx")
finder.excludes.append("_psutil_posix")
finder.excludes.append("_psutil_sunos")
def hook_PIL(finder, module):
# c:\Python33-64\lib\site-packages\PIL
"""Pillow loads plugins"""
# Exclude python 2 imports
finder.excludes.append("Tkinter")
finder.import_package_later("PIL")
def hook__socket(finder, module):
"""
_socket.pyd uses the 'idna' encoding; and that requires
'unicodedata.pyd'.
"""
finder.import_hook("encodings.idna")
finder.import_hook("unicodedata")
def hook_pyreadline(finder, module):
"""
"""
finder.ignore("IronPythonConsole")
finder.excludes.append("StringIO") # in pyreadline.py3k_compat
finder.ignore("System")
finder.excludes.append("sets")
finder.ignore("startup")
def hook_xml_etree_ElementTree(finder, module):
"""ElementC14N is an optional extension. Ignore if it is not
found.
"""
finder.ignore("ElementC14N")
def hook_urllib_request(finder, module):
"""urllib.request imports _scproxy on darwin
"""
finder.excludes.append("_scproxy")
def hook_pythoncom(finder, module):
"""pythoncom is a Python extension module with .dll extension,
usually in the windows system directory as pythoncom3X.dll.
"""
import pythoncom
finder.add_dll(pythoncom.__file__)
def hook_pywintypes(finder, module):
"""pywintypes is a Python extension module with .dll extension,
usually in the windows system directory as pywintypes3X.dll.
"""
import pywintypes
finder.add_dll(pywintypes.__file__)
def hook_win32com(finder, module):
"""The win32com package extends it's __path__ at runtime.
"""
finder.import_hook("pywintypes")
finder.import_hook("pythoncom")
import win32com
module.__path__ = win32com.__path__
def hook_win32api(finder, module):
"""win32api.FindFiles(...) needs this."""
#finder.import_hook("pywintypes")
finder.import_hook("win32timezone")
def hook_tkinter(finder, module):
"""Recusively copy tcl and tk directories.
"""
# It probably doesn't make sense to exclude tix from the tcl distribution,
# and only copy it when tkinter.tix is imported...
import tkinter._fix as fix
tcl_dir = os.path.normpath(os.path.join(fix.tcldir, ".."))
assert os.path.isdir(tcl_dir)
finder.add_datadirectory("tcl", tcl_dir, recursive=True)
finder.set_min_bundle("tkinter", 2)
def hook_six(finder, module):
"""six.py has an object 'moves'. This allows to import
modules/packages via attribute access under new names.
We install a fake module named 'six.moves' which simulates this
behaviour.
"""
class SixImporter(type(module)):
"""Simulate six.moves.
        Import renamed modules when retrieved as attributes.
"""
__code__ = None
def __init__(self, mf, *args, **kw):
import six
self.__moved_modules = {item.name: item.mod
for item in six._moved_attributes
if isinstance(item, six.MovedModule)}
super().__init__(*args, **kw)
self.__finder = mf
def __getattr__(self, name):
if name in self.__moved_modules:
renamed = self.__moved_modules[name]
self.__finder.safe_import_hook(renamed, caller=self)
mod = self.__finder.modules[renamed]
# add the module again with the renamed name:
self.__finder._add_module("six.moves." + name, mod)
return mod
else:
raise AttributeError(name)
m = SixImporter(finder,
None, "six.moves", finder._optimize)
finder._add_module("six.moves", m)
def hook_matplotlib(finder, module):
"""matplotlib requires data files in a 'mpl-data' subdirectory in
the same directory as the executable.
"""
# c:\Python33\lib\site-packages\matplotlib
mpl_data_path = os.path.join(os.path.dirname(module.__loader__.path),
"mpl-data")
finder.add_datadirectory("mpl-data", mpl_data_path, recursive=True)
finder.excludes.append("wx")
# XXX matplotlib requires tkinter which modulefinder does not
# detect because of the six bug.
def hook_numpy(finder, module):
"""numpy for Python 3 still tries to import some Python 2 modules;
exclude them."""
# I'm not sure if we can safely exclude these:
finder.ignore("Numeric")
finder.ignore("numarray")
finder.ignore("numpy_distutils")
finder.ignore("setuptools")
finder.ignore("Pyrex")
finder.ignore("nose")
finder.ignore("scipy")
def hook_nose(finder, module):
finder.ignore("IronPython")
finder.ignore("cStringIO")
finder.ignore("unittest2")
def hook_sysconfig(finder, module):
finder.ignore("_sysconfigdata")
def hook_numpy_random_mtrand(finder, module):
"""the numpy.random.mtrand module is an extension module and the
numpy.random module imports * from this module; define the list of
global names available to this module in order to avoid spurious
errors about missing modules.
"""
module.__globalnames__.add('RandomState')
module.__globalnames__.add('beta')
module.__globalnames__.add('binomial')
module.__globalnames__.add('bytes')
module.__globalnames__.add('chisquare')
module.__globalnames__.add('choice')
module.__globalnames__.add('dirichlet')
module.__globalnames__.add('exponential')
module.__globalnames__.add('f')
module.__globalnames__.add('gamma')
module.__globalnames__.add('geometric')
module.__globalnames__.add('get_state')
module.__globalnames__.add('gumbel')
module.__globalnames__.add('hypergeometric')
module.__globalnames__.add('laplace')
module.__globalnames__.add('logistic')
module.__globalnames__.add('lognormal')
module.__globalnames__.add('logseries')
module.__globalnames__.add('multinomial')
module.__globalnames__.add('multivariate_normal')
module.__globalnames__.add('negative_binomial')
module.__globalnames__.add('noncentral_chisquare')
module.__globalnames__.add('noncentral_f')
module.__globalnames__.add('normal')
module.__globalnames__.add('np')
module.__globalnames__.add('operator')
module.__globalnames__.add('pareto')
module.__globalnames__.add('permutation')
module.__globalnames__.add('poisson')
module.__globalnames__.add('power')
module.__globalnames__.add('rand')
module.__globalnames__.add('randint')
module.__globalnames__.add('randn')
module.__globalnames__.add('random_integers')
module.__globalnames__.add('random_sample')
module.__globalnames__.add('rayleigh')
module.__globalnames__.add('seed')
module.__globalnames__.add('set_state')
module.__globalnames__.add('shuffle')
module.__globalnames__.add('standard_cauchy')
module.__globalnames__.add('standard_exponential')
module.__globalnames__.add('standard_gamma')
module.__globalnames__.add('standard_normal')
module.__globalnames__.add('standard_t')
module.__globalnames__.add('triangular')
module.__globalnames__.add('uniform')
module.__globalnames__.add('vonmises')
module.__globalnames__.add('wald')
module.__globalnames__.add('weibull')
module.__globalnames__.add('zipf')
def hook_numpy_distutils(finder, module):
"""In a 'if sys.version_info[0] < 3:' block numpy.distutils does
an implicit relative import: 'import __config__'. This will not
work in Python3 so ignore it.
"""
finder.excludes.append("__config__")
def hook_numpy_f2py(finder, module):
""" numpy.f2py tries to import __svn_version__. Ignore when his fails.
"""
finder.excludes.append("__svn_version__")
def hook_numpy_core_umath(finder, module):
"""the numpy.core.umath module is an extension module and the numpy module
imports * from this module; define the list of global names available
to this module in order to avoid spurious errors about missing
modules"""
module.__globalnames__.add("add")
module.__globalnames__.add("absolute")
module.__globalnames__.add("arccos")
module.__globalnames__.add("arccosh")
module.__globalnames__.add("arcsin")
module.__globalnames__.add("arcsinh")
module.__globalnames__.add("arctan")
module.__globalnames__.add("arctanh")
module.__globalnames__.add("bitwise_and")
module.__globalnames__.add("bitwise_or")
module.__globalnames__.add("bitwise_xor")
module.__globalnames__.add("ceil")
module.__globalnames__.add("conjugate")
module.__globalnames__.add("cosh")
module.__globalnames__.add("divide")
module.__globalnames__.add("exp")
module.__globalnames__.add("e")
module.__globalnames__.add("fabs")
module.__globalnames__.add("floor")
module.__globalnames__.add("floor_divide")
module.__globalnames__.add("fmod")
module.__globalnames__.add("geterrobj")
module.__globalnames__.add("greater")
module.__globalnames__.add("hypot")
module.__globalnames__.add("invert")
module.__globalnames__.add("isfinite")
module.__globalnames__.add("isinf")
module.__globalnames__.add("isnan")
module.__globalnames__.add("less")
module.__globalnames__.add("left_shift")
module.__globalnames__.add("log")
module.__globalnames__.add("logical_and")
module.__globalnames__.add("logical_not")
module.__globalnames__.add("logical_or")
module.__globalnames__.add("logical_xor")
module.__globalnames__.add("maximum")
module.__globalnames__.add("minimum")
module.__globalnames__.add("multiply")
module.__globalnames__.add("negative")
module.__globalnames__.add("not_equal")
module.__globalnames__.add("power")
module.__globalnames__.add("remainder")
module.__globalnames__.add("right_shift")
module.__globalnames__.add("sign")
module.__globalnames__.add("signbit")
module.__globalnames__.add("sinh")
module.__globalnames__.add("sqrt")
module.__globalnames__.add("tan")
module.__globalnames__.add("tanh")
module.__globalnames__.add("true_divide")
def hook_numpy_core_numerictypes(finder, module):
"""the numpy.core.numerictypes module adds a number of items to itself
dynamically; define these to avoid spurious errors about missing
modules"""
module.__globalnames__.add("bool_")
module.__globalnames__.add("cdouble")
module.__globalnames__.add("complexfloating")
module.__globalnames__.add("csingle")
module.__globalnames__.add("double")
module.__globalnames__.add("longdouble")
module.__globalnames__.add("float32")
module.__globalnames__.add("float64")
module.__globalnames__.add("float_")
module.__globalnames__.add("inexact")
module.__globalnames__.add("integer")
module.__globalnames__.add("intc")
module.__globalnames__.add("int32")
module.__globalnames__.add("number")
module.__globalnames__.add("single")
def hook_numpy_core(finder, module):
finder.ignore("numpy.core._dotblas")
|
gpl-2.0
| 4,706,690,525,302,096,000 | 34.79089 | 81 | 0.64939 | false |
mekkablue/ShowCrosshair
|
ShowCrosshair.glyphsReporter/Contents/Resources/plugin.py
|
1
|
11905
|
# encoding: utf-8
from __future__ import division, print_function, unicode_literals
#######################################################################################
#
# Reporter Plugin
#
# Read the docs:
# https://github.com/schriftgestalt/GlyphsSDK/tree/master/Python%20Templates/Reporter
#
#######################################################################################
import objc
from GlyphsApp import *
from GlyphsApp.plugins import *
from math import radians, tan
class ShowCrosshair(ReporterPlugin):
@objc.python_method
def settings(self):
self.menuName = Glyphs.localize({
'en': u'Crosshair',
'de': u'Fadenkreuz',
'es': u'cruz',
'fr': u'réticule',
'jp': u'カーソル照準',
'zh': u'✨准星线',
})
Glyphs.registerDefault("com.mekkablue.ShowCrosshair.universalCrosshair", 1)
Glyphs.registerDefault("com.mekkablue.ShowCrosshair.showCoordinates", 0)
Glyphs.registerDefault("com.mekkablue.ShowCrosshair.showThickness", 0)
Glyphs.registerDefault("com.mekkablue.ShowCrosshair.fontSize", 10.0)
Glyphs.registerDefault("com.mekkablue.ShowCrosshair.ignoreItalicAngle", 0)
self.generalContextMenus = self.buildContextMenus()
@objc.python_method
def buildContextMenus(self, sender=None):
return [
{
'name': Glyphs.localize({
'en': u"Crosshair Options:",
'de': u"Fadenkreuz-Einstellungen:",
'es': u"Opciones de la cruz:",
'fr': u"Options pour le réticule:",
'jp': u"照準プラグインオプション",
'zh': u"准星线选项",
}),
'action': None,
},
{
'name': Glyphs.localize({
'en': u"Always Show Crosshair",
'de': u"Fadenkreuz immer anzeigen",
'es': u"Siempre mostrar la cruz",
'fr': u"Toujours afficher le réticule",
'jp': u"照準を常に表示",
'zh': u"始终显示准星线",
}),
'action': self.toggleUniversalCrosshair,
'state': Glyphs.defaults["com.mekkablue.ShowCrosshair.universalCrosshair"],
},
{
'name': Glyphs.localize({
'en': u"Show Coordinates",
'de': u"Koordinaten anzeigen",
'es': u"Mostrar coordinados",
'fr': u"Afficher les coordonnées",
'jp': u"マウスの座標を左下に表示",
'zh': u"在左下角显示坐标值",
}),
'action': self.toggleShowCoordinates,
'state': Glyphs.defaults["com.mekkablue.ShowCrosshair.showCoordinates"],
},
{
'name': Glyphs.localize({
'en': u"Show Thicknesses",
'de': u"Dicken anzeigen",
'es': u"Mostrar grosores",
'fr': u"Afficher les épaisseurs",
'jp': u"縦横の太さを表示",
'zh': u"显示纵横坐标差",
}),
'action': self.toggleShowThickness,
'state': Glyphs.defaults["com.mekkablue.ShowCrosshair.showThickness"],
},
]
@objc.python_method
def drawCircle(self, center, size):
radius = size*0.5
circle = NSBezierPath.bezierPathWithRoundedRect_xRadius_yRadius_(
NSMakeRect(center.x-radius, center.y-radius, size, size),
radius,
radius,
)
circle.fill()
@objc.python_method
def foreground(self, layer):
toolEventHandler = self.controller.view().window().windowController().toolEventHandler()
toolIsDragging = toolEventHandler.dragging()
toolIsTextTool = toolEventHandler.className() == "GlyphsToolText"
shouldDisplay = (Glyphs.boolDefaults["com.mekkablue.ShowCrosshair.universalCrosshair"] and not toolIsTextTool) or toolIsDragging
if Glyphs.boolDefaults["com.mekkablue.ShowCrosshair.showThickness"] and shouldDisplay:
font = Glyphs.font
master = layer.associatedFontMaster()
scale = self.getScale()
mousePosition = self.mousePosition()
# intersection markers:
handleSize = self.getHandleSize() * scale**-0.7
try:
NSColor.separatorColor().set()
except:
NSColor.systemGrayColor().set() # pre 10.14
# stem thickness horizontal slice
sliceY = mousePosition.y
minX = -1000*(font.upm/1000.0)
maxX = layer.width + 1000*(font.upm/1000.0)
prev = minX
xs = {}
intersections = layer.calculateIntersectionsStartPoint_endPoint_decompose_(
(minX,sliceY),
(maxX,sliceY),
True,
)
for inter in intersections[1:-1]:
self.drawCircle(inter, handleSize)
if prev != minX:
xs[(inter.x-prev)/2+prev] = inter.x-prev
prev = inter.x
# stem thickness vertical slice
sliceX = mousePosition.x
minY = master.descender - 1000*(font.upm/1000.0)
maxY = master.ascender + 1000*(font.upm/1000.0)
prev = minY
ys = {}
italicAngle=0
try:
# GLYPHS 3
storedAngle = master.defaultMetricForKey_("italic angle")
if storedAngle < 1000000: # not-found constant
italicAngle = storedAngle
except:
# GLYPHS 2
italicAngle = master.italicAngle
verticalIntersections = layer.calculateIntersectionsStartPoint_endPoint_decompose_(
self.italicize( NSPoint(sliceX,minY), italicAngle=italicAngle, pivotalY=sliceY ),
self.italicize( NSPoint(sliceX,maxY), italicAngle=italicAngle, pivotalY=sliceY ),
True,
)
for inter in verticalIntersections[1:-1]:
self.drawCircle(inter, handleSize)
if prev != minY:
ys[(inter.y-prev)/2+prev] = inter.y-prev
prev = inter.y
# set font attributes
fontSize = Glyphs.defaults["com.mekkablue.ShowCrosshair.fontSize"]
thicknessFontAttributes = {
NSFontAttributeName: NSFont.monospacedDigitSystemFontOfSize_weight_(fontSize/scale,0.0),
NSForegroundColorAttributeName: NSColor.textColor()
}
# number badges on vertical slice:
for key in ys:
item = ys[key]
item = round(item, 1)
if item != 0:
x, y = sliceX, key
# adjust x for italic angle if necessary:
if italicAngle:
x = self.italicize( NSPoint(x,y), italicAngle=italicAngle, pivotalY=sliceY ).x
self.drawThicknessBadge(scale, fontSize, x, y, item)
self.drawThicknessText(thicknessFontAttributes, x, y, item)
# number badges on horizontal slice:
for key in xs:
item = xs[key]
item = round(item, 1)
if item != 0:
x, y = key, sliceY
self.drawThicknessBadge(scale, fontSize, x, y, item)
self.drawThicknessText(thicknessFontAttributes, x, y, item)
@objc.python_method
def italicize( self, thisPoint, italicAngle=0.0, pivotalY=0.0 ):
"""
Returns the italicized position of an NSPoint 'thisPoint'
for a given angle 'italicAngle' and the pivotal height 'pivotalY',
around which the italic slanting is executed, usually half x-height.
Usage: myPoint = italicize(myPoint,10,xHeight*0.5)
"""
if Glyphs.boolDefaults["com.mekkablue.ShowCrosshair.ignoreItalicAngle"]:
return thisPoint
else:
x = thisPoint.x
yOffset = thisPoint.y - pivotalY # calculate vertical offset
italicAngle = radians( italicAngle ) # convert to radians
tangens = tan( italicAngle ) # math.tan needs radians
horizontalDeviance = tangens * yOffset # vertical distance from pivotal point
x += horizontalDeviance # x of point that is yOffset from pivotal point
return NSPoint( x, thisPoint.y )
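	# Worked sketch (hypothetical numbers): with italicAngle=10 and pivotalY=250,
	# a point at (100, 350) sits 100 units above the pivot, so its x is shifted by
	# tan(radians(10)) * 100, i.e. about 17.6, and roughly NSPoint(117.6, 350) comes back.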
@objc.python_method
def background(self, layer):
toolEventHandler = self.controller.view().window().windowController().toolEventHandler()
toolIsDragging = toolEventHandler.dragging()
toolIsTextTool = toolEventHandler.className() == "GlyphsToolText"
crossHairCenter = self.mousePosition()
shouldDisplay = (Glyphs.boolDefaults["com.mekkablue.ShowCrosshair.universalCrosshair"] and not toolIsTextTool) or toolIsDragging
if crossHairCenter.x < NSNotFound and shouldDisplay:
# determine italic angle:
italicAngle = 0.0
try:
thisMaster = layer.associatedFontMaster()
italicAngle = thisMaster.italicAngle
except:
pass
# set up bezierpath:
offset = 1000000
NSColor.disabledControlTextColor().set() # subtle grey
crosshairPath = NSBezierPath.bezierPath()
crosshairPath.setLineWidth_( 0.75 / self.getScale() )
# vertical line:
crosshairPath.moveToPoint_( self.italicize( NSPoint(crossHairCenter.x,-offset), italicAngle=italicAngle, pivotalY=crossHairCenter.y) )
crosshairPath.lineToPoint_( self.italicize( NSPoint(crossHairCenter.x,+offset), italicAngle=italicAngle, pivotalY=crossHairCenter.y) )
# horizontal line:
crosshairPath.moveToPoint_( NSPoint(-offset,crossHairCenter.y) )
crosshairPath.lineToPoint_( NSPoint(+offset,crossHairCenter.y) )
# execute stroke:
crosshairPath.stroke()
def mousePosition(self):
view = self.controller.graphicView()
mousePosition = view.getActiveLocation_(Glyphs.currentEvent())
return mousePosition
@objc.python_method
def foregroundInViewCoords(self, layer=None):
toolEventHandler = self.controller.view().window().windowController().toolEventHandler()
toolIsTextTool = toolEventHandler.className() == "GlyphsToolText"
if Glyphs.boolDefaults["com.mekkablue.ShowCrosshair.showCoordinates"] and not toolIsTextTool:
mousePosition = self.mousePosition()
coordinateText = "%4d, %4d" % (
round(mousePosition.x),
round(mousePosition.y)
)
fontSize = Glyphs.defaults["com.mekkablue.ShowCrosshair.fontSize"]
fontAttributes = {
#NSFontAttributeName: NSFont.labelFontOfSize_(10.0),
NSFontAttributeName: NSFont.monospacedDigitSystemFontOfSize_weight_(fontSize,0.0),
NSForegroundColorAttributeName: NSColor.textColor()
}
displayText = NSAttributedString.alloc().initWithString_attributes_(
coordinateText,
fontAttributes
)
textAlignment = 0 # top left: 6, top center: 7, top right: 8, center left: 3, center center: 4, center right: 5, bottom left: 0, bottom center: 1, bottom right: 2
#font = layer.parent.parent
lowerLeftCorner = self.controller.viewPort.origin
displayText.drawAtPoint_alignment_(lowerLeftCorner, textAlignment)
@objc.python_method
def drawThicknessBadge(self, scale, fontSize, x, y, value):
width = len(str(value)) * fontSize * 0.7 / scale
rim = fontSize * 0.3 / scale
badge = NSRect()
badge.origin = NSPoint( x-width/2, y-fontSize/2-rim )
badge.size = NSSize( width, fontSize + rim*2 )
NSColor.textBackgroundColor().set()
NSBezierPath.bezierPathWithRoundedRect_xRadius_yRadius_( badge, fontSize*0.5, fontSize*0.5 ).fill()
@objc.python_method
def drawThicknessText(self, thicknessFontAttributes, x, y, item):
displayText = NSAttributedString.alloc().initWithString_attributes_(
str(item),
thicknessFontAttributes
)
displayText.drawAtPoint_alignment_((x, y), 4)
def mouseDidMove_(self, notification):
if hasattr(self, 'controller') and self.controller:
self.controller.redraw()
else:
Glyphs.redraw()
def willActivate(self):
Glyphs.addCallback(self.mouseDidMove_, MOUSEMOVED)
def willDeactivate(self):
try:
Glyphs.removeCallback(self.mouseDidMove_, MOUSEMOVED)
except:
import traceback
NSLog(traceback.format_exc())
def toggleUniversalCrosshair(self):
self.toggleSetting("universalCrosshair")
def toggleShowCoordinates(self):
self.toggleSetting("showCoordinates")
def toggleShowThickness(self):
self.toggleSetting("showThickness")
@objc.python_method
def toggleSetting(self, prefName):
pref = "com.mekkablue.ShowCrosshair.%s" % prefName
oldSetting = Glyphs.boolDefaults[pref]
Glyphs.defaults[pref] = int(not oldSetting)
self.generalContextMenus = self.buildContextMenus()
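	# Toggle sketch: toggleSetting("showThickness") reads the current boolean for
	# com.mekkablue.ShowCrosshair.showThickness, writes back its inverse as 0/1,
	# and rebuilds the context menus so the checkmark state stays in sync.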
# def addMenuItemsForEvent_toMenu_(self, event, contextMenu):
# '''
# The event can tell you where the user had clicked.
# '''
# try:
#
# if self.generalContextMenus:
# setUpMenuHelper(contextMenu, self.generalContextMenus, self)
#
# newSeparator = NSMenuItem.separatorItem()
# contextMenu.addItem_(newSeparator)
#
# contextMenus = self.conditionalContextMenus()
# if contextMenus:
# setUpMenuHelper(contextMenu, contextMenus, self)
#
# except:
# import traceback
# NSLog(traceback.format_exc())
@objc.python_method
def __file__(self):
"""Please leave this method unchanged"""
return __file__
|
apache-2.0
| -7,762,868,965,284,269,000 | 32.369318 | 165 | 0.698876 | false |
coderum/crony
|
crony/crony.py
|
1
|
3147
|
import click
from . import utils, views
from . import parsers as p
from .crontab import Crontab
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
@click.group()
def crony():
pass
@crony.command()
@click.option('--limit', default=0,
help="Number of crons to display, displays all by default")
@click.argument('host', default='localhost',
callback=p.parse_hostname_callback,
required=False)
def ls(limit, host):
"""List cron jobs on a remote or local system"""
ct = Crontab(**host)
cps = ct.list()
jobs = p.parse_file(cronfd=cps.stdout, num_lines=limit)
if not jobs:
return
# create the table and display it
click.echo(views.horizontal_table(jobs))
@crony.command()
@click.option('--ids', default="0", callback=p.parse_range_callback,
help='IDs of jobs to be deleted.')
@click.argument('dst_host', default='localhost',
callback=p.parse_hostname_callback,
required=False)
def rm(ids, dst_host):
"""Delete cron jobs from a remote or local system"""
confirm_msg = "Delete all jobs at: %s? (yes/no)" % (dst_host['hostname'],)
# delete entire crontab
if 0 in ids and (click.prompt(confirm_msg) == 'yes'):
dst_ct = Crontab(**dst_host)
dst_ps = dst_ct.remove()
for out in dst_ps.stdout:
click.echo(out)
click.echo("Crontab deleted")
else:
if not click.prompt("Delete selected jobs? (yes/no)") == "yes":
return # exit if not confirmed
# delete selected jobs
click.echo("Fetching remote jobs")
dst_ct = Crontab(**dst_host)
dst_ps = dst_ct.list()
dst_jobs = p.parse_file(cronfd=dst_ps.stdout)
rm_jobs = dst_jobs.in_ids(ids)
job_str = StringIO()
for rm_job in rm_jobs:
dst_jobs.remove(rm_job)
utils.write_jobs(dst_jobs, job_str)
rmt_ct = Crontab(**dst_host)
        # if no jobs remain, remove the whole crontab; otherwise install the reduced list
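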
click.echo("Applying changes")
if len(dst_jobs):
rmt_ps = rmt_ct.copy_new(job_str.getvalue())
else:
rmt_ps = rmt_ct.remove()
for out in rmt_ps.stdout:
click.echo(out)
click.echo("Selected jobs deleted")
@crony.command()
@click.option('--ids', callback=p.parse_range_callback,
help="IDs of crons to be deleted.")
@click.argument('src_host', nargs=1, callback=p.parse_hostname_callback)
@click.argument('dst_host', nargs=1, callback=p.parse_hostname_callback)
def cp(ids, src_host, dst_host):
"""Copy cron jobs across servers"""
src_ct = Crontab(**src_host)
src_ps = src_ct.list()
src_jobs = p.parse_file(cronfd=src_ps.stdout).in_ids(ids)
job_str = StringIO()
utils.write_jobs(src_jobs, job_str)
dst_ct = Crontab(**dst_host)
rmt_pd = dst_ct.append(job_str.getvalue())
for out in rmt_pd.stdout:
click.echo(out)
click.echo("Done, copied: " + str(len(ids)))
if __name__ == '__main__':
crony()
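# Usage sketch (hostnames are placeholders; the exact HOST and --ids syntax is whatever
# parse_hostname_callback and parse_range_callback accept):
#   crony ls user@web01 --limit 10          # list the first 10 jobs on web01
#   crony rm user@web01 --ids 2             # delete selected jobs after confirmation
#   crony cp user@web01 user@web02 --ids 1  # copy selected jobs between hosts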
|
gpl-3.0
| -5,281,866,693,860,838,000 | 29.553398 | 78 | 0.593581 | false |
denismakogon/trove-guestagent
|
trove_guestagent/common/context.py
|
1
|
1865
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Simple class that stores security context information in the web request.
Projects should subclass this class if they wish to enhance the request
context or provide additional information in their specific WSGI pipeline.
"""
from trove_guestagent.openstack.common import context
from trove_guestagent.openstack.common import local
class TroveContext(context.RequestContext):
"""
Stores information about the security context under which the user
accesses the system, as well as additional request information.
"""
def __init__(self, **kwargs):
self.limit = kwargs.pop('limit', None)
self.marker = kwargs.pop('marker', None)
super(TroveContext, self).__init__(**kwargs)
if not hasattr(local.store, 'context'):
self.update_store()
def to_dict(self):
parent_dict = super(TroveContext, self).to_dict()
parent_dict.update({'limit': self.limit,
'marker': self.marker
})
return parent_dict
def update_store(self):
local.store.context = self
@classmethod
def from_dict(cls, values):
return cls(**values)
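# Round-trip sketch (values are placeholders; the base RequestContext kwargs depend on the
# openstack.common version in use):
#   ctx = TroveContext(limit=20, marker='abc123')
#   data = ctx.to_dict()                # base fields plus 'limit' and 'marker'
#   same = TroveContext.from_dict(data)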
|
apache-2.0
| -7,662,403,318,449,441,000 | 33.537037 | 78 | 0.680965 | false |
yishayv/lyacorr
|
extinction_correlation.py
|
1
|
10396
|
"""
Computes the Lyman-alpha forest auto-correlation estimator.
The work is split between MPI nodes based on the first QSO in each possible pair.
Partial data is gathered and the correlation estimator file is saved after processing each sub-chunk.
"""
import cProfile
import itertools
import numpy as np
from astropy import coordinates as coord
from astropy import table
from astropy import units as u
from astropy.coordinates import Angle
from astropy.coordinates import matching as matching
from mpi4py import MPI
import common_settings
import mpi_helper
from data_access.read_spectrum_fits import QSORecord
from physics_functions import comoving_distance
from physics_functions.spherical_math import SkyGroups, find_spherical_mean_deg
from python_compat import reduce, zip
settings = common_settings.Settings() # type: common_settings.Settings
cd = comoving_distance.ComovingDistance()
comm = MPI.COMM_WORLD
def calc_angular_separation(pairs, pair_angles, ar_extinction, extinction_mean):
ar_angular_separation_bins = np.zeros(shape=(2, 50))
for spec1_index, spec2_index, group_id, angle_index in pairs:
pair_angle = pair_angles[angle_index]
extinction1 = ar_extinction[spec1_index]
extinction2 = ar_extinction[spec2_index]
correlation = (extinction1 - extinction_mean) * (extinction2 - extinction_mean)
bin_number = int(pair_angle / np.pi * 180. * 10.)
if bin_number < 50 and np.isfinite(correlation):
# sum of products
ar_angular_separation_bins[0][bin_number] += correlation
# counts
ar_angular_separation_bins[1][bin_number] += 1
return ar_angular_separation_bins
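# Binning sketch: int(pair_angle / np.pi * 180. * 10.) converts the separation to tenths of
# a degree, so the 50 bins cover 0 to 5 degrees; e.g. a pair 0.37 degrees apart lands in bin 3.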
class SubChunkHelper:
def __init__(self, ar_extinction):
self.angular_separation_bins = None
self.ar_extinction = ar_extinction
self.extinction_mean = np.nanmean(ar_extinction)
def add_pairs_in_sub_chunk(self, local_pair_angles, pairs):
local_angular_separation_bins = \
calc_angular_separation(pairs, local_pair_angles, self.ar_extinction, self.extinction_mean)
mpi_helper.l_print('local pair count:', local_angular_separation_bins[1].sum())
local_pair_separation_bins_array = local_angular_separation_bins
local_pair_separation_bins_metadata = None
local_array_shape = local_pair_separation_bins_array.shape
array_block_size = np.prod(local_array_shape[1:])
comm.Barrier()
mpi_helper.r_print("BEGIN GATHER")
mpi_helper.l_print_no_barrier('local array shape:', local_array_shape)
array_counts = comm.allgather(local_array_shape[0])
pair_separation_bins_array = None
array_endings = np.cumsum(array_counts)
array_displacements = array_endings - np.array(array_counts)
if comm.rank == 0:
mpi_helper.r_print('array count:', array_counts)
root_array_shape = (np.sum(array_counts),) + local_array_shape[1:]
mpi_helper.r_print('root array shape:', root_array_shape)
pair_separation_bins_array = np.ones(shape=root_array_shape, dtype=np.float64)
send_buf = [local_pair_separation_bins_array,
local_array_shape[0] * array_block_size]
receive_buf = [pair_separation_bins_array, np.multiply(array_counts, array_block_size),
np.multiply(array_displacements, array_block_size), MPI.DOUBLE]
# mpi_helper.l_print(send_buf)
comm.Gatherv(sendbuf=send_buf, recvbuf=receive_buf)
list_pair_separation_bins_metadata = comm.gather(local_pair_separation_bins_metadata)
comm.Barrier()
mpi_helper.r_print("END_GATHER")
if comm.rank == 0:
# mpi_helper.r_print(receive_buf[0][0][0:10])
list_pair_separation_bins = [
pair_separation_bins_array[array_displacements[rank]:array_endings[rank]]
for rank, metadata in enumerate(list_pair_separation_bins_metadata)]
# initialize bins only if this is the first time we get here
# for now use a function level static variable
if self.angular_separation_bins is None:
self.angular_separation_bins = np.zeros_like(local_angular_separation_bins)
# add new results to existing bins
if list_pair_separation_bins:
self.angular_separation_bins = reduce(lambda x, y: x + y, list_pair_separation_bins,
self.angular_separation_bins)
mpi_helper.r_print('total number of pixel pairs in bins:',
self.angular_separation_bins[1].sum())
np.save("../../data/extinction_correlation.npy", self.angular_separation_bins)
# pixel_pairs.significant_qso_pairs.save(settings.get_significant_qso_pairs_npy())
else:
print('no results received.')
def profile_main():
# x = coord.SkyCoord(ra=10.68458*u.deg, dec=41.26917*u.deg, frame='icrs')
# min_distance = cd.comoving_distance_transverse(2.1, **fidcosmo)
# print('minimum distance', min_distance, 'Mpc/rad')
# initialize data sources
qso_record_table = table.Table(np.load(settings.get_qso_metadata_npy()))
# prepare data for quicker access
qso_record_list = [QSORecord.from_row(i) for i in qso_record_table]
ar_ra = np.array([i.ra for i in qso_record_list])
ar_dec = np.array([i.dec for i in qso_record_list])
ar_z = np.array([i.z for i in qso_record_list])
ar_extinction = np.array([i.extinction_g for i in qso_record_list])
ar_distance = cd.fast_comoving_distance(ar_z)
mpi_helper.r_print('QSO table size:', len(ar_distance))
# TODO: find a more precise value instead of z=1.9
# set maximum QSO angular separation to 200Mpc/h (in co-moving coordinates)
# the article assumes h is measured in units of 100km/s/mpc
radius_quantity = (200. * (100. * u.km / (u.Mpc * u.s)) / cd.H0) # type: u.Quantity
radius = radius_quantity.value
max_angular_separation = radius / (cd.comoving_distance(1.9) / u.radian)
mpi_helper.r_print('maximum separation of QSOs:', Angle(max_angular_separation).to_string(unit=u.degree))
# print(ar_list)
coord_set = coord.SkyCoord(ra=ar_ra * u.degree, dec=ar_dec * u.degree,
distance=ar_distance * u.Mpc)
# print(coord_set)
# find all QSO pairs
chunk_sizes, chunk_offsets = mpi_helper.get_chunks(len(coord_set), comm.size)
local_start_index = chunk_offsets[comm.rank]
local_end_index = local_start_index + chunk_sizes[comm.rank]
mpi_helper.l_print('matching objects in range:', local_start_index, 'to', local_end_index)
# each node matches a range of objects against the full list.
count = matching.search_around_sky(coord_set[local_start_index:local_end_index],
coord_set,
max_angular_separation)
# search around sky returns indices in the input lists.
# each node should add its offset to get the QSO index in the original list (only for x[0]).
# qso2 which contains the unmodified index to the full list of QSOs.
# the third vector is a count so we can keep a reference to the angles vector.
local_qso_index_1 = count[0] + local_start_index
local_qso_index_2 = count[1]
# find the mean ra,dec for each pair
local_qso_ra_pairs = np.vstack((ar_ra[local_qso_index_1], ar_ra[local_qso_index_2]))
local_qso_dec_pairs = np.vstack((ar_dec[local_qso_index_1], ar_dec[local_qso_index_2]))
    # we can safely assume that the separations are small enough that there is no catastrophic cancellation of the mean,
    # so checking the unit radius value is not required
local_pair_means_ra, local_pair_means_dec, _ = find_spherical_mean_deg(local_qso_ra_pairs, local_qso_dec_pairs,
axis=0)
sky_groups = SkyGroups(nside=settings.get_healpix_nside())
group_id = sky_groups.get_group_ids(local_pair_means_ra, local_pair_means_dec)
local_qso_pairs_with_unity = np.vstack((local_qso_index_1,
local_qso_index_2,
group_id,
np.arange(count[0].size)))
local_qso_pair_angles = count[2].to(u.rad).value
mpi_helper.l_print('number of QSO pairs (including identity pairs):', count[0].size)
mpi_helper.l_print('angle vector size:', local_qso_pair_angles.size)
# remove pairs of the same QSO.
# local_qso_pairs = local_qso_pairs_with_unity.T[local_qso_pairs_with_unity[1] != local_qso_pairs_with_unity[0]]
# remove pairs of the same QSO, which have different [plate,mjd,fiber]
# assume that QSOs within roughly 10 arc-second (5e-5 rads) are the same object.
local_qso_pairs = local_qso_pairs_with_unity.T[local_qso_pair_angles > 5e-5]
mpi_helper.l_print('total number of redundant objects removed:', local_qso_pairs_with_unity.shape[1] -
local_qso_pairs.shape[0] - chunk_sizes[comm.rank])
# l_print(pairs)
mpi_helper.l_print('number of QSO pairs:', local_qso_pairs.shape[0])
# l_print('angle vector:', x[2])
# divide the work into sub chunks
# Warning: the number of sub chunks must be identical for all nodes because gather is called after each sub chunk.
# divide by comm.size to make sub chunk size independent of number of nodes.
num_sub_chunks_per_node = settings.get_mpi_num_sub_chunks() // comm.size
pixel_pair_sub_chunks = mpi_helper.get_chunks(local_qso_pairs.shape[0], num_sub_chunks_per_node)
sub_chunk_helper = SubChunkHelper(ar_extinction)
for i, j, k in zip(pixel_pair_sub_chunks[0], pixel_pair_sub_chunks[1], itertools.count()):
sub_chunk_start = j
sub_chunk_end = j + i
mpi_helper.l_print("sub_chunk: size", i, ", starting at", j, ",", k, "out of", len(pixel_pair_sub_chunks[0]))
sub_chunk_helper.add_pairs_in_sub_chunk(local_qso_pair_angles,
local_qso_pairs[sub_chunk_start:sub_chunk_end])
if settings.get_profile():
cProfile.run('profile_main()', filename='generate_pair_list.prof', sort=2)
else:
profile_main()
|
mit
| 99,461,778,343,690,620 | 47.807512 | 118 | 0.649288 | false |
cb1234/pynet-test
|
learnpy/class7/ex1a_cdp_parsing.py
|
1
|
1528
|
#!/usr/bin/env python
'''
Create a program that opens the 'r1_cdp.txt' file and, using regular
expressions, extracts the remote hostname, remote IP address, model, vendor,
and device_type.
'''
import re
from pprint import pprint
def generic_cdp_parser(pattern, cdp):
'''
Search for pattern in the cdp data
Return relevant .group(1)
Else return ''
'''
# Break the CDP data up into its individual lines
cdp = cdp.split('\n')
for line in cdp:
# Search for pattern
re_pattern = re.search(pattern, line)
# Return match if found
if re_pattern:
return_val = re_pattern.group(1)
return return_val.strip()
return ''
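# Example sketch (the CDP text below is made up):
#   generic_cdp_parser(r'Device ID: (.+)', 'Device ID: R2.example.com\nIP address: 10.0.0.2\n')
#   returns 'R2.example.com' -- the stripped group(1) of the first matching line.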
# Technique to allow importable and executable code to coexist (will explain in class#8)
if __name__ == '__main__':
cdp_file = 'CDP_DATA/r1_cdp.txt'
f = open(cdp_file)
# Read cdp_data into a list
cdp_data = f.read()
f.close()
network_devices = {}
network_devices['remote_hostname'] = generic_cdp_parser(r'Device ID: (.+)', cdp_data)
network_devices['ip'] = generic_cdp_parser(r'IP address: (.+)', cdp_data)
network_devices['vendor'] = generic_cdp_parser(r'^Platform: (.+?) ', cdp_data)
network_devices['model'] = generic_cdp_parser(r'^Platform: \w+ (.+),', cdp_data)
network_devices['device_type'] = generic_cdp_parser(r'^Platform: .+Capabilities: (.+?) ',
cdp_data)
print
pprint(network_devices)
print
|
apache-2.0
| -6,601,483,307,684,706,000 | 25.807018 | 93 | 0.604712 | false |
kaushik94/sympy
|
sympy/parsing/tests/test_maxima.py
|
2
|
1711
|
from sympy.parsing.maxima import parse_maxima
from sympy import Rational, Abs, Symbol, sin, cos, E, oo, log, factorial
from sympy.abc import x
n = Symbol('n', integer=True)
def test_parser():
assert Abs(parse_maxima('float(1/3)') - 0.333333333) < 10**(-5)
assert parse_maxima('13^26') == 91733330193268616658399616009
assert parse_maxima('sin(%pi/2) + cos(%pi/3)') == Rational(3, 2)
assert parse_maxima('log(%e)') == 1
def test_injection():
parse_maxima('c: x+1', globals=globals())
# c created by parse_maxima
assert c == x + 1
parse_maxima('g: sqrt(81)', globals=globals())
# g created by parse_maxima
assert g == 9
def test_maxima_functions():
assert parse_maxima('expand( (x+1)^2)') == x**2 + 2*x + 1
assert parse_maxima('factor( x**2 + 2*x + 1)') == (x + 1)**2
assert parse_maxima('2*cos(x)^2 + sin(x)^2') == 2*cos(x)**2 + sin(x)**2
assert parse_maxima('trigexpand(sin(2*x)+cos(2*x))') == \
-1 + 2*cos(x)**2 + 2*cos(x)*sin(x)
assert parse_maxima('solve(x^2-4,x)') == [-2, 2]
assert parse_maxima('limit((1+1/x)^x,x,inf)') == E
assert parse_maxima('limit(sqrt(-x)/x,x,0,minus)') is -oo
assert parse_maxima('diff(x^x, x)') == x**x*(1 + log(x))
assert parse_maxima('sum(k, k, 1, n)', name_dict=dict(
n=Symbol('n', integer=True),
k=Symbol('k', integer=True)
)) == (n**2 + n)/2
assert parse_maxima('product(k, k, 1, n)', name_dict=dict(
n=Symbol('n', integer=True),
k=Symbol('k', integer=True)
)) == factorial(n)
assert parse_maxima('ratsimp((x^2-1)/(x+1))') == x - 1
assert Abs( parse_maxima(
'float(sec(%pi/3) + csc(%pi/3))') - 3.154700538379252) < 10**(-5)
|
bsd-3-clause
| -5,753,884,372,647,215,000 | 37.022222 | 75 | 0.576856 | false |
epeli/subssh
|
tests/test_assert_function_args.py
|
1
|
6487
|
'''
Created on Mar 26, 2010
@author: epeli
'''
import unittest
from subssh.tools import assert_args
from subssh import InvalidArguments
class TestRequireArgumensWithFunctions(unittest.TestCase):
def test_require_zero(self):
def f():
pass
assert_args(f, [])
args = [1]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
args = [1,2]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
def test_localvar(self):
def f():
foo = 1
assert_args(f, [])
args = [1]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
args = [1,2]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
def test_require_one(self):
def f(user):
pass
assert_args(f, [1])
args = [1,2]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
def test_require_two(self):
def f(user, second):
pass
assert_args(f, [1, 2])
args = [1]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
args = [1,2,3]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
def test_require_1_to_n(self):
def f(user, *args):
pass
assert_args(f, [1])
assert_args(f, [1, 2])
assert_args(f, [1, 2 ,3])
args = []
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
def test_require_2_to_n(self):
def f(user, second, *args):
pass
assert_args(f, [1, 2])
assert_args(f, [1, 2 ,3])
assert_args(f, range(100))
args = []
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
args = [1]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
def test_default_value(self):
def f(user, value="default"):
pass
assert_args(f, [1])
assert_args(f, [1, 2])
args = []
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
args = [1,2,3]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
def test_multi_default_values(self):
def f(user, value="default", value2="default2"):
pass
assert_args(f, [1])
assert_args(f, [1, 2])
assert_args(f, [1, 2, 3])
args = []
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
args = [1,2,3,4]
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
def test_default_with_n(self):
def f(user, value="default", *args):
pass
assert_args(f, [1])
assert_args(f, [1, 2])
assert_args(f, [1, 2, 3])
assert_args(f, [1, 2, 3, 4])
args = []
self.assertRaises(InvalidArguments, assert_args, f, args)
self.assertRaises(TypeError, f, *args)
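# Summary sketch of the behaviour exercised above: assert_args(f, args) mirrors Python's own
# arity rules, e.g. for
#   def f(user, value="default", *rest): ...
# assert_args(f, ['a']) passes, while assert_args(f, []) raises InvalidArguments.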
class TestRequireArgumensWithMethods(unittest.TestCase):
def test_require_zero(self):
class C(object):
def m(self):
pass
o = C()
assert_args(o.m, [])
args = [1]
self.assertRaises(InvalidArguments, assert_args, o.m, args)
self.assertRaises(TypeError, o.m, *args)
args = [1,2]
self.assertRaises(InvalidArguments, assert_args, o.m, args)
self.assertRaises(TypeError, o.m, *args)
def test_require_one(self):
class C(object):
def m(self, user):
pass
o = C()
assert_args(o.m, [1])
args = [1,2]
self.assertRaises(InvalidArguments, assert_args, o.m, args)
self.assertRaises(TypeError, o.m, *args)
def test_require_two(self):
class C(object):
def m(self, user, second):
pass
o = C()
assert_args(o.m, [1, 2])
args = [1]
self.assertRaises(InvalidArguments, assert_args, o.m, args)
self.assertRaises(TypeError, o.m, *args)
args = [1,2,3]
self.assertRaises(InvalidArguments, assert_args, o.m, args)
self.assertRaises(TypeError, o.m, *args)
def test_require_1_to_n(self):
class C(object):
def m(self, user, *args):
pass
o = C()
assert_args(o.m, [1])
assert_args(o.m, [1, 2])
assert_args(o.m, [1, 2 ,3])
args = []
self.assertRaises(InvalidArguments, assert_args, o.m, args)
self.assertRaises(TypeError, o.m, *args)
def test_require_2_to_n(self):
class C(object):
def m(self, user, second, *args):
pass
o = C()
assert_args(o.m, [1, 2])
assert_args(o.m, [1, 2 ,3])
assert_args(o.m, range(100))
args = []
self.assertRaises(InvalidArguments, assert_args, o.m, args)
self.assertRaises(TypeError, o.m, *args)
args = [1]
self.assertRaises(InvalidArguments, assert_args, o.m, args)
self.assertRaises(TypeError, o.m, *args)
class TestInvalidArgumentsArg(unittest.TestCase):
def test_zero(self):
def f():
pass
raised = False
try:
assert_args(f, [1])
except InvalidArguments, e:
raised = True
self.assertEquals(e.args[0], 'Invalid argument count 1. Required 0 arguments.')
self.assert_(raised)
def test_1_to_n(self):
class C(object):
def m(self, user, second, *args):
pass
o = C()
raised = False
try:
assert_args(o.m, [])
except InvalidArguments, e:
raised = True
self.assertEquals(e.args[0], 'Invalid argument count 0. Required 2-n arguments.')
self.assert_(raised)
|
agpl-3.0
| -7,018,316,184,167,904,000 | 21.681818 | 93 | 0.546323 | false |
colemanja91/PyEloqua-Examples
|
venv/lib/python3.4/site-packages/simple_salesforce/api.py
|
1
|
27063
|
"""Core classes and exceptions for Simple-Salesforce"""
# has to be defined prior to login import
DEFAULT_API_VERSION = '29.0'
import requests
import json
try:
from urlparse import urlparse
except ImportError:
# Python 3+
from urllib.parse import urlparse
from simple_salesforce.login import SalesforceLogin
from simple_salesforce.util import date_to_iso8601, SalesforceError
try:
from collections import OrderedDict
except ImportError:
# Python < 2.7
from ordereddict import OrderedDict
class Salesforce(object):
"""Salesforce Instance
An instance of Salesforce is a handy way to wrap a Salesforce session
for easy use of the Salesforce REST API.
"""
def __init__(
self, username=None, password=None, security_token=None,
session_id=None, instance=None, instance_url=None,
organizationId=None, sandbox=False, version=DEFAULT_API_VERSION,
proxies=None, session=None):
"""Initialize the instance with the given parameters.
Available kwargs
Password Authentication:
* username -- the Salesforce username to use for authentication
* password -- the password for the username
* security_token -- the security token for the username
* sandbox -- True if you want to login to `test.salesforce.com`, False
if you want to login to `login.salesforce.com`.
Direct Session and Instance Access:
* session_id -- Access token for this session
Then either
* instance -- Domain of your Salesforce instance, i.e. `na1.salesforce.com`
OR
        * instance_url -- Full URL of your instance, i.e. `https://na1.salesforce.com`
Universal Kwargs:
* version -- the version of the Salesforce API to use, for example `29.0`
* proxies -- the optional map of scheme to proxy server
* session -- Custom requests session, created in calling code. This
                     enables the use of requests Session features not otherwise
exposed by simple_salesforce.
"""
# Determine if the user passed in the optional version and/or sandbox kwargs
self.sf_version = version
self.sandbox = sandbox
self.proxies = proxies
# Determine if the user wants to use our username/password auth or pass in their own information
if all(arg is not None for arg in (username, password, security_token)):
self.auth_type = "password"
# Pass along the username/password to our login helper
self.session_id, self.sf_instance = SalesforceLogin(
session=session,
username=username,
password=password,
security_token=security_token,
sandbox=self.sandbox,
sf_version=self.sf_version,
proxies=self.proxies)
elif all(arg is not None for arg in (session_id, instance or instance_url)):
self.auth_type = "direct"
self.session_id = session_id
# If the user provides the full url (as returned by the OAuth interface for
# example) extract the hostname (which we rely on)
if instance_url is not None:
self.sf_instance = urlparse(instance_url).hostname
else:
self.sf_instance = instance
elif all(arg is not None for arg in (username, password, organizationId)):
self.auth_type = 'ipfilter'
# Pass along the username/password to our login helper
self.session_id, self.sf_instance = SalesforceLogin(
session=session,
username=username,
password=password,
organizationId=organizationId,
sandbox=self.sandbox,
sf_version=self.sf_version,
proxies=self.proxies)
else:
raise TypeError(
'You must provide login information or an instance and token'
)
if self.sandbox:
self.auth_site = 'https://test.salesforce.com'
else:
self.auth_site = 'https://login.salesforce.com'
self.request = session or requests.Session()
self.request.proxies = self.proxies
self.headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + self.session_id,
'X-PrettyPrint': '1'
}
self.base_url = ('https://{instance}/services/data/v{version}/'
.format(instance=self.sf_instance,
version=self.sf_version))
self.apex_url = ('https://{instance}/services/apexrest/'
.format(instance=self.sf_instance))
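        # Instantiation sketch (credentials and hostnames are placeholders):
        #   sf = Salesforce(username='user@example.com', password='pw', security_token='token')
        #   sf = Salesforce(session_id='00D...', instance_url='https://na1.salesforce.com')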
def describe(self):
url = self.base_url + "sobjects"
result = self.request.get(url, headers=self.headers)
if result.status_code != 200:
raise SalesforceGeneralError(url,
'describe',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
# SObject Handler
def __getattr__(self, name):
"""Returns an `SFType` instance for the given Salesforce object type
(given in `name`).
The magic part of the SalesforceAPI, this function translates
calls such as `salesforce_api_instance.Lead.metadata()` into fully
constituted `SFType` instances to make a nice Python API wrapper
for the REST API.
Arguments:
* name -- the name of a Salesforce object type, e.g. Lead or Contact
"""
# fix to enable serialization (https://github.com/heroku/simple-salesforce/issues/60)
if name.startswith('__'):
return super(Salesforce, self).__getattr__(name)
return SFType(name, self.session_id, self.sf_instance, self.sf_version, self.proxies)
# User utlity methods
def set_password(self, user, password):
"""Sets the password of a user
salesforce dev documentation link:
https://www.salesforce.com/us/developer/docs/api_rest/Content/dome_sobject_user_password.htm
Arguments:
* user: the userID of the user to set
* password: the new password
"""
url = self.base_url + 'sobjects/User/%s/password' % user
params = { 'NewPassword' : password, }
result = self.request.post(url, headers=self.headers, data=json.dumps(params))
        # Salesforce returns 204 No Content when the request is successful
if result.status_code != 200 and result.status_code != 204:
raise SalesforceGeneralError(url,
'User',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
def setPassword(self, user, password):
import warnings
warnings.warn(
"This method has been deprecated. Please use set_password instread.", DeprecationWarning)
return self.set_password(user, password)
# Generic Rest Function
def restful(self, path, params):
"""Allows you to make a direct REST call if you know the path
Arguments:
* path: The path of the request
Example: sobjects/User/ABC123/password'
* params: dict of parameters to pass to the path
"""
url = self.base_url + path
result = self.request.get(url, headers=self.headers, params=params)
if result.status_code != 200:
raise SalesforceGeneralError(url,
path,
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
# Search Functions
def search(self, search):
"""Returns the result of a Salesforce search as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* search -- the fully formatted SOSL search string, e.g.
`FIND {Waldo}`
"""
url = self.base_url + 'search/'
# `requests` will correctly encode the query string passed as `params`
params = {'q': search}
result = self.request.get(url, headers=self.headers, params=params)
if result.status_code != 200:
raise SalesforceGeneralError(url,
'search',
result.status_code,
result.content)
json_result = result.json(object_pairs_hook=OrderedDict)
if len(json_result) == 0:
return None
else:
return json_result
def quick_search(self, search):
"""Returns the result of a Salesforce search as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* search -- the non-SOSL search string, e.g. `Waldo`. This search
string will be wrapped to read `FIND {Waldo}` before being
sent to Salesforce
"""
search_string = u'FIND {{{search_string}}}'.format(search_string=search)
return self.search(search_string)
# Query Handler
def query(self, query, **kwargs):
"""Return the result of a Salesforce SOQL query as a dict decoded from
the Salesforce response JSON payload.
Arguments:
* query -- the SOQL query to send to Salesforce, e.g.
`SELECT Id FROM Lead WHERE Email = "waldo@somewhere.com"`
"""
url = self.base_url + 'query/'
params = {'q': query}
# `requests` will correctly encode the query string passed as `params`
result = self.request.get(url, headers=self.headers, params=params, **kwargs)
if result.status_code != 200:
_exception_handler(result)
return result.json(object_pairs_hook=OrderedDict)
def query_more(self, next_records_identifier, identifier_is_url=False, **kwargs):
"""Retrieves more results from a query that returned more results
than the batch maximum. Returns a dict decoded from the Salesforce
response JSON payload.
Arguments:
* next_records_identifier -- either the Id of the next Salesforce
object in the result, or a URL to the
next record in the result.
* identifier_is_url -- True if `next_records_identifier` should be
treated as a URL, False if
`next_records_identifer` should be treated as
an Id.
"""
if identifier_is_url:
# Don't use `self.base_url` here because the full URI is provided
url = (u'https://{instance}{next_record_url}'
.format(instance=self.sf_instance,
next_record_url=next_records_identifier))
else:
url = self.base_url + 'query/{next_record_id}'
url = url.format(next_record_id=next_records_identifier)
result = self.request.get(url, headers=self.headers, **kwargs)
if result.status_code != 200:
_exception_handler(result)
return result.json(object_pairs_hook=OrderedDict)
def query_all(self, query, **kwargs):
"""Returns the full set of results for the `query`. This is a
convenience wrapper around `query(...)` and `query_more(...)`.
The returned dict is the decoded JSON payload from the final call to
Salesforce, but with the `totalSize` field representing the full
number of results retrieved and the `records` list representing the
full list of records retrieved.
Arguments
* query -- the SOQL query to send to Salesforce, e.g.
`SELECT Id FROM Lead WHERE Email = "waldo@somewhere.com"`
"""
def get_all_results(previous_result, **kwargs):
"""Inner function for recursing until there are no more results.
Returns the full set of results that will be the return value for
`query_all(...)`
Arguments:
* previous_result -- the modified result of previous calls to
Salesforce for this query
"""
if previous_result['done']:
return previous_result
else:
result = self.query_more(previous_result['nextRecordsUrl'],
identifier_is_url=True, **kwargs)
result['totalSize'] += previous_result['totalSize']
# Include the new list of records with the previous list
previous_result['records'].extend(result['records'])
result['records'] = previous_result['records']
# Continue the recursion
return get_all_results(result, **kwargs)
# Make the initial query to Salesforce
result = self.query(query, **kwargs)
# The number of results might have exceeded the Salesforce batch limit
# so check whether there are more results and retrieve them if so.
return get_all_results(result, **kwargs)
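    # Query sketch ('Lead' and the field name are placeholders for any queryable object;
    # sf is a Salesforce instance):
    #   result = sf.query_all("SELECT Id FROM Lead")
    #   result['totalSize'], len(result['records'])   # full count and full record list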
def apexecute(self, action, method='GET', data=None, **kwargs):
"""Makes an HTTP request to an APEX REST endpoint
Arguments:
* action -- The REST endpoint for the request.
* method -- HTTP method for the request (default GET)
* data -- A dict of parameters to send in a POST / PUT request
* kwargs -- Additional kwargs to pass to `requests.request`
"""
result = self._call_salesforce(method, self.apex_url + action,
data=json.dumps(data), **kwargs)
if result.status_code == 200:
try:
response_content = result.json()
except Exception:
response_content = result.text
return response_content
def _call_salesforce(self, method, url, **kwargs):
"""Utility method for performing HTTP call to Salesforce.
Returns a `requests.result` object.
"""
result = self.request.request(method, url, headers=self.headers, **kwargs)
if result.status_code >= 300:
_exception_handler(result)
return result
class SFType(object):
"""An interface to a specific type of SObject"""
def __init__(self, object_name, session_id, sf_instance, sf_version='27.0', proxies=None):
"""Initialize the instance with the given parameters.
Arguments:
* object_name -- the name of the type of SObject this represents,
e.g. `Lead` or `Contact`
* session_id -- the session ID for authenticating to Salesforce
* sf_instance -- the domain of the instance of Salesforce to use
* sf_version -- the version of the Salesforce API to use
* proxies -- the optional map of scheme to proxy server
"""
self.session_id = session_id
self.name = object_name
self.request = requests.Session()
self.request.proxies = proxies
self.base_url = (u'https://{instance}/services/data/v{sf_version}/sobjects/{object_name}/'
.format(instance=sf_instance,
object_name=object_name,
sf_version=sf_version))
def metadata(self):
"""Returns the result of a GET to `.../{object_name}/` as a dict
decoded from the JSON payload returned by Salesforce.
"""
result = self._call_salesforce('GET', self.base_url)
return result.json(object_pairs_hook=OrderedDict)
def describe(self):
"""Returns the result of a GET to `.../{object_name}/describe` as a
dict decoded from the JSON payload returned by Salesforce.
"""
result = self._call_salesforce('GET', self.base_url + 'describe')
return result.json(object_pairs_hook=OrderedDict)
def describe_layout(self, record_id):
"""Returns the result of a GET to `.../{object_name}/describe/layouts/<recordid>` as a
dict decoded from the JSON payload returned by Salesforce.
"""
result = self._call_salesforce('GET', self.base_url + 'describe/layouts/' + record_id)
return result.json(object_pairs_hook=OrderedDict)
def get(self, record_id):
"""Returns the result of a GET to `.../{object_name}/{record_id}` as a
dict decoded from the JSON payload returned by Salesforce.
Arguments:
* record_id -- the Id of the SObject to get
"""
result = self._call_salesforce('GET', self.base_url + record_id)
return result.json(object_pairs_hook=OrderedDict)
def get_by_custom_id(self, custom_id_field, custom_id):
"""Returns the result of a GET to `.../{object_name}/{custom_id_field}/{custom_id}` as a
dict decoded from the JSON payload returned by Salesforce.
Arguments:
* custom_id_field -- the API name of a custom field that was defined as an External ID
* custom_id - the External ID value of the SObject to get
"""
custom_url = self.base_url + '{custom_id_field}/{custom_id}'.format(
custom_id_field=custom_id_field, custom_id=custom_id)
result = self._call_salesforce('GET', custom_url)
return result.json(object_pairs_hook=OrderedDict)
def create(self, data):
"""Creates a new SObject using a POST to `.../{object_name}/`.
Returns a dict decoded from the JSON payload returned by Salesforce.
Arguments:
* data -- a dict of the data to create the SObject from. It will be
JSON-encoded before being transmitted.
"""
result = self._call_salesforce('POST', self.base_url,
data=json.dumps(data))
return result.json(object_pairs_hook=OrderedDict)
def upsert(self, record_id, data, raw_response=False):
"""Creates or updates an SObject using a PATCH to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- an identifier for the SObject as described in the
Salesforce documentation
* data -- a dict of the data to create or update the SObject from. It
will be JSON-encoded before being transmitted.
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
"""
result = self._call_salesforce('PATCH', self.base_url + record_id,
data=json.dumps(data))
return self._raw_response(result, raw_response)
def update(self, record_id, data, raw_response=False):
"""Updates an SObject using a PATCH to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- the Id of the SObject to update
* data -- a dict of the data to update the SObject from. It will be
JSON-encoded before being transmitted.
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
"""
result = self._call_salesforce('PATCH', self.base_url + record_id,
data=json.dumps(data))
return self._raw_response(result, raw_response)
def delete(self, record_id, raw_response=False):
"""Deletes an SObject using a DELETE to
`.../{object_name}/{record_id}`.
If `raw_response` is false (the default), returns the status code
returned by Salesforce. Otherwise, return the `requests.Response`
object.
Arguments:
* record_id -- the Id of the SObject to delete
* raw_response -- a boolean indicating whether to return the response
directly, instead of the status code.
"""
result = self._call_salesforce('DELETE', self.base_url + record_id)
return self._raw_response(result, raw_response)
def deleted(self, start, end):
"""Use the SObject Get Deleted resource to get a list of deleted records for the specified object.
.../deleted/?start=2013-05-05T00:00:00+00:00&end=2013-05-10T00:00:00+00:00
* start -- start datetime object
* end -- end datetime object
"""
url = self.base_url + 'deleted/?start={start}&end={end}'.format(
start=date_to_iso8601(start), end=date_to_iso8601(end))
result = self._call_salesforce('GET', url)
return result.json(object_pairs_hook=OrderedDict)
def updated(self, start, end):
"""Use the SObject Get Updated resource to get a list of updated (modified or added)
records for the specified object.
.../updated/?start=2014-03-20T00:00:00+00:00&end=2014-03-22T00:00:00+00:00
* start -- start datetime object
* end -- end datetime object
"""
url = self.base_url + 'updated/?start={start}&end={end}'.format(
start=date_to_iso8601(start), end=date_to_iso8601(end))
result = self._call_salesforce('GET', url)
return result.json(object_pairs_hook=OrderedDict)
def _call_salesforce(self, method, url, **kwargs):
"""Utility method for performing HTTP call to Salesforce.
Returns a `requests.result` object.
"""
headers = {
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + self.session_id,
'X-PrettyPrint': '1'
}
result = self.request.request(method, url, headers=headers, **kwargs)
if result.status_code >= 300:
_exception_handler(result, self.name)
return result
def _raw_response(self, response, body_flag):
"""Utility method for processing the response and returning either the
status code or the response object.
Returns either an `int` or a `requests.Response` object.
"""
if not body_flag:
return response.status_code
else:
return response
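# CRUD sketch (the object and field names are placeholders; attribute access on a Salesforce
# instance, e.g. sf.Contact, builds the corresponding SFType):
#   new = sf.Contact.create({'LastName': 'Example'})
#   sf.Contact.update(new['id'], {'LastName': 'Changed'})
#   sf.Contact.delete(new['id'])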
class SalesforceAPI(Salesforce):
"""Depreciated SalesforceAPI Instance
This class implements the Username/Password Authentication Mechanism using Arguments
It has since been surpassed by the 'Salesforce' class, which relies on kwargs
"""
def __init__(self, username, password, security_token, sandbox=False,
sf_version='27.0'):
"""Initialize the instance with the given parameters.
Arguments:
* username -- the Salesforce username to use for authentication
* password -- the password for the username
* security_token -- the security token for the username
* sandbox -- True if you want to login to `test.salesforce.com`, False
if you want to login to `login.salesforce.com`.
* sf_version -- the version of the Salesforce API to use, for example
"27.0"
"""
import warnings
warnings.warn(
"Use of login arguments has been depreciated. Please use kwargs",
DeprecationWarning
)
super(SalesforceAPI, self).__init__(username=username,
password=password,
security_token=security_token,
sandbox=sandbox,
version=sf_version)
def _exception_handler(result, name=""):
"""Exception router. Determines which error to raise for bad results"""
try:
response_content = result.json()
except Exception:
response_content = result.text
exc_map = {
300: SalesforceMoreThanOneRecord,
400: SalesforceMalformedRequest,
401: SalesforceExpiredSession,
403: SalesforceRefusedRequest,
404: SalesforceResourceNotFound,
}
exc_cls = exc_map.get(result.status_code, SalesforceGeneralError)
raise exc_cls(result.url, result.status_code, name, response_content)
class SalesforceMoreThanOneRecord(SalesforceError):
"""
Error Code: 300
The value returned when an external ID exists in more than one record. The
response body contains the list of matching records.
"""
message = u"More than one record for {url}. Response content: {content}"
class SalesforceMalformedRequest(SalesforceError):
"""
Error Code: 400
    The request couldn't be understood, usually because the JSON or XML body contains an error.
"""
message = u"Malformed request {url}. Response content: {content}"
class SalesforceExpiredSession(SalesforceError):
"""
Error Code: 401
The session ID or OAuth token used has expired or is invalid. The response
body contains the message and errorCode.
"""
message = u"Expired session for {url}. Response content: {content}"
class SalesforceRefusedRequest(SalesforceError):
"""
Error Code: 403
The request has been refused. Verify that the logged-in user has
appropriate permissions.
"""
message = u"Request refused for {url}. Response content: {content}"
class SalesforceResourceNotFound(SalesforceError):
"""
Error Code: 404
The requested resource couldn't be found. Check the URI for errors, and
verify that there are no sharing issues.
"""
message = u'Resource {name} Not Found. Response content: {content}'
def __str__(self):
return self.message.format(name=self.resource_name,
content=self.content)
class SalesforceGeneralError(SalesforceError):
"""
A non-specific Salesforce error.
"""
message = u'Error Code {status}. Response content: {content}'
def __str__(self):
return self.message.format(status=self.status, content=self.content)
|
gpl-2.0
| -4,290,808,311,357,684,000 | 37.661429 | 106 | 0.595389 | false |
GiggleLiu/nrg_mapping
|
doc/source/conf.py
|
1
|
9702
|
# -*- coding: utf-8 -*-
#
# NRGMapping documentation build configuration file, created by
# sphinx-quickstart on Mon Sep 7 00:20:20 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Include the necessary source folders on sys.path so the documentation can be generated from them.
devdir='../source/'
sys.path.insert(0,devdir)
# Sort members by type
autodoc_member_order = 'groupwise'
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
source_suffix = ['.rst']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'NRGMapping'
copyright = u'2015, Jinguo Leo'
author = u'Jinguo Leo'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'NRG-Mapping-doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
'classoptions': ',oneside,openany',
'babel': '\\usepackage[english]{babel}'
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'NRGMapping.tex', u'NRGMapping Documentation',
u'Jinguo Leo', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'nrgmapping', u'NRGMapping Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'NRGMapping', u'NRGMapping Documentation',
author, 'NRGMapping', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
|
mit
| -6,485,356,685,220,344,000 | 31.232558 | 113 | 0.708101 | false |
elfxwt/SpiderOfPython
|
main_simple.py
|
1
|
3003
|
# -*- coding: utf-8 -*-
import requests
import bs4
import time
import codecs
class Tool:
def __init__(self,cityName):
self.rootUrl = 'http://www.anjuke.com/'
self.baseUrl = self.rootUrl + cityName + '/cm/'
self.checkHref = self.rootUrl + cityName + '/'
self.blUrlModle = 'http://'+cityName+'.anjuke.com/community/view/'
self.cityName = cityName
self.ak = 'E4805d16520de693a3fe707cdc962045'
self.l1 = 'l1='
self.l2 = 'l2='
self.l3 = 'l3='
def sendRequest(self,url):
response = requests.get(url)
soup = bs4.BeautifulSoup(response.text)
return soup
def getAreasHref(self):
response = requests.get(self.baseUrl)
soup = bs4.BeautifulSoup(response.text)
areas = soup.select('.P2a')
herfList = []
for area in areas:
href = area.get('href')
herfList.append(href)
return herfList
def getcrmCodeFromHref(self,href):
str1= href
str2= self.checkHref + 'cm'
str2Len = len(str2)
endIndex = len(str1) - 1
code = str1[str2Len:endIndex]
return code
def getOnePageEstates(self,pageUrl,areaName):
soup = self.sendRequest(pageUrl)
f = codecs.open(areaName+'.txt','a','utf-8')
districts = soup.select('ul.P3 a')
for district in districts:
href = district.get("href")
if(self.checkHref in href):
cmCode = self.getcrmCodeFromHref(href)
if(cmCode.isdigit()):
blValue = self.getEstatesBL(cmCode)
resultStr = "('%s','%s','%s')" % (self.cityName,areaName,district.string)
f.write(resultStr + '\n')
f.close()
def getOneAreaEstates(self,areaUrl,areaName):
count = 0
while count < 2:
count += 1;
str_count = str(count)
pageUrl = areaUrl + 'p'+str_count + '/'
response = requests.get(pageUrl)
soup = bs4.BeautifulSoup(response.text)
nextPageTag = soup.find_all(class_='nolink nextpage')
if len(nextPageTag):
break
self.getOnePageEstates(pageUrl,areaName)
#href="http://hangzhou.anjuke.com/map/sale/?#l1=30.3421789&l2=120.083714&l3=18&commid=193851&commname=金地自在城"
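    # For the sample href above, the index arithmetic in getEstatesBL below
    # would yield l1Value == '30.3421789' and l2Value == '120.083714', i.e.
    # the community's latitude and longitude (illustrative note, not part of
    # the original code).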
def getEstatesBL(self,code):
blUrl = self.blUrlModle + code
soup = self.sendRequest(blUrl)
tag = soup.select('div.border-info a[class="comm-icon"]')
if(len(tag)):
blHref = tag[0].get("href")
l1IndexBegin = blHref.index(self.l1) + len(self.l1)
l2Index = blHref.index(self.l2)
l1End = l2Index - 1
l2IndexBegin = l2Index + len(self.l2)
l3Index = blHref.index(self.l3)
l2End = l3Index - 1
l1Value = blHref[l1IndexBegin:l1End]
l2Value = blHref[l2IndexBegin:l2End]
blValue = "'%s','%s'" % (l1Value,l2Value)
return blValue
def oneStart(self,areaName):
url = self.baseUrl + areaName + '/'
self.getOneAreaEstates(url,areaName)
def start(self):
soup = self.sendRequest(self.baseUrl)
areas = soup.select('.P2a')
herfList = []
for area in areas:
href = area.get('href')
areaName = area.string
self.getOneAreaEstates(href,areaName)
spider = Tool('hangzhou')
spider.oneStart('xiaoshan')
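# To crawl every district listed on the city page instead of a single one,
# the start() method defined above could be used (illustrative):
# spider.start()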
|
apache-2.0
| 3,719,469,298,661,651,000 | 22.023077 | 108 | 0.659539 | false |
caglar10ur/anvio
|
anvio/ccollections.py
|
1
|
8504
|
# -*- coding: utf-8
"""Implements the collections class (the file name has an extra 'c' to avoid
masking the standard collections library).
If the user has analyzed their metagenome using metagenome binning software
and identified draft genomes in their data (or by any other means binned their
contigs based on any criterion), this information can be stored in the
annotation database's collections_* tables. The class implemented here collects
this information from the database, and presents it as an intuitive data structure
for the client.
"""
import anvio
import anvio.db as db
import anvio.tables as t
import anvio.utils as utils
import anvio.terminal as terminal
import anvio.filesnpaths as filesnpaths
from anvio.errors import ConfigError
__author__ = "A. Murat Eren"
__copyright__ = "Copyright 2015, The anvio Project"
__credits__ = []
__license__ = "GPL 3.0"
__version__ = anvio.__version__
__maintainer__ = "A. Murat Eren"
__email__ = "a.murat.eren@gmail.com"
__status__ = "Development"
run = terminal.Run()
progress = terminal.Progress()
class Collections:
def __init__(self, r = run, p = progress):
self.sources_dict = {}
self.run = r
self.progress = p
def populate_sources_dict(self, db_path, version):
database = db.DB(db_path, version)
db_type = database.get_meta_value('db_type')
collections_info_table = database.get_table_as_dict(t.collections_info_table_name)
database.disconnect()
        # collections info must be read-only if it's coming from the annotation database.
if db_type == 'annotation':
read_only = True
elif db_type == 'profile':
read_only = False
else:
raise ConfigError, 'Collections class does not know about this "%s" database type :/' % db_type
for source in collections_info_table:
self.sources_dict[source] = collections_info_table[source]
self.sources_dict[source]['read_only'] = read_only
self.sources_dict[source]['source_db_path'] = db_path
self.sources_dict[source]['source_db_version'] = version
def sanity_check(self, source):
if source not in self.sources_dict:
raise ConfigError, 'There is no "%s" I know of. Maybe the populate_sources_dict was not called\
for whatever database you are trying to get collections from? (anvio asks this\
rhetorical question to the programmer).' % source
def get_collection_dict(self, source):
self.sanity_check(source)
c = self.sources_dict[source]
database = db.DB(c['source_db_path'], c['source_db_version'])
collections_splits_table = database.get_table_as_dict(t.collections_splits_table_name)
database.disconnect()
# FIXME: this could be resolved with a WHERE clause in the SQL query:
collection = utils.get_filtered_dict(collections_splits_table, 'source', set([source]))
collection_dict = {}
for entry in collection.values():
source = entry['source']
cluster_id = entry['cluster_id']
split = entry['split']
if collection_dict.has_key(cluster_id):
collection_dict[cluster_id].append(split)
else:
collection_dict[cluster_id] = [split]
return collection_dict
def get_collection_colors(self, source):
self.sanity_check(source)
c = self.sources_dict[source]
database = db.DB(c['source_db_path'], c['source_db_version'])
collections_colors = database.get_table_as_dict(t.collections_colors_table_name)
database.disconnect()
# FIXME: this could be resolved with a WHERE clause in the SQL query:
collection = utils.get_filtered_dict(collections_colors, 'source', set([source]))
collection_color_dict = {}
for entry in collection.values():
collection_color_dict[entry['cluster_id']] = entry['htmlcolor']
return collection_color_dict
def list_collections(self):
for collection_id in self.sources_dict:
c = self.sources_dict[collection_id]
output = '%s (%d clusters, representing %d splits).' % (collection_id, c['num_clusters'], c['num_splits'])
self.run.info_single(output)
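# A minimal usage sketch for the Collections class above (illustrative only;
# the database path and the 'CONCOCT' collection id are assumptions, not part
# of this module):
#
#   collections = Collections()
#   collections.populate_sources_dict('PROFILE.db', anvio.__profile__version__)
#   collections.list_collections()
#   collection_dict = collections.get_collection_dict('CONCOCT')
#   collection_colors = collections.get_collection_colors('CONCOCT')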
class GetSplitNamesInBins:
def __init__(self, args):
# we will fill this in and return it
self.split_names_of_interest = set([])
A = lambda x: args.__dict__[x] if args.__dict__.has_key(x) else None
self.bin_ids_file_path = A('bin_ids_file')
self.bin_id = A('bin_id')
self.collection_id = A('collection_id')
self.annotation_db_path = A('annotation_db')
self.profile_db_path = A('profile_db')
self.debug = A('debug')
if self.bin_ids_file_path and self.bin_id:
raise ConfigError, 'Either use a file to list all the bin ids (-B), or declare a single bin (-b)\
you would like to focus. Not both :/'
if (not self.bin_ids_file_path) and (not self.bin_id):
raise ConfigError, "You must either use a file to list all the bin ids (-B) you would like to\
focus on, or declare a single bin id (-b) from your collection. You have\
not really given anvi'o anything to work with."
if not self.collection_id:
raise ConfigError, 'This will not work without a collection ID for your bins :/'
if self.bin_ids_file_path:
filesnpaths.is_file_exists(self.bin_ids_file_path)
self.bins = set([b.strip() for b in open(self.bin_ids_file_path).readlines()])
if self.bin_id:
self.bins = set([self.bin_id])
if not len(self.bins):
raise ConfigError, 'There is no bin to work with :/'
self.collections = Collections()
self.collections.populate_sources_dict(self.profile_db_path, anvio.__profile__version__)
if self.collection_id not in self.collections.sources_dict:
raise ConfigError, 'The collection id "%s" does not seem to be in the profile database. These are the\
collections that are available through this profile database: %s.'\
% (self.collection_id, ', '.join(self.collections.sources_dict))
self.collection_dict = self.collections.get_collection_dict(self.collection_id)
bins_in_collection = self.collection_dict.keys()
bins_that_does_not_exist_in_collection = [b for b in self.bins if b not in bins_in_collection]
if len(bins_that_does_not_exist_in_collection):
            raise ConfigError, 'Some of the bins you requested do not appear to have been described in the collection\
"%s". Here is a list of bins that are missing: %s'\
% (self.collection_id, ', '.join(bins_that_does_not_exist_in_collection))
def get_split_names_only(self):
split_names_of_interest = []
for bin_id in self.bins:
split_names_of_interest.extend(self.collection_dict[bin_id])
self.split_names_of_interest = set(split_names_of_interest)
return self.split_names_of_interest
def get_dict(self):
d = {}
for bin_id in self.bins:
d[bin_id] = set(self.collection_dict[bin_id])
return d
class GetSequentialBlocksOfSplits:
"""A simple class to identify longest stretches in a list of integers.
>>> sequentials = SequentialBlocksOfSplits([1, 2, 3, 5, 6, 9])
>>> print sequentials.blocks
[[1, 2, 3], [5, 6], [9]]
>>>
"""
def __init__(self, l):
self.l = sorted(list(set(l)))
self.blocks = []
self.current_block = []
def finalize_block(self):
self.blocks.append(self.current_block)
self.current_block = []
def process(self):
while 1:
if not self.l:
break
current = self.l.pop(0)
if not len(self.current_block) or current == self.current_block[-1] + 1:
self.current_block.append(current)
else:
self.finalize_block()
self.current_block.append(current)
self.finalize_block()
return self.blocks
|
gpl-3.0
| -115,297,844,322,236,850 | 35.033898 | 120 | 0.608302 | false |
clubcapra/Ibex
|
src/seagoatvision_ros/scripts/CapraVision/client/gtk/filters/implementation/WinMorphology.py
|
1
|
3720
|
#! /usr/bin/env python
# Copyright (C) 2012 Club Capra - capra.etsmtl.ca
#
# This file is part of CapraVision.
#
# CapraVision is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import copy
from gi.repository import Gtk
from CapraVision.client.gtk import get_ui
from CapraVision.client.gtk import win_name
class WinMorphology():
def __init__(self, filtre, cb):
self.filtre = filtre
self.filtre_init = copy.copy(filtre)
self.cb = cb
ui = get_ui(self)
self.window = ui.get_object(win_name(self))
self.spnKernelWidth = ui.get_object('spnKernelWidth')
self.spnKernelWidth.set_adjustment(self.create_adj())
self.spnKernelHeight = ui.get_object('spnKernelHeight')
self.spnKernelHeight.set_adjustment(self.create_adj())
self.spnAnchorX = ui.get_object('spnAnchorX')
self.spnAnchorX.set_adjustment(self.create_adj_anchor())
self.spnAnchorY = ui.get_object('spnAnchorY')
self.spnAnchorY.set_adjustment(self.create_adj_anchor())
self.spnIterations = ui.get_object('spnIterations')
self.spnIterations.set_adjustment(self.create_adj())
self.init_window()
def init_window(self):
self.spnKernelWidth.set_value(self.filtre_init.kernel_width.get_current_value())
self.spnKernelHeight.set_value(self.filtre_init.kernel_height.get_current_value())
self.spnAnchorX.set_value(self.filtre_init.anchor_x.get_current_value())
self.spnAnchorY.set_value(self.filtre_init.anchor_y.get_current_value())
def create_adj(self):
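        # Gtk.Adjustment positional arguments are: value, lower, upper,
        # step_increment, page_increment, page_size (note added for clarity).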
return Gtk.Adjustment(1, 1, 255, 1, 10, 0)
def create_adj_anchor(self):
return Gtk.Adjustment(1, -1, 255, 1, 10, 0)
def on_btnOK_clicked(self, widget):
self.cb()
self.window.destroy()
def on_btnCancel_clicked(self, widget):
self.filtre.kernel_width = self.filtre_init.kernel_width
self.filtre.kernel_height = self.filtre_init.kernel_height
self.filtre.anchor_x = self.filtre_init.anchor_x
self.filtre.anchor_y = self.filtre_init.anchor_y
self.filtre.iterations = self.filtre_init.iterations
self.filtre.configure()
self.init_window()
def on_spnKernelWidth_value_changed(self, widget):
self.filtre.kernel_width.set_current_value(self.spnKernelWidth.get_value())
self.filtre.configure()
def on_spnKernelHeight_value_changed(self, widget):
self.filtre.kernel_height.set_current_value(self.spnKernelHeight.get_value())
self.filtre.configure()
def on_spnAnchorX_value_changed(self, widget):
self.filtre.anchor_x.set_current_value(self.spnAnchorX.get_value())
self.filtre.configure()
def on_spnAnchorY_value_changed(self, widget):
self.filtre.anchor_y.set_current_value(self.spnAnchorY.get_value())
self.filtre.configure()
def on_spnIterations_value_changed(self, widget):
self.filtre.iterations.set_current_value(self.spnIterations.get_value())
self.filtre.configure()
|
gpl-3.0
| 7,680,349,863,580,811,000 | 39.445652 | 90 | 0.676075 | false |
musteryu/Data-Mining
|
assignment-黄煜-3120100937/question_4.py
|
1
|
1258
|
from mylib import *
import os,sys
import numpy as np
import matplotlib.pyplot as plt
import math
import random
from time import time
if __name__ == '__main__':
DIR_PATH = sys.path[0] + '\\'
# normal distribution vector file
nvctr_file1 = DIR_PATH + 'normal_500_1.txt'
nvctr_file2 = DIR_PATH + 'normal_500_2.txt'
# uniform distribution vector file
uvctr_file1 = DIR_PATH + 'uniform_500_1.txt'
uvctr_file2 = DIR_PATH + 'uniform_500_2.txt'
# normal distribution matrix
nmtrx = fget_mtrx(nvctr_file1) + fget_mtrx(nvctr_file2)
# uniform distribution matrix
umtrx = fget_mtrx(uvctr_file1) + fget_mtrx(uvctr_file2)
    # plist is a list of the numbers of dimensions after DCT compression
# nplist is for normal distribution data set
# uplist is for uniform distribution data set
nplist = []
uplist = []
for vector in nmtrx:
u, p = my_DCT_compression(vector, 0.01)
nplist.append(p)
for vector in umtrx:
u, p = my_DCT_compression(vector, 0.01)
uplist.append(p)
# draw histogram
plt.figure(1)
plt.subplot(2,1,1)
my_hist(nplist, bucket_size = 1, flat_edge = False, title = "For normal distribution data set")
plt.subplot(2,1,2)
my_hist(uplist, bucket_size = 1, flat_edge = False, title = "For uniform distribution data set")
plt.show()
|
gpl-2.0
| -7,111,678,879,534,298,000 | 26.977778 | 97 | 0.705087 | false |
tipabu/swift
|
test/unit/common/middleware/s3api/test_s3api.py
|
1
|
61449
|
# Copyright (c) 2011-2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import unittest
from mock import patch, MagicMock
import calendar
from datetime import datetime
import hashlib
import mock
import requests
import json
import six
from six.moves.urllib.parse import unquote, quote
import swift.common.middleware.s3api
from swift.common.middleware.keystoneauth import KeystoneAuth
from swift.common import swob, utils
from swift.common.swob import Request
from keystonemiddleware.auth_token import AuthProtocol
from keystoneauth1.access import AccessInfoV2
from test.unit import debug_logger
from test.unit.common.middleware.s3api import S3ApiTestCase
from test.unit.common.middleware.s3api.helpers import FakeSwift
from test.unit.common.middleware.s3api.test_s3token import \
GOOD_RESPONSE_V2, GOOD_RESPONSE_V3
from swift.common.middleware.s3api.s3request import SigV4Request, S3Request
from swift.common.middleware.s3api.etree import fromstring
from swift.common.middleware.s3api.s3api import filter_factory, \
S3ApiMiddleware
from swift.common.middleware.s3api.s3token import S3Token
class TestListingMiddleware(S3ApiTestCase):
def test_s3_etag_in_json(self):
# This translation happens all the time, even on normal swift requests
body_data = json.dumps([
{'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
{'name': 'obj2', 'hash': 'swiftetag; s3_etag=mu-etag'},
{'name': 'obj2', 'hash': 'swiftetag; something=else'},
{'subdir': 'path/'},
]).encode('ascii')
self.swift.register(
'GET', '/v1/a/c', swob.HTTPOk,
{'Content-Type': 'application/json; charset=UTF-8'},
body_data)
req = Request.blank('/v1/a/c')
status, headers, body = self.call_s3api(req)
self.assertEqual(json.loads(body), [
{'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
{'name': 'obj2', 'hash': 'swiftetag', 's3_etag': '"mu-etag"'},
{'name': 'obj2', 'hash': 'swiftetag; something=else'},
{'subdir': 'path/'},
])
def test_s3_etag_non_json(self):
self.swift.register(
'GET', '/v1/a/c', swob.HTTPOk,
{'Content-Type': 'application/json; charset=UTF-8'},
b'Not actually JSON')
req = Request.blank('/v1/a/c')
status, headers, body = self.call_s3api(req)
self.assertEqual(body, b'Not actually JSON')
# Yes JSON, but wrong content-type
body_data = json.dumps([
{'name': 'obj1', 'hash': '0123456789abcdef0123456789abcdef'},
{'name': 'obj2', 'hash': 'swiftetag; s3_etag=mu-etag'},
{'name': 'obj2', 'hash': 'swiftetag; something=else'},
{'subdir': 'path/'},
]).encode('ascii')
self.swift.register(
'GET', '/v1/a/c', swob.HTTPOk,
{'Content-Type': 'text/plain; charset=UTF-8'},
body_data)
req = Request.blank('/v1/a/c')
status, headers, body = self.call_s3api(req)
self.assertEqual(body, body_data)
class TestS3ApiMiddleware(S3ApiTestCase):
def setUp(self):
super(TestS3ApiMiddleware, self).setUp()
self.swift.register('GET', '/something', swob.HTTPOk, {}, 'FAKE APP')
def test_non_s3_request_passthrough(self):
req = Request.blank('/something')
status, headers, body = self.call_s3api(req)
self.assertEqual(body, b'FAKE APP')
def test_bad_format_authorization(self):
req = Request.blank('/something',
headers={'Authorization': 'hoge',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_bad_method(self):
req = Request.blank('/',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
def test_bad_method_but_method_exists_in_controller(self):
req = Request.blank(
'/bucket',
environ={'REQUEST_METHOD': '_delete_segments_bucket'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'MethodNotAllowed')
def test_path_info_encode(self):
bucket_name = 'b%75cket'
object_name = 'ob%6aect:1'
self.swift.register('GET', '/v1/AUTH_test/bucket/object:1',
swob.HTTPOk, {}, None)
req = Request.blank('/%s/%s' % (bucket_name, object_name),
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
raw_path_info = "/%s/%s" % (bucket_name, object_name)
path_info = req.environ['PATH_INFO']
self.assertEqual(path_info, unquote(raw_path_info))
self.assertEqual(req.path, quote(path_info))
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket/object:1',
req.environ['swift.backend_path'])
def test_canonical_string_v2(self):
"""
The hashes here were generated by running the same requests against
boto.utils.canonical_string
"""
def canonical_string(path, headers):
if '?' in path:
path, query_string = path.split('?', 1)
else:
query_string = ''
env = {
'REQUEST_METHOD': 'GET',
'PATH_INFO': path,
'QUERY_STRING': query_string,
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
}
for header, value in headers.items():
header = 'HTTP_' + header.replace('-', '_').upper()
if header in ('HTTP_CONTENT_TYPE', 'HTTP_CONTENT_LENGTH'):
header = header[5:]
env[header] = value
with patch('swift.common.middleware.s3api.s3request.'
'S3Request._validate_headers'), \
patch('swift.common.middleware.s3api.s3request.'
'S3Request._validate_dates'):
req = S3Request(env)
return req.environ['s3api.auth_details']['string_to_sign']
def verify(hash, path, headers):
s = canonical_string(path, headers)
self.assertEqual(hash, hashlib.md5(s).hexdigest())
verify('6dd08c75e42190a1ce9468d1fd2eb787', '/bucket/object',
{'Content-Type': 'text/plain', 'X-Amz-Something': 'test',
'Date': 'whatever'})
verify('c8447135da232ae7517328f3429df481', '/bucket/object',
{'Content-Type': 'text/plain', 'X-Amz-Something': 'test'})
verify('bf49304103a4de5c325dce6384f2a4a2', '/bucket/object',
{'content-type': 'text/plain'})
verify('be01bd15d8d47f9fe5e2d9248cc6f180', '/bucket/object', {})
verify('e9ec7dca45eef3e2c7276af23135e896', '/bucket/object',
{'Content-MD5': 'somestuff'})
verify('a822deb31213ad09af37b5a7fe59e55e', '/bucket/object?acl', {})
verify('cce5dd1016595cb706c93f28d3eaa18f', '/bucket/object',
{'Content-Type': 'text/plain', 'X-Amz-A': 'test',
'X-Amz-Z': 'whatever', 'X-Amz-B': 'lalala',
'X-Amz-Y': 'lalalalalalala'})
verify('7506d97002c7d2de922cc0ec34af8846', '/bucket/object',
{'Content-Type': None, 'X-Amz-Something': 'test'})
verify('28f76d6162444a193b612cd6cb20e0be', '/bucket/object',
{'Content-Type': None,
'X-Amz-Date': 'Mon, 11 Jul 2011 10:52:57 +0000',
'Date': 'Tue, 12 Jul 2011 10:52:57 +0000'})
verify('ed6971e3eca5af4ee361f05d7c272e49', '/bucket/object',
{'Content-Type': None,
'Date': 'Tue, 12 Jul 2011 10:52:57 +0000'})
verify('41ecd87e7329c33fea27826c1c9a6f91', '/bucket/object?cors', {})
verify('d91b062f375d8fab407d6dab41fd154e', '/bucket/object?tagging',
{})
verify('ebab878a96814b30eb178e27efb3973f', '/bucket/object?restore',
{})
verify('f6bf1b2d92b054350d3679d28739fc69', '/bucket/object?'
'response-cache-control&response-content-disposition&'
'response-content-encoding&response-content-language&'
'response-content-type&response-expires', {})
str1 = canonical_string('/', headers={'Content-Type': None,
'X-Amz-Something': 'test'})
str2 = canonical_string('/', headers={'Content-Type': '',
'X-Amz-Something': 'test'})
str3 = canonical_string('/', headers={'X-Amz-Something': 'test'})
self.assertEqual(str1, str2)
self.assertEqual(str2, str3)
# Note that boto does not do proper stripping (as of 2.42.0).
# These were determined by examining the StringToSignBytes element of
# resulting SignatureDoesNotMatch errors from AWS.
str1 = canonical_string('/', {'Content-Type': 'text/plain',
'Content-MD5': '##'})
str2 = canonical_string('/', {'Content-Type': '\x01\x02text/plain',
'Content-MD5': '\x1f ##'})
str3 = canonical_string('/', {'Content-Type': 'text/plain \x10',
'Content-MD5': '##\x18'})
self.assertEqual(str1, str2)
self.assertEqual(str2, str3)
def test_signed_urls_expired(self):
expire = '1000000000'
req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
'AWSAccessKeyId=test:tester' % expire,
environ={'REQUEST_METHOD': 'GET'},
headers={'Date': self.get_date_header()})
req.headers['Date'] = datetime.utcnow()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_signed_urls(self):
# Set expire to last 32b timestamp value
# This number can't be higher, because it breaks tests on 32b systems
expire = '2147483647' # 19 Jan 2038 03:14:07
utc_date = datetime.utcnow()
req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
'AWSAccessKeyId=test:tester&Timestamp=%s' %
(expire, utc_date.isoformat().rsplit('.')[0]),
environ={'REQUEST_METHOD': 'GET'},
headers={'Date': self.get_date_header()})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket/object',
req.environ['swift.backend_path'])
for _, path, headers in self.swift.calls_with_headers:
self.assertNotIn('Authorization', headers)
def test_signed_urls_no_timestamp(self):
expire = '2147483647' # 19 Jan 2038 03:14:07
req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
'AWSAccessKeyId=test:tester' % expire,
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
# Curious! But actually S3 doesn't verify any x-amz-date/date headers
# for signed_url access and it also doesn't check timestamp
self.assertEqual(status.split()[0], '200')
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket/object',
req.environ['swift.backend_path'])
for _, _, headers in self.swift.calls_with_headers:
self.assertNotIn('Authorization', headers)
def test_signed_urls_invalid_expire(self):
expire = 'invalid'
req = Request.blank('/bucket/object?Signature=X&Expires=%s&'
'AWSAccessKeyId=test:tester' % expire,
environ={'REQUEST_METHOD': 'GET'},
headers={'Date': self.get_date_header()})
req.headers['Date'] = datetime.utcnow()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_signed_urls_no_sign(self):
expire = '2147483647' # 19 Jan 2038 03:14:07
req = Request.blank('/bucket/object?Expires=%s&'
'AWSAccessKeyId=test:tester' % expire,
environ={'REQUEST_METHOD': 'GET'},
headers={'Date': self.get_date_header()})
req.headers['Date'] = datetime.utcnow()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_signed_urls_no_access(self):
expire = '2147483647' # 19 Jan 2038 03:14:07
req = Request.blank('/bucket/object?Expires=%s&'
'AWSAccessKeyId=' % expire,
environ={'REQUEST_METHOD': 'GET'})
req.headers['Date'] = datetime.utcnow()
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_signed_urls_v4(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test:tester/%s/us-east-1/s3/aws4_request'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host'
'&X-Amz-Signature=X' % (
self.get_v4_amz_date_header().split('T', 1)[0],
self.get_v4_amz_date_header()),
headers={'Date': self.get_date_header()},
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket/object',
req.environ['swift.backend_path'])
self.assertEqual(status.split()[0], '200', body)
for _, _, headers in self.swift.calls_with_headers:
self.assertNotIn('Authorization', headers)
self.assertNotIn('X-Auth-Token', headers)
def test_signed_urls_v4_bad_credential(self):
def test(credential, message, extra=b''):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=%s'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host'
'&X-Amz-Signature=X' % (
credential,
self.get_v4_amz_date_header()),
headers={'Date': self.get_date_header()},
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '400', body)
self.assertEqual(self._get_error_code(body),
'AuthorizationQueryParametersError')
self.assertEqual(self._get_error_message(body), message)
self.assertIn(extra, body)
dt = self.get_v4_amz_date_header().split('T', 1)[0]
test('test:tester/not-a-date/us-east-1/s3/aws4_request',
'Invalid credential date "not-a-date". This date is not the same '
'as X-Amz-Date: "%s".' % dt)
test('test:tester/%s/us-west-1/s3/aws4_request' % dt,
"Error parsing the X-Amz-Credential parameter; the region "
"'us-west-1' is wrong; expecting 'us-east-1'",
b'<Region>us-east-1</Region>')
test('test:tester/%s/us-east-1/not-s3/aws4_request' % dt,
'Error parsing the X-Amz-Credential parameter; incorrect service '
'"not-s3". This endpoint belongs to "s3".')
test('test:tester/%s/us-east-1/s3/not-aws4_request' % dt,
'Error parsing the X-Amz-Credential parameter; incorrect '
'terminal "not-aws4_request". This endpoint uses "aws4_request".')
def test_signed_urls_v4_missing_x_amz_date(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_request'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host'
'&X-Amz-Signature=X',
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_signed_urls_v4_invalid_algorithm(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=FAKE'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_request'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host'
'&X-Amz-Signature=X' %
self.get_v4_amz_date_header(),
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
def test_signed_urls_v4_missing_signed_headers(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_request'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-Signature=X' %
self.get_v4_amz_date_header(),
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body),
'AuthorizationHeaderMalformed')
def test_signed_urls_v4_invalid_credentials(self):
req = Request.blank('/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host'
'&X-Amz-Signature=X' %
self.get_v4_amz_date_header(),
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_signed_urls_v4_missing_signature(self):
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_request'
'&X-Amz-Date=%s'
'&X-Amz-Expires=1000'
'&X-Amz-SignedHeaders=host' %
self.get_v4_amz_date_header(),
environ={'REQUEST_METHOD': 'GET'})
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_bucket_virtual_hosted_style(self):
req = Request.blank('/',
environ={'HTTP_HOST': 'bucket.localhost:80',
'REQUEST_METHOD': 'HEAD',
'HTTP_AUTHORIZATION':
'AWS test:tester:hmac'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket',
req.environ['swift.backend_path'])
def test_object_virtual_hosted_style(self):
req = Request.blank('/object',
environ={'HTTP_HOST': 'bucket.localhost:80',
'REQUEST_METHOD': 'HEAD',
'HTTP_AUTHORIZATION':
'AWS test:tester:hmac'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket/object',
req.environ['swift.backend_path'])
def test_token_generation(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/'
'object/123456789abcdef',
swob.HTTPOk, {}, None)
self.swift.register('PUT', '/v1/AUTH_test/bucket+segments/'
'object/123456789abcdef/1',
swob.HTTPCreated, {}, None)
req = Request.blank('/bucket/object?uploadId=123456789abcdef'
'&partNumber=1',
environ={'REQUEST_METHOD': 'PUT'})
req.headers['Authorization'] = 'AWS test:tester:hmac'
date_header = self.get_date_header()
req.headers['Date'] = date_header
with mock.patch('swift.common.middleware.s3api.s3request.'
'S3Request.check_signature') as mock_cs:
status, headers, body = self.call_s3api(req)
self.assertIn('swift.backend_path', req.environ)
self.assertEqual(
'/v1/AUTH_test/bucket+segments/object/123456789abcdef/1',
req.environ['swift.backend_path'])
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(req.environ['s3api.auth_details'], {
'access_key': 'test:tester',
'signature': 'hmac',
'string_to_sign': b'\n'.join([
b'PUT', b'', b'', date_header.encode('ascii'),
b'/bucket/object?partNumber=1&uploadId=123456789abcdef']),
'check_signature': mock_cs})
def test_non_ascii_user(self):
self.swift.register('HEAD', '/v1/AUTH_test/bucket+segments/'
'object/123456789abcdef',
swob.HTTPOk, {}, None)
self.swift.register('PUT', '/v1/AUTH_test/bucket+segments/'
'object/123456789abcdef/1',
swob.HTTPCreated, {}, None)
req = Request.blank('/bucket/object?uploadId=123456789abcdef'
'&partNumber=1',
environ={'REQUEST_METHOD': 'PUT'})
# NB: WSGI string for a snowman
req.headers['Authorization'] = 'AWS test:\xe2\x98\x83:sig'
date_header = self.get_date_header()
req.headers['Date'] = date_header
with mock.patch('swift.common.middleware.s3api.s3request.'
'S3Request.check_signature') as mock_cs:
status, headers, body = self.call_s3api(req)
self.assertIn('swift.backend_path', req.environ)
self.assertEqual(
'/v1/AUTH_test/bucket+segments/object/123456789abcdef/1',
req.environ['swift.backend_path'])
_, _, headers = self.swift.calls_with_headers[-1]
self.assertEqual(req.environ['s3api.auth_details'], {
'access_key': (u'test:\N{SNOWMAN}'.encode('utf-8') if six.PY2
else u'test:\N{SNOWMAN}'),
'signature': 'sig',
'string_to_sign': b'\n'.join([
b'PUT', b'', b'', date_header.encode('ascii'),
b'/bucket/object?partNumber=1&uploadId=123456789abcdef']),
'check_signature': mock_cs})
def test_invalid_uri(self):
req = Request.blank('/bucket/invalid\xffname',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidURI')
def test_object_create_bad_md5_unreadable(self):
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_CONTENT_MD5': '#'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidDigest')
def test_object_create_bad_md5_too_short(self):
too_short_digest = hashlib.md5(b'hey').digest()[:-1]
md5_str = base64.b64encode(too_short_digest).strip()
if not six.PY2:
md5_str = md5_str.decode('ascii')
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_CONTENT_MD5': md5_str},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidDigest')
def test_object_create_bad_md5_too_long(self):
too_long_digest = hashlib.md5(b'hey').digest() + b'suffix'
md5_str = base64.b64encode(too_long_digest).strip()
if not six.PY2:
md5_str = md5_str.decode('ascii')
req = Request.blank(
'/bucket/object',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_CONTENT_MD5': md5_str},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidDigest')
def test_invalid_metadata_directive(self):
req = Request.blank('/',
environ={'REQUEST_METHOD': 'GET',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_X_AMZ_METADATA_DIRECTIVE':
'invalid'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
def test_invalid_storage_class(self):
req = Request.blank('/',
environ={'REQUEST_METHOD': 'GET',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z',
'HTTP_X_AMZ_STORAGE_CLASS': 'INVALID'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidStorageClass')
def test_invalid_ssc(self):
req = Request.blank('/',
environ={'REQUEST_METHOD': 'GET',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
headers={'x-amz-server-side-encryption': 'invalid',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'InvalidArgument')
def _test_unsupported_header(self, header, value=None):
if value is None:
value = 'value'
req = Request.blank('/error',
environ={'REQUEST_METHOD': 'GET',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
headers={header: value,
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'NotImplemented')
def test_mfa(self):
self._test_unsupported_header('x-amz-mfa')
@mock.patch.object(utils, '_swift_admin_info', new_callable=dict)
def test_server_side_encryption(self, mock_info):
sse_header = 'x-amz-server-side-encryption'
self._test_unsupported_header(sse_header, 'AES256')
self._test_unsupported_header(sse_header, 'aws:kms')
utils.register_swift_info('encryption', admin=True, enabled=False)
self._test_unsupported_header(sse_header, 'AES256')
self._test_unsupported_header(sse_header, 'aws:kms')
utils.register_swift_info('encryption', admin=True, enabled=True)
# AES256 now works
self.swift.register('PUT', '/v1/AUTH_X/bucket/object',
swob.HTTPCreated, {}, None)
req = Request.blank('/bucket/object',
environ={'REQUEST_METHOD': 'PUT',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
headers={sse_header: 'AES256',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status, '200 OK')
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_X/bucket/object',
req.environ['swift.backend_path'])
# ...but aws:kms continues to fail
self._test_unsupported_header(sse_header, 'aws:kms')
def test_website_redirect_location(self):
self._test_unsupported_header('x-amz-website-redirect-location')
def test_aws_chunked(self):
self._test_unsupported_header('content-encoding', 'aws-chunked')
# https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-streaming.html
# has a multi-encoding example:
#
# > Amazon S3 supports multiple content encodings. For example:
# >
# > Content-Encoding : aws-chunked,gzip
# > That is, you can specify your custom content-encoding when using
# > Signature Version 4 streaming API.
self._test_unsupported_header('Content-Encoding', 'aws-chunked,gzip')
# Some clients skip the content-encoding,
# such as minio-go and aws-sdk-java
self._test_unsupported_header('x-amz-content-sha256',
'STREAMING-AWS4-HMAC-SHA256-PAYLOAD')
self._test_unsupported_header('x-amz-decoded-content-length')
def test_object_tagging(self):
self._test_unsupported_header('x-amz-tagging')
def _test_unsupported_resource(self, resource):
req = Request.blank('/error?' + resource,
environ={'REQUEST_METHOD': 'GET',
'HTTP_AUTHORIZATION': 'AWS X:Y:Z'},
headers={'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'NotImplemented')
def test_notification(self):
self._test_unsupported_resource('notification')
def test_policy(self):
self._test_unsupported_resource('policy')
def test_request_payment(self):
self._test_unsupported_resource('requestPayment')
def test_torrent(self):
self._test_unsupported_resource('torrent')
def test_website(self):
self._test_unsupported_resource('website')
def test_cors(self):
self._test_unsupported_resource('cors')
def test_tagging(self):
req = Request.blank('/bucket?tagging',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200')
req = Request.blank('/bucket?tagging',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'NotImplemented')
req = Request.blank('/bucket?tagging',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), 'NotImplemented')
def test_restore(self):
self._test_unsupported_resource('restore')
def test_unsupported_method(self):
req = Request.blank('/bucket?acl',
environ={'REQUEST_METHOD': 'POST'},
headers={'Authorization': 'AWS test:tester:hmac',
'Date': self.get_date_header()})
status, headers, body = self.call_s3api(req)
elem = fromstring(body, 'Error')
self.assertEqual(elem.find('./Code').text, 'MethodNotAllowed')
self.assertEqual(elem.find('./Method').text, 'POST')
self.assertEqual(elem.find('./ResourceType').text, 'ACL')
def test_registered_defaults(self):
filter_factory(self.conf)
swift_info = utils.get_swift_info()
self.assertTrue('s3api' in swift_info)
self.assertEqual(swift_info['s3api'].get('max_bucket_listing'),
self.conf.max_bucket_listing)
self.assertEqual(swift_info['s3api'].get('max_parts_listing'),
self.conf.max_parts_listing)
self.assertEqual(swift_info['s3api'].get('max_upload_part_num'),
self.conf.max_upload_part_num)
self.assertEqual(swift_info['s3api'].get('max_multi_delete_objects'),
self.conf.max_multi_delete_objects)
def test_check_pipeline(self):
with patch("swift.common.middleware.s3api.s3api.loadcontext"), \
patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
as pipeline:
self.conf.auth_pipeline_check = True
self.conf.__file__ = ''
pipeline.return_value = 's3api tempauth proxy-server'
self.s3api.check_pipeline(self.conf)
# This *should* still work; authtoken will remove our auth details,
# but the X-Auth-Token we drop in will remain
# if we found one in the response
pipeline.return_value = 's3api s3token authtoken keystoneauth ' \
'proxy-server'
self.s3api.check_pipeline(self.conf)
# This should work now; no more doubled-up requests to keystone!
pipeline.return_value = 's3api s3token keystoneauth proxy-server'
self.s3api.check_pipeline(self.conf)
pipeline.return_value = 's3api swauth proxy-server'
self.s3api.check_pipeline(self.conf)
# Note that authtoken would need to have delay_auth_decision=True
pipeline.return_value = 's3api authtoken s3token keystoneauth ' \
'proxy-server'
self.s3api.check_pipeline(self.conf)
pipeline.return_value = 's3api proxy-server'
with self.assertRaises(ValueError) as cm:
self.s3api.check_pipeline(self.conf)
self.assertIn('expected auth between s3api and proxy-server',
cm.exception.args[0])
pipeline.return_value = 'proxy-server'
with self.assertRaises(ValueError) as cm:
self.s3api.check_pipeline(self.conf)
self.assertIn("missing filters ['s3api']",
cm.exception.args[0])
def test_s3api_initialization_with_disabled_pipeline_check(self):
with patch("swift.common.middleware.s3api.s3api.loadcontext"), \
patch("swift.common.middleware.s3api.s3api.PipelineWrapper") \
as pipeline:
# Disable pipeline check
self.conf.auth_pipeline_check = False
self.conf.__file__ = ''
pipeline.return_value = 's3api tempauth proxy-server'
self.s3api.check_pipeline(self.conf)
pipeline.return_value = 's3api s3token authtoken keystoneauth ' \
'proxy-server'
self.s3api.check_pipeline(self.conf)
pipeline.return_value = 's3api swauth proxy-server'
self.s3api.check_pipeline(self.conf)
pipeline.return_value = 's3api authtoken s3token keystoneauth ' \
'proxy-server'
self.s3api.check_pipeline(self.conf)
pipeline.return_value = 's3api proxy-server'
self.s3api.check_pipeline(self.conf)
pipeline.return_value = 'proxy-server'
with self.assertRaises(ValueError):
self.s3api.check_pipeline(self.conf)
def test_signature_v4(self):
environ = {
'REQUEST_METHOD': 'GET'}
authz_header = 'AWS4-HMAC-SHA256 ' + ', '.join([
'Credential=test:tester/%s/us-east-1/s3/aws4_request' %
self.get_v4_amz_date_header().split('T', 1)[0],
'SignedHeaders=host;x-amz-date',
'Signature=X',
])
headers = {
'Authorization': authz_header,
'X-Amz-Date': self.get_v4_amz_date_header(),
'X-Amz-Content-SHA256': '0123456789'}
req = Request.blank('/bucket/object', environ=environ, headers=headers)
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '200', body)
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_test/bucket/object',
req.environ['swift.backend_path'])
for _, _, headers in self.swift.calls_with_headers:
self.assertEqual(authz_header, headers['Authorization'])
self.assertNotIn('X-Auth-Token', headers)
def test_signature_v4_no_date(self):
environ = {
'REQUEST_METHOD': 'GET'}
headers = {
'Authorization':
'AWS4-HMAC-SHA256 '
'Credential=test:tester/20130524/us-east-1/s3/aws4_request, '
'SignedHeaders=host;range;x-amz-date,'
'Signature=X',
'X-Amz-Content-SHA256': '0123456789'}
req = Request.blank('/bucket/object', environ=environ, headers=headers)
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '403')
self.assertEqual(self._get_error_code(body), 'AccessDenied')
def test_signature_v4_no_payload(self):
environ = {
'REQUEST_METHOD': 'GET'}
headers = {
'Authorization':
'AWS4-HMAC-SHA256 '
'Credential=test:tester/%s/us-east-1/s3/aws4_request, '
'SignedHeaders=host;x-amz-date,'
'Signature=X' % self.get_v4_amz_date_header().split('T', 1)[0],
'X-Amz-Date': self.get_v4_amz_date_header()}
req = Request.blank('/bucket/object', environ=environ, headers=headers)
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '400')
self.assertEqual(self._get_error_code(body), 'InvalidRequest')
self.assertEqual(
self._get_error_message(body),
'Missing required header for this request: x-amz-content-sha256')
def test_signature_v4_bad_authorization_string(self):
def test(auth_str, error, msg, extra=b''):
environ = {
'REQUEST_METHOD': 'GET'}
headers = {
'Authorization': auth_str,
'X-Amz-Date': self.get_v4_amz_date_header(),
'X-Amz-Content-SHA256': '0123456789'}
req = Request.blank('/bucket/object', environ=environ,
headers=headers)
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(self._get_error_code(body), error)
self.assertEqual(self._get_error_message(body), msg)
self.assertIn(extra, body)
auth_str = ('AWS4-HMAC-SHA256 '
'SignedHeaders=host;x-amz-date,'
'Signature=X')
test(auth_str, 'AccessDenied', 'Access Denied.')
auth_str = (
'AWS4-HMAC-SHA256 '
'Credential=test:tester/20130524/us-east-1/s3/aws4_request, '
'Signature=X')
test(auth_str, 'AuthorizationHeaderMalformed',
'The authorization header is malformed; the authorization '
'header requires three components: Credential, SignedHeaders, '
'and Signature.')
auth_str = ('AWS4-HMAC-SHA256 '
'Credential=test:tester/%s/us-west-2/s3/aws4_request, '
'Signature=X, SignedHeaders=host;x-amz-date' %
self.get_v4_amz_date_header().split('T', 1)[0])
test(auth_str, 'AuthorizationHeaderMalformed',
"The authorization header is malformed; "
"the region 'us-west-2' is wrong; expecting 'us-east-1'",
b'<Region>us-east-1</Region>')
auth_str = ('AWS4-HMAC-SHA256 '
'Credential=test:tester/%s/us-east-1/not-s3/aws4_request, '
'Signature=X, SignedHeaders=host;x-amz-date' %
self.get_v4_amz_date_header().split('T', 1)[0])
test(auth_str, 'AuthorizationHeaderMalformed',
'The authorization header is malformed; '
'incorrect service "not-s3". This endpoint belongs to "s3".')
auth_str = ('AWS4-HMAC-SHA256 '
'Credential=test:tester/%s/us-east-1/s3/not-aws4_request, '
'Signature=X, SignedHeaders=host;x-amz-date' %
self.get_v4_amz_date_header().split('T', 1)[0])
test(auth_str, 'AuthorizationHeaderMalformed',
'The authorization header is malformed; '
'incorrect terminal "not-aws4_request". '
'This endpoint uses "aws4_request".')
auth_str = (
'AWS4-HMAC-SHA256 '
'Credential=test:tester/20130524/us-east-1/s3/aws4_request, '
'SignedHeaders=host;x-amz-date')
test(auth_str, 'AccessDenied', 'Access Denied.')
def test_canonical_string_v4(self):
def _get_req(path, environ):
if '?' in path:
path, query_string = path.split('?', 1)
else:
query_string = ''
env = {
'REQUEST_METHOD': 'GET',
'PATH_INFO': path,
'QUERY_STRING': query_string,
'HTTP_DATE': 'Mon, 09 Sep 2011 23:36:00 GMT',
'HTTP_X_AMZ_CONTENT_SHA256':
'e3b0c44298fc1c149afbf4c8996fb924'
'27ae41e4649b934ca495991b7852b855',
'HTTP_AUTHORIZATION':
'AWS4-HMAC-SHA256 '
'Credential=X:Y/20110909/us-east-1/s3/aws4_request, '
'SignedHeaders=content-md5;content-type;date, '
'Signature=x',
}
fake_time = calendar.timegm((2011, 9, 9, 23, 36, 0))
env.update(environ)
with patch('swift.common.middleware.s3api.s3request.'
'S3Request._validate_headers'), \
patch('swift.common.middleware.s3api.utils.time.time',
return_value=fake_time):
req = SigV4Request(env, location=self.conf.location)
return req
def canonical_string(path, environ):
return _get_req(path, environ)._canonical_request()
def verify(hash_val, path, environ):
# See http://docs.aws.amazon.com/general/latest/gr
# /signature-v4-test-suite.html for where location, service, and
# signing key came from
with patch.object(self.conf, 'location', 'us-east-1'), \
patch.object(swift.common.middleware.s3api.s3request,
'SERVICE', 'host'):
req = _get_req(path, environ)
hash_in_sts = req._string_to_sign().split(b'\n')[3]
self.assertEqual(hash_val, hash_in_sts.decode('ascii'))
self.assertTrue(req.check_signature(
'wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY'))
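        # For reference (summary added for clarity; see the AWS SigV4 docs for
        # the exact rules), the canonical request hashed by verify() is:
        #   HTTPMethod \n CanonicalURI \n CanonicalQueryString \n
        #   CanonicalHeaders \n SignedHeaders \n HashedPayload
        # and its SHA256 hash is the fourth line of the string-to-sign, which
        # is why the test reads _string_to_sign().split(b'\n')[3].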
# all next data got from aws4_testsuite from Amazon
# http://docs.aws.amazon.com/general/latest/gr/samples
# /aws4_testsuite.zip
# Each *expected* hash value is the 4th line in <test-name>.sts in the
# test suite.
# get-vanilla
env = {
'HTTP_AUTHORIZATION': (
'AWS4-HMAC-SHA256 '
'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
'SignedHeaders=date;host, '
'Signature=b27ccfbfa7df52a200ff74193ca6e32d'
'4b48b8856fab7ebf1c595d0670a7e470'),
'HTTP_HOST': 'host.foo.com'}
verify('366b91fb121d72a00f46bbe8d395f53a'
'102b06dfb7e79636515208ed3fa606b1',
'/', env)
# get-header-value-trim
env = {
'REQUEST_METHOD': 'POST',
'HTTP_AUTHORIZATION': (
'AWS4-HMAC-SHA256 '
'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
'SignedHeaders=date;host;p, '
'Signature=debf546796015d6f6ded8626f5ce9859'
'7c33b47b9164cf6b17b4642036fcb592'),
'HTTP_HOST': 'host.foo.com',
'HTTP_P': 'phfft'}
verify('dddd1902add08da1ac94782b05f9278c'
'08dc7468db178a84f8950d93b30b1f35',
'/', env)
# get-utf8 (not exact)
env = {
'HTTP_AUTHORIZATION': (
'AWS4-HMAC-SHA256 '
'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
'SignedHeaders=date;host, '
'Signature=8d6634c189aa8c75c2e51e106b6b5121'
'bed103fdb351f7d7d4381c738823af74'),
'HTTP_HOST': 'host.foo.com',
'RAW_PATH_INFO': '/%E1%88%B4'}
        # This might look weird because S3 doesn't actually accept utf-8
        # encoded multi-byte bucket names under the bucket-in-host name
        # constraint. However, aws4_testsuite only has a sample hash with a
        # utf-8 *bucket* name for verifying correctness (it can probably be
        # used for AWS resources other than S3), so to also test utf-8 we
        # skip the bucket name validation in the following test.
# NOTE: eventlet's PATH_INFO is unquoted
with patch('swift.common.middleware.s3api.s3request.'
'validate_bucket_name'):
verify('27ba31df5dbc6e063d8f87d62eb07143'
'f7f271c5330a917840586ac1c85b6f6b',
swob.wsgi_unquote('/%E1%88%B4'), env)
# get-vanilla-query-order-key
env = {
'HTTP_AUTHORIZATION': (
'AWS4-HMAC-SHA256 '
'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
'SignedHeaders=date;host, '
'Signature=0dc122f3b28b831ab48ba65cb47300de'
'53fbe91b577fe113edac383730254a3b'),
'HTTP_HOST': 'host.foo.com'}
verify('2f23d14fe13caebf6dfda346285c6d9c'
'14f49eaca8f5ec55c627dd7404f7a727',
'/?a=foo&b=foo', env)
# post-header-value-case
env = {
'REQUEST_METHOD': 'POST',
'HTTP_AUTHORIZATION': (
'AWS4-HMAC-SHA256 '
'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
'SignedHeaders=date;host;zoo, '
'Signature=273313af9d0c265c531e11db70bbd653'
'f3ba074c1009239e8559d3987039cad7'),
'HTTP_HOST': 'host.foo.com',
'HTTP_ZOO': 'ZOOBAR'}
verify('3aae6d8274b8c03e2cc96fc7d6bda4b9'
'bd7a0a184309344470b2c96953e124aa',
'/', env)
# post-x-www-form-urlencoded-parameters
env = {
'REQUEST_METHOD': 'POST',
'HTTP_AUTHORIZATION': (
'AWS4-HMAC-SHA256 '
'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
'SignedHeaders=date;host;content-type, '
'Signature=b105eb10c6d318d2294de9d49dd8b031'
'b55e3c3fe139f2e637da70511e9e7b71'),
'HTTP_HOST': 'host.foo.com',
'HTTP_X_AMZ_CONTENT_SHA256':
'3ba8907e7a252327488df390ed517c45'
'b96dead033600219bdca7107d1d3f88a',
'CONTENT_TYPE':
'application/x-www-form-urlencoded; charset=utf8'}
verify('c4115f9e54b5cecf192b1eaa23b8e88e'
'd8dc5391bd4fde7b3fff3d9c9fe0af1f',
'/', env)
# post-x-www-form-urlencoded
env = {
'REQUEST_METHOD': 'POST',
'HTTP_AUTHORIZATION': (
'AWS4-HMAC-SHA256 '
'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
'SignedHeaders=date;host;content-type, '
'Signature=5a15b22cf462f047318703b92e6f4f38'
'884e4a7ab7b1d6426ca46a8bd1c26cbc'),
'HTTP_HOST': 'host.foo.com',
'HTTP_X_AMZ_CONTENT_SHA256':
'3ba8907e7a252327488df390ed517c45'
'b96dead033600219bdca7107d1d3f88a',
'CONTENT_TYPE':
'application/x-www-form-urlencoded'}
verify('4c5c6e4b52fb5fb947a8733982a8a5a6'
'1b14f04345cbfe6e739236c76dd48f74',
'/', env)
# Note that boto does not do proper stripping (as of 2.42.0).
# These were determined by examining the StringToSignBytes element of
# resulting SignatureDoesNotMatch errors from AWS.
str1 = canonical_string('/', {'CONTENT_TYPE': 'text/plain',
'HTTP_CONTENT_MD5': '##'})
str2 = canonical_string('/', {'CONTENT_TYPE': '\x01\x02text/plain',
'HTTP_CONTENT_MD5': '\x1f ##'})
str3 = canonical_string('/', {'CONTENT_TYPE': 'text/plain \x10',
'HTTP_CONTENT_MD5': '##\x18'})
self.assertEqual(str1, str2)
self.assertEqual(str2, str3)
def test_mixture_param_v4(self):
# now we have an Authorization header
headers = {
'Authorization':
'AWS4-HMAC-SHA256 '
'Credential=test/20130524/us-east-1/s3/aws4_request_A, '
'SignedHeaders=hostA;rangeA;x-amz-dateA,'
'Signature=X',
'X-Amz-Date': self.get_v4_amz_date_header(),
'X-Amz-Content-SHA256': '0123456789'}
# and then, different auth info (Credential, SignedHeaders, Signature)
# in query
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_requestB'
'&X-Amz-SignedHeaders=hostB'
'&X-Amz-Signature=Y',
environ={'REQUEST_METHOD': 'GET'},
headers=headers)
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
# FIXME: should this fail as 400 or pass via query auth?
# for now, return 403 forbidden for safety
self.assertEqual(status.split()[0], '403', body)
# But if we are missing Signature in query param
req = Request.blank(
'/bucket/object'
'?X-Amz-Algorithm=AWS4-HMAC-SHA256'
'&X-Amz-Credential=test/20T20Z/us-east-1/s3/aws4_requestB'
'&X-Amz-SignedHeaders=hostB',
environ={'REQUEST_METHOD': 'GET'},
headers=headers)
req.content_type = 'text/plain'
status, headers, body = self.call_s3api(req)
self.assertEqual(status.split()[0], '403', body)
def test_s3api_with_only_s3_token(self):
self.swift = FakeSwift()
self.keystone_auth = KeystoneAuth(
self.swift, {'operator_roles': 'swift-user'})
self.s3_token = S3Token(
self.keystone_auth, {'auth_uri': 'https://fakehost/identity'})
self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
self.s3api.logger = debug_logger()
req = Request.blank(
'/bucket',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS access:signature',
'Date': self.get_date_header()})
self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
swob.HTTPCreated, {}, None)
self.swift.register('HEAD', '/v1/AUTH_TENANT_ID',
swob.HTTPOk, {}, None)
with patch.object(self.s3_token, '_json_request') as mock_req:
mock_resp = requests.Response()
mock_resp._content = json.dumps(GOOD_RESPONSE_V2).encode('ascii')
mock_resp.status_code = 201
mock_req.return_value = mock_resp
status, headers, body = self.call_s3api(req)
self.assertEqual(body, b'')
self.assertEqual(1, mock_req.call_count)
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_TENANT_ID/bucket',
req.environ['swift.backend_path'])
def test_s3api_with_only_s3_token_v3(self):
self.swift = FakeSwift()
self.keystone_auth = KeystoneAuth(
self.swift, {'operator_roles': 'swift-user'})
self.s3_token = S3Token(
self.keystone_auth, {'auth_uri': 'https://fakehost/identity'})
self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
self.s3api.logger = debug_logger()
req = Request.blank(
'/bucket',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS access:signature',
'Date': self.get_date_header()})
self.swift.register('PUT', '/v1/AUTH_PROJECT_ID/bucket',
swob.HTTPCreated, {}, None)
self.swift.register('HEAD', '/v1/AUTH_PROJECT_ID',
swob.HTTPOk, {}, None)
with patch.object(self.s3_token, '_json_request') as mock_req:
mock_resp = requests.Response()
mock_resp._content = json.dumps(GOOD_RESPONSE_V3).encode('ascii')
mock_resp.status_code = 200
mock_req.return_value = mock_resp
status, headers, body = self.call_s3api(req)
self.assertEqual(body, b'')
self.assertEqual(1, mock_req.call_count)
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_PROJECT_ID/bucket',
req.environ['swift.backend_path'])
def test_s3api_with_s3_token_and_auth_token(self):
self.swift = FakeSwift()
self.keystone_auth = KeystoneAuth(
self.swift, {'operator_roles': 'swift-user'})
self.auth_token = AuthProtocol(
self.keystone_auth, {'delay_auth_decision': 'True'})
self.s3_token = S3Token(
self.auth_token, {'auth_uri': 'https://fakehost/identity'})
self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
self.s3api.logger = debug_logger()
req = Request.blank(
'/bucket',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS access:signature',
'Date': self.get_date_header()})
self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
swob.HTTPCreated, {}, None)
self.swift.register('HEAD', '/v1/AUTH_TENANT_ID',
swob.HTTPOk, {}, None)
with patch.object(self.s3_token, '_json_request') as mock_req:
with patch.object(self.auth_token,
'_do_fetch_token') as mock_fetch:
# sanity check
self.assertIn('id', GOOD_RESPONSE_V2['access']['token'])
mock_resp = requests.Response()
mock_resp._content = json.dumps(
GOOD_RESPONSE_V2).encode('ascii')
mock_resp.status_code = 201
mock_req.return_value = mock_resp
mock_access_info = AccessInfoV2(GOOD_RESPONSE_V2)
mock_access_info.will_expire_soon = \
lambda stale_duration: False
mock_fetch.return_value = (MagicMock(), mock_access_info)
status, headers, body = self.call_s3api(req)
# Even though s3token got a token back from keystone, we drop
# it on the floor, resulting in a 401 Unauthorized at
# `swift.common.middleware.keystoneauth` because
# keystonemiddleware's auth_token strips out all auth headers,
# significantly 'X-Identity-Status'. Without a token, it then
# sets 'X-Identity-Status: Invalid' and never contacts
# Keystone.
self.assertEqual('403 Forbidden', status)
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_TENANT_ID/bucket',
req.environ['swift.backend_path'])
self.assertEqual(1, mock_req.call_count)
# it never even tries to contact keystone
self.assertEqual(0, mock_fetch.call_count)
def test_s3api_with_only_s3_token_in_s3acl(self):
self.swift = FakeSwift()
self.keystone_auth = KeystoneAuth(
self.swift, {'operator_roles': 'swift-user'})
self.s3_token = S3Token(
self.keystone_auth, {'auth_uri': 'https://fakehost/identity'})
self.conf['s3_acl'] = True
self.s3api = S3ApiMiddleware(self.s3_token, self.conf)
self.s3api.logger = debug_logger()
req = Request.blank(
'/bucket',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS access:signature',
'Date': self.get_date_header()})
self.swift.register('PUT', '/v1/AUTH_TENANT_ID/bucket',
swob.HTTPCreated, {}, None)
# For now, s3 acl commits the bucket owner acl via POST
# after PUT container, so we need to register the response here
self.swift.register('POST', '/v1/AUTH_TENANT_ID/bucket',
swob.HTTPNoContent, {}, None)
self.swift.register('TEST', '/v1/AUTH_TENANT_ID',
swob.HTTPMethodNotAllowed, {}, None)
with patch.object(self.s3_token, '_json_request') as mock_req:
mock_resp = requests.Response()
mock_resp._content = json.dumps(GOOD_RESPONSE_V2).encode('ascii')
mock_resp.status_code = 201
mock_req.return_value = mock_resp
status, headers, body = self.call_s3api(req)
self.assertEqual(body, b'')
self.assertIn('swift.backend_path', req.environ)
self.assertEqual('/v1/AUTH_TENANT_ID/bucket',
req.environ['swift.backend_path'])
self.assertEqual(1, mock_req.call_count)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
| 4,349,858,928,597,868,000 | 45.587566 | 79 | 0.553158 | false |
sibskull/synaptiks
|
scripts/try_keyboard_monitor.py
|
1
|
3711
|
#!/usr/bin/python2
# Copyright (c) 2010, 2011, Sebastian Wiesner <lunaryorn@googlemail.com>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import sys
from functools import partial
from PyQt4.QtGui import (QApplication, QMainWindow, QWidget, QLabel,
QVBoxLayout, QComboBox, QAction)
from synaptiks.monitors import AbstractKeyboardMonitor, create_keyboard_monitor
def main():
app = QApplication(sys.argv)
window = QMainWindow()
central_widget = QWidget(window)
start_action = QAction('Start', window)
stop_action = QAction('Stop', window)
toolbar = window.addToolBar('Monitor')
toolbar.addAction(start_action)
toolbar.addAction(stop_action)
central_layout = QVBoxLayout(central_widget)
monitor_name = QLabel(central_widget)
central_layout.addWidget(monitor_name)
state_label = QLabel(central_widget)
central_layout.addWidget(state_label)
combo_box = QComboBox(central_widget)
items = [
('No keys', AbstractKeyboardMonitor.IGNORE_NO_KEYS),
('Modifiers', AbstractKeyboardMonitor.IGNORE_MODIFIER_KEYS),
('Modifier combos', AbstractKeyboardMonitor.IGNORE_MODIFIER_COMBOS)]
for label, userdata in items:
combo_box.addItem(label, userdata)
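# itemData() returns a QVariant under PyQt4's default API; toPyObject()
# converts it back to the plain value stored above.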
def _update_ignore_keys(index):
monitor.keys_to_ignore = combo_box.itemData(index).toPyObject()
combo_box.currentIndexChanged[int].connect(_update_ignore_keys)
central_layout.addWidget(combo_box)
central_widget.setLayout(central_layout)
window.setCentralWidget(central_widget)
monitor = create_keyboard_monitor(window)
monitor_name.setText('Using monitor class {0}'.format(
monitor.__class__.__name__))
monitor.typingStarted.connect(partial(state_label.setText, 'typing'))
monitor.typingStopped.connect(partial(state_label.setText, 'not typing'))
start_action.triggered.connect(monitor.start)
stop_action.triggered.connect(monitor.stop)
stop_action.setEnabled(False)
monitor.started.connect(partial(start_action.setEnabled, False))
monitor.started.connect(partial(stop_action.setEnabled, True))
monitor.stopped.connect(partial(start_action.setEnabled, True))
monitor.stopped.connect(partial(stop_action.setEnabled, False))
window.show()
app.exec_()
if __name__ == '__main__':
main()
|
bsd-2-clause
| 7,346,558,691,500,575,000 | 39.336957 | 79 | 0.737807 | false |
riddlezyc/geolab
|
src/structure/Z.py
|
1
|
1474
|
# -*- coding: utf-8 -*-
# from framesplit import trajectory
# too slow using this module
import matplotlib.pyplot as plt
dirName = r"F:\simulations\asphaltenes\na-mont\TMBO-oil\water\373-continue/"
xyzName = 'all.xyz'
hetero = 'O' # 'oh' 'N' 'sp' 'O' 'Np' 'sp'
with open(dirName + xyzName, 'r') as foo:
coords = foo.readlines()
nAtoms = int(coords[0])
nFrames = int(len(coords) / (nAtoms + 2))
pos = []
for i in range(nFrames):
istart = i * (nAtoms + 2)
iend = (i + 1) * (nAtoms + 2)
pos.append(coords[istart:iend])
# for i in range(200):
# print coords[i]
heteroatom = 0
# all of my molecules have fewer than 200 atoms
for i in range(200):
x = pos[0][i].split()[0]
if x == hetero:
heteroatom = i
break
heteroZ = []
for p in pos:
# print p[heteroatom].split()[0]
zx = float(p[heteroatom].split()[3])
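# z-values below 10 are assumed to have wrapped across the periodic box
# (taken here to be about 80 in the same units), so shift them back up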
if zx < 10:
zx = zx + 80
heteroZ.append(zx)
with open(dirName + 'heteroZ.dat', 'w') as foo:
for i, z in enumerate(heteroZ):
print >> foo, "%3d %8.5f" % (i, z)
# z-position plot of the heteroatom
plt.figure(0, figsize=(8, 4))
figName = dirName + 'heteroZ.png'
plt.title('z of heteroatom', fontsize=20)
plt.plot(range(len(heteroZ)-1), heteroZ[1:], linewidth=2)
plt.grid(True)
plt.xlabel('steps')
plt.ylabel('Z')
plt.axis([0, len(heteroZ)*1.1, 0, max(heteroZ)*1.1])
plt.savefig(figName, format='png', dpi=300)
plt.close()
|
gpl-3.0
| 1,018,782,703,266,797,700 | 24.8 | 76 | 0.580054 | false |
kylewm/oauth-dropins
|
twitter_auth.py
|
1
|
2312
|
"""Utility functions for generating Twitter OAuth headers and making API calls.
"""
import logging
import urllib2
import appengine_config
import handlers
import requests
import requests_oauthlib
import tweepy
def auth_header(url, token_key, token_secret, method='GET'):
"""Generates an Authorization header and returns it in a header dict.
Args:
url: string
token_key: string
token_secret: string
method: string
Returns: single element dict with key 'Authorization'
"""
oauth1 = requests_oauthlib.OAuth1(
client_key=appengine_config.TWITTER_APP_KEY,
client_secret=appengine_config.TWITTER_APP_SECRET,
resource_owner_key=token_key,
resource_owner_secret=token_secret,
)
req = requests.Request(method=method, url=url, auth=oauth1).prepare()
logging.debug(
'Generated Authorization header from access token key %s... and secret %s...',
token_key[:4], token_secret[:4])
return req.headers
def signed_urlopen(url, token_key, token_secret, headers=None, **kwargs):
"""Wraps urllib2.urlopen() and adds an OAuth signature.
"""
if headers is None:
headers = {}
# if this is a post, move the body params into the URL. Tweepy's OAuth
# signing doesn't work if they're in the body; Twitter returns a 401.
data = kwargs.get('data')
if data:
method = 'POST'
url += ('&' if '?' in url else '?') + data
kwargs['data'] = ''
else:
method = 'GET'
headers.update(auth_header(url, token_key, token_secret, method=method))
timeout = kwargs.pop('timeout', appengine_config.HTTP_TIMEOUT)
logging.debug('Fetching %s', url)
try:
return urllib2.urlopen(urllib2.Request(url, headers=headers, **kwargs),
timeout=timeout)
except BaseException, e:
handlers.interpret_http_exception(e)
raise
def tweepy_auth(token_key, token_secret):
"""Returns a tweepy.OAuthHandler.
"""
assert (appengine_config.TWITTER_APP_KEY and
appengine_config.TWITTER_APP_SECRET), (
"Please fill in the twitter_app_key and twitter_app_secret files in "
"your app's root directory.")
handler = tweepy.OAuthHandler(appengine_config.TWITTER_APP_KEY,
appengine_config.TWITTER_APP_SECRET)
handler.set_access_token(token_key, token_secret)
return handler
|
unlicense
| -1,618,857,978,968,227,300 | 29.025974 | 82 | 0.687284 | false |
carthach/essentia
|
src/python/essentia/translate.py
|
1
|
24729
|
# Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import inspect, types
import streaming
import _essentia
import common
from streaming import _reloadStreamingAlgorithms
# genetic marker used to track which composite parameters configure which inner algorithms
class MarkerObject(object):
def __init__(self, default_value=None):
self.default_value = default_value
edt_parameter_code = { common.Edt.STRING: 'String',
common.Edt.INTEGER: 'Int',
common.Edt.VECTOR_REAL: 'VectorReal',
common.Edt.VECTOR_VECTOR_REAL: 'VectorVectorReal',
common.Edt.REAL: 'Real',
common.Edt.BOOL: 'Bool'}
edt_cpp_code = { common.Edt.STRING: 'string',
common.Edt.INTEGER: 'int',
common.Edt.VECTOR_INTEGER: 'vector<int>',
common.Edt.VECTOR_STRING: 'vector<string>',
common.Edt.VECTOR_REAL: 'vector<Real>',
common.Edt.VECTOR_VECTOR_REAL: 'vector<vector<Real> >',
common.Edt.REAL: 'Real',
common.Edt.BOOL: 'bool'}
# html escape codes used in edt_dot_code:
# &lt; --> less than or <
# &gt; --> greater than or >
# &#58; --> colon or :
edt_dot_code = { common.Edt.STRING: 'string',
common.Edt.INTEGER: 'int',
common.Edt.VECTOR_INTEGER: 'vector<int>',
common.Edt.VECTOR_STRING: 'vector<string>',
common.Edt.VECTOR_REAL: 'vector<Real>',
common.Edt.VECTOR_VECTOR_REAL: 'vector<vector<Real> >',
common.Edt.REAL: 'Real',
common.Edt.BOOL: 'bool',
common.Edt.STEREOSAMPLE: 'StereoSample',
common.Edt.MATRIX_REAL: 'TNT::Array2D<Real>'} #: -> colon
# finds the EDT of a parameter of an algorithm given the name of the parameter, the marker_obj used
# to configure that parameter, and the configure_log
def find_edt(composite_param_name, marker_obj, configure_log):
# find inner algorithm and inner parameter name that this composite_param will configure
for inner_algo_name, properties in configure_log.iteritems():
for inner_param_name, value in properties['parameters'].iteritems():
if marker_obj == value:
return properties['instance'].paramType(inner_param_name)
raise RuntimeError('Could not determine parameter type of composite algorithm\'s \''+composite_param_name+'\' parameter')
# given a reference to an inner algorithm and the configure_log, returns the name of the algo (use
# lower() if referring to the member var name)
def inner_algo_name(instance, configure_log):
for algo_name, properties in configure_log.iteritems():
if instance == properties['instance']:
return algo_name
raise RuntimeError('Could not find the name of the inner algorithm')
def generate_dot_algo(algo_name, algo_inst):
'''declares an algorithm in dot'''
dot_code = '_'+algo_name.lower()+' [shape="box", style="rounded,filled", fillcolor="grey50", color="transparent", \n'
indent = ' '*len('_'+algo_name)
dot_code += indent+' label=<\n'
indent += ' '*len('label=<')
dot_code += generate_dot_algo_label(algo_inst, indent)
dot_code += '>]\n\n'
return dot_code
def generate_dot_algo_label(algo_inst, indent=''):
### each label of a node consists of algo inputs, algo name,
### configuration parameters and algo outputs
dot_code = indent+'<table border="0"><tr>\n'+\
indent+' <td><table border="0" bgcolor="white">\n'
# add inputs:
if not len(algo_inst.inputNames()): dot_code += ''
else:
for name in algo_inst.inputNames():
typestr = edt_dot_code[ algo_inst.getInputType(name) ]
dot_code += indent+' <tr><td port="'+name+'_i">'+name+'<br/>['+typestr+']</td></tr>\n'
dot_code += indent+' </table></td>\n\n'+\
indent+' <td><table border="0">\n'
# add algo name:
dot_code += indent+' <tr><td valign="top" colspan="2"><font color="white" point-size="18">'+algo_inst.name()+'</font></td></tr>\n'
# add parameters:
if not algo_inst.parameterNames(): dot_code += ''
else:
for name in algo_inst.parameterNames():
value = algo_inst.paramValue(name)
dot_code += indent+' <tr>\n'+\
indent+' <td border="0" valign="top" align="right"><font color="white">'+name+'</font></td>\n'+\
indent+' <td border="0" valign="top" align="left"><font color="white">'+str(value)+'</font></td>\n'+\
indent+' </tr>\n'
dot_code += indent+' </table></td>\n\n'+\
indent+' <td><table border="0" bgcolor="white">\n'
# add outputs:
if not len(algo_inst.outputNames()): dot_code += ''
else:
for name in algo_inst.outputNames():
typestr = edt_dot_code[ algo_inst.getOutputType(name) ]
dot_code += indent+' <tr><td port="'+name+'_o">'+name+'<br/>['+typestr+']</td></tr>\n'
dot_code += indent+' </table></td>\n'+\
indent+'</tr></table>'
return dot_code
def generate_dot_network(configure_log, composite_algo_inst):
# make connections
dot_code ='\n// connecting the network\n'
for algo_name, properties in configure_log.iteritems():
for left_connector, right_connectors in properties['instance'].connections.iteritems():
for right_connector in right_connectors:
if isinstance(right_connector, streaming._StreamConnector):
dot_code += ' _'+inner_algo_name(left_connector.output_algo, configure_log).lower()+':'+left_connector.name+'_o:e'+' -> '+\
'_'+inner_algo_name(right_connector.input_algo, configure_log).lower()+':'+right_connector.name + '_i:w;\n'
if isinstance(right_connector, types.NoneType):
inneralgoname = inner_algo_name(left_connector.output_algo, configure_log).lower()
dot_code += ' nowhere_'+inneralgoname+' [shape="box", style="rounded,filled", fillcolor="grey50", color="transparent" label="Nowhere" fontcolor="white" fontsize="18"];\n'+\
' _'+inneralgoname+':'+left_connector.name+'_o:e'+' -> nowhere_'+inneralgoname+';\n'
# make connections from floating inputs
for name, connector in composite_algo_inst.inputs.iteritems():
innerinputname = connector.name
inneralgoname = inner_algo_name(connector.input_algo, configure_log).lower()
dot_code += ' '+name+':e -> _'+inneralgoname+':'+innerinputname+'_i:w;\n'
# make connections from floating outputs
for name, connector in composite_algo_inst.outputs.iteritems():
inneroutputname = connector.name
inneralgoname = inner_algo_name(connector.output_algo, configure_log).lower()
dot_code += ' _'+inneralgoname+':'+inneroutputname+'_o:e -> '+name+':w;\n'
return dot_code
def generate_dot_cluster(configure_log, clustername, composite_algo_inst):
''' creates a cluster in dot language surrounded by a dashed, lightgrey line'''
dot_code = ' subgraph cluster_0 {\n'\
' color=lightgrey;\n'\
' style=dashed;\n'\
' label='+clustername+';\n\n'
# for each algo in the cluster, declare it in dot:
for algo_name, properties in configure_log.iteritems():
dot_code += generate_dot_algo(algo_name, properties['instance'])
# create the connections
dot_code += generate_dot_network(configure_log, composite_algo_inst)
# close the cluster code
dot_code += ' }\n'
return dot_code
def translate(composite_algo, output_filename, dot_graph=False):
'''Takes in a class that is derived from essentia.streaming.CompositeBase and an output-filename
and writes output-filename.h and output-filename.cpp versions of the given class.'''
if not inspect.isclass(composite_algo):
raise TypeError('"composite_algo" argument must be a class')
if not streaming.CompositeBase in inspect.getmro(composite_algo):
raise TypeError('"composite_algo" argument must inherit from essentia.streaming.CompositeBase')
param_names, _, _, default_values = inspect.getargspec(composite_algo.__init__)
param_names.remove('self')
# these marker objects are used to track where config params travel in the network
marker_objs = {}
if not default_values and param_names: # python vars have no type so we cannot know what type they are!!
raise TypeError('"composite_algo" arguments must have default values')
if param_names:
for param_name, value in zip(param_names, default_values):
marker_objs[param_name] = MarkerObject(value)
### Before we call their function we need to neuter all of the configure methods of each
### streaming algorithm so that our markers won't cause the configure method to vomit
configure_log = {}
def dummy_configure(self, **kwargs):
lbl = 0
algo_name = self.name()+'_'+str(lbl)
# increment lbl to generate a unique name for inner algo
lowered_algo_names = [name.lower() for name in configure_log.keys()]
while algo_name.lower() in lowered_algo_names:
algo_name = algo_name[:algo_name.index('_')+1] + str(lbl)
lbl +=1
# algo_name is now unique
configure_log[algo_name] = {}
configure_log[algo_name]['instance'] = self
configure_log[algo_name]['parameters'] = kwargs
# We need to actually call the internal configure method because algorithms like silencerate
# need to be configured so we can use its outputs. However we can't use our marker objects,
# so we remove the marker objects that don't have a default value associated with them, and
# for those that do have a default value, we use that value instead of the MarkerObject
# itself
kwargs_no_markers = dict(kwargs)
for key, value in kwargs.iteritems():
if value in marker_objs.values():
if value.default_value == None:
del kwargs_no_markers[key]
else:
kwargs_no_markers[key] = value.default_value
self.real_configure(**kwargs_no_markers)
# iterate over all streaming_algos
streaming_algos = inspect.getmembers( streaming,
lambda obj: inspect.isclass(obj) and \
_essentia.StreamingAlgorithm in inspect.getmro(obj) )
streaming_algos = [member[1] for member in streaming_algos]
for algo in streaming_algos:
algo.real_configure = algo.configure
algo.configure = dummy_configure
### Now generate an instance of their composite algorithm ###
algo_inst = composite_algo(**marker_objs)
# overwrite the dummy configure with the real configure method, so
# translate can be called several times in the same file for a different
# compositebase without entering in an infinite loop
for algo in streaming_algos:
algo.configure = algo.real_configure
### Do some checking on their network ###
for algo in [ logitem['instance'] for logitem in configure_log.values() ]:
if isinstance(algo, streaming.VectorInput):
raise TypeError('essentia.streaming.VectorInput algorithms are not allowed for translatable composite algorithms')
if isinstance(algo, streaming.AudioLoader) or \
isinstance(algo, streaming.EasyLoader) or \
isinstance(algo, streaming.MonoLoader) or \
isinstance(algo, streaming.EqloudLoader):
raise TypeError('No type of AudioLoader is allowed for translatable composite algorithms')
if isinstance(algo, streaming.AudioWriter) or \
isinstance(algo, streaming.MonoWriter):
raise TypeError('No type of AudioWriter is allowed for translatable composite algorithms')
if isinstance(algo, streaming.FileOutput):
raise TypeError('essentia.streaming.FileOutput algorithms are not allowed for translatable composite algorithms')
def sort_by_key(configure_log):
# sort algorithms and conf values:
sitems = configure_log.items()
sitems.sort()
sorted_algos = []
sorted_params= []
for k,v in sitems:
sorted_params.append(v)
sorted_algos.append(k)
return sorted_algos, sorted_params
sorted_algos, sorted_params = sort_by_key(configure_log)
### generate .h code ###
h_code = '''// Generated automatically by essentia::translate
#ifndef STREAMING_''' + composite_algo.__name__.upper() + '''
#define STREAMING_''' + composite_algo.__name__.upper()+ '''
#include "streamingalgorithmcomposite.h"
class '''+composite_algo.__name__+''' : public essentia::streaming::AlgorithmComposite {
protected:
'''
for algo_name in sorted_algos:
h_code += ' essentia::streaming::Algorithm* _'+algo_name.lower()+';\n'
h_code += '''
public:
'''+composite_algo.__name__+'''();
~'''+composite_algo.__name__+'''() {
'''
for algo_name in sorted_algos:
h_code += ' delete _'+algo_name.lower()+';\n'
h_code += ''' }
void declareParameters() {
'''
if param_names:
for param_name, default_value in zip(param_names, default_values):
h_code += ' declareParameter("'+param_name+'", "", "", '
if isinstance(default_value, basestring): h_code += '"'+default_value+'"'
else: h_code += str(default_value)
h_code += ');\n'
h_code += ''' }
void configure();
void createInnerNetwork();
void reset();
static const char* name;
static const char* version;
static const char* description;
};
#endif
'''
### Generate .cpp code ###
cpp_code = '''// Generated automatically by essentia::translate
#include "'''+output_filename+'''.h"
#include "algorithmfactory.h"
#include "taskqueue.h"
using namespace std;
using namespace essentia;
using namespace essentia::streaming;
const char* '''+composite_algo.__name__+'''::name = "'''+composite_algo.__name__+'''";
const char* '''+composite_algo.__name__+'''::version = "1.0";
const char* '''+composite_algo.__name__+'''::description = DOC("");\n\n'''
################################
# CONSTRUCTOR
################################
cpp_code += composite_algo.__name__+'''::'''+composite_algo.__name__+'''(): '''
for algo_name in sorted_algos: cpp_code += '_' + algo_name.lower() + '(0), '
cpp_code = cpp_code[:-2] + ''' {
setName("''' + composite_algo.__name__ + '''");
declareParameters();
AlgorithmFactory& factory = AlgorithmFactory::instance();\n\n'''
# create inner algorithms
for algo_name in sorted_algos:
cpp_code += ' _'+algo_name.lower()+' = factory.create("'+algo_name[:algo_name.rindex('_')]+'");\n'
cpp_code+='}\n\n'
################################
# INNER NETWORK
################################
# declaring the inputs and outputs and connecting the network should not be
# done in the constructor, as there are algos like silencerate whose
# inputs/outputs depend on the configuration parameters. Hence, it is safer to
# do it in the configure() function
cpp_code += 'void ' + composite_algo.__name__ + '::createInnerNetwork() {\n'
# declare inputs
for input_alias, connector in algo_inst.inputs.iteritems():
input_owner_name = None
input_name = None
for algo_name, properties in zip(sorted_algos, sorted_params): #configure_log.iteritems():
if properties['instance'] == connector.input_algo:
input_owner_name = algo_name
input_name = connector.name
break
if not input_owner_name:
raise RuntimeError('Could not determine owner of the \''+input_alias+'\' input')
cpp_code += ' declareInput(_'+input_owner_name.lower()+'->input("'+input_name+'"), "'+input_alias+'", "");\n'
cpp_code += '\n'
# declare outputs
aliases, connectors = sort_by_key(algo_inst.outputs)
for output_alias, connector in zip(aliases, connectors):
output_owner_name = None
output_name = None
for algo_name, properties in zip(sorted_algos, sorted_params): #configure_log.iteritems():
if properties['instance'] == connector.output_algo:
output_owner_name = algo_name
output_name = connector.name
break
if not output_owner_name:
raise RuntimeError('Could not determine owner of the \''+output_alias+'\' output')
cpp_code += ' declareOutput(_'+output_owner_name.lower()+'->output("'+output_name+'"), "'+output_alias+'", "");\n'
cpp_code += '\n'
# make connections
for algo_name, properties in zip(sorted_algos, sorted_params): #configure_log.iteritems():
for left_connector, right_connectors in properties['instance'].connections.iteritems():
for right_connector in right_connectors:
if isinstance(right_connector, streaming._StreamConnector):
cpp_code += ' connect( _'+\
inner_algo_name(left_connector.output_algo, configure_log).lower() + \
'->output("'+left_connector.name+'"), _' + \
inner_algo_name(right_connector.input_algo, configure_log).lower() + \
'->input("'+right_connector.name+'") );\n'
elif isinstance(right_connector, types.NoneType):
cpp_code += ' connect( _'+\
inner_algo_name(left_connector.output_algo, configure_log).lower() + \
'->output("'+left_connector.name+'"), NOWHERE );\n'
cpp_code = cpp_code[:-1]
cpp_code += '''
}\n\n'''
################################
# CONFIGURE
################################
cpp_code += 'void '+composite_algo.__name__+'::configure() {\n'
# configure method
# create local variable for every composite parameter
for composite_param_name in param_names:
param_edt = find_edt(composite_param_name, marker_objs[composite_param_name], configure_log)
cpp_code += ' '+edt_cpp_code[param_edt]+' '+composite_param_name + \
' = parameter("'+composite_param_name+'").to' + \
edt_parameter_code[param_edt]+'();\n'
cpp_code += '\n'
# configure inner algorithms
for algo_name, properties in zip(sorted_algos, sorted_params): #configure_log.iteritems():
# skip if inner algorithm wasn't configured explicitly
if not properties['parameters']: continue
for param_name, value in properties['parameters'].iteritems():
type = common.determineEdt(value)
if 'LIST' in str(type) or 'VECTOR' in str(type):
if type in [common.Edt.VECTOR_STRING]:
cpp_code += ' const char* ' + param_name + '[] = {'
for s in value: cpp_code += '\"' + s + '\"' + ','
elif type in[common.Edt.VECTOR_REAL, common.Edt.LIST_REAL]:
cpp_code += ' Real ' + param_name + '[] = {'
for f in value: cpp_code += str(f) + ','
elif type in [common.Edt.VECTOR_INT, common.Edt.LIST_INT]:
cpp_code += ' int ' + param_name + '[] = {'
for i in value: cpp_code += str(i) + ','
cpp_code = cpp_code[:-1]+'};\n'
cpp_code += ' _'+algo_name.lower()+'->configure('
for param_name, value in properties['parameters'].iteritems():
if isinstance(value, MarkerObject):
# figure out which composite param it is
composite_param_name = None
for marker_name, marker_obj in marker_objs.iteritems():
if marker_obj == value:
composite_param_name = marker_name
break
if not composite_param_name:
raise RuntimeError('Could not determine which composite parameter to use to configure inner algorithm \''+algo_name+'\'s parameter \''+param_name+'\'')
cpp_code += '"'+param_name+'", '+composite_param_name+', '
else:
type = common.determineEdt(value)
if 'LIST' in str(type) or 'VECTOR' in str(type):
if type in [common.Edt.VECTOR_STRING]:
cpp_code += '"'+param_name+'", '+'arrayToVector<string>(' + param_name + ') '
elif type in[common.Edt.VECTOR_REAL, common.Edt.LIST_REAL]:
cpp_code += '"'+param_name+'", '+'arrayToVector<Real>(' + param_name + ') '
elif type in [common.Edt.VECTOR_INT, common.Edt.LIST_INT]:
cpp_code += '"'+param_name+'", '+'arrayToVector<int>(' + param_name + ') '
elif isinstance(value, basestring):
cpp_code += '"'+param_name+'", "'+value+'", '
elif isinstance(value, bool):
if value: cpp_code += '"'+param_name+'", true, '
else: cpp_code += '"'+param_name+'", false, '
else:
cpp_code += '"'+param_name+'", '+str(value)+', '
cpp_code = cpp_code[:-2] + ');\n'
cpp_code += ' createInnerNetwork();\n}\n\n'
################################
# RESET
################################
cpp_code += 'void '+composite_algo.__name__+'::reset() {\n'
for algo_name in sorted_algos:
cpp_code += ' _' + algo_name.lower() + '->reset();\n'
cpp_code += '}\n\n'
################################
# DESTRUCTOR
################################
# see h_code. Each algo from the composite is deleted separately instead of
# calling deleteNetwork
# cpp_code += composite_algo.__name__+'''::~'''+composite_algo.__name__+'''() {
# deleteNetwork(_''' + input_owner_name.lower() + ''');
#}'''
# cpp_code +='\n'
#
################################
# end of cpp code
################################
if dot_graph:
### generate .dot code ###
dot_code = 'digraph ' + output_filename +' {\n'
dot_code += ' rankdir=LR\n' # left-right instead of top-down, if preferred
# general formatting options:
dot_code += ' node [color=black, fontname=Verdana, weight=1, fontsize=8, shape=Mrecord]\n'
dot_code += ' edge [color=black, style=solid, weight=1, arrowhead="dotnormal", arrowtail="dot", arrowsize=1, fontsize=6]\n'
# for each input generate nodes
for name in algo_inst.inputs.keys():
dot_code += ' '+name+' [label="'+name+'"];\n'
dot_code += generate_dot_cluster(configure_log, composite_algo.__name__, algo_inst)
# for each output generate nodes
for name in algo_inst.outputs.keys():
dot_code += ' '+name+' [label="'+name+'"];\n'
dot_code += '}'
### Write files ###
f = open(output_filename+'.h', 'w')
f.write(h_code)
f.close()
f = open(output_filename+'.cpp', 'w')
f.write(cpp_code)
f.close()
if dot_graph:
f = open(output_filename+'.dot', 'w')
f.write(dot_code)
f.close()
|
agpl-3.0
| 8,897,953,070,991,686,000 | 41.271795 | 199 | 0.576206 | false |
alephdata/aleph
|
aleph/views/xref_api.py
|
1
|
3909
|
import logging
from flask import Blueprint, request
from pantomime.types import XLSX
from aleph.search import XrefQuery
from aleph.logic.profiles import pairwise_judgements
from aleph.logic.export import create_export
from aleph.views.serializers import XrefSerializer
from aleph.queues import queue_task, OP_XREF, OP_EXPORT_XREF
from aleph.views.util import (
get_db_collection,
get_index_collection,
get_session_id,
jsonify,
)
blueprint = Blueprint("xref_api", __name__)
log = logging.getLogger(__name__)
@blueprint.route("/api/2/collections/<int:collection_id>/xref", methods=["GET"]) # noqa
def index(collection_id):
"""
---
get:
summary: Fetch cross-reference results
description: >-
Fetch cross-reference matches for entities in the collection
with id `collection_id`
parameters:
- in: path
name: collection_id
required: true
schema:
type: integer
- in: query
name: evaluation_mode
required: false
schema:
type: bool
default: false
responses:
'200':
description: OK
content:
application/json:
schema:
type: object
allOf:
- $ref: '#/components/schemas/QueryResponse'
properties:
results:
type: array
items:
$ref: '#/components/schemas/XrefResponse'
tags:
- Xref
- Collection
"""
get_index_collection(collection_id, request.authz.READ)
result = XrefQuery.handle(request, collection_id=collection_id)
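# Annotate each xref result with any existing pairwise judgement
# between the source entity and its candidate match.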
pairs = []
for xref in result.results:
pairs.append((xref.get("entity_id"), xref.get("match_id")))
judgements = pairwise_judgements(pairs, collection_id)
for xref in result.results:
key = (xref.get("entity_id"), xref.get("match_id"))
xref["judgement"] = judgements.get(key)
return XrefSerializer.jsonify_result(result)
@blueprint.route("/api/2/collections/<int:collection_id>/xref", methods=["POST"])
def generate(collection_id):
"""
---
post:
summary: Generate cross-reference matches
description: >
Generate cross-reference matches for entities in a collection.
parameters:
- in: path
name: collection_id
required: true
schema:
type: integer
responses:
'202':
content:
application/json:
schema:
properties:
status:
description: accepted
type: string
type: object
description: Accepted
tags:
- Xref
- Collection
"""
collection = get_db_collection(collection_id, request.authz.WRITE)
queue_task(collection, OP_XREF)
return jsonify({"status": "accepted"}, status=202)
@blueprint.route("/api/2/collections/<int:collection_id>/xref.xlsx", methods=["POST"])
def export(collection_id):
"""
---
post:
summary: Download cross-reference results
description: Download results of cross-referencing as an Excel file
parameters:
- in: path
name: collection_id
required: true
schema:
type: integer
responses:
'202':
description: Accepted
tags:
- Xref
- Collection
"""
collection = get_db_collection(collection_id, request.authz.READ)
label = "%s - Crossreference results" % collection.label
export = create_export(
operation=OP_EXPORT_XREF,
role_id=request.authz.id,
label=label,
collection=collection,
mime_type=XLSX,
)
job_id = get_session_id()
queue_task(None, OP_EXPORT_XREF, job_id=job_id, export_id=export.id)
return ("", 202)
|
mit
| -5,093,979,607,631,259,000 | 27.532847 | 88 | 0.589153 | false |
rbose85/bounce-cars
|
backend/bounce/settings/base.py
|
1
|
2389
|
import os
from django.core.exceptions import ImproperlyConfigured
def get_env_setting(setting):
"""Return the value of an environment variable, or raise an exception."""
try:
return os.environ[setting]
except KeyError:
error_msg = "Set the {} env variable".format(setting)
raise ImproperlyConfigured(error_msg)
# ### PROJECT
PROJECT = "bounce"
PROJECT_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
SITE_DOMAIN = None
# ### DJANGO: https://docs.djangoproject.com/en/dev/ref/settings/
ROOT_URLCONF = "bounce.urls"
# ### DJANGO: django.conf.global_settings
TIME_ZONE = "UTC"
USE_TZ = True
LANGUAGE_CODE = "en-gb"
USE_I18N = True
USE_L10N = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.postgresql_psycopg2"
}
}
INSTALLED_APPS = (
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
)
# don't forget project apps ..
INSTALLED_APPS += (
"bounce",
"trades",
)
SECRET_KEY = get_env_setting("SECRET_KEY")
STATIC_ROOT = os.path.join(PROJECT_DIR, "staticfiles")
STATIC_URL = "/static/"
MIDDLEWARE_CLASSES = (
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
)
# ### DJANGO REST FRAMEWORK: rest_framework.settings
INSTALLED_APPS += (
"rest_framework",
)
REST_FRAMEWORK = {
"DEFAULT_MODEL_SERIALIZER_CLASS": [
"rest_framework.serializers.HyperlinkedModelSerializer",
],
"DEFAULT_PERMISSION_CLASSES": [
"rest_framework.permissions.AllowAny",
],
"DEFAULT_RENDERER_CLASSES": [
"djangorestframework_camel_case.render.CamelCaseJSONRenderer",
"rest_framework.renderers.JSONRenderer",
"rest_framework.renderers.BrowsableAPIRenderer",
],
"DEFAULT_PARSER_CLASSES": [
"djangorestframework_camel_case.parser.CamelCaseJSONParser",
"rest_framework.parsers.JSONParser",
"rest_framework.parsers.FormParser",
"rest_framework.parsers.MultiPartParser"
],
}
|
mit
| -924,595,611,643,535,000 | 22.194175 | 77 | 0.686898 | false |
andela-ooshodi/codango-debug
|
codango/account/migrations/0001_initial.py
|
1
|
1359
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import cloudinary.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='UserProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('fb_id', models.CharField(max_length=200, null=True)),
('place_of_work', models.CharField(max_length=150, blank=True)),
('position', models.CharField(max_length=100, blank=True)),
('followers', models.IntegerField(default=0)),
('following', models.IntegerField(default=0)),
('first_name', models.CharField(max_length=100, blank=True)),
('last_name', models.CharField(max_length=100, blank=True)),
('about', models.TextField(blank=True)),
('image', cloudinary.models.CloudinaryField(default=b'image/upload/v1443782603/vqr7n59zfxyeybttleug.gif', max_length=255, verbose_name=b'image')),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
),
]
|
mit
| -6,548,849,997,940,557,000 | 41.46875 | 162 | 0.608536 | false |