text
stringlengths 6
947k
| repo_name
stringlengths 5
100
| path
stringlengths 4
231
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 6
947k
| score
float64 0
0.34
|
---|---|---|---|---|---|---|
from pycp2k.inputsection import InputSection
from ._each174 import _each174
class _diis_info3(InputSection):
    """CP2K ``DIIS_INFO`` print-key input section (auto-generated style class)."""

    def __init__(self):
        InputSection.__init__(self)
        # Keyword slots start unset; pycp2k serialises only non-None values.
        for keyword_attr in ('Section_parameters', 'Add_last',
                             'Common_iteration_levels', 'Filename',
                             'Log_print_key'):
            setattr(self, keyword_attr, None)
        # Nested EACH print-frequency subsection.
        self.EACH = _each174()
        self._name = "DIIS_INFO"
        # Maps Python attribute names to CP2K input keywords.
        self._keywords = {
            'Log_print_key': 'LOG_PRINT_KEY',
            'Filename': 'FILENAME',
            'Add_last': 'ADD_LAST',
            'Common_iteration_levels': 'COMMON_ITERATION_LEVELS',
        }
        self._subsections = {'EACH': 'EACH'}
        self._attributes = ['Section_parameters']
|
SINGROUP/pycp2k
|
pycp2k/classes/_diis_info3.py
|
Python
|
lgpl-3.0
| 668 | 0.002994 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from keystoneclient import exceptions
from keystoneclient import fixture
from keystoneclient.tests.unit.v2_0 import utils
from keystoneclient.v2_0 import client
from keystoneclient.v2_0 import tenants
from keystoneclient.v2_0 import users
class TenantTests(utils.TestCase):
    """Unit tests for the keystone v2.0 tenants manager and Tenant resource.

    HTTP calls are stubbed with ``stub_url``/``stub_auth``; no network I/O.
    """

    def setUp(self):
        """Create four fake tenants (random hex IDs) used as stub fixtures."""
        super(TenantTests, self).setUp()
        self.INVIS_ID = uuid.uuid4().hex
        self.DEMO_ID = uuid.uuid4().hex
        self.ADMIN_ID = uuid.uuid4().hex
        self.EXTRAS_ID = uuid.uuid4().hex
        # Shape mirrors a v2 list response: 'values' list plus 'links'.
        self.TEST_TENANTS = {
            "tenants": {
                "values": [
                    {
                        "enabled": True,
                        "description": "A description change!",
                        "name": "invisible_to_admin",
                        "id": self.INVIS_ID,
                    },
                    {
                        "enabled": True,
                        "description": "None",
                        "name": "demo",
                        "id": self.DEMO_ID,
                    },
                    {
                        "enabled": True,
                        "description": "None",
                        "name": "admin",
                        "id": self.ADMIN_ID,
                    },
                    {
                        # Extra (non-schema) attribute to exercise kwargs
                        # pass-through in create/update.
                        "extravalue01": "metadata01",
                        "enabled": True,
                        "description": "For testing extras",
                        "name": "test_extras",
                        "id": self.EXTRAS_ID,
                    }
                ],
                "links": [],
            },
        }

    def test_create(self):
        """create() sends extra kwargs and must not let kwargs clobber
        positional args (the duplicate name= is ignored)."""
        req_body = {
            "tenant": {
                "name": "tenantX",
                "description": "Like tenant 9, but better.",
                "enabled": True,
                "extravalue01": "metadata01",
            },
        }
        id_ = uuid.uuid4().hex
        resp_body = {
            "tenant": {
                "name": "tenantX",
                "enabled": True,
                "id": id_,
                "description": "Like tenant 9, but better.",
                "extravalue01": "metadata01",
            }
        }
        self.stub_url('POST', ['tenants'], json=resp_body)
        tenant = self.client.tenants.create(
            req_body['tenant']['name'],
            req_body['tenant']['description'],
            req_body['tenant']['enabled'],
            extravalue01=req_body['tenant']['extravalue01'],
            name="don't overwrite priors")
        self.assertIsInstance(tenant, tenants.Tenant)
        self.assertEqual(tenant.id, id_)
        self.assertEqual(tenant.name, "tenantX")
        self.assertEqual(tenant.description, "Like tenant 9, but better.")
        self.assertEqual(tenant.extravalue01, "metadata01")
        self.assertRequestBodyIs(json=req_body)

    def test_duplicate_create(self):
        """A 409 response from keystone maps to exceptions.Conflict."""
        req_body = {
            "tenant": {
                "name": "tenantX",
                "description": "The duplicate tenant.",
                "enabled": True
            },
        }
        resp_body = {
            "error": {
                "message": "Conflict occurred attempting to store project.",
                "code": 409,
                "title": "Conflict",
            }
        }
        self.stub_url('POST', ['tenants'], status_code=409, json=resp_body)

        def create_duplicate_tenant():
            self.client.tenants.create(req_body['tenant']['name'],
                                       req_body['tenant']['description'],
                                       req_body['tenant']['enabled'])

        self.assertRaises(exceptions.Conflict, create_duplicate_tenant)

    def test_delete(self):
        """delete() issues DELETE /tenants/<id>."""
        self.stub_url('DELETE', ['tenants', self.ADMIN_ID], status_code=204)
        self.client.tenants.delete(self.ADMIN_ID)

    def test_get(self):
        """get() returns a Tenant built from the response body."""
        resp = {'tenant': self.TEST_TENANTS['tenants']['values'][2]}
        self.stub_url('GET', ['tenants', self.ADMIN_ID], json=resp)
        t = self.client.tenants.get(self.ADMIN_ID)
        self.assertIsInstance(t, tenants.Tenant)
        self.assertEqual(t.id, self.ADMIN_ID)
        self.assertEqual(t.name, 'admin')

    def test_list(self):
        """list() yields Tenant instances."""
        self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS)
        tenant_list = self.client.tenants.list()
        [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list]

    def test_list_limit(self):
        """limit= is forwarded as a query-string parameter."""
        self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS)
        tenant_list = self.client.tenants.list(limit=1)
        self.assertQueryStringIs('limit=1')
        [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list]

    def test_list_marker(self):
        """marker= is forwarded as a query-string parameter."""
        self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS)
        tenant_list = self.client.tenants.list(marker=1)
        self.assertQueryStringIs('marker=1')
        [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list]

    def test_list_limit_marker(self):
        """limit= and marker= may be combined in one request."""
        self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS)
        tenant_list = self.client.tenants.list(limit=1, marker=1)
        self.assertQueryStringIs('marker=1&limit=1')
        [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list]

    def test_update(self):
        """update() sends extra kwargs; duplicate name= kwarg is ignored."""
        req_body = {
            "tenant": {
                "id": self.EXTRAS_ID,
                "name": "tenantX",
                "description": "I changed you!",
                "enabled": False,
                "extravalue01": "metadataChanged",
                # "extraname": "dontoverwrite!",
            },
        }
        resp_body = {
            "tenant": {
                "name": "tenantX",
                "enabled": False,
                "id": self.EXTRAS_ID,
                "description": "I changed you!",
                "extravalue01": "metadataChanged",
            },
        }
        self.stub_url('POST', ['tenants', self.EXTRAS_ID], json=resp_body)
        tenant = self.client.tenants.update(
            req_body['tenant']['id'],
            req_body['tenant']['name'],
            req_body['tenant']['description'],
            req_body['tenant']['enabled'],
            extravalue01=req_body['tenant']['extravalue01'],
            name="don't overwrite priors")
        self.assertIsInstance(tenant, tenants.Tenant)
        self.assertRequestBodyIs(json=req_body)
        self.assertEqual(tenant.id, self.EXTRAS_ID)
        self.assertEqual(tenant.name, "tenantX")
        self.assertEqual(tenant.description, "I changed you!")
        self.assertFalse(tenant.enabled)
        self.assertEqual(tenant.extravalue01, "metadataChanged")

    def test_update_empty_description(self):
        """An empty-string description must be sent, not dropped."""
        req_body = {
            "tenant": {
                "id": self.EXTRAS_ID,
                "name": "tenantX",
                "description": "",
                "enabled": False,
            },
        }
        resp_body = {
            "tenant": {
                "name": "tenantX",
                "enabled": False,
                "id": self.EXTRAS_ID,
                "description": "",
            },
        }
        self.stub_url('POST', ['tenants', self.EXTRAS_ID], json=resp_body)
        tenant = self.client.tenants.update(req_body['tenant']['id'],
                                            req_body['tenant']['name'],
                                            req_body['tenant']['description'],
                                            req_body['tenant']['enabled'])
        self.assertIsInstance(tenant, tenants.Tenant)
        self.assertRequestBodyIs(json=req_body)
        self.assertEqual(tenant.id, self.EXTRAS_ID)
        self.assertEqual(tenant.name, "tenantX")
        self.assertEqual(tenant.description, "")
        self.assertFalse(tenant.enabled)

    def test_add_user(self):
        """Manager add_user() PUTs the OS-KSADM role-grant URL."""
        self.stub_url('PUT',
                      ['tenants', self.EXTRAS_ID, 'users', 'foo', 'roles',
                       'OS-KSADM', 'barrr'],
                      status_code=204)
        self.client.tenants.add_user(self.EXTRAS_ID, 'foo', 'barrr')

    def test_remove_user(self):
        """Manager remove_user() DELETEs the OS-KSADM role-grant URL."""
        self.stub_url('DELETE', ['tenants', self.EXTRAS_ID, 'users',
                                 'foo', 'roles', 'OS-KSADM', 'barrr'],
                      status_code=204)
        self.client.tenants.remove_user(self.EXTRAS_ID, 'foo', 'barrr')

    def test_tenant_add_user(self):
        """Tenant.add_user() works on a resource built directly from a dict."""
        self.stub_url('PUT', ['tenants', self.EXTRAS_ID, 'users',
                              'foo', 'roles', 'OS-KSADM', 'barrr'],
                      status_code=204)
        req_body = {
            "tenant": {
                "id": self.EXTRAS_ID,
                "name": "tenantX",
                "description": "I changed you!",
                "enabled": False,
            },
        }
        # make tenant object with manager
        tenant = self.client.tenants.resource_class(self.client.tenants,
                                                    req_body['tenant'])
        tenant.add_user('foo', 'barrr')
        self.assertIsInstance(tenant, tenants.Tenant)

    def test_tenant_remove_user(self):
        """Tenant.remove_user() works on a resource built directly from a dict."""
        self.stub_url('DELETE', ['tenants', self.EXTRAS_ID, 'users',
                                 'foo', 'roles', 'OS-KSADM', 'barrr'],
                      status_code=204)
        req_body = {
            "tenant": {
                "id": self.EXTRAS_ID,
                "name": "tenantX",
                "description": "I changed you!",
                "enabled": False,
            },
        }
        # make tenant object with manager
        tenant = self.client.tenants.resource_class(self.client.tenants,
                                                    req_body['tenant'])
        tenant.remove_user('foo', 'barrr')
        self.assertIsInstance(tenant, tenants.Tenant)

    def test_tenant_list_users(self):
        """Tenant.list_users() returns User objects for the tenant's users."""
        tenant_id = uuid.uuid4().hex
        user_id1 = uuid.uuid4().hex
        user_id2 = uuid.uuid4().hex
        tenant_resp = {
            'tenant': {
                'name': uuid.uuid4().hex,
                'enabled': True,
                'id': tenant_id,
                'description': 'test tenant',
            }
        }
        users_resp = {
            'users': {
                'values': [
                    {
                        'email': uuid.uuid4().hex,
                        'enabled': True,
                        'id': user_id1,
                        'name': uuid.uuid4().hex,
                    },
                    {
                        'email': uuid.uuid4().hex,
                        'enabled': True,
                        'id': user_id2,
                        'name': uuid.uuid4().hex,
                    },
                ]
            }
        }
        self.stub_url('GET', ['tenants', tenant_id], json=tenant_resp)
        self.stub_url('GET',
                      ['tenants', tenant_id, 'users'],
                      json=users_resp)
        tenant = self.client.tenants.get(tenant_id)
        user_objs = tenant.list_users()
        for u in user_objs:
            self.assertIsInstance(u, users.User)
        self.assertEqual(set([user_id1, user_id2]),
                         set([u.id for u in user_objs]))

    def test_list_tenants_use_admin_url(self):
        """When a management URL is known, list() uses it."""
        self.stub_url('GET', ['tenants'], json=self.TEST_TENANTS)
        self.assertEqual(self.TEST_URL, self.client.management_url)
        tenant_list = self.client.tenants.list()
        [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list]
        self.assertEqual(len(self.TEST_TENANTS['tenants']['values']),
                         len(tenant_list))

    def test_list_tenants_fallback_to_auth_url(self):
        """Without a management URL, list() falls back to the auth URL."""
        new_auth_url = 'http://keystone.test:5000/v2.0'
        token = fixture.V2Token(token_id=self.TEST_TOKEN,
                                user_name=self.TEST_USER,
                                user_id=self.TEST_USER_ID)
        self.stub_auth(base_url=new_auth_url, json=token)
        self.stub_url('GET', ['tenants'], base_url=new_auth_url,
                      json=self.TEST_TENANTS)
        c = client.Client(username=self.TEST_USER,
                          auth_url=new_auth_url,
                          password=uuid.uuid4().hex)
        self.assertIsNone(c.management_url)
        tenant_list = c.tenants.list()
        [self.assertIsInstance(t, tenants.Tenant) for t in tenant_list]
        self.assertEqual(len(self.TEST_TENANTS['tenants']['values']),
                         len(tenant_list))
|
sdpp/python-keystoneclient
|
keystoneclient/tests/unit/v2_0/test_tenants.py
|
Python
|
apache-2.0
| 13,149 | 0 |
"""
# TOP2049 Open Source programming suite
#
# Microchip8_18_common - basic file for 8bit PIC18 MCU
#
# Copyright (c) 2013 Pavel Stemberk <stemberk@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
from libtoprammer.chip import *
class Chip_Microchip8_18_common(Chip):
    """Common TOP2049 programmer logic for 8-bit Microchip PIC18 MCUs.

    Implements the high-voltage ICSP programming protocol (4-bit commands,
    TBLPTR addressing, core-instruction injection) on top of the FPGA
    bottom-half registers (0x10..0x17).
    """

    # Status bits read back from FPGA register 0x12 (see getStatusFlags()).
    STAT_BUSY = 0x01
    STAT_SDIO = 0x02
    # Bit positions inside the FPGA command byte assembled by sendCommand().
    PCMDBIT_4BITINSTR = 0
    PCMDBIT_SENDDATA = 1
    PCMDBIT_READDATA = 2
    PCMDBIT_KEEPCLKHIGH = 7
    # PIC18 4-bit ICSP programming commands (table read/write variants --
    # see the Microchip PIC18 flash programming specification).
    CMD_CORE_INSTRUCTION = 0x0
    CMD_SHIFT_OUT_TABLAT = 0x2
    CMD_TR = 0x8
    CMD_TRI = 0x9
    CMD_TRD = 0xA
    CMD_ITR = 0xB
    CMD_TW = 0xC
    CMD_TWII = 0xD
    CMD_TWDD = 0xE
    CMD_TW_START_PROG = 0xF

    # EEPROM access: default on, if does not exist override it
    hasEEPROM = True

    # Default protocol delays in seconds (spec parameters P2A..P15);
    # subclasses may override.
    delayP2A = 400e-9       # Serial clock low time
    delayP5 = 2.2e-6        # Delay between 4-bit command and command operand
    delayP5A = 2.2e-6       # Delay between 4-bit command operand and next 4-bit command
    delayP6 = 2.2e-6        # Delay between last SCK fall of command byte to first SCK rise of read data word
    delayP9 = 1e-3          # SCK High time (minimum programming time)
    delayP10 = 30e-6        # SCK Low time after programming (high-voltage discharge time)
    delayP11 = 0.01         # Delay to allow self-timed data write or bulk erase to occur
    delayP12 = 0.000002     # Input data hold time from nMCLR/Vpp rise
    delayP13 = 0.0000001    # Vdd rise setup time to nMCLR/Vpp rise
    delayP14 = 0.00000001   # Data out Valid from SCK rise
    delayP15 = 0.000002     # PGM rise setup time to nMCLR/Vpp rise

    # Fixed PIC18 memory-map constants.
    userIDLocationSize = 8
    userIDLocationAddr = 0x200000
    deviceIDAddr = 0x3FFFFE
    configWordAddr = 0x300000
    deviceIDLength = 2
    # Programming voltages (volts).
    voltageVDD = 5
    voltageVPP = 12

    def __init__(self,
                 chipPackage, chipPinVCC, chipPinsVPP, chipPinGND,
                 signature,
                 flashPageSize, flashPages,
                 eepromPageSize, eepromPages,
                 fuseBytes
                 ):
        """Store the per-device geometry; pin wiring goes to the Chip base."""
        Chip.__init__(self,
                      chipPackage=chipPackage,
                      chipPinVCC=chipPinVCC,
                      chipPinsVPP=chipPinsVPP,
                      chipPinGND=chipPinGND)
        self.signature = signature
        self.flashPageSize = flashPageSize      # Flash page size, in words
        self.flashPages = flashPages            # Nr of flash pages
        self.eepromPageSize = eepromPageSize    # EEPROM page size, in bytes
        self.eepromPages = eepromPages          # Nr of EEPROM pages
        self.fuseBytes = fuseBytes              # Nr of fuse bytes
        self.isInPmMode = False
        # Bytes queued in the FPGA read buffer, flushed into self.Image.
        self.BufferedBytes = 0
        self.Image = b""

    def getIHexInterpreter(self):
        """Return an IHex interpreter mapping hex-file address ranges to
        progmem / fuses / user-ID areas of this chip."""
        inter = IHexInterpreter()
        # NOTE(review): range spans only one flashPageSize, not
        # flashPages * flashPageSize -- confirm against subclasses' usage.
        inter.progmemRanges = [ AddressRange(0, self.flashPageSize) ]
        inter.fuseRanges = [ AddressRange(self.configWordAddr,
                                          self.configWordAddr + self.fuseBytes) ]
        inter.uilRanges = [ AddressRange(self.userIDLocationAddr,
                                         self.userIDLocationAddr + self.userIDLocationSize) ]
        return inter

    def enterPM(self, force=False):
        """Enter HV programming mode (Vdd-first entry): power GND/VCC,
        then raise Vpp and program the FPGA delay registers."""
        if self.isInPmMode and not force:
            return
        self.applyVCC(False)
        self.applyVPP(False)
        self.applyGND(False)
        self.setPins(0, 0)
        self.top.cmdSetVCCVoltage(self.voltageVDD)
        self.top.cmdSetVPPVoltage(self.voltageVPP)
        self.applyGND(True)
        self.applyVCC(True)
        self.top.hostDelay(10 * self.delayP13)
        self.applyVPP(True)
        self.top.hostDelay(102 * self.delayP12)
        self.setTopProgrammerDelays()
        self.isInPmMode = True

    def readUserIdLocation(self):
        """Read the 8 user-ID bytes at 0x200000."""
        return self.readSequentialBlock(self.userIDLocationAddr, self.userIDLocationSize, "Reading User ID Locations")

    def readFuse(self):
        """Read the configuration words at 0x300000."""
        return self.readSequentialBlock(self.configWordAddr, self.fuseBytes, "Reading Config Words")

    def readSignature(self):
        """Read the device-ID bytes at 0x3FFFFE."""
        return self.readSequentialBlock(self.deviceIDAddr, self.deviceIDLength, "Reading Signature")

    def readProgmem(self):
        """Read the whole flash program memory."""
        nrBytes = self.flashPages * self.flashPageSize
        return self.readSequentialBlock(0, nrBytes, "Reading flash")

    def readSequentialBlock(self, startAddr, nBytes, infoText):
        """Read nBytes starting at startAddr via TBLPTR auto-increment reads."""
        self.enterPM()
        self.progressMeterInit(infoText, nBytes)
        self.BufferedBytes = 0
        self.Image = b""
        self.executeCode(self.getCodeAddrToTBLPTR(startAddr))
        for byteAddr in range(0, nBytes):
            self.send4bitReadInstruction(self.CMD_TRI)
            self.progressMeter(byteAddr)
        self.progressMeterFinish()
        self.flushBufferToImage()
        return self.Image

    def writeSequentialBlock(self, startAddr, image, size, infoText):
        """Write *image* into consecutive addresses starting at startAddr.

        size is the capacity of the target region; writing proceeds in
        chunks of self.writeBufferSize (defined by the concrete subclass --
        TODO confirm).
        """
        if len(image) > size:
            # BUG FIX: message previously reported self.userIDLocationSize
            # instead of the actual capacity of this region.
            self.throwError("Invalid flash image size %d (expected <=%d)" % \
                (len(image), size))
        self.enterPM()
        self.executeCode((0x8EA6, 0x9CA6))
        self.progressMeterInit(infoText, len(image) // 8)
        for blockAddr in range(0, len(image), self.writeBufferSize):
            #print("addr:{:x}".format(startAddr+blockAddr))
            self.executeCode(self.getCodeAddrToTBLPTR(startAddr+blockAddr))
            #for code in self.getCodeAddrToTBLPTR(startAddr+blockAddr):
            #    print("({:x}, ".format(code))
            # BUG FIX: removed stray live debug statement print(")\n") that
            # was left over from the commented-out loop above and printed
            # a lone ")" for every chunk written.
            self.writeNbytes(image[blockAddr:], self.writeBufferSize)
            #self.executeCode((0x0, 0x0))
            self.progressMeter(blockAddr)
        self.progressMeterFinish()

    def readEEPROM(self):
        """Read the data EEPROM byte-by-byte through EEADR/EEDATA."""
        nrBytes = self.eepromPages * self.eepromPageSize
        self.enterPM()
        self.progressMeterInit("Reading EEPROM", nrBytes)
        self.BufferedBytes = 0
        self.Image = b""
        self.executeCode((0x9EA6, 0x9CA6))
        for byteAddr in range(0, nrBytes):
            # print("set addr to {:x}\n".format(byteAddr))
            self.setEEPROMAddr(byteAddr)
            self.executeCode((0x80A6, 0x50A8, 0x6EF5))
            self.send4bitReadInstruction(self.CMD_SHIFT_OUT_TABLAT)
            self.progressMeter(byteAddr)
        self.progressMeterFinish()
        self.flushBufferToImage()
        return self.Image

    def writeEEPROM(self, image):
        """Write *image* into the data EEPROM, one byte at a time."""
        nrBytes = self.eepromPages * self.eepromPageSize
        if len(image) > nrBytes:
            self.throwError("Invalid flash image size {:d} (expected <={:d})".format(len(image), nrBytes))
        self.enterPM()
        self.progressMeterInit("Writing eeprom", len(image))
        self.executeCode((0x9EA6, 0x9CA6))
        for addr in range(0, len(image)):
            self.progressMeter(addr)
            #print("writing {:x} value to addr {:x}\n".format(byte2int(image[addr]), addr))
            self.setEEPROMAddr(addr)
            self.executeCode((0x0E00 | (byte2int(image[addr]) & 0xFF), 0x6EA8))
            # Unlock sequence (0x55 / 0xAA to EECON2), then start the write.
            self.executeCode((0x84A6, 0x0E55, 0x6EA7, 0x0EAA, 0x6EA7))
            self.executeCode((0x82A6, 0x0, 0x0))
            self.top.hostDelay(self.delayP11 + self.delayP10)
            self.executeCode((0x94A6,))
        self.progressMeterFinish()

    def writeNbytes(self, image, N):
        """Push N bytes (padding with 0xFF) into the write buffer and, if any
        byte is non-0xFF, trigger programming of the row."""
        if N % 2:
            self.throwError("N should be even, not %d" % N)
        isEmpty = True
        #N = (pN, len(image))[len(image) < pN]
        for idx in range(0, N):
            if idx == len(image):
                image += b'\xFF'
            elif byte2int(image[idx]) != 0xFF:
                isEmpty = False
        if(not isEmpty):
            for wordAddr in range(0, N-2, 2):
                self.send4bitWriteInstruction(self.CMD_TWII, byte2int(image[wordAddr]) | (byte2int(image[wordAddr + 1]) << 8))
            # Last word uses the start-programming command, then hold SCK
            # high for the P9 programming time and discharge for P10.
            self.send4bitWriteInstruction(self.CMD_TW_START_PROG, byte2int(image[N-2]) | (byte2int(image[N-1]) << 8))
            self.top.cmdFPGAWrite(0x12, 0x81)
            self.top.hostDelay(self.delayP9)
            self.setPins(0)
            self.top.cmdDelay(self.delayP10)
            for i in range(0,4):
                self.sendCommand(1)

    def writeUserIdLocation(self, image):
        """Program the 8 user-ID bytes."""
        self.writeSequentialBlock(self.userIDLocationAddr, image, self.userIDLocationSize, "Writing User ID Locations")

    def checkSignature(self):
        """Compare the device signature against the expected one; warn when
        force level >= 1, otherwise abort."""
        signature = self.readSignature()
        if signature != self.signature:
            msg = "Unexpected device signature. " + \
                "Want %02X%02X%02X, but got %02X%02X%02X" % \
                (byte2int(self.signature[0]), byte2int(self.signature[1]),
                 byte2int(self.signature[2]),
                 byte2int(signature[0]), byte2int(signature[1]),
                 byte2int(signature[2]))
            if self.top.getForceLevel() >= 1:
                self.printWarning(msg)
            else:
                self.throwError(msg)

    def writeProgmem(self, image):
        """Program the flash memory with *image*."""
        nrBytes = self.flashPages * self.flashPageSize
        if len(image) > nrBytes:
            self.throwError("Invalid flash image size %d (expected <=%d)" % \
                (len(image), nrBytes))
        self.writeSequentialBlock(0, image, nrBytes, "Writing flash")

    def writeFuse(self, image):
        """Program the configuration words one byte at a time (odd bytes are
        shifted into the high half of the programmed word)."""
        self.enterPM()
        if len(image) > self.fuseBytes:
            self.throwError("Invalid Fuses image size %d (expected less than %d)" % \
                (len(image), self.fuseBytes))
        self.executeCode((0x8EA6, 0x8CA6, 0xEF00, 0xF800))
        for fuseAddr in range(0,len(image)):
            self.executeCode(self.getCodeAddrToTBLPTR(self.configWordAddr+fuseAddr))
            if(fuseAddr & 0x01):
                byte = byte2int(image[fuseAddr]) << 8
            else:
                byte = byte2int(image[fuseAddr])
            self.send4bitWriteInstruction(self.CMD_TW_START_PROG, byte)
            self.top.cmdFPGAWrite(0x12, 0x81)
            #self.setPins(1)
            self.top.hostDelay(self.delayP9)
            self.setPins(0)
            self.top.cmdDelay(self.delayP10)
            for i in range(0,4):
                self.sendCommand(1)
            #self.executeCode((0x2AF6,))
        # NOTE(review): the fuses are then written once more as a sequential
        # block -- presumably intentional, but looks redundant; confirm.
        self.writeSequentialBlock(self.configWordAddr, image, self.fuseBytes, "Writing fuses")
        self.progressMeterInit("Writing fuses", 0)

    def exitPM(self):
        "Exit programming mode. Vdd last exit mode"
        self.top.flushCommands()
        self.setPins(0, 0)
        self.applyVPP(False)
        self.applyVCC(False)
        self.applyGND(False)
        self.isInPmMode = False

    # ready for 18F below
    def send4bitReadInstruction(self, pInstruction):
        """Issue a 4-bit read command and queue the returned byte into the
        FPGA read buffer (flushed to self.Image when full)."""
        def incBbAndCheckFillImage():
            # One more byte is pending in the FPGA buffer; flush when full.
            self.BufferedBytes += 1
            if self.BufferedBytes == self.top.getBufferRegSize():
                self.flushBufferToImage()
        # self.sendCommand(1,0,1,pInstruction)
        self.sendCommand(1, 0, 1, pInstruction)
        # self.busyWait()
        self.readSDOBufferHigh()
        incBbAndCheckFillImage()

    def send4bitWriteInstruction(self, pInstruction, pDataPayload):
        """Issue a 4-bit command followed by a 16-bit data payload."""
        # self.busyWait()
        self.setSDI(pDataPayload)
        #print("sending {:x}\n".format(pDataPayload))
        self.sendCommand(1, 1, 0, pInstruction)
        self.top.flushCommands()

    def sendCommand(self, bit4bitInstr=1, bitSendData=0, bitReadData=0, cmd4bit=0, bitKeepClkHigh=0):
        '''
        Assemble and write the FPGA command byte.
        `define CMDBIT_4BITINSTR 0
        `define CMDBIT_SENDDATA 1
        `define CMDBIT_READDATA 2
        `define CMDBIT_KEEPCLKHIGH 7
        '''
        command = (cmd4bit & 0x0F) << 3
        if bit4bitInstr:
            command |= 2 ** self.PCMDBIT_4BITINSTR
        if bitSendData:
            command |= 2 ** self.PCMDBIT_SENDDATA
        if bitReadData:
            command |= 2 ** self.PCMDBIT_READDATA
        if bitKeepClkHigh:
            command |= 2 ** self.PCMDBIT_KEEPCLKHIGH
        # print("cmd sending {:x}\n".format(command))
        self.top.cmdFPGAWrite(0x12, command)
        # Data transfers clock 20 bits (4 cmd + 16 data); bare commands 4.
        if(bitSendData or bitReadData):
            self.top.cmdDelay(2 * 20 * 2 * self.delayP2A)
        else:
            self.top.cmdDelay(2 * 4 * 2 * self.delayP2A)

    def setTopProgrammerDelays(self):
        """Program the FPGA clock-low (P2A) and inter-command (P5) delay
        registers in units of 42 ns."""
        #print("tdel5:{:d}".format(int(math.ceil(self.delayP2A / 42e-9))))
        #print("tdly:{:d}".format(int(math.ceil(self.delayP5 / 42e-9))))
        self.top.cmdFPGAWrite(0x10, int(math.ceil(self.delayP2A / 42e-9)))
        self.top.cmdFPGAWrite(0x11, int(math.ceil(self.delayP5 / 42e-9)))

    def setSDI8(self, sdi):
        """Load only the low 8 bits of the serial-data-in register."""
        self.top.cmdFPGAWrite(0x16, sdi & 0xFF)

    def setSDI(self, sdi):
        '''
        16 -set 16 bit sdi value
        '''
        for addr in (0x16, 0x17):
            self.top.cmdFPGAWrite(addr, sdi & 0xFF)
            sdi = sdi >> 8

    def flushBufferToImage(self):
        """Append any pending FPGA read-buffer bytes to self.Image."""
        # print ("storing {:d} bytes to image".format(self.BufferedBytes))
        if self.BufferedBytes > 0:
            self.Image += self.top.cmdReadBufferReg(self.BufferedBytes)
            self.BufferedBytes = 0

    def sendInstruction(self, instr):
        """Clock one 16-bit core instruction into the target."""
        self.setSDI(instr)
        self.sendCommand(1, 1)  # send 4 times positive edge
        # self.top.flushCommands()

    def executeCode(self, code):
        """Execute a sequence of core instructions on the target."""
        for instr in code:
            self.sendInstruction(instr)

    def setPins(self, ICSPCLK=0, SDIOVALUE=0, SDIODRIVEN=1):
        '''
        16 - setPins
        '''
        data = 0
        if ICSPCLK:
            data |= 1
        if SDIODRIVEN:
            data |= 2
        if SDIOVALUE:
            data |= 4
        self.top.cmdFPGAWrite(0x15, data)

    def getStatusFlags(self):
        '''
        [0] - BUSY
        [1] - SDO
        '''
        self.flushBufferToImage()
        self.top.cmdFPGARead(0x12)
        stat = self.top.cmdReadBufferReg()
        return byte2int(stat[0])

    def readSDOBufferHigh(self):
        """Queue a read of the high SDO buffer byte (FPGA reg 0x14)."""
        self.top.cmdFPGARead(0x14)

    def readSDOBufferLow(self):
        """Queue a read of the low SDO buffer byte (FPGA reg 0x15)."""
        self.top.cmdFPGARead(0x15)

    def rawSDIOState(self):
        """Return the current SDIO line state."""
        return bool(self.getStatusFlags() & self.STAT_SDIO)

    def isBusy(self):
        """Return True while the FPGA state machine is busy."""
        return bool(self.getStatusFlags() & self.STAT_BUSY)

    def busyWait(self):
        """Poll isBusy() up to 100 times (~100 us steps) before giving up."""
        for i in range(0, 100):
            if not self.isBusy():
                return
            self.top.hostDelay(0.000001)
        self.throwError("Timeout in busywait.")

    def getCodeAddrToTBLPTR(self, addr):
        """Return the core-instruction tuple loading *addr* into
        TBLPTRU/TBLPTRH/TBLPTRL (MOVLW/MOVWF pairs)."""
        ub = (addr >> 16) & 0xFF
        hb = (addr >> 8) & 0xFF
        lb = addr & 0xFF
        return ((0x0E00 | ub), 0x6EF8, (0x0E00 | hb), 0x6EF7, (0x0E00 | lb), 0x6EF6)
|
mbuesch/toprammer
|
libtoprammer/chips/microchip8/microchip8_18_common.py
|
Python
|
gpl-2.0
| 13,142 | 0.035915 |
# Bootstrap setuptools via the legacy ez_setup shim before importing it.
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
import sys
# Define required packages.
requires = []
# Assume spidev is required on non-windows & non-mac platforms (i.e. linux).
if sys.platform != 'win32' and sys.platform != 'darwin':
    requires.append('spidev')
# Package metadata and install configuration for Adafruit_GPIO.
setup(name = 'Adafruit_GPIO',
      version = '0.8.0',
      author = 'Tony DiCola',
      author_email = 'tdicola@adafruit.com',
      description = 'Library to provide a cross-platform GPIO interface on the Raspberry Pi and Beaglebone Black using the RPi.GPIO and Adafruit_BBIO libraries.',
      license = 'MIT',
      url = 'https://github.com/adafruit/Adafruit_Python_GPIO/',
      install_requires = requires,
      packages = find_packages())
|
WxOutside/software
|
telemetry/sensors/weatherPiArduino/Adafruit_Python_GPIO/setup.py
|
Python
|
unlicense
| 846 | 0.035461 |
class LSA(object):
    """Base object for an LSA run: holds the I/O paths and the hash-prefix
    marker string used by the pipeline."""

    def __init__(self, input_path, output_path):
        super(LSA, self).__init__()
        # Record where to read from and where to write results.
        self.input_path, self.output_path = input_path, output_path
        # Prefix marker for hash/bin header lines.
        self.hpfx = 'k, bins: ['
|
scottdaniel/LatentStrainAnalysis
|
LSA/LSA.py
|
Python
|
mit
| 182 | 0.049451 |
#!/usr/bin/env python
import os
import uuid
import cgi
import hashlib
import webapp2 as webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp import util
from google.appengine.ext import db
from google.appengine.api import users
import datetime
from calendar import timegm
import time
class SettingsEntry(db.Model):
    """App settings record (always queried with LIMIT 1, singleton-style)."""
    # Google-account nicknames allowed to view/submit (checked in Submit
    # and ShowTemperature).
    valid_users_entry = db.ListProperty(str,indexed=False,default=None)
    # SHA-512 hex digest that the Pi's 's' POST parameter must match
    # (verified in Temperature.post).
    secret_hash_entry = db.StringProperty()
class TemperatureEntry(db.Model):
    """One raw temperature sample.

    NOTE(review): no handler in this file writes this model -- presumably
    legacy, superseded by DailyTemperatureEntry; confirm before removing.
    """
    date = db.DateTimeProperty(auto_now_add=True)   # sample timestamp
    room = db.StringProperty()                      # room identifier
    temperature = db.FloatProperty()                # measured temperature
    target = db.FloatProperty()                     # target temperature
    furnacestate = db.IntegerProperty()             # furnace on/off state
    mode = db.StringProperty()                      # thermostat mode
    outside = db.FloatProperty()                    # outside temperature
    other = db.FloatProperty()                      # spare reading
class DailyTemperatureEntry(db.Model):
    """Rolling ~24h log row; each *_entry text field accumulates
    '[unix_ts,value],' fragments appended by Temperature.post."""
    date = db.IntegerProperty()        # unix timestamp when the row was created
    temp_entry = db.TextProperty()     # measured temperatures
    target_entry = db.TextProperty()   # target temperatures
    furnace_entry = db.TextProperty()  # furnace states
    room_entry = db.TextProperty()     # room identifiers
    mode_entry = db.TextProperty()     # thermostat modes
    outside_entry = db.TextProperty()  # outside temperatures
class TargetEntry(db.Model):
    """Pending target-temperature request plus default settings, written by
    Submit and rendered back to the Pi via target.html in Temperature.post."""
    date = db.IntegerProperty()                      # unix timestamp of last update
    target_temperature_entry = db.IntegerProperty()  # requested target, deg C
    target_start_minutes_entry = db.IntegerProperty()  # minutes until target applies
    target_held_minutes_entry = db.IntegerProperty()   # minutes to hold the target
    target_executed = db.BooleanProperty()           # True once the Pi has acted on it
    default_temperature_entry = db.IntegerProperty() # fallback temperature
    default_temperature_mode_entry = db.TextProperty()  # fallback mode
class MainHandler(webapp.RequestHandler):
    """Handler for '/': nothing to show at the root, bounce visitors away."""

    def get(self):
        destination = 'http://www.google.com/'
        self.redirect(destination)
class Temperature(webapp.RequestHandler):
    """POST endpoint the thermostat device reports readings to.

    Authenticated by a shared secret ('s' param, SHA-512 hashed) rather than
    a Google account. Appends the sample to the current DailyTemperatureEntry
    row and renders target.html with the latest TargetEntry so the device can
    pick up pending target changes.
    """

    def post(self):
        # Shared-secret hash lives in the single SettingsEntry row.
        secret_hash = db.GqlQuery(
            "SELECT * FROM SettingsEntry LIMIT 1")[0].secret_hash_entry
        # Request params: t=temperature, g=target, f=furnace state, r=room,
        # h=home flag (read but unused below), o=outside, m=mode, s=secret.
        # float() round-trips validate the numeric fields before storing.
        temp = str(float(cgi.escape(self.request.get('t'))))
        target = str(float(cgi.escape(self.request.get('g'))))
        furnace = str(cgi.escape(self.request.get('f')))
        room = str(cgi.escape(self.request.get('r')))
        home = str(cgi.escape(self.request.get('h')))
        outside = str(float(cgi.escape(self.request.get('o'))))
        mode = str(cgi.escape(self.request.get('m')))
        strS = str(cgi.escape(self.request.get('s')))
        # secret added since I don't want just anyone to pollute my furnace data
        if hashlib.sha512(strS).hexdigest() == secret_hash:
            rightNow = int(time.time())
            dayAgo = rightNow-86400
            # Find a log row started within the last 24 hours, if any.
            recent_record = DailyTemperatureEntry.gql(
                "WHERE date > :1 ORDER BY date DESC", dayAgo)
            rightNow = str(rightNow)
            if recent_record.count()!=0: # update entry
                # Append one '[timestamp,value],' fragment per series.
                dayObj = recent_record[0]
                dayObj.temp_entry = dayObj.temp_entry + \
                    '['+rightNow+','+temp+'],'
                dayObj.target_entry = dayObj.target_entry + \
                    '['+rightNow+','+target+'],'
                dayObj.furnace_entry = dayObj.furnace_entry + \
                    '['+rightNow+','+furnace+'],'
                dayObj.room_entry = dayObj.room_entry + \
                    '['+rightNow+','+room+'],'
                dayObj.mode_entry = dayObj.mode_entry + \
                    '['+rightNow+','+mode+'],'
                dayObj.outside_entry = dayObj.outside_entry + \
                    '['+rightNow+','+outside+'],'
                dayObj.put()
            else: # create entry
                # Start a fresh daily row seeded with this first sample.
                newEntry = DailyTemperatureEntry(
                    date = int(time.time()),
                    temp_entry = '['+rightNow+','+temp+'],',
                    target_entry = '['+rightNow+','+target+'],',
                    furnace_entry = '['+rightNow+','+furnace+'],',
                    room_entry = '['+rightNow+','+room+'],',
                    mode_entry = '['+rightNow+','+mode+'],',
                    outside_entry = '['+rightNow+','+outside+'],'
                    )
                newEntry.put()
            # Echo the accepted values back in a debug header.
            self.response.headers.add_header("X-Raspberry-Pi-Data", temp +','+ \
                                             target +','+ furnace + \
                                             ','+ room +','+ mode + \
                                             ','+ outside)
            # Render the latest target request so the device can apply it.
            the_target = db.GqlQuery(
                "SELECT * FROM TargetEntry ORDER BY date DESC LIMIT 1")
            template_values = {
                'target' : the_target
            }
            path = os.path.join(os.path.dirname(__file__), 'target.html')
            self.response.write(template.render(path, template_values))
        else:
            # Wrong/missing secret: reject without detail.
            self.error(500)
class Submit(webapp.RequestHandler):
    """Authenticated POST endpoint that stores either a one-off target
    temperature request or new default temperature/mode in TargetEntry.

    Only Google accounts listed in SettingsEntry.valid_users_entry may post.
    """

    def post(self):
        user = users.get_current_user()
        valid_users = db.GqlQuery(
            "SELECT * FROM SettingsEntry LIMIT 1")[0].valid_users_entry
        if user and user.nickname() in valid_users and \
                self.request.get('target_temperature'):
            # Feedback page that bounces back to the status view after 5s.
            self.response.write('<html><head><meta http-equiv="refresh" ' + \
                'content="5; url=https://furnaceathome.appspot.com/t"></head><body>')
            target_temperature = \
                int(cgi.escape(self.request.get('target_temperature')))
            target_start_minutes = \
                int(cgi.escape(self.request.get('target_start_minutes')))
            target_held_minutes = \
                int(cgi.escape(self.request.get('target_held_minutes')))
            errors = 0
            # Validate the three inputs, writing feedback as we go; any
            # failure short-circuits the remaining checks via errors != 0.
            if 0 <= target_temperature <= 22:
                self.response.write( \
                    'will set target to %s °C</br>' % target_temperature)
            else:
                self.response.write( \
                    'invalid temperature: %s</br></body></html>' % target_temperature)
                errors+=1
            if errors == 0 and 0 <= target_start_minutes <= 120:
                self.response.write( \
                    'to be reached in %s minutes</br>' % target_start_minutes)
            else:
                self.response.write( \
                    'invalid time span: %s</br></body></html>' % target_start_minutes)
                errors+=1
            if errors == 0 and 5 <= target_held_minutes <= 120:
                self.response.write('for %s minutes' % target_held_minutes)
            else:
                self.response.write( \
                    'invalid duration: %s</br></body></html>' % target_held_minutes)
                errors+=1
            if errors == 0:
                self.response.write('</body></html>')
                recent_record = TargetEntry.gql("WHERE date > 0 ORDER BY date DESC")
                if recent_record.count()!=0: # update entry
                    targetObj = recent_record[0]
                    targetObj.date = timegm(datetime.datetime.now().utctimetuple())
                    targetObj.target_temperature_entry = target_temperature
                    targetObj.target_start_minutes_entry = target_start_minutes
                    targetObj.target_held_minutes_entry = target_held_minutes
                    targetObj.target_executed = False
                    targetObj.put()
                else: # create entry
                    newEntry = TargetEntry(
                        date = int(time.time()),
                        target_temperature_entry = target_temperature,
                        target_start_minutes_entry = target_start_minutes,
                        target_held_minutes_entry = target_held_minutes,
                        target_executed = False
                        )
                    newEntry.put()
                # BUG FIX: the three values are ints, so the previous
                # str-concatenation ('+') raised TypeError; format instead.
                self.response.headers.add_header(
                    "X-Raspberry-Pi-Data",
                    '%d,%d,%d' % (target_temperature, target_start_minutes,
                                  target_held_minutes))
        elif user and user.nickname() in valid_users and \
                self.request.get('default_temp'):
            default_temperature=int(cgi.escape(self.request.get('default_temp')))
            default_temperature_mode = \
                str(cgi.escape(self.request.get('default_temp_mode')))
            recent_record = TargetEntry.gql("WHERE date > 0 ORDER BY date DESC")
            if recent_record.count()!=0: # update entry
                targetObj = recent_record[0]
                targetObj.default_temperature_entry = default_temperature
                targetObj.default_temperature_mode_entry = default_temperature_mode
                targetObj.put()
            else: # create entry
                newEntry = TargetEntry(
                    default_temperature_entry = default_temperature,
                    default_temperature_mode_entry = default_temperature_mode
                    )
                newEntry.put()
            # BUG FIX: add_header() takes (name, value); the stray ': '
            # argument plus int+str concatenation made this raise at
            # runtime. Emit a single formatted header value instead.
            self.response.headers.add_header(
                "X-Raspberry-Pi-Data",
                '%d,%s' % (default_temperature, default_temperature_mode))
class ShowTemperature(webapp.RequestHandler):
    """GET '/t': render the latest daily temperature log for allowed users,
    or a sign-in link otherwise."""

    def get(self):
        user = users.get_current_user()
        valid_users = db.GqlQuery( \
            "SELECT * FROM SettingsEntry LIMIT 1")[0].valid_users_entry
        if user and user.nickname() in valid_users:
            # Most recent daily log row feeds the chart template.
            the_temperature = db.GqlQuery( \
                "SELECT * FROM DailyTemperatureEntry ORDER BY date DESC LIMIT 1")
            template_values = {
                'temperature' : the_temperature,
                'user' : user.nickname(),
                'logout_url' : users.create_logout_url('/')
            }
            path = os.path.join(os.path.dirname(__file__), 'temperature.html')
            self.response.out.write(template.render(path, template_values))
        else:
            # Not signed in or not whitelisted: offer the login link.
            greeting = ('<a href="%s">Sign in or register</a>.' %
                        users.create_login_url('/t'))
            self.response.out.write('<html><body>%s</body></html>' % greeting)
# URL routing table for the WSGI application.
app = webapp.WSGIApplication([('/', MainHandler),
                              ('/temperature', Temperature),
                              ('/submit', Submit),
                              ('/t', ShowTemperature)
                              ])
|
040medien/furnaceathome
|
furnace.py
|
Python
|
gpl-2.0
| 9,961 | 0.012449 |
from __future__ import print_function, absolute_import
import weakref
class PDroneCreator(object):
def __init__(self, mainwindow, clipboard, title="drones"):
self._mainwindow = mainwindow
self._clipboard = clipboard
self._subwin = mainwindow.newSubWindow(title)
from . import PTree
self._tree = PTree(self._subwin.wrapwidget(), self._select_drone)
self._subwin.setWidget(self._tree.widget())
def _select_drone(self, dronetype):
dronetype = ".".join(dronetype)
self._clipboard.set_dragboard_value("drone", dronetype)
def append(self, dronename):
key = tuple(dronename.split("."))
self._tree.append(key)
def remove(self, dronename):
key = tuple(dronename.split("."))
self._tree.remove(key)
|
agoose77/hivesystem
|
hiveguilib/PGui/PDroneCreator.py
|
Python
|
bsd-2-clause
| 807 | 0 |
# -*- coding:UTF-8 -*-
""" pyBox Modbus"""
# !/usr/bin/python
# Python: 3.5.2
# Platform: Windows/ARMv7
# Author: Heyn
# Program: Modbus RTU & TCP
# History: 2017/02/14 V1.0.0 [Heyn]
# 2017/03/08 V1.0.1 [Heyn] Send return string.
# 2017/04/07 V1.0.2 [Heyn] Redesign PBoxModbus class functions.
# 2017/04/10 V1.0.3 [Heyn] Bug fixe import imx6_ixora_led as led
# Windows(X86) Platform: You should have modbus.dll and pymodbus.pyd
# Linux or ARM Platform: You should have modbus.so and pymodbus.cpython-35m-arm-linux-gnueabihf.so
import sys
import pymodbus
if sys.platform == 'linux':
import imx6_ixora_led as led
class PBoxModbus:
    """PBox Modbus client wrapping the native `pymodbus` extension module.

    Supports Modbus/TCP and Modbus/RTU connections.  On Linux targets the
    IXORA LED4 mirrors the state: green after a successful read, red after
    a connection or transfer failure.
    """

    def __init__(self):
        super(PBoxModbus, self).__init__()
        self.isopened = False        # True once new_tcp()/newrtu() succeeded
        self.platform = sys.platform

    def __del__(self):
        # NOTE(review): free_tcp() is also called for RTU connections --
        # presumably harmless, but confirm against the native module.
        self.isopened = False
        pymodbus.free_tcp()
        if self.platform == 'linux':
            led.ioctl(led.IXORA_LED4, led.GREEN, led.LOW)
            led.ioctl(led.IXORA_LED4, led.RED, led.LOW)

    def _signal_failure(self):
        """Light the red LED (Linux only) after a failed connect."""
        if self.platform == 'linux':
            led.ioctl(led.IXORA_LED4, led.RED, led.HIGH)

    def newtcp(self, addr='127.0.0.1', port=502):
        """Open a Modbus/TCP connection; return True on success."""
        print('[Modbus TCP] IP=%s:%d'%(addr, port))
        try:
            self.isopened = pymodbus.new_tcp(addr, port)
        except BaseException as err:
            self.isopened = False
            print(err)
        if self.isopened is False:
            self._signal_failure()
        return self.isopened

    def newrtu(self, dev='/dev/ttymxc1'):
        """Open a Modbus/RTU connection on the given serial device."""
        print('[Modbus RTU] Port=%s'%(dev))
        try:
            self.isopened = pymodbus.new_rtu(dev)
        except BaseException as err:
            self.isopened = False
            print(err)
        if self.isopened is False:
            self._signal_failure()
        return self.isopened

    def settimeout(self, sec=0, msc=500):
        """Set the transfer timeout (default 500 ms).

        sec: whole seconds.
        msc: sub-second part, passed straight to pymodbus.set_timeout().
             NOTE(review): the original comments disagreed on whether this
             is milliseconds or microseconds -- confirm with the native API.
        """
        pymodbus.set_timeout(sec, msc)

    def setslave(self, addr=1):
        """Select the Modbus slave address; returns None when not connected."""
        if self.isopened is False:
            return None
        ret = False
        try:
            ret = pymodbus.set_slave(addr)
        except BaseException as err:
            print(err)
        return ret

    def _read(self, readlist, size):
        """Shared read path for readstring()/readregs().

        readlist: [function code, address, data type].  Returns the raw
        register list, or None when not connected or on transfer error.
        Also mirrors the outcome on the status LED (Linux only).
        """
        if self.isopened is False:
            return None
        try:
            ret = pymodbus.read_registers(readlist[0:3], size)
        except BaseException as err:
            if self.platform == 'linux':
                led.ioctl(led.IXORA_LED4, led.GREEN, led.LOW)
                led.ioctl(led.IXORA_LED4, led.RED, led.HIGH)
            print(err)
            return None
        if self.platform == 'linux':
            led.ioctl(led.IXORA_LED4, led.RED, led.LOW)
            led.ioctl(led.IXORA_LED4, led.GREEN, led.HIGH)
        return ret

    def readstring(self, readlist, size=1):
        """Read registers and decode them as a NUL-stripped ASCII string.

        readlist: [function code, address, data type]
        """
        regs = self._read(readlist, size)
        if regs is None:
            return None
        # Each register value is interpreted as one ASCII code point.
        return ''.join(chr(i) for i in regs).strip('\x00')

    def readregs(self, readlist, size=1):
        """Read registers; returns a list of values, or None on failure.

        readlist: [function code, address, data type]
        """
        return self._read(readlist, size)
# if __name__ == '__main__':
# MODBUS = PBoxModbus()
# print(MODBUS.newtcp())
# print(MODBUS.readregs([3, 1, 'U16']))
|
Heyn2016/Python
|
ApalisT30/Pbox/proto/PBoxModbus.py
|
Python
|
gpl-3.0
| 4,314 | 0.001159 |
"""XKNX version."""
__version__ = "0.19.2.dev"
|
XKNX/xknx
|
xknx/__version__.py
|
Python
|
mit
| 48 | 0 |
# -*- coding: utf-8 -*-
"""
pygments.lexers.actionscript
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for ActionScript and MXML.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, bygroups, using, this, words, default
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer']
class ActionScriptLexer(RegexLexer):
    """
    For ActionScript source code.

    .. versionadded:: 0.9
    """

    name = 'ActionScript'
    aliases = ['as', 'actionscript']
    filenames = ['*.as']
    mimetypes = ['application/x-actionscript', 'text/x-actionscript',
                 'text/actionscript']

    flags = re.DOTALL
    tokens = {
        'root': [
            (r'\s+', Text),
            # Comments and regex literals.
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
            (r'[~^*!%&<>|+=:;,/?\\-]+', Operator),
            (r'[{}\[\]();.]+', Punctuation),
            (words((
                'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break',
                'return', 'continue', 'if', 'else', 'throw', 'try', 'catch',
                'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this',
                'switch'), suffix=r'\b'),
             Keyword),
            (words((
                'class', 'public', 'final', 'internal', 'native', 'override', 'private',
                'protected', 'static', 'import', 'extends', 'implements', 'interface',
                'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get',
                'namespace', 'package', 'set'), suffix=r'\b'),
             Keyword.Declaration),
            (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
             Keyword.Constant),
            # Flash/ActionScript builtin classes and interfaces.
            (words((
                'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion',
                'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array',
                'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData',
                'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType',
                'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle',
                'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu',
                'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem',
                'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError',
                'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject',
                'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter',
                'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher',
                'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference',
                'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType',
                'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter',
                'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent',
                # Fixed: a missing comma after 'IDynamicPropertyOutput' used to
                # concatenate it with 'IDynamicPropertyWriter' into one bogus
                # name, so neither identifier was ever highlighted.
                'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput',
                'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable',
                'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int',
                'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent',
                'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation',
                'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection',
                'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent',
                'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent',
                'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping',
                'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy',
                'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample',
                'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError',
                'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject',
                'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel',
                'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite',
                'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState',
                'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet',
                'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField',
                'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign',
                'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform',
                'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest',
                'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError',
                'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket',
                'XMLUI'), suffix=r'\b'),
             Name.Builtin),
            # Global builtin functions.
            (words((
                'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN',
                'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion',
                'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent',
                'unescape'), suffix=r'\b'),
             Name.Function),
            (r'[$a-zA-Z_]\w*', Name.Other),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-f]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
        ]
    }
class ActionScript3Lexer(RegexLexer):
    """
    For ActionScript 3 source code.

    .. versionadded:: 0.11
    """

    name = 'ActionScript 3'
    aliases = ['as3', 'actionscript3']
    filenames = ['*.as']
    mimetypes = ['application/x-actionscript3', 'text/x-actionscript3',
                 'text/actionscript3']

    # Regex fragments reused below: a plain identifier, and an identifier
    # optionally followed by a generic parameter (e.g. Vector.<int>).
    # NOTE(review): the second fragment is a non-raw string; '\.' and '\w'
    # are unknown escapes that Python passes through unchanged, so the
    # regex works, but a raw string would be safer.
    identifier = r'[$a-zA-Z_]\w*'
    typeidentifier = identifier + '(?:\.<\w+>)?'
    flags = re.DOTALL | re.MULTILINE
    tokens = {
        'root': [
            (r'\s+', Text),
            # Declarations that carry type/namespace information.
            (r'(function\s+)(' + identifier + r')(\s*)(\()',
             bygroups(Keyword.Declaration, Name.Function, Text, Operator),
             'funcparams'),
            (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' +
             typeidentifier + r')',
             bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
                      Keyword.Type)),
            (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
             bygroups(Keyword, Text, Name.Namespace, Text)),
            (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()',
             bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
            (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
            (r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
             r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
             r'switch|import|include|as|is)\b',
             Keyword),
            (r'(class|public|final|internal|native|override|private|protected|'
             r'static|import|extends|implements|interface|intrinsic|return|super|'
             r'dynamic|function|const|get|namespace|package|set)\b',
             Keyword.Declaration),
            (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
             Keyword.Constant),
            # NOTE(review): 'isFinite' appears twice in this alternation;
            # harmless but redundant.
            (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
             r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
             r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
             r'unescape)\b', Name.Function),
            (identifier, Name),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-f]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator),
        ],
        # Parameter list of a function declaration: name[:type][=default].
        'funcparams': [
            (r'\s+', Text),
            (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
             typeidentifier + r'|\*)(\s*)',
             bygroups(Text, Punctuation, Name, Text, Operator, Text,
                      Keyword.Type, Text), 'defval'),
            (r'\)', Operator, 'type')
        ],
        # Optional return-type annotation after the closing paren.
        'type': [
            (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)',
             bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
            (r'\s+', Text, '#pop:2'),
            default('#pop:2')
        ],
        # Optional default value for a parameter.
        'defval': [
            (r'(=)(\s*)([^(),]+)(\s*)(,?)',
             bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
            (r',', Operator, '#pop'),
            default('#pop')
        ]
    }

    def analyse_text(text):
        # Heuristic: a typed declaration ("name : Type") hints at AS3.
        if re.match(r'\w+\s*:\s*\w', text):
            return 0.3
        return 0
class MxmlLexer(RegexLexer):
    """
    For MXML markup.
    Nested AS3 in <script> tags is highlighted by the appropriate lexer.

    .. versionadded:: 1.1
    """

    flags = re.MULTILINE | re.DOTALL
    name = 'MXML'
    aliases = ['mxml']
    filenames = ['*.mxml']
    # Fixed: this attribute was misspelled "mimetimes"; the standard Lexer
    # attribute is "mimetypes", so the misspelled value was never used.
    mimetypes = ['text/xml', 'application/xml']

    tokens = {
        'root': [
            ('[^<&]+', Text),
            (r'&\S*?;', Name.Entity),
            # CDATA sections contain ActionScript 3 -- delegate to its lexer.
            (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
             bygroups(String, using(ActionScript3Lexer), String)),
            ('<!--', Comment, 'comment'),
            (r'<\?.*?\?>', Comment.Preproc),
            ('<![^>]*>', Comment.Preproc),
            (r'<\s*[\w:.-]+', Name.Tag, 'tag'),
            (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag),
        ],
        'comment': [
            ('[^-]+', Comment),
            ('-->', Comment, '#pop'),
            ('-', Comment),
        ],
        'tag': [
            (r'\s+', Text),
            (r'[\w.:-]+\s*=', Name.Attribute, 'attr'),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'attr': [
            (r'\s+', Text),
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }
|
prashanthr/wakatime
|
wakatime/packages/pygments_py3/pygments/lexers/actionscript.py
|
Python
|
bsd-3-clause
| 11,179 | 0.004652 |
from basetest import *
from zfs_autobackup.LogStub import LogStub
from zfs_autobackup.ExecuteNode import ExecuteError
class TestZfsNode(unittest2.TestCase):
    # Integration tests for ZfsNode.  They operate on real (throwaway) test
    # zpools created by prepare_zpools(); shelltest() runs a shell command
    # and returns its stdout, so the expected multi-line strings below must
    # match `zfs list` output byte for byte.
    def setUp(self):
        prepare_zpools()
        # return super().setUp()
    def test_consistent_snapshot(self):
        logger = LogStub()
        description = "[Source]"
        node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description)
        with self.subTest("first snapshot"):
            node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test",exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=200000), "test-20101111000001", 100000)
            r = shelltest("zfs list -H -o name -r -t all " + TEST_POOLS)
            self.assertEqual(r, """
test_source1
test_source1/fs1
test_source1/fs1@test-20101111000001
test_source1/fs1/sub
test_source1/fs1/sub@test-20101111000001
test_source2
test_source2/fs2
test_source2/fs2/sub
test_source2/fs2/sub@test-20101111000001
test_source2/fs3
test_source2/fs3/sub
test_target1
""")
        # min_change=1 with no actual changes: nothing should be snapshotted.
        with self.subTest("second snapshot, no changes, no snapshot"):
            node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test",exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=200000), "test-20101111000002", 1)
            r = shelltest("zfs list -H -o name -r -t all " + TEST_POOLS)
            self.assertEqual(r, """
test_source1
test_source1/fs1
test_source1/fs1@test-20101111000001
test_source1/fs1/sub
test_source1/fs1/sub@test-20101111000001
test_source2
test_source2/fs2
test_source2/fs2/sub
test_source2/fs2/sub@test-20101111000001
test_source2/fs3
test_source2/fs3/sub
test_target1
""")
        # min_change=0: even unchanged datasets get an (empty) snapshot.
        with self.subTest("second snapshot, no changes, empty snapshot"):
            node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=200000), "test-20101111000002", 0)
            r = shelltest("zfs list -H -o name -r -t all " + TEST_POOLS)
            self.assertEqual(r, """
test_source1
test_source1/fs1
test_source1/fs1@test-20101111000001
test_source1/fs1@test-20101111000002
test_source1/fs1/sub
test_source1/fs1/sub@test-20101111000001
test_source1/fs1/sub@test-20101111000002
test_source2
test_source2/fs2
test_source2/fs2/sub
test_source2/fs2/sub@test-20101111000001
test_source2/fs2/sub@test-20101111000002
test_source2/fs3
test_source2/fs3/sub
test_target1
""")
    def test_consistent_snapshot_prepostcmds(self):
        # Verifies ordering/abort semantics of pre-/post-snapshot commands;
        # command output is captured from stdout via redirect_stdout.
        logger = LogStub()
        description = "[Source]"
        node = ZfsNode(snapshot_time_format="test", hold_name="test", logger=logger, description=description, debug_output=True)
        with self.subTest("Test if all cmds are executed correctly (no failures)"):
            with OutputIO() as buf:
                with redirect_stdout(buf):
                    node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=1), "test-1",
                                             0,
                                             pre_snapshot_cmds=["echo pre1", "echo pre2"],
                                             post_snapshot_cmds=["echo post1 >&2", "echo post2 >&2"]
                                             )
            self.assertIn("STDOUT > pre1", buf.getvalue())
            self.assertIn("STDOUT > pre2", buf.getvalue())
            self.assertIn("STDOUT > post1", buf.getvalue())
            self.assertIn("STDOUT > post2", buf.getvalue())
        with self.subTest("Failure in the middle, only pre1 and both post1 and post2 should be executed, no snapshot should be attempted"):
            with OutputIO() as buf:
                with redirect_stdout(buf):
                    with self.assertRaises(ExecuteError):
                        node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=1), "test-1",
                                                 0,
                                                 pre_snapshot_cmds=["echo pre1", "false", "echo pre2"],
                                                 post_snapshot_cmds=["echo post1", "false", "echo post2"]
                                                 )
            print(buf.getvalue())
            # pre-cmds stop at the first failure; post-cmds always all run.
            self.assertIn("STDOUT > pre1", buf.getvalue())
            self.assertNotIn("STDOUT > pre2", buf.getvalue())
            self.assertIn("STDOUT > post1", buf.getvalue())
            self.assertIn("STDOUT > post2", buf.getvalue())
        with self.subTest("Snapshot fails"):
            with OutputIO() as buf:
                with redirect_stdout(buf):
                    with self.assertRaises(ExecuteError):
                        #same snapshot name as before so it fails
                        node.consistent_snapshot(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=False, min_change=1), "test-1",
                                                 0,
                                                 pre_snapshot_cmds=["echo pre1", "echo pre2"],
                                                 post_snapshot_cmds=["echo post1", "echo post2"]
                                                 )
            print(buf.getvalue())
            self.assertIn("STDOUT > pre1", buf.getvalue())
            self.assertIn("STDOUT > pre2", buf.getvalue())
            self.assertIn("STDOUT > post1", buf.getvalue())
            self.assertIn("STDOUT > post2", buf.getvalue())
    def test_getselected(self):
        # should be excluded by property
        shelltest("zfs create test_source1/fs1/subexcluded")
        shelltest("zfs set autobackup:test=false test_source1/fs1/subexcluded")
        # should be excluded by being unchanged
        shelltest("zfs create test_source1/fs1/unchanged")
        shelltest("zfs snapshot test_source1/fs1/unchanged@somesnapshot")
        logger = LogStub()
        description = "[Source]"
        node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description)
        s = pformat(node.selected_datasets(property_name="autobackup:test", exclude_paths=[], exclude_received=False, exclude_unchanged=True, min_change=1))
        print(s)
        # basics
        self.assertEqual(s, """[(local): test_source1/fs1,
 (local): test_source1/fs1/sub,
 (local): test_source2/fs2/sub]""")
    def test_validcommand(self):
        logger = LogStub()
        description = "[Source]"
        node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description)
        with self.subTest("test invalid option"):
            self.assertFalse(node.valid_command(["zfs", "send", "--invalid-option", "nonexisting"]))
        with self.subTest("test valid option"):
            self.assertTrue(node.valid_command(["zfs", "send", "-v", "nonexisting"]))
    def test_supportedsendoptions(self):
        logger = LogStub()
        description = "[Source]"
        node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description)
        # -D propably always supported
        self.assertGreater(len(node.supported_send_options), 0)
    def test_supportedrecvoptions(self):
        logger = LogStub()
        description = "[Source]"
        # NOTE: this could hang via ssh if we dont close filehandles properly. (which was a previous bug)
        node = ZfsNode(snapshot_time_format="test-%Y%m%d%H%M%S", hold_name="zfs_autobackup:test", logger=logger, description=description, ssh_to='localhost')
        self.assertIsInstance(node.supported_recv_options, list)
if __name__ == '__main__':
    # Use unittest2 here: it is demonstrably in scope (TestZfsNode derives
    # from unittest2.TestCase), whereas plain `unittest` is not imported by
    # this module and would raise NameError unless basetest re-exports it.
    unittest2.main()
|
psy0rz/zfs_autobackup
|
tests/test_zfsnode.py
|
Python
|
gpl-3.0
| 8,088 | 0.003709 |
import time # about 60 s
def countBinOnes(x):
    """Return the population count (number of 1 bits) of x."""
    ones = 0
    while x:
        ones += x & 1
        x >>= 1
    return ones
def isSpecialSumSet(A):
    """Check rules (i) and (ii) of a special sum set, but only for disjoint
    subset pairs where one side contains the last element A[-1].

    Pairs not involving the last element are deliberately skipped: the
    incremental search (findSpecialOptimum) has already validated every
    shorter prefix, so earlier detection is not needed here.
    Rule (i): disjoint subsets must have different sums.
    Rule (ii): the subset with more elements must have the larger sum.
    """
    n = len(A)
    total_masks = (1 << n) - 1
    # Subset sums indexed by bitmask: bit j set means A[j] is included.
    sums = [0] * total_masks
    for mask in range(1, total_masks):
        sums[mask] = sum(A[j] for j in range(n) if (mask >> j) & 1)
    half = 1 << (n - 1)
    for lo in range(1, half):                  # masks without the last element
        for hi in range(half, total_masks):    # masks containing the last element
            if lo & hi:
                continue                       # subsets overlap: not a valid pair
            if sums[lo] == sums[hi]:
                return False                   # rule (i) fails
            bits_lo = bin(lo).count('1')
            bits_hi = bin(hi).count('1')
            if bits_lo != bits_hi and (bits_lo > bits_hi) != (sums[lo] > sums[hi]):
                return False                   # rule (ii) fails
    return True
# for loop is too ugly, recursion is beautiful
def findSpecialOptimum(a, pos):
    # Recursively extend the strictly-ascending candidate set `a` one
    # position at a time, printing every complete special sum set found.
    # Python 2 code (uses the print statement).  Relies on the module-level
    # global `upbound` to bound the first two elements, and mutates `a`
    # in place while backtracking.
    if pos > 1:
        # Keep extending only while rule (ii) can still hold: the two
        # smallest elements must sum to more than the current largest.
        while a[0] + a[1] > a[pos]:
            if isSpecialSumSet(a[:pos + 1]) == True:
                if pos == len(a) - 1: # found a complete special sum set: print it
                    print a, sum(a)
                    return
                a[pos + 1] = a[pos] + 1
                findSpecialOptimum(a, pos + 1)
            a[pos] += 1
    else:
        # First two positions are capped by `upbound` to prune the search.
        while a[pos] <= upbound[pos]: # the upbounding
            a[pos + 1] = a[pos] + 1
            findSpecialOptimum(a, pos + 1)
            a[pos] += 1
    return
start = time.time()
# Search for 7-element special sum sets; all elements start at 11.
Set = [11] * 7
# Upper bounds for the first two elements -- prunes the search space
# (presumably derived from the known smaller optimum; TODO confirm).
upbound = [20, 36]
findSpecialOptimum(Set, 0)
print( 'Time cost: %lf s.' %( time.time() - start ) )
|
wandering007/ProjectEuler
|
src/103.py
|
Python
|
apache-2.0
| 1,733 | 0.018465 |
from django.db.models import CharField, Value as V
from django.db.models.functions import Coalesce, Length, Upper
from django.test import TestCase
from django.test.utils import register_lookup
from .models import Author
class UpperBilateral(Upper):
    # bilateral=True applies the transform to both sides of a lookup, so
    # e.g. name__upper__exact='john smith' compares UPPER(name) against
    # UPPER('john smith') -- see the tests below.
    bilateral = True
class FunctionTests(TestCase):
    # General tests for database functions: ordering by nested functions,
    # bilateral transforms, and functions used directly in filters.
    def test_nested_function_ordering(self):
        # Order by the length of the alias, falling back to the name.
        Author.objects.create(name='John Smith')
        Author.objects.create(name='Rhonda Simpson', alias='ronny')
        authors = Author.objects.order_by(Length(Coalesce('alias', 'name')))
        self.assertQuerysetEqual(
            authors, [
                'Rhonda Simpson',
                'John Smith',
            ],
            lambda a: a.name
        )
        authors = Author.objects.order_by(Length(Coalesce('alias', 'name')).desc())
        self.assertQuerysetEqual(
            authors, [
                'John Smith',
                'Rhonda Simpson',
            ],
            lambda a: a.name
        )
    def test_func_transform_bilateral(self):
        # The bilateral transform upper-cases both the column and the value.
        with register_lookup(CharField, UpperBilateral):
            Author.objects.create(name='John Smith', alias='smithj')
            Author.objects.create(name='Rhonda')
            authors = Author.objects.filter(name__upper__exact='john smith')
            self.assertQuerysetEqual(
                authors.order_by('name'), [
                    'John Smith',
                ],
                lambda a: a.name
            )
    def test_func_transform_bilateral_multivalue(self):
        # Bilateral transform applied to every member of an __in list.
        with register_lookup(CharField, UpperBilateral):
            Author.objects.create(name='John Smith', alias='smithj')
            Author.objects.create(name='Rhonda')
            authors = Author.objects.filter(name__upper__in=['john smith', 'rhonda'])
            self.assertQuerysetEqual(
                authors.order_by('name'), [
                    'John Smith',
                    'Rhonda',
                ],
                lambda a: a.name
            )
    def test_function_as_filter(self):
        # A function of a literal value can be the right-hand side of a filter.
        Author.objects.create(name='John Smith', alias='SMITHJ')
        Author.objects.create(name='Rhonda')
        self.assertQuerysetEqual(
            Author.objects.filter(alias=Upper(V('smithj'))),
            ['John Smith'], lambda x: x.name
        )
        self.assertQuerysetEqual(
            Author.objects.exclude(alias=Upper(V('smithj'))),
            ['Rhonda'], lambda x: x.name
        )
|
nesdis/djongo
|
tests/django_tests/tests/v22/tests/db_functions/tests.py
|
Python
|
agpl-3.0
| 2,439 | 0.00082 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    """Return the Building template object for the shared Tatooine
    'desert demons camp (medium)' POI."""
    result = Building()
    result.template = "object/building/poi/shared_tatooine_desert_demons_camp_medium.iff"
    result.attribute_template_id = -1
    result.stfName("poi_n","base_poi_building")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return result
|
anhstudios/swganh
|
data/scripts/templates/object/building/poi/shared_tatooine_desert_demons_camp_medium.py
|
Python
|
mit
| 465 | 0.047312 |
import unittest
from webtest import TestApp
import test_helper
class ErrorsControllerTests(unittest.TestCase):
    """Functional tests: the custom error pages must themselves render OK.

    Uses self.assertEqual instead of bare `assert` so the checks survive
    `python -O` (which strips asserts) and report useful failure messages.
    """

    def test_error_404(self):
        # The error page is requested directly, so it is served with 200.
        app = TestApp(test_helper.get_app())
        self.assertEqual(app.get('/error/404').status, '200 OK')

    def test_error_500(self):
        app = TestApp(test_helper.get_app())
        self.assertEqual(app.get('/error/500').status, '200 OK')
|
Rolinh/pydeo
|
pydeo/test/functional/errors_controller_test.py
|
Python
|
bsd-3-clause
| 378 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2012 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.1.1'

# Development-release marker and its date stamp; both feed GetVersion().
VERSION_DEV = True
VERSION_DATE = '20140606'


def GetVersion():
  """Returns the plaso version string, date-stamped for dev releases."""
  if VERSION_DEV:
    return u'{0:s}_{1:s}'.format(__version__, VERSION_DATE)
  return __version__
|
iwm911/plaso
|
plaso/__init__.py
|
Python
|
apache-2.0
| 934 | 0.003212 |
import datetime
import logging
from unittest.mock import patch
from django.test import TestCase
from django.test.utils import override_settings
from konfera import models
from payments import utils
from payments.models import ProcessedTransaction
def make_payment(new_data):
    """Build a FioBank-style payment dict: defaults overridden by new_data."""
    defaults = {
        'date': datetime.date(2015, 10, 5),
        'variable_symbol': '1234',
        'transaction_id': '1234',
        'amount': 0.0,
        'currency': 'EUR',
        'comment': '',
        'executor': '',
    }
    # Keys in new_data win over the defaults.
    return {**defaults, **new_data}
logging.disable(logging.WARNING)
class TestGetLastPayements(TestCase):
    # NOTE(review): class name misspells "Payments"; renaming would change
    # the public test name, so it is only flagged here.
    @patch('django.utils.timezone.now', return_value=datetime.datetime(2016, 9, 29))
    @patch('fiobank.FioBank.period', return_value=[])
    @override_settings(FIO_BANK_TOKEN='fio_token')
    def test__get_last_payments(self, FioBankMockPeriod, timezone_mock):
        # With "now" mocked to 2016-09-29, FioBank should be queried for the
        # period 2016-09-26..2016-09-29.
        data = utils._get_last_payments()
        self.assertEqual(data, [])
        FioBankMockPeriod.assert_called_with('2016-09-26', '2016-09-29')
        timezone_mock.assert_called_once_with()
class TestGetNotProcessedPayments(TestCase):
    # _get_not_processed_payments should drop payments whose transaction_id
    # is already recorded as a ProcessedTransaction.
    def test_no_processed_payment_is_available(self):
        payments = [
            make_payment({'transaction_id': '1'}),
            make_payment({'transaction_id': '2'}),
        ]
        self.assertEqual(
            list(utils._get_not_processed_payments(payments)),
            payments
        )
    def test_processed_payments_filtered(self):
        payments = [
            make_payment({'transaction_id': '1'}),
            make_payment({'transaction_id': '2'}),
            make_payment({'transaction_id': '3'}),
        ]
        # Transaction '2' is already processed, so it must be filtered out.
        ProcessedTransaction.objects.create(transaction_id='2', amount=0)
        self.assertEqual(
            list(utils._get_not_processed_payments(payments)),
            [
                make_payment({'transaction_id': '1'}),
                make_payment({'transaction_id': '3'}),
            ]
        )
class TestGetPaymentsForOrder(TestCase):
    # _get_payments_for_order should yield exactly the payments whose
    # variable_symbol matches the order's.
    def setUp(self):
        self.order = models.Order.objects.create(price=200, discount=0)
    def test_no_payments(self):
        payments = []
        self.assertEqual(
            list(utils._get_payments_for_order(self.order, payments)),
            []
        )
    def test_payments_for_different_orders(self):
        # Variable symbols derived from other pks must not match.
        payments = [
            make_payment({'variable_symbol': str(self.order.pk + 7)}),
            make_payment({'variable_symbol': str(self.order.pk + 13)}),
        ]
        self.assertEqual(
            list(utils._get_payments_for_order(self.order, payments)),
            []
        )
    def test_payment_found_for_order(self):
        payments = [
            make_payment({'variable_symbol': self.order.variable_symbol}),
            make_payment({'variable_symbol': str(self.order.pk + 13)}),
        ]
        self.assertEqual(
            list(utils._get_payments_for_order(self.order, payments)),
            [make_payment({'variable_symbol': self.order.variable_symbol})]
        )
    def test_multiple_payments_found_for_order(self):
        # All matching payments are returned, in their original order.
        payments = [
            make_payment({'variable_symbol': self.order.variable_symbol}),
            make_payment({'variable_symbol': str(self.order.pk + 13)}),
            make_payment({'variable_symbol': self.order.variable_symbol}),
        ]
        self.assertEqual(
            list(utils._get_payments_for_order(self.order, payments)),
            [
                make_payment({'variable_symbol': self.order.variable_symbol}),
                make_payment({'variable_symbol': self.order.variable_symbol}),
            ]
        )
class TestProcessPayment(TestCase):
    # _process_payment should add the paid amount to the order, update its
    # status (PARTLY_PAID vs PAID against price - discount), and record the
    # transaction as processed.
    def test_attendee_paid_less(self):
        order = models.Order.objects.create(price=100, discount=10)
        payment = make_payment({'amount': 80, 'transaction_id': '7'})
        utils._process_payment(order, payment)
        # 80 < 90 (price - discount): still only partly paid.
        self.assertEqual(order.amount_paid, 80)
        self.assertEqual(order.status, models.order.PARTLY_PAID)
    def test_attendee_paid_enough(self):
        order = models.Order.objects.create(price=100, discount=10, amount_paid=5, status=models.order.PARTLY_PAID)
        payment = make_payment({'amount': 85, 'transaction_id': '7'})
        utils._process_payment(order, payment)
        # 5 + 85 == 90 (price - discount): order becomes fully paid.
        self.assertEqual(order.amount_paid, 90)
        self.assertEqual(order.status, models.order.PAID)
    def test_payment_marked_as_processed(self):
        order = models.Order.objects.create(price=100, discount=10)
        payment = make_payment({'amount': 80, 'transaction_id': '7'})
        self.assertEqual(ProcessedTransaction.objects.count(), 0)
        utils._process_payment(order, payment)
        self.assertEqual(ProcessedTransaction.objects.count(), 1)
        self.assertEqual(ProcessedTransaction.objects.all()[0].transaction_id, '7')
class TestCheckPaymentsStatus(TestCase):
    # End-to-end tests of check_payments_status() with the FioBank fetch
    # (_get_last_payments) mocked out.
    def setUp(self):
        self.order1 = models.Order.objects.create(price=200, discount=0)
        self.order2 = models.Order.objects.create(price=200, discount=7)
    @patch('payments.utils._get_last_payments', return_value=[])
    def test_no_payments_available(self, mock_api_call):
        """ FioBank doesn't have any payments - no order status should be changed """
        utils.check_payments_status()
        order1 = models.Order.objects.get(pk=self.order1.pk)
        order2 = models.Order.objects.get(pk=self.order2.pk)
        self.assertEqual(mock_api_call.call_count, 1)
        self.assertEqual(order1.status, models.order.AWAITING)
        self.assertEqual(order2.status, models.order.AWAITING)
    @patch('payments.utils._get_last_payments')
    def test_one_order_is_paid(self, mock_api_call):
        """ FioBank has a payment for order1 only - order1 becomes PAID, order2 stays AWAITING """
        mock_api_call.return_value = [
            make_payment({'variable_symbol': self.order1.variable_symbol, 'amount': 200, 'transaction_id': '7'}),
        ]
        utils.check_payments_status()
        order1 = models.Order.objects.get(pk=self.order1.pk)
        order2 = models.Order.objects.get(pk=self.order2.pk)
        self.assertEqual(mock_api_call.call_count, 1)
        self.assertEqual(order1.status, models.order.PAID)
        self.assertEqual(order2.status, models.order.AWAITING)
    @patch('payments.utils._get_last_payments')
    def test_all_orders_are_paid(self, mock_api_call):
        # One full payment per order: both orders end up PAID.
        mock_api_call.return_value = [
            make_payment({'variable_symbol': self.order1.variable_symbol, 'amount': 200, 'transaction_id': '7'}),
            make_payment({'variable_symbol': self.order2.variable_symbol, 'amount': 200, 'transaction_id': '8'}),
        ]
        utils.check_payments_status()
        order1 = models.Order.objects.get(pk=self.order1.pk)
        order2 = models.Order.objects.get(pk=self.order2.pk)
        self.assertEqual(mock_api_call.call_count, 1)
        self.assertEqual(order1.status, models.order.PAID)
        self.assertEqual(order2.status, models.order.PAID)
    @patch('payments.utils._get_last_payments')
    def test_order_is_paid_in_multiple_payments(self, mock_api_call):
        # order1 is covered by two partial payments (150 + 50 = 200);
        # order2 only receives 30 of 193 and stays PARTLY_PAID.
        mock_api_call.return_value = [
            make_payment({'variable_symbol': self.order1.variable_symbol, 'amount': 150, 'transaction_id': '7'}),
            make_payment({'variable_symbol': self.order1.variable_symbol, 'amount': 50, 'transaction_id': '79'}),
            make_payment({'variable_symbol': self.order2.variable_symbol, 'amount': 30, 'transaction_id': '80'}),
        ]
        utils.check_payments_status()
        order1 = models.Order.objects.get(pk=self.order1.pk)
        order2 = models.Order.objects.get(pk=self.order2.pk)
        self.assertEqual(order1.status, models.order.PAID)
        self.assertEqual(order2.status, models.order.PARTLY_PAID)
|
kapucko/django-konfera
|
payments/tests.py
|
Python
|
mit
| 7,832 | 0.001404 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Event CRM Sale',
'version': '1.0',
'category': 'Marketing/Events',
'website': 'https://www.odoo.com/page/events',
'description': "Add information of sale order linked to the registration for the creation of the lead.",
'depends': ['event_crm', 'event_sale'],
'data': [
'views/event_lead_rule_views.xml',
],
'installable': True,
'auto_install': True,
}
|
ddico/odoo
|
addons/event_crm_sale/__manifest__.py
|
Python
|
agpl-3.0
| 511 | 0.001957 |
import mock
from pdcupdater.tests.handler_tests import BaseHandlerTest, mock_pdc
import pdcupdater.services
import pdcupdater.handlers.retirement
from pdcupdater.handlers.retirement import RetireComponentHandler
class TestRetiredComponents(BaseHandlerTest):
    """Tests for RetireComponentHandler.

    Covers fedmsg routing (can_handle), PDC updates on retirement messages
    (handle), reconciliation against dist-git (audit) and bulk initialization
    (initialize).
    """
    handler_path = 'pdcupdater.handlers.retirement:RetireComponentHandler'

    def test_can_handle_retire_msg(self):
        """A retirement (dead.package added) message is accepted."""
        idx = '2017-b1adac6d-64e9-406f-a1f4-4d3e57105649'
        msg = pdcupdater.utils.get_fedmsg(idx)
        result = self.handler.can_handle(None, msg)
        self.assertTrue(result)

    def test_cannot_handle_unretire_msg(self):
        """An unretirement message is rejected by can_handle."""
        idx = '2017-d20c1ee0-9c00-4ab8-9364-0fdf120e822c'
        msg = pdcupdater.utils.get_fedmsg(idx)
        result = self.handler.can_handle(None, msg)
        self.assertFalse(result)

    @mock_pdc
    def test_can_process_retire_msg(self, pdc):
        """Handling a retirement message PATCHes every SLA of the branch."""
        pdc.add_endpoint('component-branches', 'GET', [
            {
                "id": 89151,
                "global_component": "iwhd",
                "name": "f26",
                "slas": [
                    {
                        "id": 178020,
                        "sla": "bug_fixes",
                        "eol": "2222-07-01"
                    },
                    {
                        "id": 178028,
                        "sla": "security_fixes",
                        "eol": "2222-07-01"
                    }
                ],
                "type": "rpm",
                "active": True,
                "critical_path": False
            }
        ])
        pdc.add_endpoint('component-branch-slas', 'GET', [
            {
                "id": 178020,
                "sla": "bug_fixes",
                "branch": {
                    "id": 89151,
                    "name": "f26",
                    "global_component": "iwhd",
                    "type": "rpm",
                    "critical_path": False,
                    "active": True
                },
                "eol": "2222-07-01"
            },
            {
                "id": 178028,
                "sla": "security_fixes",
                "branch": {
                    "id": 89151,
                    "name": "f26",
                    "global_component": "iwhd",
                    "type": "rpm",
                    "critical_path": False,
                    "active": True
                },
                "eol": "2222-07-01"
            }
        ])
        pdc.add_endpoint('component-branch-slas/178020', 'PATCH', 'ok')
        pdc.add_endpoint('component-branch-slas/178028', 'PATCH', 'ok')

        idx = '2017-b1adac6d-64e9-406f-a1f4-4d3e57105649'
        msg = pdcupdater.utils.get_fedmsg(idx)
        self.handler.handle(pdc, msg)

        expected_keys = [
            'component-branches',
            'component-branch-slas/178020',
            'component-branch-slas/178028'
        ]
        # list() so the comparison also works on Python 3, where dict.keys()
        # returns a view object that never compares equal to a list.
        self.assertEqual(list(pdc.calls.keys()), expected_keys)

    @mock_pdc
    def test_can_process_retire_msg_already_retired(self, pdc):
        """No SLA PATCHes are issued when the branch is already inactive."""
        pdc.add_endpoint('component-branches', 'GET', [
            {
                "id": 155867,
                "global_component": "obexftp",
                "name": "f26",
                "slas": [
                    {
                        "id": 310591,
                        "sla": "bug_fixes",
                        "eol": "2017-06-28"
                    },
                    {
                        "id": 310602,
                        "sla": "security_fixes",
                        "eol": "2017-06-28"
                    }
                ],
                "type": "rpm",
                "active": False,
                "critical_path": False
            }
        ])

        idx = '2017-3f490f4d-7612-4881-80cb-e1a941d6d700'
        msg = pdcupdater.utils.get_fedmsg(idx)
        self.handler.handle(pdc, msg)

        # Only the branch lookup should have happened -- no PATCH calls.
        expected_keys = [
            'component-branches'
        ]
        self.assertEqual(list(pdc.calls.keys()), expected_keys)

    @mock_pdc
    def test_audit(self, pdc):
        """PDC and dist-git agree (retired branch retired, active branch active)."""
        pdc.add_endpoint('component-branches', 'GET', [
            {
                "id": 155867,
                "global_component": "obexftp",
                "name": "f26",
                "slas": [
                    {
                        "id": 310591,
                        "sla": "bug_fixes",
                        "eol": "2017-06-28"
                    },
                    {
                        "id": 310602,
                        "sla": "security_fixes",
                        "eol": "2017-06-28"
                    }
                ],
                "type": "rpm",
                "active": False,
                "critical_path": False
            },
            {
                "id": 323149,
                "global_component": "python",
                "name": "f26",
                "slas": [
                    {
                        "id": 646309,
                        "sla": "security_fixes",
                        "eol": "2222-07-01"
                    },
                    {
                        "id": 646303,
                        "sla": "bug_fixes",
                        "eol": "2222-07-01"
                    }
                ],
                "type": "module",
                "active": True,
                "critical_path": False
            }
        ])

        with mock.patch('requests.Session') as mock_requests_session:
            # First HEAD (obexftp dead.package) -> 200 (retired in git),
            # second HEAD (python dead.package) -> 404 (alive in git).
            mock_rv_found = mock.Mock()
            mock_rv_found.status_code = 200
            mock_rv_not_found = mock.Mock()
            mock_rv_not_found.status_code = 404
            mock_session_rv = mock.Mock()
            mock_session_rv.head.side_effect = [mock_rv_found, mock_rv_not_found]
            mock_requests_session.return_value = mock_session_rv
            present, absent = self.handler.audit(pdc)

        self.assertEqual(present, set())
        self.assertEqual(absent, set())

    @mock_pdc
    def test_audit_retired_in_pdc_not_git(self, pdc):
        """A branch inactive in PDC but alive in dist-git shows up in `present`."""
        pdc.add_endpoint('component-branches', 'GET', [
            {
                "id": 155867,
                "global_component": "obexftp",
                "name": "f26",
                "slas": [
                    {
                        "id": 310591,
                        "sla": "bug_fixes",
                        "eol": "2017-06-28"
                    },
                    {
                        "id": 310602,
                        "sla": "security_fixes",
                        "eol": "2017-06-28"
                    }
                ],
                "type": "rpm",
                "active": False,
                "critical_path": False
            },
            {
                "id": 323149,
                "global_component": "python",
                "name": "f26",
                "slas": [
                    {
                        "id": 646309,
                        "sla": "security_fixes",
                        "eol": "2222-07-01"
                    },
                    {
                        "id": 646303,
                        "sla": "bug_fixes",
                        "eol": "2222-07-01"
                    }
                ],
                "type": "module",
                "active": True,
                "critical_path": False
            }
        ])

        with mock.patch('requests.Session') as mock_requests_session:
            # Every dead.package lookup 404s: nothing is retired in dist-git.
            mock_rv_not_found = mock.Mock()
            mock_rv_not_found.status_code = 404
            mock_session_rv = mock.Mock()
            mock_session_rv.head.return_value = mock_rv_not_found
            mock_requests_session.return_value = mock_session_rv
            present, absent = self.handler.audit(pdc)

        self.assertEqual(present, {'rpm/obexftp#f26'})
        self.assertEqual(absent, set())

    @mock_pdc
    def test_audit_retired_in_git_not_pdc(self, pdc):
        """Branches retired in dist-git but still active in PDC show up in `absent`."""
        pdc.add_endpoint('component-branches', 'GET', [
            {
                "id": 155867,
                "global_component": "obexftp",
                "name": "f26",
                "slas": [
                    {
                        "id": 310591,
                        "sla": "bug_fixes",
                        "eol": "2222-06-28"
                    },
                    {
                        "id": 310602,
                        "sla": "security_fixes",
                        "eol": "2222-06-28"
                    }
                ],
                "type": "rpm",
                "active": True,
                "critical_path": False
            },
            {
                "id": 323149,
                "global_component": "python",
                "name": "f26",
                "slas": [
                    {
                        "id": 646309,
                        "sla": "security_fixes",
                        "eol": "2222-07-01"
                    },
                    {
                        "id": 646303,
                        "sla": "bug_fixes",
                        "eol": "2222-07-01"
                    }
                ],
                "type": "module",
                "active": True,
                "critical_path": False
            }
        ])

        with mock.patch('requests.Session') as mock_requests_session:
            # Every dead.package lookup returns 200: everything retired in git.
            # (Renamed from mock_rv_not_found: the status code is 200, i.e. found.)
            mock_rv_found = mock.Mock()
            mock_rv_found.status_code = 200
            mock_session_rv = mock.Mock()
            mock_session_rv.head.return_value = mock_rv_found
            mock_requests_session.return_value = mock_session_rv
            present, absent = self.handler.audit(pdc)

        self.assertEqual(present, set())
        self.assertEqual(absent, {'rpm/obexftp#f26', 'module/python#f26'})

    @mock_pdc
    def test_initialize(self, pdc):
        """initialize() retires exactly the branches dist-git reports as retired."""
        pdc.add_endpoint('component-branches', 'GET', [
            {
                "id": 155867,
                "global_component": "obexftp",
                "name": "f26",
                "slas": [
                    {
                        "id": 310591,
                        "sla": "bug_fixes",
                        "eol": "2017-06-28"
                    },
                    {
                        "id": 310602,
                        "sla": "security_fixes",
                        "eol": "2017-06-28"
                    }
                ],
                "type": "rpm",
                "active": False,
                "critical_path": False
            },
            {
                "id": 323149,
                "global_component": "python",
                "name": "f26",
                "slas": [
                    {
                        "id": 646309,
                        "sla": "security_fixes",
                        "eol": "2222-07-01"
                    },
                    {
                        "id": 646303,
                        "sla": "bug_fixes",
                        "eol": "2222-07-01"
                    }
                ],
                "type": "module",
                "active": True,
                "critical_path": False
            }
        ])

        with mock.patch.object(pdcupdater.handlers.retirement, '_retire_branch') as mock_retire_branch, \
                mock.patch.object(pdcupdater.handlers.retirement, '_is_retired_in_dist_git') as mock_is_retired_in_dist_git:
            # Only the first branch is retired in dist-git.
            mock_is_retired_in_dist_git.side_effect = [True, False]
            self.handler.initialize(pdc)
            self.assertEqual(mock_retire_branch.call_count, 1)
|
fedora-infra/pdc-updater
|
pdcupdater/tests/handler_tests/test_retirement.py
|
Python
|
lgpl-2.1
| 11,525 | 0.000347 |
# Words we are looking for on the board.
dictionary = {"GEEKS", "FOR", "QUIZ", "GO"}

# Board dimensions and the default 3x3 boggle board.
N, M = 3, 3
board = [['G', 'I', 'Z'],
         ['U', 'E', 'K'],
         ['Q', 'S', 'E']]


class Graph:
    """Kept for backward compatibility with earlier adjacency-based attempts."""

    class Vertex:
        # Fixed: was misspelled __int__, so val/adj were never initialized.
        def __init__(self, v):
            self.val = v
            self.adj = []  # adjacent vertices


def findWords(board=board):
    """Return every dictionary word that can be traced on *board*.

    A word is traced by starting at any cell and repeatedly moving to one of
    the 8 neighbouring cells, never revisiting a cell within one word.

    Fixes over the previous version: the old code recursed on the wrong node,
    appended characters to a ``str`` and relied on undefined helpers
    (``creategraph``/``g.vertices()``); this is a direct DFS on the grid.
    """
    rows = len(board)
    cols = len(board[0]) if rows else 0
    result = []

    def dfs(r, c, prefix, visited):
        # Extend the current path with cell (r, c).
        prefix += board[r][c]
        visited.add((r, c))
        if prefix in dictionary and prefix not in result:
            result.append(prefix)
        # Explore all 8 neighbours that are on the board and unvisited.
        for dr in (-1, 0, 1):
            for dc in (-1, 0, 1):
                nr, nc = r + dr, c + dc
                if (dr or dc) and 0 <= nr < rows and 0 <= nc < cols \
                        and (nr, nc) not in visited:
                    dfs(nr, nc, prefix, visited)
        visited.discard((r, c))  # backtrack

    for r in range(rows):
        for c in range(cols):
            dfs(r, c, "", set())
    return result


if __name__ == "__main__":
    print(findWords())
|
carlb15/Python
|
boggle.py
|
Python
|
mit
| 785 | 0.049682 |
# -*- encoding:utf-8 -*-
# Minimal Flask development server exposing a single endpoint.
from flask import Flask

app = Flask(__name__)


# NOTE(review): the route is '/config' but the view is named hello_world and
# returns a greeting -- confirm whether the path or the body is the intent.
@app.route('/config')
def hello_world():
    return 'Hello World!'


if __name__ == '__main__':
    # Bind to all interfaces on port 8080 (development server only).
    app.run(host="0.0.0.0", port=8080)
|
SimonXming/Lab
|
backend/app.py
|
Python
|
mit
| 207 | 0.009662 |
"""
This is a setup.py script generated by py2applet

Usage:
    python setup.py py2app
"""

from setuptools import setup

# py2app configuration: bundle aggregate.py as the macOS application entry point.
APP = ['aggregate.py']
DATA_FILES = []  # no extra resources bundled into the .app
OPTIONS = {'argv_emulation': True}  # forward Finder drag/drop args to sys.argv

setup(
    app=APP,
    data_files=DATA_FILES,
    options={'py2app': OPTIONS},
    setup_requires=['py2app'],
)
|
mtnman38/Aggregate
|
aggForFoundation/setup.py
|
Python
|
gpl-2.0
| 311 | 0 |
class NamespaceAlreadyRegistered(Exception):
    """Signals that a namespace with this name has already been registered."""
class NoParentFound(Exception):
    """Signals that no matching parent could be located."""
|
rsalmaso/django-cms
|
menus/exceptions.py
|
Python
|
bsd-3-clause
| 97 | 0 |
#!/usr/bin/python
# Landsat 8 TOA reflactance and brightness temparature calculation
# Author Riazuddin Kawsar
# email: r.kawsar@spatial-business-integration
# date: 30th july 2015
# Landsat 8 bands
#---------------------------------------------------------
#Band 1 - Coastal aerosol 0.43 - 0.45 30
#Band 2 - Blue 0.45 - 0.51 30
#Band 3 - Green 0.53 - 0.59 30
#Band 4 - Red 0.64 - 0.67 30
#Band 5 - Near Infrared (NIR) 0.85 - 0.88 30
#Band 6 - SWIR 1 1.57 - 1.65 30
#Band 7 - SWIR 2 2.11 - 2.29 30
#Band 8 - Panchromatic 0.50 - 0.68 15
#Band 9 - Cirrus 1.36 - 1.38 30
#Band 10 - Thermal Infrared (TIRS) 1 10.60 - 11.19 100 * (30)
#Band 11 - Thermal Infrared (TIRS) 2 11.50 - 12.51 100 * (30)
# http://landsat.usgs.gov/band_designations_landsat_satellites.php
# reflectance algorithm source: http://landsat.usgs.gov/Landsat8_Using_Product.php
import xml.dom.minidom as minidom
from collections import defaultdict
import csv
import os, glob, sys, time, datetime, math, numpy, fnmatch
from osgeo import gdal
from osgeo.gdalconst import *
#import multiprocessing
import matplotlib.pylab as plt
#from decimal import *
# input variables ---- user input ----
# argv[1]: Landsat 8 scene folder name; argv[2]: working directory containing it.
data_folder = sys.argv[1] #'LC81970302014029LGN00'
wrd_dir = sys.argv[2] #'/media/Num/wrk_dir/landsat'
min_ndvi= 0.01   # NDVI values below this are clamped up to min_ndvi
max_ndvi = 1.0   # NDVI values above this are clamped down to max_ndvi
fillval = 9999   # no-data value written for flagged/invalid pixels
# input files (automatically generated)
data_dir = os.path.join(wrd_dir, data_folder)
os.chdir(data_dir)
# Locate the scene's MTL metadata file (last match wins if several exist).
for meta_file in glob.glob('*_MTL.txt'):
    metadata_file = os.path.join(data_dir, meta_file)
print 'metadata_file: ' + metadata_file
# Quality pixel flag ( from Landsat 8 Quality Assessment band)
#BQF = [61440,59424,57344,56320,53248,39936,36896,36864,
#       31744,28672,28590,26656,24576,23552,20516,20512,20480]
# BQA values treated as bad pixels (cloud/cirrus/fill); masked to fillval below.
BQF = [1, 61440,59424,57344,56320,53248,39936,36896,36864]
#---------------------------- collecting all the necessary input files -------------------
# ----------------------------------------------------------------------------------------
def findfiles(input_dir, file_type):   # file_type = '*.tif'
    """Recursively collect files under *input_dir* matching the fnmatch
    pattern *file_type*, sorted per directory.

    Returns a list of absolute/joined paths in os.walk order.
    """
    toprocess = []
    # 'dirs' (not 'dir') so the builtin dir() is not shadowed.
    for root, dirs, files in os.walk(input_dir):
        # fnmatch.filter applies the same match as fnmatch.fnmatch, in one call.
        for name in fnmatch.filter(sorted(files), file_type):
            toprocess.append(os.path.join(root, name))
    return toprocess
# finding different bands for calculation
for raster_files in findfiles(data_dir, '*.TIF'):
raster_path = raster_files
raster_name = os.path.basename(raster_path)
band_name = raster_name.split('_', 1)[1]
band_name = band_name.split('.', 1)[0]
if band_name == 'B1':
Band_B1 = raster_path
if band_name == 'B2':
Band_B2 = raster_path
if band_name == 'B3':
Band_B3 = raster_path
if band_name == 'B4':
Band_B4 = raster_path
if band_name == 'B5':
Band_B5 = raster_path
if band_name == 'B6':
Band_B6 = raster_path
if band_name == 'B7':
Band_B7 = raster_path
if band_name == 'B10':
Band_B10 = raster_path
if band_name == 'B11':
Band_B11 = raster_path
if band_name == 'BQA':
Band_BQA = raster_path
print 'Band_BQA = ' + Band_BQA
####### extracting metadata ---------------------------
#------------------------------------------------------
# function to read the metadata (works for landsat MTL and Digitalglobe IMD metadata)
def read_metadata(f):
    """Parse a KEY = VALUE metadata file into a nested dict.

    GROUP/BEGIN_GROUP blocks become sub-dicts keyed by the group name;
    parenthesised multi-line values are accumulated and eval'd.
    Stops at END/END;.
    """
    # py2-only iteration: lines.next(); a StopIteration on a truncated file
    # would propagate to the caller.
    lines=iter(open(f).readlines())
    hdrdata={}
    line=lines.next()
    while line:
        # Strip quotes and split into [key, value].
        line=[item.strip() for item in line.replace('"','').split('=')]
        group=line[0].upper()
        if group in ['END;','END']:break
        value=line[1]
        if group in ['END_GROUP']:pass
        elif group in ['BEGIN_GROUP','GROUP']:
            # Start of a named group: collect its keys into a sub-dict.
            group=value
            subdata={}
            while line:
                line=lines.next()
                line = [l.replace('"','').strip() for l in line.split('=')]
                subgroup=line[0]
                subvalue=line[1]
                if subgroup == 'END_GROUP':
                    break
                elif line[1] == '(':
                    # Multi-line list value: append lines until one ends in ';',
                    # then eval the assembled tuple/list literal.
                    while line:
                        line=lines.next()
                        line = line.replace('"','').strip()
                        subvalue+=line
                        if line[-1:]==';':
                            subvalue=eval(subvalue.strip(';'))
                            break
                else:subvalue=subvalue.strip(';')
                subdata[subgroup]=subvalue
            hdrdata[group]=subdata
        else: hdrdata[group]=value.strip(');')
        line=lines.next()
    return hdrdata

# Scene metadata parsed once; consumed by the acquire*Metadata helpers below.
imddata = read_metadata (metadata_file)
def acquireMetadata(band):
    """Return [0, 0, mult, add]: reflectance rescaling gain/offset for *band*
    taken from the parsed MTL metadata (imddata)."""
    rescaling = imddata['RADIOMETRIC_RESCALING']
    reflectance_mult = float(rescaling['REFLECTANCE_MULT_BAND_' + str(band)])
    reflectance_add = float(rescaling['REFLECTANCE_ADD_BAND_' + str(band)])
    return [0, 0, reflectance_mult, reflectance_add]

def acquireThrmalMetadata(band):
    """Return [mult, add, K1, K2]: radiance rescaling factors and thermal
    conversion constants for thermal *band* from the MTL metadata."""
    rescaling = imddata['RADIOMETRIC_RESCALING']
    thermal = imddata['TIRS_THERMAL_CONSTANTS']
    radiance_mult = float(rescaling['RADIANCE_MULT_BAND_' + str(band)])
    radiance_add = float(rescaling['RADIANCE_ADD_BAND_' + str(band)])
    k1 = float(thermal['K1_CONSTANT_BAND_' + str(band)])
    k2 = float(thermal['K2_CONSTANT_BAND_' + str(band)])
    return [radiance_mult, radiance_add, k1, k2]
# Sun geometry from the scene metadata; zenith = 90 - elevation (degrees).
SunElevation = float(imddata['IMAGE_ATTRIBUTES']['SUN_ELEVATION'])
img_date = imddata['PRODUCT_METADATA']['DATE_ACQUIRED']
img_time = imddata['PRODUCT_METADATA']['SCENE_CENTER_TIME']
solar_zenith_angle = float(90.00) - SunElevation
solar_zenith_angle_radians = math.radians(solar_zenith_angle)
SunElevation_radians = math.radians(SunElevation)
print 'Acquisition date : ' + img_date
print 'Acquisition time : ' + img_time
print 'SunElevation :' + str(SunElevation)
# creating the product name and output dir
# DATE_ACQUIRED is "yyyy-mm-dd"; build "dd.mm.yy" for the product folder name.
year = img_date.split('-', 1)[0]
month = img_date.split('-', 1)[1]
month = month.split('-', 1)[0]
day = img_date.split('-', 1)[1]
day = day.split('-', 1)[1]
product_date = day + '.' + month + '.' + year[2:4]
product = 'binned_' + product_date
output_dir = os.path.join (data_dir, product)
print output_dir
if not os.path.exists(output_dir):
    os.makedirs(output_dir)
######## raster processing functions ---------------------
#----------------------------------------------------------
# function to read the image bands
def return_band(image_file_name, band_number):
image = image_file_name
dataset = gdal.Open(image,GA_ReadOnly)
if dataset is None:
print "Could not open " + dataset
sys.exit(1)
geoTransform = dataset.GetGeoTransform()
proj = dataset.GetProjection()
rasterband = dataset.GetRasterBand(band_number)
type(rasterband)
ncol = dataset.RasterXSize
nrow = dataset.RasterYSize
band = rasterband.ReadAsArray(0,0,ncol,nrow)
band = band.astype(numpy.uint16)
return band,geoTransform,proj,ncol,nrow
dataset = None
band = None
# will return '/media/Arc/eo_archive_proc/VHR_SAT_IMAGE/SPOT6/20140704_SPOT/binned_SPOT6_20140704/B0.binned_SPOT6_20140704.tif'
# the function input defined in the beginining: out_put_dir, product just we have to change the product name.....
def product_output_name(out_put_dir,product,Product_name):
    """Build the output path <out_put_dir>/<product>/<Product_name>.<product>.tif
    for one processed product raster."""
    filename = '%s.%s.tif' % (Product_name, product)
    return os.path.join(out_put_dir, product, filename)
def output_file(output_name,output_array,geoTransform,proj,ncol,nrow):
    """Write *output_array* as a single-band Float32 GeoTIFF with the given
    geotransform/projection; fillval is registered as the no-data value."""
    format = "GTiff"
    driver = gdal.GetDriverByName( format )
    outDataset = driver.Create(output_name,ncol,nrow,1,GDT_Float32)
    outBand = outDataset.GetRasterBand(1)
    outBand.WriteArray(output_array,0,0)
    # Flush before tagging no-data so the band data is committed first.
    outBand.FlushCache()
    outBand.SetNoDataValue(fillval)
    outDataset.SetGeoTransform(geoTransform )
    outDataset.SetProjection(proj)
def normalize(band1,band2):
    """Return the normalised difference (band1 - band2) / (band1 + band2),
    e.g. NDVI when called with (NIR, RED) reflectance arrays."""
    numerator = numpy.subtract(band1, band2)
    denominator = numpy.add(band1, band2)
    # Silence divide-by-zero warnings for pixels where band1 + band2 == 0.
    numpy.seterr(all='ignore')
    return numpy.divide(numerator, denominator)
# reading DN bands, extracting metadata and calculating radiance and reflactance and writing it to the folder
# i.e, band_name = B1
def calculate_reflectance(band_name, solar_zenith_angle_radians, DN, output_dir):
    """Convert a DN band array to TOA reflectance (gain*DN + offset, divided by
    cos(solar zenith)), mask BQA-flagged pixels to fillval, and write the tif."""
    img_name = 'Band_' + band_name
    # band_name is e.g. 'B4'; metadata lookup wants just the number ('4').
    band_metadata = acquireMetadata (band_name[1:])
    ref_MRF = float(band_metadata[2])
    ref_AMF = float(band_metadata[3])
    print 'calculating ' + band_name + ' reflactance...'
    reflectance = (ref_MRF * DN + ref_AMF) / (math.cos(solar_zenith_angle_radians))
    reflectance_name = product_output_name(data_dir,product,band_name)
    print 'Masking with Quality flag...'
    band_BQA,geoTransform,proj,ncol,nrow = return_band(Band_BQA,1)
    for i in BQF:
        qc = numpy.where(band_BQA==i,1,0)
        numpy.putmask(reflectance, qc, fillval)
    output_file(reflectance_name,reflectance,geoTransform,proj,ncol,nrow)
    # NOTE(review): 'reflactance' is misspelled, so the 'reflectance' array is
    # not actually released here -- harmless, but likely unintended.
    reflactance = None
    band_BQA = None
# calculating the ndvi
def calculate_ndvi(solar_zenith_angle_radians, red, nir, output_dir):
print 'reading RED band....'
band_metadata_B4 = acquireMetadata(4)
ref_MRF_B4 = float(band_metadata_B4[2])
ref_AMF_B4 = float(band_metadata_B4[3])
print 'calculating reflactance...'
reflectance_B4 = (ref_MRF_B4 * Band_B4 + ref_AMF_B4) / (math.cos(solar_zenith_angle_radians))
print 'reading NIR band....'
band_metadata_B5 = acquireMetadata(5)
ref_MRF_B5 = float(band_metadata_B5[2])
ref_AMF_B5 = float(band_metadata_B5[3])
print 'calculating reflactance...'
reflectance_B5 = (ref_MRF_B5 * Band_B5 + ref_AMF_B5) / (math.cos(solar_zenith_angle_radians))
ndvi_name = product_output_name(data_dir,product,'ndvi')
print "Calculating ndvi...."
ndvi = normalize(reflectance_B5, reflectance_B4)
min_ndvi_mask = numpy.where(ndvi < min_ndvi, 1, 0)
max_ndvi_mask = numpy.where(ndvi > max_ndvi, 1, 0)
numpy.putmask(ndvi, min_ndvi_mask, min_ndvi)
numpy.putmask(ndvi, max_ndvi_mask, max_ndvi)
#print 'Masking with Quality flag...'
band_BQA,geoTransform,proj,ncol,nrow = return_band(Band_BQA,1)
for i in BQF:
qc = numpy.where(band_BQA==i,1,0)
numpy.putmask(ndvi, qc, fillval)
output_file(ndvi_name,ndvi,geoTransform,proj,ncol,nrow)
reflectance_B4 = None
reflectance_B5 = None
mdvi = None
band_BQA = None
# Conversion to At-Satellite Brightness Temperature (K)
def calculate_brightness_temperature(band_name, solar_zenith_angle_radians, DN, output_dir):
    """Convert a thermal DN band to at-satellite brightness temperature (K):
    radiance = gain*DN + offset, then TB = K2 / ln(K1/radiance + 1); mask
    BQA-flagged pixels to fillval and write the GeoTIFF.

    Note: solar_zenith_angle_radians is accepted for a uniform signature but
    is not used in the thermal conversion.
    """
    img_name = 'Band_' + band_name
    print 'reading....' + img_name
    band_metadata = acquireThrmalMetadata (band_name[1:])
    radi_MRF = float(band_metadata[0])
    radi_AMF = float(band_metadata[1])
    K1 = float(band_metadata[2])
    K2 = float(band_metadata[3])
    print 'calculating Radiance...'
    radiance = (DN * radi_MRF) + radi_AMF
    print 'calculating Satellite Brightness Temperature...'
    TB = K2 / (numpy.log((K1 / radiance) +1))
    print 'Masking with Quality flag...'
    band_BQA,geoTransform,proj,ncol,nrow = return_band(Band_BQA,1)
    for i in BQF:
        qc = numpy.where(band_BQA==i,1,0)
        numpy.putmask(TB, qc, fillval)
    print 'writing output...'
    reflectance_name = product_output_name(data_dir,product,band_name)
    output_file(reflectance_name,TB,geoTransform,proj,ncol,nrow)
    radiance = None
radiance = None
if __name__ == "__main__":
    # Process each reflective band in turn; every Band_* name is rebound from
    # file path to pixel array, then released (None) to cap memory usage.
    Band_B1,geoTransform,proj,ncol,nrow = return_band(Band_B1,1)
    calculate_reflectance('B1', solar_zenith_angle_radians, Band_B1, output_dir)
    Band_B1 = None
    Band_B2,geoTransform,proj,ncol,nrow = return_band(Band_B2,1)
    calculate_reflectance('B2', solar_zenith_angle_radians, Band_B2, output_dir)
    Band_B2 = None
    Band_B3,geoTransform,proj,ncol,nrow = return_band(Band_B3,1)
    calculate_reflectance('B3', solar_zenith_angle_radians, Band_B3, output_dir)
    Band_B3 = None
    # B4 (RED) and B5 (NIR) are kept alive until after the NDVI computation.
    Band_B4,geoTransform,proj,ncol,nrow = return_band(Band_B4,1)
    calculate_reflectance('B4', solar_zenith_angle_radians, Band_B4, output_dir)
    Band_B5,geoTransform,proj,ncol,nrow = return_band(Band_B5,1)
    calculate_reflectance('B5', solar_zenith_angle_radians, Band_B5, output_dir)
    Band_B6,geoTransform,proj,ncol,nrow = return_band(Band_B6,1)
    calculate_reflectance('B6', solar_zenith_angle_radians, Band_B6, output_dir)
    Band_B6 = None
    Band_B7,geoTransform,proj,ncol,nrow = return_band(Band_B7,1)
    calculate_reflectance('B7', solar_zenith_angle_radians, Band_B7, output_dir)
    Band_B7 = None
    calculate_ndvi(solar_zenith_angle_radians, Band_B4, Band_B5, output_dir)
    Band_B5 = None
    Band_B4 = None
    # Thermal bands: brightness temperature instead of reflectance.
    Band_B10,geoTransform,proj,ncol,nrow = return_band(Band_B10,1)
    calculate_brightness_temperature('B10', solar_zenith_angle_radians, Band_B10, output_dir)
    Band_B10 = None
    Band_B11,geoTransform,proj,ncol,nrow = return_band(Band_B11,1)
    calculate_brightness_temperature('B11', solar_zenith_angle_radians, Band_B11, output_dir)
    Band_B11 = None
    print 'GAME OVER .... '
|
neel9102/eo_tools
|
image_processing/Landsat/reflectance_landsat_8_multiple_release.py
|
Python
|
gpl-3.0
| 13,296 | 0.019179 |
import cProfile
import unittest
import pstats

# Run the whole test suite under cProfile and print the 100 most expensive
# call sites by cumulative time.
if __name__ == '__main__':
    suite = unittest.TestLoader().discover('.')

    def runtests():
        # set verbosity to 2 to see each test
        unittest.TextTestRunner(verbosity=1, buffer=True).run(suite)

    # Profile results are also written to disk for later inspection.
    cProfile.run(
        'runtests()', filename='test_cprofile_results.log', sort='cumtime')
    p = pstats.Stats('test_cprofile_results.log')
    p.strip_dirs().sort_stats('cumulative').print_stats(100)
|
wanqizhu/mtg-python-engine
|
test.py
|
Python
|
mit
| 466 | 0.006438 |
'''
Analysis plugin for supporting WorkspaceEmulators during analysis pass.
Finds and connects Switch Cases, most specifically from Microsoft.
'''
import envi
import envi.archs.i386 as e_i386
import vivisect
import vivisect.analysis.generic.codeblocks as vagc
def analyzeJmp(amod, emu, op, starteip):
    '''
    Top level logic: when the indirect jmp at *starteip* matches the MS
    switch-case pattern, wire up the cases in the workspace.
    '''
    matched, info = testSwitch(emu.vw, op, starteip, emu)
    if matched:
        makeSwitch(emu.vw, starteip, info['offarraybase'], info['indiroffbase'])
def testSwitch(vw, op, vajmp, emu=None):
    '''
    identifies and enumerates microsoft's switch-case methods.

    Pattern matched (walking backwards from the indirect jmp at vajmp):
        mov  reg2, [reg1*4 + offarraybase]   ; load 32-bit offset from table
        add  reg2, reg1                      ; reg1 holds the imagebase
        jmp  reg2
    optionally preceded by a movzx byte-table lookup (two-level dispatch).

    Returns (True, {'offarraybase':..., 'indiroffbase':...}) on a match,
    (False, None) otherwise.
    '''
    if not (op.iflags & envi.IF_BRANCH):
        # vw.verbprint( "indirect branch is not correct type")
        return False,None

    # Both preceding instructions must already be defined locations.
    backone = vw.getLocation(vajmp-1)
    if backone == None:
        #vw.verbprint( "previous instruction isn't defined")
        return False,None

    backtwo = vw.getLocation(backone[0]-1)
    if backtwo == None:
        #vw.verbprint( "two previous instruction isn't defined")
        return False,None

    filename = vw.getMemoryMap(vajmp)[3]
    imagebase = vw.getFileMeta(filename, 'imagebase')

    # Previous instruction must be "add reg, reg(imagebase)".
    op1 = vw.parseOpcode(backone[0])
    if op1.mnem != 'add':
        #vw.verbprint( "previous instruction isn't an 'add'")
        return False,None

    baseoper = op1.opers[1]
    if not isinstance(baseoper, e_i386.i386RegOper):
        #vw.verbprint( "baseoper is not an i386RegOper: %s" % repr(baseoper))
        return False,None

    # this is a weak analysis failure, but a powerful confirmation.
    if emu != None:
        regbase = op1.getOperValue(1, emu)
        if regbase != imagebase:
            vw.verbprint( "reg != imagebase")
            return False,None

    # now check the instruction before that
    # It must load a 32-bit entry from the offset table: mov reg, [r*4+disp].
    op2 = vw.parseOpcode(backtwo[0])
    if op2.mnem != 'mov':
        vw.verbprint( "2nd previous instruction isn't an 'mov'")
        return False,None

    arrayoper = op2.opers[1]
    if not (isinstance(arrayoper, e_i386.i386SibOper) and arrayoper.scale == 4):
        vw.verbprint( "arrayoper is not an i386SibOper of size 4: %s" % repr(baseoper))
        return False,None

    # Mask off register metadata bits before comparing register identities.
    ao_reg = arrayoper.reg & e_i386.RMETA_NMASK
    if ao_reg != baseoper.reg:
        vw.verbprint( "arrayoper.reg != baseoper.reg: %s != %s" % (ao_reg, baseoper.reg))
        return False,None

    offarraybase = arrayoper.disp

    #initial check of the array. should point to the next va. we'll scrape it up later
    offarrayfirst = vw.readMemValue(offarraybase+imagebase, 4)
    if offarrayfirst+imagebase != vajmp+2:
        vw.verbprint( "first ref is not the va after the jmp: %x != %x" % (offarrayfirst+imagebase, vajmp+2))

    indiroffbase = None
    # now check for the byte array before that
    backthree = vw.getLocation(backtwo[0]-1) # this one is optional.  first two are not.
    if backthree != None:
        op = vw.parseOpcode(backthree[0])
        if op.mnem == 'movzx' and isinstance(op.opers[1], e_i386.i386SibOper) and \
                op.opers[1].scale == 1:
            vw.verbprint( "this is a double deref (hitting a byte array offset into the offset-array)")
            indiroffbase = op.opers[1].disp

    return True, {'indiroffbase':indiroffbase, 'offarraybase':offarraybase, }
def makeSwitch(vw, vajmp, offarraybase, indiroffbase=None):
    '''
    Makes the changes to the Workspace for the given jmp location.  Handles
    naming for all cases because naming wants to indicate larger context.
    (future)If indiroffbase is not None, the indirection "database" is analyzed for naming

    Returns the list of (case_index, target_va) pairs that were wired up.
    '''
    filename = vw.getMemoryMap(vajmp)[3]
    imagebase = vw.getFileMeta(filename, 'imagebase')
    # we have identified this is a switch case
    vw.verbprint( "FOUND MS SWITCH CASE SPRAY at 0x%x" % vajmp)

    # roll through the offset array until imagebase+offset is not a valid pointer, points to non-op locations or splits instructions
    count = 0
    tracker = []
    ptr = offarraybase
    while True:
        off = vw.readMemValue(ptr+imagebase, 4)
        ova = imagebase + off

        tgtva = makeSwitchCase(vw, vajmp, ova)
        if not tgtva:
            break

        tracker.append((count, tgtva))
        count += 1
        ptr += 4

    # FIXME: this doesn't take into account two-level derefs (indiroffbase)
    # Group case indexes by shared target so one label can list them all.
    naming = {}
    for idx,va in tracker:
        lst = naming.get(va)
        if lst == None:
            lst = []
            naming[va] = lst
        lst.append("%xh" % idx)

    #TODO: analyze indiroffbase to determine case information
    for va, opts in naming.items():
        options = "_".join(opts)
        name = "switch_case_%s_%.8x" % (options, va)
        vw.makeName(va, name)

    #TODO: analyze which paths handle which cases, name accordingly
    #TODO: determine good hint for symbolik constraints

    # Re-run codeblock analysis so the new xrefs are folded into the function.
    funcva = vw.getFunction(vajmp)
    vw.makeName(vajmp, "jmp_switch_%.8x" % vajmp)
    vagc.analyzeFunction(vw, funcva)

    return tracker
def makeSwitchCase(vw, vaSwitch, vaCase):
    '''
    Handle minutia of each case, specifically, checking for validity and
    making Xref and making code (if necessary)

    Returns vaCase on success, False when vaCase is not a usable case target
    (invalid pointer, splits an existing location, or is not an opcode).
    '''
    if not vw.isValidPointer(vaCase):
        return False

    loc = vw.getLocation(vaCase)
    if loc != None:
        # Reject targets that land mid-location or on a non-opcode location.
        if loc[0] != vaCase:
            return False

        if loc[vivisect.L_LTYPE] != vivisect.LOC_OP:
            return False
    else:
        vw.makeCode(vaCase)

    #if we reach here, we're going to assume the location is valid.
    vw.verbprint( "0x%x MS Switch Case Spray: emu.getBranchNode( emu.curpath , 0x%x )" % (vaSwitch, vaCase))
    vw.addXref(vaSwitch, vaCase, vivisect.REF_CODE)
    return vaCase
# Interactive-shell hook: runs only when a workspace 'vw' is already in scope.
if globals().get('vw'):
    verbose = vw.verbose
    vw.verbose = True

    vw.vprint("Starting...")
    # FIXME(review): findSwitchCase is not defined anywhere in this module --
    # this branch raises NameError when triggered; confirm the intended entry
    # point (analyzeJmp/testSwitch/makeSwitch all take different arguments).
    findSwitchCase(vw)
    vw.vprint("Done")

    vw.verbose = verbose
|
imjonsnooow/vivisect
|
vivisect/analysis/generic/switchcase.py
|
Python
|
apache-2.0
| 5,920 | 0.010473 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
# IMPORTANT: only import safe functions as this module will be included in jinja environment
import frappe
import operator
import re, urllib, datetime, math
import babel.dates
from dateutil import parser
from num2words import num2words
import HTMLParser
from html2text import html2text
DATE_FORMAT = "%Y-%m-%d"
TIME_FORMAT = "%H:%M:%S.%f"
DATETIME_FORMAT = DATE_FORMAT + " " + TIME_FORMAT

# datetime functions

def getdate(string_date=None):
	"""
	Converts string date (yyyy-mm-dd) to datetime.date object.

	Defaults to the current date (user timezone) when falsy; returns None
	for empty or "0000-00-00" inputs that dateutil cannot parse.
	"""
	if not string_date:
		return get_datetime().date()
	if isinstance(string_date, datetime.datetime):
		return string_date.date()
	elif isinstance(string_date, datetime.date):
		return string_date

	# dateutil parser does not agree with dates like 0000-00-00
	if not string_date or string_date=="0000-00-00":
		return None

	return parser.parse(string_date).date()

def get_datetime(datetime_str=None):
	"""Coerce str / date / datetime / timedelta / (y,m,d,...) sequence to a
	datetime; None/falsy input yields the current user-timezone datetime."""
	if not datetime_str:
		return now_datetime()

	if isinstance(datetime_str, (datetime.datetime, datetime.timedelta)):
		return datetime_str
	elif isinstance(datetime_str, (list, tuple)):
		return datetime.datetime(datetime_str)
	elif isinstance(datetime_str, datetime.date):
		return datetime.datetime.combine(datetime_str, datetime.time())

	# dateutil parser does not agree with dates like 0000-00-00
	if not datetime_str or (datetime_str or "").startswith("0000-00-00"):
		return None

	return parser.parse(datetime_str)

def to_timedelta(time_str):
	"""Parse an "HH:MM:SS[.ffffff]" string into a timedelta; non-string
	inputs (already timedelta) are returned unchanged. (py2: basestring)"""
	if isinstance(time_str, basestring):
		t = parser.parse(time_str)
		return datetime.timedelta(hours=t.hour, minutes=t.minute, seconds=t.second, microseconds=t.microsecond)

	else:
		return time_str
def add_to_date(date, years=0, months=0, days=0):
	"""Add the given years/months/days to *date* (calendar-aware via
	relativedelta). String input returns a string of the same flavour
	(date-only vs datetime); None means "now"."""
	from dateutil.relativedelta import relativedelta

	as_string, as_datetime = False, False
	if date==None:
		date = now_datetime()

	if isinstance(date, basestring):
		as_string = True
		# A space separates date from time, so treat it as a datetime string.
		if " " in date:
			as_datetime = True
		date = parser.parse(date)

	date = date + relativedelta(years=years, months=months, days=days)

	if as_string:
		if as_datetime:
			return date.strftime(DATETIME_FORMAT)
		else:
			return date.strftime(DATE_FORMAT)
	else:
		return date

def add_days(date, days):
	"""Shorthand for add_to_date(date, days=days)."""
	return add_to_date(date, days=days)

def add_months(date, months):
	"""Shorthand for add_to_date(date, months=months)."""
	return add_to_date(date, months=months)

def add_years(date, years):
	"""Shorthand for add_to_date(date, years=years)."""
	return add_to_date(date, years=years)

def date_diff(string_ed_date, string_st_date):
	"""Return end - start in whole days (inputs parsed via getdate)."""
	return (getdate(string_ed_date) - getdate(string_st_date)).days

def time_diff(string_ed_date, string_st_date):
	"""Return end - start as a timedelta (inputs parsed via get_datetime)."""
	return get_datetime(string_ed_date) - get_datetime(string_st_date)

def time_diff_in_seconds(string_ed_date, string_st_date):
	"""Return end - start in seconds (float)."""
	return time_diff(string_ed_date, string_st_date).total_seconds()

def time_diff_in_hours(string_ed_date, string_st_date):
	"""Return end - start in hours, rounded to 6 decimal places."""
	return round(float(time_diff(string_ed_date, string_st_date).total_seconds()) / 3600, 6)
def now_datetime():
	"""Current naive datetime in the system's configured timezone."""
	dt = convert_utc_to_user_timezone(datetime.datetime.utcnow())
	return dt.replace(tzinfo=None)

def _get_time_zone():
	# System Settings value, falling back to the historical default.
	return frappe.db.get_system_setting('time_zone') or 'Asia/Kolkata'

def get_time_zone():
	"""Return the system timezone name, cached except while running tests."""
	if frappe.local.flags.in_test:
		return _get_time_zone()

	return frappe.cache().get_value("time_zone", _get_time_zone)

def convert_utc_to_user_timezone(utc_timestamp):
	"""Localize a naive UTC timestamp into the system timezone; falls back
	to UTC when the configured zone name is unknown to pytz."""
	from pytz import timezone, UnknownTimeZoneError

	utcnow = timezone('UTC').localize(utc_timestamp)
	try:
		return utcnow.astimezone(timezone(get_time_zone()))
	except UnknownTimeZoneError:
		return utcnow

def now():
	"""return current datetime as yyyy-mm-dd hh:mm:ss

	frappe.flags.current_date (when set, e.g. in tests) overrides the date
	part while keeping the real current time."""
	if frappe.flags.current_date:
		return getdate(frappe.flags.current_date).strftime(DATE_FORMAT) + " " + \
			now_datetime().strftime(TIME_FORMAT)
	else:
		return now_datetime().strftime(DATETIME_FORMAT)

def nowdate():
	"""return current date as yyyy-mm-dd"""
	return now_datetime().strftime(DATE_FORMAT)

def today():
	"""Alias of nowdate()."""
	return nowdate()

def nowtime():
	"""return current time in hh:mm (actually HH:MM:SS.ffffff)"""
	return now_datetime().strftime(TIME_FORMAT)
def get_first_day(dt, d_years=0, d_months=0):
	"""
	Returns the first day of the month for the date specified by date object
	Also adds `d_years` and `d_months` if specified
	"""
	dt = getdate(dt)

	# d_years, d_months are "deltas" to apply to dt
	# divmod handles month overflow past December into the next year(s).
	overflow_years, month = divmod(dt.month + d_months - 1, 12)
	year = dt.year + d_years + overflow_years

	return datetime.date(year, month + 1, 1)

def get_last_day(dt):
	"""
	Returns last day of the month using:
	`get_first_day(dt, 0, 1) + datetime.timedelta(-1)`
	"""
	return get_first_day(dt, 0, 1) + datetime.timedelta(-1)

def get_time(time_str):
	"""Coerce str / datetime / time / timedelta input to a datetime.time."""
	if isinstance(time_str, datetime.datetime):
		return time_str.time()
	elif isinstance(time_str, datetime.time):
		return time_str
	else:
		# timedelta stringifies as "H:MM:SS", which dateutil can parse.
		if isinstance(time_str, datetime.timedelta):
			time_str = str(time_str)
		return parser.parse(time_str).time()

def get_datetime_str(datetime_obj):
	"""Format a datetime (or parseable string) as yyyy-mm-dd HH:MM:SS.ffffff."""
	if isinstance(datetime_obj, basestring):
		datetime_obj = get_datetime(datetime_obj)

	return datetime_obj.strftime(DATETIME_FORMAT)
def get_user_format():
	"""Return the system default date format string, cached on frappe.local
	(falls back to "yyyy-mm-dd")."""
	if getattr(frappe.local, "user_format", None) is None:
		frappe.local.user_format = frappe.db.get_default("date_format")

	return frappe.local.user_format or "yyyy-mm-dd"

def formatdate(string_date=None, format_string=None):
	"""
	Converts the given string date to :data:`user_format`
	User format specified in defaults

	 Examples:

	 * dd-mm-yyyy
	 * mm-dd-yyyy
	 * dd/mm/yyyy
	"""
	date = getdate(string_date) if string_date else now_datetime().date()
	if not format_string:
		# babel uses "MM" for months; the stored user format uses "mm".
		format_string = get_user_format().replace("mm", "MM")
	return babel.dates.format_date(date, format_string, locale=(frappe.local.lang or "").replace("-", "_"))

def format_time(txt):
	"""Format a time value per the active locale (via babel)."""
	return babel.dates.format_time(get_time(txt), locale=(frappe.local.lang or "").replace("-", "_"))

def format_datetime(datetime_string, format_string=None):
	"""Format a datetime per the user date format plus HH:mm:ss, localized
	via babel; returns None for falsy input."""
	if not datetime_string:
		return

	datetime = get_datetime(datetime_string)
	if not format_string:
		format_string = get_user_format().replace("mm", "MM") + " HH:mm:ss"
	return babel.dates.format_datetime(datetime, format_string, locale=(frappe.local.lang or "").replace("-", "_"))

def global_date_format(date):
	"""returns date as 1 January 2012"""
	formatted_date = getdate(date).strftime("%d %B %Y")
	# Strip the leading zero from single-digit days ("01" -> "1").
	return formatted_date.startswith("0") and formatted_date[1:] or formatted_date
def has_common(l1, l2):
	"""Returns truthy value if there are common elements in lists l1 and l2"""
	return set(l1).intersection(l2)
def flt(s, precision=None):
"""Convert to float (ignore commas)"""
if isinstance(s, basestring):
s = s.replace(',','')
try:
num = float(s)
if precision is not None:
num = rounded(num, precision)
except Exception:
num = 0
return num
def cint(s):
"""Convert to integer"""
try: num = int(float(s))
except: num = 0
return num
def cstr(s):
if isinstance(s, unicode):
return s
elif s==None:
return ''
elif isinstance(s, basestring):
return unicode(s, 'utf-8')
else:
return unicode(s)
def rounded(num, precision=0):
    """round method for round halfs to nearest even algorithm aka banker's
    rounding - compatible with python3"""
    precision = cint(precision)
    multiplier = 10 ** precision

    # scale up first, then kill accumulated float noise before inspecting
    # the fractional part
    scaled = round(num * multiplier if precision else num, 8)
    whole = math.floor(scaled)
    fraction = scaled - whole

    if not precision and fraction == 0.5:
        # exactly half way: pick the even neighbour
        scaled = whole if whole % 2 == 0 else whole + 1
    else:
        scaled = round(scaled)

    return (scaled / multiplier) if precision else scaled
def remainder(numerator, denominator, precision=2):
    """Return numerator % denominator, scaling both operands by
    10**precision first to sidestep float modulo artefacts."""
    precision = cint(precision)
    if precision:
        multiplier = 10 ** precision
        _remainder = ((numerator * multiplier) % (denominator * multiplier)) / multiplier
    else:
        _remainder = numerator % denominator
    return flt(_remainder, precision)
def round_based_on_smallest_currency_fraction(value, currency, precision=2):
    """Round *value* to the nearest multiple of the currency's smallest
    cash fraction (e.g. 0.05), as stored on the Currency record.

    When the currency does not define a smallest fraction, plain banker's
    rounding is applied instead.
    """
    smallest_currency_fraction_value = flt(frappe.db.get_value("Currency",
        currency, "smallest_currency_fraction_value"))

    if smallest_currency_fraction_value:
        remainder_val = remainder(value, smallest_currency_fraction_value, precision)
        # snap up or down to whichever multiple of the fraction is closer
        if remainder_val > (smallest_currency_fraction_value / 2):
            value += smallest_currency_fraction_value - remainder_val
        else:
            value -= remainder_val
    else:
        value = rounded(value)

    return flt(value, precision)
def encode(obj, encoding="utf-8"):
    """Encode a unicode value (or the unicode members of a list) to bytes;
    everything else is passed through unchanged."""
    if isinstance(obj, list):
        return [item.encode(encoding) if isinstance(item, unicode) else item
            for item in obj]
    if isinstance(obj, unicode):
        return obj.encode(encoding)
    return obj
def parse_val(v):
    """Converts to simple datatypes from SQL query results"""
    if isinstance(v, (datetime.date, datetime.datetime)):
        return unicode(v)
    if isinstance(v, datetime.timedelta):
        # keep only "HH:MM" from the timedelta's string form
        return ":".join(unicode(v).split(":")[:2])
    if isinstance(v, long):
        return int(v)
    return v
def fmt_money(amount, precision=None, currency=None):
    """
    Convert to string with commas for thousands, millions etc

    The grouping characters, decimal separator and default precision come
    from the currency's "number_format" (or the system default, or
    "#,###.##").  When *currency* is given and the "hide_currency_symbol"
    global is not "Yes", the currency symbol is prefixed.
    """
    number_format = None
    if currency:
        number_format = frappe.db.get_value("Currency", currency, "number_format", cache=True)

    if not number_format:
        number_format = frappe.db.get_default("number_format") or "#,###.##"

    decimal_str, comma_str, number_format_precision = get_number_format_info(number_format)

    if precision is None:
        precision = number_format_precision

    amount = '%.*f' % (precision, flt(amount))
    if amount.find('.') == -1:
        decimals = ''
    else:
        decimals = amount.split('.')[1]

    parts = []
    minus = ''
    if flt(amount) < 0:
        minus = '-'

    # work on the absolute integer part; the sign is re-attached at the end
    amount = cstr(abs(flt(amount))).split('.')[0]

    if len(amount) > 3:
        parts.append(amount[-3:])
        amount = amount[:-3]

        # the Indian format "#,##,###.##" groups by 2 after the first
        # group of 3 (lakhs/crores); every other format groups by 3
        val = number_format=="#,##,###.##" and 2 or 3

        while len(amount) > val:
            parts.append(amount[-val:])
            amount = amount[:-val]

    parts.append(amount)
    parts.reverse()

    amount = comma_str.join(parts) + ((precision and decimal_str) and (decimal_str + decimals) or "")
    amount = minus + amount

    if currency and frappe.defaults.get_global_default("hide_currency_symbol") != "Yes":
        symbol = frappe.db.get_value("Currency", currency, "symbol") or currency
        amount = symbol + " " + amount

    return amount
# maps a number-format string to a tuple of
# (decimal separator, thousands separator, default precision)
number_format_info = {
    "#,###.##": (".", ",", 2),
    "#.###,##": (",", ".", 2),
    "# ###.##": (".", " ", 2),
    "# ###,##": (",", " ", 2),
    "#'###.##": (".", "'", 2),
    "#, ###.##": (".", ", ", 2),
    "#,##,###.##": (".", ",", 2),
    "#,###.###": (".", ",", 3),
    "#.###": ("", ".", 0),
    "#,###": ("", ",", 0)
}
def get_number_format_info(format):
    """Return (decimal_str, comma_str, precision) for *format*, defaulting
    to the "#,###.##" convention for unknown formats."""
    return number_format_info.get(format, (".", ",", 2))
#
# convert currency to words
#
def money_in_words(number, main_currency = None, fraction_currency=None):
    """
    Returns string in words with currency and fraction currency.

    e.g. ``money_in_words(100.50, "USD")`` -> "USD One Hundred and Fifty
    Cent only."  Returns "" for zero/negative/empty input.
    """
    from frappe.utils import get_defaults
    _ = frappe._

    if not number or flt(number) < 0:
        return ""

    d = get_defaults()
    if not main_currency:
        main_currency = d.get('currency', 'INR')
    if not fraction_currency:
        fraction_currency = frappe.db.get_value("Currency", main_currency, "fraction") or _("Cent")

    n = "%.2f" % flt(number)
    main, fraction = n.split('.')
    if len(fraction)==1: fraction += '0'

    # Indian-style "#,##,###.##" currencies are spoken in lakhs/crores,
    # not millions, so pick the numbering system from the number format
    number_format = frappe.db.get_value("Currency", main_currency, "number_format", cache=True) or \
        frappe.db.get_default("number_format") or "#,###.##"

    in_million = True
    if number_format == "#,##,###.##": in_million = False

    out = main_currency + ' ' + in_words(main, in_million).title()
    if cint(fraction):
        out = out + ' ' + _('and') + ' ' + in_words(fraction, in_million).title() + ' ' + fraction_currency

    return out + ' ' + _('only.')
#
# convert number to words
#
def in_words(integer, in_million=True):
    """
    Returns string in words for the given integer.

    Uses the en_IN numbering (lakh/crore) when *in_million* is False;
    falls back to English when the user's locale is unsupported.
    """
    locale = frappe.local.lang if in_million else 'en_IN'
    integer = int(integer)
    try:
        words = num2words(integer, lang=locale)
    except NotImplementedError:
        words = num2words(integer, lang='en')
    return words.replace('-', ' ')
def is_html(text):
    """Heuristic check: True when *text* contains one of a few common
    HTML tag openers (<br>, <p, <img, <div)."""
    return any(marker in text for marker in ("<br>", "<p", "<img", "<div"))
# from Jinja2 code
_striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)')

def strip_html(text):
    """Remove HTML comments and anything enclosed in <>, tags included."""
    return re.sub(_striptags_re, "", text)
def escape_html(text):
    """Escape &, quotes and angle brackets so *text* is safe to embed in
    HTML.  Works character by character, so replacement order is moot."""
    replacements = {
        "&": "&amp;",
        '"': "&quot;",
        "'": "&apos;",
        ">": "&gt;",
        "<": "&lt;",
    }
    escaped = []
    for ch in text:
        escaped.append(replacements.get(ch, ch))
    return "".join(escaped)
def pretty_date(iso_datetime):
    """
    Takes an ISO time and returns a string representing how
    long ago the date represents.
    Ported from PrettyDate by John Resig
    """
    if not iso_datetime: return ''
    import math

    if isinstance(iso_datetime, basestring):
        iso_datetime = datetime.datetime.strptime(iso_datetime, DATETIME_FORMAT)
    now_dt = datetime.datetime.strptime(now(), DATETIME_FORMAT)
    dt_diff = now_dt - iso_datetime

    # available only in python 2.7+
    # dt_diff_seconds = dt_diff.total_seconds()
    dt_diff_seconds = dt_diff.days * 86400.0 + dt_diff.seconds
    dt_diff_days = math.floor(dt_diff_seconds / 86400.0)

    # different cases, from finest to coarsest granularity
    if dt_diff_seconds < 60.0:
        return 'just now'
    elif dt_diff_seconds < 120.0:
        return '1 minute ago'
    elif dt_diff_seconds < 3600.0:
        return '%s minutes ago' % cint(math.floor(dt_diff_seconds / 60.0))
    elif dt_diff_seconds < 7200.0:
        return '1 hour ago'
    elif dt_diff_seconds < 86400.0:
        return '%s hours ago' % cint(math.floor(dt_diff_seconds / 3600.0))
    elif dt_diff_days == 1.0:
        return 'Yesterday'
    elif dt_diff_days < 7.0:
        return '%s days ago' % cint(dt_diff_days)
    elif dt_diff_days < 31.0:
        return '%s week(s) ago' % cint(math.ceil(dt_diff_days / 7.0))
    elif dt_diff_days < 365.0:
        return '%s months ago' % cint(math.ceil(dt_diff_days / 30.0))
    else:
        return 'more than %s year(s) ago' % cint(math.floor(dt_diff_days / 365.0))
def comma_or(some_list):
    """Join *some_list* for display as "'a', 'b' or 'c'" (translated)."""
    return comma_sep(some_list, frappe._("{0} or {1}"))
def comma_and(some_list):
    """Join *some_list* for display as "'a', 'b' and 'c'" (translated)."""
    return comma_sep(some_list, frappe._("{0} and {1}"))
def comma_sep(some_list, pattern):
    """Join a list for display using *pattern* (e.g. "{0} and {1}"),
    quoting and translating each element.

    Non-list input, or a single-element list, is returned as-is (the
    single element unquoted)."""
    if not isinstance(some_list, (list, tuple)):
        return some_list

    # list(some_list) is done to preserve the existing list
    some_list = [unicode(s) for s in list(some_list)]
    if not some_list:
        return ""
    if len(some_list) == 1:
        return some_list[0]

    some_list = ["'%s'" % s for s in some_list]
    return pattern.format(", ".join(frappe._(s) for s in some_list[:-1]), some_list[-1])
def new_line_sep(some_list):
    """Join a list for display with "\\n " between elements.

    Non-list input, or a single-element list, is returned as-is.

    Cleanup: the joined string was previously wrapped in the ``format()``
    builtin (a no-op on a plain string) and each element was re-formatted
    with ``"%s" %`` even though all elements are already unicode; both
    redundant steps are removed with identical results.
    """
    if not isinstance(some_list, (list, tuple)):
        return some_list

    # list(some_list) is done to preserve the existing list
    some_list = [unicode(s) for s in list(some_list)]
    if not some_list:
        return ""
    elif len(some_list) == 1:
        return some_list[0]
    else:
        return "\n ".join(some_list)
def filter_strip_join(some_list, sep):
    """given a list, filter None values, strip spaces and join"""
    cleaned = (cstr(item).strip() for item in filter(None, some_list))
    return cstr(sep).join(cleaned)
def get_url(uri=None, full_address=False):
    """get app url from request

    Host resolution precedence: site config host_name > request host
    (honouring X-Forwarded-Proto) > site name > Website Settings
    subdomain > http://localhost.  An absolute *uri* is returned as-is.
    """
    host_name = frappe.local.conf.host_name or frappe.local.conf.hostname

    if uri and (uri.startswith("http://") or uri.startswith("https://")):
        return uri

    if not host_name:
        if hasattr(frappe.local, "request") and frappe.local.request and frappe.local.request.host:
            # a reverse proxy sets X-Forwarded-Proto; trust it for the scheme
            protocol = 'https' == frappe.get_request_header('X-Forwarded-Proto', "") and 'https://' or 'http://'
            host_name = protocol + frappe.local.request.host

        elif frappe.local.site:
            host_name = "http://{}".format(frappe.local.site)

        else:
            host_name = frappe.db.get_value("Website Settings", "Website Settings",
                "subdomain")

            if host_name and "http" not in host_name:
                host_name = "http://" + host_name

            if not host_name:
                host_name = "http://localhost"

    if not uri and full_address:
        uri = frappe.get_request_header("REQUEST_URI", "")

    url = urllib.basejoin(host_name, uri) if uri else host_name

    return url
def get_host_name():
    """Return the app's host name with the scheme ("http://"/"https://")
    stripped."""
    return get_url().rsplit("//", 1)[-1]
def get_link_to_form(doctype, name, label=None):
    """Return an HTML anchor linking to the desk form of the given
    document; *label* defaults to the document name."""
    return '<a href="{0}">{1}</a>'.format(get_url_to_form(doctype, name), label or name)
def get_url_to_form(doctype, name):
    """Return the absolute desk URL of the form view for *doctype*/*name*."""
    return get_url(uri = "desk#Form/{0}/{1}".format(quoted(doctype), quoted(name)))
def get_url_to_list(doctype):
    """Return the absolute desk URL of the list view for *doctype*."""
    return get_url(uri = "desk#List/{0}".format(quoted(doctype)))
# maps a filter-operator token to a predicate over an (actual, expected)
# pair; note the python-2-only tuple parameter unpacking in the lambdas
operator_map = {
    # startswith
    "^": lambda (a, b): (a or "").startswith(b),

    # in or not in a list
    "in": lambda (a, b): operator.contains(b, a),
    "not in": lambda (a, b): not operator.contains(b, a),

    # comparison operators
    "=": lambda (a, b): operator.eq(a, b),
    "!=": lambda (a, b): operator.ne(a, b),
    ">": lambda (a, b): operator.gt(a, b),
    "<": lambda (a, b): operator.lt(a, b),
    ">=": lambda (a, b): operator.ge(a, b),
    "<=": lambda (a, b): operator.le(a, b),
    "not None": lambda (a, b): a and True or False,
    "None": lambda (a, b): (not a) and True or False
}
def evaluate_filters(doc, filters):
    '''Returns true if doc matches filters (a dict of {fieldname: value}
    or a list of filter dicts/tuples)'''
    if isinstance(filters, dict):
        conditions = [{key: value} for key, value in filters.iteritems()]
    elif isinstance(filters, (list, tuple)):
        conditions = filters
    else:
        conditions = []

    for condition in conditions:
        parsed = get_filter(None, condition)
        if not compare(doc.get(parsed.fieldname), parsed.operator, parsed.value):
            return False

    return True
def compare(val1, condition, val2):
    """Apply the *condition* operator from operator_map to (val1, val2);
    an unknown operator evaluates to False."""
    predicate = operator_map.get(condition)
    if predicate is None:
        return False
    return predicate((val1, val2))
def get_filter(doctype, f):
    """Returns a _dict like

    {
        "doctype":
        "fieldname":
        "operator":
        "value":
    }

    Accepts *f* as a single-key dict, a 3-tuple (fieldname, operator,
    value) or a 4-tuple (doctype, fieldname, operator, value).
    """
    from frappe.model import default_fields, optional_fields

    if isinstance(f, dict):
        key, value = f.items()[0]
        f = make_filter_tuple(doctype, key, value)

    if not isinstance(f, (list, tuple)):
        frappe.throw("Filter must be a tuple or list (in a list)")

    if len(f) == 3:
        f = (doctype, f[0], f[1], f[2])

    elif len(f) != 4:
        frappe.throw("Filter must have 4 values (doctype, fieldname, operator, value): {0}".format(str(f)))

    f = frappe._dict(doctype=f[0], fieldname=f[1], operator=f[2], value=f[3])

    if not f.operator:
        # if operator is missing
        f.operator = "="

    valid_operators = ("=", "!=", ">", "<", ">=", "<=", "like", "not like", "in", "not in")
    if f.operator not in valid_operators:
        frappe.throw("Operator must be one of {0}".format(", ".join(valid_operators)))

    if f.doctype and (f.fieldname not in default_fields + optional_fields):
        # verify fieldname belongs to the doctype
        meta = frappe.get_meta(f.doctype)
        if not meta.has_field(f.fieldname):
            # try and match the doctype name from child tables
            for df in meta.get_table_fields():
                if frappe.get_meta(df.options).has_field(f.fieldname):
                    f.doctype = df.options
                    break

    return f
def make_filter_tuple(doctype, key, value):
    '''return a filter tuple like [doctype, key, operator, value];
    a (operator, operand) pair as *value* supplies the operator, any
    other value implies equality'''
    if isinstance(value, (list, tuple)):
        op, operand = value[0], value[1]
    else:
        op, operand = "=", value
    return [doctype, key, op, operand]
def scrub_urls(html):
    """Expand relative href/src/url() references in *html* to absolute
    URLs based on the current site URL."""
    html = expand_relative_urls(html)

    # encoding should be responsibility of the composer
    # html = quote_urls(html)

    return html
def expand_relative_urls(html):
    """Rewrite relative href/src attributes and CSS url() references in
    *html* so they point at the absolute site URL."""
    # expand relative urls
    url = get_url()
    if url.endswith("/"): url = url[:-1]

    def _expand_relative_urls(match):
        # groups: (attr-or-css-prefix, separator, path, closing quote)
        to_expand = list(match.groups())
        if not to_expand[2].startswith("/"):
            to_expand[2] = "/" + to_expand[2]
        # splice the absolute site URL in front of the path
        to_expand.insert(2, url)

        if 'url' in to_expand[0] and to_expand[1].startswith('(') and to_expand[-1].endswith(')'):
            # background-image: url('/assets/...') - workaround for wkhtmltopdf print-media-type
            to_expand.append(' !important')

        return "".join(to_expand)

    # href="..." / src="..." attributes that do not already start with http
    html = re.sub('(href|src){1}([\s]*=[\s]*[\'"]?)((?!http)[^\'" >]+)([\'"]?)', _expand_relative_urls, html)

    # background-image: url('/assets/...')
    html = re.sub('(:[\s]?url)(\([\'"]?)([^\)]*)([\'"]?\))', _expand_relative_urls, html)
    return html
def quoted(url):
    """Percent-encode *url* (utf-8 encoded first), leaving common URL
    punctuation untouched."""
    return cstr(urllib.quote(encode(url), safe=b"~@#$&()*!+=:;,.?/'"))
def quote_urls(html):
    """Percent-encode absolute (http...) href/src URLs embedded in *html*."""
    def _quote_url(match):
        attr, separator, link, closing = match.groups()
        return attr + separator + quoted(link) + closing

    return re.sub('(href|src){1}([\s]*=[\s]*[\'"]?)((?:http)[^\'">]+)([\'"]?)',
        _quote_url, html)
def unique(seq):
    """use this instead of list(set()) to preserve order of the original list.
    Thanks to Stackoverflow: http://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-in-python-whilst-preserving-order"""
    seen = set()
    out = []
    for item in seq:
        if item not in seen:
            seen.add(item)
            out.append(item)
    return out
def strip(val, chars=None):
    """Strip *chars* (whitespace by default) from *val* after removing
    zero-width characters; None becomes "".

    \ufeff is no-width-break, \u200b is no-width-space."""
    cleaned = (val or "").replace("\ufeff", "").replace("\u200b", "")
    return cleaned.strip(chars)
def to_markdown(html):
    """Convert *html* to markdown via html2text; returns None when the
    input cannot be parsed."""
    try:
        return html2text(html)
    except HTMLParser.HTMLParseError:
        return None
|
anandpdoshi/frappe
|
frappe/utils/data.py
|
Python
|
mit
| 21,230 | 0.03024 |
import pytest
import os
def test_launch (launch):
    """Smoke-test the example app: both rest-api endpoints answer 200 with
    a result.info payload."""
    # skip quietly when the sibling atila checkout is not present
    serve = '../../atila/example/serve.py'
    if not os.path.isfile (serve):
        return

    with launch (serve) as engine:
        # hit /apis/rest-api then /apis/rest-api2
        for i in range (2):
            resp = engine.axios.get ('/apis/rest-api{}'.format (i == 1 and 2 or ''))
            assert resp.status_code == 200
            assert 'result' in resp.data
            assert 'info' in resp.data ['result']

        # second identical pass — presumably exercises the already-warm
        # endpoints / late-respond path; TODO confirm the duplication is
        # intentional
        for i in range (2):
            resp = engine.axios.get ('/apis/rest-api{}'.format (i == 1 and 2 or ''))
            assert resp.status_code == 200
            assert 'result' in resp.data
            assert 'info' in resp.data ['result']
|
hansroh/skitai
|
tests/level4/test_late_respond.py
|
Python
|
mit
| 675 | 0.020741 |
# This file is part of Moksha.
# Copyright (C) 2008-2010 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from moksha.config.app_cfg import base_config
# Use base_config to set up the environment loader function that the
# framework invokes at application start-up.
load_environment = base_config.make_load_environment()
|
lmacken/moksha
|
moksha/config/environment.py
|
Python
|
apache-2.0
| 779 | 0.002567 |
# -*- coding: utf-8 -*-
import logging
import threading
from midas.compat import HTTPError
from midas.compat import Queue
from midas.crunchbase_company import CompanyList
import midas.scripts
class FetchCrunchbaseCompanies(midas.scripts.MDCommand):
    """
    Crawl the companies information from crunchbase.com and save it
    locally.
    """

    def add_argument(self):
        # Extend the MDCommand argument parser with crawl options.
        self.parser.add_argument('-p', '--num_threads', default=1, type=int,
                                 help='How many threads should crawl in parallel')
        self.parser.add_argument('location',
                                 action=midas.scripts.CheckDirectoryAction,
                                 help='The location to save the crawled data')

    def run(self):
        # Refresh the company list, then fan the not-yet-local companies
        # out to daemon Fetcher threads via a joined queue.
        if self.args.quiet:
            log_level = logging.CRITICAL
        else:
            log_level = logging.INFO
        logging.basicConfig(level=log_level)
        cl = CompanyList(self.args.location)
        logging.info('Updating CompanyList')
        cl.update()
        q = Queue()
        for _ in range(self.args.num_threads):
            t = Fetcher(q)
            t.daemon = True  # do not block interpreter exit
            t.start()
        for company in cl.list_not_local():
            q.put(company)
        # block until every queued company has been processed
        q.join()
        return 0
class Fetcher(threading.Thread):
    """Daemon worker that pulls company instances off a queue and calls
    their ``update()`` method, retrying transient HTTP errors."""

    def __init__(self, queue):
        super(Fetcher, self).__init__()
        self.q = queue
        self.inst = None  # company currently being processed

    def run(self):
        # Loop forever; the thread is a daemon, so it dies with the process.
        while True:
            self.inst = self.q.get()
            logging.info('{0}: Updating'.format(self.inst))
            self.make_update(0)
            self.q.task_done()

    def make_update(self, tries=0):
        """Update ``self.inst``, retrying up to two more times on 503/504.

        Fix: the retry log line previously said "Got 504" even when the
        server returned 503; it now reports the actual status code.
        """
        try:
            self.inst.update()
        except HTTPError as e:
            if e.code == 404:
                logging.critical('{0}: Got 404'.format(self.inst))
            elif tries < 2 and e.code in (503, 504):
                logging.critical(
                    '{0}: Got {1} ({2} attempt[s])'.format(self.inst, e.code, tries + 1)
                )
                self.make_update(tries + 1)
            else:
                logging.exception(e)
        except Exception as e:
            logging.critical(
                '{0}: An exception occured'.format(self.inst))
            logging.exception(e)
|
fuzzy-id/midas
|
midas/scripts/fetch_crunchbase_companies.py
|
Python
|
bsd-3-clause
| 2,306 | 0.001301 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
    # Build the weaponsmith-trainer creature template.  This file is
    # autogenerated: hand edits belong only between the modification
    # markers below.
    result = Creature()

    result.template = "object/mobile/shared_dressed_weaponsmith_trainer_02.iff"
    result.attribute_template_id = 9
    result.stfName("npc_name","human_base_male")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
|
anhstudios/swganh
|
data/scripts/templates/object/mobile/shared_dressed_weaponsmith_trainer_02.py
|
Python
|
mit
| 455 | 0.046154 |
import ddt
from mock import patch
from nose.tools import raises
from django.db import DatabaseError
from django.test.utils import override_settings
from openassessment.assessment.models import PeerWorkflow, StudentTrainingWorkflow
from openassessment.test_utils import CacheResetTest
import openassessment.workflow.api as workflow_api
from openassessment.workflow.errors import AssessmentWorkflowInternalError
from openassessment.workflow.models import AssessmentWorkflow
import submissions.api as sub_api
# Minimal single-criterion rubric shared by the workflow tests.
RUBRIC_DICT = {
    "criteria": [
        {
            "name": "secret",
            "prompt": "Did the writer keep it secret?",
            "options": [
                {"name": "no", "points": "0", "explanation": ""},
                {"name": "yes", "points": "1", "explanation": ""},
            ]
        },
    ]
}

# Canned submission answers.
ANSWER_1 = {"text": "Shoot Hot Rod"}
ANSWER_2 = {"text": "Ultra Magnus fumble"}

# (student, item, course) identifiers for two distinct test submissions.
ITEM_1 = {
    "student_id": "Optimus Prime 001",
    "item_id": "Matrix of Leadership",
    "course_id": "Advanced Auto Mechanics 200",
    "item_type": "openassessment",
}
ITEM_2 = {
    "student_id": "Optimus Prime 002",
    "item_id": "Matrix of Leadership(COPY)",
    "course_id": "Advanced Auto Mechanics 201",
    "item_type": "openassessment",
}
@ddt.ddt
class TestAssessmentWorkflowApi(CacheResetTest):
@ddt.file_data('data/assessments.json')
def test_create_workflow(self, data):
    """A new workflow starts at the first configured step, and a
    PeerWorkflow row exists only when peer is that first step."""
    first_step = data["steps"][0] if data["steps"] else "peer"
    if "ai" in data["steps"]:
        # the "ai" step is skipped; fall through to the next real step
        first_step = data["steps"][1] if len(data["steps"]) > 1 else "waiting"
    submission = sub_api.create_submission(ITEM_1, ANSWER_1)
    workflow = workflow_api.create_workflow(submission["uuid"], data["steps"])

    workflow_keys = set(workflow.keys())
    self.assertEqual(
        workflow_keys,
        {
            'submission_uuid', 'status', 'created', 'modified', 'score'
        }
    )
    self.assertEqual(workflow["submission_uuid"], submission["uuid"])
    self.assertEqual(workflow["status"], first_step)

    workflow_from_get = workflow_api.get_workflow_for_submission(
        submission["uuid"], data["requirements"]
    )
    # get_workflow_for_submission adds status_details; drop it to compare
    del workflow_from_get['status_details']
    self.assertEqual(workflow, workflow_from_get)

    # Test that the Peer Workflow is, or is not created, based on when peer
    # is a step in the workflow.
    if "peer" == first_step:
        peer_workflow = PeerWorkflow.objects.get(submission_uuid=submission["uuid"])
        self.assertIsNotNone(peer_workflow)
    else:
        peer_workflows = list(PeerWorkflow.objects.filter(submission_uuid=submission["uuid"]))
        self.assertFalse(peer_workflows)
def test_assessment_module_rollback_update_workflow(self):
    """
    Test that updates work when assessment modules roll back

    This test is designed to instantiate a workflow with an installed
    assessment module, then verify the workflow can be updated even when
    the status is set to an uninstalled assessment module.
    """
    requirements = {
        "special": {},
        "peer": {
            "must_grade": 1,
            "must_be_graded_by": 1
        },
        "self": {}
    }

    # We'll cheat to create the workflow with the new 'special' status,
    # otherwise the creation logic will not allow unknown an unknown status
    # to be set.
    real_steps = AssessmentWorkflow.STEPS
    AssessmentWorkflow.STEPS = ["special"] + real_steps
    workflow, submission = self._create_workflow_with_status(
        "user 1",
        "test/1/1",
        "peer-problem",
        "special",
        steps=["special", "peer", "self"])
    # restore the class attribute so the "special" step is now unknown
    AssessmentWorkflow.STEPS = real_steps

    workflow_api.get_workflow_for_submission(
        submission["uuid"], requirements
    )
    peer_workflows = list(PeerWorkflow.objects.filter(submission_uuid=submission["uuid"]))
    self.assertTrue(peer_workflows)

    # completing peer should advance the workflow to self
    with patch('openassessment.assessment.api.peer.submitter_is_finished') as mock_peer_submit:
        mock_peer_submit.return_value = True
        workflow = workflow_api.get_workflow_for_submission(
            submission["uuid"], requirements
        )
        self.assertEquals("self", workflow['status'])

    # completing self should advance the workflow to waiting
    with patch('openassessment.assessment.api.self.submitter_is_finished') as mock_self_submit:
        mock_self_submit.return_value = True
        workflow = workflow_api.get_workflow_for_submission(
            submission["uuid"], requirements
        )
        self.assertEquals("waiting", workflow['status'])
def test_update_peer_workflow(self):
    """Finishing the training step moves the workflow to peer, and the
    PeerWorkflow row is created only at that point."""
    submission = sub_api.create_submission(ITEM_1, ANSWER_1)
    workflow = workflow_api.create_workflow(submission["uuid"], ["training", "peer"])
    StudentTrainingWorkflow.create_workflow(submission_uuid=submission["uuid"])
    requirements = {
        "training": {
            "num_required": 2
        },
        "peer": {
            "must_grade": 5,
            "must_be_graded_by": 3
        }
    }
    workflow_keys = set(workflow.keys())
    self.assertEqual(
        workflow_keys,
        {
            'submission_uuid', 'status', 'created', 'modified', 'score'
        }
    )
    self.assertEqual(workflow["submission_uuid"], submission["uuid"])
    self.assertEqual(workflow["status"], "training")

    # no peer workflow yet: training is still the active step
    peer_workflows = list(PeerWorkflow.objects.filter(submission_uuid=submission["uuid"]))
    self.assertFalse(peer_workflows)

    workflow_from_get = workflow_api.get_workflow_for_submission(
        submission["uuid"], requirements
    )
    del workflow_from_get['status_details']
    self.assertEqual(workflow, workflow_from_get)

    # satisfy training by requiring zero examples, then update
    requirements["training"]["num_required"] = 0
    workflow = workflow_api.update_from_assessments(submission["uuid"], requirements)

    # New step is Peer, and a Workflow has been created.
    self.assertEqual(workflow["status"], "peer")
    peer_workflow = PeerWorkflow.objects.get(submission_uuid=submission["uuid"])
    self.assertIsNotNone(peer_workflow)
@ddt.file_data('data/assessments.json')
def test_need_valid_submission_uuid(self, data):
    """Workflow creation rejects unknown and wrongly-typed submission ids."""
    # submission doesn't exist
    with self.assertRaises(workflow_api.AssessmentWorkflowInternalError):
        _ = workflow_api.create_workflow("xxxxxxxxxxx", data["steps"])

    # submission_uuid is the wrong type
    with self.assertRaises(workflow_api.AssessmentWorkflowRequestError):
        _ = workflow_api.create_workflow(123, data["steps"])
@patch('submissions.models.Submission.objects.get')
@ddt.file_data('data/assessments.json')
@raises(workflow_api.AssessmentWorkflowInternalError)
def test_unexpected_submissions_errors_wrapped(self, data, mock_get):
    """Arbitrary submission-layer errors surface as internal errors."""
    mock_get.side_effect = Exception("Kaboom!")
    workflow_api.create_workflow("zzzzzzzzzzzzzzz", data["steps"])
@patch('openassessment.workflow.models.AssessmentWorkflow.objects.create')
@ddt.file_data('data/assessments.json')
@raises(workflow_api.AssessmentWorkflowInternalError)
def test_unexpected_workflow_errors_wrapped(self, data, mock_create):
    """Database errors during workflow creation surface as internal errors."""
    mock_create.side_effect = DatabaseError("Kaboom!")
    submission = sub_api.create_submission(ITEM_1, ANSWER_2)
    workflow_api.create_workflow(submission["uuid"], data["steps"])
@patch('openassessment.assessment.models.peer.PeerWorkflow.objects.get_or_create')
@raises(workflow_api.AssessmentWorkflowInternalError)
def test_unexpected_peer_workflow_errors_wrapped(self, mock_create):
    """Database errors while creating the peer workflow surface as
    internal errors."""
    mock_create.side_effect = DatabaseError("Kaboom!")
    submission = sub_api.create_submission(ITEM_1, ANSWER_2)
    workflow_api.create_workflow(submission["uuid"], ["peer", "self"])
@patch('openassessment.assessment.api.staff.get_score')
@patch('openassessment.assessment.models.peer.PeerWorkflow.objects.get_or_create')
@raises(workflow_api.AssessmentWorkflowInternalError)
def test_no_peer_assessment_error_handled(self, mock_get_workflow, mock_get_staff_score):
    """
    Tests to verify that, given a problem that requires the peer step and a submission associated with a workflow
    that has no assessments, an overriding staff score will push the workflow into the done state and not crash
    when there are no assessments in the "completed" peer step.
    """
    # NOTE(review): assigning to ``.raises`` on a Mock creates an inert
    # attribute; ``mock_get_workflow.side_effect = PeerWorkflow.DoesNotExist``
    # looks like the intent here — confirm before changing, since the test
    # currently passes via the @raises decorator anyway.
    mock_get_workflow.raises = PeerWorkflow.DoesNotExist
    mock_get_staff_score.return_value = {
        "points_earned": 10,
        "points_possible": 10,
        "contributing_assessments": 123,
        "staff_id": "staff 1",
    }
    _, submission = self._create_workflow_with_status(
        "user 1",
        "test/1/1",
        "peer-problem",
        "peer",
        steps=["peer"]
    )
    workflow_api.update_from_assessments(
        submission["uuid"],
        {
            "peer": {
                "must_grade": 5,
                "must_be_graded_by": 3
            }
        },
        override_submitter_requirements=True
    )
@patch('openassessment.workflow.models.AssessmentWorkflow.objects.get')
@ddt.file_data('data/assessments.json')
@raises(workflow_api.AssessmentWorkflowInternalError)
def test_unexpected_exception_wrapped(self, data, mock_create):
    """Arbitrary errors during update_from_assessments surface as
    internal errors."""
    mock_create.side_effect = Exception("Kaboom!")
    submission = sub_api.create_submission(ITEM_1, ANSWER_2)
    workflow_api.update_from_assessments(submission["uuid"], data["steps"])
@ddt.file_data('data/assessments.json')
def test_get_assessment_workflow_expected_errors(self, data):
    """Lookups with an unknown id raise NotFound; wrong types raise
    RequestError."""
    with self.assertRaises(workflow_api.AssessmentWorkflowNotFoundError):
        workflow_api.get_workflow_for_submission("0000000000000", data["requirements"])
    with self.assertRaises(workflow_api.AssessmentWorkflowRequestError):
        workflow_api.get_workflow_for_submission(123, data["requirements"])
@patch('submissions.models.Submission.objects.get')
@ddt.file_data('data/assessments.json')
@raises(workflow_api.AssessmentWorkflowInternalError)
def test_unexpected_workflow_get_errors_wrapped(self, data, mock_get):
    """Arbitrary submission-layer errors during lookup surface as
    internal errors."""
    mock_get.side_effect = Exception("Kaboom!")
    submission = sub_api.create_submission(ITEM_1, "We talk TV!")
    workflow = workflow_api.create_workflow(submission["uuid"], data["steps"])
    # NOTE(review): workflow dicts expose "submission_uuid", not "uuid";
    # this still passes because the patched mock raises first — verify the
    # intended key before relying on it.
    workflow_api.get_workflow_for_submission(workflow["uuid"], {})
def test_preexisting_workflow(self):
    """
    Verifies that even if a workflow does not go through start_workflow, it won't blow up.
    update_from_assessments() will go through _get_steps(), and add a staff step to the workflow
    even if it was created without one initially.
    """
    submission = sub_api.create_submission({
        "student_id": "test student",
        "course_id": "test course",
        "item_id": "test item",
        "item_type": "openassessment",
    }, "test answer")

    # Create the model object directly, bypassing start_workflow()
    workflow = AssessmentWorkflow.objects.create(
        submission_uuid=submission["uuid"],
        status=AssessmentWorkflow.STATUS.waiting,
        course_id="test course",
        item_id="test item"
    )

    # This call will throw exceptions if the workflow is in an invalid state
    workflow_api.update_from_assessments(submission["uuid"], {})
def test_get_status_counts(self):
    """get_status_counts tallies workflows per status, scoped to one
    (course, item) pair, and never reports the ignored "ai" step."""
    # Initially, the counts should all be zero
    counts = workflow_api.get_status_counts(
        "test/1/1",
        "peer-problem",
        ["ai", "training", "peer", "self"]
    )
    self.assertEqual(counts, [
        {"status": "training", "count": 0},
        {"status": "peer", "count": 0},
        {"status": "self", "count": 0},
        {"status": "waiting", "count": 0},
        {"status": "done", "count": 0},
        {"status": "cancelled", "count": 0},
    ])
    self.assertFalse("ai" in [count['status'] for count in counts])

    # Create assessments with each status
    # We're going to cheat a little bit by using the model objects
    # directly, since the API does not provide access to the status directly.
    self._create_workflow_with_status("user 1", "test/1/1", "peer-problem", "training")
    self._create_workflow_with_status("user 1", "test/1/1", "peer-problem", "peer")
    self._create_workflow_with_status("user 2", "test/1/1", "peer-problem", "self")
    self._create_workflow_with_status("user 3", "test/1/1", "peer-problem", "self")
    self._create_workflow_with_status("user 4", "test/1/1", "peer-problem", "waiting")
    self._create_workflow_with_status("user 5", "test/1/1", "peer-problem", "waiting")
    self._create_workflow_with_status("user 6", "test/1/1", "peer-problem", "waiting")
    self._create_workflow_with_status("user 7", "test/1/1", "peer-problem", "done")
    self._create_workflow_with_status("user 8", "test/1/1", "peer-problem", "done")
    self._create_workflow_with_status("user 9", "test/1/1", "peer-problem", "done")
    self._create_workflow_with_status("user 10", "test/1/1", "peer-problem", "done")
    self._create_workflow_with_status("user 11", "test/1/1", "peer-problem", "cancelled")

    # Now the counts should be updated
    counts = workflow_api.get_status_counts(
        "test/1/1",
        "peer-problem",
        ["ai", "training", "peer", "self"]
    )
    self.assertEqual(counts, [
        {"status": "training", "count": 1},
        {"status": "peer", "count": 1},
        {"status": "self", "count": 2},
        {"status": "waiting", "count": 3},
        {"status": "done", "count": 4},
        {"status": "cancelled", "count": 1},
    ])
    self.assertFalse("ai" in [count['status'] for count in counts])

    # Create a workflow in a different course, same user and item
    # Counts should be the same
    self._create_workflow_with_status("user 1", "other_course", "peer-problem", "peer")
    updated_counts = workflow_api.get_status_counts(
        "test/1/1",
        "peer-problem",
        ["ai", "training", "peer", "self"]
    )
    self.assertEqual(counts, updated_counts)

    # Create a workflow in the same course, different item
    # Counts should be the same
    self._create_workflow_with_status("user 1", "test/1/1", "other problem", "peer")
    updated_counts = workflow_api.get_status_counts(
        "test/1/1",
        "peer-problem",
        ["ai", "training", "peer", "self"]
    )
    self.assertEqual(counts, updated_counts)
@override_settings(ORA2_ASSESSMENTS={'self': 'not.a.module'})
def test_unable_to_load_api(self):
    """A misconfigured assessment-module path raises an internal error
    at workflow creation time."""
    submission = sub_api.create_submission({
        "student_id": "test student",
        "course_id": "test course",
        "item_id": "test item",
        "item_type": "openassessment",
    }, "test answer")

    with self.assertRaises(AssessmentWorkflowInternalError):
        workflow_api.create_workflow(submission['uuid'], ['self'])
def test_cancel_the_assessment_workflow(self):
    """Cancelling a workflow sets status to 'cancelled' and clears the
    score."""
    # Create the submission and assessment workflow.
    submission = sub_api.create_submission(ITEM_1, ANSWER_1)
    workflow = workflow_api.create_workflow(submission["uuid"], ["peer"])
    requirements = {
        "peer": {
            "must_grade": 1,
            "must_be_graded_by": 1
        }
    }

    # Check the workflow is not cancelled.
    self.assertFalse(workflow_api.is_workflow_cancelled(submission["uuid"]))

    # Check the status is not cancelled.
    self.assertNotEqual(workflow.get('status'), 'cancelled')

    # Check the points_earned are not 0
    self.assertNotEqual(workflow['score'], 0)

    # Cancel the workflow for submission.
    workflow_api.cancel_workflow(
        submission_uuid=submission["uuid"],
        comments="Inappropriate language",
        cancelled_by_id=ITEM_2['student_id'],
        assessment_requirements=requirements
    )

    # Check workflow is cancelled.
    self.assertTrue(workflow_api.is_workflow_cancelled(submission["uuid"]))

    # Status for workflow should be cancelled.
    workflow = AssessmentWorkflow.get_by_submission_uuid(submission["uuid"])
    self.assertEqual(workflow.status, 'cancelled')

    # Score points_earned should be 0.
    # In case of 0 earned points the score would be None.
    self.assertEqual(workflow.score, None)
def test_cancel_the_assessment_workflow_does_not_exist(self):
    """Cancelling a non-existent submission raises, and leaves existing
    workflows untouched."""
    # Create the submission and assessment workflow.
    submission = sub_api.create_submission(ITEM_1, ANSWER_1)
    workflow = workflow_api.create_workflow(submission["uuid"], ["peer"])
    requirements = {
        "peer": {
            "must_grade": 1,
            "must_be_graded_by": 1
        }
    }

    # Check if workflow is cancelled.
    self.assertFalse(workflow_api.is_workflow_cancelled(submission["uuid"]))
    self.assertNotEqual(workflow.get('status'), 'cancelled')

    # Cancel the workflow raises DoesNotExist.
    with self.assertRaises(workflow_api.AssessmentWorkflowError):
        workflow_api.cancel_workflow(
            submission_uuid="1234567098789",
            comments="Inappropriate language",
            cancelled_by_id=ITEM_2['student_id'],
            assessment_requirements=requirements
        )

    # Status for workflow should not be cancelled.
    workflow = AssessmentWorkflow.get_by_submission_uuid(submission["uuid"])
    self.assertNotEqual(workflow.status, 'cancelled')
def test_get_the_cancelled_workflow(self):
    """A cancellation record is retrievable only after the workflow is cancelled."""
    # Create the submission and assessment workflow.
    submission = sub_api.create_submission(ITEM_1, ANSWER_1)
    submission_uuid = submission["uuid"]
    workflow = workflow_api.create_workflow(submission_uuid, ["peer"])
    peer_requirements = {
        "peer": {
            "must_grade": 1,
            "must_be_graded_by": 1
        }
    }

    # Sanity checks: the new workflow is live and carries a score entry.
    self.assertFalse(workflow_api.is_workflow_cancelled(submission_uuid))
    self.assertNotEqual(workflow.get('status'), 'cancelled')
    self.assertNotEqual(workflow['score'], 0)

    # No cancellation record exists before the cancel call.
    self.assertIsNone(
        workflow_api.get_assessment_workflow_cancellation(submission_uuid))

    # Cancel the workflow for this submission.
    workflow_api.cancel_workflow(
        submission_uuid=submission_uuid,
        comments="Inappropriate language",
        cancelled_by_id=ITEM_2['student_id'],
        assessment_requirements=peer_requirements
    )

    # Afterwards the workflow reports cancelled and the record is returned.
    self.assertTrue(workflow_api.is_workflow_cancelled(submission_uuid))
    self.assertIsNotNone(
        workflow_api.get_assessment_workflow_cancellation(submission_uuid))
def _create_workflow_with_status(
        self, student_id, course_id, item_id,
        status, answer="answer", steps=None
):
    """
    Create a submission and workflow, then force the workflow into *status*.

    Args:
        student_id (unicode): Student ID for the submission.
        course_id (unicode): Course ID for the submission.
        item_id (unicode): Item ID for the submission.
        status (unicode): One of the acceptable status values
            (e.g. "peer", "self", "waiting", "done").

    Keyword Arguments:
        answer (unicode): Submission answer.
        steps (list): Steps to create the workflow with; defaults to
            ["peer", "self"] when falsy.

    Returns:
        (workflow, submission) tuple.
    """
    student_item = {
        "student_id": student_id,
        "course_id": course_id,
        "item_id": item_id,
        "item_type": "openassessment",
    }
    submission = sub_api.create_submission(student_item, answer)
    workflow = workflow_api.create_workflow(
        submission['uuid'], steps or ["peer", "self"])

    # Bypass the public API to force the requested status directly.
    workflow_model = AssessmentWorkflow.objects.get(
        submission_uuid=workflow['submission_uuid'])
    workflow_model.status = status
    workflow_model.save()

    return workflow, submission
|
Stanford-Online/edx-ora2
|
openassessment/workflow/test/test_api.py
|
Python
|
agpl-3.0
| 21,103 | 0.002275 |
# Size of the extracted patches consumed by other modules -- presumably the
# number of pixels per patch side, so 1 means single-pixel (spectral-only)
# samples; TODO confirm against the code that imports this constant.
patch_size = 1
|
KGPML/Hyperspectral
|
patch_size.py
|
Python
|
gpl-3.0
| 14 | 0.071429 |
from django.conf.urls import url, include
from django.contrib.auth.decorators import login_required
from .views import *
# URL routes for the evaluation app. All views require an authenticated user.
urlpatterns = [
    # Listing
    # NOTE(review): this pattern lacks a trailing '$' unlike the others, so it
    # also matches any longer path starting with 'evaluacion-lista/' -- confirm.
    url(r'^evaluacion-lista/', login_required(evaluacion_list), name='listar_evaluacion'),

    # Step-by-step evaluation wizard
    url(r'^generar/step1/$', login_required(evaluacion_step1), name='evaluacion_step1'),
    url(r'^generar/step1/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_step1_back), name='evaluacion_step1_back'),
    url(r'^generar/step2/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_step2), name='evaluacion_step2'),
    url(r'^generar/step3/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_step3), name='evaluacion_step3'),
    url(r'^generar/step4/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_step4), name='evaluacion_step4'),

    # Automatic (quick) evaluation
    url(r'^automatica/step1/$', login_required(evaluacion_rapida_step1), name='evaluacion_rapida_step1'),
    url(r'^automatica/step2/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_rapida_step2), name='evaluacion_rapida_step2'),

    # Evaluation detail / discard
    url(r'^detalle/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_detail), name='evaluacion_detail'),
    url(r'^descartar/(?P<evaluacion_id>\d+)/$', login_required(descartar_evaluacion), name='evaluacion_descartar'),

    # Student grading
    url(r'^estudiantes-notas/(?P<evaluacion_id>\d+)/$', login_required(estudiante_calificacion), name='estudiante_calificacion'),

    # Excel import/export of grades
    url(r'^calificacion/(?P<evaluacion_id>\d+)/$', login_required(calificaciones), name='calificacion'),
    url(r'^agregar/calificaciones/(?P<evaluacion_id>\d+)/$', login_required(get_calificaciones), name='calificacionExcel'),

    # PDF generation (evaluation sheet and answer key)
    url(r'^evaluacion-pdf/(?P<evaluacion_id>\d+)/$', login_required(evaluacion_pdf), name='evaluacionPDF'),
    url(r'^solucion-pdf/(?P<evaluacion_id>\d+)/$', login_required(solucion_pdf), name='solucionPDF'),

    # AJAX endpoints
    url(r'^ContenidoFiltroAjax/$', login_required(ContenidoFiltroAjax.as_view()), name='ContenidoFiltroAjax'),
    url(r'^PreguntaObjAjax/$', login_required(PreguntaObjAjax.as_view()), name='PreguntaObjAjax'),
    url(r'^filtro/palabras/$', login_required(busqueda), name='busqueda_palabra'),
    url(r'^PreguntaBusquedaAjax/$', login_required(PreguntaBusquedaAjax.as_view()), name='PreguntaBusquedaAjax'),
]
|
Mansilla1/Sistema-SEC
|
apps/evaluacion/urls.py
|
Python
|
apache-2.0
| 2,293 | 0.024422 |
## \file
## \ingroup tutorial_tdataframe
## \notebook -nodraw
## This tutorial shows how to express the concept of ranges when working with the TDataFrame.
## \macro_code
##
## \date March 2017
## \author Danilo Piparo
import ROOT
fill_tree_code = '''
void fill_tree(const char *filename, const char *treeName)
{
TFile f(filename, "RECREATE");
TTree t(treeName, treeName);
int b1;
float b2;
t.Branch("b1", &b1);
t.Branch("b2", &b2);
for (int i = 0; i < 100; ++i) {
b1 = i;
b2 = i * i;
t.Fill();
}
t.Write();
f.Close();
return;
}
'''
# We prepare an input tree to run on
fileName = "tdf006_ranges_py.root"
treeName = "myTree"
ROOT.gInterpreter.Declare(fill_tree_code)
ROOT.fill_tree(fileName, treeName)
# We read the tree from the file and create a TDataFrame.
TDF = ROOT.ROOT.Experimental.TDataFrame
d = TDF(treeName, fileName)
# ## Usage of ranges
# Now we'll count some entries using ranges
c_all = d.Count()
# This is how you can express a range of the first 30 entries
d_0_30 = d.Range(0, 30)
c_0_30 = d_0_30.Count()
# This is how you pick all entries from 15 onwards
d_15_end = d.Range(15, 0)
c_15_end = d_15_end.Count()
# We can use a stride too, in this case we pick an event every 3
d_15_end_3 = d.Range(15, 0, 3)
c_15_end_3 = d_15_end_3.Count()
# The Range is a 1st class citizen in the TDataFrame graph:
# not only actions (like Count) but also filters and new columns can be added to it.
d_0_50 = d.Range(0, 50)
c_0_50_odd_b1 = d_0_50.Filter("1 == b1 % 2").Count()
# An important thing to notice is that the counts of a filter are relative to the
# number of entries a filter "sees". Therefore, if a Range depends on a filter,
# the Range will act on the entries passing the filter only.
c_0_3_after_even_b1 = d.Filter("0 == b1 % 2").Range(0, 3).Count()
# Ok, time to wrap up: let's print all counts!
print("Usage of ranges:")
print(" - All entries:", c_all.GetValue())
print(" - Entries from 0 to 30:", c_0_30.GetValue())
print(" - Entries from 15 onwards:", c_15_end.GetValue())
print(" - Entries from 15 onwards in steps of 3:", c_15_end_3.GetValue())
print(" - Entries from 0 to 50, odd only:", c_0_50_odd_b1.GetValue())
print(" - First three entries of all even entries:", c_0_3_after_even_b1.GetValue())
|
bbockelm/root
|
tutorials/dataframe/tdf006_ranges.py
|
Python
|
lgpl-2.1
| 2,283 | 0.004818 |
from __future__ import absolute_import
from django.db import models
from django.test import TestCase
from .models import Author, Book
signal_output = []
def pre_save_test(signal, sender, instance, **kwargs):
    """Record a pre_save emission in the module-level signal_output log."""
    messages = ['pre_save signal, %s' % instance]
    # Fixture loading passes raw=True; record that separately.
    if kwargs.get('raw'):
        messages.append('Is raw')
    signal_output.extend(messages)
def post_save_test(signal, sender, instance, **kwargs):
    """Record a post_save emission, noting created/updated and raw saves."""
    signal_output.append('post_save signal, %s' % instance)
    if 'created' in kwargs:
        # A save is either an INSERT ("created") or an UPDATE.
        signal_output.append('Is created' if kwargs['created'] else 'Is updated')
    if kwargs.get('raw'):
        signal_output.append('Is raw')
def pre_delete_test(signal, sender, instance, **kwargs):
    """Record a pre_delete emission and whether the instance still has a pk.

    NOTE(review): the first message is labelled 'pre_save signal' even though
    this is the pre_delete handler; assertions elsewhere may pin the exact
    string, so it is left unchanged -- confirm before renaming.
    """
    signal_output.append('pre_save signal, %s' % instance)
    # `is not None` is the idiomatic identity test (PEP 8); it yields the same
    # boolean as the old `!= None` comparison for both int pks and None.
    signal_output.append('instance.id is not None: %s' % (instance.id is not None))
def post_delete_test(signal, sender, instance, **kwargs):
    """Record a post_delete emission and whether the instance still has a pk."""
    signal_output.append('post_delete signal, %s' % instance)
    # `is not None` is the idiomatic identity test (PEP 8); it yields the same
    # boolean as the old `!= None` comparison for both int pks and None.
    signal_output.append('instance.id is not None: %s' % (instance.id is not None))
class SignalsRegressTests(TestCase):
    """
    Testing signals before/after saving and deleting.
    """

    def get_signal_output(self, fn, *args, **kwargs):
        """Run *fn* and return only the signal messages it produced.

        The module-level ``signal_output`` list is reset first, so each call
        observes just the signals fired by ``fn``.
        """
        # Flush any existing signal output
        global signal_output
        signal_output = []
        fn(*args, **kwargs)
        return signal_output

    def setUp(self):
        # Save up the number of connected signals so that we can check at the end
        # that all the signals we register get properly unregistered (#9989)
        self.pre_signals = (len(models.signals.pre_save.receivers),
                            len(models.signals.post_save.receivers),
                            len(models.signals.pre_delete.receivers),
                            len(models.signals.post_delete.receivers))

        models.signals.pre_save.connect(pre_save_test)
        models.signals.post_save.connect(post_save_test)
        models.signals.pre_delete.connect(pre_delete_test)
        models.signals.post_delete.connect(post_delete_test)

    def tearDown(self):
        # Disconnect in reverse order of connection.
        models.signals.post_delete.disconnect(post_delete_test)
        models.signals.pre_delete.disconnect(pre_delete_test)
        models.signals.post_save.disconnect(post_save_test)
        models.signals.pre_save.disconnect(pre_save_test)

        # Check that all our signals got disconnected properly.
        post_signals = (len(models.signals.pre_save.receivers),
                        len(models.signals.post_save.receivers),
                        len(models.signals.pre_delete.receivers),
                        len(models.signals.post_delete.receivers))
        self.assertEqual(self.pre_signals, post_signals)

    def test_model_signals(self):
        """ Model saves should throw some signals. """
        a1 = Author(name='Neal Stephenson')
        self.assertEqual(self.get_signal_output(a1.save), [
            "pre_save signal, Neal Stephenson",
            "post_save signal, Neal Stephenson",
            "Is created"
        ])

        b1 = Book(name='Snow Crash')
        self.assertEqual(self.get_signal_output(b1.save), [
            "pre_save signal, Snow Crash",
            "post_save signal, Snow Crash",
            "Is created"
        ])

    def test_m2m_signals(self):
        """ Assigning and removing to/from m2m shouldn't generate an m2m signal """
        b1 = Book(name='Snow Crash')
        self.get_signal_output(b1.save)
        a1 = Author(name='Neal Stephenson')
        self.get_signal_output(a1.save)
        # Assigning the m2m relation both ways must produce no signal output.
        self.assertEqual(self.get_signal_output(setattr, b1, 'authors', [a1]), [])
        self.assertEqual(self.get_signal_output(setattr, b1, 'authors', []), [])
|
LethusTI/supportcenter
|
vendor/django/tests/regressiontests/signals_regress/tests.py
|
Python
|
gpl-3.0
| 3,724 | 0.003222 |
# Copyright © 2019 Red Hat, Inc.
#
# This file is part of Bodhi.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""These are tests for the bodhi.server.consumers.automatic_updates module."""
from copy import deepcopy
from unittest import mock
import logging
from fedora_messaging.api import Message
from fedora_messaging.testing import mock_sends
import pytest
from bodhi.server.config import config
from bodhi.server.consumers.automatic_updates import AutomaticUpdateHandler
from bodhi.server.models import (
Build, Release, TestGatingStatus, Update, UpdateRequest, UpdateStatus, UpdateType, User
)
from bodhi.tests.server import base
@mock.patch('bodhi.server.consumers.automatic_updates.work_on_bugs_task', mock.Mock())
class TestAutomaticUpdateHandler(base.BasePyTestCase):
    """Test the automatic update handler.

    ``caplog`` in the test signatures is the standard pytest log-capture
    fixture; most tests set the capture level to DEBUG and assert on the
    handler's log output.
    """

    def setup_method(self, method):
        """Set up environment for each test."""
        super().setup_method(method)

        # Reuse F17 if the base fixtures created it, otherwise create it;
        # either way it must allow automatic updates.
        self.release = self.db.query(Release).filter_by(name='F17').first()
        if self.release:
            self.release.create_automatic_updates = True
            self.db.flush()
        else:
            self.release = self.create_release('17', create_automatic_updates=True)

        # Sample Koji tag message for a candidate build.
        body = {
            'build_id': 442562,
            'name': 'colord',
            'tag_id': 214,
            'instance': 's390',
            'tag': 'f17-updates-candidate',
            'user': 'sharkcz',
            'version': '1.3.4',
            'owner': 'sharkcz',
            'release': '1.fc26',
        }
        self.sample_message = Message(topic='', body=body)
        self.sample_nvr = f"{body['name']}-{body['version']}-{body['release']}"

        self.db_factory = base.TransactionalSessionMaker(self.Session)
        self.handler = AutomaticUpdateHandler(self.db_factory)

    # Test the main code paths.

    def test_consume(self, caplog):
        """Assert that messages about tagged builds create an update."""
        caplog.set_level(logging.DEBUG)

        # process the message
        self.handler(self.sample_message)

        # check if the update exists...
        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()

        # ...and some of its properties
        assert update is not None
        assert update.type == UpdateType.unspecified
        assert update.status == UpdateStatus.pending
        assert update.autokarma == False
        assert update.test_gating_status is None
        assert update.builds[0].release == self.release

        expected_username = base.buildsys.DevBuildsys._build_data['owner_name']
        assert update.user and update.user.name == expected_username

        assert not any(r.levelno >= logging.WARNING for r in caplog.records)

    @pytest.mark.parametrize('changelog', (True, None, ""))
    @mock.patch('bodhi.server.models.RpmBuild.get_changelog')
    def test_changelog(self, mock_generate_changelog, changelog):
        """Assert that update notes contain the changelog if it exists."""
        if changelog:
            # fill the changelog here rather than in the decorator
            changelog = ('* Sat Aug 3 2013 Fedora Releng <rel-eng@lists.fedoraproject.org> - 2\n'
                         '- Added a free money feature.\n* Tue Jun 11 2013 Randy <bowlofeggs@fpo>'
                         ' - 2.0.1-2\n- Make users ☺\n')
        mock_generate_changelog.return_value = changelog

        # process the message
        self.handler(self.sample_message)

        # check if the update exists...
        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()

        if changelog:
            assert update.notes == f"""Automatic update for colord-1.3.4-1.fc26.

##### **Changelog**

```
{changelog}
```"""
        else:  # no changelog
            assert update.notes == "Automatic update for colord-1.3.4-1.fc26."

    @mock.patch('bodhi.server.models.RpmBuild.get_changelog')
    def test_bug_added(self, mock_generate_changelog):
        """Assert that a bug is added to the update if proper string is in changelog."""
        changelog = ('* Sat Aug 3 2013 Fedora Releng <rel-eng@lists.fedoraproject.org> - 2\n'
                     '- Added a free money feature.\n- Fix rhbz#112233.')
        mock_generate_changelog.return_value = changelog

        # process the message
        self.handler(self.sample_message)

        # check if the update exists...
        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()

        assert update.notes == f"""Automatic update for colord-1.3.4-1.fc26.

##### **Changelog**

```
{changelog}
```"""
        # The 'rhbz#112233' reference in the changelog must attach the bug.
        assert len(update.bugs) > 0
        assert update.bugs[0].bug_id == 112233

    @mock.patch.dict(config, [('bz_exclude_rels', ['F17'])])
    @mock.patch('bodhi.server.models.RpmBuild.get_changelog')
    def test_bug_not_added_excluded_release(self, mock_generate_changelog):
        """Assert that a bug is not added for excluded release."""
        changelog = ('* Sat Aug 3 2013 Fedora Releng <rel-eng@lists.fedoraproject.org> - 2\n'
                     '- Added a free money feature.\n- Fix rhbz#112233.')
        mock_generate_changelog.return_value = changelog

        # process the message
        self.handler(self.sample_message)

        # check if the update exists...
        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()

        assert update.notes == f"""Automatic update for colord-1.3.4-1.fc26.

##### **Changelog**

```
{changelog}
```"""
        # F17 is in bz_exclude_rels, so the rhbz reference must be ignored.
        assert len(update.bugs) == 0

    @mock.patch('bodhi.server.models.RpmBuild.get_changelog')
    def test_changelog_handled_exception(self, mock_generate_changelog):
        """Assert that update creation is succesful if get_changelog() raises ValueError."""
        mock_generate_changelog.side_effect = ValueError('Handled exception')

        # process the message
        self.handler(self.sample_message)

        # check if the update exists...
        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()

        # The update is created with plain notes, without a changelog section.
        assert update.notes == "Automatic update for colord-1.3.4-1.fc26."

    @mock.patch('bodhi.server.models.RpmBuild.get_changelog')
    def test_changelog_unhandled_exception(self, mock_generate_changelog):
        """Assert that update creation is not succesful if get_changelog() raises Exception."""
        mock_generate_changelog.side_effect = Exception('Unhandled exception')

        with pytest.raises(Exception) as exc:
            self.handler(self.sample_message)

        assert str(exc.value) == 'Unhandled exception'

    def test_consume_with_orphan_build(self, caplog):
        """
        Assert existing builds without an update can be handled.

        Such builds can exist e.g. if they're used in a buildroot override.
        """
        caplog.set_level(logging.DEBUG)

        # Run the handler to create the build & update, then remove the update.
        self.handler(self.sample_message)
        build = self.db.query(Build).filter_by(nvr=self.sample_nvr).one()
        update = build.update
        build.update = None  # satisfy foreign key constraint
        self.db.delete(update)

        # Now test with the same message again which should encounter the
        # build already existing in the database.
        self.handler(self.sample_message)

        # check if the update exists...
        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()

        # ...and some of its properties
        assert update is not None
        assert update.type == UpdateType.unspecified
        assert update.status == UpdateStatus.pending
        assert update.test_gating_status is None

        expected_username = base.buildsys.DevBuildsys._build_data['owner_name']
        assert update.user and update.user.name == expected_username

        assert not any(r.levelno >= logging.WARNING for r in caplog.records)

    def test_existing_pending_update(self, caplog):
        """
        Ensure an update is not created if a matching pending one exists.
        """
        caplog.set_level(logging.DEBUG)

        self.handler(self.sample_message)

        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()

        # Move it back to Pending as if the user has manually created it
        update.status = UpdateStatus.pending
        update.request = UpdateRequest.testing
        self.db.add(update)
        self.db.flush()

        # Clear pending messages
        self.db.info['messages'] = []

        caplog.clear()

        self.handler(self.sample_message)

        assert (f"Build, active update for {self.sample_nvr} exists already, skipping."
                in caplog.messages)

    def test_obsolete_testing_update(self, caplog):
        """Assert that older builds stuck in Testing get obsoleted."""
        caplog.set_level(logging.DEBUG)

        self.handler(self.sample_message)

        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()
        assert update is not None

        # Simulate update status after failed gating
        update.status = UpdateStatus.testing
        update.test_gating_status = TestGatingStatus.failed
        self.db.flush()

        # Clear pending messages
        self.db.info['messages'] = []

        caplog.clear()

        # Create an update with a newer build
        msg = deepcopy(self.sample_message)
        msg.body['version'] = '1.3.5'
        msg.body['build_id'] = 442563
        self.handler(msg)

        nvr = self.sample_nvr.replace('1.3.4', '1.3.5')
        old_update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()
        new_update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == nvr)
        ).first()

        assert new_update is not None
        assert old_update is not None
        assert new_update.status == UpdateStatus.pending
        assert old_update.status == UpdateStatus.obsolete

    def test_problem_obsoleting_older_update(self, caplog):
        """Assert that an error while obsoleting doesn't block a new update being created."""
        caplog.set_level(logging.DEBUG)

        self.handler(self.sample_message)

        update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()
        assert update is not None

        # Simulate update status after failed gating
        update.status = UpdateStatus.testing
        update.test_gating_status = TestGatingStatus.failed
        self.db.flush()

        # Clear pending messages
        self.db.info['messages'] = []

        caplog.clear()

        # Create an update with a newer build
        msg = deepcopy(self.sample_message)
        msg.body['version'] = '1.3.5'
        msg.body['build_id'] = 442563

        with mock.patch('bodhi.server.models.Update.obsolete_older_updates',
                        side_effect=Exception('Something gone wrong')):
            self.handler(msg)

        assert 'Problem obsoleting older updates: Something gone wrong' in caplog.messages

        # The new update should have been created and the old one should be stuck in testing
        nvr = self.sample_nvr.replace('1.3.4', '1.3.5')
        old_update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == self.sample_nvr)
        ).first()
        new_update = self.db.query(Update).filter(
            Update.builds.any(Build.nvr == nvr)
        ).first()

        assert new_update is not None
        assert old_update is not None
        assert new_update.status == UpdateStatus.pending
        assert old_update.status == UpdateStatus.testing

    # The following tests cover lesser-travelled code paths.

    @mock.patch('bodhi.server.consumers.automatic_updates.transactional_session_maker')
    def test___init___without_db_factory(self, transactional_session_maker):
        """__init__() should create db_factory if missing."""
        handler = AutomaticUpdateHandler()

        assert handler.db_factory is transactional_session_maker.return_value
        transactional_session_maker.assert_called_once_with()

    # Test robustness: malformed messages, unknown koji builds, incomplete
    # buildinfo, release missing from the DB

    @pytest.mark.parametrize('missing_elem', ('tag', 'build_id', 'name', 'version', 'release'))
    def test_missing_mandatory_elems(self, missing_elem, caplog):
        """Test tag message without mandatory elements."""
        caplog.set_level(logging.DEBUG)
        msg = deepcopy(self.sample_message)
        del msg.body[missing_elem]
        self.handler(msg)
        assert any(r.levelno == logging.DEBUG
                   and r.getMessage() == f"Received incomplete tag message. Missing: {missing_elem}"
                   for r in caplog.records)

    def test_unknown_koji_build(self, caplog):
        """Test tag message about unknown koji build."""
        caplog.set_level(logging.DEBUG)
        msg = deepcopy(self.sample_message)
        # DevBuildsys treats this suffix as a build Koji doesn't know about.
        msg.body['release'] += '.youdontknowme'
        self.handler(msg)
        assert any(r.levelno == logging.DEBUG
                   and r.getMessage().startswith("Can't find Koji build for ")
                   for r in caplog.records)

    def test_incomplete_koji_buildinfo_nvr(self, caplog):
        """Test koji returning incomplete buildinfo: no nvr."""
        caplog.set_level(logging.DEBUG)
        msg = deepcopy(self.sample_message)
        msg.body['release'] += '.testmissingnvr'
        self.handler(msg)
        assert any(r.levelno == logging.DEBUG
                   and r.getMessage().startswith("Koji build info for ")
                   and r.getMessage().endswith(" doesn't contain 'nvr'.")
                   for r in caplog.records)

    def test_incomplete_koji_buildinfo_owner(self, caplog):
        """Test koji returning incomplete buildinfo: no owner."""
        caplog.set_level(logging.DEBUG)
        msg = deepcopy(self.sample_message)
        msg.body['release'] += '.noowner'
        self.handler(msg)
        assert any(r.levelno == logging.DEBUG
                   and r.getMessage().startswith("Koji build info for ")
                   and r.getMessage().endswith(" doesn't contain 'owner_name'.")
                   for r in caplog.records)

    def test_missing_user(self, caplog):
        """Test Koji build user missing from DB."""
        caplog.set_level(logging.DEBUG)

        expected_username = base.buildsys.DevBuildsys._build_data['owner_name']

        # ensure user with expected name doesn't exist
        self.db.query(User).filter_by(name=expected_username).delete()
        self.db.flush()

        self.handler(self.sample_message)

        assert(f"Creating bodhi user for '{expected_username}'."
               in caplog.messages)

    def test_existing_user(self, caplog):
        """Test Koji build user existing in DB."""
        caplog.set_level(logging.DEBUG)

        expected_username = base.buildsys.DevBuildsys._build_data['owner_name']

        # ensure user with expected name exists
        user = self.db.query(User).filter_by(name=expected_username).first()
        if not user:
            user = User(name=expected_username)
            self.db.add(user)
        self.db.flush()

        # NOTE(review): self.handler() is never invoked in this test, so the
        # assertion below is vacuously true -- confirm whether a handler call
        # is missing before the assert.
        assert(f"Creating bodhi user for '{expected_username}'."
               not in caplog.messages)

    # Test messages that should be ignored.

    def test_ignored_tag(self, caplog):
        """Test messages re: tags not configured for automatic updates."""
        caplog.set_level(logging.DEBUG)
        msg = deepcopy(self.sample_message)
        bogus_tag = 'thisisntthetagyourelookingfor'
        msg.body['tag'] = bogus_tag
        # mock_sends() with no arguments asserts no messages are published.
        with mock_sends():
            self.handler(msg)
        assert any(x.startswith(f"Ignoring build being tagged into '{bogus_tag}'")
                   for x in caplog.messages)

    def test_duplicate_message(self, caplog):
        """Assert that duplicate messages ignore existing build/update."""
        caplog.set_level(logging.DEBUG)
        self.handler(self.sample_message)

        caplog.clear()

        with mock_sends():
            self.handler(self.sample_message)
        assert (f"Build, active update for {self.sample_nvr} exists already, skipping."
                in caplog.messages)

    @mock.patch.dict(config, [('automatic_updates_blacklist', ['lmacken'])])
    def test_user_in_blacklist(self, caplog):
        """Test that update not created if the koji build owner is in the blacklist"""
        caplog.set_level(logging.DEBUG)
        body = {
            'build_id': 4425622,
            'name': 'python-pants',
            'tag_id': 214,
            'instance': 's390',
            'tag': 'f17-updates-testing-pending',
            'user': 'lmacken',
            'version': '1.3.4',
            'owner': 'lmacken',
            'release': '1.fc26',
        }
        self.sample_message = Message(topic='', body=body)
        self.sample_nvr = f"{body['name']}-{body['version']}-{body['release']}"
        with mock_sends():
            self.handler(self.sample_message)
        assert (f"{self.sample_nvr} owned by lmacken who is listed in "
                "automatic_updates_blacklist, skipping." in caplog.messages)
|
Conan-Kudo/bodhi
|
bodhi/tests/server/consumers/test_automatic_updates.py
|
Python
|
gpl-2.0
| 18,330 | 0.00131 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2014 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""
"""
__author__ = 'Bitcraze AB'
__all__ = ['InputMux']
import os
import glob
import logging
from cflib.utils.callbacks import Caller
logger = logging.getLogger(__name__)
MAX_THRUST = 65000
class InputMux(object):
def __init__(self, input):
self._devs = []
self.name = "N/A"
self.input = input
self._prev_values = {}
# Roll/pitch limitation
self.max_rp_angle = 0
# Thrust limitations
self.thrust_slew_enabled = True
self.thrust_slew_limit = 0
self.thrust_slew_rate = 0
self.max_thrust = 0
self.max_yaw_rate = 0
self.springy_throttle = True
self.trim_roll = 0
self.trim_pitch = 0
self.has_pressure_sensor = False
# TODO: Fix writing these values
#self._max_rp_angle = 40
#self._springy_throttle = True
#self._thrust_slew_enabled = True
#self._thrust_slew_limit = 30
#self._thrust_slew_rate = 30
#self._min_thrust = 20000
#self._max_thrust = 50000
#self._max_yaw_rate = 400
#self._trim_roll = 0.0
#self._trim_pitch = 0.0
# Stateful things
self._old_thrust = 0
self._old_raw_thrust = 0
self._old_alt_hold = False
# TODO: Should these really be placed here?
#self.input_updated = Caller()
#self.rp_trim_updated = Caller()
#self.emergency_stop_updated = Caller()
#self.device_discovery = Caller()
#self.device_error = Caller()
#self.althold_updated = Caller()
#self.alt1_updated = Caller()
#self.alt2_updated = Caller()
def get_supported_dev_count(self):
return 1
def add_device(self, dev, parameters):
logger.info("Adding device and opening it")
dev.open()
self._devs.append(dev)
def remove_device(self, dev):
self._devs.remove(dev)
dev.close()
def close(self):
"""Close down the MUX and close all it's devices"""
for d in self._devs:
d.close()
self._devs = []
def _cap_rp(self, rp):
ret = rp * self.max_rp_angle
if ret > self.max_rp_angle:
ret = self.max_rp_angle
elif ret < -1 * self.max_rp_angle:
ret = -1 * self.max_rp_angle
return ret
def _scale_rp(self, roll, pitch):
return [self._cap_rp(roll), self._cap_rp(pitch)]
def _scale_and_deadband_yaw(self, yaw):
return InputMux.deadband(yaw, 0.2) * self.max_yaw_rate
def _limit_thrust(self, thrust, althold, emergency_stop):
# Thust limiting (slew, minimum and emergency stop)
if self.springy_throttle:
if althold and self.has_pressure_sensor:
thrust = int(round(InputMux.deadband(thrust, 0.2)*32767 + 32767)) #Convert to uint16
else:
if thrust < 0.05 or emergency_stop:
thrust = 0
else:
thrust = self.min_thrust + thrust * (self.max_thrust -
self.min_thrust)
if (self.thrust_slew_enabled == True and
self.thrust_slew_limit > thrust and not
emergency_stop):
if self._old_thrust > self.thrust_slew_limit:
self._old_thrust = self.thrust_slew_limit
if thrust < (self._old_thrust - (self.thrust_slew_rate / 100)):
thrust = self._old_thrust - self.thrust_slew_rate / 100
if thrust < 0 or thrust < self.min_thrust:
thrust = 0
else:
thrust = thrust / 2 + 0.5
if althold and self.has_pressure_sensor:
#thrust = int(round(JoystickReader.deadband(thrust,0.2)*32767 + 32767)) #Convert to uint16
thrust = 32767
else:
if thrust < -0.90 or emergency_stop:
thrust = 0
else:
thrust = self.min_thrust + thrust * (self.max_thrust -
self.min_thrust)
if (self.thrust_slew_enabled == True and
self.thrust_slew_limit > thrust and not
emergency_stop):
if self._old_thrust > self.thrust_slew_limit:
self._old_thrust = self.thrust_slew_limit
if thrust < (self._old_thrust - (self.thrust_slew_rate / 100)):
thrust = self._old_thrust - self.thrust_slew_rate / 100
if thrust < -1 or thrust < self.min_thrust:
thrust = 0
self._old_thrust = thrust
self._old_raw_thrust = thrust
return thrust
def set_alt_hold_available(self, available):
"""Set if altitude hold is available or not (depending on HW)"""
self.input._has_pressure_sensor = available
def enable_alt_hold(self, althold):
"""Enable or disable altitude hold"""
self._old_alt_hold = althold
def _check_toggle(self, key, data):
if not key in self._prev_values:
self._prev_values[key] = data
elif self._prev_values[key] != data:
self._prev_values[key] = data
return True
return False
def _update_alt_hold(self, value):
if self._check_toggle("althold", value):
self.input.althold_updated.call(str(value))
def _update_em_stop(self, value):
if self._check_toggle("estop", value):
self.input.emergency_stop_updated.call(value)
def _update_alt1(self, value):
if self._check_toggle("alt1", value):
self.input.alt1_updated.call(value)
def _update_alt2(self, value):
if self._check_toggle("alt2", value):
self.input.alt2_updated.call(value)
def _trim_rp(self, roll, pitch):
return [roll + self.trim_roll, pitch + self.trim_pitch]
@staticmethod
def p2t(percentage):
"""Convert a percentage to raw thrust"""
return int(MAX_THRUST * (percentage / 100.0))
@staticmethod
def deadband(value, threshold):
if abs(value) < threshold:
value = 0
elif value > 0:
value -= threshold
elif value < 0:
value += threshold
return value/(1-threshold)
def read(self):
return None
|
qrohlf/cf-client
|
lib/cfclient/utils/mux/__init__.py
|
Python
|
gpl-2.0
| 7,589 | 0.00448 |
import types
import functools
import unittest
from .agent import Config, Agent
# XXX bring into compliance with python 2.7 unittest api
class AssertRaisesContextManager(object):
    """Minimal stand-in for unittest's ``assertRaises`` context manager.

    Usage::

        with AssertRaisesContextManager(ValueError) as cm:
            ...  # code expected to raise ValueError
        # cm.exception holds the raised instance afterwards.

    Raises AssertionError if nothing was raised, or if an exception of a
    different type was raised. Note: unlike unittest, only an exact type
    match is accepted -- subclasses of the expected exception still fail.
    """

    def __init__(self, expected):
        self.expected = expected

    def __enter__(self):
        return self

    # Parameters renamed from (type, value, traceback) so they no longer
    # shadow the builtins; the with-statement passes them positionally.
    def __exit__(self, exc_type, exc_value, exc_tb):
        if exc_type is None:
            raise AssertionError('%s expected but not raised' % str(self.expected))
        if exc_type != self.expected:
            # Bug fix: the old message formatted self.expected.__class__,
            # which is always <class 'type'>; report the expected exception
            # class itself instead.
            raise AssertionError('%s expected, not `%s`' % (self.expected, str(exc_value)))
        self.exception = exc_value
        # silence exception
        return True
class WebTestCase(unittest.TestCase):
    """TestCase that drives HTTP requests through an Agent.

    A fresh agent is created in setUp; most assertion helpers simply
    delegate to the current agent.
    """
    def __init__(self, *args, **kwargs):
        super(WebTestCase, self).__init__(*args, **kwargs)
        # XXX does not inherit
        # Pick up the class-level config installed by the @config decorator,
        # falling back to a default Config.
        self.config = getattr(self.__class__, '_config', None) or Config()
    def setUp(self):
        super(WebTestCase, self).setUp()
        self._agent = self._create_agent()
    def _create_agent(self):
        # The agent class itself is configurable; defaults to Agent.
        kwargs = {}
        kwargs['config'] = self.config
        agent_class = self.config.agent_class or Agent
        return agent_class(**kwargs)
    def agent(self):
        # Build an extra, independent agent (e.g. for a second session).
        agent = self._create_agent()
        return agent
    @property
    def response(self):
        # Response of the last request made through the shared agent.
        return self._agent.response
    def request(self, method, url, *args, **kwargs):
        # When session management is disabled (@no_session), start each
        # request with a brand new agent so no state carries over.
        if hasattr(self, '_no_session') and self._no_session:
            self._agent = self._create_agent()
        return self._agent.request(method, url, *args, **kwargs)
    def get(self, url, *args, **kwargs):
        return self.request('get', url, *args, **kwargs)
    def post(self, url, *args, **kwargs):
        return self.request('post', url, *args, **kwargs)
    def follow_redirect(self):
        return self._agent.follow_redirect()
    def submit_form(self, form, elements=None):
        return self._agent.submit_form(form, elements)
    # XXX move to utu
    # XXX accept kwargs
    def assert_raises(self, expected, *args):
        # With a callable argument behaves like unittest.assertRaises;
        # without one, returns a context manager.
        if args:
            return self.assertRaises(expected, *args)
        else:
            return AssertRaisesContextManager(expected)
    def assert_status(self, code):
        self._agent.assert_status(code)
    def assert_redirected_to_uri(self, target):
        self._agent.assert_redirected_to_uri(target)
    def assert_redirected_to_url(self, target):
        self._agent.assert_redirected_to_url(target)
    def assert_response_cookie(self, name, **kwargs):
        self._agent.assert_response_cookie(name, **kwargs)
    def assert_not_response_cookie(self, name):
        self._agent.assert_not_response_cookie(name)
    def assert_cookie_jar_cookie(self, name, **kwargs):
        self._agent.assert_cookie_jar_cookie(name, **kwargs)
    def assert_not_cookie_jar_cookie(self, name):
        self._agent.assert_not_cookie_jar_cookie(name)
    @property
    def cookies(self):
        return self._agent.response.cookies
    @property
    def raw_headers(self):
        return self._agent.raw_headers
    @property
    def headers(self):
        return self._agent.headers
    @property
    def current_url(self):
        '''Contains the full URL for the last request made.
        None if no requests have been made.
        '''
        return self._agent.current_url
def no_session(cls):
    """Class decorator: mark *cls* so its tests skip session management.

    The WebTestCase request machinery checks the ``_no_session`` flag and
    creates a fresh agent per request when it is set.
    """
    setattr(cls, '_no_session', True)
    return cls
def config(**kwargs):
    '''Function and class decorator for setting configuration on test cases.

    Applied to a test method, the given config keys are set for the
    duration of that method only and restored afterwards. Applied to a
    test class, the keys are stored on a class-level Config picked up by
    WebTestCase.__init__.
    '''
    def decorator(cls_or_fn):
        if isinstance(cls_or_fn, types.FunctionType):
            fn = cls_or_fn
            @functools.wraps(fn)
            def decorated(self):
                # Save current values so they can be restored even if the
                # wrapped test raises.
                saved = {}
                for key in kwargs:
                    saved[key] = getattr(self.config, key)
                    setattr(self.config, key, kwargs[key])
                try:
                    fn(self)
                finally:
                    for key in kwargs:
                        setattr(self.config, key, saved[key])
            return decorated
        else:
            cls = cls_or_fn
            # Reuse an existing class config if one was already attached.
            config = getattr(cls, '_config', None) or Config()
            for name in kwargs:
                setattr(config, name, kwargs[name])
            cls._config = config
            return cls
    return decorator
|
p/webracer
|
webracer/testcase.py
|
Python
|
bsd-2-clause
| 4,545 | 0.007261 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# ObservationTools documentation build configuration file, created by
# sphinx-quickstart on Sun Apr 30 14:32:48 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.intersphinx',
              'sphinx.ext.todo',
              'sphinx.ext.coverage',
              'sphinx.ext.viewcode',
              'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
# NOTE: this project uses dot-prefixed directories ('.templates'/'.static')
# instead of the conventional '_templates'/'_static'.
templates_path = ['.templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'ObservationTools'
copyright = '2017, IA'
author = 'IA'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['.static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'ObservationToolsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'ObservationTools.tex', 'ObservationTools Documentation',
     'IA', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'observationtools', 'ObservationTools Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'ObservationTools', 'ObservationTools Documentation',
     author, 'ObservationTools', 'One line description of project.',
     'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
|
iastro-pt/ObservationTools
|
docs/source/conf.py
|
Python
|
mit
| 5,035 | 0.001192 |
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext

# Build the "capture" extension module by cythonizing capture.pyx.
capture_ext = Extension("capture", ["capture.pyx"])

setup(
    cmdclass={'build_ext': build_ext},
    ext_modules=[capture_ext],
)
|
hirolovesbeer/sekiwake
|
src/setup.py
|
Python
|
mit
| 223 | 0.017937 |
# -*- coding:utf-8 -*-
"""
# Author: Pegasus Wang (pegasuswang@qq.com, http://ningning.today)
# Created Time : Fri Feb 20 21:38:57 2015
# File Name: wechatService.py
# Description:
# :copyright: (c) 2015 by Pegasus Wang.
# :license: MIT, see LICENSE for more details.
"""
import json
import time
import urllib
import urllib2
from wechatUtil import MessageUtil
from wechatReply import TextReply
class RobotService(object):
    """Auto reply robot service backed by the Tuling chat-bot HTTP API."""
    # API key and endpoint for tuling123.com.
    KEY = 'd92d20bc1d8bb3cff585bf746603b2a9'
    url = 'http://www.tuling123.com/openapi/api'
    @staticmethod
    def auto_reply(req_info):
        """Return the bot's reply text for the user message *req_info*.

        *req_info* is a unicode string; ``<br>`` tags in the reply are
        converted to newlines.
        """
        query = {'key': RobotService.KEY, 'info': req_info.encode('utf-8')}
        headers = {'Content-type': 'text/html', 'charset': 'utf-8'}
        data = urllib.urlencode(query)
        # Bug fix: the headers dict was built but never attached to the
        # request; pass it so the declared content type is actually sent.
        req = urllib2.Request(RobotService.url, data, headers)
        f = urllib2.urlopen(req).read()
        return json.loads(f).get('text').replace('<br>', '\n')
        #return json.loads(f).get('text')
class WechatService(object):
    """Dispatch incoming WeChat messages and build a text reply."""
    @staticmethod
    def processRequest(request):
        """Process the different incoming message types.
        :param request: post request message (raw XML)
        :return: the reply serialized as an XML string
        """
        requestMap = MessageUtil.parseXml(request)
        fromUserName = requestMap.get(u'FromUserName')
        toUserName = requestMap.get(u'ToUserName')
        createTime = requestMap.get(u'CreateTime')
        msgType = requestMap.get(u'MsgType')
        msgId = requestMap.get(u'MsgId')
        # The reply swaps sender and receiver of the incoming message.
        textReply = TextReply()
        textReply.setToUserName(fromUserName)
        textReply.setFromUserName(toUserName)
        textReply.setCreateTime(time.time())
        textReply.setMsgType(MessageUtil.RESP_MESSAGE_TYPE_TEXT)
        if msgType == MessageUtil.REQ_MESSAGE_TYPE_TEXT:
            content = requestMap.get('Content').decode('utf-8') # note: decode first
            #respContent = u'您发送的是文本消息:' + content
            # Text messages are answered by the auto-reply robot.
            respContent = RobotService.auto_reply(content)
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_IMAGE:
            respContent = u'您发送的是图片消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_VOICE:
            respContent = u'您发送的是语音消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_VIDEO:
            respContent = u'您发送的是视频消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_LOCATION:
            respContent = u'您发送的是地理位置消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_LINK:
            respContent = u'您发送的是链接消息!'
        elif msgType == MessageUtil.REQ_MESSAGE_TYPE_EVENT:
            eventType = requestMap.get(u'Event')
            if eventType == MessageUtil.EVENT_TYPE_SUBSCRIBE:
                # Welcome message sent when a user subscribes.
                respContent = u'^_^谢谢您的关注,本公众号由王宁宁开发(python2.7+django1.4),如果你有兴趣继续开发,' \
                              u'可以联系我,就当打发时间了.'
            elif eventType == MessageUtil.EVENT_TYPE_UNSUBSCRIBE:
                pass
            elif eventType == MessageUtil.EVENT_TYPE_SCAN:
                # TODO
                pass
            elif eventType == MessageUtil.EVENT_TYPE_LOCATION:
                # TODO
                pass
            elif eventType == MessageUtil.EVENT_TYPE_CLICK:
                # TODO
                pass
        textReply.setContent(respContent)
        respXml = MessageUtil.class2xml(textReply)
        return respXml
        """
        if msgType == 'text':
            content = requestMap.get('Content')
            # TODO
        elif msgType == 'image':
            picUrl = requestMap.get('PicUrl')
            # TODO
        elif msgType == 'voice':
            mediaId = requestMap.get('MediaId')
            format = requestMap.get('Format')
            # TODO
        elif msgType == 'video':
            mediaId = requestMap.get('MediaId')
            thumbMediaId = requestMap.get('ThumbMediaId')
            # TODO
        elif msgType == 'location':
            lat = requestMap.get('Location_X')
            lng = requestMap.get('Location_Y')
            label = requestMap.get('Label')
            scale = requestMap.get('Scale')
            # TODO
        elif msgType == 'link':
            title = requestMap.get('Title')
            description = requestMap.get('Description')
            url = requestMap.get('Url')
        """
|
PegasusWang/WeiPython
|
wechat/wechatService.py
|
Python
|
mit
| 4,478 | 0.001638 |
from .app import App as _App
class UserApp(_App):
    '''
    An object based on the relationship between a user and an app. A subclass of :class:`steamfront.app.App`.
    This will not contain any of the attributes for :class:`steamfront.app.App` until :meth:`unlazify` has been called.
    Should not be called manually - will be automatically generated with a :class:`steamfront.user.User` instance.
    :param dict appdata: The app data that came from the API through the user.
    :param steamfront.user.User user: The user to whom the app belongs.
    :ivar player_id: A `str` containing the player's ID.
    :ivar play_time: An `int` containing how many hours the user has in the app.
    :ivar player: The :class:`steamfront.user.User` to whom the app belongs.
    :ivar lazy: A `bool` representing whether or not the object has all of its aspects from :class:`steamfront.app.App`.
    '''

    def __init__(self, appdata:dict, user, lazy=True):
        self.appid = str(appdata['appid'])
        self.play_time = appdata['playtime_forever']
        self.player_id = user.id64
        self.player = user
        # Only fetch the full app details eagerly when lazy loading was
        # explicitly disabled.
        if not lazy:
            super().__init__(self.appid)
        self.lazy = lazy

    def unlazify(self):
        '''
        To get all of the app attributes of an app, this must be called.
        '''
        self.lazy = False
        super().__init__(self.appid)
|
4Kaylum/Steamfront
|
steamfront/userapp.py
|
Python
|
mit
| 1,396 | 0.006447 |
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Julien Veyssier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from urlparse import urlsplit, parse_qsl, urlparse
from datetime import datetime, timedelta
from weboob.deprecated.browser import Browser, BrowserIncorrectPassword
from weboob.capabilities.bank import Transfer, TransferError
from .pages import LoginPage, LoginErrorPage, AccountsPage, UserSpacePage, EmptyPage, \
OperationsPage, CardPage, ComingPage, NoOperationsPage, InfoPage, \
TransfertPage, ChangePasswordPage, VerifCodePage
__all__ = ['CICBrowser']
# Browser
class CICBrowser(Browser):
    """Scraping browser for the CIC online-banking website.

    URL patterns in PAGES map site locations to page-parser classes; the
    account/history URLs depend on the user's sub-bank, detected after login.
    """
    PROTOCOL = 'https'
    DOMAIN = 'www.cic.fr'
    CERTHASH = '9f41522275058310a6fb348504daeadd16ae852a686a91383b10ad045da76d29'
    ENCODING = 'iso-8859-1'
    USER_AGENT = Browser.USER_AGENTS['wget']
    PAGES = {'https://www.cic.fr/.*/fr/banques/particuliers/index.html': LoginPage,
             'https://www.cic.fr/.*/fr/identification/default.cgi': LoginErrorPage,
             'https://www.cic.fr/.*/fr/banque/situation_financiere.cgi': AccountsPage,
             'https://www.cic.fr/.*/fr/banque/situation_financiere.html': AccountsPage,
             'https://www.cic.fr/.*/fr/banque/espace_personnel.aspx': UserSpacePage,
             'https://www.cic.fr/.*/fr/banque/mouvements.cgi.*': OperationsPage,
             'https://www.cic.fr/.*/fr/banque/mouvements.html.*': OperationsPage,
             'https://www.cic.fr/.*/fr/banque/mvts_instance.cgi.*': ComingPage,
             'https://www.cic.fr/.*/fr/banque/nr/nr_devbooster.aspx.*': OperationsPage,
             'https://www.cic.fr/.*/fr/banque/operations_carte\.cgi.*': CardPage,
             'https://www.cic.fr/.*/fr/banque/CR/arrivee\.asp.*': NoOperationsPage,
             'https://www.cic.fr/.*/fr/banque/BAD.*': InfoPage,
             'https://www.cic.fr/.*/fr/banque/.*Vir.*': TransfertPage,
             'https://www.cic.fr/.*/fr/validation/change_password.cgi': ChangePasswordPage,
             'https://www.cic.fr/.*/fr/validation/verif_code.cgi.*': VerifCodePage,
             'https://www.cic.fr/.*/fr/': EmptyPage,
             'https://www.cic.fr/.*/fr/banques/index.html': EmptyPage,
             'https://www.cic.fr/.*/fr/banque/paci_beware_of_phishing.html.*': EmptyPage,
             'https://www.cic.fr/.*/fr/validation/(?!change_password|verif_code).*': EmptyPage,
            }
    currentSubBank = None
    def is_logged(self):
        """True when the current page is neither the login nor the login-error page."""
        return not self.is_on_page(LoginPage) and not self.is_on_page(LoginErrorPage)
    def home(self):
        """Go to the public home page (login entry point)."""
        return self.location('https://www.cic.fr/sb/fr/banques/particuliers/index.html')
    def login(self):
        """Submit the login form and detect the user's sub-bank."""
        assert isinstance(self.username, basestring)
        assert isinstance(self.password, basestring)
        if not self.is_on_page(LoginPage):
            self.location('https://www.cic.fr/', no_login=True)
        self.page.login(self.username, self.password)
        if not self.is_logged() or self.is_on_page(LoginErrorPage):
            raise BrowserIncorrectPassword()
        self.getCurrentSubBank()
    def get_accounts_list(self):
        """Return the list of accounts parsed from the financial-situation page."""
        if not self.is_on_page(AccountsPage):
            self.location('https://www.cic.fr/%s/fr/banque/situation_financiere.cgi' % self.currentSubBank)
        return self.page.get_list()
    def get_account(self, id):
        """Return the account with the given id, or None if not found."""
        assert isinstance(id, basestring)
        l = self.get_accounts_list()
        for a in l:
            if a.id == id:
                return a
        return None
    def getCurrentSubBank(self):
        # the account list and history urls depend on the sub bank of the user
        url = urlparse(self.geturl())
        self.currentSubBank = url.path.lstrip('/').split('/')[0]
    def list_operations(self, page_url):
        """Yield operations from *page_url*, following 'next page' links."""
        if page_url.startswith('/') or page_url.startswith('https'):
            self.location(page_url)
        else:
            self.location('https://%s/%s/fr/banque/%s' % (self.DOMAIN, self.currentSubBank, page_url))
        go_next = True
        while go_next:
            if not self.is_on_page(OperationsPage):
                return
            for op in self.page.get_history():
                yield op
            go_next = self.page.go_next()
    def get_history(self, account):
        """Return all transactions of *account*, newest first.

        Merges the regular history, the coming operations and the deferred
        card operations; card operations after the last debit are flagged
        as coming.
        """
        transactions = []
        last_debit = None
        for tr in self.list_operations(account._link_id):
            # to prevent redundancy with card transactions, we do not
            # store 'RELEVE CARTE' transaction.
            if tr.raw != 'RELEVE CARTE':
                transactions.append(tr)
            elif last_debit is None:
                last_debit = (tr.date - timedelta(days=10)).month
        coming_link = self.page.get_coming_link() if self.is_on_page(OperationsPage) else None
        if coming_link is not None:
            for tr in self.list_operations(coming_link):
                transactions.append(tr)
        month = 0
        for card_link in account._card_links:
            v = urlsplit(card_link)
            args = dict(parse_qsl(v.query))
            # useful with 12 -> 1
            if int(args['mois']) < month:
                month = month + 1
            else:
                month = int(args['mois'])
            for tr in self.list_operations(card_link):
                if month > last_debit:
                    tr._is_coming = True
                transactions.append(tr)
        transactions.sort(key=lambda tr: tr.rdate, reverse=True)
        return transactions
    def transfer(self, account, to, amount, reason=None):
        """Execute a transfer between two of the user's accounts.

        Raises TransferError when the site reports a known failure message
        or when the expected confirmation text is missing.
        """
        # access the transfer page
        transfert_url = 'WI_VPLV_VirUniSaiCpt.asp?RAZ=ALL&Cat=6&PERM=N&CHX=A'
        self.location('https://%s/%s/fr/banque/%s' % (self.DOMAIN, self.currentSubBank, transfert_url))
        # fill the form
        self.select_form(name='FormVirUniSaiCpt')
        self['IDB'] = [account[-1]]
        self['ICR'] = [to[-1]]
        self['MTTVIR'] = '%s' % str(amount).replace('.', ',')
        if reason is not None:
            self['LIBDBT'] = reason
            self['LIBCRT'] = reason
        self.submit()
        # look for known errors
        content = unicode(self.response().get_data(), self.ENCODING)
        insufficient_amount_message = u'Montant insuffisant.'
        maximum_allowed_balance_message = u'Solde maximum autorisé dépassé.'
        if content.find(insufficient_amount_message) != -1:
            raise TransferError('The amount you tried to transfer is too low.')
        if content.find(maximum_allowed_balance_message) != -1:
            raise TransferError('The maximum allowed balance for the target account has been / would be reached.')
        # look for the known "all right" message
        ready_for_transfer_message = u'Confirmez un virement entre vos comptes'
        if not content.find(ready_for_transfer_message):
            raise TransferError('The expected message "%s" was not found.' % ready_for_transfer_message)
        # submit the confirmation form
        self.select_form(name='FormVirUniCnf')
        submit_date = datetime.now()
        self.submit()
        # look for the known "everything went well" message
        content = unicode(self.response().get_data(), self.ENCODING)
        transfer_ok_message = u'Votre virement a été exécuté ce jour'
        if not content.find(transfer_ok_message):
            raise TransferError('The expected message "%s" was not found.' % transfer_ok_message)
        # We now have to return a Transfer object
        transfer = Transfer(submit_date.strftime('%Y%m%d%H%M%S'))
        transfer.amount = amount
        transfer.origin = account
        transfer.recipient = to
        transfer.date = submit_date
        return transfer
|
laurent-george/weboob
|
modules/cic/browser.py
|
Python
|
agpl-3.0
| 8,378 | 0.003703 |
# -*- coding: utf-8 -*-
"""
聚类和EM算法
~~~~~~~~~~~~~~~~
聚类
:copyright: (c) 2016 by the huaxz1986.
:license: lgpl-3.0, see LICENSE for more details.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets.samples_generator import make_blobs
# from .agglomerative_clustering import test_AgglomerativeClustering,test_AgglomerativeClustering_nclusters,test_AgglomerativeClustering_linkage
# from .dbscan import test_DBSCAN,test_DBSCAN_epsilon,test_DBSCAN_min_samples
from chapters.Cluster_EM.gmm import test_GMM,test_GMM_cov_type,test_GMM_n_components
# from .kmeans import test_Kmeans,test_Kmeans_n_init,test_Kmeans_nclusters
def create_data(centers,num=100,std=0.7):
    '''
    Generate a data set for clustering.
    :param centers: array of cluster centers. If the centers are 2-D,
        every generated sample is 2-D as well.
    :param num: number of samples
    :param std: standard deviation of the samples within each cluster
    :return: the clustering data set: a tuple whose first element is the
        sample array and whose second element is the array of true
        cluster labels.
    '''
    X, labels_true = make_blobs(n_samples=num, centers=centers, cluster_std=std)
    return X,labels_true
def plot_data(*data):
    '''
    Plot the data set used for clustering.
    :param data: variadic argument; a tuple whose first element is the
        sample array and whose second element is the array of true
        cluster labels.
    :return: None
    '''
    X,labels_true=data
    labels=np.unique(labels_true)
    fig=plt.figure()
    ax=fig.add_subplot(1,1,1)
    colors='rgbyckm' # a different color for the samples of each cluster
    for i,label in enumerate(labels):
        position=labels_true==label
        ax.scatter(X[position,0],X[position,1],label="cluster %d"%label,
		color=colors[i%len(colors)])
    ax.legend(loc="best",framealpha=0.5)
    ax.set_xlabel("X[0]")
    ax.set_ylabel("Y[1]")
    ax.set_title("data")
    plt.show()
if __name__=='__main__':
    centers=[[1,1],[2,2],[1,2],[10,20]] # center points used to generate the clusters
    X,labels_true=create_data(centers,1000,0.5) # generate the data set for clustering
    # plot_data(X,labels_true) # plot the data set used for clustering
    # test_Kmeans(X,labels_true) # call test_Kmeans
    # test_Kmeans_nclusters(X,labels_true) # call test_Kmeans_nclusters
    # test_Kmeans_n_init(X,labels_true) # call test_Kmeans_n_init
    # test_DBSCAN(X,labels_true) # call test_DBSCAN
    # test_DBSCAN_epsilon(X,labels_true) # call test_DBSCAN_epsilon
    # test_DBSCAN_min_samples(X,labels_true) # call test_DBSCAN_min_samples
    # test_AgglomerativeClustering(X,labels_true) # call test_AgglomerativeClustering
    # test_AgglomerativeClustering_nclusters(X,labels_true) # call test_AgglomerativeClustering_nclusters
    # test_AgglomerativeClustering_linkage(X,labels_true) # call test_AgglomerativeClustering_linkage
    # test_GMM(X,labels_true) # call test_GMM
    # test_GMM_n_components(X,labels_true) # call test_GMM_n_components
    test_GMM_cov_type(X,labels_true) # call test_GMM_cov_type
|
huaxz1986/git_book
|
chapters/Cluster_EM/cluster.py
|
Python
|
gpl-3.0
| 3,182 | 0.021348 |
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for dashboard module. Separated here to break include loops."""
__author__ = 'Mike Gainer (mgainer@google.com)'
import os
import appengine_config
from controllers import sites
from models import vfs
# URL path of the dashboard's static resources, and its location on disk
# (relative to the application bundle root).
RESOURCES_PATH = '/modules/dashboard/resources'
RESOURCES_DIR = os.path.join(appengine_config.BUNDLE_ROOT,
                             RESOURCES_PATH.lstrip('/'))
def build_assets_url(tab_name):
    """Return the dashboard URL for the assets view of the given tab."""
    return '/dashboard?action=assets&tab={}'.format(tab_name)
def list_files(handler, subfolder, merge_local_files=False, all_paths=None):
    """Makes a list of files in a subfolder.
    Args:
        handler: webapp request handler.
        subfolder: string. Relative path of the subfolder to list.
        merge_local_files: boolean. If True, the returned list will
            contain files found on either the datastore filesystem or the
            read-only local filesystem. If a file is found on both, its
            datastore filesystem version will trump its local filesystem
            version.
        all_paths: list. A list of all file paths in the underlying file
            system.
    Returns:
        List of relative, normalized file path strings.
    """
    home = sites.abspath(handler.app_context.get_home_folder(), '/')
    _paths = None
    if all_paths is not None:
        # Caller supplied the full path list: filter it down to the subfolder.
        _paths = []
        for _path in all_paths:
            if _path.startswith(sites.abspath(
                    handler.app_context.get_home_folder(), subfolder)):
                _paths.append(_path)
        _paths = set(_paths)
    else:
        # Otherwise ask the (datastore-backed) filesystem directly.
        _paths = set(handler.app_context.fs.list(
            sites.abspath(handler.app_context.get_home_folder(), subfolder)))
    if merge_local_files:
        # Union in files from the read-only local filesystem; since this is
        # a set union, datastore entries win over duplicates.
        local_fs = vfs.LocalReadOnlyFileSystem(logical_home_folder='/')
        _paths = _paths.union(set([
            os.path.join(appengine_config.BUNDLE_ROOT, path) for path in
            local_fs.list(subfolder[1:])]))
    result = []
    for abs_filename in _paths:
        filename = os.path.relpath(abs_filename, home)
        result.append(vfs.AbstractFileSystem.normpath(filename))
    return sorted(result)
|
CSCI1200Course/csci1200OnlineCourse
|
modules/dashboard/utils.py
|
Python
|
apache-2.0
| 2,730 | 0 |
# -*- coding: utf-8 -*-
def social_blblbl(entity, argument):
    """Handler for the 'blblbl' social action; it always succeeds."""
    return True
#- Fine Funzione -
|
Onirik79/aaritmud
|
src/socials/social_blblbl.py
|
Python
|
gpl-2.0
| 97 | 0.010309 |
from __future__ import unicode_literals
import importlib
import os
import sys
from django.apps import apps
from django.utils import datetime_safe, six
from django.utils.six.moves import input
from .loader import MIGRATIONS_MODULE_NAME
class MigrationQuestioner(object):
    """
    Gives the autodetector responses to questions it might have.
    This base class has a built-in noninteractive mode, but the
    interactive subclass is what the command-line arguments will use.
    """

    def __init__(self, defaults=None, specified_apps=None, dry_run=None):
        self.defaults = defaults or {}
        self.specified_apps = specified_apps or set()
        self.dry_run = dry_run

    def ask_initial(self, app_label):
        "Should we create an initial migration for the app?"
        # If it was specified on the command line, definitely true
        if app_label in self.specified_apps:
            return True
        # Otherwise, we look to see if it has a migrations module
        # without any Python files in it, apart from __init__.py.
        # Apps from the new app template will have these; the python
        # file check will ensure we skip South ones.
        try:
            app_config = apps.get_app_config(app_label)
        except LookupError:         # It's a fake app.
            return self.defaults.get("ask_initial", False)
        migrations_import_path = "%s.%s" % (app_config.name, MIGRATIONS_MODULE_NAME)
        try:
            migrations_module = importlib.import_module(migrations_import_path)
        except ImportError:
            return self.defaults.get("ask_initial", False)
        else:
            if hasattr(migrations_module, "__file__"):
                filenames = os.listdir(os.path.dirname(migrations_module.__file__))
            elif hasattr(migrations_module, "__path__"):
                # Namespace package with multiple locations: ambiguous, bail out.
                if len(migrations_module.__path__) > 1:
                    return False
                filenames = os.listdir(list(migrations_module.__path__)[0])
            return not any(x.endswith(".py") for x in filenames if x != "__init__.py")

    def ask_not_null_addition(self, field_name, model_name):
        "Adding a NOT NULL field to a model"
        # None means quit
        return None

    def ask_rename(self, model_name, old_name, new_name, field_instance):
        "Was this field really renamed?"
        return self.defaults.get("ask_rename", False)

    def ask_rename_model(self, old_model_state, new_model_state):
        "Was this model really renamed?"
        return self.defaults.get("ask_rename_model", False)

    def ask_merge(self, app_label):
        "Do you really want to merge these migrations?"
        return self.defaults.get("ask_merge", False)
class InteractiveMigrationQuestioner(MigrationQuestioner):
    """Questioner that prompts the user on stdin for each decision."""

    def _boolean_input(self, question, default=None):
        # Keep prompting until the answer starts with 'y' or 'n'
        # (an empty answer returns the default, when one is given).
        result = input("%s " % question)
        if not result and default is not None:
            return default
        while len(result) < 1 or result[0].lower() not in "yn":
            result = input("Please answer yes or no: ")
        return result[0].lower() == "y"

    def _choice_input(self, question, choices):
        # Present a numbered menu and return the 1-based index chosen.
        print(question)
        for i, choice in enumerate(choices):
            print(" %s) %s" % (i + 1, choice))
        result = input("Select an option: ")
        while True:
            try:
                value = int(result)
                if 0 < value <= len(choices):
                    return value
            except ValueError:
                pass
            result = input("Please select a valid option: ")

    def ask_not_null_addition(self, field_name, model_name):
        "Adding a NOT NULL field to a model"
        if not self.dry_run:
            choice = self._choice_input(
                "You are trying to add a non-nullable field '%s' to %s without a default;\n" % (field_name, model_name) +
                "we can't do that (the database needs something to populate existing rows).\n" +
                "Please select a fix:",
                [
                    "Provide a one-off default now (will be set on all existing rows)",
                    "Quit, and let me add a default in models.py",
                ]
            )
            if choice == 2:
                sys.exit(3)
            else:
                print("Please enter the default value now, as valid Python")
                print("The datetime module is available, so you can do e.g. datetime.date.today()")
                while True:
                    if six.PY3:
                        # Six does not correctly abstract over the fact that
                        # py3 input returns a unicode string, while py2 raw_input
                        # returns a bytestring.
                        code = input(">>> ")
                    else:
                        code = input(">>> ").decode(sys.stdin.encoding)
                    if not code:
                        print("Please enter some code, or 'exit' (with no quotes) to exit.")
                    elif code == "exit":
                        sys.exit(1)
                    else:
                        try:
                            # Evaluate the user-provided expression with only
                            # the (safe) datetime module in scope.
                            return eval(code, {}, {"datetime": datetime_safe})
                        except (SyntaxError, NameError) as e:
                            print("Invalid input: %s" % e)
        return None

    def ask_rename(self, model_name, old_name, new_name, field_instance):
        "Was this field really renamed?"
        return self._boolean_input("Did you rename %s.%s to %s.%s (a %s)? [y/N]" % (model_name, old_name, model_name, new_name, field_instance.__class__.__name__), False)

    def ask_rename_model(self, old_model_state, new_model_state):
        "Was this model really renamed?"
        return self._boolean_input("Did you rename the %s.%s model to %s? [y/N]" % (old_model_state.app_label, old_model_state.name, new_model_state.name), False)

    def ask_merge(self, app_label):
        return self._boolean_input(
            "\nMerging will only work if the operations printed above do not conflict\n" +
            "with each other (working on different fields or models)\n" +
            "Do you want to merge these migration branches? [y/N]",
            False,
        )
domenicosolazzo/practice-django
|
venv/lib/python2.7/site-packages/django/db/migrations/questioner.py
|
Python
|
mit
| 6,265 | 0.001915 |
# Default value referenced by the settings test suite.
TEST_DEFAULT = 'defvalue'
|
scorphus/scrapy
|
tests/test_settings/default_settings.py
|
Python
|
bsd-3-clause
| 27 | 0 |
import json
from typing import TYPE_CHECKING, Optional
from boxsdk.util.text_enum import TextEnum
from boxsdk.exception import BoxAPIException
from .base_object import BaseObject
if TYPE_CHECKING:
from boxsdk.object.user import User
from boxsdk.object.terms_of_service_user_status import TermsOfServiceUserStatus
class TermsOfServiceType(TextEnum):
    """An enum of possible terms of service types"""
    # String values match the Box API's 'tos_type' field.
    MANAGED = 'managed'
    EXTERNAL = 'external'
class TermsOfServiceStatus(TextEnum):
    """An enum of possible terms of service status"""
    # String values match the Box API's 'status' field.
    ENABLED = 'enabled'
    DISABLED = 'disabled'
class TermsOfService(BaseObject):
    """Represents a Box terms of service."""
    _item_type = 'terms_of_service'

    def get_user_status(self, user: Optional['User'] = None) -> 'TermsOfServiceUserStatus':
        """
        Get the terms of service user status.

        :param user:
            This is the user to get the status of the terms of service for. This defaults to current
            user.
        :returns:
            A :class:`TermsOfServiceUserStatus` object
        """
        url = self._session.get_url('terms_of_service_user_statuses')
        additional_params = {
            'tos_id': self.object_id,
        }
        if user is not None:
            additional_params['user_id'] = user.object_id
        box_response = self._session.get(url, params=additional_params)
        response_object = box_response.json()
        # The endpoint returns a collection; only the first entry is relevant.
        response = response_object['entries'][0]
        return self.translator.translate(
            session=self._session,
            response_object=response,
        )

    def accept(self, user: Optional['User'] = None) -> 'TermsOfServiceUserStatus':
        """
        Accept a terms of service.

        :param user:
            The :class:`User` to assign the terms of service to.
        :returns:
            A newly created :class:`TermsOfServiceUserStatus` object
        """
        return self.set_user_status(is_accepted=True, user=user)

    def reject(self, user: Optional['User'] = None) -> 'TermsOfServiceUserStatus':
        """
        Reject a terms of service.

        :param user:
            The :class:`User` to assign the terms of service to.
        :returns:
            A newly created :class:`TermsOfServiceUserStatus` object
        """
        return self.set_user_status(is_accepted=False, user=user)

    def set_user_status(self, is_accepted: bool, user: Optional['User'] = None) -> 'TermsOfServiceUserStatus':
        """
        Create a terms of service user status.

        :param is_accepted:
            Indicates whether a use has accepted or rejected a terms of service.
        :param user:
            The :class:`User` to assign the terms of service to.
        :returns:
            A newly created :class:`TermsOfServiceUserStatus` object
        """
        url = self._session.get_url('terms_of_service_user_statuses')
        body = {
            'tos': {
                'type': self.object_type,
                'id': self.object_id,
            },
            'is_accepted': is_accepted,
        }
        if user is not None:
            body['user'] = {
                'type': user.object_type,
                'id': user.object_id,
            }
        translated_response = None
        try:
            box_response = self._session.post(url, data=json.dumps(body))
            response = box_response.json()
            translated_response = self.translator.translate(
                session=self._session,
                response_object=response,
            )
        except BoxAPIException as err:
            # 409 Conflict: a status already exists for this user, so update
            # the existing record instead of creating a new one.
            if err.status == 409:
                user_status = self.get_user_status(user)
                translated_response = user_status.update_info(data={'is_accepted': is_accepted})
        return translated_response
|
box/box-python-sdk
|
boxsdk/object/terms_of_service.py
|
Python
|
apache-2.0
| 3,849 | 0.002078 |
"""
WSGI config for myproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")
from django.core.wsgi import get_wsgi_application # flake8: noqa
application = get_wsgi_application()
|
Perkville/django-tastypie
|
docs/code/myproject/wsgi.py
|
Python
|
bsd-3-clause
| 409 | 0.002445 |
from django.db import models
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _, ugettext
STANDARD_EMAIL = "anonymous@readthedocs.org"
class UserProfile (models.Model):
    """Additional information about a User.
    """
    # One profile per user; reachable from a User as ``user.profile``.
    user = models.ForeignKey(User, verbose_name=_('User'), unique=True, related_name='profile')
    whitelisted = models.BooleanField(_('Whitelisted'))
    homepage = models.CharField(_('Homepage'), max_length=100, blank=True)
    # When False, VCS contributions are attributed to STANDARD_EMAIL instead
    # of the user's real address (see get_contribution_details).
    allow_email = models.BooleanField(_('Allow email'), help_text=_('Show your email on VCS contributions.'), default=True)
    def __unicode__(self):
        # Human-readable label, e.g. "jane's profile" (translated).
        return ugettext("%(username)s's profile") % {'username': self.user.username}
    def get_absolute_url(self):
        return ('profiles_profile_detail', (), {'username': self.user.username})
    # models.permalink resolves the (view name, args, kwargs) tuple above
    # through URL reversing.
    get_absolute_url = models.permalink(get_absolute_url)
    def get_contribution_details(self):
        """
        Gets the line to put into commits to attribute the author.

        Returns a tuple (name, email)
        """
        # Prefer the real name only when both parts are set; otherwise fall
        # back to the username.
        if self.user.first_name and self.user.last_name:
            name = '%s %s' % (self.user.first_name, self.user.last_name)
        else:
            name = self.user.username
        if self.allow_email:
            email = self.user.email
        else:
            email = STANDARD_EMAIL
        return (name, email)
@receiver(post_save, sender=User)
def create_profile(sender, **kwargs):
    # Automatically create an empty UserProfile the first time a User row is
    # saved.
    if kwargs['created'] is True:
        try:
            UserProfile.objects.create(user_id=kwargs['instance'].id)
        except DatabaseError:
            # Best-effort: a DatabaseError is swallowed rather than breaking
            # user creation.  NOTE(review): presumably covers the profile
            # table not existing yet (e.g. during initial migrations) --
            # confirm this is intentional.
            pass
|
ojii/readthedocs.org
|
readthedocs/core/models.py
|
Python
|
mit
| 1,768 | 0.002828 |
# -*- coding: utf-8 -*-
import pytest
from .utils import last_activity
@pytest.mark.usefixtures('versioning_manager', 'table_creator')
class TestActivityCreationWithColumnExclusion(object):
@pytest.fixture
def audit_trigger_creator(self, session, user_class):
session.execute(
'''SELECT audit_table('{0}', '{{"age"}}')'''.format(
user_class.__tablename__
)
)
@pytest.fixture
def user(self, session, user_class, audit_trigger_creator):
user = user_class(name='John', age=15)
session.add(user)
session.flush()
return user
def test_insert(self, user, connection):
activity = last_activity(connection)
assert activity['old_data'] == {}
assert activity['changed_data'] == {
'id': user.id,
'name': 'John'
}
assert activity['table_name'] == 'user'
assert activity['native_transaction_id'] > 0
assert activity['verb'] == 'insert'
def test_update(self, user, session):
user.name = 'Luke'
user.age = 18
session.flush()
activity = last_activity(session)
assert activity['changed_data'] == {'name': 'Luke'}
assert activity['old_data'] == {
'id': user.id,
'name': 'John',
}
assert activity['table_name'] == 'user'
assert activity['native_transaction_id'] > 0
assert activity['verb'] == 'update'
def test_delete(self, user, session):
session.delete(user)
session.flush()
activity = last_activity(session)
assert activity['changed_data'] == {}
assert activity['old_data'] == {
'id': user.id,
'name': 'John',
}
assert activity['table_name'] == 'user'
assert activity['native_transaction_id'] > 0
assert activity['verb'] == 'delete'
|
kvesteri/postgresql-audit
|
tests/test_sql_files.py
|
Python
|
bsd-2-clause
| 1,919 | 0 |
"""HTTP server classes.
Note: BaseHTTPRequestHandler doesn't implement any HTTP request; see
SimpleHTTPRequestHandler for simple implementations of GET, HEAD and POST,
and CGIHTTPRequestHandler for CGI scripts.
It does, however, optionally implement HTTP/1.1 persistent connections,
as of version 0.3.
Notes on CGIHTTPRequestHandler
------------------------------
This class implements GET and POST requests to cgi-bin scripts.
If the os.fork() function is not present (e.g. on Windows),
subprocess.Popen() is used as a fallback, with slightly altered semantics.
In all cases, the implementation is intentionally naive -- all
requests are executed synchronously.
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
-- it may execute arbitrary Python code or external programs.
Note that status code 200 is sent prior to execution of a CGI script, so
scripts cannot send other status codes such as 302 (redirect).
XXX To do:
- log requests even later (to capture byte count)
- log user-agent header and other interesting goodies
- send error log to separate file
"""
# See also:
#
# HTTP Working Group T. Berners-Lee
# INTERNET-DRAFT R. T. Fielding
# <draft-ietf-http-v10-spec-00.txt> H. Frystyk Nielsen
# Expires September 8, 1995 March 8, 1995
#
# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt
#
# and
#
# Network Working Group R. Fielding
# Request for Comments: 2616 et al
# Obsoletes: 2068 June 1999
# Category: Standards Track
#
# URL: http://www.faqs.org/rfcs/rfc2616.html
# Log files
# ---------
#
# Here's a quote from the NCSA httpd docs about log file format.
#
# | The logfile format is as follows. Each line consists of:
# |
# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb
# |
# | host: Either the DNS name or the IP number of the remote client
# | rfc931: Any information returned by identd for this person,
# | - otherwise.
# | authuser: If user sent a userid for authentication, the user name,
# | - otherwise.
# | DD: Day
# | Mon: Month (calendar name)
# | YYYY: Year
# | hh: hour (24-hour format, the machine's timezone)
# | mm: minutes
# | ss: seconds
# | request: The first line of the HTTP request as sent by the client.
# | ddd: the status code returned by the server, - if not available.
# | bbbb: the total number of bytes sent,
# | *not including the HTTP/1.0 header*, - if not available
# |
# | You can determine the name of the file accessed through request.
#
# (Actually, the latter is only true if you know the server configuration
# at the time the request was made!)
__version__ = "0.6"
__all__ = [
"HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler",
"SimpleHTTPRequestHandler", "CGIHTTPRequestHandler",
]
import copy
import datetime
import email.utils
import html
import http.client
import io
import mimetypes
import os
import posixpath
import select
import shutil
import socket # For gethostbyaddr()
import socketserver
import sys
import time
import urllib.parse
import contextlib
from functools import partial
from http import HTTPStatus
# Default error message template
DEFAULT_ERROR_MESSAGE = """\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
"http://www.w3.org/TR/html4/strict.dtd">
<html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8">
<title>Error response</title>
</head>
<body>
<h1>Error response</h1>
<p>Error code: %(code)d</p>
<p>Message: %(message)s.</p>
<p>Error code explanation: %(code)s - %(explain)s.</p>
</body>
</html>
"""
DEFAULT_ERROR_CONTENT_TYPE = "text/html;charset=utf-8"
class HTTPServer(socketserver.TCPServer):
    """TCP server that records its fully-qualified name and port on bind."""

    # Allows quick restarts in testing environments without waiting for
    # sockets in TIME_WAIT to expire.
    allow_reuse_address = 1

    def server_bind(self):
        """Bind the socket, then store the server name and port."""
        super().server_bind()
        name, port_number = self.server_address[:2]
        self.server_name = socket.getfqdn(name)
        self.server_port = port_number
class ThreadingHTTPServer(socketserver.ThreadingMixIn, HTTPServer):
    # Worker threads are daemonic so in-flight requests do not keep the
    # interpreter alive at shutdown.
    daemon_threads = True
class BaseHTTPRequestHandler(socketserver.StreamRequestHandler):
"""HTTP request handler base class.
The following explanation of HTTP serves to guide you through the
code as well as to expose any misunderstandings I may have about
HTTP (so you don't need to read the code to figure out I'm wrong
:-).
HTTP (HyperText Transfer Protocol) is an extensible protocol on
top of a reliable stream transport (e.g. TCP/IP). The protocol
recognizes three parts to a request:
1. One line identifying the request type and path
2. An optional set of RFC-822-style headers
3. An optional data part
The headers and data are separated by a blank line.
The first line of the request has the form
<command> <path> <version>
where <command> is a (case-sensitive) keyword such as GET or POST,
<path> is a string containing path information for the request,
and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
<path> is encoded using the URL encoding scheme (using %xx to signify
the ASCII character with hex code xx).
The specification specifies that lines are separated by CRLF but
for compatibility with the widest range of clients recommends
servers also handle LF. Similarly, whitespace in the request line
is treated sensibly (allowing multiple spaces between components
and allowing trailing whitespace).
Similarly, for output, lines ought to be separated by CRLF pairs
but most clients grok LF characters just fine.
If the first line of the request has the form
<command> <path>
(i.e. <version> is left out) then this is assumed to be an HTTP
0.9 request; this form has no optional headers and data part and
the reply consists of just the data.
The reply form of the HTTP 1.x protocol again has three parts:
1. One line giving the response code
2. An optional set of RFC-822-style headers
3. The data
Again, the headers and data are separated by a blank line.
The response code line has the form
<version> <responsecode> <responsestring>
where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
<responsecode> is a 3-digit response code indicating success or
failure of the request, and <responsestring> is an optional
human-readable string explaining what the response code means.
This server parses the request and the headers, and then calls a
function specific to the request type (<command>). Specifically,
a request SPAM will be handled by a method do_SPAM(). If no
such method exists the server sends an error response to the
client. If it exists, it is called with no arguments:
do_SPAM()
Note that the request name is case sensitive (i.e. SPAM and spam
are different requests).
The various request details are stored in instance variables:
- client_address is the client IP address in the form (host,
port);
- command, path and version are the broken-down request line;
- headers is an instance of email.message.Message (or a derived
class) containing the header information;
- rfile is a file object open for reading positioned at the
start of the optional input data part;
- wfile is a file object open for writing.
IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!
The first thing to be written must be the response line. Then
follow 0 or more header lines, then a blank line, and then the
actual data (if any). The meaning of the header lines depends on
the command executed by the server; in most cases, when data is
returned, there should be at least one header line of the form
Content-type: <type>/<subtype>
where <type> and <subtype> should be registered MIME types,
e.g. "text/html" or "text/plain".
"""
# The Python system version, truncated to its first component.
sys_version = "Python/" + sys.version.split()[0]
# The server software version. You may want to override this.
# The format is multiple whitespace-separated strings,
# where each string is of the form name[/version].
server_version = "BaseHTTP/" + __version__
error_message_format = DEFAULT_ERROR_MESSAGE
error_content_type = DEFAULT_ERROR_CONTENT_TYPE
# The default request version. This only affects responses up until
# the point where the request line is parsed, so it mainly decides what
# the client gets back when sending a malformed request line.
# Most web servers default to HTTP 0.9, i.e. don't send a status line.
default_request_version = "HTTP/0.9"
def parse_request(self):
"""Parse a request (internal).
The request should be stored in self.raw_requestline; the results
are in self.command, self.path, self.request_version and
self.headers.
Return True for success, False for failure; on failure, any relevant
error response has already been sent back.
"""
self.command = None # set in case of error on the first line
self.request_version = version = self.default_request_version
self.close_connection = True
requestline = str(self.raw_requestline, 'iso-8859-1')
requestline = requestline.rstrip('\r\n')
self.requestline = requestline
words = requestline.split()
if len(words) == 0:
return False
if len(words) >= 3: # Enough to determine protocol version
version = words[-1]
try:
if not version.startswith('HTTP/'):
raise ValueError
base_version_number = version.split('/', 1)[1]
version_number = base_version_number.split(".")
# RFC 2145 section 3.1 says there can be only one "." and
# - major and minor numbers MUST be treated as
# separate integers;
# - HTTP/2.4 is a lower version than HTTP/2.13, which in
# turn is lower than HTTP/12.3;
# - Leading zeros MUST be ignored by recipients.
if len(version_number) != 2:
raise ValueError
version_number = int(version_number[0]), int(version_number[1])
except (ValueError, IndexError):
self.send_error(
HTTPStatus.BAD_REQUEST,
"Bad request version (%r)" % version)
return False
if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
self.close_connection = False
if version_number >= (2, 0):
self.send_error(
HTTPStatus.HTTP_VERSION_NOT_SUPPORTED,
"Invalid HTTP version (%s)" % base_version_number)
return False
self.request_version = version
if not 2 <= len(words) <= 3:
self.send_error(
HTTPStatus.BAD_REQUEST,
"Bad request syntax (%r)" % requestline)
return False
command, path = words[:2]
if len(words) == 2:
self.close_connection = True
if command != 'GET':
self.send_error(
HTTPStatus.BAD_REQUEST,
"Bad HTTP/0.9 request type (%r)" % command)
return False
self.command, self.path = command, path
# Examine the headers and look for a Connection directive.
try:
self.headers = http.client.parse_headers(self.rfile,
_class=self.MessageClass)
except http.client.LineTooLong as err:
self.send_error(
HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE,
"Line too long",
str(err))
return False
except http.client.HTTPException as err:
self.send_error(
HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE,
"Too many headers",
str(err)
)
return False
conntype = self.headers.get('Connection', "")
if conntype.lower() == 'close':
self.close_connection = True
elif (conntype.lower() == 'keep-alive' and
self.protocol_version >= "HTTP/1.1"):
self.close_connection = False
# Examine the headers and look for an Expect directive
expect = self.headers.get('Expect', "")
if (expect.lower() == "100-continue" and
self.protocol_version >= "HTTP/1.1" and
self.request_version >= "HTTP/1.1"):
if not self.handle_expect_100():
return False
return True
def handle_expect_100(self):
"""Decide what to do with an "Expect: 100-continue" header.
If the client is expecting a 100 Continue response, we must
respond with either a 100 Continue or a final response before
waiting for the request body. The default is to always respond
with a 100 Continue. You can behave differently (for example,
reject unauthorized requests) by overriding this method.
This method should either return True (possibly after sending
a 100 Continue response) or send an error response and return
False.
"""
self.send_response_only(HTTPStatus.CONTINUE)
self.end_headers()
return True
def handle_one_request(self):
"""Handle a single HTTP request.
You normally don't need to override this method; see the class
__doc__ string for information on how to handle specific HTTP
commands such as GET and POST.
"""
try:
self.raw_requestline = self.rfile.readline(65537)
if len(self.raw_requestline) > 65536:
self.requestline = ''
self.request_version = ''
self.command = ''
self.send_error(HTTPStatus.REQUEST_URI_TOO_LONG)
return
if not self.raw_requestline:
self.close_connection = True
return
if not self.parse_request():
# An error code has been sent, just exit
return
mname = 'do_' + self.command
if not hasattr(self, mname):
self.send_error(
HTTPStatus.NOT_IMPLEMENTED,
"Unsupported method (%r)" % self.command)
return
method = getattr(self, mname)
method()
self.wfile.flush() #actually send the response if not already done.
except socket.timeout as e:
#a read or a write timed out. Discard this connection
self.log_error("Request timed out: %r", e)
self.close_connection = True
return
def handle(self):
"""Handle multiple requests if necessary."""
self.close_connection = True
self.handle_one_request()
while not self.close_connection:
self.handle_one_request()
def send_error(self, code, message=None, explain=None):
"""Send and log an error reply.
Arguments are
* code: an HTTP error code
3 digits
* message: a simple optional 1 line reason phrase.
*( HTAB / SP / VCHAR / %x80-FF )
defaults to short entry matching the response code
* explain: a detailed message defaults to the long entry
matching the response code.
This sends an error response (so it must be called before any
output has been generated), logs the error, and finally sends
a piece of HTML explaining the error to the user.
"""
try:
shortmsg, longmsg = self.responses[code]
except KeyError:
shortmsg, longmsg = '???', '???'
if message is None:
message = shortmsg
if explain is None:
explain = longmsg
self.log_error("code %d, message %s", code, message)
self.send_response(code, message)
self.send_header('Connection', 'close')
# Message body is omitted for cases described in:
# - RFC7230: 3.3. 1xx, 204(No Content), 304(Not Modified)
# - RFC7231: 6.3.6. 205(Reset Content)
body = None
if (code >= 200 and
code not in (HTTPStatus.NO_CONTENT,
HTTPStatus.RESET_CONTENT,
HTTPStatus.NOT_MODIFIED)):
# HTML encode to prevent Cross Site Scripting attacks
# (see bug #1100201)
content = (self.error_message_format % {
'code': code,
'message': html.escape(message, quote=False),
'explain': html.escape(explain, quote=False)
})
body = content.encode('UTF-8', 'replace')
self.send_header("Content-Type", self.error_content_type)
self.send_header('Content-Length', str(len(body)))
self.end_headers()
if self.command != 'HEAD' and body:
self.wfile.write(body)
def send_response(self, code, message=None):
"""Add the response header to the headers buffer and log the
response code.
Also send two standard headers with the server software
version and the current date.
"""
self.log_request(code)
self.send_response_only(code, message)
self.send_header('Server', self.version_string())
self.send_header('Date', self.date_time_string())
def send_response_only(self, code, message=None):
"""Send the response header only."""
if self.request_version != 'HTTP/0.9':
if message is None:
if code in self.responses:
message = self.responses[code][0]
else:
message = ''
if not hasattr(self, '_headers_buffer'):
self._headers_buffer = []
self._headers_buffer.append(("%s %d %s\r\n" %
(self.protocol_version, code, message)).encode(
'latin-1', 'strict'))
def send_header(self, keyword, value):
"""Send a MIME header to the headers buffer."""
if self.request_version != 'HTTP/0.9':
if not hasattr(self, '_headers_buffer'):
self._headers_buffer = []
self._headers_buffer.append(
("%s: %s\r\n" % (keyword, value)).encode('latin-1', 'strict'))
if keyword.lower() == 'connection':
if value.lower() == 'close':
self.close_connection = True
elif value.lower() == 'keep-alive':
self.close_connection = False
def end_headers(self):
"""Send the blank line ending the MIME headers."""
if self.request_version != 'HTTP/0.9':
self._headers_buffer.append(b"\r\n")
self.flush_headers()
def flush_headers(self):
if hasattr(self, '_headers_buffer'):
self.wfile.write(b"".join(self._headers_buffer))
self._headers_buffer = []
def log_request(self, code='-', size='-'):
"""Log an accepted request.
This is called by send_response().
"""
if isinstance(code, HTTPStatus):
code = code.value
self.log_message('"%s" %s %s',
self.requestline, str(code), str(size))
def log_error(self, format, *args):
"""Log an error.
This is called when a request cannot be fulfilled. By
default it passes the message on to log_message().
Arguments are the same as for log_message().
XXX This should go to the separate error log.
"""
self.log_message(format, *args)
def log_message(self, format, *args):
"""Log an arbitrary message.
This is used by all other logging functions. Override
it if you have specific logging wishes.
The first argument, FORMAT, is a format string for the
message to be logged. If the format string contains
any % escapes requiring parameters, they should be
specified as subsequent arguments (it's just like
printf!).
The client ip and current date/time are prefixed to
every message.
"""
sys.stderr.write("%s - - [%s] %s\n" %
(self.address_string(),
self.log_date_time_string(),
format%args))
def version_string(self):
"""Return the server software version string."""
return self.server_version + ' ' + self.sys_version
def date_time_string(self, timestamp=None):
"""Return the current date and time formatted for a message header."""
if timestamp is None:
timestamp = time.time()
return email.utils.formatdate(timestamp, usegmt=True)
def log_date_time_string(self):
"""Return the current time formatted for logging."""
now = time.time()
year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
s = "%02d/%3s/%04d %02d:%02d:%02d" % (
day, self.monthname[month], year, hh, mm, ss)
return s
weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
monthname = [None,
'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
def address_string(self):
"""Return the client address."""
return self.client_address[0]
# Essentially static class variables
# The version of the HTTP protocol we support.
# Set this to HTTP/1.1 to enable automatic keepalive
protocol_version = "HTTP/1.0"
# MessageClass used to parse headers
MessageClass = http.client.HTTPMessage
# hack to maintain backwards compatibility
responses = {
v: (v.phrase, v.description)
for v in HTTPStatus.__members__.values()
}
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
"""Simple HTTP request handler with GET and HEAD commands.
This serves files from the current directory and any of its
subdirectories. The MIME type for files is determined by
calling the .guess_type() method.
The GET and HEAD requests are identical except that the HEAD
request omits the actual contents of the file.
"""
server_version = "SimpleHTTP/" + __version__
def __init__(self, *args, directory=None, **kwargs):
if directory is None:
directory = os.getcwd()
self.directory = directory
super().__init__(*args, **kwargs)
def do_GET(self):
"""Serve a GET request."""
f = self.send_head()
if f:
try:
self.copyfile(f, self.wfile)
finally:
f.close()
def do_HEAD(self):
"""Serve a HEAD request."""
f = self.send_head()
if f:
f.close()
def send_head(self):
"""Common code for GET and HEAD commands.
This sends the response code and MIME headers.
Return value is either a file object (which has to be copied
to the outputfile by the caller unless the command was HEAD,
and must be closed by the caller under all circumstances), or
None, in which case the caller has nothing further to do.
"""
path = self.translate_path(self.path)
f = None
if os.path.isdir(path):
parts = urllib.parse.urlsplit(self.path)
if not parts.path.endswith('/'):
# redirect browser - doing basically what apache does
self.send_response(HTTPStatus.MOVED_PERMANENTLY)
new_parts = (parts[0], parts[1], parts[2] + '/',
parts[3], parts[4])
new_url = urllib.parse.urlunsplit(new_parts)
self.send_header("Location", new_url)
self.end_headers()
return None
for index in "index.html", "index.htm":
index = os.path.join(path, index)
if os.path.exists(index):
path = index
break
else:
return self.list_directory(path)
ctype = self.guess_type(path)
# check for trailing "/" which should return 404. See Issue17324
# The test for this was added in test_httpserver.py
# However, some OS platforms accept a trailingSlash as a filename
# See discussion on python-dev and Issue34711 regarding
# parseing and rejection of filenames with a trailing slash
if path.endswith("/"):
self.send_error(HTTPStatus.NOT_FOUND, "File not found")
return None
try:
f = open(path, 'rb')
except OSError:
self.send_error(HTTPStatus.NOT_FOUND, "File not found")
return None
try:
fs = os.fstat(f.fileno())
# Use browser cache if possible
if ("If-Modified-Since" in self.headers
and "If-None-Match" not in self.headers):
# compare If-Modified-Since and time of last file modification
try:
ims = email.utils.parsedate_to_datetime(
self.headers["If-Modified-Since"])
except (TypeError, IndexError, OverflowError, ValueError):
# ignore ill-formed values
pass
else:
if ims.tzinfo is None:
# obsolete format with no timezone, cf.
# https://tools.ietf.org/html/rfc7231#section-7.1.1.1
ims = ims.replace(tzinfo=datetime.timezone.utc)
if ims.tzinfo is datetime.timezone.utc:
# compare to UTC datetime of last modification
last_modif = datetime.datetime.fromtimestamp(
fs.st_mtime, datetime.timezone.utc)
# remove microseconds, like in If-Modified-Since
last_modif = last_modif.replace(microsecond=0)
if last_modif <= ims:
self.send_response(HTTPStatus.NOT_MODIFIED)
self.end_headers()
f.close()
return None
self.send_response(HTTPStatus.OK)
self.send_header("Content-type", ctype)
self.send_header("Content-Length", str(fs[6]))
self.send_header("Last-Modified",
self.date_time_string(fs.st_mtime))
self.end_headers()
return f
except:
f.close()
raise
def list_directory(self, path):
"""Helper to produce a directory listing (absent index.html).
Return value is either a file object, or None (indicating an
error). In either case, the headers are sent, making the
interface the same as for send_head().
"""
try:
list = os.listdir(path)
except OSError:
self.send_error(
HTTPStatus.NOT_FOUND,
"No permission to list directory")
return None
list.sort(key=lambda a: a.lower())
r = []
try:
displaypath = urllib.parse.unquote(self.path,
errors='surrogatepass')
except UnicodeDecodeError:
displaypath = urllib.parse.unquote(path)
displaypath = html.escape(displaypath, quote=False)
enc = sys.getfilesystemencoding()
title = 'Directory listing for %s' % displaypath
r.append('<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" '
'"http://www.w3.org/TR/html4/strict.dtd">')
r.append('<html>\n<head>')
r.append('<meta http-equiv="Content-Type" '
'content="text/html; charset=%s">' % enc)
r.append('<title>%s</title>\n</head>' % title)
r.append('<body>\n<h1>%s</h1>' % title)
r.append('<hr>\n<ul>')
for name in list:
fullname = os.path.join(path, name)
displayname = linkname = name
# Append / for directories or @ for symbolic links
if os.path.isdir(fullname):
displayname = name + "/"
linkname = name + "/"
if os.path.islink(fullname):
displayname = name + "@"
# Note: a link to a directory displays with @ and links with /
r.append('<li><a href="%s">%s</a></li>'
% (urllib.parse.quote(linkname,
errors='surrogatepass'),
html.escape(displayname, quote=False)))
r.append('</ul>\n<hr>\n</body>\n</html>\n')
encoded = '\n'.join(r).encode(enc, 'surrogateescape')
f = io.BytesIO()
f.write(encoded)
f.seek(0)
self.send_response(HTTPStatus.OK)
self.send_header("Content-type", "text/html; charset=%s" % enc)
self.send_header("Content-Length", str(len(encoded)))
self.end_headers()
return f
def translate_path(self, path):
"""Translate a /-separated PATH to the local filename syntax.
Components that mean special things to the local file system
(e.g. drive or directory names) are ignored. (XXX They should
probably be diagnosed.)
"""
# abandon query parameters
path = path.split('?',1)[0]
path = path.split('#',1)[0]
# Don't forget explicit trailing slash when normalizing. Issue17324
trailing_slash = path.rstrip().endswith('/')
try:
path = urllib.parse.unquote(path, errors='surrogatepass')
except UnicodeDecodeError:
path = urllib.parse.unquote(path)
path = posixpath.normpath(path)
words = path.split('/')
words = filter(None, words)
path = self.directory
for word in words:
if os.path.dirname(word) or word in (os.curdir, os.pardir):
# Ignore components that are not a simple file/directory name
continue
path = os.path.join(path, word)
if trailing_slash:
path += '/'
return path
def copyfile(self, source, outputfile):
"""Copy all data between two file objects.
The SOURCE argument is a file object open for reading
(or anything with a read() method) and the DESTINATION
argument is a file object open for writing (or
anything with a write() method).
The only reason for overriding this would be to change
the block size or perhaps to replace newlines by CRLF
-- note however that this the default server uses this
to copy binary data as well.
"""
shutil.copyfileobj(source, outputfile)
    def guess_type(self, path):
        """Guess the type of a file.

        Argument is a PATH (a filename).

        Return value is a string of the form type/subtype,
        usable for a MIME Content-type header.

        The default implementation looks the file's extension
        up in the table self.extensions_map, using application/octet-stream
        as a default; however it would be permissible (if
        slow) to look inside the data to make a better guess.
        """
        base, ext = posixpath.splitext(path)
        # Look the extension up exactly as given first, so case-sensitive
        # entries in extensions_map take precedence; then retry lowercased.
        if ext in self.extensions_map:
            return self.extensions_map[ext]
        ext = ext.lower()
        if ext in self.extensions_map:
            return self.extensions_map[ext]
        else:
            # The '' key holds the fallback type (application/octet-stream).
            return self.extensions_map['']
if not mimetypes.inited:
mimetypes.init() # try to read system mime.types
extensions_map = mimetypes.types_map.copy()
extensions_map.update({
'': 'application/octet-stream', # Default
'.py': 'text/plain',
'.c': 'text/plain',
'.h': 'text/plain',
})
# Utilities for CGIHTTPRequestHandler
def _url_collapse_path(path):
"""
Given a URL path, remove extra '/'s and '.' path elements and collapse
any '..' references and returns a collapsed path.
Implements something akin to RFC-2396 5.2 step 6 to parse relative paths.
The utility of this function is limited to is_cgi method and helps
preventing some security attacks.
Returns: The reconstituted URL, which will always start with a '/'.
Raises: IndexError if too many '..' occur within the path.
"""
# Query component should not be involved.
path, _, query = path.partition('?')
path = urllib.parse.unquote(path)
# Similar to os.path.split(os.path.normpath(path)) but specific to URL
# path semantics rather than local operating system semantics.
path_parts = path.split('/')
head_parts = []
for part in path_parts[:-1]:
if part == '..':
head_parts.pop() # IndexError if more '..' than prior parts
elif part and part != '.':
head_parts.append( part )
if path_parts:
tail_part = path_parts.pop()
if tail_part:
if tail_part == '..':
head_parts.pop()
tail_part = ''
elif tail_part == '.':
tail_part = ''
else:
tail_part = ''
if query:
tail_part = '?'.join((tail_part, query))
splitpath = ('/' + '/'.join(head_parts), tail_part)
collapsed_path = "/".join(splitpath)
return collapsed_path
# Module-level cache for nobody_uid(); stays None until first computed.
nobody = None
def nobody_uid():
    """Internal routine to get nobody's uid.

    Returns -1 when the pwd module is unavailable (non-POSIX).  If no
    'nobody' account exists, returns max(existing uids) + 1.  The result
    is cached in the module-level `nobody`.
    """
    global nobody
    if nobody:
        return nobody
    try:
        import pwd
    except ImportError:
        return -1
    try:
        nobody = pwd.getpwnam('nobody')[2]
    except KeyError:
        nobody = 1 + max(x[2] for x in pwd.getpwall())
    return nobody
def executable(path):
    """Test for executable file.

    True if *path* exists and the current process may execute it
    (os.X_OK check).
    """
    return os.access(path, os.X_OK)
class CGIHTTPRequestHandler(SimpleHTTPRequestHandler):
    """Complete HTTP server with GET, HEAD and POST commands.
    GET and HEAD also support running CGI scripts.
    The POST command is *only* implemented for CGI scripts.
    """
    # Determine platform specifics
    have_fork = hasattr(os, 'fork')
    # Make rfile unbuffered -- we need to read one line and then pass
    # the rest to a subprocess, so we can't use buffered input.
    rbufsize = 0
    def do_POST(self):
        """Serve a POST request.
        This is only implemented for CGI scripts.
        """
        if self.is_cgi():
            self.run_cgi()
        else:
            self.send_error(
                HTTPStatus.NOT_IMPLEMENTED,
                "Can only POST to CGI scripts")
    def send_head(self):
        """Version of send_head that supports CGI scripts."""
        if self.is_cgi():
            return self.run_cgi()
        else:
            return SimpleHTTPRequestHandler.send_head(self)
    def is_cgi(self):
        """Test whether self.path corresponds to a CGI script.
        Returns True and updates the cgi_info attribute to the tuple
        (dir, rest) if self.path requires running a CGI script.
        Returns False otherwise.
        If any exception is raised, the caller should assume that
        self.path was rejected as invalid and act accordingly.
        The default implementation tests whether the normalized url
        path begins with one of the strings in self.cgi_directories
        (and the next character is a '/' or the end of the string).
        """
        collapsed_path = _url_collapse_path(self.path)
        # Split at the first '/' after the leading one: everything before
        # it is the candidate CGI directory, everything after is the
        # script name (plus PATH_INFO / query string).
        dir_sep = collapsed_path.find('/', 1)
        head, tail = collapsed_path[:dir_sep], collapsed_path[dir_sep+1:]
        if head in self.cgi_directories:
            self.cgi_info = head, tail
            return True
        return False
    cgi_directories = ['/cgi-bin', '/htbin']
    def is_executable(self, path):
        """Test whether argument path is an executable file."""
        return executable(path)
    def is_python(self, path):
        """Test whether argument path is a Python script."""
        head, tail = os.path.splitext(path)
        return tail.lower() in (".py", ".pyw")
    def run_cgi(self):
        """Execute a CGI script."""
        dir, rest = self.cgi_info
        path = dir + '/' + rest
        i = path.find('/', len(dir)+1)
        # Move directory components from `rest` into `dir` for as long as
        # they name real directories on disk.
        while i >= 0:
            nextdir = path[:i]
            nextrest = path[i+1:]
            scriptdir = self.translate_path(nextdir)
            if os.path.isdir(scriptdir):
                dir, rest = nextdir, nextrest
                i = path.find('/', len(dir)+1)
            else:
                break
        # find an explicit query string, if present.
        rest, _, query = rest.partition('?')
        # dissect the part after the directory name into a script name &
        # a possible additional path, to be stored in PATH_INFO.
        i = rest.find('/')
        if i >= 0:
            script, rest = rest[:i], rest[i:]
        else:
            script, rest = rest, ''
        scriptname = dir + '/' + script
        scriptfile = self.translate_path(scriptname)
        if not os.path.exists(scriptfile):
            self.send_error(
                HTTPStatus.NOT_FOUND,
                "No such CGI script (%r)" % scriptname)
            return
        if not os.path.isfile(scriptfile):
            self.send_error(
                HTTPStatus.FORBIDDEN,
                "CGI script is not a plain file (%r)" % scriptname)
            return
        ispy = self.is_python(scriptname)
        if self.have_fork or not ispy:
            # Scripts run via exec (everything except Python-on-Windows)
            # must carry the execute bit.
            if not self.is_executable(scriptfile):
                self.send_error(
                    HTTPStatus.FORBIDDEN,
                    "CGI script is not executable (%r)" % scriptname)
                return
        # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
        # XXX Much of the following could be prepared ahead of time!
        env = copy.deepcopy(os.environ)
        env['SERVER_SOFTWARE'] = self.version_string()
        env['SERVER_NAME'] = self.server.server_name
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['SERVER_PROTOCOL'] = self.protocol_version
        env['SERVER_PORT'] = str(self.server.server_port)
        env['REQUEST_METHOD'] = self.command
        uqrest = urllib.parse.unquote(rest)
        env['PATH_INFO'] = uqrest
        env['PATH_TRANSLATED'] = self.translate_path(uqrest)
        env['SCRIPT_NAME'] = scriptname
        if query:
            env['QUERY_STRING'] = query
        env['REMOTE_ADDR'] = self.client_address[0]
        authorization = self.headers.get("authorization")
        if authorization:
            authorization = authorization.split()
            if len(authorization) == 2:
                import base64, binascii
                env['AUTH_TYPE'] = authorization[0]
                if authorization[0].lower() == "basic":
                    try:
                        authorization = authorization[1].encode('ascii')
                        authorization = base64.decodebytes(authorization).\
                                        decode('ascii')
                    except (binascii.Error, UnicodeError):
                        pass
                    else:
                        authorization = authorization.split(':')
                        if len(authorization) == 2:
                            env['REMOTE_USER'] = authorization[0]
        # XXX REMOTE_IDENT
        if self.headers.get('content-type') is None:
            env['CONTENT_TYPE'] = self.headers.get_content_type()
        else:
            env['CONTENT_TYPE'] = self.headers['content-type']
        length = self.headers.get('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        referer = self.headers.get('referer')
        if referer:
            env['HTTP_REFERER'] = referer
        accept = []
        for line in self.headers.getallmatchingheaders('accept'):
            if line[:1] in "\t\n\r ":
                # Continuation line of a folded header value.
                accept.append(line.strip())
            else:
                # len('accept:') == 7 -- strip the header name prefix.
                accept = accept + line[7:].split(',')
        env['HTTP_ACCEPT'] = ','.join(accept)
        ua = self.headers.get('user-agent')
        if ua:
            env['HTTP_USER_AGENT'] = ua
        co = filter(None, self.headers.get_all('cookie', []))
        cookie_str = ', '.join(co)
        if cookie_str:
            env['HTTP_COOKIE'] = cookie_str
        # XXX Other HTTP_* headers
        # Since we're setting the env in the parent, provide empty
        # values to override previously set values
        for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
                  'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
            env.setdefault(k, "")
        self.send_response(HTTPStatus.OK, "Script output follows")
        self.flush_headers()
        decoded_query = query.replace('+', ' ')
        if self.have_fork:
            # Unix -- fork as we should
            args = [script]
            if '=' not in decoded_query:
                args.append(decoded_query)
            nobody = nobody_uid()
            self.wfile.flush() # Always flush before forking
            pid = os.fork()
            if pid != 0:
                # Parent
                pid, sts = os.waitpid(pid, 0)
                # throw away additional data [see bug #427345]
                while select.select([self.rfile], [], [], 0)[0]:
                    if not self.rfile.read(1):
                        break
                if sts:
                    self.log_error("CGI script exit status %#x", sts)
                return
            # Child
            try:
                try:
                    os.setuid(nobody)
                except OSError:
                    pass
                os.dup2(self.rfile.fileno(), 0)
                os.dup2(self.wfile.fileno(), 1)
                os.execve(scriptfile, args, env)
            except:
                self.server.handle_error(self.request, self.client_address)
                # Exit the child immediately; never fall back into the
                # parent's request-handling machinery.
                os._exit(127)
        else:
            # Non-Unix -- use subprocess
            import subprocess
            cmdline = [scriptfile]
            if self.is_python(scriptfile):
                interp = sys.executable
                if interp.lower().endswith("w.exe"):
                    # On Windows, use python.exe, not pythonw.exe
                    interp = interp[:-5] + interp[-4:]
                cmdline = [interp, '-u'] + cmdline
            if '=' not in query:
                cmdline.append(query)
            self.log_message("command: %s", subprocess.list2cmdline(cmdline))
            try:
                nbytes = int(length)
            except (TypeError, ValueError):
                nbytes = 0
            p = subprocess.Popen(cmdline,
                                 stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 env = env
                                 )
            if self.command.lower() == "post" and nbytes > 0:
                data = self.rfile.read(nbytes)
            else:
                data = None
            # throw away additional data [see bug #427345]
            # NOTE(review): assumes self.rfile wraps a socket exposing
            # ._sock -- confirm for non-socket rfile implementations.
            while select.select([self.rfile._sock], [], [], 0)[0]:
                if not self.rfile._sock.recv(1):
                    break
            stdout, stderr = p.communicate(data)
            self.wfile.write(stdout)
            if stderr:
                self.log_error('%s', stderr)
            p.stderr.close()
            p.stdout.close()
            status = p.returncode
            if status:
                self.log_error("CGI script exit status %#x", status)
            else:
                self.log_message("CGI script exited OK")
def _get_best_family(*address):
infos = socket.getaddrinfo(
*address,
type=socket.SOCK_STREAM,
flags=socket.AI_PASSIVE,
)
family, type, proto, canonname, sockaddr = next(iter(infos))
return family, sockaddr
def test(HandlerClass=BaseHTTPRequestHandler,
         ServerClass=ThreadingHTTPServer,
         protocol="HTTP/1.0", port=8000, bind=None):
    """Test the HTTP request handler class.

    Binds to *bind*:*port* (default: all interfaces, port 8000) and
    serves requests with HandlerClass speaking *protocol* until
    interrupted with Ctrl-C.
    """
    ServerClass.address_family, addr = _get_best_family(bind, port)
    HandlerClass.protocol_version = protocol
    with ServerClass(addr, HandlerClass) as httpd:
        bound_host, bound_port = httpd.socket.getsockname()[:2]
        # IPv6 literals need brackets in a URL.
        display_host = f'[{bound_host}]' if ':' in bound_host else bound_host
        print(
            f"Serving HTTP on {bound_host} port {bound_port} "
            f"(http://{display_host}:{bound_port}/) ..."
        )
        try:
            httpd.serve_forever()
        except KeyboardInterrupt:
            print("\nKeyboard interrupt received, exiting.")
            sys.exit(0)
if __name__ == '__main__':
    import argparse
    # Command-line front end: plain file server by default, CGI-enabled
    # with --cgi.
    parser = argparse.ArgumentParser()
    parser.add_argument('--cgi', action='store_true',
                       help='Run as CGI Server')
    parser.add_argument('--bind', '-b', metavar='ADDRESS',
                        help='Specify alternate bind address '
                             '[default: all interfaces]')
    parser.add_argument('--directory', '-d', default=os.getcwd(),
                        help='Specify alternative directory '
                             '[default:current directory]')
    parser.add_argument('port', action='store',
                        default=8000, type=int,
                        nargs='?',
                        help='Specify alternate port [default: 8000]')
    args = parser.parse_args()
    if args.cgi:
        handler_class = CGIHTTPRequestHandler
    else:
        # Bind the chosen root directory into the handler via partial.
        handler_class = partial(SimpleHTTPRequestHandler,
                                directory=args.directory)
    # ensure dual-stack is not disabled; ref #38907
    class DualStackServer(ThreadingHTTPServer):
        def server_bind(self):
            # suppress exception when protocol is IPv4
            with contextlib.suppress(Exception):
                self.socket.setsockopt(
                    socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
            return super().server_bind()
    test(
        HandlerClass=handler_class,
        ServerClass=DualStackServer,
        port=args.port,
        bind=args.bind,
    )
|
xyuanmu/XX-Net
|
python3.8.2/Lib/http/server.py
|
Python
|
bsd-2-clause
| 47,254 | 0.000614 |
# Copyright (c) 2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from swift.common.utils import json
from swift3.response import InvalidArgument, MalformedACLError, \
S3NotImplemented, InvalidRequest, AccessDenied
from swift3.etree import Element, SubElement
from swift3.utils import LOGGER, sysmeta_header
from swift3.cfg import CONF
from swift3.exception import InvalidSubresource
# XML namespace used for xsi:type attributes in ACL documents.
XMLNS_XSI = 'http://www.w3.org/2001/XMLSchema-instance'
# Permission names defined by the S3 ACL model.
PERMISSIONS = ['FULL_CONTROL', 'READ', 'WRITE', 'READ_ACP', 'WRITE_ACP']
# User that the S3 LogDelivery group maps onto (see LogDelivery below).
LOG_DELIVERY_USER = '.log_delivery'
"""
An entry point of this approach is here.
We should understand what we have to design to achieve real S3 ACL.
S3's ACL Model is as follows:
AccessControlPolicy:
Owner:
AccessControlList:
Grant[n]:
(Grantee, Permission)
Each bucket or object has its own acl consists of Owner and
AcessControlList. AccessControlList can contain some Grants.
By default, AccessControlList has only one Grant to allow FULL
CONTROLL to owner. Each Grant includes single pair with Grantee,
Permission. Grantee is the user (or user group) allowed the given
permission.
If you wanna get more information about S3's ACL model in detail,
please see official documentation here,
http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html
"""
def encode_acl(resource, acl):
    """
    Encode an ACL instance to Swift metadata.

    Given a resource type and an ACL instance, return a dict of HTTP
    headers that can be used as Swift sysmeta.
    """
    acl_doc = {
        "Owner": acl.owner.id,
        "Grant": [{"Permission": g.permission, "Grantee": str(g.grantee)}
                  for g in acl.grants],
    }
    return {sysmeta_header(resource, 'acl'):
            json.dumps(acl_doc, separators=(',', ':'))}
def decode_acl(resource, headers):
    """
    Decode Swift metadata to an ACL instance.

    Given a resource type and HTTP headers, return the ACL instance
    stored by encode_acl().
    :raises InvalidSubresource: if the stored value cannot be decoded
    """
    raw = headers.get(sysmeta_header(resource, 'acl'), '')
    if raw == '':
        # Fix me: In the case of value is empty or not dict instance,
        # I want an instance of Owner as None.
        # However, in the above process would occur error in reference
        # to an instance variable of Owner.
        return ACL(Owner(None, None), [])
    try:
        decoded = json.loads(raw)
        if not isinstance(decoded, dict):
            return ACL(Owner(None, None), [])
        # The owner is stored as a bare id; reuse it as the display name.
        owner_id = decoded.get('Owner')
        grants = []
        for grant in decoded.get('Grant', []):
            grantee = None
            # pylint: disable-msg=E1101
            for group in Group.__subclasses__():
                if group.__name__ == grant['Grantee']:
                    grantee = group()
            if not grantee:
                grantee = User(grant['Grantee'])
            grants.append(Grant(grantee, grant['Permission']))
        return ACL(Owner(owner_id, owner_id), grants)
    except Exception as e:
        LOGGER.debug(e)
    raise InvalidSubresource((resource, 'acl', raw))
class Grantee(object):
    """
    Base class for grantee.

    Methods:
    * __contains__: membership test for a given S3 user id
    * elem: create an ElementTree element from itself

    Static methods:
    * from_header: convert a grantee string from an HTTP header
                   to a Grantee instance
    * from_elem: convert an ElementTree element to a Grantee instance

    TODO (not yet, needs confirmation whether really required):
    encode / encode_from_elem / elem_from_json / from_json helpers for
    converting between Grantee instances, ElementTrees and JSON.
    """
    def __contains__(self, key):
        """
        The key argument is a S3 user id. This method checks that the user id
        belongs to this class.
        """
        raise S3NotImplemented()

    def elem(self):
        """
        Get an etree element of this instance.
        """
        raise S3NotImplemented()

    @staticmethod
    def from_elem(elem):
        xsi_type = elem.get('{%s}type' % XMLNS_XSI)
        if xsi_type == 'CanonicalUser':
            return User(elem.find('./ID').text)
        if xsi_type == 'Group':
            uri = elem.find('./URI').text
            return get_group_subclass_from_uri(uri)()
        if xsi_type == 'AmazonCustomerByEmail':
            raise S3NotImplemented()
        raise MalformedACLError()

    @staticmethod
    def from_header(grantee):
        """
        Convert a grantee string in the HTTP header to an Grantee instance.
        """
        grant_type, value = grantee.split('=', 1)
        value = value.strip('"\'')
        if grant_type == 'id':
            return User(value)
        if grant_type == 'emailAddress':
            raise S3NotImplemented()
        if grant_type == 'uri':
            # return an instance of the Group subclass matching the URI
            return get_group_subclass_from_uri(value)()
        raise InvalidArgument(grant_type, value,
                              'Argument format not recognized')
class User(Grantee):
    """
    Canonical user class for S3 accounts.
    """
    type = 'CanonicalUser'

    def __init__(self, name):
        # The canonical id and the display name are the same value here.
        self.id = name
        self.display_name = name

    def __contains__(self, key):
        return key == self.id

    def elem(self):
        """Return this user as an etree <Grantee> element."""
        root = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
        root.set('{%s}type' % XMLNS_XSI, self.type)
        SubElement(root, 'ID').text = self.id
        SubElement(root, 'DisplayName').text = self.display_name
        return root

    def __str__(self):
        return self.display_name
class Owner(object):
    """
    Owner (canonical id plus display name) of an S3 resource.
    """
    def __init__(self, id, name):
        # 'id' shadows the builtin, but both names are part of the public
        # attribute interface, so they are kept as-is.
        self.id = id
        self.name = name
def get_group_subclass_from_uri(uri):
    """
    Convert a URI to one of the predefined groups.

    :raises InvalidArgument: if no Group subclass declares *uri*
    """
    # pylint: disable-msg=E1101
    matches = [g for g in Group.__subclasses__() if g.uri == uri]
    if matches:
        return matches[0]
    raise InvalidArgument('uri', uri, 'Invalid group uri')
class Group(Grantee):
    """
    Base class for Amazon S3 Predefined Groups.

    Subclasses override `uri` and `__contains__`.
    """
    type = 'Group'
    uri = ''

    def __init__(self):
        # Nothing to initialise; defined only to make that explicit.
        pass

    def elem(self):
        """Return this group as an etree <Grantee> element."""
        root = Element('Grantee', nsmap={'xsi': XMLNS_XSI})
        root.set('{%s}type' % XMLNS_XSI, self.type)
        SubElement(root, 'URI').text = self.uri
        return root

    def __str__(self):
        return self.__class__.__name__
def canned_acl_grantees(bucket_owner, object_owner=None):
    """
    A set of predefined grants supported by AWS S3.

    Maps each canned-ACL name to its ordered list of
    (permission, grantee) pairs.  The effective owner is
    *object_owner* when given, otherwise *bucket_owner*.
    """
    owner = object_owner or bucket_owner
    return {
        'private': [
            ('FULL_CONTROL', User(owner.name)),
        ],
        'public-read': [
            ('READ', AllUsers()),
            ('FULL_CONTROL', User(owner.name)),
        ],
        'public-read-write': [
            ('READ', AllUsers()),
            ('WRITE', AllUsers()),
            ('FULL_CONTROL', User(owner.name)),
        ],
        'authenticated-read': [
            ('READ', AuthenticatedUsers()),
            ('FULL_CONTROL', User(owner.name)),
        ],
        'bucket-owner-read': [
            ('READ', User(bucket_owner.name)),
            ('FULL_CONTROL', User(owner.name)),
        ],
        'bucket-owner-full-control': [
            ('FULL_CONTROL', User(owner.name)),
            ('FULL_CONTROL', User(bucket_owner.name)),
        ],
        'log-delivery-write': [
            ('WRITE', LogDelivery()),
            ('READ_ACP', LogDelivery()),
            ('FULL_CONTROL', User(owner.name)),
        ],
    }
class AuthenticatedUsers(Group):
    """
    This group represents all AWS accounts. Access permission to this group
    allows any AWS account to access the resource. However, all requests must
    be signed (authenticated).
    """
    uri = 'http://acs.amazonaws.com/groups/global/AuthenticatedUsers'

    def __contains__(self, key):
        # Swift3 handles only signed requests, so every requester is
        # authenticated by the time this check runs.
        return True
class AllUsers(Group):
    """
    Access permission to this group allows anyone to access the resource. The
    requests can be signed (authenticated) or unsigned (anonymous). Unsigned
    requests omit the Authentication header in the request.

    Note: Swift3 regards unsigned requests as Swift API accesses, and
    bypasses them to Swift. As a result, AllUsers behaves completely the
    same as AuthenticatedUsers.
    """
    uri = 'http://acs.amazonaws.com/groups/global/AllUsers'

    def __contains__(self, key):
        # Everyone is a member (see class docstring).
        return True
class LogDelivery(Group):
    """
    WRITE and READ_ACP permissions on a bucket enables this group to write
    server access logs to the bucket.
    """
    uri = 'http://acs.amazonaws.com/groups/s3/LogDelivery'

    def __contains__(self, key):
        # Keys may arrive qualified as 'tenant:user'; only the user part
        # is compared against the dedicated log-delivery account.
        user = key.split(':', 1)[1] if ':' in key else key
        return user == LOG_DELIVERY_USER
class Grant(object):
    """
    Grant Class which includes both Grantee and Permission.
    """
    def __init__(self, grantee, permission):
        """
        :param grantee: a Grantee instance (or subclass instance)
        :param permission: string; must be one of PERMISSIONS
        :raises S3NotImplemented: if the permission is not supported
        :raises TypeError: if grantee is not a Grantee instance
        """
        if permission.upper() not in PERMISSIONS:
            raise S3NotImplemented()
        if not isinstance(grantee, Grantee):
            # Bug fix: this was a bare `raise`, which with no active
            # exception surfaces as an unrelated interpreter error;
            # raise a meaningful exception instead.
            raise TypeError('grantee must be a Grantee instance, got %r'
                            % type(grantee))
        self.grantee = grantee
        self.permission = permission

    @classmethod
    def from_elem(cls, elem):
        """
        Convert an ElementTree <Grant> element to a Grant instance.
        """
        grantee = Grantee.from_elem(elem.find('./Grantee'))
        permission = elem.find('./Permission').text
        return cls(grantee, permission)

    def elem(self):
        """
        Create an etree <Grant> element for this instance.
        """
        elem = Element('Grant')
        elem.append(self.grantee.elem())
        SubElement(elem, 'Permission').text = self.permission
        return elem

    def allow(self, grantee, permission):
        """Return True if this grant gives *permission* to *grantee*."""
        return permission == self.permission and grantee in self.grantee
class ACL(object):
    """
    S3 ACL class.

    Refs (S3 API - acl-overview:
    http://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html):
    The sample ACL includes an Owner element identifying the owner via the
    AWS account's canonical user ID. The Grant element identifies the grantee
    (either an AWS account or a predefined group), and the permission granted.
    This default ACL has one Grant element for the owner. You grant
    permissions by adding Grant elements, each grant identifying the grantee
    and the permission.
    """
    metadata_name = 'acl'
    root_tag = 'AccessControlPolicy'
    max_xml_length = 200 * 1024

    def __init__(self, owner, grants=None):
        """
        :param owner: Owner instance for this ACL
        :param grants: list of Grant instances; defaults to no grants.
            (Bug fix: the default used to be a shared mutable list
            `grants=[]`; every ACL created without grants shared — and
            could mutate — the same list object.)
        """
        self.owner = owner
        self.grants = grants if grants is not None else []

    @classmethod
    def from_elem(cls, elem):
        """
        Convert an ElementTree to an ACL instance.
        """
        id = elem.find('./Owner/ID').text
        try:
            name = elem.find('./Owner/DisplayName').text
        except AttributeError:
            # DisplayName is optional; fall back to the canonical ID.
            name = id
        grants = [Grant.from_elem(e)
                  for e in elem.findall('./AccessControlList/Grant')]
        return cls(Owner(id, name), grants)

    def elem(self):
        """
        Build an <AccessControlPolicy> etree element from this instance.
        """
        elem = Element(self.root_tag)
        owner = SubElement(elem, 'Owner')
        SubElement(owner, 'ID').text = self.owner.id
        SubElement(owner, 'DisplayName').text = self.owner.name
        SubElement(elem, 'AccessControlList').extend(
            g.elem() for g in self.grants
        )
        return elem

    def check_owner(self, user_id):
        """
        Check that the user is an owner.

        :raises AccessDenied: if *user_id* is not the owner (or there is
            no owner and anonymous ownership is not allowed)
        """
        if not CONF.s3_acl:
            # Ignore Swift3 ACL.
            return
        if not self.owner.id:
            if CONF.allow_no_owner:
                # No owner means public.
                return
            raise AccessDenied()
        if user_id != self.owner.id:
            raise AccessDenied()

    def check_permission(self, user_id, permission):
        """
        Check that the user has a permission.

        Owners implicitly hold FULL_CONTROL; otherwise each grant is
        consulted.
        :raises AccessDenied: if no grant allows the permission
        """
        if not CONF.s3_acl:
            # Ignore Swift3 ACL.
            return
        try:
            # owners have full control permission
            self.check_owner(user_id)
            return
        except AccessDenied:
            pass
        if permission in PERMISSIONS:
            for g in self.grants:
                if g.allow(user_id, 'FULL_CONTROL') or \
                        g.allow(user_id, permission):
                    return
        raise AccessDenied()

    @classmethod
    def from_headers(cls, headers, bucket_owner, object_owner=None,
                     as_private=True):
        """
        Convert HTTP headers to an ACL instance.

        Supports explicit 'x-amz-grant-*' headers and the 'x-amz-acl'
        canned-ACL header (mutually exclusive).  When no ACL headers are
        present, returns a private ACL if *as_private*, else None.
        """
        grants = []
        try:
            for key, value in headers.items():
                if key.lower().startswith('x-amz-grant-'):
                    permission = key[len('x-amz-grant-'):]
                    permission = permission.upper().replace('-', '_')
                    if permission not in PERMISSIONS:
                        continue
                    for grantee in value.split(','):
                        grants.append(
                            Grant(Grantee.from_header(grantee), permission))
            if 'x-amz-acl' in headers:
                try:
                    acl = headers['x-amz-acl']
                    if len(grants) > 0:
                        err_msg = 'Specifying both Canned ACLs and Header ' \
                            'Grants is not allowed'
                        raise InvalidRequest(err_msg)
                    grantees = canned_acl_grantees(
                        bucket_owner, object_owner)[acl]
                    for permission, grantee in grantees:
                        grants.append(Grant(grantee, permission))
                except KeyError:
                    # expects canned_acl_grantees()[] raises KeyError
                    raise InvalidArgument('x-amz-acl', headers['x-amz-acl'])
        except (KeyError, ValueError):
            # TODO: think about we really catch this except sequence
            raise InvalidRequest()
        if len(grants) == 0:
            # No ACL headers
            if as_private:
                return ACLPrivate(bucket_owner, object_owner)
            else:
                return None
        return cls(object_owner or bucket_owner, grants)
class CannedACL(object):
    """
    A dict-like object that returns canned ACL factories.

    canned_acl['<name>'] yields a callable taking
    (bucket_owner, object_owner=None) and returning the ACL.
    """
    def __getitem__(self, key):
        def build(name, bucket_owner, object_owner=None):
            grantees = canned_acl_grantees(bucket_owner, object_owner)[name]
            return ACL(object_owner or bucket_owner,
                       [Grant(grantee, permission)
                        for permission, grantee in grantees])
        return partial(build, key)
# Singleton factory: canned_acl['<name>'] returns a callable that builds
# the corresponding ACL for a given owner (see CannedACL.__getitem__).
canned_acl = CannedACL()
ACLPrivate = canned_acl['private']
ACLPublicRead = canned_acl['public-read']
ACLPublicReadWrite = canned_acl['public-read-write']
ACLAuthenticatedRead = canned_acl['authenticated-read']
ACLBucketOwnerRead = canned_acl['bucket-owner-read']
ACLBucketOwnerFullControl = canned_acl['bucket-owner-full-control']
ACLLogDeliveryWrite = canned_acl['log-delivery-write']
|
KoreaCloudObjectStorage/swift3
|
swift3/subresource.py
|
Python
|
apache-2.0
| 16,885 | 0 |
import unittest
from scrapers.journalscrapers import ElsevierScraper
class TestElsevierScraper(unittest.TestCase):
    """Exercise ElsevierScraper against the 2016 uncleaned CSV fixture."""

    def setUp(self):
        """Create a scraper bound to the fixture file."""
        self.instance = ElsevierScraper("../data/elsevier/2016-uncleaned.csv")

    def test_strip_chars(self):
        """Smoke-test get_entries() by printing each parsed row."""
        for row in self.instance.get_entries():
            # Bug fix: `print row` is Python-2-only syntax; print(row)
            # behaves identically on Python 2 (single argument) and
            # also runs on Python 3.
            print(row)
|
PatrickSpieker/pricesleuth
|
tests/TestElsevierScraper.py
|
Python
|
mit
| 321 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014 Marcus Müller.
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import remote_agent
import task_frontend
import helpers
import benchmarking_task
from gnuradio import gr, gr_unittest
try:
import mtb_swig as mtb
except ImportError:
pass
import gc
import json
import numpy
import os
import tempfile
import time
try:
import cStringIO as StringIO
except ImportError:
import StringIO
from PyQt4 import QtGui
from PyQt4 import QtCore
class MyApplicationClass(QtGui.QApplication):
    """QApplication subclass that announces when the event loop starts."""
    # Emitted just before entering the Qt event loop.
    started = QtCore.pyqtSignal()
    def exec_(self):
        self.started.emit()
        # NOTE(review): QtGui.QApplication.exec_() is called without an
        # instance -- presumably it is a static method in PyQt4; confirm.
        return QtGui.QApplication.exec_()
class qa_task_frontend (gr_unittest.TestCase):
    """GNU Radio test case exercising task_frontend JSON loading."""
    def setUp(self):
        # Fixture: a reference benchmarking-task description, serialized
        # to a temporary .json file for the frontend to load.
        self.taskstring = ""
        self.task = []
        self.range_spec = (0,1,100)
        self.ref_task_grc = {
            "class_name":"class",
            "module_name":"module",
            "instruction":"run_grc",
            "attributes": {
                "value": {
                    "param_type": "LIN_RANGE",
                    "value": list(self.range_spec),
                    "value_type": "float64"
                },
                "length": {
                    "param_type": "LIST",
                    "value": [10,20,30],
                    "value_type": "int64"
                },
            },
            "sinks": [ "blocks_vector_sink_x_0" ]
        }
        # Embed the reference GRC flowgraph XML into the task dict.
        self.xml_file = open(os.path.join(os.path.dirname(__file__), "extraction_test_topblock.grc"), "r")
        self.ref_task_grc["grcxml"] = self.xml_file.read()
        self.xml_file.close()
        # delete=False so the file survives close(); removed in tearDown.
        self.jsonfile = tempfile.NamedTemporaryFile(suffix=".json", delete=False)
        self.jsonfilename = self.jsonfile.name
        json.dump(self.ref_task_grc, self.jsonfile)
        self.jsonfile.close()
        self.qapp = MyApplicationClass([])
    def tearDown(self):
        # Clean up the temporary JSON fixture.
        os.unlink(self.jsonfilename)
    def test_001_load_json_file(self):
        # Loading the fixture file directly must not raise.
        self.my_ui = task_frontend.TaskFrontend()
        self.my_ui._load_json_file_direct(self.jsonfilename)
if __name__ == '__main__':
    # Run the suite under GNU Radio's unittest wrapper.
    gr_unittest.run(qa_task_frontend)#, "qa_task_frontend.xml")
|
greenoaktree/measurement_toolbox
|
gr-mtb/python/qa_task_frontend.py
|
Python
|
gpl-3.0
| 2,935 | 0.008862 |
# ITERATING DICTIONARY
d = {'x': 1, 'y': 2, 'z': 3}
for key in d:
    # Iterating a dict yields its keys; use the key to look up the value.
    # Bug fix: converted from the Python-2-only `print key, ...` statement
    # to the print() function so the snippet runs on Python 3.
    print(key, 'corresponds to', d[key])
|
laurey/py
|
py/iterating.dic.py
|
Python
|
gpl-2.0
| 103 | 0.029126 |
#!/usr/bin/python
#
# \file 0_setup.py
# \brief Setup rbank
# \date 2009-03-10-22-43-GMT
# \author Jan Boon (Kaetemi)
# Python port of game data build pipeline.
# Setup rbank
#
# NeL - MMORPG Framework <http://dev.ryzom.com/projects/nel/>
# Copyright (C) 2010 Winch Gate Property Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time, sys, os, shutil, subprocess, distutils.dir_util
sys.path.append("../../configuration")
# Start a fresh log file for this pipeline run.
if os.path.isfile("log.log"):
    os.remove("log.log")
log = open("log.log", "w")
from scripts import *
from buildsite import *
from process import *
from tools import *
from directories import *
printLog(log, "")
printLog(log, "-------")
printLog(log, "--- Setup rbank")
printLog(log, "-------")
printLog(log, time.strftime("%Y-%m-%d %H:%MGMT", time.gmtime(time.time())))
printLog(log, "")
# Setup source directories
printLog(log, ">>> Setup source directories <<<")
for dir in RBankCmbSourceDirectories:
    mkPath(log, DatabaseDirectory + "/" + dir)
mkPath(log, LeveldesignWorldDirectory)
# Setup export directories
printLog(log, ">>> Setup export directories <<<")
mkPath(log, ExportBuildDirectory + "/" + RBankCmbExportDirectory)
mkPath(log, ExportBuildDirectory + "/" + RBankCmbTagExportDirectory)
mkPath(log, ExportBuildDirectory + "/" + SmallbankExportDirectory)
# Setup build directories
printLog(log, ">>> Setup build directories <<<")
mkPath(log, ExportBuildDirectory + "/" + ZoneWeldBuildDirectory)
for dir in IgLookupDirectories:
    mkPath(log, ExportBuildDirectory + "/" + dir)
for dir in ShapeLookupDirectories:
    mkPath(log, ExportBuildDirectory + "/" + dir)
mkPath(log, ExportBuildDirectory + "/" + RbankBboxBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + IgLandBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + IgOtherBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankTessellationBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankSmoothBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankRawBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankPreprocBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankRetrieversBuildDirectory)
mkPath(log, ExportBuildDirectory + "/" + RbankOutputBuildDirectory)
# Setup client directories
printLog(log, ">>> Setup client directories <<<")
mkPath(log, InstallDirectory + "/" + PacsInstallDirectory)
log.close()
# end of file
|
osgcc/ryzom
|
nel/tools/build_gamedata/processes/rbank/0_setup.py
|
Python
|
agpl-3.0
| 2,998 | 0.004336 |
from jupyter_workflow.data import get_fremont_data
import pandas as pd
def test_fremont_data():
    """Sanity-check the Fremont bridge dataset loader."""
    frame = get_fremont_data()
    assert all(frame.columns == ['West', 'East', 'Total'])
    assert isinstance(frame.index, pd.DatetimeIndex)
|
irenalanc/JupyterPythonPals
|
jupyter_workflow/tests/test_data.py
|
Python
|
mit
| 234 | 0.021368 |
# -*- coding: utf-8 -*-
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db import migrations
def clear_message_sent_by_message_type_values(apps, schema_editor):
    # type: (StateApps, DatabaseSchemaEditor) -> None
    """Delete every analytics row recorded under the retired
    'messages_sent:message_type:day' count property."""
    property = 'messages_sent:message_type:day'
    for model_name in ('UserCount', 'StreamCount', 'RealmCount',
                       'InstallationCount', 'FillState'):
        model = apps.get_model('analytics', model_name)
        model.objects.filter(property=property).delete()
class Migration(migrations.Migration):
    """Purge stale 'messages_sent:message_type:day' rows from all count tables."""
    dependencies = [('analytics', '0009_remove_messages_to_stream_stat')]
    operations = [
        migrations.RunPython(clear_message_sent_by_message_type_values),
    ]
|
christi3k/zulip
|
analytics/migrations/0010_clear_messages_sent_values.py
|
Python
|
apache-2.0
| 1,174 | 0.001704 |
class Config:
    """Configuration key names (and a few defaults) for streaming jobs.

    Values are plain strings that other components look up by these
    constant names.
    """
    # job / worker identification keys
    STREAMING_JOB_NAME = "streaming.job.name"
    STREAMING_OP_NAME = "streaming.op_name"
    TASK_JOB_ID = "streaming.task_job_id"
    STREAMING_WORKER_NAME = "streaming.worker_name"
    # channel
    CHANNEL_TYPE = "channel_type"
    MEMORY_CHANNEL = "memory_channel"
    NATIVE_CHANNEL = "native_channel"
    CHANNEL_SIZE = "channel_size"
    # default value for CHANNEL_SIZE (10^8)
    CHANNEL_SIZE_DEFAULT = 10**8
    IS_RECREATE = "streaming.is_recreate"
    # return from StreamingReader.getBundle if only empty message read in this
    # interval.
    TIMER_INTERVAL_MS = "timer_interval_ms"
    STREAMING_RING_BUFFER_CAPACITY = "streaming.ring_buffer_capacity"
    # write an empty message if there is no data to be written in this
    # interval.
    STREAMING_EMPTY_MESSAGE_INTERVAL = "streaming.empty_message_interval"
    # operator type
    OPERATOR_TYPE = "operator_type"
|
stephanie-wang/ray
|
streaming/python/config.py
|
Python
|
apache-2.0
| 859 | 0 |
# """All things dealing strictly with the GUI."""
##
# Copyright 2013 Chad Spratt
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# GUI window config
import re
import gtk
import gobject
class GUI(object):
"""Initializes the GUI from the Glade file and provides widget access.
This class:
* builds the Glade file
* gives access to all the widgets by name via __getitem__
* provides convenient message and file dialogs
* helps replace the columns in the output field store/view
* updates the progress bar which can also be used to keep the interface
responsive during background processing
"""
    def __init__(self, hfuncs):
        """Build the Glade UI and connect every signal to its handler.

        hfuncs: object exposing all the callback methods named below;
        a reference is kept so replacecolumns can wire edit callbacks later.
        """
        self.gladefile = 'averydb.glade'
        self.builder = gtk.Builder()
        self.builder.add_from_file(self.gladefile)
        # runtime-created widgets; consulted before the builder in __getitem__
        self.newobjects = {}
        self.handlerfunctions = hfuncs
        handlers = {}
        # GUI_Files
        handlers['adddatabutton_clicked_cb'] = hfuncs.selectandaddfile
        handlers['mainwindow_drag_data_received_cb'] = hfuncs.dropfiles
        handlers['tabledialog_delete_event_cb'] = hfuncs.closetabledialog
        handlers['tableview_row_activated_cb'] = hfuncs.tabledoubleclicked
        handlers['configinputdialog_delete_event_cb'] = hfuncs.closeconfiginputdialog
        handlers['removedatabutton_clicked_cb'] = hfuncs.removefile
        handlers['targetcombo_changed_cb'] = hfuncs.changetarget
        handlers['browsetooutputbutton_clicked_cb'] = hfuncs.browsetooutput
        handlers['replacetargetcheckbox_toggled_cb'] = hfuncs.replacetargettoggle
        handlers['targetlocationcheckbox_toggled_cb'] = hfuncs.targetlocationtoggle
        # GUI_JoinConfig
        handlers['joinaliascombo_changed_cb'] = hfuncs.loadjoinfields
        handlers['targetaliascombo_changed_cb'] = hfuncs.loadtargetfields
        handlers['joinfieldcombo_changed_cb'] = hfuncs.matchtargetfield
        handlers['addjoinbutton_clicked_cb'] = hfuncs.addjoin
        handlers['removejoinbutton_clicked_cb'] = hfuncs.removejoin
        handlers['innercolumntoggle_toggled_cb'] = hfuncs.toggleinner
        # GUI_FieldToolbar
        handlers['reloadfieldsbutton_clicked_cb'] = hfuncs.reloadfields
        handlers['addfieldbutton_clicked_cb'] = hfuncs.addfield
        handlers['copyfieldbutton_clicked_cb'] = hfuncs.copyfield
        handlers['removefieldbutton_clicked_cb'] = hfuncs.removefield
        handlers['movetopbutton_clicked_cb'] = hfuncs.movetop
        handlers['moveupbutton_clicked_cb'] = hfuncs.moveup
        handlers['movedownbutton_clicked_cb'] = hfuncs.movedown
        handlers['movebottombutton_clicked_cb'] = hfuncs.movebottom
        # AveryDB
        handlers['mainwindow_destroy_cb'] = hfuncs.quitprogram
        handlers['outputtypecombo_changed_cb'] = hfuncs.setoutputfile
        handlers['executejointoggle_toggled_cb'] = hfuncs.queueexecution
        handlers['stopoutputbutton_clicked_cb'] = hfuncs.abortoutput
        # calc window
        handlers['calclibrarybutton_clicked_cb'] = hfuncs.showlibraries
        handlers['opencalcbutton_clicked_cb'] = hfuncs.showcalculator
        handlers['calcwindow_delete_event_cb'] = hfuncs.hidecalculator
        handlers['calcoutputfieldcombo_changed_cb'] = hfuncs.changecalcfield
        handlers['calcinputview_row_activated_cb'] = hfuncs.insertfieldvalue
        handlers['calcsavevaluebutton_clicked_cb'] = hfuncs.savecalcvalue
        handlers['calclibrarycomboentry_changed_cb'] = hfuncs.loadfunctionlist
        handlers['calcfunctionview_row_activated_cb'] = hfuncs.insertfunccall
        # function window
        handlers['calcopenfuncbutton_clicked_cb'] = hfuncs.showfunceditor
        handlers['funcwindow_delete_event_cb'] = hfuncs.hidefunceditor
        handlers['funclibrarycombo_changed_cb'] = hfuncs.loadlibraryfunctions
        handlers['funcfunctioncombo_changed_cb'] = hfuncs.loadfunctiontext
        handlers['funcreloadbutton_clicked_cb'] = hfuncs.loadfunctiontext
        handlers['funcsavebutton_clicked_cb'] = hfuncs.savefunction
        handlers['funcsaveclosebutton_clicked_cb'] = hfuncs.saveclosefunction
        handlers['funccancelbutton_clicked_cb'] = hfuncs.hidefunceditor
        # new library dialog
        handlers['funcaddlibrarybutton_clicked_cb'] = hfuncs.getlibraryname
        handlers['newlibcreate_clicked_cb'] = hfuncs.createlibrary
        handlers['newlibcancel_clicked_cb'] = hfuncs.cancelcreatelibrary
        # keyboard shortcuts
        handlers['fieldview_key_press_event_cb'] = hfuncs.fieldskeypressed
        # menu items
        handlers['filemenupreferences_activate_cb'] = hfuncs.showoptions
        handlers['optionsbutton_clicked_cb'] = hfuncs.showoptions
        handlers['fieldlengthbutton_clicked_cb'] = hfuncs.autoadjustfieldlengths
        # options window
        handlers['optionswindow_delete_event_cb'] = hfuncs.closeoptions
        handlers['optionsavebutton_clicked_cb'] = hfuncs.saveoptions
        handlers['optionsaveclosebutton_clicked_cb'] = hfuncs.savecloseoptions
        handlers['optioncancelbutton_clicked_cb'] = hfuncs.closeoptions
        handlers['defaultoutputbrowsebutton_clicked_cb'] = hfuncs.browsedefaultoutput
        # table selection dialog
        # handlers['tableok_clicked_cb'] = hfuncs.addtables
        # experimental
        handlers['sampleoutputview_columns_changed_cb'] = hfuncs.reordercols
        self.builder.connect_signals(handlers)
        # other setup
        # allow multi-select in the output-field and table lists
        outputselection = self.builder.get_object('fieldview').get_selection()
        outputselection.set_mode(gtk.SELECTION_MULTIPLE)
        tableselection = self.builder.get_object('tableview').get_selection()
        tableselection.set_mode(gtk.SELECTION_MULTIPLE)
        # drag and drop file support
        mainwindow = self.builder.get_object('mainwindow')
        mainwindow.drag_dest_set(gtk.DEST_DEFAULT_MOTION | gtk.DEST_DEFAULT_HIGHLIGHT |
                                 gtk.DEST_DEFAULT_DROP,
                                 # 80 is the type code for a URI list
                                 [('text/uri-list', 0, 80)],
                                 gtk.gdk.ACTION_COPY)
        mainwindow.show_all()
def initoutputformatcombo(self, filetypes):
typelist = self['outputtypelist']
typelist.clear()
for filetype in filetypes:
if filetype not in ['All supported', 'All files']:
extensions = filetypes[filetype]['patterns']
# trim the wildcard asterisk from each extension
for i in range(len(extensions)):
extensions[i] = extensions[i][1:]
typelist.append([', '.join(extensions),
filetype])
self['outputtypecombo'].set_active(0)
    @classmethod
    def filedialog(cls, filetypes, foroutput=False, folder=False):
        """Sets up and returns a file chooser dialog for the caller to run.

        filetypes: mapping of type name -> {'mimes': [...], 'patterns': [...]}
        used to build one file filter per type.
        foroutput: configure the dialog for saving; folder: for picking a
        directory (folder takes precedence over foroutput).
        """
        if folder:
            title = 'Choose directory...'
            action = gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER
        elif foroutput:
            title = 'Save as...'
            action = gtk.FILE_CHOOSER_ACTION_SAVE
        else:
            title = 'Open...'
            action = gtk.FILE_CHOOSER_ACTION_OPEN
        # NOTE(review): buttons are Cancel/Open even for the save and folder
        # modes -- consider STOCK_SAVE when foroutput is set.
        dialog = gtk.FileChooserDialog(title, None, action,
                                       (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                                        gtk.STOCK_OPEN, gtk.RESPONSE_OK))
        dialog.set_default_response(gtk.RESPONSE_OK)
        for filetype in filetypes:
            filefilter = gtk.FileFilter()
            filefilter.set_name(filetype)
            for mimetype in filetypes[filetype]['mimes']:
                filefilter.add_mime_type(mimetype)
            # register both cases so matching is case-insensitive
            for pattern in filetypes[filetype]['patterns']:
                filefilter.add_pattern(pattern.upper())
                filefilter.add_pattern(pattern.lower())
            dialog.add_filter(filefilter)
        return dialog
def tabledialog(self, tablenames):
"""Give a list of tables within a file to choose which to load."""
dialog = self['tabledialog']
tabletree = self['tabletree']
tabletree.clear()
if type(tablenames) == list:
for tablename in tablenames:
tabletree.append(None, [tablename, None])
# used for gdb only, currently
elif type(tablenames) == dict:
# add features in datasets
if 'datasets' in tablenames:
datasets = tablenames['datasets'].keys()
datasets.sort()
for dataset in datasets:
parentiter = tabletree.append(None, [dataset, 'DataSet'])
features = tablenames['datasets'][dataset]
features.sort()
for feature in features:
tabletree.append(parentiter, [feature, 'Feature'])
# add features in root
if 'features' in tablenames:
rootfeatures = tablenames['features']
rootfeatures.sort()
for feature in rootfeatures:
tabletree.append(None, [feature, 'Feature'])
# add tables in root
if 'tables' in tablenames:
roottables = tablenames['tables']
roottables.sort()
for table in roottables:
tabletree.append(None, [table, 'Table'])
return dialog
@classmethod
def messagedialog(cls, message, style='msg'):
"""Creates a simple dialog to display the provided message."""
if style == 'yesno':
dialog = gtk.MessageDialog(type=gtk.MESSAGE_QUESTION,
buttons=gtk.BUTTONS_YES_NO)
else:
dialog = gtk.MessageDialog(buttons=gtk.BUTTONS_OK)
dialog.set_markup(message)
dialog.set_default_response(gtk.RESPONSE_OK)
response = dialog.run()
dialog.destroy()
return response
    # This is used for the output field config and sample views.
    def replacecolumns(self, storename, viewname, newcolnames):
        """Replaces the columns in the output list/view with new columns.

        Builds a fresh all-string ListStore under *storename* (registered in
        self.newobjects so __getitem__ serves it), points *viewname* at it,
        and rebuilds one editable column per entry of newcolnames.
        """
        # make a new liststore to use
        celltypelist = []
        for i in range(len(newcolnames)):
            celltypelist.append(gobject.TYPE_STRING)
        # __getitem__ checks newobjects so access will shift to the new store
        self.newobjects[storename] = gtk.ListStore(*celltypelist)
        # update the listview
        view = self[viewname]
        view.set_model(self[storename])
        # remove the old columns
        for col in view.get_columns():
            view.remove_column(col)
        # add the new columns
        for i in range(len(newcolnames)):
            # treeviews need double underscores to display single underscores
            colname = re.sub(r'_', '__', newcolnames[i])
            if colname.lower() in ('type', 'affinity'):
                # type columns get a non-editable-entry dropdown of field types
                fieldtypelist = self['fieldtypelist']
                newcell = gtk.CellRendererCombo()
                newcell.set_property('editable', True)
                newcell.set_property('has-entry', False)
                newcell.set_property('model', fieldtypelist)
                newcell.set_property('text-column', 0)
                newcell.connect('changed',
                                self.handlerfunctions.updatefieldtype,
                                fieldtypelist, self[storename], i)
                # NOTE(review): text=1 here vs text=i below -- confirm the
                # type/affinity column is always model column 1.
                newcolumn = gtk.TreeViewColumn(colname, newcell, text=1)
            else:
                newcell = gtk.CellRendererText()
                newcell.set_property('editable', True)
                newcell.connect('edited',
                                self.handlerfunctions.updatefieldattribute,
                                self[storename], i)
                newcolumn = gtk.TreeViewColumn(colname, newcell, text=i)
            view.append_column(newcolumn)
    def initconfiginputwindow(self, fieldnames, fieldvalues, fieldtypes):
        """Build and return a modal dialog for assigning a type to each field.

        Shows one column per field (name plus sample values from
        *fieldvalues*, a sequence of records) with a type combo beneath it.
        The combo widgets are stored in self.typecomboboxes so the chosen
        types can be read back after the dialog runs.
        """
        dialog = gtk.Dialog('Define input', self['mainwindow'],
                            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT,
                            (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                             gtk.STOCK_OK, gtk.RESPONSE_OK))
        dialog.set_modal(True)
        dialog.set_default_size(500, 400)
        contentarea = dialog.get_content_area()
        dialogvbox = gtk.VBox()
        contentarea.pack_start(dialogvbox, True, True, 0)
        # label at the top
        dialoglabel = gtk.Label('Define field types')
        dialogvbox.pack_start(dialoglabel, False, False, 0)
        # scrolled window for everything else
        dialogscrolledwindow = gtk.ScrolledWindow()
        dialogvbox.pack_start(dialogscrolledwindow, True, True)
        # viewport to hold everything in the scrolled window
        dialogviewport = gtk.Viewport()
        dialogscrolledwindow.add(dialogviewport)
        # hbox to hold the stuff for each field
        scrollhbox = gtk.HBox()
        dialogviewport.add(scrollhbox)
        # one shared model of the selectable field types
        inputtypelist = gtk.ListStore(gobject.TYPE_STRING)
        for fieldtype in fieldtypes:
            inputtypelist.append([fieldtype])
        # store references to the comboboxes in the GUI object so their
        # values can be retrieved from gui_files
        self.typecomboboxes = []
        # for each field
        for i in range(len(fieldnames)):
            # create a box to hold everything
            inputfieldvbox = gtk.VBox()
            # store the sample values in a list
            inputfieldvaluelist = gtk.ListStore(gobject.TYPE_STRING)
            for record in fieldvalues:
                inputfieldvaluelist.append([record[i]])
            # create a view to hold the field name and a sample of values
            inputfieldview = gtk.TreeView(inputfieldvaluelist)
            # add a cell in a column to the listview to display the values
            inputfieldcell = gtk.CellRendererText()
            inputfieldcolumn = gtk.TreeViewColumn(fieldnames[i], inputfieldcell,
                                                  text=0)
            inputfieldview.append_column(inputfieldcolumn)
            # add a combobox for selecting field type
            inputfieldtypecombo = gtk.ComboBox(inputtypelist)
            inputtypecell = gtk.CellRendererText()
            inputfieldtypecombo.pack_start(inputtypecell, expand=True)
            inputfieldtypecombo.add_attribute(cell=inputtypecell,
                                              attribute='text',
                                              column=0)
            self.typecomboboxes.append(inputfieldtypecombo)
            # pack the two main objects
            inputfieldvbox.pack_start(inputfieldview, expand=True)
            inputfieldvbox.pack_start(inputfieldtypecombo, expand=False)
            # pack the box itself
            scrollhbox.pack_start(inputfieldvbox, expand=False)
        dialog.show_all()
        return dialog
    def setprogress(self, progress=-1, text='', lockgui=True):
        """Updates the progress bar immediately.

        progress: value from 0 to 1. -1 will keep the existing setting;
            the string 'pulse' bounces the bar with no definite fraction.
        text: text to display on the bar
        lockgui: call setprogress during a long function with lockgui=False
        to enable gui input while the background function runs."""
        progressbar = self['progressbar']
        stopoutputbutton = self['stopoutputbutton']
        if lockgui:
            # grab_add routes all input to these widgets, blocking the rest
            # of the GUI while work is in progress
            progressbar.grab_add()
            # Also check the abort button
            stopoutputbutton.grab_add()
        if progress == 'pulse':
            progressbar.pulse()
        elif progress >= 0:
            progressbar.set_fraction(progress)
        progressbar.set_text(text)
        # drain pending events so the bar redraws (and the GUI stays
        # responsive when lockgui is False)
        while gtk.events_pending():
            gtk.main_iteration(False)
        if lockgui:
            progressbar.grab_remove()
            stopoutputbutton.grab_remove()
def __getitem__(self, objname):
if objname in self.newobjects:
return self.newobjects[objname]
return self.builder.get_object(objname)
def creategui(handlerfunctions):
    """Initializes and returns the gui."""
    return GUI(handlerfunctions)
def startgui():
    """Starts the gtk main loop."""
    # Blocks here until the main loop is quit (e.g. the main window closes).
    gtk.main()
|
chadspratt/AveryDB
|
gui.py
|
Python
|
apache-2.0
| 16,903 | 0.000592 |
#!/usr/bin/env python
import logging # http://docs.python.org/2/howto/logging.html#logging-basic-tutorial
import pysftp # https://code.google.com/p/pysftp/
from configobj import ConfigObj # http://www.voidspace.org.uk/python/configobj.html
import os
import sys
import time
import curses
import Image
import ImageTk
import Tkinter
from gallery import Gallery
logging.basicConfig(level=logging.WARNING)
class PiMotionGallery():
    def __init__(self):
        """Load saved config if present; otherwise prompt for connection
        parameters, verify them against the remote host, and save them."""
        print "\n--- PiMotionGallery v0.1 ---\n"
        self.config = None
        # gallery state: index into the (placeholder) image/caption lists
        self.current = 0
        self.image_list = ['1.jpg', '2.jpg', '5.jpg']
        self.text_list = ['apple', 'bird', 'cat']
        self.root = Tkinter.Tk()
        self.label = Tkinter.Label(self.root, compound=Tkinter.TOP)
        # NOTE(review): these instance attributes appear unused -- the
        # ask/check/save methods read module-level globals of the same
        # names instead; confirm before removing either set.
        self.tmp_host = ''
        self.tmp_port = ''
        self.tmp_user = ''
        self.tmp_pass = ''
        self.tmp_base = ''
        self.tmp_loca = ''
        if(self.loadExistingConfig()):
            print "Existing configuration successfully loaded."
            #fetchImages()
        else:
            self.askParameters()
            self.loadFromRemote()
            self.reviewConfig()
            self.saveConfig()
    def gallery2(self):
        """Open the Gallery window and block in its Tk main loop."""
        gallery = Gallery(self)
        gallery.master.title('PiMotionGallery v0.1')
        #gallery.master.maxsize(1024, 750)
        #gallery.master.minsize(1024, 750)
        gallery.mainloop()
# Try to load existing configuration file.
def loadExistingConfig(self):
logging.info("loadExistingConfig()")
self.config = ConfigObj('pmg.conf')
return self.config != {}
    def fetchImages(self):
        """Pull every motion-capture directory from the remote host.

        NOTE(review): only entries whose name is exactly 8 characters are
        treated as capture directories (presumably date-named, YYYYMMDD);
        confirm against the actual capture layout.
        """
        print "Connecting to remote server to fetch new images ..."
        srv = pysftp.Connection(host=self.config['host'], username=self.config['username'], password=self.config['password'], port=int(self.config['port']))
        #base_list = srv.execute('ls -al ' + config['motion_base'])
        for item in srv.listdir(path=self.config['motion_base']):
            # lstatout=str(srv.lstat(i)).split()[0]
            # if 'd' in lstatout:
            # print i, 'is a directory'
            # Verify it is a directory
            if len(item) == 8:
                self.fetchImagesFromDir(srv, item)
        srv.close()
    def fetchImagesFromDir(self, srv, directory):
        """Copy one remote capture directory into the local mirror, showing
        a text progress bar, then delete the remote directory.

        srv: an already-open pysftp.Connection.
        """
        remote = self.config['motion_base'] + directory
        local = self.config['motion_local'] + directory
        print "\nChecking " + directory + " directory ..."
        if not os.path.exists(local):
            os.makedirs(local)
        dir_list = srv.listdir(path=remote)
        i = 0
        total = len(dir_list)
        # All files are copied to the local directory
        for item in dir_list:
            # NOTE(review): names containing 'm' are skipped; presumably a
            # filter for motion metadata files -- confirm the convention.
            if (not 'm' in item) and (not os.path.exists(local + '/' + item)):
                srv.get(remotepath=remote+'/'+item, localpath=local+'/'+item)
            i += 1
            current = int(i * 100 / total)
            # redraw the 20-segment bar only on multiples of 5%
            if(current % 5 == 0):
                sys.stdout.write("\r[%-20s] %d%% - this can take a while, grab a coffee!" % ('=' * (current / 5), current))
                sys.stdout.flush()
        # Remote directory is deleted
        sys.stdout.write("\n\nDeleting remote directory " + directory + " ...")
        srv.execute('rm -rf ' + remote)
        sys.stdout.write(" [OK]\n")
        sys.stdout.flush()
    def askParameters(self):
        """Prompt on stdin for connection parameters.

        NOTE(review): results are stored in module-level globals (tmp_host
        etc.), not on self; checkConnection/saveConfig read those same
        globals.
        """
        logging.info("askParameters()")
        global tmp_host
        global tmp_port
        global tmp_user
        global tmp_pass
        global tmp_base
        global tmp_loca
        tmp_host = raw_input('host [] : ') or ''
        tmp_port = raw_input('port [22] : ') or 22
        tmp_user = raw_input('username [pi] : ') or 'pi'
        tmp_pass = raw_input('password [raspberry] : ') or 'raspberry'
        tmp_base = raw_input('motion base [/home/pi/motion/] : ') or '/home/pi/motion/'
        tmp_loca = raw_input('local directory [] : ')
        print "\n\nconfig parameters set to:\n\thost: " + tmp_host + "\n\tport: " + str(tmp_port) + "\n\tusername: " + tmp_user + "\n\tpassword: " + tmp_pass + "\n\n"
        # tmp_port stays the int default 22 unless the user typed a number
        if(self.representsInt(tmp_port)):
            tmp_port = int(tmp_port)
    def loadFromRemote(self):
        """Verify the tmp_* connection parameters; on failure, offer to
        re-enter them and retry (recursively)."""
        logging.info("loadFromRemote()")
        print tmp_host
        print tmp_user
        print tmp_base
        print tmp_port
        if(self.checkConnection()):
            print "Successfully connected to the remote host."
        else:
            keep = ''
            # accept y/n (and yes/no), defaulting to 'n' on empty input
            while (keep != 'y') and (keep != 'n'):
                keep = raw_input("\nDo you want to keep your current connection parameters? [y/N] : ").lower() or 'n'
                if(keep == 'no'):
                    keep = 'n'
                elif(keep == 'yes'):
                    keep = 'y'
            if(keep == 'n'):
                self.askParameters()
                self.loadFromRemote()
        #srv.get('thefile.txt')
        #srv.put('anotherfile.txt')
    def checkConnection(self):
        """Return True when the tmp_* globals allow an SFTP login AND the
        local target directory exists; False otherwise."""
        success = True
        try:
            srv = pysftp.Connection(host=tmp_host, username=tmp_user, password=tmp_pass, port=tmp_port)
##            test = srv.execute('ls -al ' + tmp_base)
##            print test
            srv.close()
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
        except:
            logging.warning("Could not connect to remote host.")
            print "[WARNING] Could not connect to remote host, please check your connection parameters."
            success = False
        if(os.path.isdir(tmp_loca)):
            return success
        else:
            logging.warning("Local directory does not exist.")
            print "[WARNING] Local directory does not exist."
            return False
    def reviewConfig(self):
        """Placeholder: intended to let the user review the parameters
        before saving; currently a no-op."""
        logging.info("reviewConfig()")
        pass
    def saveConfig(self):
        """Write the tmp_* globals to pmg.conf and reload self.config."""
        logging.info("saveConfig()")
        config = ConfigObj('pmg.conf')
        config['host'] = tmp_host
        config['port'] = tmp_port
        config['username'] = tmp_user
        config['password'] = tmp_pass
        config['motion_base'] = tmp_base
        config['motion_local'] = tmp_loca
        config.write()
        # re-read so self.config reflects what was just written
        self.loadExistingConfig()
        print "Configuration parameters successfully saved."
def playVideo(filename):
os.system("open " + filename)
    def gallery():
        """Legacy Tk gallery loop.

        NOTE(review): broken as written -- no `self` parameter, and it uses
        bare names (label, root, move) that are only defined as instance
        attributes/methods; gallery2 appears to supersede it.
        """
        label.pack()
        frame = Tkinter.Frame(root)
        frame.pack()
        Tkinter.Button(frame, text='Previous picture', command=lambda: move(-1)).pack(side=Tkinter.LEFT)
        Tkinter.Button(frame, text='Next picture', command=lambda: move(+1)).pack(side=Tkinter.LEFT)
        Tkinter.Button(frame, text='Quit', command=root.quit).pack(side=Tkinter.LEFT)
        move(0)
        root.mainloop()
    def move(delta):
        """Legacy gallery navigation: step *delta* images and redraw.

        NOTE(review): broken as written -- no `self` parameter, the
        `global` names shadow instance attributes, and tkMessageBox is
        never imported at the top of this module.
        """
        global current, image_list
        if not (0 <= current + delta < len(image_list)):
            tkMessageBox.showinfo('End', 'No more image.')
            return
        current += delta
        image = Image.open(image_list[current])
        photo = ImageTk.PhotoImage(image)
        label['text'] = text_list[current]
        label['image'] = photo
        # keep a reference so Tk does not garbage-collect the image
        label.photo = photo
def representsInt(self, s):
try:
int(s)
return True
except ValueError:
return False
def main():
    # Build (or load) the configuration, then open the gallery window.
    pmg = PiMotionGallery()
    pmg.gallery2()
# standard script entry guard
if __name__ == "__main__":
    main()
|
jvilaplana/PiMotionGallery
|
pmg.py
|
Python
|
gpl-3.0
| 8,027 | 0.007849 |
import praw
import urllib.request
import json
import requests
import requests.auth
import os.path
import re
from imgurpython import ImgurClient
from bs4 import BeautifulSoup
imgur_gif_regex = re.compile("https?:\/\/i\.imgur\.com\/[a-z0-9]+.gif")
def gyazo_link_parser(link):
    """
    Parses Gyazo links into their raw (.png or .gif) form (i.gyazo).

    Fetches the Gyazo page and returns the src of its first <img> tag.
    """
    # Close the HTTP response deterministically instead of leaking it.
    response = urllib.request.urlopen(link)
    try:
        html = response.read()
    finally:
        response.close()
    # parse the html and pull out the embedded image link
    parsed = BeautifulSoup(html)
    return parsed.img['src']
def imgur_uploader(link, imgur_client):
    """
    Upload the image at *link* to imgur anonymously.

    Returns the new imgur URL (with a trailing "v" for .gif links so imgur
    serves the gifv player), or False when the upload fails.
    """
    try:
        uploaded_image = imgur_client.upload_from_url(url=link, config=None, anon=True)
    # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
    except Exception:
        print("Error when uploading the image to imgur.")
        return False
    print("Successful convert of", link, "to an imgur link", uploaded_image["link"])
    # search() gives the same truthiness as the old len(findall(...)) != 0
    if imgur_gif_regex.search(uploaded_image["link"]):
        return uploaded_image["link"] + "v"
    return uploaded_image["link"]
def comment_prep(content):
    """Build the standard bot reply body around an imgur link *content*."""
    footer = ("\n\n\n------\n"
              "This action was performed by a bot. Message +/u/arrivance for further details.")
    return "Imgur link: " + content + footer
def comment_poster(comment, content):
    """Reply to *comment* with *content*, logging (not raising) failures."""
    try:
        comment.reply(content)
    except praw.errors.RateLimitExceeded as e:
        print("Rate limit exceeded:", e)
    except praw.errors.APIException as e:
        print("API Exception:", e)
    # Narrowed from a bare except (which also swallowed KeyboardInterrupt);
    # still best-effort, but now reports what actually went wrong.
    except Exception as e:
        print("Other unknown fault:", e)
    else:
        print("Successfully commented on comment ID", comment.id)
def file_checker(filename):
    """Return True if *filename* exists and is a regular file."""
    # os.path.isfile already returns a bool; no if/else ladder needed.
    return os.path.isfile(filename)
def file_maker(filename, structure):
    """Serialize *structure* as JSON into *filename*; always returns True.

    Bug fix: the original called json.dump(structure, filename), passing
    the *name string* instead of the open file handle, which raised
    AttributeError ('str' object has no attribute 'write').
    """
    with open(filename, "w") as data_file:
        json.dump(structure, data_file)
    return True
def reddit_oauth_token(login_details, user_agent):
    """Fetch an OAuth2 access token from reddit via the password grant.

    login_details: dict holding reddit_client_id, reddit_client_secret,
    reddit_user, and reddit_pass. Raises KeyError when the response
    carries no access_token (e.g. bad credentials).
    """
    client_auth = requests.auth.HTTPBasicAuth(login_details["reddit_client_id"], login_details["reddit_client_secret"])
    post_data = {"grant_type": "password", "username": login_details["reddit_user"], "password": login_details["reddit_pass"]}
    headers = {"User-Agent": user_agent}
    print("Attempting to get the access_token from reddit...")
    response = requests.post("https://www.reddit.com/api/v1/access_token", auth=client_auth, data=post_data, headers=headers)
    access_token = response.json()["access_token"]
    print("access_token succesfully gotten:", access_token)
    return access_token
|
arrivance/gyazo-to-imgur
|
src/utility.py
|
Python
|
mit
| 3,148 | 0.014295 |
from L500analysis.derived_fields.derived_fields import *
from L500analysis.derived_fields.derived_field_functions import *
from L500analysis.derived_fields.collections.peak_height.derived_field_functions \
import *
from L500analysis.derived_fields.derived_field_tools.non_thermal_temperature \
import calculate_Ttot
from L500analysis.derived_fields.derived_field_tools.self_similar_normalizations \
import calculate_T_normalization
from L500analysis.plotting.profiles.tools.radial_normalizations import *
from L500analysis.plotting.profiles.tools.make_profile import make_profile
from L500analysis.utils.constants import K2keV
def _normalized_temperature_profile(data, *args, **kwargs) :
T_mw = data.profiles['T_mw']
Mvir = data.halo_properties[kwargs['M_delta_key']]
Rmid = data.profiles['r_mid']
Rvir = data.halo_properties[kwargs['R_delta_key']]
Rscaled = {hid: Rmid[hid]/Rvir[hid] for hid in data.halo_ids}
return dict({ 'aexp':data.aexp, 'Mvir':Mvir, 'T_mw':T_mw,
'halo_ids':data.halo_ids,
'Rscaled':Rscaled },
**kwargs)
def _normalized_total_temperature_profile(data, *args, **kwargs) :
T_mw = data.profiles['T_mw']
Mvir = data.halo_properties[kwargs['M_delta_key']]
sigr=data.profiles['vel_gas_rad_std']
sigt=data.profiles['vel_gas_tan_std']
vr = data.profiles['vel_gas_rad_avg']
vt = data.profiles['vel_gas_tan_avg']
Rmid = data.profiles['r_mid']
Rvir = data.halo_properties[kwargs['R_delta_key']]
Rscaled = {hid: Rmid[hid]/Rvir[hid] for hid in data.halo_ids}
return dict({ 'sigr':sigr, 'sigt':sigt, 'vr':vr, 'vt':vt,
'aexp':data.aexp, 'Mvir':Mvir, 'T_mw':T_mw,
'Rscaled':Rscaled,
'halo_ids':data.halo_ids }, **kwargs)
def calculate_normalized_temperature_profile(input_data):
    """Normalize each halo's T_mw profile by T_delta and resample it onto
    the standard radial grid via make_profile."""
    d = input_data
    out = {}
    for hid in d['halo_ids']:
        t_norm = calculate_T_normalization(Mvir=d['Mvir'][hid],
                                           delta=d['delta'],
                                           aexp=d['aexp'])
        profile = d['T_mw'][hid] * d['units'] / t_norm
        out[hid] = make_profile(x=d['Rscaled'][hid], y=profile)
    return out
def calculate_normalized_total_temperature_profile(input_data):
    """Normalize each halo's total (thermal + kinetic) temperature by
    T_delta and resample it onto the standard radial grid."""
    d = input_data
    out = {}
    for hid in d['halo_ids']:
        t_norm = calculate_T_normalization(Mvir=d['Mvir'][hid],
                                           delta=d['delta'],
                                           aexp=d['aexp'])
        total_temp = calculate_Ttot(sigr=d['sigr'][hid], vr=d['vr'][hid],
                                    sigt=d['sigt'][hid], vt=d['vt'][hid],
                                    Tmw=d['T_mw'][hid])
        out[hid] = make_profile(x=d['Rscaled'][hid], y=total_temp / t_norm)
    return out
def calculate_total_temperature_profile(input_data):
    """Compute each halo's total (thermal + kinetic) temperature profile,
    resampled onto the standard radial grid.

    Unlike calculate_normalized_total_temperature_profile, no T_delta
    normalization is applied.
    """
    d = input_data
    T_tot = {}
    for hid in d['halo_ids']:
        # Bug fix: the original assigned into an undefined name
        # (T_tot_normalized), raising NameError on the first halo and
        # leaving the returned T_tot dict forever empty.
        Ttot = calculate_Ttot(sigr=d['sigr'][hid], vr=d['vr'][hid],
                              sigt=d['sigt'][hid], vt=d['vt'][hid],
                              Tmw=d['T_mw'][hid])
        T_tot[hid] = make_profile(x=d['Rscaled'][hid], y=Ttot)
    return T_tot
# Register the derived temperature fields.
# T_mw normalized by the self-similar T_delta; units=K2keV converts K -> keV.
add_derived_field('T_mw/T500c',function=_normalized_temperature_profile,
                  combine_function=calculate_normalized_temperature_profile,
                  M_delta_key='M_total_500c', R_delta_key='r500c',
                  delta='500c',units=K2keV)
add_derived_field('T_mw/T200m',function=_normalized_temperature_profile,
                  combine_function=calculate_normalized_temperature_profile,
                  M_delta_key='M_total_200m', R_delta_key='r200m',
                  delta='200m',units=K2keV)
# Total (thermal + kinetic) temperature normalized by T_delta.
add_derived_field('Ttot/T500c',function=_normalized_total_temperature_profile,
                  combine_function=calculate_normalized_total_temperature_profile
                  , M_delta_key='M_total_500c', R_delta_key='r500c',
                  delta='500c')
add_derived_field('Ttot/T200m',function=_normalized_total_temperature_profile,
                  combine_function=calculate_normalized_total_temperature_profile,
                  M_delta_key='M_total_200m', R_delta_key='r200m',
                  delta='200m')
# Unnormalized total temperature. NOTE(review): these reuse the *normalized*
# input-collection function; confirm that is intended.
add_derived_field('Ttot_500c',function=_normalized_total_temperature_profile,
                  combine_function=calculate_total_temperature_profile
                  , M_delta_key='M_total_500c', R_delta_key='r500c',
                  delta='500c')
add_derived_field('Ttot_200m',function=_normalized_total_temperature_profile,
                  combine_function=calculate_total_temperature_profile
                  , M_delta_key='M_total_200m', R_delta_key='r200m',
                  delta='200m')
|
cavestruz/L500analysis
|
derived_fields/collections/temperature/derived_field_functions.py
|
Python
|
mit
| 5,126 | 0.013461 |
import os.path
import gwt
from ...weights import W
from warnings import warn
__author__ = "Myunghwa Hwang <mhwang4@gmail.com>"
__all__ = ["DatIO"]
class DatIO(gwt.GwtIO):
    """
    Opens, reads, and writes file objects in DAT format.

    Spatial weights objects in DAT format are used in
    Dr. LeSage's MatLab Econ library.

    This DAT format is a simple text file with DAT or dat extension.
    Without header line, it includes three data columns
    for origin id, destination id, and weight values as follows:

    [Line 1]    2    1    0.25
    [Line 2]    5    1    0.50
    ...

    Origin/destination IDs in this file format are simply record
    numbers starting with 1. IDs are not necessarily integers.
    Data values for all columns should be numeric.

    """

    FORMATS = ['dat']
    MODES = ['r', 'w']

    def _read(self):
        """Reads .dat file
        Returns a pysal.weights.weights.W object

        Examples
        --------

        Type 'dir(w)' at the interpreter to see what methods are supported.
        Open .dat file and read it into a pysal weights object

        >>> w = pysal.open(pysal.examples.get_path('wmat.dat'),'r').read()

        Get the number of observations from the header

        >>> w.n
        49

        Get the mean number of neighbors

        >>> w.mean_neighbors
        4.7346938775510203

        Get neighbor distances for a single observation

        >>> w[1]
        {2.0: 0.3333, 5.0: 0.3333, 6.0: 0.3333}

        """
        # This reader yields a single W; a subsequent read signals
        # exhaustion via StopIteration.
        if self.pos > 0:
            raise StopIteration
        id_type = float
        weights, neighbors = self._readlines(id_type)
        self.pos += 1
        return W(neighbors, weights)

    def write(self, obj):
        """

        Parameters
        ----------
        .write(weightsObject)
        accepts a weights object

        Returns
        ------

        a DAT file
        write a weights object to the opened DAT file.

        Examples
        --------

        >>> import tempfile, pysal, os
        >>> testfile = pysal.open(pysal.examples.get_path('wmat.dat'),'r')
        >>> w = testfile.read()

        Create a temporary file for this example

        >>> f = tempfile.NamedTemporaryFile(suffix='.dat')

        Reassign to new var

        >>> fname = f.name

        Close the temporary named file

        >>> f.close()

        Open the new file in write mode

        >>> o = pysal.open(fname,'w')

        Write the Weights object into the open file

        >>> o.write(w)
        >>> o.close()

        Read in the newly created dat file

        >>> wnew = pysal.open(fname,'r').read()

        Compare values from old to new

        >>> wnew.pct_nonzero == w.pct_nonzero
        True

        Clean up temporary file created for this example

        >>> os.remove(fname)
        """
        self._complain_ifclosed(self.closed)
        # isinstance is the idiomatic (and subclass-aware) form of the
        # original issubclass(type(obj), W) check.
        if isinstance(obj, W):
            self._writelines(obj)
        else:
            raise TypeError("Expected a pysal weights object, got: %s" % (
                type(obj)))
|
sjsrey/pysal_core
|
pysal_core/io/IOHandlers/dat.py
|
Python
|
bsd-3-clause
| 3,059 | 0.000327 |
#encoding:utf-8
__authors__ = ['"Wei Keke" <keke.wei@cs2c.com.cn>']
__version__ = "V0.1"
'''
# ChangeLog:
#---------------------------------------------------------------------------------
# Version Date Desc Author
#---------------------------------------------------------------------------------
# V0.1 2014/10/09 初始版本 Wei Keke
#---------------------------------------------------------------------------------
'''
from TestData.Template import ITC07_SetUp as ModuleData
from TestAPIs.VirtualMachineAPIs import VirtualMachineAPIs
'''---------------------------------------------------------------------------------------------------
@note: PreData
---------------------------------------------------------------------------------------------------'''
vm_id = VirtualMachineAPIs().getVmIdByName(ModuleData.vm_name)
vm_name = ModuleData.vm_name
disk_name = ModuleData.disk_name
disk_info = ModuleData.disk_info
temp_name = 'Template-ITC070202'
temp_info='''
<template>
<name>%s</name>
<vm id="%s"/>
</template>
''' % (temp_name, vm_id)
'''---------------------------------------------------------------------------------------------------
@note: ExpectedData
---------------------------------------------------------------------------------------------------'''
expected_status_code = 200
|
faylau/oVirt3.3WebAPITest
|
src/TestData/Template/ITC070202_GetTemplateDiskInfo.py
|
Python
|
apache-2.0
| 1,439 | 0.004892 |
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import path_item
except ImportError:
path_item = sys.modules["onshape_client.oas.models.path_item"]
class Callback(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # No enum-restricted or validated attributes on this model.
    allowed_values = {}
    validations = {}
    additional_properties_type = None

    @staticmethod
    def openapi_types():
        """
        This must be a class method so a model may have properties that are
        of type self, this ensures that we don't create a cyclic import
        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        return {
            "empty": (bool,),  # noqa: E501
            "extensions": (
                {str: (bool, date, datetime, dict, float, int, list, str,)},
            ),  # noqa: E501
            "getref": (str,),  # noqa: E501
        }

    @staticmethod
    def discriminator():
        # This model is not polymorphic.
        return None

    # Maps the Python attribute name to the JSON key used on the wire;
    # note "getref" serializes as the OpenAPI "get$ref" key.
    attribute_map = {
        "empty": "empty",  # noqa: E501
        "extensions": "extensions",  # noqa: E501
        "getref": "get$ref",  # noqa: E501
    }

    @staticmethod
    def _composed_schemas():
        # No allOf/oneOf/anyOf composition for this model.
        return None

    required_properties = set(
        [
            "_data_store",
            "_check_type",
            "_from_server",
            "_path_to_item",
            "_configuration",
        ]
    )

    def __init__(
        self,
        _check_type=True,
        _from_server=False,
        _path_to_item=(),
        _configuration=None,
        **kwargs
    ):  # noqa: E501
        """callback.Callback - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _from_server (bool): True if the data is from the server
                                False if the data is from the client (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            empty (bool): [optional] # noqa: E501
            extensions ({str: (bool, date, datetime, dict, float, int, list, str,)}): [optional] # noqa: E501
            getref (str): [optional] # noqa: E501
        """
        self._data_store = {}
        self._check_type = _check_type
        self._from_server = _from_server
        self._path_to_item = _path_to_item
        self._configuration = _configuration

        # Any remaining keyword arguments are model properties; setattr routes
        # them through ModelNormal's type-checked storage.
        for var_name, var_value in six.iteritems(kwargs):
            setattr(self, var_name, var_value)
|
onshape-public/onshape-clients
|
python/onshape_client/oas/models/callback.py
|
Python
|
mit
| 4,774 | 0 |
#
# This is minimal MicroPython variant of run-tests script, which uses
# .exp files as generated by run-tests --write-exp. It is useful to run
# testsuite on systems which have neither CPython3 nor unix shell.
# This script is intended to be run by the same interpreter executable
# which is to be tested, so should use minimal language functionality.
#
import sys
import uos as os
# Test suites to run, relative to the current directory. Entries must not
# contain stray whitespace: each name is interpolated directly into shell
# commands and into the ".exp" file paths opened below. The original list
# contained " misc" (leading space), so open(" misc/x.exp") always failed
# and the entire misc suite was silently skipped.
tests = [
    "basics", "micropython", "float", "import", "io",
    "misc", "unicode", "extmod", "unix"
]

# Interpreter under test; assumed to be on PATH.
if sys.platform == 'win32':
    MICROPYTHON = "micropython.exe"
else:
    MICROPYTHON = "micropython"
def should_skip(test):
    """Return True for tests that require the native/viper emitter and so
    cannot run on a port without it; False otherwise.

    The original version fell off the end and implicitly returned None for
    non-skipped tests; callers only used it in a boolean context, but an
    explicit bool is clearer and safer.
    """
    if test.startswith("native"):
        return True
    if test.startswith("viper"):
        return True
    return False
# Run every suite: list its *.py tests via the shell, run each one under the
# target interpreter, and diff the output against the stored ".exp" file.
# Tests with no ".exp" file are counted as skipped; a test that prints
# exactly "SKIP\n" self-reports as skipped.
test_count = 0
passed_count = 0
skip_count = 0
for suite in tests:
    #print("Running in: %s" % suite)
    if sys.platform == 'win32':
        # dir /b prints only contained filenames, one on a line
        # http://www.microsoft.com/resources/documentation/windows/xp/all/proddocs/en-us/dir.mspx
        r = os.system("dir /b %s/*.py >tests.lst" % suite)
    else:
        r = os.system("ls %s/*.py | xargs -n1 basename >tests.lst" % suite)
    assert r == 0
    with open("tests.lst") as f:
        testcases = f.readlines()
        # drop the trailing newline of each listed filename
        testcases = [l[:-1] for l in testcases]
    assert testcases, "No tests found in dir '%s', which is implausible" % suite
    #print(testcases)
    for t in testcases:
        # native_check.py is a helper, not a test
        if t == "native_check.py":
            continue
        qtest = "%s/%s" % (suite, t)
        if should_skip(t):
            print("skip " + qtest)
            skip_count += 1
            continue
        # Load the expected output; a missing .exp file means "skip".
        exp = None
        try:
            f = open(qtest + ".exp")
            exp = f.read()
            f.close()
        except OSError:
            pass
        if exp is not None:
            #print("run " + qtest)
            # Non-zero exit status is recorded as a crash (always a FAIL,
            # since "CRASH" can never equal the expected output).
            r = os.system(MICROPYTHON + " %s >.tst.out" % qtest)
            if r == 0:
                f = open(".tst.out")
                out = f.read()
                f.close()
            else:
                out = "CRASH"
            if out == "SKIP\n":
                print("skip " + qtest)
                skip_count += 1
            else:
                if out == exp:
                    print("pass " + qtest)
                    passed_count += 1
                else:
                    print("FAIL " + qtest)
                test_count += 1
        else:
            skip_count += 1
# Final summary; failed count is derived rather than tracked separately.
print("%s tests performed" % test_count)
print("%s tests passed" % passed_count)
if test_count != passed_count:
    print("%s tests failed" % (test_count - passed_count))
if skip_count:
    print("%s tests skipped" % skip_count)
|
infinnovation/micropython
|
tests/run-tests-exp.py
|
Python
|
mit
| 2,697 | 0.002225 |
from django.contrib import admin
from Weather.models import *
from Weather.util import updateForecast
def update_forecast(modeladmin, request, queryset):
    """Admin bulk action: force an immediate forecast refresh from the
    National Weather Service for every selected Forecast object."""
    for selected_forecast in queryset:
        updateForecast(selected_forecast)
# Label shown in the admin "Actions" dropdown.
update_forecast.short_description = "Force forecast update from NWS"
class forecastAdmin(admin.ModelAdmin):
    # Expose the "Force forecast update" bulk action on the changelist.
    actions = [update_forecast]
class WMSRadarOverlayAdmin(admin.ModelAdmin):
    # Stock admin; no customizations needed.
    pass
# Hook both models into the Django admin site with their admin classes.
admin.site.register(Forecast, forecastAdmin)
admin.site.register(WMSRadarOverlay, WMSRadarOverlayAdmin)
|
sschultz/FHSU-GSCI-Weather
|
Weather/admin.py
|
Python
|
mit
| 524 | 0.003817 |
"""
Settings for reputation changes that apply to
user in response to various actions by the same
users or others
"""
from askbot.conf.settings_wrapper import settings
from askbot.conf.super_groups import REP_AND_BADGES
from askbot.deps.livesettings import ConfigurationGroup, IntegerValue
from django.utils.translation import ugettext_lazy as _
BADGES = ConfigurationGroup(
    'BADGES',
    _('Badge settings'),
    ordering=2,
    super_group = REP_AND_BADGES
)

# (setting key, default value, lazily-translated description) for every badge
# threshold. Registering from a table keeps the definitions compact and
# guarantees each entry is an IntegerValue in the BADGES group; registration
# order matches the original one-by-one calls.
_BADGE_THRESHOLDS = (
    ('DISCIPLINED_BADGE_MIN_UPVOTES', 3,
     _('Disciplined: minimum upvotes for deleted post')),
    ('PEER_PRESSURE_BADGE_MIN_DOWNVOTES', 3,
     _('Peer Pressure: minimum downvotes for deleted post')),
    ('TEACHER_BADGE_MIN_UPVOTES', 20,
     _('Teacher: minimum upvotes for the answer')),
    ('NICE_ANSWER_BADGE_MIN_UPVOTES', 5,
     _('Nice Answer: minimum upvotes for the answer')),
    ('GOOD_ANSWER_BADGE_MIN_UPVOTES', 10,
     _('Good Answer: minimum upvotes for the answer')),
    ('GREAT_ANSWER_BADGE_MIN_UPVOTES', 15,
     _('Great Answer: minimum upvotes for the answer')),
    ('NICE_QUESTION_BADGE_MIN_UPVOTES', 5,
     _('Nice Question: minimum upvotes for the question')),
    ('GOOD_QUESTION_BADGE_MIN_UPVOTES', 10,
     _('Good Question: minimum upvotes for the question')),
    ('GREAT_QUESTION_BADGE_MIN_UPVOTES', 15,
     _('Great Question: minimum upvotes for the question')),
    ('POPULAR_QUESTION_BADGE_MIN_VIEWS', 500,
     _('Popular Question: minimum views')),
    ('NOTABLE_QUESTION_BADGE_MIN_VIEWS', 150,
     _('Notable Question: minimum views')),
    ('FAMOUS_QUESTION_BADGE_MIN_VIEWS', 1000,
     _('Famous Question: minimum views')),
    ('SELF_LEARNER_BADGE_MIN_UPVOTES', 10,
     _('Self-Learner: minimum answer upvotes')),
    ('CIVIC_DUTY_BADGE_MIN_VOTES', 100,
     _('Civic Duty: minimum votes')),
    ('ENLIGHTENED_BADGE_MIN_UPVOTES', 15,
     _('Enlightened Duty: minimum upvotes')),
    ('GURU_BADGE_MIN_UPVOTES', 30,
     _('Guru: minimum upvotes')),
    ('NECROMANCER_BADGE_MIN_UPVOTES', 3,
     _('Necromancer: minimum upvotes')),
    ('NECROMANCER_BADGE_MIN_DELAY', 30,
     _('Necromancer: minimum delay in days')),
    ('ASSOCIATE_EDITOR_BADGE_MIN_EDITS', 20,
     _('Associate Editor: minimum number of edits')),
    ('FAVORITE_QUESTION_BADGE_MIN_STARS', 5,
     _('Favorite Question: minimum stars')),
    ('STELLAR_QUESTION_BADGE_MIN_STARS', 10,
     _('Stellar Question: minimum stars')),
    ('COMMENTATOR_BADGE_MIN_COMMENTS', 10,
     _('Commentator: minimum comments')),
    ('TAXONOMIST_BADGE_MIN_USE_COUNT', 5,
     _('Taxonomist: minimum tag use count')),
    ('ENTHUSIAST_BADGE_MIN_DAYS', 5,
     _('Enthusiast: minimum days')),
)

for _key, _default, _description in _BADGE_THRESHOLDS:
    settings.register(
        IntegerValue(
            BADGES,
            _key,
            default=_default,
            description=_description
        )
    )
|
stianrh/askbot-nordic
|
askbot/conf/badges.py
|
Python
|
gpl-3.0
| 5,005 | 0.002198 |
from python_kemptech_api import *
# Specify the LoadMaster connection credentials here:
loadmaster_ip = ""
username = ""
password = ""
# Addresses for the two demo virtual services and two real servers.
vs_ip_1 = ""
vs_ip_2 = ""
rs_ip_1 = ""
rs_ip_2 = ""
# Ports used for every VS and RS created below.
vs_port = ""
rs_port = ""
class RealServerPool(object):
    """Snapshot of a set of real servers plus VS-level health-check settings,
    reusable across virtual services via apply()."""

    # VS-level health-check parameters copied onto a target VS by apply().
    healthcheck_parameters = [
        "checktype",
        "checkport",
        "checkurl",
        "checkheaders",
        "checkuse1_1",
        "checkuseget",
        "checkpostdata",
        "checkpattern",
        "checkcodes",
        "matchlen",
        "enhancedhealthchecks",
        "rsminimum"
    ]
    # Per-real-server parameters copied onto each newly created RS.
    rs_parameters = [
        "enable",
        "forward",
        "weight",
        "limit",
        "critical",
        "follow"
    ]
    def __init__(self, rs_list=None, vs=None):
        """Build a pool either from a list of "ip:port" strings / RealServer
        objects (rs_list) or from an existing VirtualService (vs). With
        rs_list, all health-check attributes start as None (i.e. "leave the
        target VS's setting alone" in apply())."""
        if rs_list is not None:
            self.rs = []
            for rs in rs_list:
                if isinstance(rs, RealServer):
                    self.rs.append(rs)
                else:
                    ip, port = rs.split(":")
                    # Placeholder LoadMaster dict: these RS objects are only
                    # templates; apply() re-creates them on a real VS.
                    mock_lm = {"endpoint": "", "ip_address": "", "vs": ""}
                    self.rs.append(RealServer(mock_lm, ip, port))
            self.checktype = None
            self.checkport = None
            self.checkurl = None
            self.checkheaders = None
            self.checkuse1_1 = None
            self.checkuseget = None
            self.checkpostdata = None
            self.checkpattern = None
            self.checkcodes = None
            self.matchlen = None
            self.enhancedhealthchecks = None
            self.rsminimum = None
        elif vs is not None:
            # Copy the live VS's server list and health-check configuration.
            self.rs = vs.servers.values()
            self.checktype = vs.checktype
            self.checkport = vs.checkport
            self.checkurl = vs.checkurl
            self.checkheaders = vs.checkheaders
            self.checkuse1_1 = vs.checkuse1_1
            self.checkuseget = vs.checkuseget
            self.checkpostdata = vs.checkpostdata
            self.checkpattern = vs.checkpattern
            self.checkcodes = vs.checkcodes
            self.matchlen = vs.matchlen
            self.enhancedhealthchecks = vs.enhancedhealthchecks
            self.rsminimum = vs.rsminimum
    def apply(self, vs):
        """Replace the real servers of `vs` with this pool's servers and push
        any non-None health-check settings onto `vs`. Destructive: all of the
        VS's existing real servers are deleted first."""
        [rs.delete() for rs in vs.servers.values()]
        for rs in self.rs:
            new_rs = vs.create_real_server(rs.rs, rs.rsport)
            # Apply other settings
            new_rs.save()
            for attr in self.rs_parameters:
                # debug output left in intentionally by the example
                print("attr: {}".format(attr))
                if hasattr(rs, attr) and rs.__getattribute__(attr) is not None:
                    print("set attr: {}={}".format(attr, rs.__getattribute__(attr)))
                    new_rs.__setattr__(attr, rs.__getattribute__(attr))
                    new_rs.update()
        for attr in self.healthcheck_parameters:
            print("attr: {}".format(attr))
            if hasattr(self, attr) and self.__getattribute__(attr) is not None:
                print("set attr: {}={}".format(attr, self.__getattribute__(attr)))
                vs.__setattr__(attr, self.__getattribute__(attr))
                vs.update()
vs.update()
# Demo script: build two VSs and show both ways of constructing a pool.
# Create the LoadMaster object
lm = LoadMaster(loadmaster_ip, username, password)
# Delete all the existing VSs
[vs.delete() for vs in lm.vs.values()]
# Create a new VS
vs = lm.create_virtual_service(vs_ip_1, vs_port, "tcp")
vs.save()
# Configure some healthcheck options
vs.checktype = 'HTTPS'
vs.checkport = "8443"
vs.update()
# Add and save the first real server
rs1 = vs.create_real_server(rs_ip_1, rs_port)
rs1.save()
# Configure the weighting
rs1.weight = 200
rs1.update()
# Add and save the second real server
rs2 = vs.create_real_server(rs_ip_2, rs_port)
rs2.save()
# Disable the server
rs2.enable = 'N'
rs2.update()
# This will create a pool based on the VS and healthcheck settings of the VS
pool1 = RealServerPool(vs=vs)
# Create the second VS
vs2 = lm.create_virtual_service(vs_ip_2, vs_port, "tcp")
vs2.save()
# Apply the pool to the new VS. The RS and healthcheck settings will be applied
pool1.apply(vs2)
# Alternately, you can use a list of IP and ports to create a pool
rs_list = ["172.22.100.6:88", "172.22.100.7:88", "172.22.100.8:88", "172.22.100.9:88"]
pool2 = RealServerPool(rs_list)
# You can also apply healthcheck settings directly to a pool
pool2.checktype = "ICMP"
# Apply the pool to both VSs (replacing the servers created above)
pool2.apply(vs)
pool2.apply(vs2)
|
KEMPtechnologies/python-kemptech-api
|
examples/real_server_pooling.py
|
Python
|
apache-2.0
| 4,377 | 0.000914 |
#
# Copyright (C) 2019 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Vendula Poncova <vponcova@redhat.com>
#
import unittest
from blivet.devices import DiskDevice
from blivet.formats import get_format
from blivet.size import Size
from pyanaconda.modules.common.constants.objects import DISK_SELECTION
from pyanaconda.modules.common.errors.storage import UnavailableStorageError
from pyanaconda.modules.common.structures.validation import ValidationReport
from pyanaconda.modules.storage.disk_selection import DiskSelectionModule
from pyanaconda.modules.storage.disk_selection.selection_interface import DiskSelectionInterface
from pyanaconda.storage.initialization import create_storage
from tests.nosetests.pyanaconda_tests import check_dbus_property
class DiskSelectionInterfaceTestCase(unittest.TestCase):
    """Test DBus interface of the disk selection module."""

    def setUp(self):
        """Set up the module."""
        self.disk_selection_module = DiskSelectionModule()
        self.disk_selection_interface = DiskSelectionInterface(self.disk_selection_module)

    def _test_dbus_property(self, *args, **kwargs):
        # Shared helper: assert a DBus property on DISK_SELECTION round-trips.
        check_dbus_property(
            self,
            DISK_SELECTION,
            self.disk_selection_interface,
            *args, **kwargs
        )

    def selected_disks_property_test(self):
        """Test the selected disks property."""
        self._test_dbus_property(
            "SelectedDisks",
            ["sda", "sdb"]
        )

    def validate_selected_disks_test(self):
        """Test ValidateSelectedDisks."""
        storage = create_storage()
        self.disk_selection_module.on_storage_changed(storage)
        # Build a dependency chain dev1 <- dev2 <- dev3 so that selecting a
        # strict subset of the chain must fail validation.
        dev1 = DiskDevice(
            "dev1",
            exists=False,
            size=Size("15 GiB"),
            fmt=get_format("disklabel")
        )
        dev2 = DiskDevice(
            "dev2",
            exists=False,
            parents=[dev1],
            size=Size("6 GiB"),
            fmt=get_format("disklabel")
        )
        dev3 = DiskDevice(
            "dev3",
            exists=False,
            parents=[dev2],
            size=Size("6 GiB"),
            fmt=get_format("disklabel")
        )
        storage.devicetree._add_device(dev1)
        storage.devicetree._add_device(dev2)
        storage.devicetree._add_device(dev3)
        # An empty selection is trivially valid.
        report = ValidationReport.from_structure(
            self.disk_selection_interface.ValidateSelectedDisks([])
        )
        self.assertEqual(report.is_valid(), True)
        report = ValidationReport.from_structure(
            self.disk_selection_interface.ValidateSelectedDisks(["dev1"])
        )
        self.assertEqual(report.is_valid(), False)
        self.assertEqual(report.error_messages, [
            "You selected disk dev1, which contains devices that also use "
            "unselected disks dev2, dev3. You must select or de-select "
            "these disks as a set."
        ])
        self.assertEqual(report.warning_messages, [])
        report = ValidationReport.from_structure(
            self.disk_selection_interface.ValidateSelectedDisks(["dev1", "dev2"])
        )
        self.assertEqual(report.is_valid(), False)
        self.assertEqual(report.error_messages, [
            "You selected disk dev1, which contains devices that also "
            "use unselected disk dev3. You must select or de-select "
            "these disks as a set.",
            "You selected disk dev2, which contains devices that also "
            "use unselected disk dev3. You must select or de-select "
            "these disks as a set."
        ])
        self.assertEqual(report.warning_messages, [])
        # Selecting the whole chain is valid.
        report = ValidationReport.from_structure(
            self.disk_selection_interface.ValidateSelectedDisks(["dev1", "dev2", "dev3"])
        )
        self.assertEqual(report.is_valid(), True)

    def exclusive_disks_property_test(self):
        """Test the exclusive disks property."""
        self._test_dbus_property(
            "ExclusiveDisks",
            ["sda", "sdb"]
        )

    def ignored_disks_property_test(self):
        """Test the ignored disks property."""
        self._test_dbus_property(
            "IgnoredDisks",
            ["sda", "sdb"]
        )

    def protected_disks_property_test(self):
        """Test the protected disks property."""
        self._test_dbus_property(
            "ProtectedDevices",
            ["sda", "sdb"]
        )

    def disk_images_property_test(self):
        """Test the protected disks property."""
        self._test_dbus_property(
            "DiskImages",
            {
                "image_1": "/path/1",
                "image_2": "/path/2"
            }
        )

    def get_usable_disks_test(self):
        """Test the GetUsableDisks method."""
        # Without storage the call must raise; with empty storage it
        # returns an empty list.
        with self.assertRaises(UnavailableStorageError):
            self.disk_selection_interface.GetUsableDisks()
        self.disk_selection_module.on_storage_changed(create_storage())
        self.assertEqual(self.disk_selection_interface.GetUsableDisks(), [])
|
atodorov/anaconda
|
tests/nosetests/pyanaconda_tests/module_disk_select_test.py
|
Python
|
gpl-2.0
| 5,967 | 0.00067 |
import json
import os
#This helper class allows to access members of the configuration
#as c.attr1.attr2.attr3 instead of c[attr1][attr2][attr3]
class DictToAttr(object):
    """Wrap a dict so nested keys are reachable as attributes.

    ``wrapper.a.b.c`` is equivalent to ``d['a']['b']['c']``; nested dicts
    are wrapped on access. Unknown names raise ValueError.
    """

    def __init__(self, dictionary):
        # Copy so later mutation of the caller's dict doesn't leak in.
        self.dict = dict(dictionary)

    def __getattr__(self, attr):
        try:
            value = self.dict[attr]
        except KeyError:
            raise ValueError("'%s' does not exists in configuration" %(attr))
        return DictToAttr(value) if isinstance(value, dict) else value
class Configuration( object ):
    """Loads configuration.json (next to this module) at construction time
    and exposes its top-level keys as attributes, wrapping nested dicts in
    DictToAttr."""

    def __init__(self):
        #Read configuration file
        path = os.path.dirname (
            os.path.abspath( os.path.expanduser( __file__ ) )
        )
        configFile = os.path.join( path, "configuration.json" )
        if os.path.exists( configFile ):
            try:
                with open( configFile ) as f:
                    #Strip all comments to make the output
                    #a json-compliant string
                    # NOTE(review): this drops ANY line containing '#',
                    # including '#' inside JSON string values — presumably
                    # acceptable for this project's config; verify.
                    allLines = f.readlines()
                    clean = filter( lambda line: line.find("#") < 0, allLines )
                    JSON = " ".join( clean )
                    JSON = JSON.replace("\n"," ")
                    self.config = json.loads( JSON )
            except IOError:
                print("Configuration file %s exists " % configFile +
                      "but cannot be read. Traceback follows")
                raise
        else:
            raise IOError("Configuration file %s does not exist!" %(configFile))

    def __getattr__(self, attr):
        # NOTE(review): unlike DictToAttr, a missing key falls through and
        # implicitly returns None instead of raising — looks unintentional;
        # confirm before relying on it.
        if attr in self.config.keys():
            token = self.config[ attr ]
            if isinstance( token, dict ):
                return DictToAttr( token )
            else:
                return token
|
giacomov/pyggop
|
pyggop/config/Configuration.py
|
Python
|
mit
| 2,359 | 0.038152 |
# LICENSE: GPLv2, see attached License
# Author: Joerg Jungermann
def get_mac():
    """Return the board's WLAN MAC address as a dash-separated hex string.

    MicroPython only: uses the `network` and `ubinascii` modules.
    """
    from network import WLAN
    from ubinascii import hexlify
    return hexlify(WLAN().config('mac'),'-').decode()
def sleep(sec, deepsleep = False):
    """Sleep for `sec` seconds.

    With deepsleep=True the ESP arms an RTC alarm and enters deep sleep;
    the chip resets on wake, so execution never continues past this call.
    """
    import time
    if deepsleep:
        import machine
        rtc = machine.RTC()
        rtc.irq(trigger=rtc.ALARM0, wake=machine.DEEPSLEEP)
        rtc.alarm(rtc.ALARM0, sec * 1000)  # alarm period is in milliseconds
        print('iotesp.sleep():', sec, 's / DEEPSLEEP')
        machine.deepsleep()
        # normally unreachable: deepsleep resets the board on wake
        time.sleep_us(100)
    else:
        # print('sleep:', sec, 's')
        time.sleep(sec)
# vim: sw=4 ts=4 ft=python et foldmethod=indent
|
j0ju/iot-carbon-esp
|
modules/iotesp.py
|
Python
|
gpl-2.0
| 651 | 0.006144 |
###############################################################################
# Name: nsistags.py #
# Purpose: Generate Tags for Nullsoft Installer Scripts #
# Author: Cody Precord <cprecord@editra.org> #
# Copyright: (c) 2008 Cody Precord <staff@editra.org> #
# License: wxWindows License #
###############################################################################
"""
FILE: nsistags.py
AUTHOR: Cody Precord
LANGUAGE: Python
SUMMARY:
Generate a DocStruct object that captures the structure of a NSIS Script. It
currently supports generating tags for Sections, Functions, and Macro defs.
"""
# Module metadata; the $Id$/$Revision$ keywords were expanded by Subversion.
__author__ = "Cody Precord <cprecord@editra.org>"
__svnid__ = "$Id: nsistags.py 52675 2008-03-22 03:34:38Z CJP $"
__revision__ = "$Revision: 52675 $"
#--------------------------------------------------------------------------#
# Dependancies
import taglib
import parselib
#--------------------------------------------------------------------------#
def GenerateTags(buff):
    """Create a DocStruct object that represents a NSIS Script
    @param buff: a file like buffer object (StringIO)
    @todo: generate tags for lua tables?

    """
    rtags = taglib.DocStruct()

    # Set Descriptions of Document Element Types
    rtags.SetElementDescription('variable', "Defines")
    rtags.SetElementDescription('section', "Section Definitions")
    rtags.SetElementDescription('macro', "Macro Definitions")
    rtags.SetElementDescription('function', "Function Definitions")
    rtags.SetElementPriority('variable', 4)
    rtags.SetElementPriority('section', 3)
    rtags.SetElementPriority('function', 2)
    rtags.SetElementPriority('macro', 1)

    # Parse the lines for code objects
    for lnum, line in enumerate(buff):
        line = line.strip()
        llen = len(line)

        # Skip comment and empty lines
        if line.startswith(u"#") or line.startswith(u";") or not line:
            continue

        # Look for functions and sections
        if parselib.IsToken(line, 0, u'Function'):
            parts = line.split()
            if len(parts) > 1:
                rtags.AddFunction(taglib.Function(parts[1], lnum))
        elif parselib.IsToken(line, 0, u'Section'):
            parts = line.split()
            # Unquoted section name: tag it directly.
            if len(parts) > 1 and parts[1][0] not in ['"', "'", "`"]:
                rtags.AddElement('section', taglib.Section(parts[1], lnum))
            else:
                # Quoted section name: parts[idx] is the token *before*
                # `part`, so this tags the token following the closing
                # quote (the section's index/identifier). NOTE(review):
                # a section with only a quoted name and no identifier is
                # never tagged — confirm whether that is intended.
                for idx, part in enumerate(parts[1:]):
                    if parts[idx][-1] in ['"', "'", "`"]:
                        rtags.AddElement('section', taglib.Section(part, lnum))
                        break
        elif parselib.IsToken(line, 0, u'!macro'):
            parts = line.split()
            if len(parts) > 1:
                rtags.AddElement('macro', taglib.Macro(parts[1], lnum))
        elif parselib.IsToken(line, 0, u'!define'):
            parts = line.split()
            # only tag simple identifier defines (skip quoted/numeric)
            if len(parts) > 1 and parts[1][0].isalpha():
                rtags.AddVariable(taglib.Variable(parts[1], lnum))
        else:
            continue

    return rtags
#-----------------------------------------------------------------------------#
# Test
if __name__ == '__main__':
    # Manual smoke test (Python 2 only: print statements, StringIO module):
    # parse the file named on the command line and dump the generated tags.
    import sys
    import StringIO
    fhandle = open(sys.argv[1])
    txt = fhandle.read()
    fhandle.close()
    tags = GenerateTags(StringIO.StringIO(txt))
    print "\n\nElements:"
    for element in tags.GetElements():
        print "\n%s:" % element.keys()[0]
        for val in element.values()[0]:
            print "%s [%d]" % (val.GetName(), val.GetLine())
    print "END"
|
garrettcap/Bulletproof-Backup
|
wx/tools/Editra/plugins/codebrowser/codebrowser/gentag/nsistags.py
|
Python
|
gpl-2.0
| 3,752 | 0.001866 |
from collections import defaultdict
from itertools import chain
from operator import itemgetter
import re
from django.core.serializers.json import DjangoJSONEncoder
from django.db import connection
from django.db.models import Count, Max, F
from django.db.models.query import QuerySet
from django.contrib.auth.models import User
from main.models import Package, PackageFile, Arch, Repo
from main.utils import database_vendor, groupby_preserve_order, PackageStandin
from .models import (PackageGroup, PackageRelation,
License, Depend, Conflict, Provision, Replacement,
SignoffSpecification, Signoff, fake_signoff_spec)
from todolists.models import TodolistPackage
# Full package version: optional "epoch:" prefix, then "pkgver-pkgrel".
VERSION_RE = re.compile(r'^((\d+):)?(.+)-([^-]+)$')


def parse_version(version):
    """Split a full package version string into (pkgver, pkgrel, epoch).

    Accepts '[epoch:]pkgver-pkgrel'; a missing epoch defaults to 0.
    Returns (None, None, 0) when the string doesn't match that shape.
    """
    match = VERSION_RE.match(version)
    if match is None:
        return None, None, 0
    epoch_text = match.group(2)
    epoch = int(epoch_text) if epoch_text else 0
    return match.group(3), match.group(4), epoch
def get_group_info(include_arches=None):
    """Return a sorted list of {name, arch, count, last_update} dicts, one
    per (group, architecture) pair, with 'any' packages folded into every
    concrete architecture. Optionally filtered to `include_arches`."""
    raw_groups = PackageGroup.objects.values_list(
        'name', 'pkg__arch__name').order_by('name').annotate(
        cnt=Count('pkg'), last_update=Max('pkg__last_update'))
    # now for post_processing. we need to separate things out and add
    # the count in for 'any' to all of the other architectures.
    group_mapping = {}
    for grp in raw_groups:
        arch_groups = group_mapping.setdefault(grp[1], {})
        arch_groups[grp[0]] = {
            'name': grp[0], 'arch': grp[1],
            'count': grp[2], 'last_update': grp[3]
        }

    # we want to promote the count of 'any' packages in groups to the
    # other architectures, and also add any 'any'-only groups
    if 'any' in group_mapping:
        any_groups = group_mapping['any']
        del group_mapping['any']
        for arch, arch_groups in group_mapping.items():
            for grp in any_groups.values():
                if grp['name'] in arch_groups:
                    # merge counts and keep the newest update time
                    found = arch_groups[grp['name']]
                    found['count'] += grp['count']
                    if grp['last_update'] > found['last_update']:
                        found['last_update'] = grp['last_update']
                else:
                    new_g = grp.copy()
                    # override the arch to not be 'any'
                    new_g['arch'] = arch
                    arch_groups[grp['name']] = new_g

    # now transform it back into a sorted list, including only the specified
    # architectures if we got a list
    groups = []
    for key, val in group_mapping.items():
        if not include_arches or key in include_arches:
            groups.extend(iter(val.values()))
    return sorted(groups, key=itemgetter('name', 'arch'))
def get_split_packages_info():
    '''Return info on split packages that do not have an actual package name
    matching the split pkgbase.'''
    pkgnames = Package.objects.values('pkgname')
    split_pkgs = Package.objects.exclude(pkgname=F('pkgbase')).exclude(
        pkgbase__in=pkgnames).values('pkgbase', 'repo', 'arch').annotate(
        last_update=Max('last_update')).distinct()
    # replace the raw arch/repo ids with model objects, fetched in bulk
    all_arches = Arch.objects.in_bulk({s['arch'] for s in split_pkgs})
    all_repos = Repo.objects.in_bulk({s['repo'] for s in split_pkgs})
    for split in split_pkgs:
        split['arch'] = all_arches[split['arch']]
        split['repo'] = all_repos[split['repo']]
    return split_pkgs
class Difference(object):
    """A single pkgname/repo row in an architecture-difference report,
    holding the package object from each side (either may be None)."""

    def __init__(self, pkgname, repo, pkg_a, pkg_b):
        self.pkgname = pkgname
        self.repo = repo
        self.pkg_a = pkg_a
        self.pkg_b = pkg_b

    def classes(self):
        '''A list of CSS classes that should be applied to this row in any
        generated HTML. Useful for sorting, filtering, etc. Contains whether
        this difference is in both architectures or the sole architecture it
        belongs to, as well as the repo name.'''
        css_classes = [self.repo.name.lower()]
        if self.pkg_a and self.pkg_b:
            css_classes.append('both')
        elif self.pkg_a:
            css_classes.append(self.pkg_a.arch.name)
        elif self.pkg_b:
            css_classes.append(self.pkg_b.arch.name)
        return ' '.join(css_classes)

    def __key(self):
        # identity tuple used by both __eq__ and __hash__
        return (self.pkgname, hash(self.repo),
                hash(self.pkg_a), hash(self.pkg_b))

    def __eq__(self, other):
        return self.__key() == other.__key()

    def __hash__(self):
        return hash(self.__key())
def multilib_differences():
    """Return (multilib_pkg, regular_pkg) pairs where the multilib package's
    version differs from its regular x86_64 counterpart (matching lib32-*
    and *-multilib names to their base package)."""
    # Query for checking multilib out of date-ness
    # SQLite and the default backend spell SUBSTRING differently.
    if database_vendor(Package) == 'sqlite':
        pkgname_sql = """
            CASE WHEN ml.pkgname LIKE %s
                THEN SUBSTR(ml.pkgname, 7)
            WHEN ml.pkgname LIKE %s
                THEN SUBSTR(ml.pkgname, 1, LENGTH(ml.pkgname) - 9)
            ELSE
                ml.pkgname
            END
        """
    else:
        pkgname_sql = """
            CASE WHEN ml.pkgname LIKE %s
                THEN SUBSTRING(ml.pkgname, 7)
            WHEN ml.pkgname LIKE %s
                THEN SUBSTRING(ml.pkgname FROM 1 FOR CHAR_LENGTH(ml.pkgname) - 9)
            ELSE
                ml.pkgname
            END
        """
    sql = """
SELECT ml.id, reg.id
    FROM packages ml
    JOIN packages reg
    ON (
        reg.pkgname = (""" + pkgname_sql + """)
        AND reg.pkgver != ml.pkgver
    )
    JOIN repos r ON reg.repo_id = r.id
    WHERE ml.repo_id = %s
    AND r.testing = %s
    AND r.staging = %s
    AND reg.arch_id = %s
    ORDER BY ml.last_update
"""
    multilib = Repo.objects.get(name__iexact='multilib')
    x86_64 = Arch.objects.get(name='x86_64')
    # params: the two LIKE patterns for the CASE, then the WHERE values
    params = ['lib32-%', '%-multilib', multilib.id, False, False, x86_64.id]
    cursor = connection.cursor()
    cursor.execute(sql, params)
    results = cursor.fetchall()

    # fetch all of the necessary packages
    to_fetch = set(chain.from_iterable(results))
    pkgs = Package.objects.normal().in_bulk(to_fetch)

    return [(pkgs[ml], pkgs[reg]) for ml, reg in results]
def get_wrong_permissions():
    """Return maintainer PackageRelations whose user is not allowed (per
    user_profiles_allowed_repos) to touch the repo their package lives in."""
    sql = """
SELECT DISTINCT id
    FROM (
        SELECT pr.id, p.repo_id, pr.user_id
        FROM packages p
        JOIN packages_packagerelation pr ON p.pkgbase = pr.pkgbase
        WHERE pr.type = %s
        ) mp
    LEFT JOIN (
        SELECT user_id, repo_id FROM user_profiles_allowed_repos ar
        INNER JOIN user_profiles up ON ar.userprofile_id = up.id
        ) ur
    ON mp.user_id = ur.user_id AND mp.repo_id = ur.repo_id
    WHERE ur.user_id IS NULL;
"""
    cursor = connection.cursor()
    cursor.execute(sql, [PackageRelation.MAINTAINER])
    to_fetch = [row[0] for row in cursor.fetchall()]
    # re-fetch through the ORM so user/profile come back in one query
    relations = PackageRelation.objects.select_related(
        'user', 'user__userprofile').filter(
        id__in=to_fetch)
    return relations
def attach_maintainers(packages):
    '''Given a queryset or something resembling it of package objects, find all
    the maintainers and attach them to the packages to prevent N+1 query
    cascading.'''
    if isinstance(packages, QuerySet):
        # let the DB do the pkgbase projection lazily
        pkgbases = packages.values('pkgbase')
    else:
        packages = list(packages)
        pkgbases = {p.pkgbase for p in packages if p is not None}
    rels = PackageRelation.objects.filter(type=PackageRelation.MAINTAINER,
                                          pkgbase__in=pkgbases).values_list(
        'pkgbase', 'user_id').order_by().distinct()

    # get all the user objects we will need
    user_ids = {rel[1] for rel in rels}
    users = User.objects.in_bulk(user_ids)

    # now build a pkgbase -> [maintainers...] map
    maintainers = defaultdict(list)
    for rel in rels:
        maintainers[rel[0]].append(users[rel[1]])

    annotated = []
    # and finally, attach the maintainer lists on the original packages
    for package in packages:
        if package is None:
            continue
        package.maintainers = maintainers[package.pkgbase]
        annotated.append(package)

    return annotated
def approved_by_signoffs(signoffs, spec):
    """Return True when the non-revoked signoffs meet the required count
    from `spec`; an empty/falsy signoff collection is never approved."""
    if not signoffs:
        return False
    valid_count = sum(1 for signoff in signoffs if not signoff.revoked)
    return valid_count >= spec.required
class PackageSignoffGroup(object):
    '''Encompasses all packages in testing with the same pkgbase.'''
    def __init__(self, packages):
        if len(packages) == 0:
            # ValueError subclasses Exception, so callers catching the old
            # bare `raise Exception` still work.
            raise ValueError("packages must be a non-empty list")
        self.packages = packages
        self.user = None
        self.target_repo = None
        self.signoffs = set()
        self.default_spec = True

        first = packages[0]
        self.pkgbase = first.pkgbase
        self.arch = first.arch
        self.repo = first.repo
        self.version = ''
        self.last_update = first.last_update
        self.packager = first.packager
        self.maintainers = first.maintainers
        self.specification = fake_signoff_spec(first.arch)

        # only advertise a single version if every package in the group
        # agrees on it; otherwise leave it empty
        version = first.full_version
        if all(version == pkg.full_version for pkg in packages):
            self.version = version

    @property
    def package(self):
        '''Try and return a relevant single package object representing this
        group. Start by seeing if there is only one package, then look for the
        matching package by name, finally falling back to a standin package
        object.'''
        if len(self.packages) == 1:
            return self.packages[0]
        same_pkgs = [p for p in self.packages if p.pkgname == p.pkgbase]
        if same_pkgs:
            return same_pkgs[0]
        return PackageStandin(self.packages[0])

    def find_signoffs(self, all_signoffs):
        '''Look through a list of Signoff objects for ones matching this
        particular group and store them on the object.'''
        for s in all_signoffs:
            if s.pkgbase != self.pkgbase:
                continue
            if self.version and not s.full_version == self.version:
                continue
            if s.arch_id == self.arch.id and s.repo_id == self.repo.id:
                self.signoffs.add(s)

    def find_specification(self, specifications):
        '''Find and attach the first matching SignoffSpecification for this
        group; leaves the default (fake) spec in place when none match.'''
        for spec in specifications:
            if spec.pkgbase != self.pkgbase:
                continue
            if self.version and not spec.full_version == self.version:
                continue
            if spec.arch_id == self.arch.id and spec.repo_id == self.repo.id:
                self.specification = spec
                self.default_spec = False
                return

    def approved(self):
        '''True when the collected signoffs satisfy the specification.'''
        return approved_by_signoffs(self.signoffs, self.specification)

    @property
    def completed(self):
        '''Number of non-revoked signoffs collected so far.'''
        return sum(1 for s in self.signoffs if not s.revoked)

    @property
    def required(self):
        '''Number of signoffs the specification requires.'''
        return self.specification.required

    def user_signed_off(self, user=None):
        '''Did a given user signoff on this package? user can be passed as an
        argument, or attached to the group object itself so this can be called
        from a template.'''
        if user is None:
            user = self.user
        return user in (s.user for s in self.signoffs if not s.revoked)

    def __unicode__(self):
        # BUG FIX: the arch placeholder was literal text "(self.arch)" in the
        # original f-string, so the architecture was never interpolated.
        return f'{self.pkgbase}-{self.version} ({self.arch}): {len(self.signoffs)}'
def signoffs_id_query(model, repos):
    """Return ids of `model` rows (Signoff or SignoffSpecification) whose
    pkgbase/version/arch/repo still match an existing package in `repos`."""
    sql = """
SELECT DISTINCT s.id
    FROM %s s
    JOIN packages p ON (
        s.pkgbase = p.pkgbase
        AND s.pkgver = p.pkgver
        AND s.pkgrel = p.pkgrel
        AND s.epoch = p.epoch
        AND s.arch_id = p.arch_id
        AND s.repo_id = p.repo_id
    )
    WHERE p.repo_id IN (%s)
    AND s.repo_id IN (%s)
    """
    # fill in the table name and one placeholder per repo for each IN clause
    placeholders = ','.join('%s' for _ in repos)
    query = sql % (model._meta.db_table, placeholders, placeholders)
    repo_ids = [r.pk for r in repos]
    cursor = connection.cursor()
    # the repo id list feeds both IN (...) clauses, hence doubled
    cursor.execute(query, repo_ids * 2)
    return [row[0] for row in cursor.fetchall()]
def get_current_signoffs(repos):
    """Fetch every Signoff relevant to the given repos, with `user` preloaded."""
    matching_ids = signoffs_id_query(Signoff, repos)
    qs = Signoff.objects.select_related('user')
    return qs.in_bulk(matching_ids).values()
def get_current_specifications(repos):
    """Fetch every SignoffSpecification relevant to the given repos,
    with `arch` preloaded."""
    matching_ids = signoffs_id_query(SignoffSpecification, repos)
    qs = SignoffSpecification.objects.select_related('arch')
    return qs.in_bulk(matching_ids).values()
def get_target_repo_map(repos):
    """Map each pkgbase present in `repos` to the name of its non-staging,
    non-testing (i.e. target) repository."""
    sql = """
SELECT DISTINCT p1.pkgbase, r.name
    FROM packages p1
    JOIN repos r ON p1.repo_id = r.id
    JOIN packages p2 ON p1.pkgbase = p2.pkgbase
    WHERE r.staging = %s
    AND r.testing = %s
    AND p2.repo_id IN (
    """
    sql += ','.join('%s' for _ in repos)
    sql += ")"
    # staging=False, testing=False, then one pk per repo for the IN clause
    params = [False, False] + [r.pk for r in repos]
    cursor = connection.cursor()
    cursor.execute(sql, params)
    return dict(cursor.fetchall())
def get_signoff_groups(repos=None, user=None):
    """Build PackageSignoffGroup objects for every pkgbase in the testing
    repos, with signoffs, specifications and target repos attached.

    If `user` is given, keep only groups the user packaged or maintains.
    """
    if repos is None:
        repos = Repo.objects.filter(testing=True)
    repo_ids = [r.pk for r in repos]

    pkgs = Package.objects.select_related(
        'arch', 'repo', 'packager').filter(repo__in=repo_ids).order_by('pkgname')
    packages = attach_maintainers(pkgs)

    # Filter by user if asked to do so
    if user is not None:
        packages = [p for p in packages
                    if user == p.packager or user in p.maintainers]

    # pkgbase -> target repo names, plus all signoffs/specs for these repos
    pkgtorepo = get_target_repo_map(repos)
    signoffs = get_current_signoffs(repos)
    specs = get_current_specifications(repos)

    def group_key(pkg):
        return (pkg.repo.name, pkg.arch.name, pkg.pkgbase)

    signoff_groups = []
    for group in groupby_preserve_order(packages, group_key):
        sg = PackageSignoffGroup(group)
        sg.target_repo = pkgtorepo.get(sg.pkgbase, "Unknown")
        sg.find_signoffs(signoffs)
        sg.find_specification(specs)
        signoff_groups.append(sg)
    return signoff_groups
# (deptype column code, JSON attribute name) pairs used when serializing a
# package's dependency relations in PackageJSONEncoder below
DEPENDENCY_TYPES = [('D', 'depends'), ('O', 'optdepends'),
                    ('M', 'makedepends'), ('C', 'checkdepends')]
class PackageJSONEncoder(DjangoJSONEncoder):
    """JSON encoder that knows how to flatten package-related model objects.

    Anything not matched in default() falls through to DjangoJSONEncoder
    (dates, decimals, etc.). Note the isinstance checks are order-sensitive.
    """
    # scalar / foreign-key attributes copied straight off a Package
    pkg_attributes = ['pkgname', 'pkgbase', 'repo', 'arch', 'pkgver', 'pkgrel',
                      'epoch', 'pkgdesc', 'url', 'filename', 'compressed_size',
                      'installed_size', 'build_date', 'last_update', 'flag_date',
                      'maintainers', 'packager']
    # related managers serialized as lists of their items
    pkg_list_attributes = ['groups', 'licenses', 'conflicts', 'provides', 'replaces']
    # extra attributes appended when encoding a TodolistPackage
    todolistpackage_attributes = ['status_str']

    def default(self, obj):
        # default() is only reached for objects json can't serialize natively,
        # so plain strings never hit the __iter__ check below
        if hasattr(obj, '__iter__'):
            # mainly for queryset serialization
            return list(obj)
        if isinstance(obj, Package):
            data = {attr: getattr(obj, attr) for attr in self.pkg_attributes}
            for attr in self.pkg_list_attributes:
                data[attr] = getattr(obj, attr).all()
            # group dependency relations by type under their JSON key names
            all_deps = obj.depends.all()
            for (deptype, name) in DEPENDENCY_TYPES:
                data[name] = all_deps.filter(deptype=deptype)
            return data
        if isinstance(obj, PackageFile):
            # filename is None for directory entries; emit directory + name
            filename = obj.filename or ''
            return obj.directory + filename
        if isinstance(obj, (Repo, Arch)):
            return obj.name.lower()
        if isinstance(obj, (PackageGroup, License)):
            return obj.name
        if isinstance(obj, (Depend, Conflict, Provision, Replacement)):
            return str(obj)
        elif isinstance(obj, User):
            return obj.username
        elif isinstance(obj, TodolistPackage):
            # serialize the wrapped package, then bolt on todolist-only fields
            data = self.default(obj.pkg)
            for attr in self.todolistpackage_attributes:
                data[attr] = getattr(obj, attr)
            return data
        return super(PackageJSONEncoder, self).default(obj)
# vim: set ts=4 sw=4 et:
|
archlinux/archweb
|
packages/utils.py
|
Python
|
gpl-2.0
| 16,261 | 0.000553 |
from sys import version_info
from functools import reduce
from operator import mul
from flask_babel import gettext
# Python 2/3 compat: on py3 `unicode` is just `str`
if version_info[0] == 3:
    unicode = str

# answerer keywords: the first word of a matching query
keywords = ('min',
            'max',
            'avg',
            'sum',
            'prod')


# required answerer function
# can return a list of results (any result type) for a given query
def answer(query):
    """Compute min/max/avg/sum/prod of the numeric query arguments.

    Returns [] for anything that isn't "<func> <number> [<number> ...]".
    """
    parts = query.query.split()

    if len(parts) < 2:
        return []

    try:
        args = list(map(float, parts[1:]))
    except ValueError:
        # non-numeric argument -> not our query
        return []

    func = parts[0]
    # query.query may be bytes on Python 2; normalize so the string
    # comparisons below work on both versions. (The old code compared
    # against b'min' etc., which never matched str input on Python 3.)
    if isinstance(func, bytes):
        func = func.decode('utf-8')

    answer = None

    if func == 'min':
        answer = min(args)
    elif func == 'max':
        answer = max(args)
    elif func == 'avg':
        answer = sum(args) / len(args)
    elif func == 'sum':
        answer = sum(args)
    elif func == 'prod':
        answer = reduce(mul, args, 1)

    if answer is None:
        return []

    return [{'answer': unicode(answer)}]
# required answerer function
# returns information about the answerer
def self_info():
    """Return name/description/examples metadata shown in the searx UI."""
    return {'name': gettext('Statistics functions'),
            'description': gettext('Compute {functions} of the arguments').format(functions='/'.join(keywords)),
            'examples': ['avg 123 548 2.04 24.2']}
|
asciimoo/searx
|
searx/answerers/statistics/answerer.py
|
Python
|
agpl-3.0
| 1,243 | 0.001609 |
# Rekall Memory Forensics
# Copyright (C) 2007,2008 Volatile Systems
# Copyright (C) 2010,2011,2012 Michael Hale Ligh <michael.ligh@mnin.org>
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""The following is a description of windows stations from MSDN:
http://msdn.microsoft.com/en-us/library/windows/desktop/ms687096(v=vs.85).aspx
A window station contains a clipboard, an atom table, and one or more desktop
objects. Each window station object is a securable object. When a window station
is created, it is associated with the calling process and assigned to the
current session.
The interactive window station is the only window station that can display a
user interface or receive user input. It is assigned to the logon session of the
interactive user, and contains the keyboard, mouse, and display device. It is
always named "WinSta0". All other window stations are noninteractive, which
means they cannot display a user interface or receive user input.
Ref:
http://volatility-labs.blogspot.de/2012/09/movp-13-desktops-heaps-and-ransomware.html
NOTE: Windows 8 does not have a global atom table any more.
http://mista.nu/research/smashing_the_atom.pdf
"""
from rekall import plugin
from rekall.plugins.windows import common
from rekall.plugins.windows.gui import win32k_core
class WindowsStations(win32k_core.Win32kPluginMixin,
                      common.WindowsCommandPlugin):
    """Displays all the windows stations by following lists."""

    __name = "windows_stations"

    table_header = [
        dict(name="WindowStation", style="address"),
        dict(name="Name", width=20),
        dict(name="SesId", width=5),
        dict(name="AtomTable", style="address"),
        dict(name="Interactive", width=11),
        dict(name="Desktops")
    ]

    def stations_in_session(self, session):
        """Yield the tagWINDOWSTATION objects belonging to one session."""
        # grpWinStaList in win32k.sys is the head of the per-session window
        # station list; follow the rpwinstaNext links from there.
        head = self.win32k_profile.get_constant_object(
            "grpWinStaList",
            target="Pointer",
            target_args=dict(
                target="tagWINDOWSTATION"
            ),
            vm=session.obj_vm,
        )
        for entry in head.walk_list("rpwinstaNext"):
            yield entry

    def stations(self):
        """A generator of tagWINDOWSTATION objects across all sessions."""
        # Each windows session has its own set of window stations.
        sessions = self.session.plugins.sessions().session_spaces()
        for session in sessions:
            for station in self.stations_in_session(session):
                yield station

    def collect(self):
        for station in self.stations():
            desktop_names = [desk.Name for desk in station.desktops()]
            yield (station,
                   station.Name,
                   station.dwSessionId,
                   station.pGlobalAtomTable,
                   station.Interactive,
                   desktop_names)
class WinDesktops(plugin.VerbosityMixIn, WindowsStations):
    """Print information on each desktop."""

    __name = "desktops"

    table_header = [
        dict(name="tagDESKTOP", style="address"),
        dict(name="Name", width=20),
        dict(name="Sid", width=3),
        dict(name="Hooks", width=5),
        dict(name="tagWND", style="address"),
        dict(name="Winds", width=5),
        dict(name="Thrd", width=5),
        dict(name="_EPROCESS"),
    ]

    def collect(self):
        """Yield one divider row per desktop, then one row per thread
        attached to that desktop."""
        for window_station in self.stations():
            for desktop in window_station.desktops():
                # NOTE(review): `divider` is assembled as a flat tuple of
                # (format string, args..., format string, args...); no
                # .format() call happens here, so this relies on the table
                # renderer understanding that convention. The second format
                # string's {0}-{3} indices do not line up with positions in
                # the flattened 10-element tuple -- confirm against the
                # renderer that this produces the intended output.
                divider = ("Desktop: {0:addr}, Name: {1}\\{2}\n",
                           desktop,
                           window_station.Name,
                           desktop.Name)
                divider += ("Heap: {0:addr}, Size: {1:addr}, Base: {2:addr}, "
                            "Limit: {3:addr}\n",
                            desktop.pheapDesktop.v(),
                            (desktop.DeskInfo.pvDesktopLimit.v() -
                             desktop.DeskInfo.pvDesktopBase.v()),
                            desktop.DeskInfo.pvDesktopBase,
                            desktop.DeskInfo.pvDesktopLimit,
                            )
                yield dict(divider=divider)

                # count of windows reachable from this desktop's root window
                window_count = len(list(desktop.windows(
                    desktop.DeskInfo.spwnd)))

                for thrd in desktop.threads():
                    yield dict(
                        tagDESKTOP=desktop,
                        Name=desktop.Name,
                        Sid=desktop.dwSessionId,
                        Hooks=desktop.DeskInfo.fsHooks,
                        tagWND=desktop.DeskInfo.spwnd.deref(),
                        Winds=window_count,
                        Thrd=thrd.pEThread.Cid.UniqueThread,
                        _EPROCESS=thrd.ppi.Process.deref())
|
rainaashutosh/MyTestRekall
|
rekall-core/rekall/plugins/windows/gui/windowstations.py
|
Python
|
gpl-2.0
| 5,615 | 0.000534 |
# Authors: Eric Larson <larson.eric.d@gmail.com>
# Sheraz Khan <sheraz@khansheraz.com>
# Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
from ..filter import next_fast_len
from ..source_estimate import _BaseSourceEstimate
from ..utils import verbose, _check_combine, _check_option
@verbose
def envelope_correlation(data, combine='mean', orthogonalize="pairwise",
                         log=False, absolute=True, verbose=None):
    """Compute the envelope correlation.

    Parameters
    ----------
    data : array-like, shape=(n_epochs, n_signals, n_times) | generator
        The data from which to compute connectivity.
        The array-like object can also be a list/generator of array,
        each with shape (n_signals, n_times), or a :class:`~mne.SourceEstimate`
        object (and ``stc.data`` will be used). If it's float data,
        the Hilbert transform will be applied; if it's complex data,
        it's assumed the Hilbert has already been applied.
    combine : 'mean' | callable | None
        How to combine correlation estimates across epochs.
        Default is 'mean'. Can be None to return without combining.
        If callable, it must accept one positional input.
        For example::

            combine = lambda data: np.median(data, axis=0)

    orthogonalize : 'pairwise' | False
        Whether to orthogonalize with the pairwise method or not.
        Defaults to 'pairwise'. Note that when False,
        the correlation matrix will not be returned with
        absolute values.

        .. versionadded:: 0.19
    log : bool
        If True (default False), square and take the log before orthonalizing
        envelopes or computing correlations.

        .. versionadded:: 0.22
    absolute : bool
        If True (default), then take the absolute value of correlation
        coefficients before making each epoch's correlation matrix
        symmetric (and thus before combining matrices across epochs).
        Only used when ``orthogonalize=True``.

        .. versionadded:: 0.22
    %(verbose)s

    Returns
    -------
    corr : ndarray, shape ([n_epochs, ]n_nodes, n_nodes)
        The pairwise orthogonal envelope correlations.
        This matrix is symmetric. If combine is None, the array
        with have three dimensions, the first of which is ``n_epochs``.

    Notes
    -----
    This function computes the power envelope correlation between
    orthogonalized signals [1]_ [2]_.

    .. versionchanged:: 0.22
       Computations fixed for ``orthogonalize=True`` and diagonal entries are
       set explicitly to zero.

    References
    ----------
    .. [1] Hipp JF, Hawellek DJ, Corbetta M, Siegel M, Engel AK (2012)
           Large-scale cortical correlation structure of spontaneous
           oscillatory activity. Nature Neuroscience 15:884–890
    .. [2] Khan S et al. (2018). Maturation trajectories of cortical
           resting-state networks depend on the mediating frequency band.
           Neuroimage 174:57–68
    """
    _check_option('orthogonalize', orthogonalize, (False, 'pairwise'))
    from scipy.signal import hilbert
    n_nodes = None
    if combine is not None:
        fun = _check_combine(combine, valid=('mean',))
    else:  # None
        fun = np.array

    corrs = list()
    # Note: This is embarassingly parallel, but the overhead of sending
    # the data to different workers is roughly the same as the gain of
    # using multiple CPUs. And we require too much GIL for prefer='threading'
    # to help.
    for ei, epoch_data in enumerate(data):
        if isinstance(epoch_data, _BaseSourceEstimate):
            epoch_data = epoch_data.data
        if epoch_data.ndim != 2:
            raise ValueError('Each entry in data must be 2D, got shape %s'
                             % (epoch_data.shape,))
        n_nodes, n_times = epoch_data.shape
        # all epochs must have a consistent number of signals
        if ei > 0 and n_nodes != corrs[0].shape[0]:
            raise ValueError('n_nodes mismatch between data[0] and data[%d], '
                             'got %s and %s'
                             % (ei, n_nodes, corrs[0].shape[0]))
        # Get the complex envelope (allowing complex inputs allows people
        # to do raw.apply_hilbert if they want)
        if epoch_data.dtype in (np.float32, np.float64):
            n_fft = next_fast_len(n_times)
            epoch_data = hilbert(epoch_data, N=n_fft, axis=-1)[..., :n_times]

        if epoch_data.dtype not in (np.complex64, np.complex128):
            raise ValueError('data.dtype must be float or complex, got %s'
                             % (epoch_data.dtype,))
        # envelope magnitudes, and unit-magnitude conjugates (conj / |.|)
        # used by the pairwise orthogonalization below
        data_mag = np.abs(epoch_data)
        data_conj_scaled = epoch_data.conj()
        data_conj_scaled /= data_mag
        if log:
            data_mag *= data_mag
            np.log(data_mag, out=data_mag)

        # subtract means
        data_mag_nomean = data_mag - np.mean(data_mag, axis=-1, keepdims=True)

        # compute variances using linalg.norm (square, sum, sqrt) since mean=0
        data_mag_std = np.linalg.norm(data_mag_nomean, axis=-1)
        data_mag_std[data_mag_std == 0] = 1
        corr = np.empty((n_nodes, n_nodes))

        # one row of the correlation matrix per signal/label
        for li, label_data in enumerate(epoch_data):
            if orthogonalize is False:  # the new code
                label_data_orth = data_mag[li]
                label_data_orth_std = data_mag_std[li]
            else:
                label_data_orth = (label_data * data_conj_scaled).imag
                np.abs(label_data_orth, out=label_data_orth)
                # protect against invalid value -- this will be zero
                # after (log and) mean subtraction
                label_data_orth[li] = 1.
                if log:
                    label_data_orth *= label_data_orth
                    np.log(label_data_orth, out=label_data_orth)
                label_data_orth -= np.mean(label_data_orth, axis=-1,
                                           keepdims=True)
                label_data_orth_std = np.linalg.norm(label_data_orth, axis=-1)
                label_data_orth_std[label_data_orth_std == 0] = 1

            # correlation is dot product divided by variances
            corr[li] = np.sum(label_data_orth * data_mag_nomean, axis=1)
            corr[li] /= data_mag_std
            corr[li] /= label_data_orth_std
        if orthogonalize is not False:
            # Make it symmetric (it isn't at this point)
            if absolute:
                corr = np.abs(corr)
            corr = (corr.T + corr) / 2.

        corrs.append(corr)
        del corr

    corr = fun(corrs)
    return corr
|
Eric89GXL/mne-python
|
mne/connectivity/envelope.py
|
Python
|
bsd-3-clause
| 6,631 | 0 |
from pattern import Pattern
import itertools
import random
import colorsys
import time
class EqPattern(Pattern):
    """EQ-meter pattern: each tentacle displays one EQ band as a colored bar
    over a background color; on beats the two colors drift in hue."""

    def __init__(self, meter_color=(255, 100, 50), background_color=(0, 50, 255)):
        self.meter_r, self.meter_g, self.meter_b = meter_color
        self.bg_r, self.bg_g, self.bg_b = background_color

        self.register_param("max_hue_shift", 0, 0.5, 0.2)
        self.register_param("beat_channel", 0, 6, 2)
        self.register_param("max_bpm", 0, 200, 100)
        # NOTE(review): default 100 is outside the declared 0..1 range, and
        # prob_shift is never read in this class -- confirm intent upstream.
        self.register_param("prob_shift", 0, 1, 100)

        # earliest time the next hue shift may fire (rate-limits to max_bpm)
        self.next_shift = time.time()

    def meter_color(self):
        return (self.meter_r, self.meter_g, self.meter_b)

    def background_color(self):
        return (self.bg_r, self.bg_g, self.bg_b)

    def hue_shift(self, color, hue_shift):
        """Return `color` (0-255 RGB tuple) rotated in hue by `hue_shift`."""
        red, green, blue = (c / 255.0 for c in color)
        h, s, v = colorsys.rgb_to_hsv(red, green, blue)
        h += hue_shift % 1
        shifted = colorsys.hsv_to_rgb(h, s, v)
        return tuple(int(c * 255) for c in shifted)

    def next_frame(self, octopus, data):
        channel = int(round(self.beat_channel))
        now = time.time()
        if data.beats[channel] and now > self.next_shift:
            self.next_shift = now + 60.0 / self.max_bpm
            shift = self.max_hue_shift * (2 * random.random() - 1)
            # coin flip: shift either the meter color or the background
            if int(round(random.random())):
                self.meter_r, self.meter_g, self.meter_b = \
                    self.hue_shift(self.meter_color(), shift)
            else:
                self.bg_r, self.bg_g, self.bg_b = \
                    self.hue_shift(self.background_color(), shift)

        meter = self.meter_color()
        background = self.background_color()

        levels = itertools.cycle(data.eq)
        for tentacle in octopus.tentacles:
            level = next(levels)
            for strip in tentacle.led_strips:
                lit = int(len(strip.pixels) * float(level))
                colors = [meter] * lit + [background] * (len(strip.pixels) - lit)
                strip.put_pixels(colors)
|
TheGentlemanOctopus/thegentlemanoctopus
|
octopus_code/core/octopus/patterns/eqPattern.py
|
Python
|
gpl-3.0
| 2,854 | 0.004905 |
def get_avatar(backend, strategy, details, response, user=None, *args, **kwargs):
    """python-social-auth pipeline step: store the provider avatar URL on the user.

    Only the Facebook backend is handled; other backends leave the user
    untouched. The signature follows the social-auth pipeline contract.
    """
    url = None
    if backend.name == 'facebook':
        # Graph API picture endpoint (consider https when the provider allows it)
        url = "http://graph.facebook.com/%s/picture?type=small" % response['id']
    # `user` can legitimately be None (the pipeline may run before the user
    # object exists); guard so we don't crash with AttributeError
    if url and user is not None:
        user.avatar = url
        user.save()
|
AlexandroPQC/sara-ani
|
Saranani/apps/users/pipelines.py
|
Python
|
gpl-3.0
| 270 | 0.007407 |
from couchdbkit import ResourceNotFound
from toggle.models import Toggle
from toggle.shortcuts import update_toggle_cache, parse_toggle
def move_toggles(from_toggle_id, to_toggle_id):
    """Merge every enabled item from one toggle into another, then delete
    the source toggle. A no-op if the source toggle doesn't exist; the
    destination toggle is created if missing."""
    try:
        source = Toggle.get(from_toggle_id)
    except ResourceNotFound:
        # nothing to move
        return
    try:
        dest = Toggle.get(to_toggle_id)
    except ResourceNotFound:
        dest = Toggle(slug=to_toggle_id, enabled_users=[])

    for entry in source.enabled_users:
        if entry in dest.enabled_users:
            continue
        dest.enabled_users.append(entry)
        # keep the per-item toggle cache in sync with the new toggle
        namespace, item = parse_toggle(entry)
        update_toggle_cache(to_toggle_id, item, True, namespace=namespace)
    dest.save()
    source.delete()
|
qedsoftware/commcare-hq
|
corehq/apps/toggle_ui/migration_helpers.py
|
Python
|
bsd-3-clause
| 868 | 0 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
#
# Copyright 2013 Szymon Biliński
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import os
import shutil
import tempfile
import unittest
import zipfile
from coffea.java.java_scanner import JavaScanner
class Archive(object):
    """Builds a throwaway exploded-archive directory tree for tests.

    Use as a context manager: the tree lives under a private temp dir that
    is removed on exit. `files` records every file created via mkfile/mkzip
    and drives both __repr__ and compress().
    """

    def __init__(self, name='archive'):
        self.name = name
        self.files = []

    def __enter__(self):
        self._tmpdir = tempfile.mkdtemp()
        self.root_path = os.path.join(self._tmpdir, self.name)
        os.makedirs(self.root_path)
        return self

    def __exit__(self, type, value, traceback):
        shutil.rmtree(self._tmpdir)

    def __repr__(self):
        return 'Sample Archive:\n%s' % '\n'.join(self.files)

    def mkdir(self, root, name):
        """Create and return a subdirectory of `root`."""
        path = os.path.join(root, name)
        os.makedirs(path)
        return path

    def mkfile(self, root, name):
        """Create an empty file under `root`, record it, and return its path."""
        path = os.path.join(root, name)
        open(path, 'a').close()
        self.files.append(path)
        # consistency fix: mkdir/mkzip already return the created path
        return path

    def mkzip(self, root, name, entries):
        """Create a zip at root/name containing the given entry names.

        All entries share the contents of one empty placeholder temp file.
        """
        path = os.path.join(root, name)
        # `with` guarantees both files are closed even if a write fails
        with tempfile.NamedTemporaryFile() as placeholder:
            with zipfile.ZipFile(path, "w") as zf:
                for e in entries:
                    zf.write(placeholder.name, e)
        self.files.append(path)
        return path

    def compress(self):
        """Zip everything recorded in `files` (rooted at root_path) and
        return the resulting archive's path."""
        path = os.path.join(self._tmpdir, 'compressed-' + self.name)
        with zipfile.ZipFile(path, 'w', zipfile.ZIP_DEFLATED) as zf:
            for f in self.files:
                zf.write(f, f.replace(self.root_path, ''))
        return path
class SampleJar(Archive):
    """Exploded JAR layout: com/example holding two class files."""

    def __init__(self):
        super(SampleJar, self).__init__('sample-lib.jar')

    def __enter__(self):
        super(SampleJar, self).__enter__()
        self.com_path = self.mkdir(self.root_path, 'com')
        self.com_example_path = self.mkdir(self.com_path, 'example')
        for cls in ('Component.class', 'ComponentImpl.class'):
            self.mkfile(self.com_example_path, cls)
        return self

    def __repr__(self):
        banner = '*' * 20
        return 'Sample Jar:\n%s\n%s\n%s' % (banner, '\n'.join(self.files), banner)
class SampleWar(Archive):
    """Exploded WAR layout: WEB-INF (classes + two lib jars) plus static
    content (jsp/css/img)."""

    def __init__(self):
        super(SampleWar, self).__init__('sample-webapp.war')

    def __enter__(self):
        super(SampleWar, self).__enter__()
        root = self.root_path
        self.webinf_path = self.mkdir(root, 'WEB-INF')
        self.classes_path = self.mkdir(self.webinf_path, 'classes')
        self.lib_path = self.mkdir(self.webinf_path, 'lib')
        self.css_path = self.mkdir(root, 'css')
        self.img_path = self.mkdir(root, 'img')

        self.mkfile(self.webinf_path, 'web.xml')
        self.mkfile(self.webinf_path, 'applicationContext.xml')
        for cls in ('Controller.class', 'Model.class', 'View.class'):
            self.mkfile(self.classes_path, cls)
        self.mkfile(root, 'index.jsp')
        self.mkfile(self.css_path, 'main.css')
        self.mkfile(self.img_path, 'logo.png')

        self.mkzip(self.lib_path, 'service.jar',
                   ['com/example/ServiceImpl.class',
                    'com/example/ServiceImplHelper.class'])
        self.mkzip(self.lib_path, 'service-api.jar', ['com/example/Service.class'])
        return self

    def __repr__(self):
        banner = '*' * 20
        return 'Sample War:\n%s\n%s\n%s' % (banner, '\n'.join(self.files), banner)
class SampleEar(Archive):
    """Exploded EAR layout: META-INF descriptors plus library, EJB and WAR
    modules."""

    def __init__(self):
        super(SampleEar, self).__init__('sample-app.ear')

    def __enter__(self):
        super(SampleEar, self).__enter__()
        self.metainf_path = self.mkdir(self.root_path, 'META-INF')
        self.lib_path = self.mkdir(self.root_path, 'lib')

        self.mkfile(self.metainf_path, 'application.xml')
        self.mkfile(self.metainf_path, 'MANIFEST.MF')

        self.mkzip(self.lib_path, 'commons.jar',
                   ['com/example/CommonClass.class',
                    'com/example/CommonClassFactory.class',
                    'com/example/CommonClassHelper.class'])
        self.mkzip(self.root_path, 'business-component.jar',
                   ['com/example/Service.class',
                    'com/example/ServiceBean.class'])
        # TODO: Create sample JARs in /WEB-INF/lib
        self.mkzip(self.root_path, 'sample-webapp.war',
                   ['/WEB-INF/web.xml',
                    'com/example/CommonClass.class',
                    'com/example/CommonClassHelper.class'])
        return self

    def __repr__(self):
        banner = '*' * 20
        return 'Sample Ear:\n%s\n%s\n%s' % (banner, '\n'.join(self.files), banner)
class TestJavaScanner(unittest.TestCase):
    """Exercises JavaScanner file-type detection and archive traversal
    against the synthetic jar/war/ear trees built above.

    Uses the non-deprecated assertEqual (assertEquals is a deprecated alias).
    """

    def setUp(self):
        # scanner with a mocked per-class-file callback; its temp work dir
        # must exist once constructed
        self.scanner = JavaScanner(callback=mock.MagicMock())
        self.assertTrue(os.path.isdir(self.scanner._work_dir))

    def tearDown(self):
        # dispose() must remove the scanner's temp work dir
        self.scanner.dispose()
        self.assertFalse(os.path.isdir(self.scanner._work_dir))

    def test_supported_files(self):
        scanner = self.scanner

        def supported(name):
            with tempfile.NamedTemporaryFile(suffix=name) as f:
                return scanner.supported_file(f.name)

        # class files and Java archives are supported...
        self.assertTrue(supported('Test.class'))
        self.assertTrue(supported('test-1.0.jar'))
        self.assertTrue(supported('test-1.0.war'))
        self.assertTrue(supported('test-1.0.ear'))
        # ...build/config/source files are not
        self.assertFalse(supported('build.properties'))
        self.assertFalse(supported('pom.xml'))
        self.assertFalse(supported('build.gradle'))
        self.assertFalse(supported('README'))
        self.assertFalse(supported('Test.java'))

    def test_scan_file(self):
        scanner = self.scanner

        with tempfile.NamedTemporaryFile(suffix='.xml') as not_supported_file:
            scanner.callback.reset_mock()
            self.assertEqual(scanner.scan(not_supported_file.name), 0)
            self.assertEqual(scanner.callback.call_count, 0)

        with tempfile.NamedTemporaryFile(suffix='.class') as class_file:
            self.assertEqual(scanner.scan(class_file.name), 1)
            self.assertEqual(scanner.callback.call_count, 1)
            scanner.callback.assert_any_call(class_file.name)

        with SampleJar() as exploded_jar:
            jar = exploded_jar.compress()
            scanner.callback.reset_mock()
            # 2 class files under com/example
            self.assertEqual(scanner.scan(jar), 2)
            self.assertEqual(scanner.callback.call_count, 2)

        with SampleWar() as exploded_war:
            war = exploded_war.compress()
            scanner.callback.reset_mock()
            # 3 loose classes + 3 classes inside the two WEB-INF/lib jars
            self.assertEqual(scanner.scan(war), 6)
            self.assertEqual(scanner.callback.call_count, 6)

        with SampleEar() as exploded_ear:
            ear = exploded_ear.compress()
            scanner.callback.reset_mock()
            # 3 (commons.jar) + 2 (business-component.jar) + 2 (war classes)
            self.assertEqual(scanner.scan(ear), 7)
            self.assertEqual(scanner.callback.call_count, 7)

    def test_scan_directory(self):
        scanner = self.scanner

        with SampleJar() as exploded_jar:
            scanner.callback.reset_mock()
            self.assertEqual(scanner.scan(exploded_jar.root_path), 2)
            self.assertEqual(scanner.callback.call_count, 2)

        with SampleWar() as exploded_war:
            scanner.callback.reset_mock()
            self.assertEqual(scanner.scan(exploded_war.root_path), 6)
            self.assertEqual(scanner.callback.call_count, 6)

        with SampleEar() as exploded_ear:
            scanner.callback.reset_mock()
            self.assertEqual(scanner.scan(exploded_ear.root_path), 7)
            self.assertEqual(scanner.callback.call_count, 7)

    def test_with_contract(self):
        # the scanner is itself a context manager: work dir created on
        # enter, removed on exit
        with JavaScanner(callback=mock.MagicMock()) as s:
            self.assertTrue(s)
            self.assertTrue(os.path.isdir(s._work_dir))
        self.assertFalse(os.path.isdir(s._work_dir))
|
sbilinski/coffea
|
coffea/java/tests/test_java_scanner.py
|
Python
|
apache-2.0
| 8,818 | 0.0093 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import paddle
import paddle.fluid as fluid
import numpy as np
import inspect
class TestMathOpPatchesVarBase(unittest.TestCase):
    def setUp(self):
        # shape/dtype shared by the random operands in every test below
        self.shape = [10, 1024]
        self.dtype = np.float32
    def test_add(self):
        # VarBase + VarBase matches elementwise NumPy addition
        a_np = np.random.random(self.shape).astype(self.dtype)
        b_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = fluid.dygraph.to_variable(b_np)
            res = a + b
            self.assertTrue(np.array_equal(res.numpy(), a_np + b_np))
    def test_sub(self):
        # VarBase - VarBase matches elementwise NumPy subtraction
        a_np = np.random.random(self.shape).astype(self.dtype)
        b_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = fluid.dygraph.to_variable(b_np)
            res = a - b
            self.assertTrue(np.array_equal(res.numpy(), a_np - b_np))
    def test_mul(self):
        # VarBase * VarBase matches elementwise NumPy multiplication
        a_np = np.random.random(self.shape).astype(self.dtype)
        b_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = fluid.dygraph.to_variable(b_np)
            res = a * b
            self.assertTrue(np.array_equal(res.numpy(), a_np * b_np))
    def test_div(self):
        # VarBase / VarBase matches elementwise NumPy division (allclose)
        a_np = np.random.random(self.shape).astype(self.dtype)
        b_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = fluid.dygraph.to_variable(b_np)
            res = a / b
            #NOTE: Not sure why array_equal fails on windows, allclose is acceptable
            self.assertTrue(np.allclose(res.numpy(), a_np / b_np))
    def test_add_scalar(self):
        # VarBase + python scalar broadcasts like NumPy
        a_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = 0.1
            res = a + b
            self.assertTrue(np.array_equal(res.numpy(), a_np + b))
    def test_add_scalar_reverse(self):
        # scalar + VarBase exercises the reflected __radd__ path
        a_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = 0.1
            res = b + a
            self.assertTrue(np.array_equal(res.numpy(), b + a_np))
    def test_sub_scalar(self):
        # VarBase - python scalar broadcasts like NumPy
        a_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = 0.1
            res = a - b
            self.assertTrue(np.array_equal(res.numpy(), a_np - b))
    def test_sub_scalar_reverse(self):
        # scalar - VarBase exercises the reflected __rsub__ path
        a_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = 0.1
            res = b - a
            self.assertTrue(np.array_equal(res.numpy(), b - a_np))
    def test_mul_scalar(self):
        # VarBase * python scalar broadcasts like NumPy
        a_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = 0.1
            res = a * b
            self.assertTrue(np.array_equal(res.numpy(), a_np * b))
    # div_scalar: float division is compared with allclose, not exact equality
    def test_div_scalar(self):
        a_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = 0.1
            res = a / b
            self.assertTrue(np.allclose(res.numpy(), a_np / b))
    # pow of float type: compared with allclose, not exact equality
    def test_pow(self):
        a_np = np.random.random(self.shape).astype(self.dtype)
        b_np = np.random.random(self.shape).astype(self.dtype)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = fluid.dygraph.to_variable(b_np)
            res = a**b
            self.assertTrue(np.allclose(res.numpy(), a_np**b_np))
    def test_floor_div(self):
        # integer // matches NumPy floor division (positive operands only)
        a_np = np.random.randint(1, 100, size=self.shape)
        b_np = np.random.randint(1, 100, size=self.shape)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = fluid.dygraph.to_variable(b_np)
            res = a // b
            self.assertTrue(np.array_equal(res.numpy(), a_np // b_np))
    def test_mod(self):
        # integer % matches NumPy modulo (positive operands only)
        a_np = np.random.randint(1, 100, size=self.shape)
        b_np = np.random.randint(1, 100, size=self.shape)
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = fluid.dygraph.to_variable(b_np)
            res = a % b
            self.assertTrue(np.array_equal(res.numpy(), a_np % b_np))
    # for bitwise and/or/xor/not
    def test_bitwise(self):
        # &, |, ^ and ~ on integer tensors must match the NumPy operators
        paddle.disable_static()

        x_np = np.random.randint(-100, 100, [2, 3, 5])
        y_np = np.random.randint(-100, 100, [2, 3, 5])
        x = paddle.to_tensor(x_np)
        y = paddle.to_tensor(y_np)

        out_np = x_np & y_np
        out = x & y
        self.assertTrue(np.array_equal(out.numpy(), out_np))

        out_np = x_np | y_np
        out = x | y
        self.assertTrue(np.array_equal(out.numpy(), out_np))

        out_np = x_np ^ y_np
        out = x ^ y
        self.assertTrue(np.array_equal(out.numpy(), out_np))

        out_np = ~x_np
        out = ~x
        self.assertTrue(np.array_equal(out.numpy(), out_np))
    # for logical compare
    def test_equal(self):
        # == yields an elementwise boolean tensor matching NumPy
        a_np = np.asarray([1, 2, 3, 4, 5])
        b_np = np.asarray([1, 2, 3, 4, 5])
        c_np = np.asarray([1, 2, 2, 4, 5])
        with fluid.dygraph.guard():
            a = fluid.dygraph.to_variable(a_np)
            b = fluid.dygraph.to_variable(b_np)
            c = fluid.dygraph.to_variable(c_np)
            res1 = (a == b)
            res2 = (a == c)
            self.assertTrue(np.array_equal(res1.numpy(), a_np == b_np))
            self.assertTrue(np.array_equal(res2.numpy(), a_np == c_np))
def test_not_equal(self):
    """!= yields elementwise inequality against equal and unequal data."""
    a_np = np.asarray([1, 2, 3, 4, 5])
    b_np = np.asarray([1, 2, 3, 4, 5])
    # c_np differs from a_np at index 2 to exercise the True branch.
    c_np = np.asarray([1, 2, 2, 4, 5])
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        b = fluid.dygraph.to_variable(b_np)
        c = fluid.dygraph.to_variable(c_np)
        res1 = (a != b)
        res2 = (a != c)
        self.assertTrue(np.array_equal(res1.numpy(), a_np != b_np))
        self.assertTrue(np.array_equal(res2.numpy(), a_np != c_np))
def test_less_than(self):
    """< yields an elementwise boolean tensor matching numpy's result."""
    a_np = np.random.random(self.shape).astype(self.dtype)
    b_np = np.random.random(self.shape).astype(self.dtype)
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        b = fluid.dygraph.to_variable(b_np)
        res = (a < b)
        self.assertTrue(np.array_equal(res.numpy(), a_np < b_np))
def test_less_equal(self):
    """<= yields an elementwise boolean tensor matching numpy's result."""
    a_np = np.random.random(self.shape).astype(self.dtype)
    b_np = np.random.random(self.shape).astype(self.dtype)
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        b = fluid.dygraph.to_variable(b_np)
        res = (a <= b)
        self.assertTrue(np.array_equal(res.numpy(), a_np <= b_np))
def test_greater_than(self):
    """> yields an elementwise boolean tensor matching numpy's result."""
    a_np = np.random.random(self.shape).astype(self.dtype)
    b_np = np.random.random(self.shape).astype(self.dtype)
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        b = fluid.dygraph.to_variable(b_np)
        res = (a > b)
        self.assertTrue(np.array_equal(res.numpy(), a_np > b_np))
def test_greater_equal(self):
    """>= yields an elementwise boolean tensor matching numpy's result."""
    a_np = np.random.random(self.shape).astype(self.dtype)
    b_np = np.random.random(self.shape).astype(self.dtype)
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        b = fluid.dygraph.to_variable(b_np)
        res = (a >= b)
        self.assertTrue(np.array_equal(res.numpy(), a_np >= b_np))
def test_neg(self):
    """Unary minus negates every element of the Variable."""
    data = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
    with fluid.dygraph.guard():
        var = fluid.dygraph.to_variable(data)
        negated = -var
        self.assertTrue(np.array_equal(negated.numpy(), -data))
def test_float_int_long(self):
    """float() and int() on a one-element Variable return Python scalars.

    The original test asserted ``int(a) == 100`` twice; the second assert
    was the Python 2 ``long(a)`` check, which after the 2-to-3 conversion
    became an exact duplicate.  The redundant line is removed.
    """
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(np.array([100.1]))
        self.assertTrue(float(a) == 100.1)
        self.assertTrue(int(a) == 100)
def test_len(self):
    """len() of a Variable returns the length of its first dimension."""
    a_np = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        # NOTE(review): the expected value 10 is hard-coded; it assumes
        # self.shape[0] == 10 as set elsewhere in the fixture -- confirm.
        self.assertTrue(len(a) == 10)
def test_index(self):
    """A one-element integer Variable can act as a Python index.

    Covers three __index__ consumers: range(), list indexing and string
    indexing.
    """
    with fluid.dygraph.guard():
        var1 = fluid.dygraph.to_variable(np.array([2]))
        # range(var1) iterates 0, 1 -- i.e. the tensor behaves as the int 2.
        i_tmp = 0
        for i in range(var1):
            self.assertTrue(i == i_tmp)
            i_tmp = i_tmp + 1
        list1 = [1, 2, 3, 4, 5]
        self.assertTrue(list1[var1] == 3)
        str1 = "just test"
        self.assertTrue(str1[var1] == 's')
def test_np_left_mul(self):
    """A numpy scalar on the LEFT of * dispatches to the Variable's __rmul__."""
    with fluid.dygraph.guard():
        # t is a numpy float64 scalar, not a Python float.
        t = np.sqrt(2.0 * np.pi)
        x = fluid.layers.ones((2, 2), dtype="float32")
        y = t * x
        self.assertTrue(
            np.allclose(
                y.numpy(),
                t * np.ones(
                    (2, 2), dtype="float32"),
                rtol=1e-05,
                atol=0.0))
def test_add_different_dtype(self):
    """float32 + float16 Variables add without an explicit cast.

    The expected value relies on numpy's own float32/float16 promotion
    matching Paddle's -- the comparison is exact, so they must agree.
    """
    a_np = np.random.random(self.shape).astype(np.float32)
    b_np = np.random.random(self.shape).astype(np.float16)
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        b = fluid.dygraph.to_variable(b_np)
        res = a + b
        self.assertTrue(np.array_equal(res.numpy(), a_np + b_np))
def test_floordiv_different_dtype(self):
    """int64 // int32 floor-divides with implicit dtype promotion."""
    # Constant operands (10 // 2) keep the expected result deterministic.
    a_np = np.full(self.shape, 10, np.int64)
    b_np = np.full(self.shape, 2, np.int32)
    with fluid.dygraph.guard():
        a = paddle.to_tensor(a_np)
        b = paddle.to_tensor(b_np)
        res = a // b
        self.assertTrue(np.array_equal(res.numpy(), a_np // b_np))
def test_astype(self):
    """astype accepts a numpy dtype, a string and a VarDesc.VarType alike.

    All three spellings must produce the same dtype and the same data.
    """
    a_np = np.random.uniform(-1, 1, self.shape).astype(self.dtype)
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        res1 = a.astype(np.float16)
        res2 = a.astype('float16')
        res3 = a.astype(fluid.core.VarDesc.VarType.FP16)
        self.assertEqual(res1.dtype, res2.dtype)
        self.assertEqual(res1.dtype, res3.dtype)
        self.assertTrue(np.array_equal(res1.numpy(), res2.numpy()))
        self.assertTrue(np.array_equal(res1.numpy(), res3.numpy()))
# NOTE(review): "conpare" is a typo for "compare"; the name is kept because
# renaming a test method changes what the runner discovers/reports.
def test_conpare_op_broadcast(self):
    """Comparison ops broadcast ([10,1,10] vs [1,1,10]) and return BOOL."""
    a_np = np.random.uniform(-1, 1, [10, 1, 10]).astype(self.dtype)
    b_np = np.random.uniform(-1, 1, [1, 1, 10]).astype(self.dtype)
    with fluid.dygraph.guard():
        a = fluid.dygraph.to_variable(a_np)
        b = fluid.dygraph.to_variable(b_np)
        self.assertEqual((a != b).dtype, fluid.core.VarDesc.VarType.BOOL)
        self.assertTrue(np.array_equal((a != b).numpy(), a_np != b_np))
def test_tensor_patch_method(self):
    """Every patched Tensor method must agree with its paddle.* counterpart.

    Structure: (1) unary ops, (2) binary ops, (3) bool-tensor ops, then a
    final sweep asserting that a long list of names are bound as methods.
    Each check calls the method form (``x.op(...)``) and the functional
    form (``paddle.op(x, ...)``) on the same inputs and compares results.
    """
    paddle.disable_static()
    x_np = np.random.uniform(-1, 1, [2, 3]).astype(self.dtype)
    y_np = np.random.uniform(-1, 1, [2, 3]).astype(self.dtype)
    z_np = np.random.uniform(-1, 1, [6, 9]).astype(self.dtype)
    x = paddle.to_tensor(x_np)
    y = paddle.to_tensor(y_np)
    z = paddle.to_tensor(z_np)
    a = paddle.to_tensor([[1, 1], [2, 2], [3, 3]])
    b = paddle.to_tensor([[1, 1], [2, 2], [3, 3]])
    # 1. Unary operation for Tensor
    self.assertEqual(x.dim(), 2)
    self.assertEqual(x.ndimension(), 2)
    self.assertEqual(x.ndim, 2)
    self.assertEqual(x.size, 6)
    self.assertEqual(x.numel(), 6)
    self.assertTrue(np.array_equal(x.exp().numpy(), paddle.exp(x).numpy()))
    self.assertTrue(
        np.array_equal(x.tanh().numpy(), paddle.tanh(x).numpy()))
    self.assertTrue(
        np.array_equal(x.atan().numpy(), paddle.atan(x).numpy()))
    self.assertTrue(np.array_equal(x.abs().numpy(), paddle.abs(x).numpy()))
    # sqrt/rsqrt need non-negative input, hence abs() first.
    m = x.abs()
    self.assertTrue(
        np.array_equal(m.sqrt().numpy(), paddle.sqrt(m).numpy()))
    self.assertTrue(
        np.array_equal(m.rsqrt().numpy(), paddle.rsqrt(m).numpy()))
    self.assertTrue(
        np.array_equal(x.ceil().numpy(), paddle.ceil(x).numpy()))
    self.assertTrue(
        np.array_equal(x.floor().numpy(), paddle.floor(x).numpy()))
    self.assertTrue(np.array_equal(x.cos().numpy(), paddle.cos(x).numpy()))
    self.assertTrue(
        np.array_equal(x.acos().numpy(), paddle.acos(x).numpy()))
    self.assertTrue(
        np.array_equal(x.asin().numpy(), paddle.asin(x).numpy()))
    self.assertTrue(np.array_equal(x.sin().numpy(), paddle.sin(x).numpy()))
    self.assertTrue(
        np.array_equal(x.sinh().numpy(), paddle.sinh(x).numpy()))
    self.assertTrue(
        np.array_equal(x.cosh().numpy(), paddle.cosh(x).numpy()))
    self.assertTrue(
        np.array_equal(x.round().numpy(), paddle.round(x).numpy()))
    self.assertTrue(
        np.array_equal(x.reciprocal().numpy(), paddle.reciprocal(x).numpy(
        )))
    self.assertTrue(
        np.array_equal(x.square().numpy(), paddle.square(x).numpy()))
    self.assertTrue(
        np.array_equal(x.rank().numpy(), paddle.rank(x).numpy()))
    self.assertTrue(
        np.array_equal(x[0].t().numpy(), paddle.t(x[0]).numpy()))
    self.assertTrue(
        np.array_equal(x.asinh().numpy(), paddle.asinh(x).numpy()))
    ### acosh(x) = nan, need to change input
    t_np = np.random.uniform(1, 2, [2, 3]).astype(self.dtype)
    t = paddle.to_tensor(t_np)
    self.assertTrue(
        np.array_equal(t.acosh().numpy(), paddle.acosh(t).numpy()))
    self.assertTrue(
        np.array_equal(x.atanh().numpy(), paddle.atanh(x).numpy()))
    # d.matmul(d.t()) yields a symmetric positive-definite matrix so that
    # cholesky() below is well defined.
    d = paddle.to_tensor([[1.2285208, 1.3491015, 1.4899898],
                          [1.30058, 1.0688717, 1.4928783],
                          [1.0958099, 1.3724753, 1.8926544]])
    d = d.matmul(d.t())
    # ROCM not support cholesky
    if not fluid.core.is_compiled_with_rocm():
        self.assertTrue(
            np.array_equal(d.cholesky().numpy(), paddle.cholesky(d).numpy(
            )))
    self.assertTrue(
        np.array_equal(x.is_empty().numpy(), paddle.is_empty(x).numpy()))
    self.assertTrue(
        np.array_equal(x.isfinite().numpy(), paddle.isfinite(x).numpy()))
    self.assertTrue(
        np.array_equal(
            x.cast('int32').numpy(), paddle.cast(x, 'int32').numpy()))
    self.assertTrue(
        np.array_equal(
            x.expand([3, 2, 3]).numpy(),
            paddle.expand(x, [3, 2, 3]).numpy()))
    self.assertTrue(
        np.array_equal(
            x.tile([2, 2]).numpy(), paddle.tile(x, [2, 2]).numpy()))
    self.assertTrue(
        np.array_equal(x.flatten().numpy(), paddle.flatten(x).numpy()))
    index = paddle.to_tensor([0, 1])
    self.assertTrue(
        np.array_equal(
            x.gather(index).numpy(), paddle.gather(x, index).numpy()))
    index = paddle.to_tensor([[0, 1], [1, 2]])
    self.assertTrue(
        np.array_equal(
            x.gather_nd(index).numpy(), paddle.gather_nd(x, index).numpy()))
    self.assertTrue(
        np.array_equal(
            x.reverse([0, 1]).numpy(), paddle.reverse(x, [0, 1]).numpy()))
    self.assertTrue(
        np.array_equal(
            a.reshape([3, 2]).numpy(), paddle.reshape(a, [3, 2]).numpy()))
    self.assertTrue(
        np.array_equal(
            x.slice([0, 1], [0, 0], [1, 2]).numpy(),
            paddle.slice(x, [0, 1], [0, 0], [1, 2]).numpy()))
    self.assertTrue(
        np.array_equal(
            x.split(2)[0].numpy(), paddle.split(x, 2)[0].numpy()))
    m = paddle.to_tensor(
        np.random.uniform(-1, 1, [1, 6, 1, 1]).astype(self.dtype))
    self.assertTrue(
        np.array_equal(
            m.squeeze([]).numpy(), paddle.squeeze(m, []).numpy()))
    self.assertTrue(
        np.array_equal(
            m.squeeze([1, 2]).numpy(), paddle.squeeze(m, [1, 2]).numpy()))
    m = paddle.to_tensor([2, 3, 3, 1, 5, 3], 'float32')
    self.assertTrue(
        np.array_equal(m.unique()[0].numpy(), paddle.unique(m)[0].numpy()))
    # NOTE(review): from here down several comparisons pass Tensors (not
    # .numpy() arrays) straight to np.array_equal -- presumably relying on
    # implicit array conversion of dygraph tensors; verify intentional.
    self.assertTrue(
        np.array_equal(
            m.unique(return_counts=True)[1],
            paddle.unique(
                m, return_counts=True)[1]))
    self.assertTrue(np.array_equal(x.flip([0]), paddle.flip(x, [0])))
    self.assertTrue(np.array_equal(x.unbind(0), paddle.unbind(x, 0)))
    self.assertTrue(np.array_equal(x.roll(1), paddle.roll(x, 1)))
    self.assertTrue(np.array_equal(x.cumsum(1), paddle.cumsum(x, 1)))
    m = paddle.to_tensor(1)
    self.assertTrue(np.array_equal(m.increment(), paddle.increment(m)))
    m = x.abs()
    self.assertTrue(np.array_equal(m.log(), paddle.log(m)))
    self.assertTrue(np.array_equal(x.pow(2), paddle.pow(x, 2)))
    self.assertTrue(np.array_equal(x.reciprocal(), paddle.reciprocal(x)))
    # 2. Binary operation
    self.assertTrue(
        np.array_equal(x.divide(y).numpy(), paddle.divide(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.matmul(y, True, False).numpy(),
            paddle.matmul(x, y, True, False).numpy()))
    self.assertTrue(
        np.array_equal(
            x.norm(
                p='fro', axis=[0, 1]).numpy(),
            paddle.norm(
                x, p='fro', axis=[0, 1]).numpy()))
    self.assertTrue(
        np.array_equal(x.dist(y).numpy(), paddle.dist(x, y).numpy()))
    self.assertTrue(
        np.array_equal(x.cross(y).numpy(), paddle.cross(x, y).numpy()))
    m = x.expand([2, 2, 3])
    n = y.expand([2, 2, 3]).transpose([0, 2, 1])
    self.assertTrue(
        np.array_equal(m.bmm(n).numpy(), paddle.bmm(m, n).numpy()))
    self.assertTrue(
        np.array_equal(
            x.histogram(5, -1, 1).numpy(),
            paddle.histogram(x, 5, -1, 1).numpy()))
    self.assertTrue(
        np.array_equal(x.equal(y).numpy(), paddle.equal(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.greater_equal(y).numpy(), paddle.greater_equal(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.greater_than(y).numpy(), paddle.greater_than(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.less_equal(y).numpy(), paddle.less_equal(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.less_than(y).numpy(), paddle.less_than(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.not_equal(y).numpy(), paddle.not_equal(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.equal_all(y).numpy(), paddle.equal_all(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.allclose(y).numpy(), paddle.allclose(x, y).numpy()))
    m = x.expand([2, 2, 3])
    self.assertTrue(
        np.array_equal(
            x.expand_as(m).numpy(), paddle.expand_as(x, m).numpy()))
    index = paddle.to_tensor([2, 1, 0])
    self.assertTrue(
        np.array_equal(
            a.scatter(index, b).numpy(),
            paddle.scatter(a, index, b).numpy()))
    # 3. Bool tensor operation
    x = paddle.to_tensor([[True, False], [True, False]])
    y = paddle.to_tensor([[False, False], [False, True]])
    self.assertTrue(
        np.array_equal(
            x.logical_and(y).numpy(), paddle.logical_and(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.logical_not(y).numpy(), paddle.logical_not(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.logical_or(y).numpy(), paddle.logical_or(x, y).numpy()))
    self.assertTrue(
        np.array_equal(
            x.logical_xor(y).numpy(), paddle.logical_xor(x, y).numpy()))
    # NOTE(review): this logical_and check duplicates the first one above.
    self.assertTrue(
        np.array_equal(
            x.logical_and(y).numpy(), paddle.logical_and(x, y).numpy()))
    a = paddle.to_tensor([[1, 2], [3, 4]])
    b = paddle.to_tensor([[4, 3], [2, 1]])
    self.assertTrue(
        np.array_equal(
            x.where(a, b).numpy(), paddle.where(x, a, b).numpy()))
    x_np = np.random.randn(3, 6, 9, 7)
    x = paddle.to_tensor(x_np)
    x_T = x.T
    # NOTE(review): assertTrue(x_T.shape, [...]) treats the list as the
    # failure *message*, so this assertion only checks the shape is truthy;
    # it was presumably meant to be assertEqual.
    self.assertTrue(x_T.shape, [7, 9, 6, 3])
    self.assertTrue(np.array_equal(x_T.numpy(), x_np.T))
    # Remaining ops are only checked for being bound as methods.
    self.assertTrue(inspect.ismethod(a.dot))
    self.assertTrue(inspect.ismethod(a.logsumexp))
    self.assertTrue(inspect.ismethod(a.multiplex))
    self.assertTrue(inspect.ismethod(a.prod))
    self.assertTrue(inspect.ismethod(a.scale))
    self.assertTrue(inspect.ismethod(a.stanh))
    self.assertTrue(inspect.ismethod(a.add_n))
    self.assertTrue(inspect.ismethod(a.max))
    self.assertTrue(inspect.ismethod(a.maximum))
    self.assertTrue(inspect.ismethod(a.min))
    self.assertTrue(inspect.ismethod(a.minimum))
    self.assertTrue(inspect.ismethod(a.floor_divide))
    self.assertTrue(inspect.ismethod(a.remainder))
    self.assertTrue(inspect.ismethod(a.floor_mod))
    self.assertTrue(inspect.ismethod(a.multiply))
    self.assertTrue(inspect.ismethod(a.logsumexp))
    self.assertTrue(inspect.ismethod(a.inverse))
    self.assertTrue(inspect.ismethod(a.log1p))
    self.assertTrue(inspect.ismethod(a.erf))
    self.assertTrue(inspect.ismethod(a.addmm))
    self.assertTrue(inspect.ismethod(a.clip))
    self.assertTrue(inspect.ismethod(a.trace))
    self.assertTrue(inspect.ismethod(a.kron))
    self.assertTrue(inspect.ismethod(a.isinf))
    self.assertTrue(inspect.ismethod(a.isnan))
    self.assertTrue(inspect.ismethod(a.concat))
    self.assertTrue(inspect.ismethod(a.broadcast_to))
    self.assertTrue(inspect.ismethod(a.scatter_nd_add))
    self.assertTrue(inspect.ismethod(a.scatter_nd))
    self.assertTrue(inspect.ismethod(a.shard_index))
    self.assertTrue(inspect.ismethod(a.chunk))
    self.assertTrue(inspect.ismethod(a.stack))
    self.assertTrue(inspect.ismethod(a.strided_slice))
    self.assertTrue(inspect.ismethod(a.unsqueeze))
    self.assertTrue(inspect.ismethod(a.unstack))
    self.assertTrue(inspect.ismethod(a.argmax))
    self.assertTrue(inspect.ismethod(a.argmin))
    self.assertTrue(inspect.ismethod(a.argsort))
    self.assertTrue(inspect.ismethod(a.masked_select))
    self.assertTrue(inspect.ismethod(a.topk))
    self.assertTrue(inspect.ismethod(a.index_select))
    self.assertTrue(inspect.ismethod(a.nonzero))
    self.assertTrue(inspect.ismethod(a.sort))
    self.assertTrue(inspect.ismethod(a.index_sample))
    self.assertTrue(inspect.ismethod(a.mean))
    self.assertTrue(inspect.ismethod(a.std))
    self.assertTrue(inspect.ismethod(a.numel))
def test_complex_scalar(self):
    """A complex Python scalar on the left of * promotes the result."""
    data = np.random.random(self.shape).astype(self.dtype)
    with fluid.dygraph.guard():
        var = fluid.dygraph.to_variable(data)
        product = 1J * var
        self.assertTrue(np.array_equal(product.numpy(), 1J * data))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
luotao1/Paddle
|
python/paddle/fluid/tests/unittests/test_math_op_patch_var_base.py
|
Python
|
apache-2.0
| 24,962 | 0.0002 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import unittest
import random
from swift.common.utils import Timestamp
from kinetic_swift.client import KineticSwiftClient
from kinetic_swift.obj import server
from utils import KineticSwiftTestCase, debug_logger
class TestDiskFile(KineticSwiftTestCase):
    """Exercises the kinetic-swift DiskFile implementation end to end.

    Each test talks to a real Kinetic simulator/device reachable through
    ``self.client_map`` (set up by the base class) and verifies both the
    data returned through the DiskFile API and the raw keys stored on the
    device (head keys under ``<data-dir>.<hash>...`` and chunk keys under
    ``chunks.<hash>...``).
    """
    def setUp(self):
        """Pick a random storage policy and build its DiskFile manager."""
        super(TestDiskFile, self).setUp()
        self.port = self.ports[0]
        self.device = 'localhost:%s' % self.port
        self.client = self.client_map[self.port]
        self.logger = debug_logger('test-kinetic')
        server.install_kinetic_diskfile()
        self.policy = random.choice(list(server.diskfile.POLICIES))
        self.router = server.diskfile.DiskFileRouter(
            {}, self.logger)
        self.mgr = self.router[self.policy]
        # Make deletes synchronous so key-count assertions are stable.
        self.mgr.unlink_wait = True
    def test_diskfile_router(self):
        """Each policy type maps to its matching DiskFileManager class."""
        expected = {
            server.diskfile.EC_POLICY: server.ECDiskFileManager,
            server.diskfile.REPL_POLICY: server.DiskFileManager,
        }
        for policy in server.diskfile.POLICIES:
            self.assertIsInstance(self.router[policy],
                                  expected[policy.policy_type])
    def test_manager_config(self):
        """String config values are parsed to ints and reach the DiskFile."""
        conf = {
            'connect_retry': '6',
            'connect_timeout': '10',
            'response_timeout': '90',
            'write_depth': '2',
            'delete_depth': '4',
            'disk_chunk_size': '%s' % 2 ** 20,
        }
        mgr = server.DiskFileManager(conf, self.logger)
        self.assertEqual(mgr.connect_retry, 6)
        df = mgr.get_diskfile(self.device, '0', 'a', 'c', self.buildKey('o'),
                              self.policy)
        self.assertEqual(df.conn.conn.connect_timeout, 10)
        self.assertEqual(df.conn.response_timeout, 90)
        self.assertEqual(df.write_depth, 2)
        self.assertEqual(df.delete_depth, 4)
        self.assertEqual(df.disk_chunk_size, 2 ** 20)
    def test_config_sync_options(self):
        """The 'synchronization' option maps to the kinetic numeric codes."""
        expectations = {
            'default': None,
            'writethrough': 1,
            'writeback': 2,
            'flush': 3,
        }
        for option, expected in expectations.items():
            conf = {'synchronization': option}
            mgr = server.DiskFileManager(conf, self.logger)
            self.assertEqual(mgr.synchronization, expected)
    def test_create(self):
        """get_diskfile hands back a DiskFile wired to a Kinetic client."""
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy)
        self.assert_(isinstance(df.conn, KineticSwiftClient))
    def test_put(self):
        """A basic write+commit succeeds."""
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy)
        with df.create() as writer:
            writer.write('awesome')
            writer.put({'X-Timestamp': time.time()})
    def test_put_with_frag_index(self):
        """The EC frag index is appended to the head key's nonce."""
        # setup an object w/o a fragment index
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy)
        start = time.time()
        with df.create() as writer:
            writer.write('awesome')
            writer.put({'X-Timestamp': start})
        resp = self.client.getKeyRange('objects,', 'objects/')
        keys = resp.wait()
        self.assertEqual(len(keys), 1)
        head_key = keys[0]
        # policy.hash.t.s.ext.nonce
        nonce = head_key.rsplit('.', 1)[1]
        nonce_parts = nonce.split('-')
        # nonce is just a uuid
        self.assertEqual(5, len(nonce_parts))
        # now create the object with a frag index!
        now = start + 1
        with df.create() as writer:
            writer.write('awesome')
            writer.put({
                'X-Timestamp': now,
                'X-Object-Sysmeta-Ec-Frag-Index': '7',
            })
        resp = self.client.getKeyRange('objects,', 'objects/')
        keys = resp.wait()
        self.assertEqual(len(keys), 1)
        head_key = keys[0]
        # policy.hash.t.s.ext.nonce-frag
        nonce = head_key.rsplit('.', 1)[1]
        nonce_parts = nonce.split('-')
        # nonce is now a uuid with a frag_index on the end
        self.assertEqual(6, len(nonce_parts))
        # and it has the value of the ec-frag-index
        self.assertEqual(int(nonce_parts[-1]), 7)
    def test_put_and_get(self):
        """Round-trip: written body and metadata read back intact."""
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy)
        req_timestamp = time.time()
        with df.create() as writer:
            writer.write('awesome')
            writer.put({'X-Timestamp': req_timestamp})
        with df.open() as reader:
            metadata = reader.get_metadata()
            body = ''.join(reader)
        self.assertEquals(body, 'awesome')
        expected = {
            'X-Timestamp': req_timestamp,
            'X-Kinetic-Chunk-Count': 1,
        }
        for k, v in expected.items():
            self.assertEqual(metadata[k], v,
                             'expected %r for metadatakey %r got %r' % (
                                 v, k, metadata[k]))
    def test_submit_write_all_sync_options(self):
        """Non-final writes use writeback under 'flush'; final writes use
        the configured synchronization directly."""
        for sync_option in ('flush', 'writeback', 'writethrough', 'default'):
            conf = {'synchronization': sync_option}
            mgr = server.DiskFileManager(conf, self.logger)
            df = mgr.get_diskfile(self.device, '0', 'a', 'c',
                                  self.buildKey('o'), self.policy)
            options = {}
            # Capture the kwargs that would be sent to the device.
            def capture_args(*args, **kwargs):
                options.update(kwargs)
            df.conn.put = capture_args
            with df.create():
                key = self.buildKey('submit_%s' % sync_option)
                df._submit_write(key, 'blob', final=False)
                # flush option does writeback unless final
                if sync_option == 'flush':
                    self.assertEqual(options['synchronization'],
                                     server.SYNC_OPTION_MAP['writeback'])
                else:
                    self.assertEqual(options['synchronization'],
                                     df.synchronization)
                # final write always matches sync option
                key = self.buildKey('submit_final_%s' % sync_option)
                df._submit_write(key, 'blob', final=True)
                self.assertEqual(options['synchronization'],
                                 df.synchronization)
    def test_put_all_sync_options(self):
        """Round-trip works and chunk count is right for every sync mode."""
        expected_body = 'a' * 100
        conf = {
            'disk_chunk_size': 10,
        }
        for sync_option in ('flush', 'writeback', 'writethrough', 'default'):
            conf['synchronization'] = sync_option
            mgr = server.DiskFileManager(conf, self.logger)
            mgr.unlink_wait = True
            df = mgr.get_diskfile(self.device, '0', 'a', 'c',
                                  self.buildKey('o'), self.policy)
            req_timestamp = time.time()
            with df.create() as writer:
                writer.write(expected_body)
                writer.put({'X-Timestamp': req_timestamp})
            with df.open() as reader:
                metadata = reader.get_metadata()
                body = ''.join(reader)
            self.assertEquals(body, expected_body)
            expected = {
                'X-Timestamp': req_timestamp,
                'X-Kinetic-Chunk-Count': 10,
            }
            for k, v in expected.items():
                self.assertEqual(metadata[k], v,
                                 'expected %r for metadatakey %r got %r' % (
                                     v, k, metadata[k]))
    def test_get_not_found(self):
        """Opening a never-written object raises DiskFileNotExist."""
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy)
        try:
            df.open()
        except server.diskfile.DiskFileNotExist:
            pass
        else:
            self.fail('Did not raise deleted!')
        finally:
            df.close()
    def test_multi_chunk_put_and_get(self):
        """A body spanning several disk chunks reads back whole."""
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy,
                                   disk_chunk_size=10)
        req_timestamp = time.time()
        with df.create() as writer:
            chunk = '\x00' * 10
            for i in range(3):
                writer.write(chunk)
            writer.put({'X-Timestamp': req_timestamp})
        with df.open() as reader:
            metadata = reader.get_metadata()
            body = ''.join(reader)
        self.assertEquals(body, '\x00' * 30)
        expected = {
            'X-Timestamp': req_timestamp,
            'X-Kinetic-Chunk-Count': 3,
        }
        for k, v in expected.items():
            self.assertEqual(metadata[k], v)
    def test_multi_chunk_put_and_get_with_buffer_offset(self):
        """Write-chunk size not aligned to disk-chunk size still re-chunks
        correctly (writes straddle disk-chunk boundaries)."""
        disk_chunk_size = 10
        write_chunk_size = 6
        write_chunk_count = 7
        object_size = write_chunk_size * write_chunk_count
        # int(math.ceil(1.0 * object_size / disk_chunk_size))
        q, r = divmod(object_size, disk_chunk_size)
        disk_chunk_count = q if not r else q + 1
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy,
                                   disk_chunk_size=disk_chunk_size)
        req_timestamp = time.time()
        with df.create() as writer:
            chunk = '\x00' * write_chunk_size
            for i in range(write_chunk_count):
                writer.write(chunk)
            writer.put({'X-Timestamp': req_timestamp})
        with df.open() as reader:
            metadata = reader.get_metadata()
            body = ''.join(reader)
        self.assertEquals(len(body), object_size)
        self.assertEquals(body, '\x00' * object_size)
        expected = {
            'X-Timestamp': req_timestamp,
            'X-Kinetic-Chunk-Count': disk_chunk_count,
        }
        for k, v in expected.items():
            self.assertEqual(metadata[k], v)
    def test_write_and_delete(self):
        """delete() leaves only a tombstone head key and no chunk keys."""
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy,
                                   disk_chunk_size=10)
        req_timestamp = time.time()
        with df.create() as writer:
            chunk = '\x00' * 10
            for i in range(3):
                writer.write(chunk)
            writer.put({'X-Timestamp': req_timestamp})
        req_timestamp += 1
        df.delete(req_timestamp)
        try:
            df.open()
        except server.diskfile.DiskFileDeleted as e:
            self.assertEqual(e.timestamp, req_timestamp)
        else:
            self.fail('Did not raise deleted!')
        finally:
            df.close()
        # check object keys
        storage_policy = server.diskfile.get_data_dir(int(self.policy))
        start_key = '%s.%s' % (storage_policy, df.hashpath)
        end_key = '%s.%s/' % (storage_policy, df.hashpath)
        keys = self.client.getKeyRange(start_key, end_key).wait()
        self.assertEqual(1, len(keys))  # the tombstone!
        for key in keys:
            expected = start_key + '.%s.ts' % Timestamp(req_timestamp).internal
            self.assert_(key.startswith(expected))
        # check chunk keys
        start_key = 'chunks.%s' % df.hashpath
        end_key = 'chunks.%s/' % df.hashpath
        keys = self.client.getKeyRange(start_key, end_key).wait()
        self.assertEqual(0, len(keys))
    def test_overwrite(self):
        """Overwriting leaves one head key and only the new data's chunks
        (the older, longer object's chunks are reclaimed)."""
        num_chunks = 3
        disk_chunk_size = 10
        disk_chunk_count = num_chunks
        df = self.mgr.get_diskfile(self.device, '0', 'a', 'c',
                                   self.buildKey('o'), self.policy,
                                   disk_chunk_size=10)
        req_timestamp = time.time()
        # First object: one chunk longer than the overwrite that follows.
        with df.create() as writer:
            chunk = '\x00' * disk_chunk_size
            for i in range(num_chunks + 1):
                writer.write(chunk)
            writer.put({'X-Timestamp': req_timestamp})
        req_timestamp += 1
        with df.create() as writer:
            chunk = '\x01' * disk_chunk_size
            for i in range(num_chunks):
                writer.write(chunk)
            writer.put({'X-Timestamp': req_timestamp})
        with df.open() as reader:
            metadata = reader.get_metadata()
            body = ''.join(reader)
        expected = {
            'X-Timestamp': req_timestamp,
            'X-Kinetic-Chunk-Count': disk_chunk_count,
        }
        for k, v in expected.items():
            self.assertEqual(metadata[k], v)
        self.assertEquals(body, '\x01' * (disk_chunk_size * num_chunks))
        # check object keys
        storage_policy = server.diskfile.get_data_dir(int(self.policy))
        start_key = '%s.%s' % (storage_policy, df.hashpath)
        end_key = '%s.%s/' % (storage_policy, df.hashpath)
        keys = self.client.getKeyRange(start_key, end_key).wait()
        self.assertEqual(1, len(keys))
        # check chunk keys
        start_key = 'chunks.%s' % df.hashpath
        end_key = 'chunks.%s/' % df.hashpath
        keys = self.client.getKeyRange(start_key, end_key).wait()
        self.assertEqual(disk_chunk_count, len(keys))
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
swiftstack/kinetic-swift
|
test_kinetic_swift/test_diskfile.py
|
Python
|
apache-2.0
| 13,975 | 0 |
from PythonQt import QtGui, QtCore
from uic.uiparser import UIParser
from uic.Loader.qobjectcreator import LoaderCreatorPolicy
class DynamicUILoader(UIParser):
    """Parses a Qt Designer .ui file at runtime and builds its widget tree."""

    def __init__(self):
        UIParser.__init__(self, QtCore, QtGui, LoaderCreatorPolicy())

    def createToplevelWidget(self, classname, widgetname):
        """Return the root widget for the UI being built.

        If the caller supplied a top-level instance via loadUi(), reuse it
        after checking it derives from the .ui file's declared class;
        otherwise create a fresh widget of that class.
        """
        instance = self.toplevelInst
        if instance is None:
            return self.factory.createQObject(classname, widgetname, ())
        expected_type = self.factory.findQObjectType(classname)
        if not isinstance(instance, expected_type):
            raise TypeError(("Wrong base class of toplevel widget",
                             (type(instance), classname)))
        return instance

    def loadUi(self, filename, toplevelInst=None):
        """Parse *filename*; *toplevelInst* optionally becomes the root widget."""
        self.toplevelInst = toplevelInst
        return self.parse(filename)
|
ximion/Clementine-LibDanceTag
|
data/pythonlibs/uic/Loader/loader.py
|
Python
|
gpl-3.0
| 849 | 0.004711 |
#!/usr/bin/env python
#
# Bzip2
#
# Bzip2 packages and versions
#
# Author P G Jones - 2014-08-15 <p.g.jones@qmul.ac.uk> : New file.
####################################################################################################
import nusoft.package.conditional as conditional_package
import os
class Bzip2(conditional_package.ConditionalPackage):
    """ The Bzip2 installation package.

    Downloads the bzip2 source tarball, builds the shared library and
    installs library + headers under the package's install path.

    :param _tar_name: name of the tar file to download/install
    :param _version: version of Bzip2 to install.
    """
    def __init__(self, system, repository):
        """ Initialise this bzip2 installation package.

        :param system: class that manages system commands
        :type system: :class:`nusoft.system.System` instance
        :param repository: local name of the repository the package is from
        """
        super(Bzip2, self).__init__(self._version, system, repository, libraries=["bz2"],
                                    headers=["bzlib.h"])
        self._tar_name = self._version + ".tar.gz"
    def get_dependencies(self):
        """ Return a list of dependency names

        :returns: list of dependency package names
        :rtype: list
        """
        return []
    def _download(self):
        """ Download the bzip2 tar file.

        Fixed: the previous URL ("http://bzip2.haxx.se/download/") does not
        exist -- haxx.se hosts curl, not bzip2; the template was likely
        copied from a curl package.  bzip2 release tarballs are hosted on
        sourceware.org.
        """
        self._system.download("https://sourceware.org/pub/bzip2/" + self._tar_name)
    def _install(self):
        """ Untar the tar file to the install path and build/install it."""
        self._system.untar(self._tar_name, self.get_install_path(), 1)
        # Build the shared library first, then install headers/libs/binaries.
        self._system.make(args=["-f", "Makefile-libbz2_so"], cwd=self.get_install_path())
        self._system.make(args=["install", "PREFIX=" + self.get_install_path()],
                          cwd=self.get_install_path())
    def _update(self):
        """ Nothing to do here..."""
        pass
    def _remove(self):
        """ Remove the install directory."""
        self._system.remove(self.get_install_path())
    def _is_installed(self):
        """ Check if bzip2 is installed by looking for its header and library.

        :return: True if installed
        """
        return self._system.exists(os.path.join(self.get_install_path(), "include/bzlib.h")) \
            and self._system.is_library(os.path.join(self.get_install_path(), "lib/libbz2"))
# The versions of bzip2 that can be installed; each entry is a dynamically
# created subclass whose _version class attribute pins the release.
versions = [type('bzip2-1.0.6', (Bzip2, object), {"_version" : "bzip2-1.0.6"})]
|
pgjones/nusoft
|
packages/nusoft/bzip2.py
|
Python
|
mit
| 2,422 | 0.009083 |
#!/usr/bin/python
import feedparser
import wget
import sqlite3
import time
# Tumblr RSS feeds to poll for video posts.
RssUrlList = ['http://postitforward.tumblr.com/rss','http://for-war3-blog-blog.tumblr.com/rss']
# Spread one full pass over the feeds across an hour.
# NOTE(review): under Python 2 this is integer division -- presumably fine,
# but a fractional per-feed delay is lost; confirm the intended interpreter.
sleep=3600/len(RssUrlList)
def mkdir(path):
    """Create directory *path* (including parents) if it does not exist.

    The previous check-then-create sequence was racy (TOCTOU): another
    process could create the directory between os.path.exists() and
    os.makedirs(), crashing the script.  Creating unconditionally and
    tolerating an already-existing directory is safe on Python 2 and 3.
    """
    import os
    # Preserve the original normalisation: trim whitespace and a trailing
    # backslash.
    path = path.strip()
    path = path.rstrip("\\")
    try:
        os.makedirs(path)
    except OSError:
        # Directory already exists -> fine; re-raise any other failure.
        if not os.path.isdir(path):
            raise
# Module-wide SQLite connection; one table per blog is created on demand.
conn = sqlite3.connect('tumblr.db')
def DownloadVideo(rss_url):
    """Fetch one blog's RSS feed and download any new video posts.

    Downloaded video ids are recorded in a per-blog SQLite table so a post
    is only fetched once; iteration stops at the newest already-seen post.

    NOTE(review): table/column values are spliced into SQL with %-format
    instead of parameter binding -- safe only while rss_url/video_id stay
    trusted; consider parameterized queries.
    """
    feeds = feedparser.parse(rss_url)
    # Derive a table name from the blog host, e.g.
    # "http://X.tumblr.com/rss" -> "X" with '-' stripped.
    table=rss_url[7:-15].replace('-','')
    try:
        conn.execute('''CREATE TABLE %s(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE REAL)'''% table)
        # Seed row so MAX(DATE) below is never NULL on a fresh table.
        conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','new','0')" % (table,rss_url))
        # conn.execute("SELECT * FROM TUMBLR WHERE BLOG == %s").next()
    except:
        # NOTE(review): bare except assumes "table already exists" but also
        # hides any other sqlite error -- consider catching OperationalError.
        pass
    # conn.execute('''CREATE TABLE(BLOG TEXT, ADDRESS TEXT PRIMARY KEY, DATE TEXT);''')
    # conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('rss_url','TEST','TEST')" % table)
    mkdir(rss_url[7:-4])
    for post in feeds.entries:
        thisposttime=float(time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S")))
        # Feed entries are newest-first: stop at the newest post we've seen.
        if conn.execute("SELECT MAX(DATE) FROM %s"%table).next()[0] == thisposttime:
            break
        if post.description.find("video_file") == -1:
            continue
        # Scrape the tumblr video id out of the post's embedded HTML:
        # <source src="...tumblr_<id>..." type="video...">
        sourceadd= post.description.find("source src=")
        tumblradd= post.description[sourceadd:].find("tumblr_")
        typeadd = post.description[sourceadd:][tumblradd:].find("type=\"video")
        video_id=post.description[sourceadd:][tumblradd:][:typeadd-2]
        if video_id.find("/") !=-1:
            video_id=video_id[:video_id.find("/")]
        try:
            # Primary-key probe: raises StopIteration when the id is new.
            list(conn.execute("SELECT * FROM %s WHERE ADDRESS == '%s'"%(table,video_id)).next())
        except:
            print(post.title + ": " + post.link + post.published+"\n")
            wget.download("http://vt.tumblr.com/"+video_id+".mp4",rss_url[7:-4])
            print("\n")
            conn.execute("INSERT INTO %s (BLOG ,ADDRESS, DATE) VALUES ('%s','%s','%f')" % (table,rss_url,video_id,time.mktime(time.strptime(post.published[:-6],"%a, %d %b %Y %H:%M:%S"))))
            #wget.download(get_download_url("https://your.appspot.com/fetch.php?url="+post.link),rss_url[7:-4])
    conn.commit()
# Poll each feed forever, sleeping between feeds so one full pass takes
# roughly an hour (see the module-level `sleep`).
while(1):
    for rss_url in RssUrlList:
        print("Downloading "+rss_url)
        DownloadVideo(rss_url)
        print("Sleep "+str(sleep)+" seconds")
        time.sleep(sleep)
|
yanyuechuixue/tumblr2onedriver
|
Main.py
|
Python
|
gpl-3.0
| 2,513 | 0.018305 |
import requests
import json
import sqlite3
import time
# Read the JCDecaux API key from a local file (one key per line).
key_file = open('jcdecaux.key','r')
api_key = key_file.readline().rstrip('\n')
key_file.close()
startime = time.time()
# Fetch the current state of every Velib station in the Paris contract.
url = 'https://api.jcdecaux.com/vls/v1/stations?contract=Paris&apiKey=' + api_key
response = requests.get(url)
# NOTE(review): the HTTP status is only printed, never checked; a non-200
# response will crash on .json() below.
print(response.status_code)
data = response.json()
conn = sqlite3.connect('velib.db')
cursor = conn.cursor()
# NOTE(review): `keep` is never used -- leftover from an earlier version.
keep = 0
# One snapshot timestamp shared by all rows of this run.
request_date = int(time.time())
for station in data:
    number = int(station['number'])
    status = station['status']
    bike_stands = int(station['bike_stands'])
    available_bike_stands = int(station['available_bike_stands'])
    available_bikes = int(station['available_bikes'])
    last_update = int(station['last_update'])
    # Parameterized insert -- safe against injection.
    cursor.execute("""
    INSERT INTO statistics(number,request_date,status,bike_stands,available_bike_stands,available_bikes,last_update)
    VALUES(?, ?, ?, ?, ?, ?, ?)""", (number,request_date,status,bike_stands,available_bike_stands,available_bikes,last_update))
    # NOTE(review): committing per row is slow; a single commit after the
    # loop (or executemany) would be equivalent and faster.
    conn.commit()
endtime = time.time()
# Print the run duration in whole seconds.
print(int(endtime - startime))
conn.close()
|
Gokender/velib
|
dataParsing.py
|
Python
|
mit
| 1,135 | 0.009692 |
import pytest
from proposals.models import AdditionalSpeaker, TalkProposal, TutorialProposal
@pytest.fixture
def talk_proposal(user):
    """A minimal talk proposal submitted by ``user``.

    The title deliberately contains raw HTML to exercise escaping in
    templates/serializers.
    """
    return TalkProposal.objects.create(
        id=42,
        submitter=user,
        title='Beyond the Style Guides<br>',
    )
@pytest.fixture
def cancelled_talk_proposal(talk_proposal):
    """The talk proposal fixture, flipped to cancelled and re-saved."""
    talk_proposal.cancelled = True
    talk_proposal.save()
    return talk_proposal
@pytest.fixture
def tutorial_proposal(user):
    """A minimal tutorial proposal submitted by ``user``.

    Mirrors ``talk_proposal`` (same id/title) so parametrized tests can
    treat the two interchangeably.
    """
    return TutorialProposal.objects.create(
        id=42,
        submitter=user,
        title='Beyond the Style Guides<br>',
    )
@pytest.fixture
def cancelled_tutorial_proposal(tutorial_proposal):
    """The ``tutorial_proposal`` fixture, flagged as cancelled and re-saved."""
    cancelled = tutorial_proposal
    cancelled.cancelled = True
    cancelled.save()
    return cancelled
@pytest.fixture(params=['talk', 'tutorial'])
def proposal_type(request):
    """Parametrized fixture yielding each proposal kind name in turn."""
    return request.param


@pytest.fixture
def proposal(proposal_type, talk_proposal, tutorial_proposal):
    """Return the proposal fixture matching the current ``proposal_type``.

    Uses an explicit mapping instead of the original
    ``locals()[proposal_type + '_proposal']`` lookup, which silently
    breaks if a parameter is renamed and defeats static analysis.
    """
    by_type = {
        'talk': talk_proposal,
        'tutorial': tutorial_proposal,
    }
    return by_type[proposal_type]
@pytest.fixture
def additional_speaker(another_user, proposal):
    """An AdditionalSpeaker (fixed id 81) linking ``another_user`` to ``proposal``."""
    return AdditionalSpeaker.objects.create(
        id=81, user=another_user, proposal=proposal,
    )
|
uranusjr/pycontw2016
|
src/proposals/tests/conftest.py
|
Python
|
mit
| 1,232 | 0 |
#!/usr/bin/env python
"""Execute the tests for the samcat program.
The golden test outputs are generated by the script generate_outputs.sh.
You have to give the root paths to the source and the binaries as arguments to
the program. These are the paths to the directory that contains the 'projects'
directory.
Usage: run_tests.py SOURCE_ROOT_PATH BINARY_ROOT_PATH
"""
import logging
import os.path
import sys
# Automagically add util/py_lib to PYTHONPATH environment variable.
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',
'..', '..', 'util', 'py_lib'))
sys.path.insert(0, path)
import seqan.app_tests as app_tests
def main(source_base, binary_base):
    """Main entry point of the script.

    Builds the list of samcat invocations, runs each one, diffs its
    output against the golden files, and returns a truthy value iff
    at least one test failed (used as the process exit code).
    """
    print 'Executing test for samcat'
    print '========================='
    print
    ph = app_tests.TestPathHelper(
        source_base, binary_base,
        'apps/samcat/tests')  # tests dir
    # ============================================================
    # Auto-detect the binary path.
    # ============================================================
    path_to_program = app_tests.autolocateBinary(
        binary_base, 'apps/samcat', 'samcat')
    # ============================================================
    # Built TestConf list.
    # ============================================================
    # Build list with TestConf objects, analoguely to how the output
    # was generated in generate_outputs.sh.
    conf_list = []
    # ============================================================
    # Run on DNA (Adenoviruses).
    # ============================================================
    # Merge three SAM inputs into one SAM file and diff against golden.
    conf = app_tests.TestConf(
        program=path_to_program,
        args=[ph.inFile('ex1_a1.sam'),
              ph.inFile('ex1_a2.sam'),
              ph.inFile('ex1_a3.sam'),
              '-o', ph.outFile('ex1_merged.sam')],
        to_diff=[(ph.inFile('ex1_merged.sam'),
                  ph.outFile('ex1_merged.sam'))])
    conf_list.append(conf)
    # Same merge, but written as BAM; diffed through gunzip since BAM
    # is gzip-compressed.
    conf = app_tests.TestConf(
        program=path_to_program,
        args=[ph.inFile('ex1_a1.sam'),
              ph.inFile('ex1_a2.sam'),
              ph.inFile('ex1_a3.sam'),
              '-o', ph.outFile('ex1_merged.bam')],
        to_diff=[(ph.inFile('ex1_merged.bam'),
                  ph.outFile('ex1_merged.bam'), "gunzip")])
    conf_list.append(conf)
    # Execute the tests.
    failures = 0
    for conf in conf_list:
        res = app_tests.runTest(conf)
        # Output to the user.
        print ' '.join(conf.commandLineArgs())
        if res:
            print 'OK'
        else:
            failures += 1
            print 'FAILED'
    # Cleanup.
    ph.deleteTempDir()
    print '=============================='
    print '     total tests: %d' % len(conf_list)
    print '    failed tests: %d' % failures
    print 'successful tests: %d' % (len(conf_list) - failures)
    print '=============================='
    # Compute and return return code.
    return failures != 0
if __name__ == '__main__':
    sys.exit(app_tests.main(main))
|
rrahn/gdf_tools
|
include/seqan/apps/samcat/tests/run_tests.py
|
Python
|
gpl-3.0
| 3,112 | 0.001285 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Proxy AMI-related calls from cloud controller to objectstore service."""
import base64
import binascii
import os
import shutil
import tarfile
import tempfile
import boto.s3.connection
import eventlet
from lxml import etree
from nova.api.ec2 import ec2utils
import nova.cert.rpcapi
from nova import exception
from nova.image import glance
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova import utils
LOG = logging.getLogger(__name__)

# Configuration options controlling which s3 endpoint images are
# registered from and where downloaded bundles are decrypted locally.
s3_opts = [
    cfg.StrOpt('image_decryption_dir',
               default='/tmp',
               help='parent dir for tempdir used for image decryption'),
    cfg.StrOpt('s3_host',
               default='$my_ip',
               help='hostname or ip for openstack to use when accessing '
                    'the s3 api'),
    cfg.IntOpt('s3_port',
               default=3333,
               help='port used when accessing the s3 api'),
    cfg.StrOpt('s3_access_key',
               default='notchecked',
               help='access key to use for s3 server for images'),
    cfg.StrOpt('s3_secret_key',
               default='notchecked',
               help='secret key to use for s3 server for images'),
    cfg.BoolOpt('s3_use_ssl',
                default=False,
                help='whether to use ssl when talking to s3'),
    cfg.BoolOpt('s3_affix_tenant',
                default=False,
                help='whether to affix the tenant id to the access key '
                     'when downloading from s3'),
]

CONF = cfg.CONF
CONF.register_opts(s3_opts)
# '$my_ip' above is substituted from this option in nova.netconf.
CONF.import_opt('my_ip', 'nova.netconf')
class S3ImageService(object):
    """Wraps an existing image service to support s3 based register.

    Translates between EC2 integer image ids (presented to users) and
    glance UUIDs (used by the wrapped image service), and implements
    registration of bundled images stored in an s3 bucket: download,
    decrypt, untar, and upload to the wrapped service.
    """

    def __init__(self, service=None, *args, **kwargs):
        # Cert RPC API is used later to decrypt the per-image key/IV.
        self.cert_rpcapi = nova.cert.rpcapi.CertAPI()
        self.service = service or glance.get_default_image_service()
        self.service.__init__(*args, **kwargs)

    def _translate_uuids_to_ids(self, context, images):
        # Bulk form of _translate_uuid_to_id.
        return [self._translate_uuid_to_id(context, img) for img in images]

    def _translate_uuid_to_id(self, context, image):
        """Return a copy of *image* with glance UUIDs mapped to EC2 ids.

        Applies to the image's own 'id' and to the 'kernel_id' /
        'ramdisk_id' properties when present; missing keys are skipped.
        """
        image_copy = image.copy()
        try:
            image_uuid = image_copy['id']
        except KeyError:
            pass
        else:
            image_copy['id'] = ec2utils.glance_id_to_id(context, image_uuid)
        for prop in ['kernel_id', 'ramdisk_id']:
            try:
                image_uuid = image_copy['properties'][prop]
            except (KeyError, ValueError):
                pass
            else:
                image_id = ec2utils.glance_id_to_id(context, image_uuid)
                image_copy['properties'][prop] = image_id
        return image_copy

    def _translate_id_to_uuid(self, context, image):
        """Return a copy of *image* with EC2 ids mapped back to glance UUIDs.

        Inverse of _translate_uuid_to_id, over the same keys.
        """
        image_copy = image.copy()
        try:
            image_id = image_copy['id']
        except KeyError:
            pass
        else:
            image_copy['id'] = ec2utils.id_to_glance_id(context, image_id)
        for prop in ['kernel_id', 'ramdisk_id']:
            try:
                image_id = image_copy['properties'][prop]
            except (KeyError, ValueError):
                pass
            else:
                image_uuid = ec2utils.id_to_glance_id(context, image_id)
                image_copy['properties'][prop] = image_uuid
        return image_copy

    def create(self, context, metadata, data=None):
        """Create an image.

        metadata['properties'] should contain image_location.
        Returns immediately with a placeholder image; the actual
        download/decrypt/upload happens asynchronously (see _s3_create).
        """
        image = self._s3_create(context, metadata)
        return image

    def delete(self, context, image_id):
        """Delete the image identified by EC2 integer *image_id*."""
        image_uuid = ec2utils.id_to_glance_id(context, image_id)
        self.service.delete(context, image_uuid)

    def update(self, context, image_id, metadata, data=None):
        """Update an image, translating ids in both directions."""
        image_uuid = ec2utils.id_to_glance_id(context, image_id)
        metadata = self._translate_id_to_uuid(context, metadata)
        image = self.service.update(context, image_uuid, metadata, data)
        return self._translate_uuid_to_id(context, image)

    def detail(self, context, **kwargs):
        """List images with glance UUIDs translated to EC2 ids."""
        #NOTE(bcwaldon): sort asc to make sure we assign lower ids
        # to older images
        kwargs.setdefault('sort_dir', 'asc')
        images = self.service.detail(context, **kwargs)
        return self._translate_uuids_to_ids(context, images)

    def show(self, context, image_id):
        """Show a single image identified by EC2 integer *image_id*."""
        image_uuid = ec2utils.id_to_glance_id(context, image_id)
        image = self.service.show(context, image_uuid)
        return self._translate_uuid_to_id(context, image)

    @staticmethod
    def _conn(context):
        """Build a boto S3Connection from the s3_* config options."""
        # NOTE(vish): access and secret keys for s3 server are not
        # checked in nova-objectstore
        access = CONF.s3_access_key
        if CONF.s3_affix_tenant:
            access = '%s:%s' % (access, context.project_id)
        secret = CONF.s3_secret_key
        calling = boto.s3.connection.OrdinaryCallingFormat()
        return boto.s3.connection.S3Connection(aws_access_key_id=access,
                                               aws_secret_access_key=secret,
                                               is_secure=CONF.s3_use_ssl,
                                               calling_format=calling,
                                               port=CONF.s3_port,
                                               host=CONF.s3_host)

    @staticmethod
    def _download_file(bucket, filename, local_dir):
        """Download *filename* from *bucket* into *local_dir*; return the local path."""
        key = bucket.get_key(filename)
        local_filename = os.path.join(local_dir, os.path.basename(filename))
        key.get_contents_to_filename(local_filename)
        return local_filename

    def _s3_parse_manifest(self, context, metadata, manifest):
        """Parse a bundle manifest XML string and register a placeholder image.

        Determines image format (ami/aki/ari), architecture, dependent
        kernel/ramdisk ids and block device mappings from the manifest,
        then creates a 'queued'/'pending' image in the wrapped service.
        Returns (parsed manifest element, image dict with EC2 id, glance uuid).
        """
        manifest = etree.fromstring(manifest)
        image_format = 'ami'
        image_type = 'machine'
        # A literal 'true' kernel_id marks the bundle itself as a kernel.
        # NOTE(review): image_type is computed but not used below.
        try:
            kernel_id = manifest.find('machine_configuration/kernel_id').text
            if kernel_id == 'true':
                image_format = 'aki'
                image_type = 'kernel'
                kernel_id = None
        except Exception:
            kernel_id = None
        try:
            ramdisk_id = manifest.find('machine_configuration/ramdisk_id').text
            if ramdisk_id == 'true':
                image_format = 'ari'
                image_type = 'ramdisk'
                ramdisk_id = None
        except Exception:
            ramdisk_id = None
        try:
            arch = manifest.find('machine_configuration/architecture').text
        except Exception:
            arch = 'x86_64'
        # NOTE(yamahata):
        # EC2 ec2-budlne-image --block-device-mapping accepts
        # <virtual name>=<device name> where
        # virtual name = {ami, root, swap, ephemeral<N>}
        #                where N is no negative integer
        # device name = the device name seen by guest kernel.
        # They are converted into
        # block_device_mapping/mapping/{virtual, device}
        #
        # Do NOT confuse this with ec2-register's block device mapping
        # argument.
        mappings = []
        try:
            block_device_mapping = manifest.findall('machine_configuration/'
                                                    'block_device_mapping/'
                                                    'mapping')
            for bdm in block_device_mapping:
                mappings.append({'virtual': bdm.find('virtual').text,
                                 'device': bdm.find('device').text})
        except Exception:
            mappings = []
        properties = metadata['properties']
        properties['architecture'] = arch

        def _translate_dependent_image_id(image_key, image_id):
            # Dependent ids in the manifest are EC2-style; store the
            # glance uuid in the image properties instead.
            image_uuid = ec2utils.ec2_id_to_glance_id(context, image_id)
            properties[image_key] = image_uuid

        if kernel_id:
            _translate_dependent_image_id('kernel_id', kernel_id)
        if ramdisk_id:
            _translate_dependent_image_id('ramdisk_id', ramdisk_id)
        if mappings:
            properties['mappings'] = mappings
        metadata.update({'disk_format': image_format,
                         'container_format': image_format,
                         'status': 'queued',
                         'is_public': False,
                         'properties': properties})
        metadata['properties']['image_state'] = 'pending'
        #TODO(bcwaldon): right now, this removes user-defined ids.
        # We need to re-enable this.
        image_id = metadata.pop('id', None)
        image = self.service.create(context, metadata)
        # extract the new uuid and generate an int id to present back to user
        image_uuid = image['id']
        image['id'] = ec2utils.glance_id_to_id(context, image_uuid)
        # return image_uuid so the caller can still make use of image_service
        return manifest, image, image_uuid

    def _s3_create(self, context, metadata):
        """Gets a manifest from s3 and makes an image.

        Registers a placeholder image synchronously, then spawns a
        greenthread that downloads the bundle parts, decrypts, untars
        and uploads them, advancing the image_state property at each
        step ('downloading' -> 'decrypting' -> 'untarring' ->
        'uploading' -> 'available', or a 'failed_*' state on error).
        """
        image_path = tempfile.mkdtemp(dir=CONF.image_decryption_dir)
        image_location = metadata['properties']['image_location']
        # image_location is '<bucket>/<manifest path>'.
        bucket_name = image_location.split('/')[0]
        manifest_path = image_location[len(bucket_name) + 1:]
        bucket = self._conn(context).get_bucket(bucket_name)
        key = bucket.get_key(manifest_path)
        manifest = key.get_contents_as_string()
        manifest, image, image_uuid = self._s3_parse_manifest(context,
                                                              metadata,
                                                              manifest)

        def delayed_create():
            """This handles the fetching and decrypting of the part files."""
            context.update_store()
            log_vars = {'image_location': image_location,
                        'image_path': image_path}

            def _update_image_state(context, image_uuid, image_state):
                # Record progress/failure in the image_state property.
                metadata = {'properties': {'image_state': image_state}}
                self.service.update(context, image_uuid, metadata,
                                    purge_props=False)

            def _update_image_data(context, image_uuid, image_data):
                # Upload the actual image bits without touching metadata.
                metadata = {}
                self.service.update(context, image_uuid, metadata, image_data,
                                    purge_props=False)

            _update_image_state(context, image_uuid, 'downloading')
            try:
                parts = []
                elements = manifest.find('image').getiterator('filename')
                for fn_element in elements:
                    part = self._download_file(bucket,
                                               fn_element.text,
                                               image_path)
                    parts.append(part)
                # NOTE(vish): this may be suboptimal, should we use cat?
                enc_filename = os.path.join(image_path, 'image.encrypted')
                with open(enc_filename, 'w') as combined:
                    for filename in parts:
                        with open(filename) as part:
                            shutil.copyfileobj(part, combined)
            except Exception:
                LOG.exception(_("Failed to download %(image_location)s "
                                "to %(image_path)s"), log_vars)
                _update_image_state(context, image_uuid, 'failed_download')
                return
            _update_image_state(context, image_uuid, 'decrypting')
            try:
                # The bundle carries the AES key/IV encrypted with the
                # cloud's certificate; hex-decode then decrypt them.
                hex_key = manifest.find('image/ec2_encrypted_key').text
                encrypted_key = binascii.a2b_hex(hex_key)
                hex_iv = manifest.find('image/ec2_encrypted_iv').text
                encrypted_iv = binascii.a2b_hex(hex_iv)
                dec_filename = os.path.join(image_path, 'image.tar.gz')
                self._decrypt_image(context, enc_filename, encrypted_key,
                                    encrypted_iv, dec_filename)
            except Exception:
                LOG.exception(_("Failed to decrypt %(image_location)s "
                                "to %(image_path)s"), log_vars)
                _update_image_state(context, image_uuid, 'failed_decrypt')
                return
            _update_image_state(context, image_uuid, 'untarring')
            try:
                unz_filename = self._untarzip_image(image_path, dec_filename)
            except Exception:
                LOG.exception(_("Failed to untar %(image_location)s "
                                "to %(image_path)s"), log_vars)
                _update_image_state(context, image_uuid, 'failed_untar')
                return
            _update_image_state(context, image_uuid, 'uploading')
            try:
                with open(unz_filename) as image_file:
                    _update_image_data(context, image_uuid, image_file)
            except Exception:
                LOG.exception(_("Failed to upload %(image_location)s "
                                "to %(image_path)s"), log_vars)
                _update_image_state(context, image_uuid, 'failed_upload')
                return
            metadata = {'status': 'active',
                        'properties': {'image_state': 'available'}}
            self.service.update(context, image_uuid, metadata,
                                purge_props=False)
            # NOTE(review): temp dir is only removed on full success;
            # failed paths appear to leave image_path behind.
            shutil.rmtree(image_path)

        eventlet.spawn_n(delayed_create)
        return image

    def _decrypt_image(self, context, encrypted_filename, encrypted_key,
                       encrypted_iv, decrypted_filename):
        """Decrypt the combined bundle file with openssl AES-128-CBC.

        The key and IV are themselves decrypted via the cert service;
        raises NovaException on any failure.
        """
        elevated = context.elevated()
        try:
            key = self.cert_rpcapi.decrypt_text(elevated,
                    project_id=context.project_id,
                    text=base64.b64encode(encrypted_key))
        except Exception, exc:
            msg = _('Failed to decrypt private key: %s') % exc
            raise exception.NovaException(msg)
        try:
            iv = self.cert_rpcapi.decrypt_text(elevated,
                    project_id=context.project_id,
                    text=base64.b64encode(encrypted_iv))
        except Exception, exc:
            raise exception.NovaException(_('Failed to decrypt initialization '
                                            'vector: %s') % exc)
        try:
            utils.execute('openssl', 'enc',
                          '-d', '-aes-128-cbc',
                          '-in', '%s' % (encrypted_filename,),
                          '-K', '%s' % (key,),
                          '-iv', '%s' % (iv,),
                          '-out', '%s' % (decrypted_filename,))
        except exception.ProcessExecutionError, exc:
            raise exception.NovaException(_('Failed to decrypt image file '
                                            '%(image_file)s: %(err)s') %
                                          {'image_file': encrypted_filename,
                                           'err': exc.stdout})

    @staticmethod
    def _test_for_malicious_tarball(path, filename):
        """Raises exception if extracting tarball would escape extract path."""
        tar_file = tarfile.open(filename, 'r|gz')
        for n in tar_file.getnames():
            if not os.path.abspath(os.path.join(path, n)).startswith(path):
                tar_file.close()
                raise exception.NovaException(_('Unsafe filenames in image'))
        tar_file.close()

    @staticmethod
    def _untarzip_image(path, filename):
        """Safely extract the tarball and return the path of its first member."""
        S3ImageService._test_for_malicious_tarball(path, filename)
        tar_file = tarfile.open(filename, 'r|gz')
        tar_file.extractall(path)
        image_file = tar_file.getnames()[0]
        tar_file.close()
        return os.path.join(path, image_file)
|
maoy/zknova
|
nova/image/s3.py
|
Python
|
apache-2.0
| 16,517 | 0.000848 |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.6.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x06\x92\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x06\x59\x49\x44\x41\x54\x78\x9c\xed\xdd\xdd\x4f\x53\x67\
\x00\xc7\x71\xff\x04\xfe\x03\xf6\x27\x78\xc5\x95\x17\x27\x9c\xb6\
\x16\xa5\x8a\x22\x38\xdf\xdd\x50\x93\x85\xe8\x58\x32\xd6\x06\xc1\
\xd1\x69\x11\xda\x41\x51\x2b\x01\x7b\x46\x79\x69\x05\x31\x48\x16\
\xd4\x01\x2a\x95\xb7\xbe\x31\x50\x1c\x8c\x19\x44\x87\x21\x83\x32\
\x05\xa9\x85\x8e\x22\xfe\x76\xe1\x20\xe0\xc0\xb6\x50\x7a\x9e\x9e\
\xf3\xfc\x92\xef\xfd\x79\xce\x27\x3d\x3d\xbd\xea\x96\x2d\x74\x74\
\x74\x74\x11\x9f\x95\x61\x62\xba\xa4\xd2\xad\xb6\xf8\xf8\x0c\x1b\
\xcb\x26\x75\x49\xa5\x5b\xf9\xbe\x26\x51\xae\x4b\x2a\xdd\xea\x90\
\x4a\x6f\x3b\xa4\x52\xaf\x9d\x65\xd1\xbb\x7b\xf7\xac\x53\x26\xf3\
\xdb\x59\x16\x76\x96\x85\x43\x22\xd1\xf3\x7d\x8d\xa2\x99\x2d\x3e\
\x3e\xc3\x21\x95\xce\x3c\x49\x49\x99\x9f\x54\xab\xe1\xd5\x6a\xe1\
\xd3\xeb\xe1\xd3\xeb\xe1\xd5\x6a\x31\xae\x54\xe2\xd7\x9d\x3b\xe7\
\xec\x12\x89\xc7\xc6\xb2\x49\x7c\x5f\xaf\x60\x67\x65\x98\x18\xa7\
\x4c\x66\x71\xca\x64\xfe\x97\x67\xce\x2c\x21\xac\xd5\xb3\x13\x27\
\x60\x67\x59\x50\x94\x4d\x18\x97\x96\x1e\xab\x57\x5d\x18\xbe\xb3\
\xff\xf0\xfc\xa4\x5a\x1d\x10\x63\x05\x8a\x44\xe2\xe1\xfb\xfa\x05\
\x35\x2e\x2d\x3d\xb6\xf8\x6c\xfe\x8b\x73\x45\xe5\x50\xe9\x38\x0c\
\x16\x5d\x0e\x1a\xc4\xa7\xd7\xe3\x51\x52\xd2\x9c\x83\x65\x1b\xf8\
\x3e\x87\x20\xb6\x1c\xc3\xfe\xfb\x28\xaa\x1a\x3b\x42\x46\x19\x57\
\x2a\x61\x67\x59\xf0\x7d\x96\xa8\xdf\xc7\x18\x4f\xc7\x66\xf0\x74\
\x6c\x26\x64\x14\xaf\x56\x0b\x3b\xcb\x82\xbe\x12\x6f\x60\x19\x7a\
\x53\xcc\x6a\x18\xeb\x45\xe9\xde\xb1\xc3\x67\x8b\x8f\xcf\xe0\xfb\
\x5c\x51\xb9\x0c\xbd\x29\x26\xbb\xc8\x64\xc9\x29\x2a\x5f\x58\x0d\
\x63\x3d\x28\x7d\xc9\xc9\x6f\x29\xc8\x3a\xc6\x19\xd2\x63\xb3\x8b\
\x4c\x96\x6c\x7d\x85\xff\x53\x18\xa1\xa2\x38\x65\xb2\x39\xfa\xfa\
\x1b\xe2\x38\x43\x7a\x6c\xe1\xe5\x82\x17\x39\xc5\xa6\xf7\xc1\x60\
\x04\x8b\x32\xa9\x56\xc3\xce\xb2\xb0\x33\xcc\x67\x7c\x9f\x31\x6a\
\x66\x35\x31\x31\x37\xcb\xbe\xb8\xab\xd2\x71\xb8\xd1\xec\x0c\x1a\
\x23\x18\x94\xa1\xb4\xb4\x7f\x5c\x72\x79\x37\xdf\x67\x8c\x9a\x59\
\x4d\x4c\xcc\x83\xf2\x44\xcb\x03\xd3\x4e\xff\x6f\x4e\x03\x54\x3a\
\x0e\x77\x3a\xfb\xc3\x82\x32\xae\x54\xc2\x21\x95\xfa\xec\x12\x09\
\xc3\xf7\x39\xa3\x62\xcb\x31\x3c\xcf\x2f\x03\x93\xd5\x18\x1e\xa8\
\x0c\x0b\xca\xb4\x46\x03\xe7\xf6\xed\xf3\x2e\xb9\xfc\xe2\x9a\x17\
\xa0\xd2\x19\x19\x55\x81\x31\x57\xa8\x69\x4a\x6a\xf2\xdb\x7b\x06\
\x9b\x5c\xfd\xcf\xad\x81\x72\x3c\xee\xeb\x78\x58\x9f\xe9\x7e\x50\
\xb1\xeb\xfd\x22\xc6\x62\xe1\x40\xb9\x7b\xf0\xd8\xbc\x43\x22\xe9\
\xb0\x32\x4c\xcc\xda\x20\x05\xc6\xdc\x73\x97\x2a\xdf\x19\xcc\x8d\
\x3e\xa1\x75\xad\xae\xc9\x37\xf0\xe7\xd8\x82\xc7\x0f\x04\x6a\xca\
\xeb\xc5\xa3\x56\x2d\x5a\x2b\x76\xe3\x63\x8c\x8d\xa2\xf4\x0c\x4d\
\x40\xa5\xe3\x90\x9f\xad\x7d\xf3\x49\x8c\x45\x10\x83\xb9\xd1\x17\
\xcc\x45\x0b\xb5\x60\x30\xd6\x8b\xd2\x33\x34\x81\x0b\x57\xaa\x91\
\xff\x7d\xe1\x34\x97\x96\x1e\x1b\xf0\x99\x29\x76\x90\x50\x30\x42\
\x45\x09\x19\x43\xec\x20\xeb\xc1\x08\x16\x65\x5d\x18\x62\x06\xd9\
\x08\x46\x20\x94\x9e\xa1\x09\x9c\x37\x98\xdf\x85\x8c\x21\x56\x90\
\x70\x60\xac\x85\xd2\x33\x34\x81\x1f\x0c\x96\xb9\xac\xc2\xf2\x8e\
\x90\x31\xc4\x08\x12\x4e\x8c\x8f\x51\x6e\x34\x3b\x71\xe1\xaa\xf9\
\x7d\x56\x61\x79\x47\x86\xde\xf4\xe9\xb7\x29\x0a\xb2\x39\x18\x8b\
\x0d\xf6\x96\x41\xa5\xe3\x90\x57\x5c\x34\xcd\x19\xd6\xf1\xc9\x10\
\x1b\xc8\x66\x62\xcc\x8e\x96\xa2\xdd\xbc\x77\xee\x7a\xe9\x97\x4f\
\x36\x84\x21\x16\x90\x48\x60\xdc\xfb\x29\xa1\xc3\x6a\x0a\xf0\xa3\
\x8f\x82\x44\x02\x23\x65\x21\x6c\x18\x42\x07\xd9\x74\x8c\xeb\xa9\
\x68\xad\x39\xf5\x26\x6c\x18\x42\x06\x89\x04\x86\xa3\xf1\x5b\x38\
\x1e\xf7\x75\x84\x0d\x43\xa8\x20\x91\xc2\x98\xf2\x7a\xe1\xea\x7f\
\x6e\xa5\x20\x84\x60\x78\xfc\xa0\x20\x24\x61\x50\x10\xc2\x30\x28\
\x08\x61\x18\x14\x84\x30\x0c\x0a\x42\x18\x06\x05\x21\x0c\x83\x82\
\x10\x86\x41\x41\x08\xc3\x10\x3d\x08\x69\x18\x23\x7f\x7b\x16\x34\
\x25\x35\xf9\xa2\x04\x21\x0d\xc3\xe3\x07\xae\xd5\x35\xf9\x54\x05\
\xc6\x5c\xd1\x81\x90\x88\xe1\xf1\x03\x06\x73\xa3\xf8\x40\x48\xc5\
\x10\x25\x08\xc9\x18\xa2\x03\x71\xbf\x1e\x47\xcf\xfd\x3c\x62\x31\
\x44\x05\xe2\x7e\x3d\x8e\x7b\x55\xc7\x71\xbf\x3c\x91\x58\x0c\xd1\
\x80\x44\x0b\x86\x28\x40\xa2\x09\x43\xf0\x20\xd1\x86\x21\x68\x90\
\x68\xc4\x10\x2c\xc8\x22\x46\x8b\x51\x1e\x55\x18\x82\x05\x79\x70\
\xab\x00\xf5\xfa\x04\xb4\x18\xe5\xb0\xd7\x1d\xc0\xec\x68\x69\x54\
\x60\x08\x12\xe4\x95\xe7\x2d\x38\xb5\x02\x7d\x0d\xa7\x31\xdd\xad\
\x46\x97\x65\x1f\xba\x1b\x8e\x46\x05\x86\x20\x41\x06\x9e\x74\x81\
\xcb\xdd\x81\x29\xa7\x1a\x33\xbd\x1a\x4c\x77\xab\xd1\x62\x94\x6f\
\xe8\x53\x12\x29\x0c\x41\x82\x34\x56\x66\xe3\xf6\xd5\x43\x98\xe9\
\xd5\x2c\xd5\x56\x95\x04\x7b\xdd\x01\x8c\xb8\xce\x12\x8d\x21\x38\
\x90\xc5\xc7\xd5\xe0\xed\x6f\x56\x80\xfc\xd5\xf6\x1d\xfa\x7f\x4e\
\x43\xab\x29\x11\xed\x96\xe4\xa0\xbf\xe8\x67\x47\x4b\xd1\x66\x4e\
\x8e\x18\x86\xe0\x40\x5c\xed\xf5\xa8\xca\xdb\xb3\x02\x63\x79\xd3\
\xdd\x6a\x38\x6b\xf7\xa3\xb5\x42\x11\xf0\x11\x36\x3b\x5a\x8a\x87\
\xd5\x7b\xd0\xc4\x25\xc3\xfd\x7a\x3c\x62\x67\x10\x14\x48\x7d\xd9\
\xd7\x78\x68\x3a\xbe\x26\xc8\x62\x5d\x96\x7d\xf8\xc3\x9a\x41\x1c\
\x86\xa0\x40\x46\x5e\x3e\x43\x49\x96\x04\xc3\xcd\x99\x01\x41\x5e\
\xdc\x3b\x83\xd6\x0a\xc5\xaa\x8f\x2e\x3e\x31\x04\x05\x52\x6a\xb8\
\x88\x5a\x5d\x4a\x40\x8c\x99\x5e\x0d\x26\x3a\xb3\xd0\x5e\x95\xf4\
\xdf\xef\x94\xcf\x97\x60\xf8\xc6\x10\x14\x08\x9b\xb0\x0b\x07\x0f\
\x24\x07\x05\xb2\xfc\x3b\xe5\xf1\xad\x63\x68\x31\xca\x31\xf9\xf4\
\x47\xde\x31\x04\x03\xd2\xd6\xd9\x86\x38\x46\x81\x38\x46\x81\x53\
\x27\x0e\x85\x84\x32\xd3\xab\x81\xed\x7a\x2a\x5a\x8c\x72\xde\x31\
\x04\x03\x92\x93\xa3\x5c\x02\x09\x05\x65\xca\xa9\xc6\x70\x73\x26\
\x6e\x16\x26\xe2\x97\x6b\x7b\x79\xc7\x10\x04\xc8\xc8\x98\x1b\x6c\
\xc2\xae\x15\x20\x81\x50\x86\x9b\x33\x61\x33\x9f\x44\x55\x5e\x12\
\x38\x75\x22\x1a\xca\xbe\x22\x02\x43\x10\x20\xd5\xe6\x8a\xff\x61\
\xac\x86\xe2\xee\xcc\x81\xcd\x7c\x12\xb5\xba\x14\x94\x64\x49\x50\
\x5f\x7a\x1a\xae\xf6\x7a\xbc\xf2\xbc\xe5\x1d\x41\x50\x20\x47\x8e\
\x1e\x5e\x13\x24\x8e\x51\xe0\xc8\xe1\x14\xd4\x6a\xf7\x82\x53\x27\
\xa2\xa6\x38\x0d\x9d\x2d\x95\x18\x79\xf9\x8c\xf7\x1b\x2f\x48\x90\
\x91\x31\x37\xb6\x49\xd6\xc6\x88\x63\x14\xd8\x26\x51\xe0\x4a\x51\
\x36\xd1\x08\x82\x01\x29\x29\x29\x5e\x13\x61\x5f\x6a\x2a\x6a\xea\
\x6a\x30\x32\xe6\xe6\xfd\x26\x8b\x06\x64\xf9\xe3\xea\x03\x42\x0a\
\x0a\x0a\xce\x47\x1d\x82\x20\x40\xfa\x06\x06\x10\xc7\x7c\x40\xc8\
\xc9\x51\xc2\xe6\xea\xe6\xfd\x66\x8a\x1e\xa4\xad\xb3\x8d\xf7\x1b\
\x48\x41\x04\x1e\x05\x21\x2c\x0a\x42\x58\x14\x84\xb0\x28\x08\x61\
\x51\x10\xc2\xa2\x20\x84\x45\x41\x08\x8b\x82\x10\x16\x05\x21\x2c\
\x0a\x42\x58\x14\x84\xb0\x28\x08\x61\x51\x10\xc2\xa2\x20\x84\x45\
\x41\x08\x8b\x82\x10\x16\x05\x21\x2c\x0a\x42\x58\x14\x84\xb0\x28\
\x08\x61\x51\x10\xc2\xa2\x20\x84\xb5\x69\x20\x2a\x1d\x07\x83\x99\
\xff\x7f\x77\x8e\xb6\xce\x5d\xaa\x7c\x17\x7e\x10\x81\xff\x9f\xfa\
\xa6\xa7\x33\x32\x61\x05\xa1\xa3\xa3\x5b\x65\xff\x02\x86\x7b\xd4\
\x88\xbd\xa9\xcd\xfc\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x05\x44\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\
\x00\x00\x05\x0b\x49\x44\x41\x54\x78\x9c\xed\x9c\x4f\x4b\x1b\x5b\
\x18\xc6\xf3\x11\xfc\x08\xfd\x08\xfd\x08\xfd\x04\xe2\xc6\x9d\x8b\
\x2e\xdc\xb9\x09\xe2\xd8\x21\xe5\x42\x67\x65\xe2\x11\xa6\x85\x4a\
\xc0\x7b\xec\x90\x91\x88\xa4\x72\x6f\x03\x09\xb5\x0e\x66\x68\xa5\
\xb5\x05\x83\x78\x35\xc8\x1d\xca\x0d\x69\xe1\x92\x45\x8a\xd8\x82\
\xd0\x6c\xde\x2e\xf4\x8d\x69\x3b\x93\xcc\x9f\xf3\x27\x4e\xe6\x81\
\x67\x13\x4d\x72\xce\x93\x77\xe6\xfc\xe6\x9d\xe1\xa4\x52\x89\x12\
\x8d\x94\x96\x15\xe3\x2e\x51\x0a\x47\x44\x29\x40\xe2\x7e\x9b\x36\
\x59\x30\xee\x0d\x0d\x90\x28\xa6\x2d\x7f\xb0\xa3\xe9\x15\xa5\xd0\
\xf4\x11\xe0\xd5\x3f\x7f\x6c\x7c\x4d\xdc\x67\xcc\x25\x09\x30\x09\
\x30\x09\xf0\x56\x3a\x09\x30\x09\xf0\x96\x04\xb8\xa2\x14\x9a\xb2\
\x71\x61\x84\x7d\x34\xbc\x02\x17\x8c\x7b\x49\x88\x6e\x36\xed\x65\
\xc5\xb8\x3f\x34\x40\xd4\x03\x42\xcf\x55\x42\x61\xef\xd3\x77\x78\
\xd3\x86\xb1\xf4\xde\xa7\xef\xa0\x12\x0a\x0f\x08\x3d\xf7\x1d\x1c\
\x4a\x25\xd4\x56\x09\x85\xbf\x8f\xff\x97\x3e\x11\x59\xae\x9c\x75\
\x30\xc0\xe1\x87\xee\xef\x15\xb8\xae\xab\x84\x82\x69\x9f\x4a\x9f\
\x88\x2c\x6f\xbe\x75\x40\x25\x14\xd4\x15\x6a\x04\xaf\xc0\xdc\x9f\
\x8f\x54\x42\x61\x7d\xb7\x2e\x7d\x22\xb2\x6c\xda\xa7\xd7\x15\xb8\
\xae\x07\x0e\x70\x31\x47\xa7\x54\x42\x41\xdf\xb2\xa4\x4f\x44\x96\
\x9f\xfe\xf5\xfa\xaa\x02\x97\xd7\xee\x07\x0e\x50\x59\x5e\xbb\xab\
\x12\x0a\x5a\x7e\x4b\xfa\x44\x64\x59\xdf\xb2\x40\x25\x14\x16\x73\
\x74\x2a\x70\x80\x69\xdd\x98\x50\x09\x85\x3f\x9e\x98\xd2\x27\x22\
\xcb\x5a\x7e\xeb\xaa\x02\x97\xf2\x77\x02\x07\x98\x4a\x8d\x37\xca\
\x44\x42\x18\xd4\x38\xa3\x4c\x24\x84\xb9\xa9\xc0\xf1\x45\x99\x48\
\x08\xd3\xab\xc0\x31\x46\x99\x48\x08\x83\x1a\x67\x94\x89\x84\x30\
\x28\x59\x28\x53\xac\x77\x40\x2b\x35\x60\x26\x5b\x83\x99\x6c\x0d\
\xe6\xe9\x21\xe4\xed\xcf\x42\xc7\x10\x09\x61\x50\x32\x50\xa6\x58\
\xef\xc0\xb4\xb6\x0b\x93\x99\xea\x6f\x16\x19\x62\x64\x84\x41\x21\
\xca\xbc\xfa\xf8\x8d\xfb\xa0\xb7\x4f\x2e\x3c\xc3\x13\x19\x22\x13\
\x84\x41\x89\x42\x19\xab\xd5\x1d\x1a\x1e\x7a\xfb\xe4\x82\xeb\x58\
\x98\x20\xcc\x4d\x05\x8a\x41\x19\xad\xd4\xf0\x15\xde\x64\xa6\x0a\
\xb3\xfa\x3e\xd7\xb1\x30\x41\x98\x5e\x05\x0a\x40\x99\x62\xbd\xe3\
\x3b\x3c\x11\x87\x32\x13\x84\x41\x89\x40\x99\xb9\xd5\x03\xd7\x90\
\xe6\xe9\x21\x64\xcb\x8e\xeb\xdf\xa6\xb5\x5d\x6e\xe3\x61\x82\x30\
\x28\xde\x28\x53\x71\x2e\x41\x2b\x35\x7a\xee\x0f\x68\xfb\xe4\x02\
\xac\x56\x17\x66\xb2\x35\xd7\x10\x33\x1b\xc7\x5c\xc6\x44\xcc\x6a\
\x74\x84\x41\x89\x46\x99\xcc\xc6\x31\x4c\x66\xaa\x90\x2d\x3b\xbd\
\xd7\x9e\xbd\x6b\x7b\x1e\xca\x15\xe7\x92\xf9\x18\x98\x21\x0c\x4a\
\x24\xca\x60\xc5\x59\xad\xee\x4f\xaf\xcf\xea\xfb\xae\x01\xce\xad\
\x1e\x30\xfd\x7e\xa6\x08\x83\x12\xdd\x95\x71\xab\xaa\x8a\x73\xe9\
\x59\x85\xbf\x86\x1d\xe9\xbb\x59\x22\x0c\x6a\x54\xba\x32\x78\x78\
\xf3\x44\x1a\xa6\x08\x83\x62\x8d\x32\x56\xab\x0b\xc5\x7a\x07\xb2\
\x65\x07\xe6\x56\x0f\x7c\x83\xb1\x1b\x6c\xb3\x5e\x48\x98\x22\x0c\
\x8a\x25\xca\xb8\x85\xd0\xbf\x60\xf8\x79\xbf\xbe\xd3\x84\x79\x7a\
\x08\xd3\xda\x2e\x3c\x7b\xd7\x66\x1a\x20\x53\x84\x41\xb1\x44\x19\
\xb7\x2b\x8e\x28\x4c\xc7\xf2\xfc\xf7\xa6\xcd\x18\x61\x50\xac\x50\
\x66\x50\xb3\x80\x75\x10\xa1\x7f\x60\xd6\x08\x83\x62\x81\x32\x83\
\xae\x38\x58\x4c\xde\x6a\x75\x23\x7d\x16\x17\x84\x41\x45\x45\x19\
\x2f\x18\xc6\x2b\x0e\x16\x01\xe2\x65\x5f\xd8\xf3\x22\x17\x84\x41\
\x45\x41\x19\xab\xd5\xf5\x04\x61\xad\xd4\x08\x1d\xd8\xf6\xc9\x05\
\x14\xeb\x1d\x28\xd6\x3b\x50\x71\x2e\x7b\x97\x7c\x61\xcf\xa9\x5c\
\x10\x06\x15\x05\x65\x06\x35\x04\xa2\x9c\xfb\x06\xf5\x10\x83\xac\
\xec\x68\x2e\x08\x83\x0a\x8b\x32\x83\x9a\x01\x61\x26\xf9\xab\xf3\
\xf6\x67\x66\xdd\x1a\x2e\x08\x83\x0a\x8b\x32\x5e\xbd\x3e\xb7\xeb\
\xdd\xb0\xf6\x3a\x3d\x04\xed\x19\x72\x41\x18\x54\x58\x94\xd1\x77\
\x9a\x9e\x4d\x80\xfe\x36\x16\x3a\x5b\x76\x5c\x83\xb5\x5a\x5d\xc8\
\x96\x1d\xd7\xf7\xcc\xd3\x43\x26\x15\xce\x0d\x61\x50\x61\x50\xc6\
\x2b\x40\x2f\xbb\x4d\x1a\xf1\x24\xc8\xe7\x04\x0d\x90\x2b\xc2\xa0\
\xc2\xa0\xcc\xa0\x2e\x8a\x1f\x1e\x0c\x1b\xde\x64\xa6\x0a\xc5\x7a\
\xc7\xff\x38\x79\x22\x0c\x2a\x2c\xca\x78\x01\xb4\x9f\x73\x56\x90\
\x1f\xa0\xdf\x41\x9b\x0c\x5c\x11\x06\x15\x05\x65\xdc\x5a\x51\x7e\
\xa1\x3a\xe8\x4d\xa7\xcc\xc6\x71\xe0\x05\x8a\x2b\xc2\xa0\xa2\x76\
\x65\xf4\x9d\x26\x68\xa5\x86\xe7\xaa\x89\xab\xb3\xdb\x7b\x87\x85\
\x38\xab\xef\xf7\x1e\x01\x09\xb3\xba\x73\x45\x18\x54\x9c\x1f\xfb\
\xe5\x8a\x30\xa8\x38\x3f\xf6\xcb\x1d\x61\x50\x22\x6f\x30\x89\xb2\
\x10\x84\x41\xc5\xf1\xb1\x5f\x21\x08\x83\x1a\x95\x1b\x4c\x2c\x2d\
\x04\x61\x50\x71\x7c\xec\x57\x08\xc2\xa0\xe2\xf8\xd8\xaf\x10\x84\
\x41\xc5\x11\x65\x84\x20\x0c\x2a\x8e\x28\x23\x0c\x61\x50\x71\x42\
\x19\xa1\x08\x83\x8a\x13\xca\x08\x45\x18\x54\x9c\x50\x46\x28\xc2\
\xa0\xe2\x84\x32\x42\x11\x06\x15\x27\x94\x11\x8a\x30\xa8\x38\xa1\
\x8c\x50\x84\x41\xc5\x09\x65\x84\x23\x0c\x4a\xfe\x46\x38\x6c\x2d\
\x34\xbc\x24\x40\x86\x01\xca\xde\x14\x4c\xd8\xa6\x62\x49\x80\x23\
\x16\x20\x6e\x50\xf6\xcf\x87\xb6\xf4\x10\xc2\xfa\xec\xe8\x0b\x06\
\x28\xee\x32\xee\x26\x40\xd3\x20\x4a\x01\x6a\xe5\x53\xe9\x41\x84\
\xf5\xfe\xcb\x7f\x7b\x3b\xb3\x09\x0f\x30\xb7\x68\x4c\x11\xa5\x00\
\x8f\x1f\x6e\x4a\x0f\x22\x6c\xf5\xad\x2d\xbd\x00\xa2\x14\x20\xb7\
\x60\x3c\x12\x1e\x60\x2a\x75\xb3\xe7\xf4\xe3\x87\x9b\x50\x2b\x9f\
\xc2\xd9\xd1\x17\xe9\xc1\xf8\x09\xee\xfd\xde\x7f\xb0\xaa\x3d\xef\
\x55\x9f\x9e\x36\x26\xa4\x04\xa8\xa7\x8d\x89\xdb\xbd\x71\xb7\x69\
\x2f\xa9\xc6\x1d\x29\xe1\xf5\x8b\x2c\x18\xe9\xeb\x20\xcf\xe5\x87\
\x32\xd4\xe7\xd7\xdb\xba\xa7\xa5\x55\x5e\xa2\x44\xcc\xf4\x03\xce\
\x70\x3e\x0c\x74\x0d\x10\xe8\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x04\x14\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\
\x00\x00\x03\xdb\x49\x44\x41\x54\x78\x9c\xed\x9a\x3f\x4b\x23\x51\
\x10\xc0\xf3\x11\xf2\x11\xee\x23\xf8\x11\x02\xa6\xcc\x6e\x56\x0b\
\xc1\x22\xa0\x85\x85\x90\x62\x41\xe1\x76\x5f\x54\x96\x13\x09\xb9\
\x43\xc2\x15\x07\x29\x02\x5a\x08\xa6\x08\xb7\xc5\xe6\xad\x04\x0e\
\x62\x11\xd0\x2e\x45\x20\x85\x85\x92\x22\x85\x10\xd0\x4f\xe0\x5c\
\x61\x66\x5d\xc4\x3f\xd9\xbc\xdd\x3c\x77\x33\x3f\x98\x7e\xdf\x8f\
\x99\x79\x33\x8f\x4d\xa5\x08\x82\x20\x08\x82\x58\x70\x34\xcb\x4e\
\xe7\x0c\x27\xa3\x96\xdc\xaa\x62\xba\x1d\x95\xf1\x9e\xca\xf8\xc3\
\x24\xee\xf2\x8c\xdb\x79\xc6\x4f\x14\xc6\x37\x64\x7f\xeb\x97\x22\
\x67\x38\x19\xc5\x74\x3b\x79\xe6\x3e\xaa\x8c\xc3\xb4\x91\x67\xdc\
\x5e\x68\x99\x39\xc3\xc9\xe4\x19\xb7\x51\xc8\xca\x7e\xfb\xa9\x50\
\xe9\x42\xb1\xd6\x87\xbd\xc6\x10\xac\xe6\x08\xca\xce\x18\xca\xce\
\x18\xac\xe6\x08\x8c\xb3\x5b\xd0\xeb\x03\x28\x54\xba\xaf\x65\xf6\
\x34\xe3\xe2\x9b\xec\xf3\xcc\x15\x95\x39\x16\x66\xdc\xca\x7e\xfb\
\x49\xaf\x0f\xa0\xec\x8c\xe1\xd7\xc5\xe3\xd4\xa1\xd7\x07\xaf\x44\
\x3a\x96\xec\x73\x45\x8e\x66\xd9\xe9\x3c\xe3\x27\x28\xae\x58\xeb\
\x07\x16\xf7\x91\x48\xc5\x74\x3b\xb2\xcf\x18\x19\x13\x79\x36\xca\
\x33\xce\x6e\x85\xc4\xf9\xc3\x6a\x8e\x60\xf5\xa0\xed\x95\xb4\xec\
\xb3\x46\x02\xca\x5b\x3f\xba\x84\x43\xfb\x3e\x34\x79\x18\x65\x67\
\x0c\xeb\x47\x97\xc9\xcc\x44\xc5\x6c\xe9\x98\x79\x56\x73\x14\xba\
\x3c\xbf\x44\xcc\xc4\x3c\xe3\x27\xb2\xcf\x1d\x0a\x39\xc3\xc9\x44\
\x51\xb6\x1f\x95\x33\xf6\x44\x8d\xf1\x25\xd9\xe7\x17\x66\x32\x14\
\x43\xb1\xd6\x8f\x5c\x1e\x46\xb1\xd6\x4f\x46\x3f\x54\x4c\x57\xc3\
\xec\x13\xbd\x6d\x83\x96\xf2\xda\x8f\x7f\xd8\x0f\x35\xd9\x1e\x66\
\x06\x2f\x8e\x79\x66\x1f\xc6\xee\xe9\x8d\xb7\xb1\xc8\xf6\x30\x13\
\xcf\x63\xcb\xf3\xb0\x1c\xc5\xad\x3b\x4d\x16\x7a\xbd\x30\x8e\x9b\
\x0a\x96\x6f\xa1\xd2\x9d\xbb\x3c\x8c\xcd\xe3\xab\xe7\x32\x8e\xe3\
\xce\x8c\x1b\x87\x8c\xf2\x4d\x44\x19\x63\xff\x9b\xc7\xe8\xf2\x5e\
\xec\x35\x86\xf1\x1d\xac\x55\xc6\xef\x54\xc6\x21\xca\xc1\x39\x40\
\x1f\x7c\x90\xed\x23\x30\x28\x50\xc6\x05\x92\x14\x81\x0f\x2a\xe3\
\xc2\xaf\x2d\xa2\x81\x37\xb1\x6c\x1f\x81\x99\x3c\xc9\x4b\x2d\xe1\
\x43\xfb\x1e\x05\xde\xc9\xf6\x11\x18\x5c\xe1\xf6\x1a\x43\x69\x02\
\x7d\x7b\x71\xfc\x56\x3a\xb5\xe4\x56\x55\xc6\x41\xaf\x0f\xa4\x09\
\xf4\x8d\x31\xf1\x7b\x99\x51\x18\xdf\x50\x19\x87\xcd\xe3\x2b\x69\
\x02\xbd\x41\xda\x6c\xe9\xb2\x7d\x04\x46\xb3\xec\x34\x36\x70\x19\
\x17\x49\xd9\x19\xc3\xca\x7e\xfb\x29\xb6\xab\x5c\x2a\xf5\x32\x4c\
\xef\x9e\xde\x48\x2b\xdf\x58\xf6\x3f\x04\xf7\xe1\xd5\x83\xf6\xdc\
\xb3\xcf\x7b\xce\x8a\xe3\x1e\xec\x07\xc7\x19\x19\x0f\xaa\xb1\x5c\
\xe1\x5e\xa3\x31\xbe\x84\xbd\x70\x1e\x33\xa1\xd5\x1c\x79\xbd\x2f\
\x67\x38\x19\xd9\xe7\x0f\x05\x7c\x99\x59\x3d\x68\x47\xba\xda\xf9\
\x4b\x57\x2d\xb9\x55\xd9\xe7\x0e\x15\x1c\xac\xd7\x8f\x2e\x23\xb9\
\x95\xcb\xce\xd8\x1b\x5b\x54\xc6\x7b\x9a\x65\xa7\x65\x9f\x39\x54\
\x26\x63\x4d\x0f\x33\x31\xcc\x72\xb6\x9a\xa3\x97\xcc\x63\xfc\x2e\
\x71\xf2\x10\xcd\xb2\xd3\x98\x89\x2a\xe3\xb0\xfd\xa7\x27\x9c\x75\
\xc5\x5a\xdf\xeb\x79\x8a\xe9\x76\x62\x3b\xf3\x05\x01\xd7\x3c\xcc\
\xc6\xa0\x73\x62\xd9\x19\x83\x71\x76\xeb\xcf\x3a\x50\x4b\x6e\x35\
\xb1\x99\xf7\x16\x93\xdb\xb9\xe7\xff\xc3\x6a\xf3\xf8\x0a\xf4\xfa\
\x00\xf6\x1a\x43\xef\xb2\x29\x3b\x63\x38\xb4\xef\xbd\x5f\xdc\x0a\
\x95\xae\x97\x71\x98\x75\x89\xb9\x6d\x67\x41\x61\x7c\xc3\x5f\xd6\
\xd3\xfd\x5c\xe9\x3e\x2a\xa6\xdb\x51\xcc\x96\xbe\x50\x59\xf7\x19\
\x8a\xd9\xd2\x4b\x5b\x3f\xa1\x58\xac\x41\x61\xe7\x1c\x54\xc6\x61\
\xed\xfb\x5f\x28\xec\x9c\xc3\x96\x7e\x0a\xbb\xdb\xbf\x41\x31\x5b\
\xfa\x42\xf4\xb9\x59\xb9\x5a\x5e\x86\x8f\x42\xf6\xf7\x7d\x79\x48\
\xa0\x20\x24\x50\x10\x12\x28\x08\x09\x14\x84\x04\x0a\x42\x02\x05\
\x21\x81\x82\x90\x40\x41\x48\xa0\x20\x24\x50\x10\x12\x28\x08\x09\
\x14\x84\x04\x0a\x42\x02\x05\x21\x81\x82\x90\x40\x41\x48\xe0\x3b\
\x5c\x67\xb3\xd5\xcf\xe4\x84\x15\xd7\xd9\x6c\xb2\xfe\x4a\x40\xe6\
\x21\x31\xb1\xf2\x90\x28\x25\x26\x5e\x1e\x12\x85\xc4\x85\x91\x87\
\x84\x29\x71\xe1\xe4\x21\x61\x48\x5c\x58\x79\x88\x88\xc4\x85\x97\
\x87\xcc\x22\x91\xe4\xbd\x22\x88\x44\x92\xf7\x0e\xd3\x48\x24\x79\
\x9f\xf0\x91\x44\x92\x37\x25\x6f\x49\x24\x79\x01\xf1\x4b\x24\x79\
\x33\x72\x9d\xcd\x56\x49\x1e\x41\x10\x04\x41\x10\x6f\xf3\x1f\xe8\
\x8a\x6f\xaf\x03\x06\xdc\x13\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x09\xf3\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x09\xba\x49\x44\x41\x54\x78\x9c\xed\x9b\x4f\x6c\x13\x57\
\x1e\xc7\x23\x0e\x3d\x94\x1e\xac\x1e\xb6\xd7\x72\xd8\x5e\x7a\x28\
\xa7\x4a\xbd\x65\xb7\x52\x2b\x2d\x99\xf1\x58\x80\x4a\xb5\xf6\x9b\
\x49\xd2\x05\x15\x11\x34\x2b\xec\x37\x33\x6f\x6c\x76\x08\x7f\x72\
\x08\xab\x4a\x68\xd9\x96\xcc\x1b\x8f\x1b\xa8\xdc\x16\x44\x50\x3d\
\x6f\x9c\x84\x25\xe9\x86\x43\xaa\x80\x84\x54\xb4\x69\xd4\x22\x60\
\x55\x01\x5a\xa9\x6a\xd9\x4d\x62\x93\x04\xf2\xf6\x10\x27\x98\xd4\
\x4e\x1c\x32\x1e\x3b\xf1\xfb\x48\x56\x0e\x89\xdf\xfb\x4d\xbe\xf3\
\x7e\xbf\xf7\x7d\xf3\x9b\xa6\x26\x06\x83\xc1\x60\x30\x18\x0c\x06\
\x83\xc1\x60\x30\x18\x0c\x06\x83\xb1\x61\x90\x6c\x39\x10\x36\xa3\
\xcd\x11\x13\xca\x61\x33\xda\x5c\xeb\x78\x1a\x9a\x30\x86\x82\x68\
\xa9\x93\xad\xb6\x36\xd3\x91\xee\xcc\x01\x0c\x29\xc0\x90\x46\x7a\
\x62\x52\xad\x63\x6b\x38\x22\x3d\x31\xa9\x35\xa9\xe5\x4f\x8d\x24\
\xa9\x73\x6b\x80\x0e\xde\xbd\x42\x07\xef\x5e\xa1\xa7\x46\x92\x14\
\x60\x48\xc3\x18\x0a\xb5\x8e\xb1\x61\x90\x6c\x39\x20\x25\x95\xa9\
\x93\x43\x67\x96\x84\x28\xfe\x58\xd7\xd2\x54\x4a\x2a\x53\xd2\xc7\
\xea\xab\xb5\x8e\xb5\x21\x08\x9b\xd1\xe6\x36\x5b\x9b\x2d\x5e\x19\
\xcb\x3f\x6d\xb6\x36\xcb\x56\x89\x4f\x44\x4c\x28\x77\xa4\x3b\x73\
\xe5\xc4\x18\xbc\x7b\x85\x1e\x3a\x7f\x22\x0f\x4c\x68\xd4\x3a\xd6\
\x86\x20\x8c\xa1\xd0\x96\x54\x1f\xad\x24\xc8\xde\xde\xf8\x3c\xc0\
\xf0\x9f\xc0\x8c\x86\xdb\xb1\xf1\x72\xad\x63\xde\xd4\x48\xb6\x1c\
\x00\x18\xd2\x53\x23\xc9\x92\x62\x9c\x1a\x49\x52\x29\xa9\xce\x00\
\x1c\xbb\x04\x30\xfc\x19\x60\x48\x81\xa5\x5c\x11\xb1\x12\x15\x7b\
\x94\xd7\x6b\x1d\xff\xa6\xa4\x15\x43\x11\x60\x48\x3f\x1e\xed\x7d\
\x46\x8c\x8f\x47\x7b\x69\xab\xad\xcd\x44\x4c\x28\x2f\xfe\xad\x88\
\xe1\xdb\xa2\x05\x4f\x02\x0c\xc7\x0b\x5b\xe3\x71\xd1\x82\x27\x45\
\x0c\xdf\x5e\x6d\x1e\xc9\x96\x03\xc0\x84\x46\x9b\xad\x4e\x88\x96\
\x3a\xd9\x9e\xd2\x07\x44\x0b\x7e\x54\xdd\xab\xdb\x80\x44\x70\x6c\
\x38\x82\xe1\x0f\x8b\xde\xe3\xd0\xf9\x13\xf9\x36\x5b\x9b\x95\x92\
\x6a\xae\x58\x8c\xe5\x88\x3d\xca\xeb\x22\x56\xa2\xc0\x52\xae\x14\
\xbe\xfb\x33\xc0\xb1\xcf\x4a\xa5\x36\xc9\x96\x03\x6d\xb6\x3a\xd1\
\x66\x6b\xb3\x27\x87\xce\x50\xeb\x5a\x9a\x9e\x1c\x3a\x43\xf7\x9d\
\x4d\xcc\xb4\x26\xd5\x7f\xd5\x7c\x17\x27\xf5\xc4\xb6\x87\x31\x14\
\x22\x3d\x31\x49\xea\x89\x6d\xaf\x55\x1c\x22\x86\x69\x60\xc6\xbe\
\xfb\xc0\x44\xaf\x48\xb6\x1c\x08\x63\x28\x00\x13\x1a\x61\x33\xda\
\xbc\x96\xb8\xda\xf1\x9f\x5f\x16\x2d\xf8\x47\x80\x63\x9f\x3d\x4d\
\x6d\xb1\xa5\xd4\x26\xd9\xaa\xd3\x91\xee\xcc\x95\xda\xcd\x45\x2f\
\x74\xcd\x89\x96\x72\xb5\x9a\xd7\x59\x16\xc9\x96\x03\x92\xad\x3a\
\x00\x43\xda\x9e\x42\x73\x7b\x7b\xe3\xd3\x85\xbb\xeb\x86\xdf\x77\
\x89\x68\xc1\x8f\x00\x86\xbf\x84\x2d\xe5\x0d\xcf\xc7\xfe\x75\x6a\
\xa3\x5f\xdc\xbc\x54\xb2\x4e\x39\xb7\x06\x28\xc0\x90\xfa\x7e\x63\
\x4a\xb6\x1c\x68\xb5\xd5\xd1\x43\xe7\x4f\xe4\x8b\x83\xfb\xe2\xe6\
\x25\x7a\xf0\xf3\xa3\x8f\x5b\x93\xea\x3d\xbf\x62\x01\x18\x2a\x00\
\x43\x1a\x49\x2a\xef\x56\x7d\x2e\x4b\x41\x7b\x3f\x8d\xcf\xae\xb4\
\x93\xdb\x77\x36\x31\xb3\x52\x7a\xac\x4e\x60\x26\x34\xf6\xf5\x26\
\xf2\xa5\x96\xad\x73\x6b\x80\x7e\x78\xee\xf0\x9c\x88\x15\xbb\xea\
\x71\x14\x8a\x38\xc0\x50\xac\xf6\x5c\x4d\x4d\x0b\x5b\xeb\xf6\x14\
\x9a\x5b\x55\x10\xbf\xcf\xcd\xf6\x7e\xaa\x0f\x9d\x18\x3c\x5d\x36\
\xa8\x93\x43\x67\xa8\x68\x29\xff\xab\x66\x0c\x91\xa4\xf2\x6e\x41\
\x0c\xa5\x9a\xf3\x14\xb3\xb8\xb5\xbe\x38\xe1\x94\xbc\xee\x8b\x13\
\x4e\x6d\xce\xcc\x44\x4b\x9d\xb4\xae\xa5\xcb\x0a\x62\x5d\x4b\x53\
\x80\x21\xad\xd6\xfc\x61\x4b\x79\x03\x60\xf8\x4b\x2d\xb6\x9a\xa2\
\xa5\x5c\x3d\x74\xfe\x44\xbe\x54\x66\xd0\x2e\x75\xcf\x8a\x96\x72\
\x55\xb2\xe5\x80\xaf\x41\x49\x96\xf2\x6d\x39\x03\xb6\x68\xc2\xda\
\x6c\x34\x51\x8d\xb9\x3f\x30\xd1\x2b\xc0\x8c\x7d\x27\x62\x98\xae\
\xc6\xf8\xab\x21\xd9\x72\xa0\x35\xa9\xde\xeb\x48\x77\xe6\x16\xb7\
\xbd\xa7\x46\x92\xb4\x23\xdd\x99\x6b\xb3\xd5\x89\x9a\xec\x34\x81\
\x09\x8d\x76\x1b\x3d\x29\xb7\xd3\x38\x90\x3e\x32\x2f\x25\x95\x29\
\x11\x2b\x51\xc3\x30\xb6\x78\x39\xf7\x82\xd7\x88\x0d\x7b\x39\xe6\
\xf3\x00\x4c\x68\xb4\xa7\xd0\x0f\x92\xa5\x4e\xb7\xd9\xea\x44\x7b\
\x0a\xfd\xdd\xf7\x95\xb1\x14\x0c\x86\xfb\x45\x4b\xa1\x07\x3f\x3f\
\x3a\x5f\x9c\x4f\x17\x97\x6d\xab\xad\x3e\x00\x26\x4c\x00\x0c\xef\
\x03\x0c\xef\x7b\x25\x4c\xb1\xd7\xf0\xe2\x3a\x36\x05\xc0\x82\x31\
\x80\x21\x8d\xe0\xd8\x61\xd1\x52\xae\x02\x0c\xe9\xbe\xde\x44\xbe\
\x23\xdd\x99\x6b\x4b\xaa\x8f\x5a\x6d\x75\x74\xd1\x87\x18\x86\xb1\
\x45\xc4\x4a\xd4\x0b\x61\xaa\xe9\x35\x36\x2c\x00\xc7\xba\x01\x86\
\x34\x62\x3d\xdd\xd6\x85\x31\x14\x22\x26\x94\x23\x26\x94\xa5\x9e\
\xd8\xf6\x52\xcb\x76\x2d\xc2\x94\x72\xfe\x7e\x7a\x8d\x0d\x03\xc0\
\x30\x05\x2c\x38\x25\x62\x65\xc7\xf3\x8e\xb1\x92\x30\x2b\x38\xff\
\x7f\xfb\xe9\x35\xea\x9e\x48\x6f\x74\x2b\xc0\x90\x00\x0b\xde\x01\
\x18\xbe\xe9\xc5\x98\xcb\x85\x01\x26\x4c\xb4\x26\xd5\x6f\xca\x38\
\x7f\x2a\x25\xd5\xff\x7a\x31\xef\x86\x27\x62\x46\xb7\x89\x96\x32\
\x16\xb1\x94\x31\xc9\xf6\xfe\x7c\x6a\x49\x18\x13\x4e\xee\x3d\x1b\
\x9f\xaf\xb5\xf3\xaf\x6b\x00\x86\x6f\x16\x56\x05\x89\xf4\x46\xb7\
\x56\x73\xae\x3f\xf5\xea\xc3\xb5\x76\xfe\x75\x8d\x88\x95\x1d\x00\
\xc3\x69\x80\x61\xca\x97\xf9\x6a\xec\xfc\xeb\x9a\x88\xa5\x4a\x0b\
\x45\x34\xd6\xed\xd7\x9c\x95\x39\x7f\xb5\x2a\xce\xbf\xae\x59\xf4\
\x18\xc0\x82\x31\x5f\xe7\x35\xa1\xd1\x9e\xd2\x1e\x97\x73\xfe\x1d\
\xe9\xce\xdc\x07\x9f\xea\x5d\x7e\xc6\x54\x73\x4a\x79\x0c\xbf\x90\
\x6c\x39\x20\x5a\xca\x8f\x07\x3f\x3f\xfa\xb8\xa4\xf3\x4f\xaa\x0f\
\x6a\x76\x2c\x51\x0b\x00\x86\x29\x80\xe1\xf4\x7a\x3c\xc6\x7a\x29\
\x88\xb2\xa2\xf3\xdf\x34\x94\xeb\x06\xdf\x7d\x7a\xff\x4b\x5e\x7b\
\x8c\xf5\x52\x89\xf3\xdf\xd0\x94\xeb\x06\x17\xb1\x12\x15\x2d\x65\
\x4c\xb4\x94\xb1\x88\x19\xdd\x56\xeb\x38\x1b\x82\xd5\xba\xc1\x01\
\x56\xae\x57\xdb\x63\x30\x0a\xb0\x6e\xf0\x3a\x83\x75\x83\xd7\x19\
\xac\x1b\xbc\xba\x08\x46\x5f\xa0\x45\xcd\x34\x73\x9a\x23\xb7\xa8\
\x99\xe6\x55\xbf\x50\x61\x37\xf8\x34\x5b\x21\x6b\x87\xd3\x5c\x21\
\xa8\xbb\x93\xa1\x78\x76\x66\xcf\xb1\xe1\x1c\x8f\x08\xe5\x11\xa1\
\x1c\x22\x52\xd9\x2f\x55\xd8\x0d\x9e\x63\x35\x64\x6d\x70\x88\x48\
\xc1\x78\x36\x2f\xe3\x71\xda\x95\xf9\x89\x76\x67\x1f\xd2\xee\xec\
\x43\x2a\xe3\xf1\x05\x51\x34\xb7\xfc\x0d\x1e\xc6\x50\xa8\xb4\x1b\
\x9c\xb1\x3a\x82\xd1\x17\x08\xea\xee\xd4\x81\x4f\x6e\x2e\x09\x51\
\xfc\x51\xcf\xdd\xa6\x41\xdd\x9d\x12\xd4\xec\xab\x65\x07\x89\x98\
\x50\x5e\x6b\x37\x38\xa3\x34\x2d\x6a\xa6\x39\x14\xef\x9f\x2d\x5e\
\x19\xcb\x3f\xa1\x78\xff\xec\x8a\xab\xa4\xa9\xa9\xe0\xd4\x9f\xb3\
\x1b\x7c\xa3\x23\x20\xb2\x9d\xd3\x5c\x81\x43\x44\x12\x10\x59\xd7\
\x75\xf3\x88\xc4\xde\x3b\x36\xf4\xa8\x9c\x18\xdd\xd9\x87\x34\xdc\
\x35\x92\xe7\x51\xc6\xf0\x28\xfc\xcd\xc3\x42\x7a\xc9\x3a\x3c\x22\
\x34\x94\xe8\x9f\xdb\x69\x0c\x4e\x17\x8a\xef\x8d\x15\x53\x4a\x11\
\x7f\xd0\xc8\x6b\x9c\x46\x22\x3c\x22\x7f\xe3\x35\xe7\x3a\x8f\x08\
\x15\xe2\xd9\xb2\x62\x74\x67\x1f\xd2\x9d\xc6\xe0\xf4\xaa\x2b\xa4\
\xd1\x10\x8c\xbe\x40\x28\x9e\x1d\x0d\x77\x8d\xe4\x8d\x0b\xf7\x96\
\xfe\x59\xc6\x85\x7b\xf4\xfd\xe3\xc3\x8f\x83\xf1\xec\xaf\xba\xf3\
\x39\x23\xf3\x22\x8f\x9c\xdf\xf1\x88\x68\x3c\x22\x97\x78\x9d\x3c\
\xe0\x11\xa1\x85\x9f\x97\x78\x44\x34\x5e\x73\x78\x1e\x11\x2a\xe3\
\xf1\x92\x62\xc8\x78\x9c\x0a\xba\x9b\xab\x54\xf0\x86\x81\x47\x19\
\x63\x97\x71\x39\x5f\x2a\xd7\x77\x65\x7e\xa2\xbb\x8e\xfc\x63\x8e\
\x47\xce\xc5\xa7\x77\x3f\xb9\xbe\xb8\x75\xe5\x75\x72\x9d\x43\xce\
\x69\x41\x27\x91\x90\x46\x5e\x5b\x3e\x36\xa7\xb9\x02\x8f\x08\x8d\
\xa6\xbe\x7f\x66\xdc\x68\xea\x7b\x1a\x8a\x67\x67\x38\xcd\x61\xb5\
\x79\x39\x3b\x0f\x0f\x0c\x7d\x78\xfa\x46\xd9\xb4\x72\xe0\x93\x9b\
\x34\xa8\xbb\xcf\xdc\xfd\x9c\xea\xfe\x9e\x33\x32\x2f\x56\x32\x3e\
\xa7\x39\xf2\xa2\x80\xe1\xae\x91\x7c\x28\xde\x3f\x2b\xe8\x6e\xae\
\x2e\xc4\xf0\xb2\x68\x7a\x45\x50\x77\x27\xd5\x73\xb7\xcb\x0a\xa2\
\x9e\xbb\x4d\x79\x44\xd6\xf5\x8c\x5e\x30\xfa\x02\x0b\xab\x25\x63\
\xb4\xa8\x99\xe6\x9a\x5f\xbb\x17\x45\xb3\x5a\x04\x75\xf7\xdb\x72\
\x79\x7e\x29\xd7\xc7\xb3\x9b\xe7\x19\xfd\xf3\x14\x4d\x3f\xe1\x91\
\xd3\x19\x4a\xf4\xcf\x97\x12\xa3\x2b\xf3\x13\xdd\x73\x6c\x38\x17\
\x4a\x0c\x6c\x9e\x67\xf4\x95\x14\xcd\x20\x22\x35\x69\x6c\x0b\x69\
\xd9\xb7\x38\xe4\x8c\x07\x75\x77\xf6\xfd\xe3\x5f\x3f\x39\xda\xf7\
\x9f\x67\x62\x93\xfe\x3a\x3a\x1b\x4a\x64\x1f\x08\x46\xdf\xe6\x79\
\x12\x59\x51\xd1\x44\xc4\xf7\xc6\x36\x5e\x77\x0f\xf0\x88\xd0\x20\
\x22\x76\x28\x7a\xf1\x37\x82\xee\x5e\xe5\x11\xa1\xbb\x8c\xcb\xf9\
\x3d\xc7\x86\x73\x42\x3c\xfb\x28\x14\xcf\x8e\xd6\x3a\xa5\x7a\x8e\
\x1f\x45\x73\x2d\xec\x36\xbe\x7c\x21\x88\x88\xbd\xb0\x6d\x75\x0f\
\x14\xff\x8e\xd3\x5c\x81\xd3\x1c\x99\xd3\x1c\x59\x40\x64\xfb\xa6\
\x5a\x19\x8b\x54\x54\x34\x13\xfd\xbe\x14\xcd\x42\x8a\xba\xc9\x21\
\xe7\x66\x48\xcb\xbe\xe5\xc7\x9c\x75\x07\x8f\x32\x46\x28\xd1\xff\
\xa4\x5c\xd1\x7c\xef\xd8\xd0\x7c\x50\xcf\x4e\x05\x75\x27\x6a\x18\
\xd4\xd3\x57\xda\x9e\x89\xa3\x28\x45\xed\x36\xbe\x7c\xa1\x5a\xf3\
\xd4\x3d\x9c\xee\xec\x0f\xea\x2e\xdd\x73\x7c\x78\xbe\x6c\xd1\x44\
\x4e\x82\x47\xe4\x3e\x8f\xc8\x7d\xaf\x85\x59\x29\x45\x35\x1c\x3c\
\x22\x31\x1e\x11\x1a\xd4\x9c\xc3\xab\x15\x4d\xc3\xa0\x5b\x82\xba\
\x13\xf5\x52\x18\x96\xa2\x8a\xe0\x34\xa7\x7b\xf9\xa3\xca\x4a\x8a\
\xe6\x5a\x84\x59\xc9\xf9\xb3\x14\x55\x04\xaf\x39\x29\x1e\xb9\x53\
\x3c\x72\xd6\xf1\x4a\x5b\x79\x61\x56\x72\xfe\x2d\xf1\xcc\x6f\x59\
\x8a\x2a\xf0\x4e\x74\x60\x2b\xa7\x3b\x84\x47\xe4\x4e\x8b\x9e\xf1\
\xe8\x95\xb6\x67\x85\x11\x90\x93\x10\xe2\xfd\xdf\x94\x76\xfe\x5f\
\x3f\x09\xea\xee\x1c\x4b\x51\x4d\x4d\x4d\x5c\xfc\xab\x6d\x3c\x22\
\x63\x3c\x22\x63\x82\xe1\xbd\x99\x7a\x2a\x8c\x33\xb9\xd3\xb8\x3c\
\x5f\xce\xf9\xef\x3e\x72\xf9\x09\xa7\x91\x5e\xaf\xe7\xdf\x50\xb4\
\xe8\x99\x37\x79\xe4\xde\xe1\x74\x87\xbc\x13\x1d\xa8\x6a\xbb\xe9\
\xce\xbf\x0c\x0e\xd7\xa3\xf3\xaf\x1b\x78\xe4\xec\xe0\x11\x99\xe6\
\x35\x27\xe5\xc7\x7c\xf5\xe6\xfc\xeb\x0a\x0e\x11\x69\xa1\xbf\xc8\
\xf1\xed\x95\xb6\x86\x3b\x2e\xaf\x94\x45\x8f\xc1\x23\xe2\xeb\x2b\
\x6d\x05\xe7\xff\xb8\x61\x8e\xcb\x2b\xa1\x94\xc7\xf0\x0b\xc1\xe8\
\x0b\x04\xe3\xee\x8f\xef\x1f\x1f\x7e\xdc\x10\xc7\xe5\xab\xb1\xe0\
\x31\xc8\xf4\x7a\x3c\xc6\x7a\x11\x8c\xbe\x40\xc3\x1c\x97\x97\xeb\
\xd4\xde\x6d\x7c\xf9\x12\xa7\xbb\x84\x47\xae\x67\x1e\x63\xbd\x6c\
\xfa\xe3\xf2\xb2\x9d\xda\xba\x13\xe5\x91\x3b\xc6\x23\x77\x8c\x8b\
\x7f\xc5\x5e\x69\xf3\x83\xd5\x3a\xb5\x79\x9d\x5c\xaf\xb6\xc7\x60\
\x14\xf0\xa4\x53\x9b\xe1\x1d\x9e\x75\x6a\x33\xbc\x81\xd3\x1c\x79\
\xcf\xb1\xe1\x1c\xeb\xd4\xae\x13\x38\xcd\x15\x84\x78\x76\xc5\xd6\
\x79\xd6\xa9\xed\x23\x82\xd1\x17\x60\x9d\xda\x75\x06\xeb\xd4\xae\
\x43\xea\xba\x53\xbb\x51\xa9\xbb\x4e\x6d\x06\x83\xc1\x60\x30\x18\
\x0c\x06\x83\xc1\x60\x30\x18\x0c\x06\x83\x51\x77\xfc\x1f\xc0\x4f\
\xef\x1c\x72\xd3\x34\x0a\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\
\x60\x82\
\x00\x00\x01\xaa\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\
\x00\x00\x01\x71\x49\x44\x41\x54\x78\x9c\xed\xda\xcd\x0d\x82\x40\
\x10\x86\x61\xab\xf2\x6c\x03\x76\x01\x1d\x98\x60\x09\xde\x49\x6c\
\x83\x1e\xd4\x62\xec\x02\x0f\x66\x8d\x21\x10\x91\x5d\xbe\x99\x09\
\xef\x24\xdf\x8d\x64\x33\x0f\x7f\x1b\x98\xdd\x8e\xa2\x28\x8a\xfa\
\xd4\xe9\xfa\xe8\xc9\x74\x00\x54\x01\xde\x9e\x3d\xf9\x0a\x80\x00\
\x02\x18\x3a\x00\x02\x08\x60\xe8\x00\x08\x20\x80\xa1\x03\x20\x80\
\x00\x86\x0e\x80\x00\x6e\x18\xb0\xed\xee\xf2\x86\x4b\xaf\x69\x06\
\x58\x9f\x2f\xfd\xfe\x70\xec\xab\xe6\x22\xc3\xab\x9a\xf7\x9a\xf5\
\xb9\xdc\x9a\x26\x80\x09\x2f\x45\x81\x98\xf0\x52\x4a\x21\xca\x01\
\x87\x78\x0a\xc4\x21\x5e\x49\x44\x29\x60\xdb\xdd\x47\x1b\x59\x13\
\x71\x0a\x2f\x25\xf7\x99\x28\xbf\x02\x7f\x35\x54\x12\x51\xb1\x96\
\xc9\x33\x50\xd1\x98\xea\x44\x99\xbd\x85\xd7\x6c\x50\x79\x95\x9b\
\xee\x03\xd7\x68\x54\x89\x67\x0e\x58\xba\x61\x35\x9e\x0b\xc0\x52\
\x8d\x5b\xe0\xb9\x01\xcc\x05\xb0\xc2\x73\x05\xb8\x14\xc2\x12\xcf\
\x1d\xe0\xbf\x20\xd6\x78\x2e\x01\xe7\xc2\x78\xc0\x73\x0b\x38\x07\
\xd1\x03\x9e\x6b\xc0\xa5\x88\x4a\x3c\xf7\x80\xff\x22\xaa\xf1\x42\
\x00\xce\x45\xb4\xc0\x03\x70\x0b\x80\xdc\xc2\x22\x3c\x5e\x22\x05\
\xf0\xd8\xc6\xcc\xc4\x63\x23\x9d\x89\xb7\xe4\xd8\x4d\x00\xf2\x31\
\x41\x8c\xe7\x01\xd1\x05\x20\x1f\x54\x8d\xf1\x2c\x11\xf9\xa9\x14\
\x15\x90\xdf\x9a\x4e\xf1\xd4\x88\x8c\x76\x44\x02\x64\xb8\x88\xf1\
\x36\x7b\xc0\x31\x44\x06\x2c\x33\x10\x19\xf1\xcd\x08\x43\xe6\x04\
\x40\x00\x01\x8c\x1d\x00\x01\x04\x30\x74\x00\x04\x10\xc0\xd0\x01\
\x10\x40\x00\x43\x07\x40\x00\x83\x01\x92\xf1\x00\xb8\x36\x20\x45\
\x51\xd4\x86\xea\x05\x3a\xa3\x57\xdd\x02\xc6\x6d\x66\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\xdb\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x06\xa2\x49\x44\x41\x54\x78\x9c\xed\x9d\xdb\x4e\x1b\x47\
\x18\xc7\x79\x04\x3f\x42\x1e\x81\x47\xe0\x11\xfc\x00\xf6\x7a\xd6\
\x47\xa1\x2a\xd4\x95\xf0\x78\x96\xd4\x64\x6b\x12\x22\x08\x68\x15\
\x54\x8a\xb0\xf7\x60\x61\xa8\x43\x13\xa5\x54\xda\x5c\xd8\x52\x40\
\x22\x17\xbe\xe0\x22\x17\x51\x24\xa4\x72\x11\xa9\x42\x28\x57\x55\
\x6b\x20\x98\x43\xa7\x17\x78\xa3\xc4\xf1\x82\x0f\x3b\x87\x75\xe6\
\x2f\xfd\xef\x77\xe6\xa7\xef\x9b\x6f\xe6\xdb\x9d\x1d\x19\x11\x12\
\x12\xea\x42\x71\xa4\x8d\x25\x90\x06\x58\x3f\x87\x50\x4b\x49\xb4\
\xa8\x26\x14\x6d\x87\xf5\x73\x08\xb5\x24\x80\x70\x26\x01\x84\x33\
\x09\x20\x9c\x49\x00\xe1\x4c\x02\x08\x67\x12\x40\x38\x93\x00\xc2\
\x99\x04\x10\xce\x24\x80\x70\x26\x01\x84\x33\x09\x20\x9c\x49\x00\
\xe1\x4c\x02\x08\x67\x12\x40\x38\x93\x00\xc2\x99\x04\x10\xce\x24\
\x80\x70\x26\x01\x84\x33\xf9\x11\x08\x28\x64\x46\x43\x3a\x0c\x86\
\x8b\x30\x1d\x2e\xc2\x74\x48\x87\x41\x50\xc8\x8c\xb2\x7e\x2e\x4f\
\xe4\x17\x20\xc0\x4a\x07\xa4\x22\x54\x63\xa5\xa9\x3f\x25\x1d\xe2\
\x58\x69\xea\xe2\x6e\x25\x7f\x9a\x5a\xcb\x7d\x94\x74\x88\x25\x1d\
\xe2\x88\x81\x1a\x11\x03\x6a\xac\x9f\x75\x20\xf9\x01\x48\xb8\x90\
\x01\x11\x03\x35\x52\xe5\x5c\x73\x61\x7b\x15\x6f\xbe\xdd\xc2\xb5\
\xf7\xaf\xbe\xb0\x7d\x50\xc5\x4b\xbb\x26\x4e\x95\x73\x4d\x49\x87\
\x38\x5c\x84\x69\xd6\xcf\xdd\x97\x78\x06\x02\xac\x74\x40\xb6\x94\
\x8d\xa8\x89\xce\x96\x76\x4d\x6c\x1f\x54\xbf\x02\xd1\xc9\x4b\xbb\
\x26\x96\x74\x88\xc3\x7a\x86\xcb\x71\xdd\x28\x9e\x81\xc8\x96\xb2\
\x11\xb5\x94\x73\x63\xaf\xd2\x15\x88\xcf\xbd\xf9\x76\x0b\xa7\xca\
\xb9\x66\xc4\xc8\xbe\x66\x3d\x8e\x9e\xc4\x2b\x90\x58\x69\xea\x97\
\xa8\xa5\x9c\x77\x4a\x4f\xdd\xda\x3e\xa8\xe2\x58\x69\xea\x22\xa2\
\xc3\xdf\x59\x8f\xa7\x6b\xf1\x08\x24\x5c\x84\xe9\xa8\x89\xce\xfa\
\x89\x8c\x4e\x91\xe2\xab\x35\x85\x37\x20\x21\x1d\x06\x81\x89\x4e\
\x97\x76\xcd\x81\x61\x38\x36\xf6\x2a\x58\xd2\x21\x0e\xe9\x30\xc8\
\x7a\x7c\xb7\x8a\x27\x20\xc0\x4a\x07\x64\x13\xbd\x7b\x58\xfd\xf9\
\xcc\x2b\x18\x8e\x67\x6b\xcb\x38\x62\x64\xff\x65\x3d\xc6\x5b\xc5\
\x13\x90\xd6\x3e\xe3\xa2\xdb\x6a\xaa\xd7\xf5\x24\x55\xce\x35\xa5\
\x22\x54\x59\x8f\xf3\x46\xf1\x02\x04\xac\xa0\x3b\xc0\xcc\x1e\x7b\
\x99\xaa\xdc\x52\x17\x58\x41\x77\x58\x8f\xd7\x55\xbc\x00\x91\x2d\
\x65\xe3\x87\xdf\x1e\x36\x48\x44\xc7\xe7\x9e\x78\x3a\x73\xc9\xf5\
\xfe\x84\x07\x20\xa0\x90\x19\x95\x74\x88\xcb\x6f\x9e\x11\x85\x51\
\x7b\xff\x0a\xbf\xd8\xb7\xaf\xa3\x84\xd7\xb3\x2f\x1e\x80\xc8\x96\
\xb2\xa1\x6c\xcd\x9f\x90\x86\xe1\xf8\xbe\xad\x35\x81\x85\x6c\x96\
\x63\x76\x15\x6b\x20\x60\x05\xdd\x01\x06\x3a\xf1\x62\xcf\xd1\xad\
\x9d\xbd\x09\x97\x6b\x09\x6b\x20\x52\x11\xaa\xdf\xfd\xfa\x53\x83\
\x16\x0c\xc7\xe3\xeb\xd3\x57\x11\x3d\x6b\xb1\x1a\xb7\xab\x58\x02\
\x69\x1d\x1e\x7e\x58\xd8\x5e\xa5\x0a\xa3\xf6\xfe\x15\x5e\xa9\xaf\
\x61\x60\xa0\x13\xee\xa2\x84\x25\x90\x70\x21\x03\xa2\x96\x72\x4e\
\xba\xb2\x72\x73\xaa\x9c\x6b\x72\x77\xa4\xc2\x12\x88\x6c\xa1\x3a\
\x8b\xe8\x70\xbc\xb0\xbd\x8a\x23\x46\xf6\x1f\x16\x63\x77\x15\x2b\
\x20\x34\x4b\x5d\x37\xdb\x07\x55\xfe\x16\x77\x56\x40\xa4\x22\x54\
\xef\x56\xf2\xa7\xac\x60\x38\xe6\x6e\x71\x67\x05\x24\x6a\xa1\x7d\
\x96\xe9\xca\xf1\x4a\x7d\x0d\x03\x33\x7b\x4c\x7b\xfc\xae\x62\x01\
\xc4\x49\x57\x2f\xf6\x6d\xe6\x40\x9c\xb4\x15\x2e\x64\x00\xcd\x39\
\x70\x15\x0b\x20\x52\x11\xaa\x13\x95\x7c\x93\x35\x0c\xc7\xf7\x6d\
\xad\xc9\x4d\xda\x62\x01\x44\x36\xd1\x21\x0f\xe9\xea\x8b\xb4\x65\
\xa0\x13\x9a\x73\xe0\x2a\xda\x40\x78\x4a\x57\xed\x69\x8b\x8b\x8e\
\x22\x6d\x20\xbc\xa5\x2b\xc7\xca\xd6\xe3\x73\x2e\xd2\x16\x6d\x20\
\xbc\x54\x57\x9d\xd2\x56\xc4\x40\x0d\x5a\xf3\xe0\x2a\x9a\x40\x9c\
\x74\x35\xc8\xab\x3d\xa4\xd3\x16\xf3\x4d\x22\x4d\x20\xe1\x22\x4c\
\xb3\x38\xd9\xed\xd6\xe3\xeb\xd3\x57\xcc\x7b\xee\x54\x23\xc4\x42\
\xf6\x6c\x6d\x99\xf9\xc4\xbb\x99\x8b\xb3\x2d\x5a\x40\x80\x95\x0e\
\xd0\x6e\x44\xf9\x32\x6d\xd1\x02\x12\xd2\x61\x90\xe5\x51\x7b\xb7\
\x4e\xae\xfd\x78\xc2\xf4\x48\x9e\x16\x90\x88\x9e\xb5\x68\xf6\xcd\
\xfb\xf5\x6c\x6d\x19\x33\xed\xb7\xd3\x02\x22\x9b\xe8\x68\xa5\xbe\
\xc6\x7c\xc2\x6f\xb3\xb1\x57\x61\xbb\x6b\xa7\x01\x84\xc7\xdd\xb9\
\x9b\x3f\xed\xda\x8b\x93\x63\x24\xe7\xc4\x55\x34\x80\xf0\x5e\xee\
\xb6\x7b\xf2\xf9\xa3\x0b\x66\x9f\xc6\x51\x89\x10\xce\xcb\xdd\x76\
\x33\x2d\x7f\x49\x03\xf1\x43\xb9\xeb\x96\xb6\x98\x94\xbf\xa4\x81\
\xf8\xa5\xdc\x6d\xf7\xf8\xfa\xf4\x05\x93\xf2\x97\x34\x10\xbf\x94\
\xbb\xed\x9e\xad\x2d\xe3\x58\xe9\x5e\x95\xd4\xbc\xb8\x8a\x34\x10\
\xd9\x44\x47\x7e\x4a\x57\x8e\x8d\xbd\x0a\x06\x26\x3a\x25\x35\x2f\
\xae\x22\x09\xc4\x4f\xe5\x6e\xbb\x99\x95\xbf\x24\x81\xf8\xad\xdc\
\x6d\x37\x93\x5e\x3b\xd1\x08\xb1\x90\xcd\x63\x33\xaa\x5b\x33\x69\
\x5a\x91\x02\x02\xac\x74\x80\xf5\x9b\x89\x83\x9a\x49\xf9\x4b\x0a\
\x88\x53\xee\xb2\x9e\xd4\x41\x3d\xbe\x3e\x7d\x45\xb5\xfc\x25\x17\
\x21\xc8\xbe\x6f\x6b\xdc\xbd\xcc\xd0\xab\x67\x6b\xcb\x58\x36\xd1\
\x3b\xaf\xe7\xc7\x55\x24\x80\x00\x2b\x1d\x00\x66\xf6\xd8\x8f\xe5\
\x6e\xbb\xcb\x6f\x9e\xd1\x4d\x5b\x24\x80\x38\xe9\xca\x8f\xe5\x6e\
\x27\xc7\x4a\x53\xf4\x76\xed\x24\x80\x44\x0c\xa8\xf9\x71\x77\xee\
\x66\xf5\xe5\x13\x7a\x4d\x2b\x12\x40\x64\x4b\xf9\x30\x0c\xe9\xca\
\xb1\xd3\xb4\x02\x56\x3a\xe0\xe5\x3c\x75\x94\xd7\x40\x42\x3a\x0c\
\xca\x96\xd2\x1c\x96\x74\xe5\x38\x6a\x29\xe7\x54\xde\x90\xf7\x1a\
\x08\xb0\x90\xad\x6c\x3d\xf6\x7d\xb9\xdb\x6e\xf5\xe5\x13\x3a\x87\
\x8d\x5e\x02\x19\xa6\xea\xaa\xdd\xd4\xee\x49\xf1\x12\x88\x5f\x7b\
\x1f\xdd\x9a\x4a\x8f\xc4\xdb\x08\x19\xce\x74\xe5\xb8\x75\xe7\xd6\
\x5f\x5e\xcc\x95\xab\xbc\x02\xe2\xb4\x6a\xfd\x7c\x76\x75\x9b\xa9\
\x9c\x6d\x79\x05\x84\xd5\x15\x19\xb4\x4d\xfc\xab\x5d\x2f\x80\xb0\
\xbc\x22\x83\xb6\x89\x5f\xc9\xe1\x05\x90\x61\x3b\x2a\xb9\xc9\x9f\
\xbe\xda\x25\xb5\xb8\x7b\x01\x84\xf5\x15\x19\xb4\x4d\xf4\x42\xcd\
\x41\x81\x84\x8a\x93\x63\xb2\xa9\x7c\xfc\x16\xa2\xe3\xab\x28\x21\
\xb1\x73\x1f\x14\x88\x6c\xa1\x3a\x89\x6b\x5d\x79\x77\xab\x4f\x72\
\xe8\x25\x8b\x91\x91\x91\xc1\x80\x38\xe7\x56\x3c\x7e\x33\x48\x2d\
\x4a\xbc\x5e\x4b\xfa\x05\xd2\xba\xf4\xf8\xc8\x4f\xef\xec\x92\x88\
\x12\x60\x66\x8f\x3d\xad\xb8\xfa\x05\x22\x15\xa1\x9a\x5a\xcb\x7d\
\x1c\xd6\x63\x92\x6e\xa3\xc4\xf3\x7d\x49\x3f\x40\x40\x21\x33\x0a\
\x4c\x74\x3a\x8c\x87\x88\xbd\xda\x69\xf1\x7a\xf6\x42\x5d\xaf\x40\
\x5a\xa9\xea\x70\x18\x5e\x60\xf0\xca\xf7\xfe\x58\xc0\x11\x03\x35\
\x3c\xb9\x0b\xb8\x17\x20\xd7\x3b\x72\x54\x9f\x78\x3a\x73\xf9\x2d\
\xa7\xaa\x76\xdb\x07\x55\x3c\xf1\x74\xe6\x52\x36\xd1\xe1\xc0\x5d\
\xc5\xde\x80\x20\x3b\x55\xce\x0d\x5d\x37\xd0\x2b\x28\xe3\xeb\xd3\
\x57\x03\xff\xd1\xa7\x1b\x20\x9f\xfe\x05\x35\xe0\x1f\x6f\x86\xdd\
\x2f\xf6\x6d\x1c\x2b\x29\x97\xb2\x85\xea\x7d\x47\xca\x6d\x40\xae\
\x77\xe2\xe8\x68\xf2\xf9\xa3\x0b\x01\xe3\x76\x6f\xbe\xdd\xc2\xdf\
\x6f\xce\x60\x60\xa0\x93\xbe\x76\xf2\x9d\x80\x00\x2b\x1d\x08\xe9\
\x30\x18\x31\xb2\xaf\x7b\xfd\x43\x9a\xf0\xb5\x17\xb6\x57\xb1\xa4\
\x43\x2c\x9b\xe8\x5d\x4f\x60\x92\x68\x51\x4d\x2a\x1a\x4e\x28\xda\
\x4e\x5c\x9d\x6b\x24\xf2\xf3\x67\xf1\xfc\xfc\x65\x3c\x3f\x87\xe3\
\xf9\x39\x9c\xc8\xcf\xe1\xf1\x87\x8b\x57\xc2\xbd\x3b\xf9\xe0\xf1\
\x7f\xce\x3c\x5e\xcf\xe5\xfc\x59\x5c\x9d\xff\x3b\xa1\x68\x3b\x09\
\x45\xdb\x49\x20\xed\x6b\x50\x71\xa4\x8d\x25\xd1\xa2\x9a\x44\x8b\
\x6a\xec\xc1\xa3\x1d\x61\xf2\x76\xe6\x3b\x8e\xb4\xb1\x9e\x53\x9a\
\x90\xd0\x37\xa9\xff\x01\xaf\xf6\x51\x25\x57\xc5\x63\xb4\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\x38\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x03\xff\x49\x44\x41\x54\x78\x9c\xed\x9d\x41\x4b\x1b\x51\
\x14\x85\xfd\x29\xfd\x29\x82\x43\xd2\x98\x62\x02\x52\xac\x54\xa9\
\x45\xba\x70\x21\xd4\x85\xa0\x23\xc6\x24\xa2\x56\xda\x6a\xa9\x4d\
\x6a\x8c\x18\xad\x8d\x12\x50\x14\x6a\x41\x05\x5b\x85\x96\xa2\x82\
\x60\x37\x59\x88\x2b\x57\x5d\x14\x57\x6d\x90\xb4\xbd\x5d\x94\x29\
\x93\xf8\xe6\xcd\x6c\x6e\xde\xf8\x72\x0e\x7c\xfb\x1b\x3f\x86\x79\
\xe7\xe5\x12\x1b\x1a\x10\x04\x41\x10\x04\x41\x10\x04\x41\x10\x04\
\xf1\x61\x52\xa6\x11\x4f\x9b\x06\xd5\x8c\x41\xe3\xb1\xea\xcf\xec\
\xeb\xa4\x4c\x23\x9e\x1f\x0f\x97\x8e\x96\x3b\x89\x9b\x83\xf9\x76\
\x48\x71\x8b\x25\xe4\x6c\xf3\x11\xd5\x82\xa3\xe5\x4e\x48\x91\xa5\
\xd6\x42\x20\xc5\x25\x2a\x84\x40\x8a\x24\xaa\x84\x40\x8a\x43\x54\
\x0a\x81\x14\x41\x54\x0b\xa9\x90\x32\x64\x4c\xa7\x4c\x23\xae\x23\
\x33\x03\x46\xe3\x8d\x11\x62\x49\x59\x9d\xb8\x53\xce\x8f\x87\x4b\
\xba\x91\x19\x0e\x94\x53\xa6\x11\xbf\x51\x42\x74\x26\x3f\x1e\x2e\
\x41\x88\x8f\x80\x10\x9f\x01\x21\x3e\x03\x42\x7c\x06\x84\xf8\x0c\
\x08\xf1\x19\x10\xe2\x33\x20\xc4\x67\x40\x88\xcf\x80\x10\x9f\xa1\
\x9d\x90\xe2\x7a\x37\xbd\x7f\xd9\x5a\xbb\xef\xfd\x15\x70\x63\x84\
\x1c\x2d\x77\x52\x2e\x19\xa2\xd5\xc9\x08\x9d\x6e\xf4\xd2\xf9\x4e\
\xbf\x56\xec\x66\x3a\x28\x3b\x12\x3c\xf3\xbd\x10\xeb\xa9\xc8\xc6\
\x82\xf4\x71\xa1\x8b\x2e\x0f\x13\xf4\xe3\x64\x4c\x3b\xf6\x73\x5d\
\x34\x17\x0b\x7c\xf1\xb5\x10\xfb\x53\x51\xdc\xea\x53\xfe\x47\xe3\
\x64\xeb\xd5\x3d\x4a\x0f\x19\xd3\xbe\x14\x52\x2f\x4f\x85\x9d\xfc\
\x44\xe4\x67\xc5\x0b\x5f\x26\xa4\xb8\xde\x5d\xf3\x97\x5b\x3d\x3c\
\x15\x76\x72\xc9\xe6\x52\x7a\xb0\x29\xea\x49\xc8\xc9\xca\x03\x4a\
\x9b\x06\x15\xb7\xfa\x6a\xf6\x92\xab\x87\xa7\xc2\xe2\xf2\x30\x41\
\x69\xd3\xa0\x8a\xaf\x77\x65\x42\x3e\xe7\xee\x53\x76\x24\xa8\x7c\
\x70\x5d\xb9\xf8\x30\x48\x69\xd3\xa0\xd7\x03\x8d\xb7\x3c\x09\xd9\
\x9b\x6d\xa3\xd5\xc9\x88\xf2\xc1\x75\xe5\x7c\xa7\xbf\xb2\x83\xb8\
\x09\xd9\x9c\x8a\xd2\xbb\x99\x36\xe5\x83\xeb\xca\x71\xa1\xa7\xb2\
\x83\xb8\x09\x59\x7b\x16\xa5\xfd\x5c\x97\xf2\xc1\x75\xe5\x5a\x07\
\x71\x13\x92\x4b\x84\xe8\x74\xa3\x57\xf9\xe0\xba\x72\xad\x83\xb8\
\x09\xb1\x4e\x58\xaa\x07\xd7\x95\x6b\x1d\x44\x26\xe4\x6b\xe1\x21\
\xa5\x4d\x83\xbe\x7d\x1a\x56\x3e\xb8\xae\x5c\xeb\x20\x32\x21\xd6\
\x7a\xa7\xea\xa1\x75\x45\xd8\x41\x64\x42\xf6\x66\xdb\x68\xe5\x49\
\x8b\xf2\xc1\x75\x45\xd8\x41\x64\x42\xb6\x53\x77\x69\x7d\xaa\x55\
\xf9\xe0\xba\x22\xec\x20\x32\x21\x6b\x4f\x23\x38\xf2\x32\x22\xec\
\x20\x32\x21\xb9\x44\x88\x8e\x0b\x3d\xca\x07\xd7\x15\x61\x07\x91\
\x09\x59\x4c\x86\x70\xe4\x65\x44\xd8\x41\x9c\x84\xe0\xc8\xcb\x8f\
\xb0\x83\x38\x09\xb1\xae\xdd\x21\x84\x0f\x61\x07\x71\x12\x72\x30\
\xdf\x4e\x4b\xa3\x61\xe5\x43\xeb\x8a\x63\x07\x71\x12\x82\x6b\x77\
\x5e\x1c\x3b\x88\x93\x90\xcd\xa9\x28\xed\x66\x3a\x94\x0f\xae\x2b\
\x8e\x1d\xc4\x49\xc8\xfa\x73\x5c\xbb\x73\xe2\xd8\x41\x9c\x84\xe4\
\x92\xb8\x76\xe7\xc4\xb1\x83\x88\x84\x58\x9b\x26\xe7\x3b\xfd\xca\
\x07\xd7\x15\xc7\x0e\x22\x12\x82\x23\x2f\x3f\x8e\x1d\x44\x24\x04\
\x9b\x26\xfc\x38\x76\x10\x91\x10\x1c\x79\x79\x91\x76\x10\x91\x10\
\x6c\x9a\xf0\x22\xed\x20\x22\x21\xd8\x34\xe1\x45\xda\x41\x44\x42\
\xb0\x69\xc2\x8b\xb4\x83\x88\x84\x60\xd3\x84\x17\x69\x07\xa9\x16\
\x82\x6b\x77\x7e\xa4\x1d\xa4\x5a\x08\x36\x4d\xf8\x91\x76\x90\x6a\
\x21\xd8\x34\xe1\x47\xda\x41\xaa\x85\x60\xd3\x84\x17\xd7\x0e\x52\
\x2d\x04\x9b\x26\xbc\xb8\x76\x90\x6a\x21\xd8\x34\xe1\xc5\xb5\x83\
\x54\x0b\xc1\xa6\x09\x2f\xae\x1d\xc4\x2e\x04\x47\x5e\x7e\x5c\x3b\
\x88\x5d\x08\xae\xdd\xf9\x71\xed\x20\x76\x21\xd8\x34\xe1\xc7\xb5\
\x83\xd8\x85\xe0\xda\x9d\x1f\xd7\x0e\x62\x17\x82\x4d\x13\x5e\x3c\
\x75\x10\xbb\x10\x6c\x9a\xf0\xe2\xa9\x83\xd8\x85\x60\xd3\x84\x17\
\x4f\x1d\xc4\x12\xf2\x76\x2c\x5c\xc2\xa6\x09\x2f\x9e\x3a\x88\x25\
\xe4\xcd\x68\xf3\x15\x8e\xbc\xbc\x78\xea\x20\x96\x90\xa5\x64\xe8\
\x0a\x9b\x26\xbc\x78\xea\x20\x96\x90\x85\xc4\xed\x32\x8e\xbc\xbc\
\xe4\x27\x5a\xbc\xfd\x0f\x91\x94\x69\xc4\xe7\x63\xc1\x5f\xd8\x34\
\xe1\x65\x21\xd9\x5c\x76\xed\x20\xff\x9f\x90\x78\xe8\x37\x8e\xbc\
\x7c\x78\xee\x20\x96\x90\xb9\xe1\xc0\x1f\x1c\x79\xf9\xf0\xdc\x41\
\x2c\x21\xd8\x34\xe1\xc5\x73\x07\xf9\x27\xa4\xe9\x05\x8e\xbc\xbc\
\x1c\x17\x7a\x28\x1b\x0b\x7e\xf7\x24\x24\x3d\xd8\xb4\x68\x95\x42\
\xc0\xf7\x63\xc9\x99\xe1\xc0\x85\x37\x21\xa6\xb1\xad\xfa\x67\xb6\
\xeb\x04\xf7\x52\x88\x20\x08\x82\x20\x08\x82\x20\x08\x82\x20\x88\
\x4f\xf2\x17\x8f\xf1\xb8\x3b\xc5\xcf\xc2\xb1\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\x0c\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x60\x00\x00\x00\x60\x08\x06\x00\x00\x00\xe2\x98\x77\x38\
\x00\x00\x04\xd3\x49\x44\x41\x54\x78\x9c\xed\x5d\x31\x8e\xdb\x3a\
\x10\xd5\x11\x7c\x04\x1f\xc1\x47\x70\x29\x8d\x1b\x75\x6e\xb7\x48\
\x93\xce\x47\xd8\xce\xb0\x29\x40\xbf\x73\xe9\x22\x45\x4a\x1f\x81\
\x58\x51\x06\xb6\x08\xec\x2e\x40\x8a\x60\x8f\xa0\xf2\x17\x29\xf8\
\x0b\x49\x86\xbf\xb2\xbb\x12\x25\x0e\xc9\xa1\xf9\x80\x57\xda\x96\
\xde\x23\x39\xe4\x70\x48\x47\x51\x40\x40\x40\x40\x40\x40\x40\x40\
\x00\x21\xe4\x3c\x9d\xb1\x72\x95\xb3\x72\x95\xdb\x7e\x96\x87\xc3\
\x96\xc7\xf3\x4c\xc0\x89\x09\x90\x0d\xaf\x5b\x1e\xcf\x6d\x3f\xd7\
\x43\x80\x89\x78\xc9\x04\x54\x77\xe2\xdf\xb8\x2b\x92\xd4\xf6\xf3\
\x79\x8b\xdb\x90\xf3\x81\xf8\x37\x86\x21\x49\x3f\x72\x9e\xce\x3a\
\x43\x4e\x1f\xaf\x39\x4f\x67\xb6\x9f\xdb\x29\xe4\x3c\x9d\x31\x11\
\x2f\xf7\x67\x58\xa8\x7c\x8e\x89\x78\x99\x95\xf0\xa6\x20\x7e\xcb\
\x8a\x89\x78\xa9\xf2\x7c\xfb\x33\x2c\xbc\x1d\xc6\x76\x45\x92\xde\
\x0b\x94\x95\xf0\x96\x09\x38\x65\x25\x1c\x77\x2f\xf0\xbc\x2f\xe0\
\xa9\x2b\x18\x2b\x92\x4d\xef\x90\xd3\xc3\xdd\x0b\x3c\xb7\xdf\x77\
\x2f\x32\x2b\x92\x4d\x3d\xa4\x25\xbc\x31\xf8\xf6\x3b\xfb\x02\x9e\
\x0c\xcb\x83\x0f\xc5\x21\x44\x32\x01\xd7\x29\xc2\xff\xdd\x1b\x94\
\x8c\xbc\xda\xd6\x4b\x2b\xf6\x67\x58\x68\x14\xd3\x08\xbd\x9a\xd6\
\xee\x5e\xe0\xd9\xb6\xa0\xaa\xcc\x4a\x38\xda\xd6\x4d\x0b\x72\x9e\
\xce\x46\x06\x51\xdb\xac\x6c\x6b\xa7\x05\xdd\xe0\x4b\x89\x5e\x04\
\xe3\x11\xc1\xd7\x25\xd2\x0e\xc6\x14\x83\x6f\x97\xa4\x83\x31\xc5\
\xe0\xdb\x25\xd9\x60\x4c\x38\xf8\x76\x49\x2f\x18\xe7\x3c\x9d\xd5\
\x2b\x5b\xeb\xe2\x69\xe1\xfd\x6a\xda\x69\x6c\x79\x3c\xdf\xbd\xc0\
\xb3\x27\x2d\xff\x6f\x96\xab\xdc\xb9\x98\xd0\x26\xd9\x58\x9d\x3a\
\x98\x94\xb7\xa1\xc2\x4c\xc0\xc9\xfa\x14\xd5\xfb\xd6\xee\x62\xaf\
\x78\xc4\xd6\x3e\x94\x46\x7a\x05\xe5\x55\xad\x29\xa2\x9a\xb0\xe5\
\xf1\xdc\xf6\x0b\xba\x4e\xd4\x0d\x9d\x7a\x08\xb2\xff\x92\x98\x3c\
\x5c\xd6\xf2\xfb\xef\xaf\xa3\x3f\x8f\x1e\x0f\x98\xc7\x63\xff\xe1\
\xb2\x96\xe5\xbf\x5b\xf9\x43\xfe\x33\xd6\x04\xfc\x45\x1b\x13\x09\
\xb7\x2d\x14\xb6\xf8\x2d\x47\x98\x80\x9f\xbc\x6b\x4a\x43\xac\x0b\
\x86\x2d\xfe\x18\x13\x8c\xe4\x8d\x7c\x48\xb0\x0d\x15\x5f\xd9\x04\
\x13\x35\x49\x3e\x4d\x45\x87\x88\xdf\xf2\xdb\xaf\x2f\xbd\xdf\x67\
\x64\x75\xec\x43\x8e\x5f\x55\xfc\xd7\x3f\x99\x3c\xfe\x7c\xea\xfd\
\x4e\x23\x35\x45\x3e\x4c\x45\x31\xc4\x67\xc2\xe0\xe6\x0d\x23\x3c\
\x15\xc5\x12\x9f\x09\x90\x46\xc4\x8f\xa2\x28\xa2\x9a\x80\xc3\x14\
\x9f\x99\xda\xb8\x69\xca\x05\xad\x8b\xe9\x98\xf8\x35\x8b\x64\x83\
\x2b\xfe\x27\xb5\xf9\x2e\xd3\x88\xf8\x37\x0e\x2f\x08\x56\x02\xd5\
\x3d\x5e\xb3\xe2\x83\x64\x02\x2a\x94\x60\x4c\xb1\xbe\xc7\x82\xf8\
\x0d\x13\xae\x55\x7c\x8a\xab\x5f\x7b\xe2\xd7\xd4\x96\x96\x68\x56\
\xbe\xa4\xc6\x7d\xdb\xe2\xb7\x9c\xbc\x30\x6b\x36\x60\x50\xc4\xff\
\xf6\xeb\x8b\x3c\x5c\xd6\xde\x8a\xdf\x70\x7c\x3c\xa8\x57\xbc\x38\
\xa9\xe7\xef\xbf\xbf\xde\x04\xd0\x69\x82\x63\xe2\x4b\x26\xea\x93\
\x40\xa3\x0c\xc0\x4a\x3b\xb7\xe2\xdf\x0b\xa1\xc3\x04\x17\xc5\xbf\
\x99\x20\xe0\xa4\x6c\x00\xc6\xac\xa7\x2b\xbe\x2e\x13\x5c\x16\xbf\
\xe9\x05\xe3\x02\x32\xd3\x78\x4e\xeb\x23\xf1\xa7\x9a\xe0\xba\xf8\
\x6c\xca\x91\x59\x5d\x55\x10\x7d\xe2\x8f\x35\x81\x80\xf8\x95\xea\
\xb1\xdc\x77\x7a\x41\xbc\x9c\xfa\x20\x87\xcb\x5a\xbe\xfe\xc9\xb4\
\x9a\x40\x40\x7c\x7d\xb9\x21\x1d\x01\x59\xa7\x09\x24\xc4\xd7\xbd\
\x3d\xc9\x34\xc4\x03\x1d\x26\x90\x10\x1f\xe3\xaa\x84\x66\x27\x6c\
\xf2\xa2\x6c\x8a\x09\x44\xc4\x9f\x3e\xee\x7f\x04\x1d\xf1\x60\xac\
\x09\x44\xc4\xc7\xdf\x17\xd6\xb5\x40\x53\x35\x81\x82\xf8\xc6\xae\
\xca\xd1\xb5\x2f\xa0\x62\x82\xeb\xe2\x67\x25\xbc\x19\xbb\x22\x47\
\xe7\xc6\x8c\x2e\x13\xac\xb6\xfc\x9a\x66\xf6\x84\x31\xca\x52\xa6\
\x9a\xe0\x80\xf8\x92\x09\x90\x46\x7a\x00\x56\x61\xd6\x58\x13\x5c\
\x11\xbf\x26\xd2\x7e\xf0\xff\x0c\x40\x3c\x7e\xaa\x6a\x82\x5b\xe2\
\x03\x7e\x55\x44\x14\xe1\x97\xa5\x0c\x35\xc1\x39\xf1\x85\xa1\xf3\
\xc4\x59\x09\x47\xec\x17\xe9\x33\xc1\x45\xf1\x99\x18\x99\xf7\x57\
\x36\xc0\x50\x75\xc4\x47\x26\xb8\x2a\x7e\x43\xfc\x03\x1a\x26\x6b\
\x83\xba\x26\x38\x2e\xbe\x64\x66\x8e\x28\x99\x7d\xa9\xd6\x04\x02\
\xe2\x4b\x26\x90\xa7\xa2\xb6\xce\x06\x1c\x2e\x6b\x12\xe2\x33\x01\
\x12\x2d\x11\x17\x45\x7e\x9d\x8e\x41\x23\x7a\x81\x6e\x3d\x0d\xd5\
\x79\xa7\xa7\x27\x4c\x38\x2b\x92\x8d\xb1\x7c\xd0\xfe\x0c\x0b\x13\
\x53\x52\xc7\x59\x65\x25\x1c\x51\x87\x9d\x21\x78\xbc\x5e\x61\xb8\
\xb5\x0f\x85\xe7\xbd\xc2\x8d\xd6\x3e\x14\x5e\x1d\xe8\x2e\x57\xb9\
\x73\xad\xbd\x0f\x54\x0f\x74\xbc\xd7\xf2\xc9\x89\xdf\xc2\x87\x5e\
\x40\xf6\xda\xca\x28\xf2\xe3\x4c\xb1\x91\x1c\x3f\x26\x18\xed\xd9\
\x11\xed\xab\x8b\xa3\x08\x77\x03\x07\x9b\x64\xee\x0b\xfd\x0c\x84\
\x83\x31\xdd\xe0\xdb\x05\xc5\x60\x4c\x3a\xf8\x76\x41\x33\x18\x13\
\x0f\xbe\x5d\x30\xb5\x60\xac\xfa\xa7\x3b\x7d\xad\xf9\x4d\xf1\x33\
\xf4\x83\x6f\x17\xef\x04\xe3\x8a\x09\xb8\x66\x02\x4e\xac\x5c\xe5\
\x75\x3e\x29\x5e\x6e\x79\x3c\x6f\xc7\x5e\x2d\xe7\x93\xef\x56\xb1\
\x5b\x1e\xcf\x77\x45\x92\xee\x0b\x78\x6a\x6e\xfd\x3d\xbe\x77\x10\
\xd1\x8b\xe0\xdb\x45\xce\xd3\x19\x2b\x92\xcd\xfe\x0c\x0b\x95\xe0\
\x36\xe1\x7e\x8a\x4a\xb5\x50\xb6\x35\xc7\x9b\xe0\xab\x0b\x4d\x0c\
\x51\x19\xc2\xc2\xbf\xab\x62\x60\xd0\x6c\x8a\x62\xe2\x8c\x12\x3e\
\xd9\x16\xad\xac\x5f\x29\xff\x28\x68\x8a\x03\xee\x87\xa4\x2b\x99\
\x3c\xbd\x4f\xc8\x4a\x38\x66\x02\x4e\x61\xc8\x09\x08\x08\x08\x08\
\x08\x08\x08\xa0\x86\xff\x00\x8d\xec\xef\xfd\x77\x9f\x1b\xef\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x0b\x6a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x0b\x31\x49\x44\x41\x54\x78\x9c\xed\x9d\xff\x4e\x1b\x57\
\x16\xc7\xfd\x08\x7e\x04\x1e\x81\x47\xc8\x03\xd4\xf3\x83\xb0\xab\
\x64\x4b\xb5\x54\x55\x56\xc9\x2e\x2b\x8d\x9a\x01\xdb\x33\x63\x76\
\xb2\x49\xea\x54\x10\x39\x6c\xe2\xb1\x4d\xd2\x80\x94\x20\x45\x29\
\xaa\xbb\xb2\x67\xee\x50\x36\x9d\xd6\x1e\x8f\x1b\xb2\x01\x29\x14\
\x04\xa8\xcb\x2e\x8b\xac\xc2\x2e\xd5\xd2\xc4\x60\x6c\x93\x9c\xfd\
\xc3\x19\xe2\xb8\xc6\xf6\x18\xff\x18\xc8\x7c\xa4\xf3\x47\xa2\x44\
\xb9\xc3\xd7\x77\xbe\xe7\x9e\x73\xae\x63\xb3\x59\x58\x58\x58\x58\
\x58\x58\x58\x9c\x44\x08\x3e\x6c\x27\x38\x79\x82\xe4\x50\x92\x60\
\xc4\xce\x76\xaf\xe7\x9d\x07\x67\x51\xb4\x9b\x9f\xce\x5e\xf0\xcf\
\x01\xc6\x88\xe0\x70\x47\xa9\x76\xaf\xe9\x9d\xc5\xe1\x8e\x52\x24\
\x27\xe7\xf8\xc9\x14\x0c\xa1\x6d\xa0\xee\x2c\x02\xc6\x88\x80\xb1\
\x92\xaf\xdd\x6b\x7b\xe7\x70\x30\x62\x2f\xc1\x4a\xbb\xae\xfb\xab\
\x30\x84\xb6\x0f\x82\x9f\x4c\x01\xe9\x91\xf7\x71\x56\x7a\x46\xf0\
\x61\x7b\xbb\xd7\xf9\x4e\x40\x30\x62\x27\xce\x49\x3b\xe7\x6f\xcd\
\xee\x15\x8b\xa1\x87\x37\xb2\x05\x67\xaf\x2a\xfb\x38\x23\x3e\xb7\
\x7c\xa5\xc9\x14\x4c\x1c\x2d\x7d\x74\x63\x26\xeb\x8d\x6c\xfd\x42\
\x8c\xe2\x38\x37\xf2\x04\x30\x46\x84\xf7\x5c\x91\x53\xed\x5e\xf7\
\x89\x05\x67\x51\xf4\xcc\x15\x65\xb7\x9a\x18\xde\xc8\x16\xf4\x0e\
\x6b\x40\xb0\xd2\x73\x07\x13\xbd\x66\xed\x94\x26\x80\x31\x11\xbe\
\xd8\xc4\x2b\xc5\xf9\x5b\x73\x40\x7a\x64\xf8\xdd\x5f\xfe\x0e\xdd\
\xfc\x74\x06\x63\x44\x20\x58\xb4\xe0\x60\xc4\xde\x76\x3f\xc7\x89\
\x80\x70\xa1\x0e\x8c\x95\x7e\x3e\x7b\x55\xd9\xaf\xb6\x3b\xa8\x3b\
\x8b\xd0\x35\x38\x05\xc5\x86\xef\x8d\x6c\xbd\xc9\xc2\xac\x4c\xac\
\x31\x10\x2e\xd4\xd1\xe5\x91\x37\x7e\x75\x69\x1a\x0e\xdb\x25\xfc\
\x64\x0a\xba\x06\xa7\x80\xba\xb3\x58\x51\xb0\x6e\x7e\x3a\x8b\xb3\
\xd2\x0b\x6b\xc7\x1c\x81\x8f\xfd\xb1\x5e\x5a\x50\xe1\x0f\xb7\x92\
\x80\x31\x22\x94\xa6\xbc\xde\xc8\x16\xfc\xfa\xcf\x7f\x83\x0b\xfe\
\xb9\xaa\xaf\xb4\x21\xb4\x0d\x7d\xc1\x79\xc0\x18\x11\x70\x46\x1c\
\x6b\xf7\xb3\x1d\x3b\xfa\xfd\x89\xce\x81\xa0\xb6\xe3\x97\xff\xb1\
\x17\xdb\x00\xb8\xf6\xd7\x15\xc0\x18\x11\xfa\x82\xf3\x07\x62\xfc\
\x76\x58\x7b\x45\x7a\xa6\x5e\x5d\x0e\x6f\xd6\x24\x88\xbe\xa3\xce\
\x5c\x51\x76\x71\x46\x7c\x6e\x65\x63\x35\x42\xf9\x14\xbb\x33\xa4\
\x2d\xdd\x88\x2c\x67\x63\x1b\x00\xb1\x0d\x80\x71\x35\x05\x7d\x23\
\xdf\xee\x61\x8c\x08\x1f\x7c\xaa\xc2\xf9\x5b\xb3\x7b\x84\x07\xfd\
\x17\x67\xa5\x25\xd2\x23\xef\xd7\x62\xfc\xc5\xa1\x97\x5e\x2c\x51\
\x6a\xc0\x19\xd4\xa2\x97\xee\x3d\xdd\x7d\xb4\x9e\x87\xd8\x06\x40\
\x64\x39\x0d\xce\x90\xb6\x47\x0b\x2a\x45\x30\x62\x27\xc1\xa1\x25\
\x9c\x95\xd2\xfa\x0f\x13\x63\x25\x1f\xc6\x88\x15\x7d\xc4\x12\xa5\
\x4e\x74\xdf\x18\x57\x53\x10\xdb\x00\x78\xb4\x9e\x07\x6e\x6c\x26\
\xeb\x0c\x25\x85\xe2\x3f\xe7\x70\x4b\xc4\xdb\xbf\x8e\x52\x18\x23\
\xd6\xec\x27\x96\x28\x06\xa0\xfd\x71\x9e\x16\x54\xb8\xad\xac\xc1\
\xf0\x17\x0b\x39\x67\x50\x5b\xa0\x7c\x4a\xd5\x3a\x15\xc1\x88\x9d\
\x38\x23\x3e\xff\xe0\x53\xb5\x2e\x51\xac\xc3\x64\x05\xe8\x9b\xf1\
\x53\xb4\xa0\xc2\x40\x50\x4b\x53\x01\xa5\xa3\xd6\xbf\x47\xb8\x50\
\x07\xce\x88\xcf\x7b\xae\xc5\xf3\x46\x44\xf9\xf0\x7a\x12\x70\x4e\
\x5a\x6f\xe2\x23\x1d\x7f\xa8\x80\xd2\x41\xdf\x8c\x9f\x32\xfa\xf7\
\xea\x11\xc5\x1b\xd9\x2a\x9c\xf0\xad\x03\x64\x73\x20\x5c\xa8\xc3\
\xa8\xd1\x73\x0f\xd6\xac\x57\x57\x33\x71\xb8\x25\x82\xe4\xe4\x9c\
\x91\x73\x4a\xe1\xd5\x85\x52\xed\x5e\xfb\x89\x05\x67\x51\xb4\xc7\
\x1b\xcb\x18\x79\x75\x61\x8c\x08\x56\x89\xa5\x49\x10\x7c\xd8\x8e\
\x31\x22\x18\xd9\x25\x17\xfc\x73\x40\xb0\x68\xa1\xdd\x6b\x3f\xb1\
\xe0\x2c\x8a\x9e\x1b\x79\x52\xb3\x20\xfa\x2e\x21\x5c\xa8\xa3\xdd\
\x6b\x3f\x91\x38\x18\xb1\x97\xe4\x50\xd5\x8e\x63\x71\xf4\x5e\x4f\
\xe6\xde\x89\x22\xa4\x33\x94\x14\x68\x41\x6d\xf9\xe8\x0e\xce\x4a\
\x69\xee\xc1\x5a\xcd\x82\xb8\xee\xaf\x02\xce\x49\x3b\x27\x7a\x70\
\x82\x16\x54\xea\x75\x4d\x0a\x2e\x06\xd4\x96\x7e\xfa\x08\x56\x8a\
\xeb\x95\x62\x23\xaf\xad\x13\x5b\x52\x79\x5d\x4e\xdf\x1d\x57\x53\
\x10\x59\x4e\x17\xea\x53\x41\x6d\xc1\xc8\x29\xfc\x28\x60\xac\xe4\
\xeb\x1d\xd2\x76\x8c\x9c\xde\xcf\x5e\x55\xf6\x4f\xe4\x20\x1e\xe5\
\x53\xec\x03\xa1\x44\x4a\xef\x6d\xe8\xe5\xf4\x81\xa0\xb6\x5b\xcf\
\x49\xbc\x1e\x1c\x6e\x89\x20\x3d\xb2\xa1\x72\xca\x05\xff\x1c\xe0\
\x2c\x8a\xb6\x62\x7d\x2d\x83\xf2\x29\x76\x67\x50\x8b\x5e\xba\x3f\
\x9b\xd5\xcb\xe9\xe1\xef\xb7\xc1\x19\xd2\xf6\x3e\xf6\xc7\x7a\x5b\
\xb5\x0e\xfd\x90\x68\x44\x10\xd7\xfd\x55\xc0\x59\xe9\x45\xab\xd6\
\xd8\x12\x9c\xa1\xa4\xc0\x7e\xf6\x78\x47\x5e\xdd\x83\xd8\x06\x80\
\xbc\xba\x07\xae\x50\x32\x5f\x5a\x4e\x6f\x36\xf5\x08\x72\xe2\xd2\
\x5f\xdd\xc4\x1f\xce\xfe\x74\xd0\xdb\xb8\xfa\xe0\x59\x66\x20\x98\
\x88\xd7\x52\x4e\x6f\x24\xef\xb9\x22\xa7\x30\x46\xac\x59\x8c\xb7\
\x04\x39\x4e\xb5\x2d\xc2\x85\x3a\x4a\x9b\x45\x36\xdb\xdb\x26\xae\
\xfb\xc6\x8d\xc8\x72\x76\x20\xa8\xfd\xd8\x6a\x31\x6c\xb6\x42\x03\
\xeb\x34\xff\x95\x21\x53\x3f\x76\x82\x10\x7c\xd8\x4e\xb0\x68\xa1\
\x30\xfb\x14\xe1\xf5\xdf\xa7\x02\x4a\x47\x7f\x20\xf1\xbc\x8c\x89\
\xef\xb4\xca\xc4\x4b\xc1\x19\x71\xcc\x68\x96\x75\xec\x04\xd1\xef\
\x6a\xb8\xee\xaf\x02\xe9\x91\xf3\x0e\xb7\xa4\x50\x3e\xc5\xee\x1c\
\x4d\x26\x3f\x79\xf8\x2c\xaf\x9b\xf8\xc3\xd9\x9f\x5a\x6e\xe2\xa5\
\x90\x9e\xa9\x29\xa3\xed\xdd\x63\x75\x16\x29\xbd\xab\x71\x39\xbc\
\x09\x67\xae\x7c\x9d\xed\xf2\xa0\xdd\x8b\x81\x44\xf6\xad\x01\x85\
\xd1\x64\xae\xd5\x26\x5e\x0c\xc1\x87\xed\x38\x2b\xa5\x4b\xe7\xba\
\x6a\x89\x63\x21\xc8\x61\x77\x35\x86\x50\xa1\x9f\x80\x31\x22\xdc\
\xd5\x36\xe0\xd1\x7a\x1e\x2e\xdd\x7b\xba\xeb\x0c\x69\x13\xed\xf0\
\x8d\xe2\xf5\x92\x9c\x9c\x33\x52\xcb\xd2\x3f\x64\xa6\xcf\xb2\xaa\
\xdd\xd5\x18\x42\x6f\x26\x04\xff\x78\x33\xf1\xd2\x19\xd2\x96\xda\
\x29\x86\xcd\x66\xb3\x91\x1c\x4a\x1a\x7d\x5d\x15\x9d\x43\xd2\xed\
\x5c\x7b\x45\x8c\xdc\xd5\x38\x68\x87\xb2\xd2\x93\x76\xae\x59\x4f\
\x77\x8d\x0e\xd2\x0d\xa1\xc2\x4c\xb0\xa9\xfb\x22\xb5\xde\xd5\xd0\
\x43\x1f\xdb\x24\x38\xb4\xd4\x8e\x4c\x45\xff\x00\x55\xda\xcd\x95\
\xc2\xd4\x25\xf8\x52\x13\x37\x12\xba\xaf\x94\x3b\xaf\x34\x13\x8c\
\x89\xf0\xdd\xfc\x74\xc6\xa8\x77\xe8\xd1\xcd\x4f\x67\x4d\xd9\xca\
\x75\x30\x62\x6f\xb9\xe9\x73\x23\xa1\xfb\x4a\xf1\x79\xa5\xa9\x6b\
\x76\x47\xa9\xc3\x12\x8f\x5a\x77\xb7\x29\x33\x2c\x82\x11\x3b\x71\
\x56\x4a\x1b\x69\x81\x56\xf3\x15\x87\x5b\x52\x9a\xb9\x66\x5d\x0c\
\x7a\x7c\xe5\x48\x1f\x20\x82\x43\x4b\xcd\x5c\x67\x5d\x14\x4e\xe3\
\x52\x9c\xf4\xc8\x79\x23\x1d\xb7\x4a\x9f\xbc\x6e\x7e\x3a\x4b\xb0\
\x68\xa1\x19\xe9\x64\x23\xc4\xf0\x46\xb6\xe0\x34\xff\xd5\x8e\x29\
\x5f\x57\x3a\x5d\x83\xe8\x7e\x3d\x13\xe6\x87\x3d\xb0\x7e\x63\xb6\
\x51\x0d\xa0\xc2\x4e\x46\x51\x82\x43\x7b\x47\x11\x63\x08\xbd\x49\
\x77\x4d\xdb\xbe\xd5\xef\x6b\xfc\x69\x62\x2e\x8f\x31\x22\x7c\x78\
\x3d\x79\x64\x51\x86\xd0\x9b\x6f\x62\x38\xca\xe5\x4c\x82\x0f\xdb\
\x31\x56\xf2\xe1\xac\xf4\xa2\xc7\x1b\xcb\xd4\x93\x74\x94\x46\x8f\
\x37\x96\xe9\xe2\xe4\xb6\x55\x16\xaa\x52\x7c\x5f\x63\x72\xfe\x67\
\x78\xdf\xfb\x35\xfc\xe6\x93\x6f\x0c\xcd\x3a\xd5\xb2\x5b\x70\x4e\
\x5a\xc7\x98\x08\x5f\xcd\x48\x09\x3e\x6c\x77\xb8\x25\x02\x67\x51\
\x14\x67\xa5\x34\xe9\x91\xf7\x1b\xb1\x73\x87\xd0\x36\xd0\xe3\x2b\
\x40\xb0\xd2\xae\x69\x0b\x8a\xb4\x3f\xce\x3b\x47\x93\xb9\xc8\x72\
\xfa\xa0\x8c\x3e\xbd\x96\x83\xdf\xfb\x93\x70\x7a\x70\x0a\x1a\xe1\
\x2b\xc5\x3b\xa6\xc7\x1b\xcb\xe8\x37\x67\x09\x0e\x2d\x91\x9e\xa9\
\x29\x9c\x11\xc7\x08\x56\x8a\xeb\x17\x75\x70\x4e\xda\x21\x39\x39\
\xf7\xd1\x8d\x99\x2c\x3f\x99\x82\x7a\xd3\xda\x72\x1f\x0e\x92\x93\
\x73\xad\xca\x04\x0d\x41\xf9\x14\xfb\x45\x21\x36\xe8\x0c\x69\x19\
\xbd\xc1\x54\x1a\x97\x1e\x2e\x40\x25\x5f\xe1\x1e\xac\x19\xbe\x40\
\x53\xfc\x1e\xa7\xc7\x57\xa0\x2f\x38\x0f\xe7\x46\x9e\xc0\x05\xff\
\x1c\x50\x77\x16\x81\x7b\xb0\x06\x8d\x14\xa1\x38\x3e\xba\x31\x93\
\x25\x38\xb4\x64\x2a\xef\xa0\x7c\x8a\x9d\xf6\xc7\x83\xfd\x81\x44\
\x86\x16\x54\xe0\xef\x3d\x2d\x2b\x86\x1e\x01\x65\x1d\xca\xdd\x5c\
\xba\x1c\xde\x04\xd2\x23\xe7\x31\x46\x84\xa3\x9a\x6c\x2b\xa2\x30\
\x87\x85\x32\xa6\x39\x77\xbc\x36\xee\xcf\x68\x21\x91\xa3\x05\x15\
\x8a\xe3\xf2\xc4\x1c\xe8\x3d\xf1\x72\x31\x22\xfd\x00\xa7\x07\xe5\
\x57\x67\xaf\x7e\xf3\xd2\x1b\xd9\x02\x6f\x64\x0b\x7a\xbc\xb1\x0c\
\xc1\x4a\x71\x8c\x89\xf0\x66\x17\x85\x9f\x4c\x01\xc9\xc9\x39\xd2\
\x33\xe5\x6d\xb7\x0e\xb6\x7e\x7f\xa2\xd3\x19\x4a\x0a\xfd\x01\x75\
\xcf\x19\x4a\xbe\x2c\x15\x43\x0f\xf7\xed\xef\x20\xfc\xfd\xf6\x2f\
\xc4\xd0\x2f\x64\x52\x23\x31\x37\xce\x4a\xcf\x48\x8f\xbc\xdf\x7b\
\x3d\x99\x23\x3d\xe8\x47\x7d\xeb\xe3\x8c\x38\x86\x31\x62\x43\xfd\
\xa6\xa1\x62\x78\xe4\x7c\x17\x27\x0b\x6d\x7f\x55\x15\x76\x45\x72\
\x85\xbb\x3b\x93\x77\x8d\x26\xcb\x0a\x51\x1a\xc5\x3d\x72\x79\x75\
\x0f\x5c\xa3\xc9\xfd\xe2\xe6\x93\x9e\x82\x96\x6e\xfd\xc2\x6e\x31\
\x97\x28\xe6\x13\x23\xa8\x45\xb9\xbb\x33\xd9\x5a\x84\x28\x8e\xe1\
\x2f\x17\x0f\x26\x48\xfa\x03\x89\x67\xa5\xfd\x8e\xc3\x52\x46\x9c\
\x11\xc3\x66\x11\xc5\x54\x62\xd8\x6c\x36\x9b\xfb\xf6\x77\x5e\x3d\
\x9d\x0d\x7f\xbf\x6d\x48\x10\x5a\x50\x81\xbd\xfb\x18\x06\x02\x89\
\xff\x18\x6d\x3e\xe9\xaf\xaf\x46\x9d\x1b\xea\x09\x7a\x7c\xe5\x40\
\x8c\x66\xfd\x7c\x0d\xf1\xb1\x3f\xd6\x5b\x9a\xce\x46\x96\xd3\x70\
\x79\x62\xce\x98\x30\x7e\xf5\xcb\x7a\xfe\x7d\xfd\x4e\xf9\xd9\xab\
\xca\x7e\x23\x0e\x97\x46\x76\x45\xcf\xb5\x78\x1e\xe7\xa4\x1d\xd3\
\xcc\xec\xea\xdf\x21\x72\x5b\x59\x2b\x9b\x31\x0d\x7f\xb9\x58\xb3\
\x20\x17\xfd\xdf\xbe\x5f\xef\x3a\x0a\x83\x07\x28\x5a\xcf\x65\x7f\
\xa3\xe1\x8d\x6c\xc1\xf9\x5b\xb3\x7b\x24\x87\xb2\x04\x2b\xc5\x4d\
\xd3\x23\x2f\xf7\x1d\x22\xe5\xe2\xb6\xb2\x56\x93\x20\x8d\xe8\x95\
\x3b\xdc\x51\x8a\xf4\xa0\x1f\x75\x61\x1a\xb9\x63\xf8\xc9\x14\xf4\
\x05\xe7\xa1\x9b\x9f\xce\x10\x1c\x5a\x6a\x75\x83\xac\x22\x07\x43\
\xcf\x45\xdf\x21\x52\x29\xaa\xf9\x4a\x7f\x20\xf1\xef\x46\xae\xcf\
\xe1\x8e\x52\x38\x87\x52\x7a\xf1\x92\xba\xb3\x58\x57\x1f\xdc\x1b\
\xd9\x82\xbe\xe0\x3c\x9c\xb9\xa2\xec\xea\xe5\x17\x87\x3b\x4a\x99\
\xc2\xb8\x8b\x71\x86\x92\x42\x69\x4d\xaa\x5a\x44\x96\xd3\x30\x38\
\xfe\xe4\x17\x62\x38\x47\x93\xaf\x68\x41\x6d\xca\x05\x7b\x87\x5b\
\x22\x70\x46\x1c\xc3\x59\x29\x8d\x31\x22\x90\x1e\x39\x7f\x6e\xe4\
\x09\xf4\x05\xe7\xa1\x2f\x38\x0f\xf4\xf8\x0a\x70\x0f\xd6\x0e\x4a\
\x2b\xd4\x9d\x45\xb8\xe0\x9f\x83\x9e\x6b\xf1\xfc\x99\x2b\xca\x2e\
\xce\xa1\x0c\xc1\xa1\x25\x8c\x89\xf0\xe6\x2d\x10\x96\x0c\x3d\x1b\
\x89\x47\xeb\x79\xf0\x7e\x3e\xff\xb6\x20\xa1\x64\xae\x15\x53\x88\
\x85\xb9\xe1\x28\x85\x33\xe2\x18\xc9\xa1\x64\xa1\xd8\x28\x6f\xbe\
\xae\x0c\xef\xe0\x1c\x4a\x11\x2c\x5a\xc0\x59\x14\xc5\x58\xc9\xe7\
\x70\x47\x29\xd3\x8a\xa0\x53\x6e\xe8\xb9\x9e\x10\xbe\x5a\x7d\xdb\
\x3f\x5a\x74\xf3\xe9\x44\xa1\x9b\x78\xf1\xd0\xf3\x51\xe2\xe1\xec\
\x4f\xc0\xdd\x9d\x79\xe9\x0c\x69\xe6\xeb\x35\x9b\x1d\xa3\x26\x5e\
\x4b\x8c\xab\x29\x18\x08\x24\x76\xfa\xfd\x09\x73\xbf\x16\xcc\x48\
\x3d\x26\x5e\x29\x26\x1e\x6f\x16\xce\x1e\x42\xcc\x3c\xa9\xe3\x71\
\xe1\x28\x26\x7e\x58\xb6\xe5\x1c\x4d\x66\x9b\x95\x59\x9d\x68\x1a\
\x65\xe2\xc5\x59\x96\x19\x26\xd8\x8f\x25\xe5\xae\x1f\x1f\x35\x86\
\xbf\x58\xc8\x99\x61\x82\xfd\xd8\x51\xee\xfa\x71\x43\x4c\x3c\xa8\
\xa5\x2d\x13\xaf\x03\xdd\xc4\x2b\xb5\x5a\x2d\x13\x6f\x11\x96\x89\
\x9b\x88\x66\x9a\x78\xbb\x9f\xed\xd8\x61\x99\xb8\x89\x68\x86\x89\
\xdf\x56\xd6\x2c\x13\xaf\x17\xcb\xc4\x4d\xc4\x45\x21\x46\x58\x26\
\x6e\x22\x9c\x21\x6d\x69\xf8\x8b\x85\x9c\x65\xe2\x26\xc1\x19\xd2\
\x36\x27\x1e\x6f\x36\x44\x0c\xcb\xc4\x1b\xc0\x40\x30\x11\xf7\x7e\
\x3e\x6f\x99\xb8\x59\xd0\xff\xe7\x80\xc3\x46\x79\x2c\x13\x6f\x31\
\xfd\xfe\x44\x27\x2d\xa8\x2f\x68\x41\x85\x7a\x76\xca\x81\x89\xfb\
\xe3\x7c\xbb\x9f\xe5\xd8\x53\xb8\x48\xa3\xfe\xf3\xcd\xc0\x81\x06\
\xdc\xdd\x19\xa8\xb5\x11\x65\x99\x78\x03\xa1\x7c\x8a\xbd\x5f\x48\
\x84\x0f\x9b\x95\xaa\x96\x06\x5b\x26\xde\x60\x68\x41\xf5\x55\x9b\
\x26\xac\xe4\x2b\xa3\x8f\xfe\x95\xb1\x4c\xbc\x41\xe8\x46\x5e\x4b\
\x94\xf3\x95\x89\xc7\x9b\xe0\x0c\x69\x19\xcb\xc4\x1b\x08\x2d\xa8\
\x54\x2d\x82\x0c\x04\x13\xc0\xdf\x7b\xfa\x6a\xe2\xf1\x26\x3c\x5a\
\xcf\x17\xbe\x62\xcf\x32\xf1\xe6\xf0\x3a\xc3\xfa\x5f\x45\x51\x02\
\x89\x6d\x5a\x50\xd1\x1b\x81\xb4\x1d\xba\x0d\x5f\x80\xff\xce\x40\
\xf9\x14\x3b\x2d\xa8\x73\x87\x09\x52\x3c\xe6\xd9\xef\x4f\x74\x5a\
\x06\xde\x22\x0e\x31\x79\xab\x40\xd8\x4e\x8a\x7d\xa5\x5f\x48\x84\
\xad\xdd\x60\x02\x5e\xfb\x8a\x62\x89\x61\x61\x61\x61\x61\x61\x61\
\x61\x51\x23\xff\x07\x82\x1a\x32\xfe\xea\xd9\x2b\x4a\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x02\x28\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x01\xef\x49\x44\x41\x54\x78\x9c\xed\xda\x3f\x4b\x02\x71\
\x1c\xc7\x71\x9f\x46\x5b\x63\xcf\xa0\xd5\xc9\x4d\x70\x69\x6b\x08\
\x7c\x06\x0d\x0d\xb6\xe4\xdc\x70\x2e\x4e\x0e\xb6\x34\x39\x07\xe1\
\xd0\xb9\x89\x07\x2d\x12\xb4\x04\x61\x42\x84\x4b\x20\x86\x78\x45\
\xbf\x86\x56\xfb\x43\xda\xf9\xf9\x7e\xef\xfd\x81\xf7\x13\xb8\xd7\
\x72\xf7\xbb\x5f\xa1\xc0\x18\x63\x8c\x31\xc6\x04\x17\xf5\xe7\xc5\
\x46\x92\xc6\x06\x6b\x6f\xfa\xd9\xfd\xcb\xa2\xc1\xa2\xd2\x48\x16\
\xc1\x66\x69\xbc\xe9\xe7\xb7\xf6\xd9\x06\x71\x88\x62\x0d\xa4\x3d\
\x4c\x7d\xa3\x58\x03\xb9\x7a\x78\x0b\x97\xf7\xaf\x7e\x51\x2c\x82\
\x8c\x5e\xde\xfd\xa2\x58\x05\x71\x8b\x62\x19\xc4\x25\x8a\x75\x10\
\x77\x28\x1e\x40\x5c\xa1\x78\x01\x71\x83\xe2\x09\xc4\x05\x8a\x37\
\x10\xf3\x28\x1e\x41\x4c\xa3\x78\x05\x31\x8b\xe2\x19\xc4\x24\x8a\
\x77\x10\x73\x28\x79\x00\x31\x85\x92\x17\x10\x33\x28\x79\x02\x31\
\x81\x92\x37\x10\x79\x94\x3c\x82\x7c\x85\x12\x25\xf3\x83\x4d\x7b\
\xe4\x16\x64\x19\x0a\x20\x7f\xa8\x79\xfd\xf9\x5f\x7d\x5d\x01\x22\
\x1c\x20\x62\x01\x22\x16\x20\x62\x01\x22\x16\x20\x62\x01\x22\x16\
\x20\x62\x01\x22\x16\x20\x62\x01\x22\x16\x20\x62\x01\x22\x96\x1b\
\x90\x5a\x77\x1a\x0e\x2f\x9e\x25\xaa\x75\xa7\x80\x94\x9a\xa3\xb0\
\x75\x34\x94\x68\xf7\xf4\x0e\x10\x40\x00\x01\xe4\xbb\xca\xad\x71\
\xd8\x3e\xbe\x91\x08\x10\x47\x01\x22\x16\x20\x62\x01\x22\x96\x1b\
\x90\x72\x6b\x1c\x76\x4e\x6e\x33\xa9\xda\x99\x00\xf2\x53\x59\xbe\
\xf6\xee\x9f\x3f\x01\x02\x08\x20\x80\xac\x52\xbd\x37\xcb\xec\xf0\
\xb0\xde\x9b\x01\x92\x97\x00\x11\x0b\x10\xb1\xdc\x80\x54\x3b\x93\
\x50\x6e\x8d\x25\x5a\xe5\x3b\xc5\x0d\x08\xc7\xef\x80\x00\x02\x88\
\x21\x90\x6a\x67\x12\x4a\xcd\x91\x44\x7b\x67\x8f\x80\x78\x09\x10\
\xb1\x00\x11\xcb\x0d\x48\x96\x17\xe5\x38\xcb\xfa\x45\x9c\xf6\x02\
\x02\x08\x20\x86\x40\xb2\xbc\x28\x07\x48\x8e\x02\x44\x2c\x40\xc4\
\x02\x44\x2c\x40\xc4\x02\x44\x2c\x40\xc4\x02\x44\x2c\x40\xc4\x02\
\x44\x2c\x40\xc4\x02\x44\x2c\x40\xc4\x02\x44\x2c\x40\xc4\x02\x44\
\x2c\x40\xc4\x02\x44\x2c\x40\xc4\x02\x44\x2c\x0d\x90\xfe\xbc\xd8\
\x48\xd2\x98\xd2\x38\x1a\x2c\x2a\x9b\xf6\x60\x8c\x31\xc6\x18\x63\
\xcb\xf6\x01\xce\x57\x8e\xe7\x2f\x94\x02\x6b\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\x7b\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\
\x00\x00\x05\x42\x49\x44\x41\x54\x78\x9c\xed\xda\x6b\x4f\x53\x77\
\x00\xc7\x71\x5f\xc2\xde\xc1\xf6\x2a\x7c\x66\x42\xc0\xe2\x0c\xa9\
\xc3\x6d\x20\xea\xe6\x16\x99\x97\x32\x60\x84\x5b\xe9\x00\xa5\x6a\
\x27\xb6\x90\x66\x20\xb6\x6c\x15\x69\x3b\x19\x28\x04\x98\x13\xac\
\xb5\x5c\x8a\xf6\xc2\x00\xdb\x00\x02\x72\x59\xc0\x31\x6a\xe4\x12\
\x4a\x2b\x12\x97\xfc\xf6\x60\x96\x80\xf4\xb4\xc0\xf9\x9f\xd1\x9e\
\xf3\xff\x26\xbf\xa7\xe7\xa4\x9f\x9e\x93\xd3\xb4\xdd\xb7\x8f\x46\
\xa3\xd1\x68\xeb\xa9\xf2\xf4\x20\xb9\x2b\xf9\x35\xc8\x2a\xae\xc2\
\x59\x99\x7a\x4f\x97\x73\x41\x83\x4b\xf9\x3a\xa8\x72\x6f\x65\x71\
\x0e\x38\x31\xec\xdd\xf1\x98\x00\x73\x2f\x6a\xd1\xd0\xd6\x03\x87\
\x7b\x7c\x4f\xd7\x62\x76\x20\xe7\xca\x4f\xd0\x5c\x6e\x5a\xe1\x14\
\x91\x34\xe0\x59\x99\x7a\xcf\xf1\x02\x3b\x2b\x53\x63\xe4\xe9\x02\
\xb7\x88\x7c\x07\x9c\x18\xf6\x72\x8b\x28\x04\x40\x4e\x11\x85\x02\
\xc8\x19\x62\xb4\x00\x32\x3d\x6d\x77\x02\xc8\x09\xa2\xd0\x00\x89\
\x23\x0a\x11\x90\x28\xa2\x50\x01\x89\x21\x0a\x19\x90\x08\xa2\xd0\
\x01\x59\x23\x52\x40\x96\x88\x14\x90\x25\x22\x05\x64\x89\x48\x01\
\x37\xe0\xb5\xda\xd0\xff\x4d\x1a\xfa\x8e\x7f\x09\x7b\x5c\x1c\x7a\
\x3e\x49\x9a\xb2\xc5\xc6\x86\x86\xa4\x80\x5e\x8c\xf7\xcd\xc1\x2d\
\x2f\xc7\x40\x72\x0a\x66\x32\x33\xb1\x28\x97\x63\x55\xad\x86\x47\
\x2a\xc5\xc8\xc9\x93\x0b\xce\xf8\xf8\x51\x7b\x4c\xcc\x47\x14\x30\
\xc8\x86\x1d\x33\x78\x9a\x91\x8b\xf1\xf3\x12\xf8\x94\x4a\xac\xaa\
\xd5\x5b\x36\x93\x99\x09\x87\x48\x34\x1b\xd5\x80\xbb\x45\x0f\x87\
\x77\xfd\x9a\x06\x2d\xd2\x8b\x8c\x78\x81\x0d\xa5\xa4\xf8\xed\xb1\
\xb1\x5f\x53\xc0\x77\x7b\xe6\x5a\x40\x55\xa5\x11\xba\xba\xfb\x50\
\x69\xeb\xd1\x59\x5e\x15\x12\xd0\x23\x95\xa2\x2f\x21\xa1\x87\x02\
\xbe\xc3\xd3\x18\xdb\x61\xf8\xad\x07\x83\xd3\x4b\x18\x9c\x5e\x0a\
\x8b\xe8\x53\x2a\xe1\x10\x89\x5e\x0b\x1e\x70\xa8\x7f\x06\x55\xb5\
\x8d\xeb\x78\x63\x73\x7e\x8c\xcd\xf9\xc3\x22\x2e\x2b\x14\xe8\x3d\
\x74\xe8\x15\x31\xc0\x2a\xf9\xdd\xa0\x80\xe9\x85\x95\x68\xeb\xee\
\xdf\x73\xbc\x86\xb6\x1e\x14\x2a\x6b\xb7\xe0\x55\x54\x6b\xa1\x6f\
\x36\x6d\xc2\xdb\x0e\xa2\x47\x2a\x45\xbf\x58\xec\x20\x06\xe8\xb0\
\x4c\x05\x45\xbc\x94\xaf\x43\x46\x51\x25\xeb\x9f\x25\xd9\xee\x92\
\xfa\x36\x9a\xef\xf5\x6d\xc1\x33\xb4\x3e\x0a\x8a\x17\x0a\xd1\xa7\
\x54\xc2\x75\xf4\xe8\x2b\x5b\x5c\x5c\x22\x31\xc0\x68\xda\x73\xf7\
\x1c\xec\xcd\x05\x50\x57\xdf\xc0\x8f\x86\x56\x46\x3c\x26\xc4\xd1\
\x53\xa7\xd6\xfa\x13\x12\x5a\x89\x7e\x8c\x89\x96\x05\xf0\x9e\x75\
\xe4\xe2\xed\xcb\x9b\xd0\x1a\x6b\x77\x84\xd8\x94\x2d\xfb\xc7\x19\
\x1f\x3f\xda\x19\x13\xf3\x81\xe0\x00\xdf\xc7\xc3\xa2\x11\x58\x34\
\x6e\x1b\x51\x77\xc7\x84\x1f\x14\x95\x6b\x8c\x78\x7c\x06\x64\xc2\
\xdb\x2e\xa2\xae\xc9\x82\xcb\x57\xab\xa1\x4b\xfd\xf6\x43\x46\x3c\
\xbe\x02\x86\xc3\x0b\x87\xa8\x6b\xb2\x40\x55\xdd\x08\x65\xbe\x1e\
\x21\xf1\xf8\x08\xb8\x5d\x3c\x26\x44\x5d\xa3\x09\xd7\x34\xbf\xe0\
\x99\x6b\x01\xaa\x3c\x81\x01\xee\x14\xef\x7d\x44\x5d\xe3\x43\x5c\
\xad\xb8\x81\xa1\xfe\x19\x4c\x0c\x7b\x85\x05\xb8\x5b\xbc\xc0\xd4\
\xda\xeb\x9b\xf0\x04\x05\xc8\x16\x6f\xd0\x94\x0e\xeb\xaf\xa9\x9b\
\xf0\x04\x03\x48\x0a\xef\xb9\x7b\x6e\xcb\xb1\x89\x00\x16\xa8\x74\
\x11\xbb\xa2\x32\x0d\x9a\x74\xa9\x70\x3f\xfc\x6e\x57\x78\xee\x07\
\x69\x8c\x78\x44\x01\x23\xb1\xb7\x6b\x3e\xb8\x4c\x45\x18\xed\xca\
\xdb\x15\xde\x90\x39\x1d\xf7\x6b\x52\x18\xf1\x78\x0d\x48\x02\xcf\
\xde\x74\x06\x45\x65\x9a\x90\xaf\x9d\x97\x80\xa4\xf0\xde\xae\xf9\
\x50\xa0\xd2\x09\x0b\x90\x24\x1e\x00\x61\x01\x92\xc6\x03\x04\x04\
\xc8\x05\x1e\x20\x10\x40\xb6\x78\xa3\x5d\xd9\x41\xf1\x00\x01\x00\
\xfa\x96\x3d\xb0\x18\x4e\x60\xa4\x23\x7b\x57\x78\x13\x4f\x64\x8c\
\x78\x00\xcf\x01\x7d\xcb\x1e\x58\xf4\xc7\x31\x64\x4e\xe7\x04\x0f\
\xe0\x31\x60\x00\x6f\x98\x43\x3c\x20\x42\x00\xad\x1e\xb2\xeb\x9c\
\xf4\xc0\x54\xbb\x7b\xbc\xf1\xc7\x32\x74\xde\x39\x03\xeb\x0b\x1f\
\xac\x9e\xd0\x6f\x54\x44\x00\x92\xae\xfd\xd6\x79\x3c\xfc\xf9\x63\
\x8c\x76\x66\x71\x76\xe5\x05\xe2\x1d\xe0\xec\x94\x0b\xf5\xe5\xc7\
\x30\x6f\x2b\x86\xad\x2e\x09\xb3\x4f\xe5\x9c\xe1\x01\x3c\x04\x6c\
\x37\x7c\x0f\x5b\x9d\x04\xfe\x01\x05\xfe\x34\x67\xc2\x7a\xfb\x33\
\xbc\xfe\x4b\xcb\x09\x1e\xc0\x33\x40\xef\xa2\x07\x86\xd2\xcf\xb1\
\xe4\x94\xc3\x3f\xa0\xc0\xf2\x1f\x72\x8c\x3f\x48\x83\xd5\x78\x14\
\x83\x26\x09\x23\xde\x58\x77\x0e\x1e\xd7\x7f\xb5\x63\x3c\x80\x67\
\x80\xbd\xe6\x9b\xe8\xd2\x9f\x83\x7f\x40\xb1\x65\xce\x86\x14\x4c\
\xf7\x16\x6e\x7d\x60\x58\x73\xf0\xa8\x26\x11\xbe\xe5\x30\x4f\x0b\
\x86\x78\x03\xf8\x66\x75\x05\xc6\xd2\x64\xbc\xe8\x90\x05\x05\x0c\
\xdc\xce\xde\xa9\x0a\x62\x78\x00\x8f\x00\xa7\x86\x9f\xe0\x77\xed\
\xe9\xa0\x78\x1b\x6f\xe7\xde\xbb\x27\x30\x68\x92\x10\xc1\x03\x22\
\x04\x90\xc4\x67\xbf\x94\x13\xc9\x30\xd5\xa4\x31\x02\x6e\x9c\xa5\
\xe6\x08\xcc\x35\x89\xe8\x9c\xf4\x6c\xeb\xd8\x11\x0f\xc8\xb6\x0e\
\xcb\x03\xec\x8f\x11\x23\xf6\x70\x22\xdc\xf7\x0a\x83\xa2\x2d\x39\
\xe5\x98\x34\xe5\xa1\xbe\x2c\x19\xcd\xd7\x8f\xb1\xbe\xf2\x02\xf1\
\x02\x50\x72\xfe\x34\xf6\xc7\x88\x83\x22\x2e\x39\xe5\xb0\xd5\x49\
\x60\xb8\xfa\x29\x5a\xaa\x33\x30\x3b\xe5\xc2\x9b\xd5\x15\x62\xe7\
\x8e\x7a\xc0\xf9\xf9\x97\x38\x20\x12\xaf\x03\x06\x10\x3b\x0c\x99\
\xe8\xd2\x9f\x83\xb1\x34\x09\xbd\xe6\x9b\x78\xf5\xf7\x04\x27\xe7\
\x8f\x7a\xc0\x92\xe2\xbc\x4d\x78\x81\x7d\x71\x32\x19\x36\xb3\x91\
\xe8\xd5\x16\xac\xa8\x06\xf4\xae\xf8\x70\xf0\xf0\x91\x75\xb4\x03\
\x07\xc5\x48\x4a\x4e\x82\xd3\xde\xcd\xc9\xf9\x82\x15\xd5\x80\x0d\
\x75\xb5\xff\xc1\x89\xc4\x28\x29\xce\xfb\x5f\xe1\x02\x45\x35\x60\
\xc9\x05\x29\xca\x95\x72\xcc\xcf\xbf\xe4\xe4\xf8\xdb\x29\x22\x00\
\x49\x7f\x1f\x48\x7a\x11\x0f\x18\xcd\x51\x40\x96\x51\x40\x96\x51\
\x40\x96\x51\x40\x96\x51\x40\x96\x51\x40\x96\x51\x40\x96\x51\x40\
\x96\x11\x03\x0c\xb5\xbd\xfe\x23\x39\xd7\x0b\xf7\xfa\xc3\x02\xd2\
\x68\x34\x9a\x80\xfa\x17\x01\x56\x0c\xdf\x04\x98\x4c\xc8\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x0b\xf2\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x0b\xb9\x49\x44\x41\x54\x78\x9c\xed\x9d\x6d\x53\x13\x59\
\x16\xc7\xf9\x08\x7c\x84\xf9\x08\x7e\x04\x5e\x9a\x86\x3c\x20\x6c\
\x59\x54\x0d\x0b\xee\xb0\xa3\x69\x92\xd0\x9a\x0e\x66\x17\x71\x22\
\x96\xc4\x71\x44\x0c\x53\x74\x8b\x3b\xd9\x36\x4f\x24\x82\x89\x50\
\x61\x34\xa8\x34\x4e\x08\x38\x30\x35\x5b\x29\x77\x4d\xe9\x2e\x5a\
\x2e\x96\x45\x7c\x41\xad\xbb\x84\x87\xc0\x5a\x67\x5f\xe0\xcd\x46\
\x4c\x77\xba\x93\x0e\xdd\xad\xf9\x57\x9d\x37\x1a\xab\xef\xbd\xbf\
\x3e\xf7\xdc\x73\xef\xe9\x6b\x4d\x4d\x55\x55\x55\x55\x55\x55\x55\
\x55\x55\x55\x55\x55\x55\xa5\x54\x31\x84\xa1\x96\xc1\xb1\x43\x8c\
\x51\x63\x60\x8c\x1a\x82\xc1\xb1\x76\xe6\xc4\xe1\x3a\xe6\xc4\xe1\
\x2f\xe4\x6e\xdb\x67\x25\xe6\xc4\xe1\xba\x60\x97\x8e\xf2\x99\xea\
\x33\x1e\x1c\x83\xd1\x2e\xdd\xee\x84\xfd\xe8\xe6\x2d\xb2\x69\xd7\
\x83\x63\x80\xcc\x8b\x63\x0c\x73\xe2\x70\x9d\xdc\xed\xfd\x64\xc5\
\xe0\xd8\x21\x5f\x67\xfd\xe3\x80\x59\xbb\x7d\xd7\xd1\xba\xbb\xec\
\xed\x85\x2c\x4b\x03\x24\x7e\xf8\xc0\xb2\x2c\x0d\x2b\xa1\x3e\x48\
\x5c\x36\x82\x07\xc7\xc0\x6f\xd6\x3e\x61\x70\xec\x90\xdc\xed\xff\
\xa4\xc4\x18\x35\x84\xdf\xac\xdd\xbc\xeb\x68\xdd\x5d\x8b\x0e\x7c\
\x04\x81\xcb\xd6\xa7\x87\x72\x60\x18\xa3\x86\x90\xbb\x1f\x9f\x84\
\x82\x5d\x3a\x2a\x60\xd1\x66\x53\x6e\xbb\x60\x10\xfb\x2d\xe5\xb6\
\xa3\x69\xec\xb6\xdc\xfd\x51\xb5\x18\xa3\x86\x08\x58\xb4\xd9\x95\
\x50\x5f\xc9\x30\x90\xad\x46\x9c\x7b\x9e\x82\x63\xed\x72\xf7\x4b\
\xf1\xca\xad\x98\xf2\x56\x48\x0c\x8e\xb5\xfb\xcd\xda\xcd\x72\x3c\
\x83\x13\xca\xfb\xe9\x2b\xf7\xdc\x7d\xcf\xfe\xec\xc4\x10\x86\x5a\
\xc6\xa8\x21\x7c\x9d\xf5\x8f\xd1\x6a\xa9\x90\x49\x09\x03\xd9\xb2\
\xb7\xb7\xe0\xb3\x90\xf9\xcd\xda\x29\xc6\xa8\x21\x3e\x0b\x40\x0c\
\x61\xa8\x45\x4b\xd6\x71\xb2\x29\xbb\x78\xd5\xb4\xbd\x1a\x71\xc2\
\x5a\x74\x00\xb2\x2c\x0d\xeb\xd3\x43\xb0\x16\x1d\x00\xf4\x67\x52\
\xc3\xc8\x5f\x89\xe5\x3f\x17\x3d\x7b\x25\xd4\x07\x49\xda\x0a\x63\
\xd6\x23\x5b\x1e\x1c\x03\x6f\x67\xfd\xa0\xdc\x63\x56\x31\x31\x27\
\x0e\xd7\xf9\x3a\x1b\xd6\x27\xec\x47\x25\x9d\x86\x2a\x65\x2b\xa1\
\x3e\x18\x27\x9b\xb2\x1e\x1c\x83\x4f\x2e\x9f\x41\xcb\xd6\xc5\xab\
\xa6\xed\x42\xb9\x43\x29\x6f\x38\xf2\xa6\x95\x50\x1f\xa4\xdc\x76\
\x48\xb9\xed\xb0\x12\xea\x83\xd5\x88\x13\xd6\xa7\x87\x24\x03\xb3\
\xe4\xb2\x7c\x5a\xcb\x66\x06\xc7\xda\xa5\x88\x07\xab\x11\x27\x24\
\x69\x2b\x44\x7b\x5a\x3e\x98\xf3\x83\x84\x1e\xa2\x3d\x2d\x10\xb6\
\x35\x7f\x14\x0f\x62\x8e\x36\x58\x72\x59\x60\x35\xe2\x2c\xdb\x5b\
\xde\x43\x31\xc8\x3d\x9e\x65\x89\xc1\xb1\x43\x3e\x53\xc3\xc6\x92\
\xcb\x52\xb2\x27\xe4\x43\x88\x39\xda\x20\x49\x5b\x61\x2d\x3a\x90\
\x9b\xff\x0b\xfd\x3b\x14\x0f\x52\x6e\x3b\xc4\x1c\x6d\x39\x70\x4b\
\x2e\x4b\xc9\xde\x83\x16\x03\xaa\xce\xfa\x03\x66\xed\xa3\x99\x0b\
\x5f\xed\x94\x02\x62\xc9\x65\x81\x20\xa1\xff\x00\x42\x39\x53\x5c\
\xca\x6d\xcf\x79\x51\xa9\x2f\x08\xdb\xdf\x01\x3e\x53\xfd\x2b\xb9\
\xc7\xb5\x24\x31\x46\x8d\x21\x60\xd6\x6e\x8b\x19\x48\xe4\x11\x68\
\x1a\x4a\xb9\xed\x9c\x5e\x50\xaa\x2d\x7b\x7b\x4b\x06\x93\x65\x69\
\xf5\x26\x97\x01\xb3\xf6\xd1\xdc\xa5\xe3\x59\x31\x9d\x65\xfb\x3b\
\x20\x48\xe8\x2b\x02\x62\xbf\xa5\xdc\xf6\x1c\x78\x31\xcf\x5a\x72\
\x59\xc0\xdb\x59\xff\x6f\xb9\xc7\x57\x94\x18\xc2\x50\xeb\xc1\x31\
\x10\xba\xe5\xb1\x16\x1d\x80\xb0\xad\x19\xa2\x3d\x2d\x92\xae\x90\
\x84\xbc\x04\xd1\x9e\x16\x08\x12\x7a\xc1\x53\x62\xce\x4b\xd4\x94\
\x38\x32\x38\xd6\x1e\xb0\xe8\x76\x84\xbc\x79\xab\x11\x27\x04\x09\
\x3d\xb0\xfd\x1d\x15\xf7\x0a\x2e\x63\xfb\x3b\x40\xcc\x0b\x74\xcb\
\xd6\xfc\x4e\x55\xcb\x60\x06\xc7\x1c\x77\xce\xb6\x6e\x09\xf1\x8c\
\x20\xa1\x87\x24\x6d\x95\x05\x44\xbe\x25\x69\x2b\x78\x70\x4c\xd0\
\x12\x79\xee\xd2\xf1\xac\x17\xc7\x18\xb9\xc7\x59\xb0\xbc\x38\xc6\
\x24\x2e\x1b\x79\x3b\xb5\x3e\x3d\x04\x61\x5b\x73\xc9\x2b\x9e\x4a\
\x58\xe2\xb2\x51\xd0\xf4\xb5\xe4\xb2\x80\xdf\xac\x9d\x92\x7b\x9c\
\x05\xcb\x6f\xd6\x4e\xf1\x0d\x74\x96\xa5\x21\xe6\x68\x83\x98\xa3\
\x4d\xb6\x69\x8a\xcb\x62\x8e\x36\x08\xdb\x9a\x8b\x7a\x93\xdf\xac\
\x7d\x22\xf7\x38\x0b\x96\xb7\xb3\x7e\x90\xed\xef\xe0\x7d\x13\xc3\
\xb6\x66\xc5\xc1\x40\x2f\x4b\xb4\xa7\x05\x62\x8e\x36\xde\xf6\xdf\
\x24\xf4\xe3\x72\x8f\xb3\x60\x79\x3b\xeb\x07\xef\x3a\x5a\x77\x0b\
\x75\x06\x9d\x49\x94\xbb\x9d\x51\x49\x5b\x9f\x1e\xe2\x0d\xf2\x6c\
\x7f\x87\xba\x76\x82\xbd\x38\xc6\x70\x05\xf5\x98\xa3\x0d\xf8\xbc\
\x47\x29\x86\xbc\xb8\xd0\xdf\xa9\x2a\xa8\xa3\x9d\xdd\x42\x9b\x89\
\x2b\xa1\x3e\x51\x6b\x7e\x39\x0d\xe5\x1b\xcb\xde\xde\x82\xfd\x50\
\xc5\x0e\x70\xb0\x4b\x47\x05\xcc\xda\x6d\xae\x9d\xdd\x68\x4f\x8b\
\xa2\x56\x55\xc5\x0c\x6d\xe3\x14\xfa\xbb\x5c\xe1\x84\x52\xa7\xae\
\xbd\x82\x04\xdd\x0e\xd7\xbc\xbb\x16\x1d\x00\x0f\x8e\x1d\x68\x26\
\x2e\x95\x97\x70\xf5\x09\xc5\x43\xc5\x41\x41\x67\x1e\x7c\x99\x2e\
\xda\x46\x97\x7b\x90\xc5\x1a\xdb\xdf\x01\x7c\x39\x95\xe2\xaa\x59\
\x18\xc2\x50\xeb\x33\xd5\x67\x8a\x4d\x45\x61\x5b\xb3\x22\x32\x72\
\xb1\xb6\xec\xed\xe5\x9c\xb6\xf2\x5f\x36\x9f\xa9\x3e\xa3\x88\xfd\
\x2d\x06\xc7\x1c\x63\xd6\x23\x5b\x7c\x39\x85\x1a\xa7\x2b\x64\x68\
\xda\x2a\xd6\xf6\x5b\x64\xd3\xae\xec\x2b\x2f\xe4\x1d\xc5\x8e\x67\
\x53\x6e\xbb\x2a\xa7\x2b\x64\x42\xbc\x3b\x77\xa2\x48\x18\x6a\xe5\
\x03\x62\xd4\x18\x84\xec\xea\x26\x2e\x1b\x79\xe7\x61\xa5\xdb\x92\
\xcb\x22\x28\x77\x0a\x58\x74\x3b\xb2\xc6\x12\x2f\x8e\x31\xf7\xcf\
\x1f\xdb\x28\xd6\x50\xb6\xbf\x43\x55\xcb\xdd\x52\x81\xbc\x4f\x18\
\xe5\xab\x1f\x0e\x12\xfa\x5f\x84\x0c\x34\x3a\x17\x97\x7b\x60\x4b\
\x35\x74\x1e\x5f\xec\x77\x49\xda\x0a\x7e\x53\xc3\x6b\xd9\x80\xf8\
\xcd\xda\xd5\x42\x99\xec\x7e\x0b\xdb\x9a\x0b\x66\xbc\x6a\x31\xb4\
\xc3\x20\xe4\x77\xbe\xce\x86\x75\xd9\x80\xf8\x4c\xf5\x19\x21\xa7\
\x6c\x62\x4e\xe3\x94\x68\x28\xd7\x10\xfa\x3b\x59\x81\x08\x29\x80\
\x43\x85\x0b\x72\x0f\x6c\xa9\x86\x2a\x55\x84\x78\x48\xc0\xac\x7b\
\x23\x1b\x10\xbf\x59\xfb\x44\x48\x6c\x40\x25\x3d\x72\x0f\x6c\xa9\
\x26\x74\xd9\x9e\x72\xdb\x21\xd8\xa5\xfb\x87\x6c\x40\x82\x5d\x3a\
\x4a\x48\x21\x9c\xda\x57\x59\x49\xda\xca\x7b\x60\x85\x6c\xe6\xc2\
\x57\x3b\xb2\x1e\xed\xee\xe5\x21\xda\xa2\xb5\x57\x42\x97\x8d\x4a\
\x35\xa1\x79\xd4\xd8\xa9\xc6\x0d\x59\xeb\x7f\x51\xfd\x55\xb1\x15\
\x14\xdf\x36\xb6\x1a\x0c\x15\x6d\xf3\xfd\x66\x25\xd4\x07\x7e\x53\
\xc3\x96\xec\xfb\x59\x5e\x1c\x63\x46\xbb\xf4\xef\xf8\x1a\x2b\x74\
\x3f\x48\x89\x86\xda\x5e\xec\xc8\xf9\xce\xd9\xd6\xad\x60\x97\x8e\
\x92\x15\x46\x4d\xcd\xff\xbd\xa4\x58\xd0\x0e\xdb\x9a\x55\x19\xd8\
\x51\xc9\x69\xb1\xdf\xf8\x4c\x0d\x1b\xb2\x7b\x07\x12\x63\xd4\x18\
\x8a\xbd\x45\x4b\x2e\x8b\xa0\xc0\xa8\x34\x2b\xb6\x20\x59\x8b\x0e\
\x40\xc0\xac\xdd\x56\xdc\x71\xee\xde\xd4\xa5\xdb\xe5\x82\x82\x92\
\x26\x35\x4d\x5b\x59\x96\x86\x20\xa1\xe7\x4c\x6a\xd7\xa2\x03\x30\
\x4e\x36\x65\x15\x31\x55\x15\x92\xb7\xb3\x7e\x90\x6f\xfa\x52\x5a\
\xa5\x62\x31\x4b\xd2\x56\xce\xfa\xb1\xbd\x24\x50\xbb\x1d\xec\xd2\
\x51\xb2\x6e\xb9\x17\xd3\xb5\x53\xc7\xbe\x2b\x56\xad\xa1\x06\x2f\
\x41\xde\x51\xe8\xe5\x5a\xf6\xf6\x82\xcf\x62\x80\x6b\xa7\x8e\x7d\
\x27\xf7\x78\xf3\xca\x36\x3c\x7f\xc8\x46\xcf\xff\xe7\xfa\x85\x6f\
\xde\xcd\x38\xbf\x2e\xd8\xd1\x68\x4f\x8b\x2a\xce\x46\x96\x5c\x16\
\x4e\xef\x60\xfb\x3b\xe0\x87\x73\xa7\xa1\x9b\x9e\x5f\xb7\x0d\xcf\
\x2b\xf3\x13\x37\x04\x63\x38\xf6\x7c\xfb\x76\xd0\x07\x77\xfb\x7e\
\x57\xb0\xa3\x28\x96\x28\xb9\x36\x6b\x7d\x7a\x08\x82\x84\x9e\x33\
\xbf\x9a\x71\x7e\x0d\xb7\x3d\x23\xe0\xfa\xf1\xef\xca\x84\x92\x0f\
\x63\xe6\xd5\x2e\x84\x47\xae\xf0\x7a\x01\xdb\xdf\x21\x68\xb3\x4e\
\x0e\xcb\x2f\x04\xe7\xf5\x1e\xd7\x39\x88\xa7\x41\x79\x50\xf6\xc3\
\x88\xa7\x01\xc2\xae\x73\xbc\xc1\x5b\x48\x41\xb3\x5c\x86\x5e\x16\
\xbe\x63\xe9\x24\x6d\x85\x5b\x17\xad\x10\x4f\x83\xb2\xa0\x14\x82\
\x11\x4f\x03\x84\x47\xae\x14\xdd\xbb\x42\xd3\x82\x92\x56\x5d\x28\
\x09\x2c\x36\x9d\x26\x2e\x1b\x61\xfc\x22\x99\xeb\xaf\x22\xa0\x70\
\xc1\x88\xa7\x01\x6e\x7b\x46\x80\x2b\xa8\xe7\x1b\x8a\x27\x4a\xc8\
\xe0\x51\x69\xa8\x90\x83\x34\xb6\xbf\x03\x6e\x7b\x46\x3e\xe8\xb3\
\xac\x50\xf8\x60\xc4\xd3\x00\x77\xa6\xef\xc3\x28\xd1\x28\x68\x20\
\x10\x14\xb9\x56\x5e\xf9\xdf\xc4\x0b\x3d\x66\x1e\xef\x3e\x0a\x93\
\x37\xfd\x1f\xf5\x5b\x16\x28\xc5\x60\xc4\xd3\x00\x0f\x5f\x66\x44\
\xe5\x1b\xe8\x5b\x43\xb1\x9f\x28\x4b\x01\x03\x7d\x8a\x2d\xe6\x8b\
\x61\x0f\x8e\x01\xfb\x34\x5d\xb0\xef\x07\x0a\x45\x08\x0c\x64\xa3\
\x7f\x6c\x17\xf5\xd6\xe7\x7f\xa2\x7c\x10\x05\x11\xab\x11\x67\x49\
\x9f\x62\x27\x69\x2b\x8c\x39\x8e\xf3\xf6\xfd\x40\xa0\x88\x81\x11\
\x4f\x03\xc4\x7e\x5a\x28\x29\x2b\x47\x73\x79\xb4\xa7\xa5\x22\x60\
\x56\x23\xce\x1c\xf8\x24\x6d\x15\xe5\x91\x59\x96\x86\x51\xa2\x11\
\xa6\x26\x27\x8a\xf6\xbf\xa2\x50\xc4\xc2\x40\x16\x3a\xd7\x59\xd2\
\x49\x61\x96\xa5\x73\xb7\x89\x4a\x01\x06\x5d\x1d\x1b\x73\xb4\xe5\
\xbe\x89\x2f\x65\xfb\xe6\xe7\x21\x02\x46\xff\xf0\x5b\x78\xf8\x32\
\x23\x78\x0c\x24\x87\x52\x2a\x8c\x78\x1a\xe0\x41\xf2\x39\xe7\x57\
\x48\x42\x6c\x2d\x3a\x90\x03\xe3\xc1\x31\x60\xfb\x3b\x20\x49\x5b\
\x61\x35\xe2\xe4\x7d\xb3\xb3\x2c\x0d\x6b\xd1\x81\x5c\x71\x42\x90\
\xd0\xe7\x40\x94\xba\x43\x90\x72\xdb\x21\x70\xb2\x09\x62\x3f\x2d\
\x88\x1a\x03\x49\xa1\x94\x03\x63\xff\xd4\x55\x6e\x5d\xd6\x4a\xa8\
\x2f\xb7\xbf\x94\x7f\x27\x56\xd8\xd6\x0c\x31\x47\x5b\xee\xce\xac\
\x20\xa1\xff\xe0\x2e\xad\x25\x97\x85\xf7\x2a\x27\xa1\xd3\x5c\xe0\
\xe4\x11\xc1\x53\x55\x45\xa0\x48\x01\x03\xd9\xe4\x4d\x3f\x78\x70\
\x4c\xb2\x72\x52\xe4\x01\x2b\xa1\x3e\x58\xf6\xf6\x42\x92\xb6\xe6\
\x6e\x95\xcb\xbf\x4b\x51\x8a\x67\xed\x79\xc6\x11\x88\x86\xbc\x65\
\x8d\x41\x59\x50\xa4\x84\x81\xec\xde\xe2\x63\xf0\xe0\x18\x4c\x9e\
\xf9\x52\x15\x65\xa5\xab\x11\x27\x4c\x9e\xf9\x12\xfc\x27\x9b\xe0\
\xce\xf4\x7d\x49\xc6\xa0\x24\x28\x95\x80\x81\xec\xe1\xcb\x0c\x84\
\x47\xae\x80\x07\xc7\xe0\xc7\xf3\xbf\xcf\x5d\xbf\x87\x02\x6c\xfe\
\x5d\x8a\x95\xcc\x47\xd0\x33\xf2\x9f\x8b\x6e\x2a\x4d\xd2\x56\x98\
\x3c\xd3\x0a\x81\x93\x4d\x10\x76\x9d\xe3\xcd\x37\x2a\x0e\xa5\x92\
\x30\xf2\xed\x41\xf2\x39\x4c\x4d\x4e\xc0\xd8\xb7\xdd\xbc\x77\xe8\
\x56\xc2\x93\x52\x6e\x3b\x8c\x12\x8d\x9c\xcf\xbc\x75\xd1\x0a\x93\
\x37\xfd\xf0\x20\xf9\xbc\x62\xfd\x17\x04\xc5\x36\x3c\x7f\x88\xa4\
\x12\xff\x22\xa9\x04\x5c\x08\x3d\xde\xa2\xee\xbd\x80\xc0\xe2\x1b\
\x88\x3e\x13\xbe\xc4\x2b\xd5\xd8\xa7\x69\xb8\xb7\xf8\x18\x1e\x24\
\x9f\x03\xfb\x34\x0d\x0f\x5f\x66\x60\x6a\x72\x42\xf2\x22\xed\x95\
\x50\x5f\x2e\x38\x3f\x7c\x99\xc9\x3d\x17\x3d\x5b\xcc\x72\x56\xac\
\xcd\xbc\xda\x85\xe8\xb3\x0c\x8c\xfd\x65\x0d\xfe\x34\xfb\x4f\x38\
\xe7\xfb\x75\x93\xa4\x12\x60\xa3\xe6\xdf\x72\x42\x21\x06\x67\x6b\
\xad\x54\xdc\x40\x0e\xcf\x39\x48\x2a\x31\xdb\x7d\x6d\x21\x43\x52\
\x09\x20\xa9\x04\x5c\x8a\xfc\x6d\xe3\x20\x21\xc5\xd3\x7b\x9b\x94\
\x01\xa2\x51\x92\xc3\xac\xb5\xe8\x00\x04\x88\x46\x98\xbc\x41\x57\
\xbc\xdd\x85\x06\xff\xf4\xf5\x47\x3b\x24\x95\x80\x6e\x7a\x7e\xfd\
\xf4\xb5\x85\x29\x72\x78\xce\x41\x7e\x3f\x57\x47\xd0\xb3\x5f\x08\
\x8a\x25\x4a\x81\x74\xfd\xc2\x37\xef\xca\xf5\x94\xd5\x88\x13\x02\
\x5d\x06\xb8\x7e\xfe\x8c\xe4\x5e\x50\xd1\xc1\x97\x02\xd2\xe5\x89\
\x94\x74\x30\x66\x5e\x6e\x75\x5f\x5b\xc8\x50\xe4\x89\x20\xba\xb4\
\x52\x4c\x86\x9d\x65\x69\x58\xbc\x6a\xda\xf6\x9b\xb5\x9b\x37\xc8\
\xb6\x3f\x93\x54\x02\xa8\x7b\x2f\x24\x6b\xdf\xd5\xe8\xb3\xec\x81\
\x0f\xbe\x18\x48\xef\x01\x49\x0a\xc3\x4a\xc5\x0d\x35\x35\x7b\x77\
\x00\xfb\x4d\x0d\xaf\x11\x18\xae\xfa\x2f\xb4\x5a\x5a\xbc\x6a\xda\
\x0e\x58\x74\x3b\x41\x42\xff\x0b\x2a\x80\x7e\x1f\x27\x25\x83\xd2\
\xe3\x5e\xdc\x20\xa9\x04\x21\xfb\xe0\x73\x89\x18\x9c\xad\x25\xa9\
\x44\xd9\xd3\xd7\x7e\x18\xf9\x62\x70\xac\xdd\x6b\xd4\xcc\xa2\x55\
\xd1\x98\xf5\xc8\xd6\x9d\xb3\xad\x5b\x13\xf6\xa3\x9b\x91\xee\xdf\
\xbc\x0d\x98\xb5\xdb\x7e\xb3\x76\xd3\xd7\xd9\x30\xc7\x18\x35\xc4\
\xfe\x9a\x29\xa9\xa0\x44\x9f\x65\x80\xa4\x12\xa0\x58\x18\x48\xdd\
\x23\xf3\xaf\x6f\x24\x5e\x57\x04\xc6\x7e\x31\x46\x8d\x81\xc1\xb1\
\x76\x06\xc7\x1c\x8c\x51\x43\xbc\xff\x6f\x26\xea\x8a\x15\xae\x49\
\x01\x25\xb0\xf8\x06\x4e\x8f\x2c\xc8\xf7\xb5\x94\x50\x9d\x1e\x59\
\x08\x94\x1a\x47\xc4\xc0\x28\x57\xe5\x42\xa1\xee\xbd\x00\xfb\xf5\
\x9f\xa7\x2b\xdd\xce\xb2\x45\x0e\xcf\x39\xec\xd7\x1f\xfd\x57\xc9\
\x30\x90\xca\x81\xf2\xed\xad\xbf\xbe\x25\xa9\x84\xb2\x6e\x01\x2a\
\x24\x2b\x15\x37\x88\x0d\xec\x72\xc0\x40\x2a\x15\x4a\x8f\x7b\x71\
\x43\x8e\xf6\x8a\x96\xd8\xc0\x2e\x27\x0c\x24\xb1\x50\x54\x13\xd0\
\x91\x84\x06\x76\x25\xc0\x40\x12\x03\x45\x35\x01\x1d\x49\x48\x60\
\x57\x12\x0c\x24\xa1\x50\x54\x13\xd0\x91\x8a\x05\x76\x25\xc2\x40\
\x12\x02\x45\x35\x01\x1d\x89\x2f\xb0\x2b\x19\x06\x52\x31\x28\xaa\
\x09\xe8\x48\x5c\x81\x5d\x0d\x30\x90\xb8\xa0\xa8\x2e\xa0\x23\xed\
\x0f\xec\x6a\x82\x81\x54\x08\x8a\xea\x02\x3a\x52\x7e\x60\x57\x23\
\x0c\xa4\xfd\x50\x54\x17\xd0\x91\x50\x60\x57\x33\x0c\xa4\x7c\x28\
\xaa\x0b\xe8\x48\x28\xb0\xab\x1d\x06\x12\x82\x42\x52\x09\x50\x65\
\x7f\x88\xc1\xd9\x5a\x1b\x35\xff\x56\x95\x8d\xe7\x10\x82\xa2\xba\
\x80\x8e\x44\x7e\x3f\x57\x27\x77\x1b\xa4\x96\x6a\x61\x54\x55\x55\
\x55\x55\x55\x55\x55\x55\x55\x55\x55\x95\x72\xf4\x3f\xf1\xdc\x88\
\x65\x1f\x04\x6a\xbc\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x03\x42\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x03\x09\x49\x44\x41\x54\x78\x9c\xed\xdd\x4f\x6e\xda\x40\
\x1c\x86\xe1\xef\x08\x3e\x82\x8f\xe0\x45\xe2\x64\x91\x85\x05\x54\
\x42\xc4\x55\x27\x37\xc8\x11\x38\x42\x6f\xe0\xdc\xa0\x37\xc8\xf4\
\x16\x48\x6c\x2c\x75\x55\xa9\x8b\x1e\xc1\x51\x01\xa1\x22\xb5\xee\
\x06\x57\x08\x81\x33\x33\x06\xcf\x78\xe6\x7b\xa5\x59\x12\xd9\x3c\
\xe2\x8f\x3d\x3f\x14\x80\x31\xc6\x18\x63\x8c\x31\xc6\x18\x63\xcc\
\xfb\x84\x14\x51\x2e\x45\x66\xfb\x38\x18\xfe\x63\xc8\x5c\x8a\x7a\
\x26\xc5\xb3\xed\xe3\x09\xba\x43\x8c\x66\x11\xc5\x52\xa7\x30\x88\
\x62\xa9\x36\x0c\xa2\x68\xf4\x28\xc5\x3c\x97\xa2\xec\xf2\x37\x54\
\x30\x72\x29\xaa\x99\x14\xc9\xa5\x8e\xdb\xcb\xf6\x18\xcd\x13\x66\
\x84\x42\x8c\x0b\x75\x84\x61\x84\x62\x05\xa3\x58\xee\x92\x62\xb1\
\xcd\x86\xb2\x54\xce\xe9\x0c\x86\x16\x8a\xb5\x57\xc6\xcb\x72\xf7\
\xe5\x65\xf9\xbb\x1e\xca\x2a\x16\xdb\x58\xe1\x89\xac\xde\x79\x22\
\x5b\x51\xac\xbe\x4d\xf9\x06\x02\x00\x53\x29\x62\x53\x14\xeb\x9f\
\x19\x3e\x82\x00\x66\x28\xd6\x31\x00\x7f\x41\x00\x3d\x14\x27\x30\
\x00\xbf\x41\x00\x35\x94\x8f\x52\x7c\x73\x02\x03\xf0\x1f\x04\x50\
\x7e\xa5\xd8\xc7\x00\xc2\x00\x01\x3a\xa1\xf4\x7b\xd1\x17\x0a\x08\
\x60\x84\xd2\xff\x15\x78\x51\xd6\x51\xb1\xd8\xc6\x83\x59\x65\x1d\
\x75\x39\xdf\xa9\x14\xf1\xa3\x14\x6f\x4e\x62\x84\xda\xf4\xf5\xe9\
\x21\x97\xe2\x6f\x0b\xc6\x9f\xfc\xeb\xa7\x99\xed\xe3\x0c\xaa\x16\
\x14\x62\xd8\xea\x04\x0a\x31\x6c\x77\x80\x42\x0c\x57\x9a\xbe\x3e\
\x3d\x10\x83\x31\xe6\x71\x1c\x94\x73\x28\x0e\xca\x39\x14\x07\xe5\
\x1c\x8a\x83\x72\x0e\xc5\x41\xb9\x0b\xc5\x41\x39\x87\xe2\xa0\x9c\
\x43\x0d\x76\x50\xce\xc7\x06\x3d\x28\xe7\x5b\x83\x1f\x94\xb3\xbe\
\x03\xa8\xb9\x54\xce\x89\x83\x72\x0e\xee\xa9\x73\x50\xce\x31\x10\
\x80\x83\x72\xce\x81\x00\x1c\x94\x73\x0e\x04\xe0\xa0\x9c\x73\x20\
\x00\x07\xe5\x9c\x03\x01\x06\x32\x28\x17\x12\x08\x30\x80\x41\xb9\
\xd0\x40\x00\xc7\x07\xe5\x8a\xb2\x8e\x8a\xe5\x2e\x19\xcc\xea\x38\
\x4a\xda\xc4\x41\x39\x07\xe3\xa0\x9c\x83\x71\x50\xce\xc1\x38\x28\
\xc7\x18\x63\x8c\x31\xc6\x3c\xaa\xca\x12\xe3\xab\xfc\x2e\x8f\x65\
\x27\x5a\x8f\xee\xe6\xeb\x71\x5a\xad\x3f\xdc\x6a\xdf\x30\xac\xb2\
\x24\x5a\x8f\xd3\x72\x35\x4a\x3f\x5f\xe1\xd0\xc2\x6b\x8f\x51\xef\
\x97\x16\x4a\x83\xd1\x3c\x9e\x28\x1d\x3b\xc2\xd0\x42\x39\xc6\x20\
\x4a\xc7\xce\x60\x28\xa1\x9c\xc3\x20\x8a\x61\x55\x76\x1f\xb7\x60\
\xb4\xa2\xbc\x87\xd1\xac\x2a\xbb\x8f\x2d\x9c\xda\x70\x5b\x4d\x6e\
\x32\x5d\x14\x55\x8c\x5f\xa3\xdb\x67\x8b\xa7\x36\xdc\x74\x50\x9c\
\xc0\x28\xca\x3a\x1a\xd2\x32\x39\x47\x45\x94\xb7\xf5\x38\xfd\x6e\
\xfd\x95\x11\xca\x9e\xba\x22\x8a\xfd\xb7\xa9\x50\x40\x80\x6e\x28\
\xbd\x7d\x66\x84\x04\x02\x98\xa1\xf4\xfa\x01\x1e\x1a\x08\xa0\x87\
\xd2\xfb\xb7\xa9\x10\x41\xaa\x2c\x89\x36\x93\xf4\xc7\x7b\x18\x9b\
\x71\xba\x31\xb9\xf7\xd5\xa9\xd0\x40\x54\xbf\xda\x9e\xbb\x4e\xb9\
\x7a\x21\x81\x18\x60\xf4\x8f\x12\x0a\x48\x07\x8c\x66\xfd\xec\x65\
\x4f\x64\xff\xdb\x3d\xeb\xff\x86\x42\x79\x19\x5c\x1c\x5e\x00\xa3\
\x5f\x14\x9f\xd3\xb9\x1d\xa2\xf8\xed\x8b\x28\xa6\x99\xdc\x9b\x22\
\xca\x95\x52\x7e\x9b\x1a\xdd\xcd\x8f\x1f\x4b\x94\x2b\xa4\x02\xd2\
\xb6\xc9\xa4\x80\x52\x72\x3f\x44\xb3\x36\x14\x95\x1d\xbf\x16\x14\
\x62\x98\x76\x0a\x45\x67\xfb\xf5\x04\x0a\x31\xba\x76\x88\x62\xb2\
\x17\x7e\x80\x42\x8c\x4b\x55\x65\x49\xd4\xe5\x46\xe1\x6a\x72\x93\
\x11\x83\x31\xc6\x18\x63\x8c\x31\xc6\x18\xf3\xad\x7f\x3c\x1f\xc2\
\xcd\x39\xbd\xb0\xe3\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
\x00\x00\x08\x5e\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x08\x25\x49\x44\x41\x54\x78\x9c\xed\xdd\xfb\x53\x1b\xe5\
\x1a\x07\x70\xff\x04\xfe\x83\xfa\x27\xf8\x27\x64\xb2\x45\xd1\x41\
\xa7\x8c\x4e\x87\x11\x1d\xf0\xd4\x71\xa4\x68\x07\x4f\x6d\xe9\xe1\
\x62\x13\x04\xdb\x70\x8b\x50\x48\xa0\x81\xa6\x27\x38\x14\xbc\x44\
\x7a\x39\x72\x29\x4d\x4a\x4b\x9a\x6b\x81\x23\xb4\x34\xb4\x16\x90\
\x18\x1a\x2e\x09\x94\x4b\xf4\x60\x7d\xce\x0f\x99\x5d\x77\x93\x4d\
\xb2\xd9\x5d\xf2\x6e\xb2\xef\x33\xf3\xfd\x25\x40\x76\xdf\xe7\x93\
\xf7\x79\x13\x3a\x53\x5e\x7a\x09\x17\x2e\x5c\x1c\xea\x4c\xa3\x41\
\x71\x46\x63\x50\xe1\x1c\x70\x1a\x0d\x0a\x6e\x20\x1a\x83\xea\x4c\
\x63\x37\xe0\x1c\x70\x34\x06\x15\x06\x91\x52\xf8\x80\xe0\x12\xbf\
\x30\x88\xc4\x0a\x83\x48\xac\x30\x88\xc4\x0a\x83\x48\xac\x30\x88\
\xc4\x0a\x83\x48\xac\x0e\x0c\x64\x3d\x0c\x30\x1f\xc2\x89\xce\x7a\
\x18\x11\xc8\x7c\x08\xe0\x3f\x4f\x71\xa2\x33\x1f\xc2\x20\x92\x0a\
\x06\x91\x58\x30\x88\xc4\x82\x41\x24\x16\x0c\x22\xb1\x60\x10\x89\
\x05\x83\x48\x2c\x18\x44\x62\xc1\x20\x12\x0b\x06\x91\x58\x30\x88\
\xc4\x82\x41\x24\x96\x8c\x04\x19\x5b\x06\xb0\x05\x00\x9c\x6b\x00\
\xee\x0d\x00\xfb\x2a\xc0\xc8\x92\xb0\x46\x58\x7c\xcc\xe7\xb4\x05\
\x22\x8f\x61\x90\x04\x19\x5a\x8c\x34\xcb\xbb\xc7\x9e\xc9\x60\xea\
\x30\x23\x4b\x91\x9f\x8b\xf7\x9c\xee\x8d\xc8\x75\x65\x0f\x62\xf1\
\x01\xfc\xbc\x05\x30\xb7\x13\xc9\x64\x10\x60\x76\x3b\x7e\xe3\xc8\
\xcc\xed\x44\x76\x10\x57\x0c\xae\xcf\x39\xbd\xf9\xf7\xf7\x4e\x06\
\x01\xee\xac\x64\x29\xc8\xd0\x62\xa4\x81\xf4\x26\xda\x02\xc9\x9b\
\x94\x28\xb3\xdb\xcc\x5d\x35\xb2\x14\x01\x8e\xde\x3d\xd3\x9b\xc2\
\xae\x63\x0b\xc4\x5f\x03\xfd\xfa\x63\xcb\xf1\xc7\x5f\xf4\xd7\x90\
\x82\x8c\x2d\x33\x5f\xa1\xb3\xdb\x89\x47\x52\xaa\xcd\x72\xae\x45\
\x5e\xdd\xd1\x5f\x73\xae\x09\x47\x27\x63\x5f\x8d\xec\xe4\xe8\x1d\
\x4a\xdf\xe1\xf4\xef\x9f\xde\x04\xb8\xed\x8f\x5d\xfb\xdc\x4e\xe4\
\x67\x90\x82\xb0\x35\x4b\x2e\x61\x5b\xfb\xdc\x0e\xc0\xfc\x26\x2a\
\x90\xad\xf4\x37\x21\x13\x32\xff\x1c\x15\xc8\x73\xf4\x8b\x97\x64\
\xb6\x11\x81\x78\x39\xbc\xbb\x91\x65\x30\x88\xc4\x82\x41\x24\x96\
\x6c\x04\x19\x5f\xde\x87\xfb\xeb\x2f\xd0\x37\x57\xce\x20\x33\x5b\
\x7f\xc1\xf8\xf2\x3e\x68\x86\x36\xa1\x62\x60\x03\x2a\x06\x36\xc0\
\xe4\xde\x45\xdf\x60\xb9\x81\xcc\x6c\xfd\x05\xfd\x53\x7b\xf0\xe5\
\xf5\x10\x05\x41\x8f\x66\x68\x13\xec\xcf\xfe\x14\xad\x61\x96\xc5\
\x30\xf4\xba\x42\x50\x6b\xf6\x83\x76\x64\x15\x3c\x6b\xfb\x18\xc4\
\xbb\x17\x19\x4b\x26\xf7\x2e\xd4\x98\x83\xac\x10\xd1\xe9\xa8\x2d\
\x82\xbe\xae\x93\xe0\xf2\xce\x0a\x42\x38\x71\x69\x31\x26\x3d\xb6\
\xa0\x3c\x41\xd8\xc6\x12\xd7\x34\x7d\xf1\x3e\xe8\x2a\x0f\x83\xae\
\xf2\x30\x5c\xac\x3d\x02\x7d\x5d\x27\xe1\xde\x8c\x93\x37\x02\x5b\
\x7a\x5d\x21\x79\x80\x90\x63\x89\x0f\x04\x99\x4f\xb4\xa3\x14\x08\
\x3d\x5d\xaa\x37\xa1\xb7\xed\x63\xb8\xe3\xb9\x0d\x36\xff\x1f\x29\
\x23\x44\xa7\x6a\xc0\x07\x57\x1f\x6c\x67\x1e\xc8\x14\x87\x77\x41\
\xa9\x8e\xa5\x44\xf9\xa8\xd5\x05\x1d\x95\xaf\xb2\xa2\x90\x69\xaf\
\x7a\x1d\x5a\xd4\xff\x80\x8a\x8e\x9b\xbc\x41\xc8\xd4\x9a\xfd\x60\
\xf3\xff\x91\x32\xc8\x7f\x83\x2f\xd0\x80\x38\x03\xf1\x0f\x43\xbe\
\x63\x29\x51\x4a\x3b\xa6\xa1\xb1\xe6\xbd\x84\x20\xf4\xb4\xa8\x3f\
\x80\xba\x86\xf3\x70\x4a\x7f\x57\x10\x8c\xce\xba\x9e\xd2\xc1\xef\
\x0c\xec\x4b\x03\x44\x8c\xb1\x94\x28\xc7\xf5\x33\xf0\xd1\xb9\xc1\
\x84\x08\xc6\xda\x37\x58\x1f\x6f\x3d\x5b\x28\x18\x87\xeb\xc1\x8f\
\x1c\xe4\xfe\xfa\x0b\xd1\xc6\x52\x32\x90\x12\x8d\x35\xee\xd8\xea\
\x6f\x38\x02\x21\xa7\x1a\x42\x4e\x35\xb8\x07\x4a\xc1\xac\x7d\x27\
\x2e\xce\xb9\xc6\x3a\xde\x30\xdf\x4e\x6d\x49\x13\x64\xf4\x97\xdf\
\x41\x37\xbe\x7d\xe0\x10\x64\xca\xba\x1e\x42\x89\xc6\xca\x3a\xb6\
\x4c\x75\xf9\x10\x98\xa8\x81\xdd\xc9\x7a\x46\xe2\xe1\xf4\xa8\x72\
\xe1\x9a\xee\x4d\x41\xe7\x4b\xbc\x83\x1f\x19\xc8\x65\xe7\x4e\x5a\
\x20\xc8\x9c\xe8\x79\x02\x25\x1a\x6b\xcc\xd8\xea\x3e\x9b\x07\xcb\
\x96\x7f\xc5\x60\xc4\xc3\x31\xd5\xe5\x43\x7f\xc3\xab\x30\x6a\xc8\
\x13\x84\x12\xef\xe0\x97\x0d\x48\xb9\x71\x01\x4a\x34\x56\xc6\xd8\
\xd2\x57\xe5\xc2\xdc\x8d\xcf\x92\x62\xd0\xe3\x1f\x3f\x0d\x23\x17\
\x5f\x87\x51\x43\x9e\x28\x28\xd1\x07\xbf\x2c\x41\xe8\x63\x8b\x6d\
\x54\x45\x67\xcb\xad\x06\xff\xf8\x69\x98\xbd\x7a\x0c\x2c\x97\xf2\
\x29\x0c\x31\x51\xc8\x83\x5f\x36\x20\x9f\xfd\xfb\x57\x0a\xe4\xd3\
\xfa\x5e\x0a\xc4\x3d\x50\x9a\x14\xe4\xf1\xf0\x71\x06\x80\xe5\xf2\
\x5b\xb0\xe4\xaa\x02\xf7\x60\xb1\xe8\x28\x86\xbb\x1b\xf2\x00\x39\
\xd9\xeb\xa3\x40\x4a\x34\x56\xd0\x56\xbf\x0d\xba\xca\xc3\x60\xd6\
\xbe\x93\x14\xc4\xf3\x5d\x11\xd5\x78\xf7\x60\x31\x04\xbd\x4d\x00\
\xc1\x5e\x80\x60\xaf\xe8\x28\xc8\x40\x2e\xd9\xb7\xd3\x0a\x72\xfa\
\xca\x2a\x03\xe4\x0b\x75\x25\xa7\xb1\xb5\x3a\x51\x49\x35\xfc\xc9\
\xc4\x29\x0a\x82\x1e\x31\x51\x2e\xde\x41\x04\xd2\x79\xf7\x79\x9a\
\x41\x9e\x31\x40\x8e\x7f\xd5\x9f\x74\x6c\xad\x4e\x54\x52\x67\x86\
\xe5\xf2\x5b\xb0\x1f\xe8\x61\x05\x89\x46\xf9\xde\x70\x8c\x37\xc8\
\x85\x5b\x6b\x68\x40\x46\x9e\x84\xa1\xcb\xb9\x99\xd6\x9c\xed\xb3\
\x33\x72\x41\x15\xf9\x7c\x71\xa3\xbd\x90\x71\x80\x2f\x5b\xfe\x19\
\x73\x80\x2f\xb9\xaa\xe2\x62\xb0\xa1\xdc\xf8\xe6\x63\x5e\xf7\x38\
\xf2\x24\xf1\xff\xad\x91\x55\x20\xea\x7e\x17\x03\xa4\xa9\xb9\x26\
\x66\x6c\x2d\xdc\x3c\x11\xf3\x2e\xea\x91\xb5\x3c\x29\x86\x58\x28\
\xb2\x06\xa9\x37\xf4\xc5\x8c\xad\x69\x73\x31\xe3\x00\x7f\xfe\xb4\
\x8d\x33\x86\x18\x28\xb2\x02\x69\x34\x7b\xe2\x8e\xad\xfe\x86\x23\
\x10\x74\xd4\x50\x63\xca\x3d\x58\x9c\x32\x84\x18\x28\xc8\x40\xc6\
\x17\xd2\x0f\xd2\x72\x75\x32\x06\xa4\xa9\xb9\x06\x3a\xab\x0f\x83\
\xb1\x36\x17\x86\xbb\xde\xa0\x9a\x18\x98\x3d\x27\x08\x84\x2f\x8a\
\xe5\x29\x22\x10\x8f\x2f\xfd\x20\xcd\x2c\x20\x75\xa6\x61\xb8\xa6\
\xcb\xe7\x7d\x66\x88\x8d\xe2\xf1\xc9\x08\xe4\xc2\x4f\x3f\xc7\x80\
\xb0\xa1\x08\x1d\x57\x42\x50\x64\x05\xd2\x31\xfc\x80\x15\x44\x4a\
\x28\xb2\x02\x69\x4f\x00\x22\x15\x14\x59\x81\xe8\x6f\xce\x25\x04\
\x91\x02\x8a\xbc\x40\xc6\xe6\x93\x82\xa0\x46\x91\x15\xc8\xc5\xdb\
\x0b\x9c\x40\x50\xa2\x20\x03\xf9\x6d\x6b\x1f\x3c\xbe\x70\x5a\x33\
\x36\xbb\x02\xa6\x5b\x33\x9c\xd3\x3b\x74\x07\x6e\xe8\xd3\x83\x72\
\x6b\xa0\x14\x3c\xbe\x30\xfc\xb6\x85\xe8\x1f\xa8\x50\xd4\xe6\x4e\
\x18\x6c\xb3\x4f\x53\xca\xc4\xf4\x03\x18\xeb\xfb\x30\x2d\x28\x76\
\xf3\xf1\xa4\x6b\xc8\x2a\x90\xdf\xff\xb7\x9f\x32\x88\xd4\x50\xb2\
\x0a\x64\xff\xcf\x17\xbc\x40\xa4\x84\x82\x41\x24\x86\x92\x55\x20\
\x00\x20\x08\x24\xdd\x28\xc3\x57\x4e\xc6\xdc\x7f\xd6\x81\x38\xe7\
\x16\x33\x02\xc5\xdc\x59\x08\xaa\xe6\x56\x38\xd7\x76\x81\x71\xff\
\x59\x07\xe2\xf1\xfe\x2a\x18\xe4\xa0\x51\xfc\x8f\x4d\x50\xfb\x75\
\x3b\xd5\x7c\x3a\x8a\x20\x90\x6c\x4f\x75\x93\x1e\xcc\x9d\x85\xa2\
\xa2\xf8\x1f\x9b\x40\xa3\xef\x89\xb9\x96\xaa\xb9\x8d\xf9\x18\x06\
\x39\x78\x94\x68\x0c\xd3\xf5\xbb\xd0\xfd\xc3\x2d\xf6\x6b\x73\x06\
\x91\xe1\x9f\x5e\x55\x35\xb6\x9d\xbf\xde\x55\xb0\x22\x04\x85\x0d\
\xc3\xbb\xb2\x0b\xde\x95\x5d\x06\x4a\xd5\xf9\xce\xf5\xc8\x8b\x9e\
\xe3\x9f\x5e\x95\x6b\x59\x8d\x8a\x9c\x51\xc3\x6b\x53\x7c\x50\x12\
\x61\xb0\xa1\xd4\xab\x5a\xf6\x50\xaf\x37\x23\x8a\x0f\x0a\x17\x0c\
\x8c\x22\xa0\x52\x41\x49\x05\x03\xa3\x08\x28\x2e\x28\x7c\x30\x30\
\x8a\x80\x4a\x84\x22\x04\x83\x8c\xae\x7f\x18\xa3\xa4\x5a\x6c\x28\
\x62\x60\xd8\x1f\xfa\xa0\x4e\x7f\x85\x7a\x0e\x43\xd9\x29\x70\x10\
\xc4\x14\xea\xf5\x66\x44\xd1\x51\xcc\x9d\x85\x8c\x4f\xe0\x62\x60\
\x98\xce\xa8\xc1\x4e\x10\x60\x27\x08\x70\x10\xc4\x8f\xa8\xd7\x9b\
\x11\x65\x35\x2a\x72\xbe\xd3\xbf\xfb\x48\xd5\xdc\x2a\x2a\xc6\x60\
\xb3\x0e\xc2\x5a\x2d\x3c\x2c\x2a\xa2\x50\xee\x11\xc4\x11\xd4\xeb\
\xcd\x88\xea\x6e\x2f\x3b\xf4\x65\x8b\x76\x8f\x6c\xe6\xd7\xa6\x6b\
\xa2\x60\x84\xb5\x5a\xd8\x69\x68\x80\xfb\xf9\xf9\xe4\x2e\x09\x59\
\x15\x8a\x1c\xd4\xeb\xcd\x88\xe2\x8b\x92\x08\x83\xcc\x5a\x75\xf5\
\xdf\xbb\x44\xa9\x2c\x47\xbd\xd6\x8c\xa9\xee\xf6\xb2\x43\xaa\xe6\
\xd6\x0d\xae\x28\x5c\x30\xc8\xd0\x76\x09\x3e\x4b\x52\xa9\x72\xad\
\x31\xa7\xa2\xb1\x7b\x2a\x19\x4a\x2a\x18\xf4\xb3\xc4\x41\x10\x21\
\xd4\x6b\xcc\xb8\x2a\xd7\x1a\x73\xaa\xce\x77\x3e\x8a\x87\x92\x2a\
\x46\x58\xab\x85\x85\xd2\x52\x6a\x6c\xa1\x5e\x5f\x46\x56\xf7\xb1\
\xb2\x43\xf5\xaa\x96\x98\x33\x85\x0f\x46\xd4\x0e\xc1\x9f\x49\xf8\
\x16\x1b\x0a\x1f\x0c\xfa\x19\x62\x27\x08\x23\xea\x75\x65\x74\x45\
\xa3\xf0\xc1\xa0\xbf\xcb\xb2\x29\x95\x2a\xd4\x6b\xca\xf8\x8a\x46\
\xb9\xfc\x79\x0d\x67\x8c\x9d\x86\x06\x98\x39\x7a\x94\x3a\xd0\x6d\
\xb9\xb9\xaf\xa0\x5e\x4f\x56\x14\x89\x62\x28\x3b\x05\x76\x82\x80\
\x99\xa3\x47\x39\x61\x78\x8b\x8b\xf1\x67\x90\x83\x2a\xab\x42\x91\
\xe3\x20\x88\x29\xb2\xc1\xae\xbc\x3c\x78\x56\x51\xc1\x8a\x11\x54\
\xab\x61\xba\xa0\x80\xc2\xb0\x13\x84\x15\xf5\xfd\x67\x65\x59\x15\
\x8a\x1c\x3b\x41\x18\x69\x8d\x86\xfb\xf9\xf9\xf0\xb0\xa8\x08\x16\
\x4a\x4b\xc1\x5b\x5c\x0c\xd3\x05\x05\xe0\xca\xcb\x63\x60\xd8\x15\
\x8a\x97\x51\xdf\x7b\x56\x97\x5d\xa9\x2c\x71\x10\x44\x88\x0e\x13\
\x1d\x07\x41\x84\xee\x29\x95\xe5\xf8\xf7\x57\x69\x2a\xab\x42\x91\
\x73\x4f\xa9\x2c\xb7\x13\x84\x91\x1c\x65\x0e\x82\x98\x72\x10\xc4\
\x8f\x36\xa5\x52\x15\x6f\x57\xfc\x1f\xed\x45\x69\x64\xa7\xc0\x7c\
\xeb\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x00\xea\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\
\x00\x00\x00\xb1\x49\x44\x41\x54\x78\x9c\xed\xd4\xc1\x09\x80\x30\
\x14\x44\x41\x2b\xb5\x0c\xab\xb1\x12\xc1\x5e\xec\x44\x6f\x22\xa2\
\x17\x31\x92\x35\xb3\xf0\xee\x3f\x73\x48\xd7\x99\x99\xd9\xbe\x7e\
\x1c\x56\xdd\x07\xf0\x2b\xc0\x69\x99\x75\x08\x20\x40\x80\xd1\x01\
\x04\x08\x30\x3a\x80\x00\x01\x46\x07\x10\x20\xc0\x47\x07\xd7\x76\
\x0f\x40\x80\x00\x01\x02\x04\x08\x10\x20\x40\x80\x00\x01\x02\x04\
\x08\x10\x20\x40\x80\x00\x01\x02\x04\x08\xb0\x6d\xc0\xf3\x83\x6b\
\x0f\x20\x40\x80\x00\xdf\x04\x2c\xf5\x69\xd7\x76\x0f\x40\x80\x00\
\x01\x02\x04\x08\x10\x20\x40\x80\x00\x01\x02\x04\x08\x10\x20\x40\
\x80\x00\x01\x02\x04\x08\x10\xe0\x9f\x03\x08\x10\x60\x74\x00\x01\
\x02\x8c\x0e\x20\x40\x80\xd1\x01\x04\x18\x06\xa8\xeb\x00\x96\x06\
\x34\x33\x6b\x68\x1b\xca\x72\x68\xc1\x35\xba\x39\x8a\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\x0e\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x50\x00\x00\x00\x50\x08\x06\x00\x00\x00\x8e\x11\xf2\xad\
\x00\x00\x03\xd5\x49\x44\x41\x54\x78\x9c\xed\x9b\xbd\x4a\x23\x51\
\x14\x80\x7d\x84\x3c\xc2\x3e\x82\x8f\x10\x62\x9b\x99\x8c\x16\x82\
\x45\x40\x17\x2c\x84\x14\x03\x2b\x38\x73\x8d\x32\x6c\x90\x90\x5d\
\x44\xb6\x58\x48\x11\xd0\x42\x58\x8b\xe0\x14\x93\x3b\x12\x58\x88\
\x45\x30\x65\x8a\x40\x0a\x0b\xc5\x22\x85\x10\xd0\x27\xf0\x6c\x61\
\xce\x38\x64\xfd\xc9\xe4\xce\xe4\x3a\x93\xf3\xc1\x05\xcb\xb9\x1f\
\xe7\xf7\x12\x17\x16\x08\x82\x20\x08\x82\x20\xfe\xe3\x2a\x93\xd1\
\xaf\x32\x19\x5d\xf6\x77\xc4\x92\xab\x4c\x46\xef\x2c\x2d\x41\x67\
\x69\x09\x48\x62\x40\xfc\xf2\x48\x62\x40\x5e\x93\x47\x12\x27\xe4\
\x3d\x79\x24\xf1\x03\x26\x91\x47\x12\xdf\x20\x88\x3c\x92\x38\xc6\
\x34\xf2\x48\xe2\x08\x11\x79\x73\x2f\x31\x0c\x79\x73\x2b\x31\x4c\
\x79\x73\x27\x31\x0a\x79\x73\x23\x31\x4a\x79\x89\x97\x38\x0b\x79\
\x89\x97\x38\x09\x1f\xc9\x91\xfd\x7d\x9f\x1e\x12\x28\x08\x09\x14\
\x84\x04\x0a\x42\x02\x05\x21\x81\x82\x90\x40\x41\x48\xa0\x20\x24\
\x50\x10\x12\x28\x08\x09\x14\x84\x04\x0a\x42\x02\x05\x21\x81\x82\
\x90\x40\x41\x48\xa0\x20\x24\x50\x10\x12\x38\x25\x9a\x65\xa7\xb2\
\x86\x93\x2e\x7d\x2d\x41\xa1\x50\x85\x4d\xfd\x04\x56\x77\xce\x61\
\x75\xe7\x1c\xf2\xdf\xfe\xc0\xf6\xd6\x2f\xd8\xdd\xfc\x01\x0a\xe3\
\xeb\xb2\xbf\xf5\x53\x91\x35\x9c\xb4\x62\xba\xad\x1c\x73\x1f\x55\
\xc6\x61\xd2\x93\x63\xdc\x9e\x6b\x99\x59\xc3\x49\xe7\x18\xb7\x51\
\xc8\xf2\x5e\xf3\x29\x5f\x69\x43\xa1\xda\x83\xe2\xd9\x1d\x58\xf5\
\x01\x94\x9d\x21\x94\x9d\x21\x58\xf5\x01\x18\xa7\x37\xa0\xd7\xfa\
\x90\xaf\xb4\xc7\x65\x76\x35\xe3\xe2\x8b\xec\xfb\xcc\x14\x95\x39\
\x16\x46\xdc\xf2\x5e\xf3\x49\xaf\xf5\xa1\xec\x0c\xe1\xe7\xc5\xe3\
\xc4\x47\xaf\xf5\xc7\x44\x3a\x96\xec\x7b\x45\x8e\x66\xd9\xa9\x1c\
\xe3\xc7\x28\xae\x50\xed\x05\x16\xf7\x9e\x48\xc5\x74\x5b\xb2\xef\
\x18\x19\x23\x79\x36\xca\x33\x4e\x6f\x84\xc4\xf9\x8f\x55\x1f\xc0\
\xca\x7e\xd3\x4b\x69\xd9\x77\x8d\x04\x94\xb7\x76\x70\x09\x25\xfb\
\x3e\x34\x79\x78\xca\xce\x10\xd6\x0e\x2e\x93\x19\x89\x8a\xd9\xd0\
\x31\xf2\xac\xfa\x20\x74\x79\x7e\x89\x18\x89\x39\xc6\x8f\x65\xdf\
\x3b\x14\xb2\x86\x93\x8e\x22\x6d\xdf\x4b\x67\xac\x89\x1a\xe3\x8b\
\xb2\xef\x2f\x8c\x62\xba\x2d\x95\x71\x28\x54\x7b\x91\xcb\xc3\x53\
\xa8\xf6\x92\x51\x0f\x15\xd3\xd5\x30\xfa\x44\xbb\x6d\xd0\x54\x5e\
\xfd\xfe\x17\xeb\xa1\x26\xdb\xc3\xd4\x60\xe3\x98\x65\xf4\xe1\xd9\
\x3e\xb9\xf6\x36\x16\xd9\x1e\xa6\xe2\x79\x6c\x79\x1e\x96\xa3\xe8\
\xba\x93\x44\xa1\x57\x0b\xe3\xb8\xa9\x60\xfa\xe6\x2b\xed\x99\xcb\
\xc3\xb3\x71\xd8\x79\x4e\xe3\x38\xee\xcc\xb8\x71\xc8\x48\xdf\x44\
\xa4\x31\xd6\xbf\x59\x8c\x2e\x6f\x9d\xe2\xd9\x5d\x7c\x07\x6b\x95\
\xf1\x5b\x95\x71\x88\x72\x70\x0e\x50\x07\x1f\x64\xfb\x08\x0c\x0a\
\x94\xd1\x40\x92\x22\xf0\x41\x65\x5c\xf8\xb5\x45\xf4\x60\x27\x96\
\xed\x23\x30\x2a\xe3\x5d\xd9\x29\x5c\xb2\xef\x51\xe0\xad\x6c\x1f\
\x81\xc1\x15\xae\x78\x76\x27\x4d\xa0\x6f\x2f\x8e\xdf\x4a\xa7\xee\
\xba\x47\x2a\xe3\xa0\xd7\xfa\xd2\x04\xfa\xc6\x98\xf8\xbd\xcc\x28\
\x8c\xaf\xab\x8c\xc3\xc6\x61\x47\x9a\x40\x6f\x90\x36\x1b\xf1\xfb\
\xc7\x1b\xcd\xb2\x53\x58\xc0\x65\x34\x12\xff\xdb\x60\x2c\x57\xb9\
\x85\x85\x97\x61\x7a\xfb\xe4\x5a\x5a\xfa\xc6\xb2\xfe\x21\xb8\x0f\
\xaf\xec\x37\x67\x1e\x7d\xde\x73\x56\x1c\xf7\x60\x3f\x38\xce\xc8\
\x78\x50\x8d\xe5\x0a\x37\x8e\xc6\xf8\x22\xd6\xc2\x59\xcc\x84\x56\
\x7d\x00\xcb\x7b\xcd\x27\x95\x71\xc8\x1a\x4e\x5a\xf6\xfd\x43\x01\
\x5f\x66\x56\xf6\x9b\x91\xae\x76\xfe\xd4\x55\x77\xdd\x23\xd9\xf7\
\x0e\x15\x1c\xac\xd7\x0e\x2e\x23\xe9\xca\x65\x67\xe8\x8d\x2d\x2a\
\xe3\x5d\xcd\xb2\x53\xb2\xef\x1c\x2a\xa3\xb1\xa6\x8b\x91\x18\x66\
\x3a\x5b\xf5\xc1\x4b\xe4\x31\x7e\x9b\x38\x79\x88\x66\xd9\x29\x8c\
\x44\x95\x71\xd8\xfa\xdd\x15\x8e\xba\x42\xb5\xe7\xd5\x3c\xc5\x74\
\x5b\xb1\x9d\xf9\x82\x80\x6b\x1e\x46\x63\xd0\x39\xb1\xec\x0c\xc1\
\x38\xbd\xf1\x47\x1d\xa8\xbb\xee\x51\x62\x23\xef\x35\x46\xdd\xb9\
\xeb\xff\x85\xd5\xc6\x61\x07\xf4\x5a\x1f\x8a\x67\x77\x5e\xb3\x29\
\x3b\x43\x28\xd9\xf7\xde\x4f\xdc\xf2\x95\xb6\x17\x71\x18\x75\x89\
\xe9\xb6\xd3\xa0\x30\xbe\xee\x4f\xeb\xc9\x7e\x5c\xe9\x3e\x2a\xa6\
\xdb\x52\xcc\x86\x3e\x57\x51\xf7\x11\x8a\xd9\xd0\x73\x8c\x1f\x8f\
\x84\xde\xe2\x6b\xf2\xe8\xef\x6e\x8e\x71\x5b\x31\x1b\xfa\x5c\xd4\
\x39\x82\x20\x08\x82\x20\x3e\xe0\x1f\xc4\xaa\x6b\xf3\x2e\xda\x49\
\xd3\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\x06\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x64\x00\x00\x00\x64\x08\x06\x00\x00\x00\x70\xe2\x95\x54\
\x00\x00\x05\xcd\x49\x44\x41\x54\x78\x9c\xed\x9d\x41\x6b\xe3\x46\
\x14\x80\xfd\x13\xfa\x13\xfa\x13\xf6\x27\xec\x71\xe3\xc3\x06\x8a\
\xa1\x84\xd2\x4d\x28\x39\xac\x4d\x6b\xc4\x1a\x65\xd5\x06\x53\xe3\
\x40\x54\x9a\x08\xea\xb4\xac\xba\x60\x57\xd8\x6c\xb0\x29\xd8\x3d\
\xa8\xa1\x49\xa1\xca\xc5\x97\xf5\xa5\x86\x52\xf7\x62\xd8\xe2\x8b\
\x4f\x22\xdb\xe6\xe4\xd3\xeb\x49\xaa\x3c\x7a\x23\xc9\xb2\x92\x1d\
\xc9\x6f\xe0\xbb\x04\x65\x2c\xcf\xa7\x99\x79\x6f\x66\x84\x73\x39\
\x2a\x54\xa8\x50\xa1\x42\x85\xca\x66\x14\xa3\x94\xdf\x6b\x97\xf2\
\xbf\xb7\x4b\x79\x20\x22\xf3\xc6\x28\x6e\x49\x77\x21\xe3\x81\x00\
\x5f\x2e\xb5\x18\x4f\x1f\x3d\x4c\x56\x48\x71\x4b\x5a\xf5\x26\xc6\
\x7a\x65\xe3\x71\x85\x94\xf2\xb5\x64\x85\x94\xf2\xb5\x55\x85\xc0\
\xb0\xb9\xf1\x90\x10\xc1\x20\x21\x82\x41\x42\x04\x83\x84\x08\x06\
\x09\x11\x0c\x12\x22\x18\x24\x44\x30\x9c\xb6\xf8\xf2\x68\x0f\x9e\
\x34\x0f\x7c\xec\x36\x0f\x7e\xda\x33\xa4\xf7\x48\x88\x20\x42\x9e\
\x34\x0f\xe0\xe3\xa6\x7c\x4d\x42\xee\x59\x88\xfe\xdd\x33\x38\xb5\
\x5e\x2e\x21\xf7\xbf\x72\xa5\x90\x90\x7b\x16\xd2\x3f\x3f\x82\x5f\
\xff\xfe\x6d\x89\x53\xeb\x25\x09\x21\x21\x1b\x0e\x09\x11\x0c\x12\
\x22\x18\x24\x44\x30\x48\x88\x60\x90\x10\xc1\x20\x21\x82\x41\x42\
\x04\x83\x84\x08\x06\x09\x11\x0c\x12\x22\x18\x24\x44\x30\x36\x5e\
\xc8\xc2\xd2\xc1\x36\x35\xb0\x4d\x0d\x66\xbd\x3a\x4c\x3b\x55\x98\
\x0f\x54\xb0\x4d\x0d\x6e\xaf\xce\x48\xc8\x7d\x08\x98\xf5\xea\x30\
\x3c\x2d\x42\x57\xda\x8e\x74\x5f\xd6\xf1\x3e\x8c\xf5\x0a\xcc\x07\
\x2a\x09\x49\x8a\xf9\x40\x05\xeb\x78\x3f\xb2\x04\x1e\xe6\xe1\x0e\
\x4c\x3b\x55\x12\x12\x17\xdb\xd4\x60\xd4\x28\xaf\x2d\x02\x13\x33\
\xeb\xd5\x49\xc8\x2a\x4c\x5a\x0a\x57\x44\x57\xda\x86\xcb\xda\x2e\
\x8c\xf5\x0a\xcc\x7a\xf5\xa5\x79\x63\x3e\x50\x61\xd6\xab\xc3\xa4\
\xa5\x84\xf6\xaa\x51\xa3\x4c\x42\xa2\xe0\x3d\x49\xee\xa5\x2f\x17\
\x60\xd2\x52\x60\x61\xe9\x2b\xd5\x37\x1f\xa8\x30\x3c\x2d\x72\x7b\
\xcb\xaa\xf5\x85\x09\x51\x8f\x14\xf8\xac\xd9\x5d\xe2\x93\x6f\x0d\
\xf8\xe8\x45\x3d\x7d\x42\xb0\x86\xeb\x4a\xdb\xb1\x44\xb0\xd8\xa6\
\x06\xe6\xe1\x0e\x2a\x25\x49\x21\xcf\x9f\xd5\xe0\xf1\x17\x17\x28\
\x1f\x6a\x67\x7f\xa5\x46\x08\xd6\x33\xfa\x72\x21\xf1\x08\x69\xd4\
\x28\xfb\x3e\xe7\xb2\xb6\x7b\x2f\x42\x1e\x1f\x5e\xfc\x93\x0a\x21\
\xd3\x4e\x15\x7d\x72\xef\x2a\x9f\xc0\x7a\xe2\xf0\xb4\x98\x88\x90\
\x17\xdf\xb4\xe0\xe4\x97\xb7\x4b\x7c\xfa\xfd\x1f\xae\x14\xe1\x85\
\xd8\xa6\xe6\x9b\x7c\xfb\x72\x21\xd2\x10\xb5\xb0\x74\xb8\xbd\x3a\
\x03\xdb\xd4\x56\xfe\x5c\xeb\x78\xdf\xf7\x3d\xe2\xd4\x93\x39\x21\
\x6c\xc3\x74\xa5\xed\xc0\xb0\x74\x61\xe9\x30\x1f\xa8\x60\x1e\xee\
\xf8\x44\x5a\xc7\xfb\x6e\xaf\x9a\xb4\x14\x30\x0f\x77\x5c\xd8\x1c\
\x64\x61\xe9\xd0\x97\x0b\x89\x0d\x5d\x99\x10\x32\x1f\xa8\xbe\xba\
\xc6\x7a\x25\xb0\x37\x61\x4f\x36\x2b\xd4\x49\x26\xbd\x7f\x9f\xb4\
\x14\x5f\x7d\xd8\x50\x19\xb7\x97\x64\x42\xc8\x65\x6d\x37\xf2\x50\
\x75\x7b\x75\xe6\x7b\xa2\x79\xf4\xe5\x82\xaf\x6e\x5e\x96\xce\xd6\
\x19\x37\x3f\x49\xbd\x10\xdb\xd4\x22\xf7\x8e\x85\xa5\x73\x43\xd6\
\x51\xa3\xec\x2e\x2e\xf2\xf2\x8d\x76\x29\xcf\x1d\x06\xd9\xa8\xab\
\x2b\x6d\x6f\xa6\x90\x49\x4b\xf1\x35\x04\xaf\x77\xb0\xd7\x3a\xe3\
\x3d\x76\xfd\xac\x57\x47\xef\x91\x17\xb1\x61\xd7\xc7\x89\xee\x52\
\x2f\x84\x1d\xe3\xad\xe3\x7d\x6e\xef\x60\x87\x95\xb0\x0c\x1b\x1b\
\xda\x78\x8d\xbc\xb0\x74\xdf\xb5\x71\x72\x9f\x54\x0b\x59\x58\xba\
\x2f\x42\xe2\x0d\x57\x58\xef\x08\x9b\x78\xb1\xe4\x2f\xe8\x7a\xf6\
\x5e\xe2\xac\x0a\xa7\x5a\x08\x36\x7f\xf0\x9e\x60\xf6\x69\xe7\xf5\
\x24\x2f\x6c\xd6\xdf\x97\x0b\x81\xd7\xb3\xf3\x13\x16\x91\x65\x5a\
\x08\x16\xee\x62\x42\xb0\xe1\x24\xca\xd2\x39\xdb\xab\xc2\xd6\xab\
\x58\x21\x41\xa1\x77\x26\x85\x60\xf1\xff\xba\x3d\x29\x48\x48\x58\
\xc2\xc7\xf6\xc2\x38\xfb\x25\xa9\x16\xc2\x36\x18\x6f\x48\xc1\x22\
\xa0\x28\xf5\xb3\xe1\x6f\xd8\x3a\x15\xfb\x19\x1b\x37\xa9\x63\x21\
\xef\x3a\xe2\x58\xd8\x08\x2e\x28\xd9\xbb\xbd\x3a\x8b\xd5\x0b\x33\
\x25\x24\xea\x93\xcf\x46\x4b\x51\xf7\x2e\xd8\x21\x28\x68\x4e\x60\
\xef\x25\xaa\xf4\xcc\x0b\xc1\xf2\x0a\x36\x5a\x8a\x92\x45\x63\x75\
\x07\x45\x4d\xec\xf0\x16\x77\x81\x31\xd5\x42\xb0\xc9\x1a\xcb\x2d\
\xa2\x8a\xf3\x82\x2d\x3e\x06\x2d\xc9\xb0\x39\x48\xdc\x0d\xb1\x54\
\x0b\xc1\xc2\x59\xec\x29\xc6\xc4\x05\x25\x6d\xd8\xde\x4a\xbb\xc4\
\xcf\x5d\xd8\x39\x2a\x4a\x8e\x93\x49\x21\x30\x8c\x96\xf0\x61\xcb\
\x26\xbc\x35\x2f\xef\x6a\x30\xb6\xc7\xc2\x5e\x3f\xed\x54\x13\xeb\
\x1d\x99\x10\x12\x75\x95\x15\xcb\x59\xfa\x72\xc1\xcd\x15\x6c\x53\
\x83\x49\x4b\x71\x65\x38\xfb\xf0\xbc\x5e\x62\x9b\x1a\x8c\xf5\x0a\
\xba\xb9\x15\x57\x46\x26\x84\xac\x32\xf9\x46\xdd\x07\x71\xea\xc0\
\x86\xc4\x20\x92\x38\x0e\xe4\x0a\x91\x0e\xe0\x44\x6e\xfb\x50\x9e\
\x77\xe1\x83\xcf\x7f\x7e\x2b\xac\x10\xac\xa1\x79\xbd\xc4\x36\xb5\
\x48\x52\xbc\xf9\x06\xd6\xb3\x30\x78\xcb\xf8\x49\x0b\x39\x91\xdb\
\x50\x3f\x78\xf5\x46\x68\x21\x58\xa3\xf1\xb2\xea\x85\xa5\x73\xb7\
\x6f\x9d\x43\x74\xec\xff\x60\x2b\xc5\x5e\x11\x71\x16\x11\xc3\x84\
\xfc\xa8\x7e\x0d\xe6\xf9\xeb\x25\x0c\xed\xc2\x95\x22\xb4\x10\xde\
\x70\x14\xb6\x96\x34\xeb\xd5\x61\xac\x57\x60\xd2\x52\xc0\x36\xb5\
\xd0\x27\xdc\xd9\x51\x9c\xb4\x14\x98\x76\xaa\x6b\x9d\x2e\x09\x13\
\x72\x6d\xfc\x00\xd3\x3f\xff\x5d\xc2\x3c\x7f\x9d\x1e\x21\xd8\x04\
\xdc\x2e\xad\x77\x24\xe7\x5d\x90\x19\x21\x30\xe4\x8f\xf7\x49\x0e\
\x29\x24\x64\x45\x78\x07\x14\x92\x3e\xa5\x4e\x42\x56\x00\xdb\x7a\
\xf5\x8a\x89\x1b\x0d\xcd\x07\x2a\x8c\xf5\x4a\x62\x07\xab\x37\x46\
\x08\x0c\x83\x23\x23\x27\x3a\x1a\x35\xca\x30\x1f\xa8\x30\x1f\xa8\
\xee\x52\x39\xfb\xce\xe1\xa4\xa5\xc0\xa8\x51\xf6\xed\x06\x92\x90\
\x18\x44\xcd\x3b\xe2\x40\x42\xd6\x60\xd6\xab\x27\x2a\x86\x86\xac\
\x04\xc5\x8c\xf5\x8a\xef\x78\x68\x18\xde\xd7\xdf\xee\x3a\x8c\xde\
\x28\x21\x2c\xce\xbb\x84\xd3\x4e\xd5\xfd\x41\x15\x27\xe9\xf3\xbe\
\x73\x98\xd4\xeb\x6a\x24\x24\x85\x90\x10\xc1\x20\x21\x82\x41\x42\
\x04\x83\x84\x08\x06\x09\x11\x0c\x12\x22\x18\x24\x44\x30\x48\x88\
\x60\x90\x10\xc1\x70\xda\x22\xf5\x7b\xea\x59\xc1\x69\x8b\xa0\x53\
\x27\x27\x72\x87\x7e\xf2\x48\x10\x21\x37\x27\x72\xe7\x5a\x55\x8c\
\xf7\x49\xc8\x3d\x0b\xa1\x9f\xcd\x13\x04\x12\x22\x18\x24\x44\x30\
\x48\x88\x60\x90\x10\xc1\x20\x21\x82\x21\x94\x10\x67\x5f\x7b\x93\
\xb9\x4b\x21\x0f\x56\x15\x42\xfc\x8f\xf1\xf4\xd1\xc3\x44\x85\xe4\
\x72\xb9\x9c\x51\xdc\x92\xda\xa5\xfc\xcd\xbb\xfe\x72\x29\xe3\xc6\
\x28\x6e\x49\x89\xcb\xa0\x42\x85\x0a\x15\x2a\x54\xa8\xa4\xa7\xfc\
\x07\x52\x47\xa7\x6a\x37\x61\x50\x29\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
\x00\x00\x04\xce\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x60\x00\x00\x00\x60\x08\x06\x00\x00\x00\xe2\x98\x77\x38\
\x00\x00\x04\x95\x49\x44\x41\x54\x78\x9c\xed\x5d\x31\x6e\xe3\x30\
\x10\xbc\x27\xf8\x09\x79\x82\x8a\x90\x4a\x29\x88\x4e\x9d\x3c\xc0\
\x85\x2b\xd7\x6e\xae\x3d\xe4\x07\x79\x42\x9e\x90\xe6\x8a\xeb\x94\
\x22\x6d\xe0\xb4\x07\x1c\xe0\x27\x10\x90\x56\x4e\x71\x39\xf3\x0a\
\xd1\xb9\x24\xe7\xc4\x96\xb5\xcb\xa5\xac\x1d\x60\x4a\x03\xd2\x8c\
\x48\x2e\xb9\xcb\xf5\x97\x2f\x02\x81\x40\x20\x10\x08\x04\x02\x81\
\x40\x20\x10\x08\x22\x81\xcd\x92\x11\x9c\xab\xa4\xcc\xd5\xb4\xca\
\xf5\x15\x18\x55\x80\xd1\x0b\xcf\x25\x18\x6d\x3d\x97\x0d\x55\x51\
\x1b\x7d\x5b\xe5\xfa\xaa\xcc\xd5\xb4\x1a\x9f\x66\xdc\xef\xd0\x3b\
\xd8\x2c\x19\x55\xe3\xd3\x0c\x4c\x7a\xe3\xc5\x75\xdd\x99\xde\x94\
\xb9\x9a\x72\xbf\x5b\xb4\xb0\x59\x32\x2a\x73\x35\xf5\x5f\x38\x92\
\xe8\xdb\x59\x1b\x7d\x0b\x79\x3a\xb7\x59\x32\xe2\x7e\x6f\x76\xd8\
\x2c\x19\xc1\x58\x5d\x53\x8b\xfe\x21\xc7\xea\xda\x66\x67\x27\xdc\
\x3a\x04\x07\xbb\xf0\x5b\x8c\xe0\xd6\x24\x08\x6c\x96\x8c\x20\x4f\
\xe7\x7e\xd1\xe4\x17\xfe\x3d\xf3\x74\xce\xad\x11\x19\x9a\x85\x55\
\x2f\xd8\x45\xde\xcd\x65\x69\xf4\x25\xb7\x5e\xa8\xf0\x5f\x7d\x1c\
\xd3\xcd\x90\x46\x83\xcd\x92\x91\x0f\x27\xf9\x05\x3d\x80\xb5\xd1\
\xb7\xdc\x1a\x1e\x0c\x9b\x25\xa3\xda\xe8\x5b\x6e\x11\x11\xb8\xe8\
\x5d\xc8\x0a\xe7\x2a\x81\x58\x17\xda\xc3\x68\xe1\x5c\x25\xdc\xba\
\xee\x05\x2f\x7e\xbf\xe6\xfb\x7d\x19\xbb\x09\xcd\x9c\x7f\x54\x5f\
\xfe\x7f\x23\x21\xda\x8d\xdb\x11\xcd\xf9\xbb\xb8\xe0\xd6\x7a\x2b\
\xfc\xce\x96\x5b\x9c\x40\x54\x05\xb7\xde\x6f\xe0\xe3\xfc\x08\x84\
\x09\xc8\x58\xf6\x09\xa5\xd1\x97\x70\xac\x8b\xee\x0e\xb2\xef\x98\
\xfd\xa2\xdb\x87\xe3\x05\x2a\x5a\x56\x03\x06\x39\xf5\xbc\x63\x95\
\xeb\x2b\x16\xf1\xfd\xd7\x3f\xc8\xa9\xe7\x1d\x79\x42\xd3\x26\x47\
\xcb\xfe\xf2\x91\x30\xbd\x09\x2a\xbe\xcd\xce\x4e\x80\xe0\xeb\xaf\
\x2f\x8c\xfb\xfd\xe3\xbb\x5b\xcd\x26\xe8\x22\xad\x66\x13\xf7\x7c\
\x7f\x47\x66\x42\xd0\x51\x40\x11\xf3\xd7\x17\xc6\x3d\xdf\xdf\x39\
\xe7\x9c\x5b\x57\x25\xaa\x09\xab\xd9\xc4\xad\xab\xd2\x39\xe7\xdc\
\xf3\xe3\x43\xbf\x47\x01\xc5\xdc\xff\x5a\xfc\x0d\xb0\x4c\x78\x2d\
\xfe\x06\x54\x26\x04\x19\x05\xd8\x91\xcf\x36\xf1\xb1\x4c\xd8\x26\
\x3e\xa9\x09\x21\x36\x67\x98\xe7\x3d\x9f\x89\xdf\xd5\x84\xcf\xc4\
\xa7\x32\x81\x3c\x81\x83\x3d\xfd\xec\x12\xff\x50\x13\xf6\x11\xff\
\xc5\x04\xe4\x85\x99\x34\x79\xe3\x8f\x1d\xd0\x1e\x76\x35\x9b\xec\
\x25\x52\x1b\x13\xda\x88\xbf\xae\x4a\xf7\xf4\xed\x2b\xaa\x01\xa4\
\x15\x78\x14\xc7\xcd\x98\x26\x70\x8b\x0f\x86\x78\x1a\x02\xa2\x64\
\x0b\x86\x09\x31\x88\xef\x49\x73\x3e\xe4\x53\x8d\x54\x0f\xdd\xc9\
\x84\x88\xc4\x77\x60\xb4\x23\xa9\xca\x6e\x8a\x67\xe9\x1e\xfa\x50\
\x13\x62\x13\x1f\x8c\xa6\x09\x47\x43\x9d\x7c\xb6\x32\xa1\xae\xdd\
\xba\x86\xb8\xc4\x37\x9a\xa6\xd6\x34\x64\xbe\xb7\x8d\x09\xd1\x89\
\x6f\x88\x16\x62\x5f\xbb\x1f\xec\x25\xb0\x4c\x08\x2d\xbe\xe7\x92\
\xc0\x80\xf0\xe5\x26\x5d\x4d\x60\x12\xdf\x01\x45\x24\xc4\x61\x40\
\x17\x13\x18\xc5\x27\x33\x80\x2d\xfb\xd5\xd6\x84\x75\x0d\x9c\xe2\
\x3b\x30\xda\x1d\x9d\x01\xfb\x46\x3b\xce\x39\xe7\x9e\x56\x24\x49\
\x9d\x41\x1a\xd0\x26\xce\x7f\x33\x0a\x90\x93\x3a\x2d\x49\x32\x05\
\x05\x2f\x3f\x39\x54\xfc\x08\x4c\x20\x89\x82\x82\x1a\xd0\x55\x7c\
\x66\x13\xf0\xeb\x47\x43\x6f\xc4\x30\xc4\xe7\x33\x81\xa0\x76\x34\
\x54\xf1\x6d\xab\xb3\x9d\x16\x0b\x73\x50\x13\x28\x8e\x22\xb0\x93\
\x31\x9d\xc5\xf7\x71\x3e\x45\x52\xa7\x2b\x49\x92\x32\xfe\x9a\x69\
\x54\xe2\xbf\xfe\x6d\x4c\x26\x90\x15\xed\x02\x51\x28\x8a\x71\xa4\
\x1c\x91\x09\x96\x2c\x2f\x4c\x71\xe5\x14\xf3\x3c\x3f\x06\x13\x48\
\x53\x92\x14\x49\x79\xec\x64\x0a\xb7\x09\xa4\x77\x06\x7c\x59\x0a\
\xda\xc3\xb6\x29\x4b\x69\x73\xb6\xd3\xc6\x04\xec\xda\x20\xf2\xea\
\x38\xec\xfd\xc0\xf3\xe3\x03\xaa\xf8\x6d\x4c\xf8\xf3\xeb\xa7\xab\
\x2f\x0c\xa2\x01\x01\xee\x8e\x51\xe4\x86\x3f\x32\xa1\xeb\x91\xf2\
\x67\x26\xe0\x8b\x1f\xe8\xb2\x06\xf6\x34\xf4\x91\x09\x58\xe7\xf9\
\xdb\x4c\xa0\x10\x1f\x4c\xc0\x12\x75\xaa\x06\x1c\x1b\x13\xb0\x93\
\x29\xaf\x4d\xa0\x12\x3f\x68\x63\x0f\xaa\x51\x00\xa6\x59\x98\x29\
\x92\x29\xab\xd9\x84\x4c\x7c\xe0\xe8\x25\xd1\xe7\x36\x34\xe8\xe4\
\x68\x79\x26\x97\xf4\x5e\xc8\xd7\x3f\x42\xae\xa9\x32\x5e\x53\x7d\
\x31\xe1\xb8\xbb\xa3\xec\xf1\xf5\x33\x37\x73\xa2\x3e\x25\x8d\x99\
\xd1\x74\xe3\x1d\xe4\x54\x14\x5b\xaf\xd1\x81\xf4\x0a\xf2\x4c\x6f\
\xd8\xa7\x9e\x6d\x80\x61\x34\xee\x88\xb7\x89\xdf\x00\x42\xd3\xf8\
\x9b\xf7\x51\xdf\xa4\xe1\x14\x9f\xbd\x3f\xd0\xbe\x38\xc2\xce\x89\
\xfd\x11\x7f\x83\x23\x6a\xe6\xb4\x88\xb6\x53\xe2\x3e\xe8\x77\x74\
\xa4\x8a\x68\x17\xdc\x36\xe8\x65\x6f\xa1\xb1\xba\x3e\x0a\xf1\x37\
\xf0\x3b\xe6\x25\xbb\xb0\xbb\x69\xa3\xd9\xe1\x52\x20\xe2\x5d\xb3\
\x1d\xd4\x7f\xcb\x44\x94\x4f\xb0\xcd\xce\xb6\xc7\x0b\xed\xa1\x68\
\xc2\x55\x4e\x23\x54\x11\xfd\xc6\x2a\x04\xfe\xfd\xb7\x0c\xf9\x55\
\x58\x0b\x46\x15\x55\xae\xaf\x44\xf8\x4f\x50\xe6\x6a\x8a\x18\xbe\
\x5a\x30\xaa\x18\xd4\xfc\x8e\x89\xd2\xe8\x4b\xc8\xd3\x79\x73\x3f\
\x41\x15\xf0\xf2\xb7\x85\xef\x45\xd6\x16\x8c\x5e\x80\x51\x05\x8c\
\xd5\x35\xe4\xe9\xbc\x1a\x9f\x66\x22\xba\x40\x20\x10\x08\x04\x02\
\x81\x40\x20\x10\x08\x04\x31\xe1\x2f\x57\xc0\xdf\xe9\x9c\x5b\x31\
\x18\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\x43\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x60\x00\x00\x00\x60\x08\x06\x00\x00\x00\xe2\x98\x77\x38\
\x00\x00\x01\x0a\x49\x44\x41\x54\x78\x9c\xed\xd1\xb1\x09\xc0\x30\
\x10\x04\x41\xf7\xdf\xb4\x5d\x81\x83\x85\xb3\x9c\xcc\xc0\x67\x82\
\x15\xdc\x75\x01\x00\x00\x00\x00\x00\x00\x00\xc7\xdd\x2f\x57\xdf\
\xd7\xfb\xeb\x3f\x5f\x77\x33\x03\x6c\xbb\x99\x01\xb6\xdd\xcc\x00\
\xdb\x6e\x66\x80\x6d\x37\x33\xc0\xb6\x9b\x19\x60\xdb\xcd\x0c\xb0\
\xed\x66\x06\xd8\x76\x33\x03\x6c\xbb\x99\x01\xb6\xdd\xcc\x00\xdb\
\x6e\x66\x80\x6d\x37\x33\xc0\xb6\x9b\x19\x60\xdb\xcd\x0c\xb0\xed\
\x66\x06\xd8\x76\x33\x03\x6c\xbb\x99\x01\xb6\xdd\xcc\x00\xdb\x6e\
\x66\x80\x6d\x37\x33\xc0\xb6\x9b\x19\x60\xdb\xcd\x0c\xb0\xed\x66\
\x06\xd8\x76\x33\x03\x6c\xbb\x99\x01\xb6\xdd\xcc\x00\xdb\x6e\x66\
\x80\x6d\x37\x33\xc0\xb6\x9b\x19\x60\xdb\xcd\x0c\xb0\xed\x66\x06\
\xd8\x76\x33\x03\x6c\xbb\x99\x01\xb6\xdd\xcc\x00\xdb\x6e\x66\x80\
\x6d\x37\x33\xc0\xb6\x9b\x19\x60\xdb\xcd\x0c\xb0\xed\x66\x06\xd8\
\x76\x33\x03\x6c\xbb\x99\x01\xb6\xdd\xcc\x00\xdb\x6e\x66\x80\x6d\
\x37\x33\xc0\xb6\x9b\x19\x60\xdb\xcd\x0c\xb0\xed\x66\x06\xd8\x76\
\x33\x03\x6c\xbb\x99\x01\xb6\xdd\xcc\x00\xdb\x6e\x66\x80\x6d\x37\
\x33\xc0\xb6\x0b\x00\x00\x00\x00\x00\x00\x00\x9c\xf4\x00\xb0\xbf\
\x59\x42\x87\x0c\x8e\x61\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\
\x60\x82\
\x00\x00\x09\xa2\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x80\x00\x00\x00\x80\x08\x06\x00\x00\x00\xc3\x3e\x61\xcb\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\
\x09\x70\x48\x59\x73\x00\x00\x0e\xc3\x00\x00\x0e\xc3\x01\xc7\x6f\
\xa8\x64\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\
\x72\x65\x00\x70\x61\x69\x6e\x74\x2e\x6e\x65\x74\x20\x34\x2e\x30\
\x2e\x31\x32\x43\x04\x6b\xec\x00\x00\x09\x12\x49\x44\x41\x54\x78\
\x5e\xed\x9d\x2f\xd0\x7e\x43\x14\xc7\x7f\x41\x10\x04\x41\x10\x04\
\x41\x10\x04\x41\x10\x04\x41\x10\x04\x41\xf8\x05\x41\x10\x04\x41\
\x10\x04\xc1\x8c\x20\x08\x82\x20\x08\xcc\x08\x02\x33\x82\x20\x08\
\x82\x20\x08\xc6\x08\x82\x20\x08\x82\x60\x86\x60\x86\x31\x9c\xcf\
\x8c\x3b\xef\x9d\xc7\x79\xde\xf7\xdc\xfd\x9e\xbd\xbb\x77\x9f\xfd\
\xce\x7c\xc2\xfb\xe7\xde\xdd\xbd\xf7\xdc\xbd\xbb\x67\xcf\x9e\x7b\
\x6b\x6a\x6a\x6a\x6a\x6a\x6a\x6a\x6a\x6a\x6a\x6a\x6a\x6a\x6a\x6a\
\x6a\x6a\x6a\x6a\x6a\xaa\x43\xdd\x65\x3c\x6a\x3c\x62\xdc\xc9\x2f\
\xa6\x2e\x43\x2f\x1a\xdf\x19\xff\xac\xf8\xcb\xf8\xc6\x78\xc6\x98\
\x1a\x54\xf7\x1a\x9f\x19\xeb\x1b\xef\xf1\x91\x71\x8f\x31\x35\x90\
\xee\x30\xbe\x32\xbc\x1b\xee\xf1\xa9\x31\x35\x90\x5e\x36\xbc\x1b\
\x7d\x1d\xbc\x2a\xa6\x06\x10\x4f\xff\x1f\x86\x77\x93\xaf\xe3\x17\
\x63\x6a\x00\x3d\x6c\x78\x37\x38\xc2\x83\xc6\x50\xa2\x41\x4f\x18\
\xcf\x19\x2f\x19\xaf\xad\xe0\x67\x7e\xcf\xdf\x47\x6a\xf8\xb3\x86\
\x77\x73\x23\x70\xec\x61\xc5\x48\xf6\xb6\xf1\x8e\xc1\x14\x67\x6b\
\x37\xc8\xff\x73\x1c\xc7\x73\x9e\xa3\x8e\x8c\x31\x6e\xaf\x7d\x11\
\x38\xf6\x50\xe2\x26\x31\x78\xf9\xd2\x60\x6e\xeb\x35\xaa\x14\xce\
\xc7\x79\x39\xff\x91\x8c\xe1\x22\x0c\xe0\x31\x83\xf9\x6b\xf6\x4d\
\x3f\x07\xe5\x50\x1e\xe5\xf6\xae\xa1\x0d\x80\xf7\xf5\x17\x86\x57\
\xf9\xbd\xa0\x7c\xea\xd1\xab\x86\x34\x80\x07\x8c\xcf\x0d\xaf\xd2\
\xad\xa0\x3e\xd4\xab\x37\x0d\x65\x00\xcc\x69\xdf\x30\xf6\xea\xea\
\xb7\x42\xbd\xa8\x1f\xf5\xec\x45\xc3\x18\x00\xf3\xd9\xd3\x45\x8c\
\x5e\xa1\x9e\xd4\xb7\x07\x0d\x61\x00\xcf\x1b\x25\xde\xac\x96\x50\
\x5f\xea\xdd\x5a\x87\x36\x00\xba\xd2\x77\x0d\xaf\x72\x47\xe1\x3d\
\xa3\xe5\x2b\xe1\xb0\x06\x40\xc0\x42\x6f\x03\xbd\x52\x68\x07\xed\
\x69\xa1\x43\x1a\xc0\xdd\xc6\x96\xe5\xcb\x23\xf0\xb5\xd1\xc2\x81\
\x74\x38\x03\xe0\x49\x19\xed\xe6\x2f\x60\x04\x7b\xf7\x04\x87\x32\
\x00\x2e\x0e\xee\x56\xaf\x32\xa3\x40\xfb\xf6\x34\x82\xc3\x18\x00\
\x03\xa5\x0f\x0d\xaf\x22\xa3\xf1\x89\xb1\xd7\xc0\xf0\x30\x06\xf0\
\xba\xe1\x55\x22\x9b\x9f\x0c\x62\xe3\x98\x5d\xd0\xc0\x05\x7e\xe6\
\xf7\xfc\xdd\x3b\x2e\x1b\xda\xbb\x87\x4a\xa2\x81\x16\xde\x32\x76\
\xd1\xd3\x86\x57\x81\x0c\xf0\xce\x11\xe3\xc6\xda\xf6\x7d\x46\x44\
\xfc\x1f\xff\xcf\x71\x35\xbd\x8e\xb4\xbb\xb6\x88\x71\xf0\xca\x8e\
\xc0\x14\xb6\xba\xb8\xd8\xbf\x1a\x5e\x05\x14\x38\x27\x4f\x19\xd1\
\xb0\x8a\x38\x9e\xf3\xd4\xa8\xe3\xef\xc6\xfd\x46\x4d\x75\x6f\x00\
\xd9\xab\x79\x8b\x3f\x9e\xa9\x64\xa6\x38\x5f\x8d\x75\x08\x06\x85\
\x35\xc7\x03\x5d\x1b\x00\xc1\x15\x5e\xc1\xa5\xec\xe1\x83\xaf\xb1\
\x26\xc1\x7b\xba\x96\xba\x35\x00\xba\x56\xba\x40\xaf\xe0\x12\x08\
\xdd\xda\x6b\x64\x4d\x39\x94\xe7\xd5\xa3\x04\xae\x83\xfa\xaa\x3a\
\xa7\x6e\x0d\xe0\x03\xc3\x2b\x74\x2b\x74\xc9\xad\x62\xd8\x29\x37\
\xeb\x95\xc0\xd4\xb0\x86\xba\x34\x00\x36\x24\x7a\x05\x6e\x85\x8b\
\xdf\x7a\x3f\x1b\xe5\x67\x19\x01\x9b\x35\xb3\xd5\xa5\x01\x64\x0d\
\xfc\x08\xe5\xee\x41\xd4\xc3\xab\xdf\x56\x88\x42\xce\x56\x77\x06\
\xf0\xb8\xe1\x15\xb6\x95\x57\x8d\x9e\xf4\x8a\xe1\xd5\x73\x2b\x5c\
\x9f\x4c\x75\x67\x00\x19\x4f\x3f\x2e\xe3\x1e\x95\xe1\xca\xe6\xfa\
\x64\xaa\x2b\x03\x50\xb6\x29\x2d\xfc\x68\xb4\x5a\x5b\xbf\x49\xd4\
\x8b\xfa\x79\xf5\xde\x42\xe6\x54\xb6\x2b\x03\xc8\x88\xee\xc9\xee\
\x22\xb3\xc5\x5e\x01\xaf\xde\x5b\xe0\x3a\x65\xa9\x1b\x03\x20\x0d\
\x89\x1a\xd7\xc7\xd4\xf1\x08\x52\x0d\x9d\xeb\x94\xe5\xc9\xec\xc6\
\x00\x94\x4d\x8a\xc0\x45\x89\x2e\xe6\xb4\x56\x86\x93\xeb\x05\x23\
\x43\xdd\x18\x80\x1a\xdf\xb7\xd7\xf2\x69\x96\x94\x75\x78\xc8\x1a\
\x0c\xb2\x6b\xc9\x3b\x7f\x84\xb4\x01\x29\xdd\x99\xe2\x2c\xe1\x69\
\xca\x5e\xdc\xa9\x2d\xea\xab\xf4\x02\x5c\xaf\x8c\x18\x42\x65\xda\
\x9d\x66\x00\x6a\xf7\xbf\x5b\x60\x42\xb2\xa8\xb7\xd7\x9e\x28\x74\
\xdf\xaa\xba\x30\x00\xd5\xef\x7f\xd4\x84\x0d\xd4\xdb\x6b\x4f\x94\
\x0c\x7f\x47\x17\x06\xf0\xb3\xe1\x15\x10\x81\xf5\xf2\x23\x4b\x09\
\x72\xcd\xc8\xd3\xd3\xdc\x00\x88\x78\xf1\x4e\x1e\x25\x6b\x34\xdc\
\x4a\xd4\xdf\x6b\x57\x14\xb5\xf7\x6b\x6e\x00\xea\xfb\xff\x28\x53\
\xbf\x73\xa2\xfe\x5e\xbb\xa2\xa8\xe3\x80\xe6\x06\xa0\x0c\x84\x88\
\xbc\x19\x41\x6c\x08\xf1\xda\x17\x41\x1d\x00\x37\x37\x80\x48\x7a\
\xd2\x73\x1c\x75\xf4\x7f\xaa\x37\x0d\xaf\x7d\x11\xd4\x9b\xd0\xdc\
\x00\x94\x01\xe0\x28\xc9\x8b\x69\x87\xd7\xbe\x08\x5c\x3f\x45\x4d\
\x0d\x80\xd5\x31\xef\xc4\x51\x7a\x4c\xb9\x52\x22\x75\x20\xac\xac\
\x7e\x36\x35\x80\x87\x0c\xef\xc4\x11\xf0\xfd\x8f\x24\x65\x21\x8c\
\xeb\x58\xaa\xa6\x06\xa0\x14\x5e\x23\x3c\xaa\xa5\x68\x8f\xd7\xce\
\x08\x5c\xc7\x52\x35\x35\x00\x65\x0a\xc8\xe0\x71\x24\xb1\xcd\xcc\
\x6b\x67\x04\x65\x2a\xd8\xd4\x00\xba\x59\x8a\xec\x40\xb4\xc7\x6b\
\x67\x04\x25\xd7\x50\x53\x03\x50\x96\x44\x99\x3a\x8d\x24\x65\x2a\
\xc8\x75\x2c\x95\x62\x00\xb2\x1b\x5e\x31\x00\xa5\xd1\x3d\xaa\xd5\
\xb5\x50\x66\x20\xc4\x37\x4a\x9a\x06\x70\xa5\x69\x00\x1b\x99\x06\
\x70\xc5\x45\x1a\x00\x5b\xb1\x47\x12\xed\xf1\xda\x19\xe1\xb0\x06\
\xc0\xe8\xd5\x3b\x71\x84\x39\x0b\xb8\x42\x99\x05\x34\x35\x00\x65\
\x1a\x38\xda\xa7\xcb\x5a\xf9\x01\x9a\x1a\x80\x32\x05\x99\x9e\xc0\
\x2b\xb8\x8e\xa5\x6a\x6a\x00\xca\x5a\x00\x11\xb5\x23\x49\x89\x10\
\x56\xb6\x8a\x35\x35\x00\x75\x35\x90\xca\x8f\x20\xe5\x26\x80\x12\
\x12\xdf\xd4\x00\x90\x12\x0f\xb0\x47\x1a\xb5\x3d\xa4\xa4\xc1\x53\
\x03\x43\x9b\x1b\x80\xb2\x23\x68\x14\x77\xb0\x32\x05\x54\xfd\xf1\
\xcd\x0d\x40\x89\x09\x24\x69\x74\x2d\x91\xe8\xe9\x49\x83\x11\x36\
\xb0\x72\x59\x2b\x9b\xb7\x92\xfc\xfa\x6d\x43\x51\x73\x03\x50\xa6\
\x82\x90\x7d\x53\xd8\xa5\x4c\x4a\x97\x1f\x8c\xd3\xb2\xd8\x8e\x45\
\xb2\x26\x72\x18\x65\x89\xfa\x9f\x96\xb3\x05\xf5\xab\x23\xcd\x0d\
\x80\xb0\x2e\xef\xe4\x51\x32\x3f\xbb\xc2\x53\xcf\xf7\xfe\xbc\x72\
\xd6\x30\x62\x57\xa6\x5e\x6b\xa9\x0f\x80\xba\x2f\xa0\xb9\x01\x20\
\x06\x32\x5e\x01\x11\xe4\x35\xe9\xff\x14\xbd\xf9\x0b\xf4\x06\x4f\
\x19\xaa\x94\xa8\x68\xd2\xd3\xaa\xea\xc2\x00\xd4\xdc\x39\x34\x42\
\x55\xc9\x48\x9c\x1b\xa0\x6c\x4c\xe1\x58\x65\x57\x34\x06\xab\xaa\
\x0b\x03\x50\xbb\xc1\x8c\xd9\x00\xef\x76\xef\xdc\x37\xa1\xe4\xf2\
\x55\x53\xe0\x67\xbc\xfe\xa8\xbb\x77\xee\x08\x18\x6f\x8a\x18\x08\
\x29\x4f\x02\x4f\xa2\xe2\x0c\x51\xf3\x13\x94\xac\xc6\x31\xd8\x54\
\x5e\x7d\x90\x95\x3e\xd6\x3b\x77\x94\x34\xa9\x19\x42\x94\x84\xca\
\x4a\x37\x08\x18\xcf\xd6\xef\x03\x2b\x1f\x6a\x80\xcc\x5d\xd1\xde\
\xf9\xa3\xa4\x49\xdd\x25\xab\xf4\x02\xaa\x01\x00\x1e\xcd\xe8\x13\
\x49\x3d\xd5\xef\x0b\x64\xee\x8a\xf6\xce\x1f\x25\x4d\x5c\x14\x35\
\x4b\x18\x19\xba\x4b\xa4\xbe\x82\x16\x98\x91\x44\xc6\x03\x6a\x26\
\x71\xea\x9a\xe9\xff\xf0\xca\x88\x92\xaa\x8c\x3c\x81\xa5\x09\x95\
\xb3\xb2\x93\xdf\xe4\x99\xa3\x7e\xde\x71\x5b\xc8\x18\xfd\xaf\xe5\
\x95\x11\x25\x55\x2d\x33\x85\x66\x94\xbd\x70\x2e\x3d\x7d\x56\xa6\
\x50\x92\x4d\x66\xca\x2b\x23\x4a\xba\xe8\x46\xbd\x82\xb6\x50\x9a\
\x3b\xa7\x74\x2a\x78\x0a\x5d\xb4\xe7\x24\xca\xc8\x15\x5c\x23\x10\
\xc6\x2b\x27\x4a\xba\x94\x28\xa1\x35\x64\xe7\xde\x2a\xde\xab\x59\
\x9f\x85\xc3\x5d\xbc\x7e\x52\xb3\xb2\x85\x67\x78\x1f\x4f\xe5\x95\
\x13\xa5\x8a\x94\xf0\xa8\x05\x9e\xc2\x92\xfc\x01\x4a\xe2\xc4\x53\
\x16\x23\xc8\xfa\x68\x44\xad\x30\x38\xaf\xac\x28\x55\x94\x91\x50\
\x19\x4a\x8d\x40\x59\x9f\x3f\xe5\x4f\xe3\xef\x93\xdf\x95\xb2\xd5\
\xd7\x10\x95\x57\x56\x94\x6a\xca\x7c\x1f\xb3\x96\xbf\x45\x4c\xe5\
\x94\xf4\x6d\x35\xa8\xb9\x1b\xda\x2b\x2f\x4a\x35\xb1\x50\xa2\xfa\
\x05\xd6\xb0\x5e\xb0\xc5\x67\x4f\xf9\xaa\xab\x36\x0b\xae\x43\xcd\
\x6c\x28\x5e\x99\x51\xaa\x4a\x75\x97\x9e\x42\xe4\xcd\x96\x0b\x49\
\x97\x9b\xf1\xee\x56\x29\x19\xd0\x6e\x91\x57\x66\x94\xaa\xe2\x89\
\x55\x52\xa8\x79\xf0\x34\xb1\x0a\xc7\x62\x4c\x44\xca\xf6\xb5\x0c\
\x18\xf8\x6d\xe9\xb9\x4a\xe4\x95\x1b\xa5\xba\x78\x62\x95\x98\xf9\
\x73\xe0\xbb\xe7\xc9\x8a\xb8\x54\x95\x6d\x5b\x0a\xb4\x7b\x8f\x3c\
\xc8\x5e\xd9\x51\x76\x91\x1a\x2f\x70\x1d\xf4\x08\x38\x68\x18\x28\
\x9e\x5b\xcc\xe1\x09\x54\x57\x2b\x4b\xa0\xdd\x7b\xc8\x2b\x3b\xca\
\x6e\xc2\xc7\xee\x55\x20\x1b\x5c\xb5\xec\xd3\x63\x5d\x82\xee\x7f\
\xe1\x7d\xe3\x37\xc3\x3b\xa6\x06\x1f\x1b\x7b\xc9\x2b\x3f\xca\x6e\
\x6a\xf5\x14\xb6\x04\xaf\x24\x86\xcf\x60\xb4\xe6\x38\xc0\x2b\x3b\
\xca\xae\x62\x31\x25\x7b\x50\x78\x14\x98\x92\xf2\xaa\x22\x0e\x20\
\x7b\x5c\xe0\x95\x17\x65\x77\xf1\x9e\xfe\xde\xf0\x2a\x73\x49\x30\
\x88\xcd\x32\x08\xef\xfc\x51\x9a\x68\x1a\xc1\xff\xc1\x20\x18\xab\
\x94\x44\x45\x79\xe7\x8b\xd2\x4c\x4c\xdf\x7a\x73\xd7\xf6\x00\xb3\
\x9a\xad\x9f\xcb\xf7\xce\x13\xa5\xa9\x70\xe6\x64\x45\xf2\x8c\xc6\
\x96\x2f\xa7\x2b\x3b\xb4\x95\x7d\x11\x69\xc2\xa1\xd3\x83\xcb\xb6\
\x37\xa2\x2b\xa1\x4a\x94\x52\xc6\xa6\x9c\x14\xb1\x84\x9c\x15\xcc\
\x31\x0a\xd1\x6d\x73\x43\x18\x00\x62\x9a\xa8\x46\xdc\x8e\x46\xe4\
\x06\x0d\x63\x00\x8b\xd8\xbe\x7d\xa9\xfe\x82\x53\x22\xee\xe4\xe1\
\x0c\x60\x11\xef\xc0\x8c\xf0\x32\x85\x6f\x0d\x16\x93\xd4\x8d\x20\
\xa5\x44\xc6\x01\xc3\x1a\xc0\x22\x02\x29\x95\x6d\xd8\x25\x50\xde\
\x3a\x80\x93\xf9\x39\xf1\x0d\xca\xc5\x2e\x21\x92\xc7\x60\x78\x03\
\x58\xc4\x94\x85\x19\x43\xad\xd7\x03\xe7\xe5\xfc\xd7\x4d\x8d\xf0\
\xe9\xb3\x15\x3d\x23\xfc\xfd\x26\xf0\x09\x44\xb6\xab\x5d\x8c\x01\
\xac\xc5\x4d\xe2\xfd\x48\xf7\xcc\xf7\x07\xb7\x4e\x23\xf9\x7f\x8e\
\xe3\x78\xce\x53\x32\x1f\x26\xd6\x81\xe0\x94\x5a\xbd\x02\x75\x8b\
\x88\x76\x78\xc7\x47\x28\xdd\x8f\xd9\x9d\x78\x32\x49\x5c\xb9\x24\
\x86\xa2\xbb\x5e\x2f\x07\xf3\x33\xbf\xe7\xef\xfc\x5f\xf6\xea\x1c\
\xd3\x58\x56\xfe\xb2\xa6\xb2\x18\x68\xf4\x63\x52\xa5\x5b\xf2\x30\
\xdc\xa9\x0a\x62\x16\x43\xcf\x50\x3a\x80\xe5\xe6\x6f\x09\x87\x2f\
\xdd\x99\x4d\xe4\xf6\x54\x65\xd1\xc5\x32\x66\x20\x9d\x5e\xc4\x20\
\x58\x36\xde\x9a\xc0\x8a\x71\x42\xc9\x2c\xa5\x64\xcf\xc5\x94\xa8\
\xb5\x41\x30\xd3\x60\x30\x09\xec\x18\xe6\x86\x44\x83\x5d\x4f\x75\
\xdb\xf0\x6e\xf2\x39\xb2\x77\x28\x4f\x75\xa0\xe8\x58\x80\x9e\x28\
\x33\x3f\xc1\x54\x47\xa2\x17\x39\xb7\x21\x86\xb1\x05\xdb\xe6\x6a\
\x86\xa7\x4d\x75\x20\x9e\x6e\x0c\x81\x9b\x4d\x80\x2c\xb3\x13\xa2\
\xa6\x6b\xee\x4e\x9a\x9a\x9a\x9a\x9a\x9a\x9a\x9a\x9a\x9a\x9a\x9a\
\x9a\x9a\x9a\x9a\x9a\x9a\x9a\x3a\xb8\x6e\xdd\xfa\x17\x34\xb7\x02\
\x75\xaf\x4d\xc6\xa5\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\
\x82\
"
qt_resource_name = b"\
\x00\x05\
\x00\x4f\xa6\x53\
\x00\x49\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x06\
\x05\x03\x7d\xc3\
\x00\x49\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x12\
\x08\x38\x21\xe7\
\x00\x43\
\x00\x72\x00\x65\x00\x61\x00\x74\x00\x65\x00\x20\x00\x4e\x00\x65\x00\x77\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x09\
\x08\x9b\xe0\x47\
\x00\x54\
\x00\x72\x00\x61\x00\x73\x00\x68\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x06\xe1\x5e\x27\
\x00\x44\
\x00\x6f\x00\x77\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x12\
\x03\x23\x64\x47\
\x00\x4c\
\x00\x69\x00\x6e\x00\x65\x00\x20\x00\x43\x00\x68\x00\x61\x00\x72\x00\x74\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x13\
\x03\xe0\x65\x27\
\x00\x43\
\x00\x6c\x00\x6f\x00\x73\x00\x65\x00\x20\x00\x57\x00\x69\x00\x6e\x00\x64\x00\x6f\x00\x77\x00\x2d\x00\x38\x00\x30\x00\x2e\x00\x70\
\x00\x6e\x00\x67\
\x00\x25\
\x04\x16\xb1\xa7\
\x00\x4e\
\x00\x6f\x00\x72\x00\x6d\x00\x61\x00\x6c\x00\x20\x00\x44\x00\x69\x00\x73\x00\x74\x00\x72\x00\x69\x00\x62\x00\x75\x00\x74\x00\x69\
\x00\x6f\x00\x6e\x00\x20\x00\x48\x00\x69\x00\x73\x00\x74\x00\x6f\x00\x67\x00\x72\x00\x61\x00\x6d\x00\x2d\x00\x31\x00\x30\x00\x30\
\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x13\
\x01\xd6\xcc\xa7\
\x00\x4f\
\x00\x70\x00\x65\x00\x6e\x00\x20\x00\x46\x00\x6f\x00\x6c\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\x00\x70\
\x00\x6e\x00\x67\
\x00\x0f\
\x0f\xe5\xf9\x67\
\x00\x41\
\x00\x70\x00\x70\x00\x72\x00\x6f\x00\x76\x00\x61\x00\x6c\x00\x2d\x00\x39\x00\x36\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x12\
\x07\x58\x78\xe7\
\x00\x4d\
\x00\x69\x00\x63\x00\x72\x00\x6f\x00\x6d\x00\x65\x00\x74\x00\x65\x00\x72\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x10\
\x0c\x59\x2f\xe7\
\x00\x44\
\x00\x6f\x00\x63\x00\x75\x00\x6d\x00\x65\x00\x6e\x00\x74\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x08\xc8\x5c\x67\
\x00\x53\
\x00\x61\x00\x76\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x09\x79\x2b\x87\
\x00\x44\
\x00\x69\x00\x70\x00\x6c\x00\x6f\x00\x6d\x00\x61\x00\x20\x00\x31\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x0d\
\x02\x1a\x32\x27\
\x00\x54\
\x00\x6f\x00\x20\x00\x44\x00\x6f\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x12\
\x06\xb0\xee\x67\
\x00\x45\
\x00\x64\x00\x69\x00\x74\x00\x20\x00\x49\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x08\
\x03\xc6\x5d\xa7\
\x00\x50\
\x00\x6c\x00\x75\x00\x73\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x06\
\x05\xc3\x57\x47\
\x00\x55\
\x00\x70\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x14\
\x07\xfc\xe1\xc7\
\x00\x41\
\x00\x64\x00\x64\x00\x72\x00\x65\x00\x73\x00\x73\x00\x20\x00\x42\x00\x6f\x00\x6f\x00\x6b\x00\x2d\x00\x31\x00\x30\x00\x30\x00\x2e\
\x00\x70\x00\x6e\x00\x67\
\x00\x0d\
\x03\xc0\x61\x87\
\x00\x43\
\x00\x61\x00\x6e\x00\x63\x00\x65\x00\x6c\x00\x2d\x00\x39\x00\x36\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0e\
\x01\xdd\xf0\x67\
\x00\x42\
\x00\x61\x00\x72\x00\x63\x00\x6f\x00\x64\x00\x65\x00\x2d\x00\x39\x00\x36\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0b\
\x05\x8d\xbf\x27\
\x00\x51\
\x00\x49\x00\x5f\x00\x4c\x00\x6f\x00\x67\x00\x6f\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x10\x00\x02\x00\x00\x00\x14\x00\x00\x00\x03\
\x00\x00\x01\x20\x00\x00\x00\x00\x00\x01\x00\x00\x22\x7a\
\x00\x00\x02\xbe\x00\x00\x00\x00\x00\x01\x00\x00\x66\x59\
\x00\x00\x01\xfe\x00\x00\x00\x00\x00\x01\x00\x00\x4a\xd5\
\x00\x00\x00\x7a\x00\x00\x00\x00\x00\x01\x00\x00\x0f\xf6\
\x00\x00\x02\x9e\x00\x00\x00\x00\x00\x01\x00\x00\x61\x87\
\x00\x00\x02\x48\x00\x00\x00\x00\x00\x01\x00\x00\x56\x7d\
\x00\x00\x00\xa4\x00\x00\x00\x00\x00\x01\x00\x00\x19\xed\
\x00\x00\x00\xd0\x00\x00\x00\x00\x00\x01\x00\x00\x1b\x9b\
\x00\x00\x02\xe0\x00\x00\x00\x00\x00\x01\x00\x00\x67\xa0\
\x00\x00\x02\x5e\x00\x00\x00\x00\x00\x01\x00\x00\x57\x6b\
\x00\x00\x02\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x4e\x1b\
\x00\x00\x00\x64\x00\x00\x00\x00\x00\x01\x00\x00\x0b\xde\
\x00\x00\x01\x70\x00\x00\x00\x00\x00\x01\x00\x00\x2b\xc6\
\x00\x00\x02\x70\x00\x00\x00\x00\x00\x01\x00\x00\x5b\x7d\
\x00\x00\x00\x22\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x06\x96\
\x00\x00\x01\xc0\x00\x00\x00\x00\x00\x01\x00\x00\x39\x60\
\x00\x00\x01\xd6\x00\x00\x00\x00\x00\x01\x00\x00\x3e\xdf\
\x00\x00\x01\x9a\x00\x00\x00\x00\x00\x01\x00\x00\x37\x34\
\x00\x00\x01\x4c\x00\x00\x00\x00\x00\x01\x00\x00\x26\xb6\
"
def qInitResources():
    """Register the embedded Qt resource data with the Qt resource system."""
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded Qt resource data from the Qt resource system."""
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
# Resources are registered as a side effect of importing this generated module.
qInitResources()
|
President3D/Quality-SPC
|
src/InResources_rc.py
|
Python
|
gpl-3.0
| 124,824 | 0.00004 |
#
# Copyright 2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import os
from multiprocessing.managers import BaseManager, RemoteError
import logging
import threading
from vdsm import constants, utils
# Process-wide singleton proxy to the supervdsm service; created lazily by
# getProxy(), with the lock guarding its one-time construction.
_g_singletonSupervdsmInstance = None
_g_singletonSupervdsmInstance_lock = threading.Lock()
# Unix socket on which the supervdsm service listens.
ADDRESS = os.path.join(constants.P_VDSM_RUN, "svdsm.sock")
class _SuperVdsmManager(BaseManager):
    """Multiprocessing manager used to talk to the supervdsm service.

    Remote callables ('instance', 'open') are registered on it in
    SuperVdsmProxy._connect().
    """
    pass
class ProxyCaller(object):
    """Callable that forwards one named method call to the supervdsm proxy.

    A RemoteError from the manager is translated into a RuntimeError after
    triggering a reconnect, so the next call has a fresh connection.
    """
    def __init__(self, supervdsmProxy, funcName):
        self._supervdsmProxy = supervdsmProxy
        self._funcName = funcName
    def __call__(self, *args, **kwargs):
        try:
            remoteMethod = getattr(self._supervdsmProxy._svdsm,
                                   self._funcName)
            return remoteMethod(*args, **kwargs)
        except RemoteError:
            # Re-establish the connection for subsequent calls, but report
            # this call's failure to the caller.
            self._supervdsmProxy._connect()
            raise RuntimeError(
                "Broken communication with supervdsm. Failed call to %s"
                % self._funcName)
class SuperVdsmProxy(object):
    """
    A wrapper around all the supervdsm init stuff.

    Connects to the supervdsm service over the Unix socket ADDRESS and
    transparently forwards unknown attribute access as remote calls
    (see __getattr__ / ProxyCaller).
    """
    _log = logging.getLogger("SuperVdsmProxy")
    def __init__(self):
        self.proxyLock = threading.Lock()
        self._manager = None
        self._svdsm = None
        # Connect eagerly; on failure utils.panic() aborts the process.
        self._connect()
    def open(self, *args, **kwargs):
        # Delegate to the manager's registered remote 'open' callable.
        return self._manager.open(*args, **kwargs)
    def _connect(self):
        self._manager = _SuperVdsmManager(address=ADDRESS, authkey='')
        # 'instance' and 'open' are the callables exposed by the service.
        self._manager.register('instance')
        self._manager.register('open')
        self._log.debug("Trying to connect to Super Vdsm")
        try:
            # Retry for up to 60 seconds (3 attempts) before giving up.
            utils.retry(self._manager.connect, Exception, timeout=60, tries=3)
        except Exception as ex:
            msg = "Connect to supervdsm service failed: %s" % ex
            utils.panic(msg)
        self._svdsm = self._manager.instance()
    def __getattr__(self, name):
        # Any unknown attribute becomes a remote-call wrapper.
        return ProxyCaller(self, name)
def getProxy():
    """Return the process-wide SuperVdsmProxy, creating it on first use.

    Uses double-checked locking so the common (already-created) path does
    not take the lock.
    """
    global _g_singletonSupervdsmInstance
    if _g_singletonSupervdsmInstance is None:
        with _g_singletonSupervdsmInstance_lock:
            # Re-check under the lock: another thread may have won the race.
            if _g_singletonSupervdsmInstance is None:
                _g_singletonSupervdsmInstance = SuperVdsmProxy()
    return _g_singletonSupervdsmInstance
|
edwardbadboy/vdsm-ubuntu
|
vdsm/supervdsm.py
|
Python
|
gpl-2.0
| 3,064 | 0.000326 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of kryptomime, a Python module for email kryptography.
# Copyright © 2013,2014 Thomas Tanner <tanner@gmx.net>
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the included LICENSE file for details.
#______________________________________________________________________________
from __future__ import absolute_import
from __future__ import print_function
import setuptools, sys
from setuptools.command.test import test as TestCommand
# Package metadata, reused in the setup() call below.
__author__ = "Thomas Tanner"
__contact__ = 'tanner@gmx.net'
__url__ = 'https://github.com/ttanner/kryptomime'
class PyTest(TestCommand):
    """setuptools 'test' command that runs the suite with py.test.

    Invoked as ``python setup.py test``; extra pytest arguments can be
    passed with ``--pytest-args``.
    """
    user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")]
    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = None
    def finalize_options(self):
        TestCommand.finalize_options(self)
        self.test_args = []
        self.test_suite = True
    def run_tests(self):
        # Import here, because outside of the command the eggs aren't
        # loaded yet.
        import pytest, sys
        args = [self.pytest_args] if self.pytest_args else []
        # Run with coverage for the 'kryptomime' package and exit with
        # pytest's status code.
        errno = pytest.main(['--cov-config','.coveragerc','--cov','kryptomime']+args)
        sys.exit(errno)
subproc = ["subprocess32"] if sys.version_info[0] == 2 else []
# Build/register the distribution.  Reads the long description from
# README.rst, so setup.py must be run from the project root.
setuptools.setup(
    name = "kryptomime",
    description="Python support for E-Mail kryptography",
    long_description=open('README.rst').read(),
    license="GPLv3+",
    version='0.5.0',
    author=__author__,
    author_email=__contact__,
    maintainer=__author__,
    maintainer_email=__contact__,
    url=__url__,
    package_dir={'kryptomime': 'kryptomime'},
    packages=['kryptomime'],
    package_data={'': ['README.rst', 'COPYING.txt', 'requirements.txt']},
    # Test dependencies and the custom 'test' command defined above.
    tests_require=['pytest-cov','pytest-pythonpath'],
    cmdclass = {'test': PyTest},
    # subprocess32 is appended only on Python 2 (see `subproc` above).
    install_requires=['gnupg>=1.3.2','six>=1.8']+subproc,
    extras_require={'docs': ["Sphinx>=1.1", "repoze.sphinx"]},
    platforms="Linux, BSD, OSX, Windows",
    download_url="https://github.com/ttanner/kryptomime/archive/master.zip",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Topic :: Software Development :: Libraries :: Python Modules",
        "Topic :: Security :: Cryptography",
        "Topic :: Utilities",]
    )
|
ttanner/kryptomime
|
setup.py
|
Python
|
lgpl-3.0
| 3,207 | 0.005926 |
# coding:utf-8
'''
8b d8 88
Y8, ,8P ,d 88
Y8, ,8P 88 88
"8aa8" ,adPPYba, 88 88 MM88MMM 88,dPPYba,
`88' a8" "8a 88 88 88 88P' "8a
88 8b d8 88 88 88 88 88
88 "8a, ,a8" "8a, ,a88 88, 88 88
88 `"YbbdP"' `"YbbdP'Y8 "Y888 88 88
88888888ba 88
88 "8b 88
88 ,8P 88
88aaaaaa8P' 8b,dPPYba, ,adPPYba, ,adPPYYba, ,adPPYb,88
88""""""8b, 88P' "Y8 a8P_____88 "" `Y8 a8" `Y88
88 `8b 88 8PP""""""" ,adPPPPP88 8b 88
88 a8P 88 "8b, ,aa 88, ,88 "8a, ,d88
88888888P" 88 `"Ybbd8"' `"8bbdP"Y8 `"8bbdP"Y8
'''
import os
import requests
import json
import sys
import re
import datetime
import time
from bs4 import BeautifulSoup
from tabulate import tabulate
reload(sys)
sys.setdefaultencoding('utf8')
class KillMail_Character(object):
    """Scraper for a pilot/corp/alliance killboard page on
    kb.ceve-market.org.

    `url` is the entity's killboard URL; the "losts" and "atk" sub-page
    URLs are derived from it in __init__.
    """
    def __init__(self, url):
        # Check network connectivity and confirm the entity exists.
        self.URL_kill = url
        global client
        client = requests.session()
        r = client.get(self.URL_kill)
        if r.status_code == requests.codes.ok:
            print '网络连接正常'
            soup = BeautifulSoup(r.text, 'html.parser')
            ans = soup.find('div', class_='content')
            if ans.span:
                print '找到相关', url.split('/')[-3], ans.span.text
                self.URL_lost = self.URL_kill + '/losts/'
                self.URL_atk = self.URL_kill + '/atk/'
            else:
                # Entity not found on the killboard: bail out.
                print '请检查相关人物或军团联盟URL'
                sys.exit()
        else:
            print '网络连接错误'
            r.raise_for_status()
    def latest_km(self, time=1):
        # Return basic KM info from the first page of the "kills", "losses"
        # and "assisted kills" sections of the entity page.
        # NOTE(review): the `time` parameter is unused (and shadows the
        # module of the same name); kept for interface compatibility.
        killmail = {'kill': [], 'lost': [], 'atk': []}
        if self.URL_kill:
            killmail['kill'] = self.get_km_basic_info(self.URL_kill)
            print '成功获取最新的击杀KM', len(killmail['kill'])
            killmail['lost'] = self.get_km_basic_info(self.URL_lost)
            print '成功获取最新的损失KM', len(killmail['lost'])
            killmail['atk'] = self.get_km_basic_info(self.URL_atk)
            print '成功获取最新的参与击杀KM', len(killmail['atk'])
        else:
            print 'ERROR'
        return killmail
    def get_km_basic_info(self, url):
        # Return the basic info of every killmail row listed on `url`.
        r = client.get(url)
        soup = BeautifulSoup(r.text, 'html.parser')
        a = soup.find('table',id='kbtable').find('tbody').find_all('tr')
        info = []
        url_info = []
        killmail = []
        for km in a[:-1]:
            for n in km.find_all('td'):
                # A link to /pilot/0 marks an unknown pilot.
                if n.find_all(href=re.compile("/pilot/0")) != []:
                    info.append('未知角色')
                # Grey text marks a missing alliance; a /corp/None link
                # marks a missing corporation.
                if n.find_all(style='color:grey') != []:
                    null_alli_tag = 1
                else:
                    null_alli_tag = 0
                if n.find_all(href=re.compile("/corp/None")) != []:
                    null_corp_tag = 1
                else:
                    null_corp_tag = 0
                for text in n.text.split('\n'):
                    if len(text) != 0:
                        if text == '无联盟' and null_alli_tag == 1:
                            info.append('无联盟(NULL)')
                            if null_corp_tag == 1:
                                info.append('无军团(NULL)')
                        elif text == '未知':
                            pass
                        else:
                            info.append(text)
            # The KM detail URL is embedded in the row's onclick attribute.
            url_info.append(km['onclick'].split('\'')[-2])
        # Each row contributes exactly 10 text fields, popped in page order.
        while info:
            killmail.append({
                'victim_ship': info.pop(0),
                'victim_shiptype': info.pop(0),
                'victim': info.pop(0),
                'victim_alli': info.pop(0),
                'victim_corp': info.pop(0),
                'final_blow': info.pop(0),
                'alliance': info.pop(0),
                'corp': info.pop(0),
                'location': info.pop(0),
                'time': info.pop(0),
                'url': url_info.pop(0)
            })
        return killmail
class KillMail_Details(object):
    """Scraper for a single killmail detail page: victim, final blow,
    damage composition, involved parties, fitting and attribute panels."""
    def __init__(self, url):
        if url:
            self.url = 'http://kb.ceve-market.org' + url
            global client
            global soup
            client = requests.session()
            r = client.get(self.url)
            if r.status_code == requests.codes.ok:
                print '网络连接正常'
                soup = BeautifulSoup(r.text, 'html.parser')
                # The page is organised in 'customjumbotron' panels:
                # [0] victim, [1] final blow, [2] composition, [3] involved.
                self.data_set = soup.find_all('div', class_='customjumbotron')
            else:
                print '网络连接错误'
                r.raise_for_status()
        else:
            print 'KM对应URL不存在或错误'
            sys.exit()
    def km_victim_detail(self):
        # Return the victim's info.
        info = map(lambda x: x.strip(), filter(
            lambda x: len(x.strip()) > 0, self.data_set[0].text.split('\n')))
        info.pop(info.index('相关击坠'))
        # Slices strip the leading label characters off each field.
        killmail_info = {
            'victim_character': info.pop(0)[2:],
            'corp': info.pop(0)[2:],
            'alliance': info.pop(0)[2:],
            'ship': info.pop(0)[2:],
            'time': info.pop(0)[2:],
            'location': info.pop(0)[2:],
            'damage_taken': info.pop(0)[4:],
            'value': info.pop(0)[4:]
        }
        return killmail_info
    def km_lastblow_detail(self):
        # Return the final-blow character's info.
        info = map(lambda x: x.strip(), filter(
            lambda x: len(x.strip()) > 0, self.data_set[1].text.split('\n')))
        killmail_info = {
            'last_blow_character': info.pop(0),
            'corp': info.pop(0),
            'alliance': info.pop(0)
        }
        # Ship and weapon come from the img tags' title attributes.
        info = map(lambda x: x.get('title'), filter(
            lambda x: x.get('title'), self.data_set[1].find_all('img')))
        killmail_info.update({'ship': info.pop(0)})
        killmail_info.update({'weapon': info.pop(0)})
        return killmail_info
    def km_comp_detail(self):
        # Damage composition of the KM, grouped by alliance and by ship.
        info_set = self.data_set[2].find_all('td')
        info = map(lambda x: x.strip(), filter(
            lambda x: len(x.strip()) > 0, info_set[0].text.split('\n')))
        killmail_info = {'com_alliance': info}
        info = map(lambda x: x.strip(), filter(
            lambda x: len(x.strip()) > 0, info_set[1].text.split('\n')))
        killmail_info.update({'com_ship': info})
        return killmail_info
    def km_involve_detail(self):
        # Per-attacker info for every involved party.
        info = []
        killmail_info = []
        info = map(lambda x: x.strip(), filter(
            lambda x: len(x.strip()) > 0, self.data_set[3].text.split('\n')))
        info.pop(info.index('最后一击!'))
        while info:
            killmail_info.append({
                'character': info.pop(0),
                'corp': info.pop(0),
                'alliance': info.pop(0),
                'damage': info.pop(0)
            })
        # Matching ship and weapon info from the img title attributes,
        # two entries per attacker in the same order.
        info = map(lambda x: x.get('title'), filter(
            lambda x: x.get('title'), self.data_set[3].find_all('img')))
        for n in xrange(len(killmail_info)):
            killmail_info[n].update({
                'ship': info.pop(0),
                'weapon': info.pop(0)
            })
        return killmail_info
    def km_fit_detail(self):
        # High/mid/low slot fitting info plus the loss-value breakdown.
        data_set = soup.find(id='kl-detail-shipdetails')
        info = []
        killmail = {}
        # Section header keywords ("fitting", "drone bay", "cargo hold",
        # "other", "loss value") start a new sub-list; everything else is
        # appended to the current section.
        for text in data_set.text.split('\n'):
            if len(text.strip()) != 0:
                if "装配" in text.strip() or "无人机挂舱" in text.strip() \
                        or "货舱" in text.strip() or "其他" in text.strip() or "损失价值" in text.strip():
                    info.append([])
                if text.strip() != "价值":
                    info[-1].append(text.strip())
        # All sections but the last are item lists: (item, count, value).
        for n in info[:-1]:
            slot_name = n.pop(0)
            killmail[slot_name] = []
            while n:
                killmail[slot_name].append({
                    'item': n.pop(0),
                    'num': n.pop(0),
                    'value': n.pop(0)
                })
        # The last section is the (type, value) loss-value table.
        killmail['values'] = []
        while info[-1]:
            killmail['values'].append({
                'type': info[-1].pop(0),
                'value': info[-1].pop(0)
            })
        return killmail
    def km_fit_info(self):
        # Detailed fitted-ship attributes, printed section by section.
        data_set = soup.find('div', id='attr_Panel').find(
            'div', class_='row col-md-12')
        data_set = filter(
            lambda x: len(x.strip()) != 0, data_set.text.split('\n'))
        # Engineering section, 6 lines: header, capacitor, recharge rate,
        # CPU, power grid, rig calibration.
        for thing in data_set[0:6]:
            print thing
        # Offense section, 4 lines: header, turret DPS, missile DPS,
        # smartbomb DPS.
        for thing in data_set[6:10]:
            print thing
        # Defense section, 20 lines: header, effective-HP summary lines,
        # then 5 lines each for shield/armor/hull:
        # HP, EM, thermal, kinetic, explosive.
        for thing in data_set[10:30]:
            print thing
        # Repair section, 9 lines: header, shield repair x2,
        # armor repair x2, hull repair x2, passive shield recharge x2.
        for thing in data_set[30:39]:
            print thing
        # Navigation section, 6 lines: header, max velocity, agility,
        # align time, warp speed, warp-disruption immunity strength.
        for thing in data_set[39:45]:
            print thing
        # Targeting section, 5 lines: header, max locked targets / sensor
        # strength, lock range, scan resolution, signature radius.
        for thing in data_set[45:50]:
            print thing
class KillMail_Search():
# seems broken
def __init__(self):
self.url = 'http://kb.ceve-market.org/ajax_search/'
global client
global soup
client = requests.session()
r = client.get(self.url)
if r.status_code == requests.codes.ok:
print '网络连接正常'
soup = BeautifulSoup(r.text, 'html.parser')
self.data_set = soup.find_all('div', class_='customjumbotron')
else:
print '网络连接错误'
r.raise_for_status()
def search(self, type, name):
URL = self.url
client.get(URL)
csrftoken_cookies = client.cookies['csrftoken']
end_time = time.strftime('%Y-%m-%d %H:%M')
start_time = datetime.datetime.now() + datetime.timedelta(days=-1)
search_data = dict(
searchtype=type,
name=name,
type='lost',
shiptype='shiptype',
systemtype='sys',
ship='',
system='',
starttime='',
endtime='',
prev='',
next='',
csrfmiddlewaretoken=csrftoken_cookies,
)
r = client.post(URL, data=search_data, headers=dict(Referer=URL))
soup = BeautifulSoup(r.text, 'html.parser')
a = soup.find('table',id='kbtable').find('tbody').find_all('tr')
info = []
url_info = []
killmail = []
for km in a[:-1]:
for n in km.find_all('td'):
if n.find_all(href=re.compile("/pilot/0")) != []:
info.append('未知角色')
if n.find_all(style='color:grey') != []:
null_alli_tag = 1
else:
null_alli_tag = 0
if n.find_all(href=re.compile("/corp/None")) != []:
null_corp_tag = 1
else:
null_corp_tag = 0
for text in n.text.split('\n'):
if len(text) != 0:
if text == '无联盟' and null_alli_tag == 1:
info.append('无联盟(NULL)')
if null_corp_tag == 1:
info.append('无军团(NULL)')
elif text == '未知':
pass
else:
info.append(text)
url_info.append(km['onclick'].split('\'')[-2])
while info:
killmail.append({
'victim_ship': info.pop(0),
'victim_shiptype': info.pop(0),
'victim': info.pop(0),
'victim_alli': info.pop(0),
'victim_corp': info.pop(0),
'final_blow': info.pop(0),
'alliance': info.pop(0),
'corp': info.pop(0),
'location': info.pop(0),
'time': info.pop(0),
'url': url_info.pop(0)
})
return killmail
class KillMail_LeaderBoard():
def __init__(self):
self.url = 'http://kb.ceve-market.org/rank/'
global client
client = requests.session()
r = client.get(self.url)
if r.status_code == requests.codes.ok:
print '网络连接正常'
soup = BeautifulSoup(r.text, 'html.parser')
self.data_set = soup.find_all('div', class_='col-md-6')
else:
print '网络连接错误'
r.raise_for_status()
def top_win_character(self):
# 30日个人综合战利排名
info = filter(lambda x: x, self.data_set[0].text.split('\n'))[1:]
organized_info = []
while info:
organized_info.append({
'rank': info.pop(0),
'name': info.pop(0).decode('utf8'),
'value': info.pop(0)
})
return organized_info
def top_lose_character(self):
# 30日个人损失排名
info = filter(lambda x: x, self.data_set[1].text.split('\n'))[1:]
organized_info = []
while info:
organized_info.append({
'rank': info.pop(0),
'name': info.pop(0),
'value': info.pop(0)
})
return organized_info
def top_win_corp(self):
# 30日军团综合战利排名
info = filter(lambda x: x, self.data_set[2].text.split('\n'))[1:]
organized_info = []
while info:
organized_info.append({
'rank': info.pop(0),
'name': info.pop(0),
'value': info.pop(0)
})
return organized_info
def top_lose_corp(self):
# 30日军团损失排名
info = filter(lambda x: x, self.data_set[3].text.split('\n'))[1:]
organized_info = []
while info:
organized_info.append({
'rank': info.pop(0),
'name': info.pop(0),
'value': info.pop(0)
})
return organized_info
def top_win_alliance(self):
# 30日联盟综合战利排名
info = filter(lambda x: x, self.data_set[4].text.split('\n'))[1:]
organized_info = []
while info:
organized_info.append({
'rank': info.pop(0),
'name': info.pop(0),
'value': info.pop(0)
})
return organized_info
def top_lose_alliance(self):
# 30日联盟损失排名
info = filter(lambda x: x, self.data_set[5].text.split('\n'))[1:]
organized_info = []
while info:
organized_info.append({
'rank': info.pop(0),
'name': info.pop(0),
'value': info.pop(0)
})
return organized_info
def top_blackhand_corp(self):
info = filter(lambda x: x, self.data_set[6].text.split('\n'))[1:]
organized_info = []
while info:
organized_info.append({
'rank': info.pop(0),
'name': info.pop(0),
'value': info.pop(0),
'ratio': info.pop(0)
})
return organized_info
def top_blackhand_alliance(self):
info = filter(lambda x: x, self.data_set[7].text.split('\n'))[1:]
organized_info = []
while info:
organized_info.append({
'rank': info.pop(0),
'name': info.pop(0),
'value': info.pop(0),
'ratio': info.pop(0)
})
return organized_info
def losers_233(self):
r = client.get('http://kb.ceve-market.org/233/')
soup = BeautifulSoup(r.text, 'html.parser')
data_set = soup.find_all('tbody')
info = []
url_info = []
killmail = []
for n in data_set[0].find_all('td'):
if n.find_all(href=re.compile("/pilot/0")) != []:
info.append('未知角色')
if n.find_all(style='color:grey') != []:
null_alli_tag = 1
if n.find_all(href=re.compile("/corp/None")) != []:
null_corp_tag = 1
for text in n.text.split('\n'):
if len(text) != 0:
if text == '无联盟' and null_alli_tag == 1:
info.append('无联盟(NULL)')
if null_corp_tag == 1:
info.append('无军团(NULL)')
elif text == '未知':
pass
else:
info.append(text)
for n in data_set[0].find_all('tr'):
url_info.append(n['onclick'].split('\'')[-2])
while info:
killmail.append({
'victim_ship': info.pop(0),
'victim_shiptype': info.pop(0),
'victim': info.pop(0),
'victim_alli': info.pop(0),
'victim_corp': info.pop(0),
'final_blow': info.pop(0),
'alliance': info.pop(0),
'corp': info.pop(0),
'location': info.pop(0),
'time': info.pop(0),
'value': info.pop(0)
})
return killmail
class KillMail_FrontPage():
    """Scraper for the killboard front page, with next-page navigation."""
    def __init__(self):
        self.base_url = 'http://kb.ceve-market.org/'
        self.url = self.base_url
        global client
        client = requests.session()
    def get_km_basic_info(self, url):
        # Return the basic info of every killmail row listed on `url`.
        r = client.get(url)
        soup = BeautifulSoup(r.text, 'html.parser')
        a = soup.find('table',id='kbtable').find('tbody').find_all('tr')
        info = []
        url_info = []
        killmail = []
        for km in a[:-1]:
            for n in km.find_all('td'):
                # A link to /pilot/0 marks an unknown pilot; grey text a
                # missing alliance; a /corp/None link a missing corp.
                if n.find_all(href=re.compile("/pilot/0")) != []:
                    info.append('未知角色')
                if n.find_all(style='color:grey') != []:
                    null_alli_tag = 1
                else:
                    null_alli_tag = 0
                if n.find_all(href=re.compile("/corp/None")) != []:
                    null_corp_tag = 1
                else:
                    null_corp_tag = 0
                for text in n.text.split('\n'):
                    if len(text) != 0:
                        if text == '无联盟' and null_alli_tag == 1:
                            info.append('无联盟(NULL)')
                            if null_corp_tag == 1:
                                info.append('无军团(NULL)')
                        elif text == '未知':
                            pass
                        else:
                            info.append(text)
            # The KM detail URL is embedded in the row's onclick attribute.
            url_info.append(km['onclick'].split('\'')[-2])
        # Each row contributes exactly 10 text fields, popped in page order.
        while info:
            killmail.append({
                'victim_ship': info.pop(0),
                'victim_shiptype': info.pop(0),
                'victim': info.pop(0),
                'victim_alli': info.pop(0),
                'victim_corp': info.pop(0),
                'final_blow': info.pop(0),
                'alliance': info.pop(0),
                'corp': info.pop(0),
                'location': info.pop(0),
                'time': info.pop(0),
                'url': url_info.pop(0)
            })
        return killmail
    def update_url(self, url):
        # Follow the "下一页" (next page) link on `url` and return its
        # absolute URL; returns `url` unchanged if no such link exists.
        client = requests.session()
        r = client.get(url)
        soup = BeautifulSoup(r.text, 'html.parser')
        a = soup.find_all('a')
        for n in a:
            if '下一页' in n.text.strip():
                url = self.base_url + n.get('href')
                break
        return url
class KillMail_spider(object):
    """Collect every killmail for one calendar day from the killboard.

    The listing at kb.ceve-market.org is paginated newest-first, so the
    crawl pages forward until it reaches the first page containing the
    target day, then keeps paging until a page whose oldest entry
    predates the day. Killmails accumulate in ``<date>.json`` and the
    visited page URLs in ``<date>_url.json`` so the next (earlier) day's
    crawl can resume where this one stopped.
    NOTE(review): constructing an instance starts the whole crawl
    (network + file writes) as a side effect of ``__init__``.
    """
    def __init__(self, date):
        self.spider = KillMail_FrontPage()
        # Parse the selected "YYYY-MM-DD" date string into the day's bounds
        # (midnight .. 23:59:59.999999).
        self.date = date
        self.start_date = datetime.datetime.strptime(date, "%Y-%m-%d")
        self.end_date = datetime.datetime.combine(
            self.start_date, datetime.time.max)
        print self.start_date, self.end_date
        # Reset the day's output file to an empty JSON list.
        with open(self.date + '.json', 'w') as f:
            f.write(json.dumps([], ensure_ascii=False, indent=1))
        # Check for the URL file of the chronologically *next* day (date+1):
        # because the board lists newest-first, the crawl that covered date+1
        # ended on the page where this day's mails begin, so resume there.
        self.last_date = (
            self.start_date+datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        if os.path.isfile(self.last_date + '_url.json'):
            print "find last day's url file"
            print "loading its last url"
            with open(self.last_date + '_url.json', 'r') as f:
                url_data = json.load(f)
            self.find_start_url(url_data[-1])
        else:
            print "cannot find url file"
            print "start from first page"
            self.find_start_url('http://kb.ceve-market.org/')
    def find_start_url(self, url):
        """Recursively page forward until a page holding the target day's
        killmails is found, then begin collecting from that page."""
        info = self.spider.get_km_basic_info(url)
        # Decide whether this page reaches the target day by the date of the
        # oldest (last listed) killmail on the page.
        last_km_date = datetime.datetime.strptime(
            info[-1]['time'].split(' ')[0], "%Y-%m-%d")
        if last_km_date != self.start_date:
            print 'cannot find any validate killmaills in this page, last km is', info[-1]['time']
            next_url = self.spider.update_url(url)
            self.find_start_url(next_url)
        else:
            print 'Find validate killmaills in this page'
            print 'Now url has been set to', url
            self.url = url
            self.url_set = [self.url]
            print '==============================='
            print 'Now start to find url set'
            self.find_end_url(self.url)
    def find_end_url(self, url):
        """Recursively fetch pages, storing each page's killmails, until a
        page whose oldest mail falls outside the target day is reached;
        then persist the accumulated page URLs to ``<date>_url.json``."""
        info = self.spider.get_km_basic_info(url)
        self.fetch_km(url)
        # Stop once the oldest mail on the page is no longer on the target day.
        last_km_date = datetime.datetime.strptime(
            info[-1]['time'].split(' ')[0], "%Y-%m-%d")
        if last_km_date == self.start_date:
            print 'no other killmail in this page, last km is', info[-1]['time']
            next_url = self.spider.update_url(url)
            self.url_set.append(next_url)
            self.find_end_url(next_url)
        else:
            print 'find validate killmaills in this page'
            with open(self.date + '_url.json', 'w') as f:
                f.write(json.dumps(self.url_set, ensure_ascii=False, indent=1))
            print 'Store the urls in file'
    def url_fetch(self):
        """Re-fetch killmails for every page URL saved in <date>_url.json."""
        # Does the URL file for this date exist?
        file_name = self.date + '_url.json'
        if os.path.isfile(file_name):
            print "find url file"
            with open(file_name, 'r') as f:
                url_data = json.load(f)
            print 'start fetching'
            for n in url_data:
                self.fetch_km(n)
                print 'loading next page...'
        else:
            print "cannot find today's file, start processing"
    def fetch_km(self, url):
        """Scrape one listing page, trim it to the target day, and append
        the surviving killmails to ``<date>.json``."""
        file_name = self.date + '.json'
        info = self.spider.get_km_basic_info(url)
        info = self.check_km(info)
        if os.path.isfile(file_name):
            # Append by re-writing the file with old + new records combined.
            with open(file_name, 'r') as f:
                old_data = json.load(f)
            with open(self.date + '.json', 'w') as f:
                f.write(
                    json.dumps(old_data+info, ensure_ascii=False, indent=1))
            print 'now we have', len(old_data + info), 'killmails.'
        else:
            with open(self.date + '.json', 'w') as f:
                f.write(json.dumps(info, ensure_ascii=False, indent=1))
    def check_km(self, info):
        # Compare the dates of the first and last killmail on the page; if
        # they differ, the page straddles a day boundary and must be trimmed
        # at either its head or its tail.
        if datetime.datetime.strptime(info[0]['time'].split(' ')[0], "%Y-%m-%d") > datetime.datetime.strptime(info[-1]['time'].split(' ')[0], "%Y-%m-%d"):
            print 'contain different km'
            if datetime.datetime.strptime(info[-1]['time'].split(' ')[0], "%Y-%m-%d") != self.start_date:
                print 'this is the end'
                return self.km_end(info)
            else:
                print 'ths is the front'
                return self.km_front(info)
        else:
            print 'this is the same day\'s page'
            return info
    def km_front(self, info):
        # Trim the head of the queue: keep only mails strictly before the
        # day's last instant (end_date is 23:59:59.999999, so any mail with
        # a second-resolution stamp on the day passes).
        data = []
        for n in info:
            if datetime.datetime.strptime(n['time'].strip(), "%Y-%m-%d %H:%M:%S") < self.end_date:
                data.append(n)
        return data
    def km_end(self, info):
        # Trim the tail of the queue: keep only mails strictly after midnight.
        # NOTE(review): a mail stamped exactly 00:00:00 is excluded by the
        # strict ``>`` -- confirm this is intended.
        data = []
        for n in info:
            if datetime.datetime.strptime(n['time'].strip(), "%Y-%m-%d %H:%M:%S") > self.start_date:
                data.append(n)
        return data
|
YouthBread/CEVE-KB
|
kb.py
|
Python
|
gpl-2.0
| 26,992 | 0.000664 |
#!/usr/bin/true
#
# download.py - part of autospec
# Copyright (C) 2018 Intel Corporation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
from io import BytesIO
import pycurl
from util import print_fatal
def do_curl(url, dest=None, post=None, is_fatal=False):
    """
    Perform a curl operation for `url`.

    If `post` is set, a POST is performed for `url` with fields taken from the
    specified value. Otherwise a GET is performed for `url`. If `dest` is set,
    the curl response (if successful) is written to the specified path and the
    path is returned. Otherwise a successful response is returned as a BytesIO
    object. If `is_fatal` is `True` (`False` is the default), a GET failure,
    POST failure, or a failure to write to the path specified for `dest`
    results in the program exiting with an error. Otherwise, `None` is returned
    for any of those error conditions.
    """
    curl = pycurl.Curl()
    curl.setopt(curl.URL, url)
    if post:
        # A truthy `post` switches the request from GET to POST.
        curl.setopt(curl.POSTFIELDS, post)
    # Transfer behavior: follow redirects, treat HTTP errors as failures,
    # and abort when throughput stays below 1 B/s for 10 seconds.
    for option, value in ((curl.FOLLOWLOCATION, True),
                          (curl.FAILONERROR, True),
                          (curl.CONNECTTIMEOUT, 10),
                          (curl.TIMEOUT, 600),
                          (curl.LOW_SPEED_LIMIT, 1),
                          (curl.LOW_SPEED_TIME, 10)):
        curl.setopt(option, value)
    response = BytesIO()
    curl.setopt(curl.WRITEDATA, response)
    try:
        curl.perform()
    except pycurl.error as err:
        if is_fatal:
            print_fatal("Unable to fetch {}: {}".format(url, err))
            sys.exit(1)
        return None
    finally:
        curl.close()
    # No destination requested: hand back the in-memory response.
    if not dest:
        return response
    try:
        with open(dest, 'wb') as outfile:
            outfile.write(response.getvalue())
    except IOError as err:
        # Don't leave a partially-written file behind.
        if os.path.exists(dest):
            os.unlink(dest)
        if is_fatal:
            print_fatal("Unable to write to {}: {}".format(dest, err))
            sys.exit(1)
        return None
    return dest
|
clearlinux/autospec
|
autospec/download.py
|
Python
|
gpl-3.0
| 2,543 | 0 |
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
from autopilot.matchers import Eventually
from testtools.matchers import Equals
from bmicalc import tests
class MainViewTestCase(tests.BaseTestCase):
    """Autopilot UI tests for the application's main view.

    The `setUp` override that only called `super().setUp()` was removed:
    the inherited `tests.BaseTestCase.setUp` runs regardless, so the
    override was dead code.
    """

    def test_click_button(self):
        """Clicking the main button must eventually update the label."""
        # Find and click the button
        button = self.app.main_view.get_button()
        self.app.pointing_device.click_object(button)
        # Make an assertion about what should happen: `Eventually` polls
        # until the app refreshes the label text or the matcher times out.
        label = self.app.main_view.get_label()
        self.assertThat(label.text, Eventually(Equals('..world!')))
|
avi-software/bmicalc
|
app/tests/autopilot/weightcalc/tests/test_main.py
|
Python
|
gpl-3.0
| 671 | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.