repo_name
stringlengths 6
100
| path
stringlengths 4
294
| copies
stringlengths 1
5
| size
stringlengths 4
6
| content
stringlengths 606
896k
| license
stringclasses 15
values |
---|---|---|---|---|---|
baruch/libsigrokdecode
|
decoders/z80/tables.py
|
24
|
44963
|
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2014 Daniel Elstner <daniel.kitta@gmail.com>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, see <http://www.gnu.org/licenses/>.
##
'''
Instruction tuple: (d, i, ro, wo, rep, format string)
The placeholders d and i are the number of bytes in the instruction
used for the displacement and the immediate operand, respectively. An
operand consisting of more than one byte is assembled in little endian
order.
The placeholders ro and wo are the number of bytes the instruction
is expected to read or write, respectively. These counts are used
for both memory and I/O access, but not for immediate operands.
A negative value indicates that the operand byte order is big endian
rather than the usual little endian.
The placeholder rep is a boolean used to mark repeating instructions.
The format string should refer to the {d} and {i} operands by name.
Displacements are interpreted as signed integers, whereas immediate
operands are always read as unsigned. The tables for instructions
operating on the IX/IY index registers additionally use {r} in the
format string as a placeholder for the register name.
Relative jump instructions may specify {j} instead of {d} to output
the displacement relative to the start of the instruction.
'''
# Instructions without a prefix, keyed by opcode byte.
#
# The prefix bytes 0xCB, 0xDD, 0xED and 0xFD are deliberately absent:
# they select the other tables in this module instead of encoding a
# complete instruction on their own.
#
# Immediate operands use the 'X' (uppercase hexadecimal) presentation
# type of Python's format mini-language, followed by a literal 'h'
# suffix; relative jumps use the signed {j} placeholder described in
# the module docstring.
main_instructions = {
    0x00: (0, 0, 0, 0, False, 'NOP'),
    0x01: (0, 2, 0, 0, False, 'LD BC,{i:04X}h'),
    0x02: (0, 0, 0, 1, False, 'LD (BC),A'),
    0x03: (0, 0, 0, 0, False, 'INC BC'),
    0x04: (0, 0, 0, 0, False, 'INC B'),
    0x05: (0, 0, 0, 0, False, 'DEC B'),
    0x06: (0, 1, 0, 0, False, 'LD B,{i:02X}h'),
    0x07: (0, 0, 0, 0, False, 'RLCA'),
    0x08: (0, 0, 0, 0, False, 'EX AF,AF\''),
    0x09: (0, 0, 0, 0, False, 'ADD HL,BC'),
    0x0A: (0, 0, 1, 0, False, 'LD A,(BC)'),
    0x0B: (0, 0, 0, 0, False, 'DEC BC'),
    0x0C: (0, 0, 0, 0, False, 'INC C'),
    0x0D: (0, 0, 0, 0, False, 'DEC C'),
    0x0E: (0, 1, 0, 0, False, 'LD C,{i:02X}h'),
    0x0F: (0, 0, 0, 0, False, 'RRCA'),
    0x10: (1, 0, 0, 0, False, 'DJNZ ${j:+d}'),
    0x11: (0, 2, 0, 0, False, 'LD DE,{i:04X}h'),
    0x12: (0, 0, 0, 1, False, 'LD (DE),A'),
    0x13: (0, 0, 0, 0, False, 'INC DE'),
    0x14: (0, 0, 0, 0, False, 'INC D'),
    0x15: (0, 0, 0, 0, False, 'DEC D'),
    0x16: (0, 1, 0, 0, False, 'LD D,{i:02X}h'),
    0x17: (0, 0, 0, 0, False, 'RLA'),
    0x18: (1, 0, 0, 0, False, 'JR ${j:+d}'),
    0x19: (0, 0, 0, 0, False, 'ADD HL,DE'),
    0x1A: (0, 0, 1, 0, False, 'LD A,(DE)'),
    0x1B: (0, 0, 0, 0, False, 'DEC DE'),
    0x1C: (0, 0, 0, 0, False, 'INC E'),
    0x1D: (0, 0, 0, 0, False, 'DEC E'),
    0x1E: (0, 1, 0, 0, False, 'LD E,{i:02X}h'),
    0x1F: (0, 0, 0, 0, False, 'RRA'),
    0x20: (1, 0, 0, 0, False, 'JR NZ,${j:+d}'),
    0x21: (0, 2, 0, 0, False, 'LD HL,{i:04X}h'),
    0x22: (0, 2, 0, 2, False, 'LD ({i:04X}h),HL'),
    0x23: (0, 0, 0, 0, False, 'INC HL'),
    0x24: (0, 0, 0, 0, False, 'INC H'),
    0x25: (0, 0, 0, 0, False, 'DEC H'),
    0x26: (0, 1, 0, 0, False, 'LD H,{i:02X}h'),
    0x27: (0, 0, 0, 0, False, 'DAA'),
    0x28: (1, 0, 0, 0, False, 'JR Z,${j:+d}'),
    0x29: (0, 0, 0, 0, False, 'ADD HL,HL'),
    0x2A: (0, 2, 2, 0, False, 'LD HL,({i:04X}h)'),
    0x2B: (0, 0, 0, 0, False, 'DEC HL'),
    0x2C: (0, 0, 0, 0, False, 'INC L'),
    0x2D: (0, 0, 0, 0, False, 'DEC L'),
    0x2E: (0, 1, 0, 0, False, 'LD L,{i:02X}h'),
    0x2F: (0, 0, 0, 0, False, 'CPL'),
    0x30: (1, 0, 0, 0, False, 'JR NC,${j:+d}'),
    0x31: (0, 2, 0, 0, False, 'LD SP,{i:04X}h'),
    0x32: (0, 2, 0, 1, False, 'LD ({i:04X}h),A'),
    0x33: (0, 0, 0, 0, False, 'INC SP'),
    0x34: (0, 0, 1, 1, False, 'INC (HL)'),
    0x35: (0, 0, 1, 1, False, 'DEC (HL)'),
    0x36: (0, 1, 0, 1, False, 'LD (HL),{i:02X}h'),
    0x37: (0, 0, 0, 0, False, 'SCF'),
    0x38: (1, 0, 0, 0, False, 'JR C,${j:+d}'),
    0x39: (0, 0, 0, 0, False, 'ADD HL,SP'),
    0x3A: (0, 2, 1, 0, False, 'LD A,({i:04X}h)'),
    0x3B: (0, 0, 0, 0, False, 'DEC SP'),
    0x3C: (0, 0, 0, 0, False, 'INC A'),
    0x3D: (0, 0, 0, 0, False, 'DEC A'),
    0x3E: (0, 1, 0, 0, False, 'LD A,{i:02X}h'),
    0x3F: (0, 0, 0, 0, False, 'CCF'),
    0x40: (0, 0, 0, 0, False, 'LD B,B'),
    0x41: (0, 0, 0, 0, False, 'LD B,C'),
    0x42: (0, 0, 0, 0, False, 'LD B,D'),
    0x43: (0, 0, 0, 0, False, 'LD B,E'),
    0x44: (0, 0, 0, 0, False, 'LD B,H'),
    0x45: (0, 0, 0, 0, False, 'LD B,L'),
    0x46: (0, 0, 1, 0, False, 'LD B,(HL)'),
    0x47: (0, 0, 0, 0, False, 'LD B,A'),
    0x48: (0, 0, 0, 0, False, 'LD C,B'),
    0x49: (0, 0, 0, 0, False, 'LD C,C'),
    0x4A: (0, 0, 0, 0, False, 'LD C,D'),
    0x4B: (0, 0, 0, 0, False, 'LD C,E'),
    0x4C: (0, 0, 0, 0, False, 'LD C,H'),
    0x4D: (0, 0, 0, 0, False, 'LD C,L'),
    0x4E: (0, 0, 1, 0, False, 'LD C,(HL)'),
    0x4F: (0, 0, 0, 0, False, 'LD C,A'),
    0x50: (0, 0, 0, 0, False, 'LD D,B'),
    0x51: (0, 0, 0, 0, False, 'LD D,C'),
    0x52: (0, 0, 0, 0, False, 'LD D,D'),
    0x53: (0, 0, 0, 0, False, 'LD D,E'),
    0x54: (0, 0, 0, 0, False, 'LD D,H'),
    0x55: (0, 0, 0, 0, False, 'LD D,L'),
    0x56: (0, 0, 1, 0, False, 'LD D,(HL)'),
    0x57: (0, 0, 0, 0, False, 'LD D,A'),
    0x58: (0, 0, 0, 0, False, 'LD E,B'),
    0x59: (0, 0, 0, 0, False, 'LD E,C'),
    0x5A: (0, 0, 0, 0, False, 'LD E,D'),
    0x5B: (0, 0, 0, 0, False, 'LD E,E'),
    0x5C: (0, 0, 0, 0, False, 'LD E,H'),
    0x5D: (0, 0, 0, 0, False, 'LD E,L'),
    0x5E: (0, 0, 1, 0, False, 'LD E,(HL)'),
    0x5F: (0, 0, 0, 0, False, 'LD E,A'),
    0x60: (0, 0, 0, 0, False, 'LD H,B'),
    0x61: (0, 0, 0, 0, False, 'LD H,C'),
    0x62: (0, 0, 0, 0, False, 'LD H,D'),
    0x63: (0, 0, 0, 0, False, 'LD H,E'),
    0x64: (0, 0, 0, 0, False, 'LD H,H'),
    0x65: (0, 0, 0, 0, False, 'LD H,L'),
    0x66: (0, 0, 1, 0, False, 'LD H,(HL)'),
    0x67: (0, 0, 0, 0, False, 'LD H,A'),
    0x68: (0, 0, 0, 0, False, 'LD L,B'),
    0x69: (0, 0, 0, 0, False, 'LD L,C'),
    0x6A: (0, 0, 0, 0, False, 'LD L,D'),
    0x6B: (0, 0, 0, 0, False, 'LD L,E'),
    0x6C: (0, 0, 0, 0, False, 'LD L,H'),
    0x6D: (0, 0, 0, 0, False, 'LD L,L'),
    0x6E: (0, 0, 1, 0, False, 'LD L,(HL)'),
    0x6F: (0, 0, 0, 0, False, 'LD L,A'),
    0x70: (0, 0, 0, 1, False, 'LD (HL),B'),
    0x71: (0, 0, 0, 1, False, 'LD (HL),C'),
    0x72: (0, 0, 0, 1, False, 'LD (HL),D'),
    0x73: (0, 0, 0, 1, False, 'LD (HL),E'),
    0x74: (0, 0, 0, 1, False, 'LD (HL),H'),
    0x75: (0, 0, 0, 1, False, 'LD (HL),L'),
    0x76: (0, 0, 0, 0, False, 'HALT'),
    0x77: (0, 0, 0, 1, False, 'LD (HL),A'),
    0x78: (0, 0, 0, 0, False, 'LD A,B'),
    0x79: (0, 0, 0, 0, False, 'LD A,C'),
    0x7A: (0, 0, 0, 0, False, 'LD A,D'),
    0x7B: (0, 0, 0, 0, False, 'LD A,E'),
    0x7C: (0, 0, 0, 0, False, 'LD A,H'),
    0x7D: (0, 0, 0, 0, False, 'LD A,L'),
    0x7E: (0, 0, 1, 0, False, 'LD A,(HL)'),
    0x7F: (0, 0, 0, 0, False, 'LD A,A'),
    0x80: (0, 0, 0, 0, False, 'ADD A,B'),
    0x81: (0, 0, 0, 0, False, 'ADD A,C'),
    0x82: (0, 0, 0, 0, False, 'ADD A,D'),
    0x83: (0, 0, 0, 0, False, 'ADD A,E'),
    0x84: (0, 0, 0, 0, False, 'ADD A,H'),
    0x85: (0, 0, 0, 0, False, 'ADD A,L'),
    0x86: (0, 0, 1, 0, False, 'ADD A,(HL)'),
    0x87: (0, 0, 0, 0, False, 'ADD A,A'),
    0x88: (0, 0, 0, 0, False, 'ADC A,B'),
    0x89: (0, 0, 0, 0, False, 'ADC A,C'),
    0x8A: (0, 0, 0, 0, False, 'ADC A,D'),
    0x8B: (0, 0, 0, 0, False, 'ADC A,E'),
    0x8C: (0, 0, 0, 0, False, 'ADC A,H'),
    0x8D: (0, 0, 0, 0, False, 'ADC A,L'),
    0x8E: (0, 0, 1, 0, False, 'ADC A,(HL)'),
    0x8F: (0, 0, 0, 0, False, 'ADC A,A'),
    0x90: (0, 0, 0, 0, False, 'SUB B'),
    0x91: (0, 0, 0, 0, False, 'SUB C'),
    0x92: (0, 0, 0, 0, False, 'SUB D'),
    0x93: (0, 0, 0, 0, False, 'SUB E'),
    0x94: (0, 0, 0, 0, False, 'SUB H'),
    0x95: (0, 0, 0, 0, False, 'SUB L'),
    0x96: (0, 0, 1, 0, False, 'SUB (HL)'),
    0x97: (0, 0, 0, 0, False, 'SUB A'),
    0x98: (0, 0, 0, 0, False, 'SBC A,B'),
    0x99: (0, 0, 0, 0, False, 'SBC A,C'),
    0x9A: (0, 0, 0, 0, False, 'SBC A,D'),
    0x9B: (0, 0, 0, 0, False, 'SBC A,E'),
    0x9C: (0, 0, 0, 0, False, 'SBC A,H'),
    0x9D: (0, 0, 0, 0, False, 'SBC A,L'),
    0x9E: (0, 0, 1, 0, False, 'SBC A,(HL)'),
    0x9F: (0, 0, 0, 0, False, 'SBC A,A'),
    0xA0: (0, 0, 0, 0, False, 'AND B'),
    0xA1: (0, 0, 0, 0, False, 'AND C'),
    0xA2: (0, 0, 0, 0, False, 'AND D'),
    0xA3: (0, 0, 0, 0, False, 'AND E'),
    0xA4: (0, 0, 0, 0, False, 'AND H'),
    0xA5: (0, 0, 0, 0, False, 'AND L'),
    0xA6: (0, 0, 1, 0, False, 'AND (HL)'),
    0xA7: (0, 0, 0, 0, False, 'AND A'),
    0xA8: (0, 0, 0, 0, False, 'XOR B'),
    0xA9: (0, 0, 0, 0, False, 'XOR C'),
    0xAA: (0, 0, 0, 0, False, 'XOR D'),
    0xAB: (0, 0, 0, 0, False, 'XOR E'),
    0xAC: (0, 0, 0, 0, False, 'XOR H'),
    0xAD: (0, 0, 0, 0, False, 'XOR L'),
    0xAE: (0, 0, 1, 0, False, 'XOR (HL)'),
    0xAF: (0, 0, 0, 0, False, 'XOR A'),
    0xB0: (0, 0, 0, 0, False, 'OR B'),
    0xB1: (0, 0, 0, 0, False, 'OR C'),
    0xB2: (0, 0, 0, 0, False, 'OR D'),
    0xB3: (0, 0, 0, 0, False, 'OR E'),
    0xB4: (0, 0, 0, 0, False, 'OR H'),
    0xB5: (0, 0, 0, 0, False, 'OR L'),
    0xB6: (0, 0, 1, 0, False, 'OR (HL)'),
    0xB7: (0, 0, 0, 0, False, 'OR A'),
    0xB8: (0, 0, 0, 0, False, 'CP B'),
    0xB9: (0, 0, 0, 0, False, 'CP C'),
    0xBA: (0, 0, 0, 0, False, 'CP D'),
    0xBB: (0, 0, 0, 0, False, 'CP E'),
    0xBC: (0, 0, 0, 0, False, 'CP H'),
    0xBD: (0, 0, 0, 0, False, 'CP L'),
    0xBE: (0, 0, 1, 0, False, 'CP (HL)'),
    0xBF: (0, 0, 0, 0, False, 'CP A'),
    # RET/POP read two bytes from the stack; PUSH/CALL/RST write two
    # bytes in big-endian order, hence the -2 write count.
    0xC0: (0, 0, 2, 0, False, 'RET NZ'),
    0xC1: (0, 0, 2, 0, False, 'POP BC'),
    0xC2: (0, 2, 0, 0, False, 'JP NZ,{i:04X}h'),
    0xC3: (0, 2, 0, 0, False, 'JP {i:04X}h'),
    0xC4: (0, 2, 0, -2, False, 'CALL NZ,{i:04X}h'),
    0xC5: (0, 0, 0, -2, False, 'PUSH BC'),
    0xC6: (0, 1, 0, 0, False, 'ADD A,{i:02X}h'),
    0xC7: (0, 0, 0, -2, False, 'RST 00h'),
    0xC8: (0, 0, 2, 0, False, 'RET Z'),
    0xC9: (0, 0, 2, 0, False, 'RET'),
    0xCA: (0, 2, 0, 0, False, 'JP Z,{i:04X}h'),
    # 0xCB is the bit-instruction prefix (see bit_instructions).
    0xCC: (0, 2, 0, -2, False, 'CALL Z,{i:04X}h'),
    0xCD: (0, 2, 0, -2, False, 'CALL {i:04X}h'),
    0xCE: (0, 1, 0, 0, False, 'ADC A,{i:02X}h'),
    0xCF: (0, 0, 0, -2, False, 'RST 08h'),
    0xD0: (0, 0, 2, 0, False, 'RET NC'),
    0xD1: (0, 0, 2, 0, False, 'POP DE'),
    0xD2: (0, 2, 0, 0, False, 'JP NC,{i:04X}h'),
    0xD3: (0, 1, 0, 1, False, 'OUT ({i:02X}h),A'),
    0xD4: (0, 2, 0, -2, False, 'CALL NC,{i:04X}h'),
    0xD5: (0, 0, 0, -2, False, 'PUSH DE'),
    0xD6: (0, 1, 0, 0, False, 'SUB {i:02X}h'),
    0xD7: (0, 0, 0, -2, False, 'RST 10h'),
    0xD8: (0, 0, 2, 0, False, 'RET C'),
    0xD9: (0, 0, 0, 0, False, 'EXX'),
    0xDA: (0, 2, 0, 0, False, 'JP C,{i:04X}h'),
    0xDB: (0, 1, 1, 0, False, 'IN A,({i:02X}h)'),
    0xDC: (0, 2, 0, -2, False, 'CALL C,{i:04X}h'),
    # 0xDD is the IX prefix (see index_instructions).
    0xDE: (0, 1, 0, 0, False, 'SBC A,{i:02X}h'),
    0xDF: (0, 0, 0, -2, False, 'RST 18h'),
    0xE0: (0, 0, 2, 0, False, 'RET PO'),
    0xE1: (0, 0, 2, 0, False, 'POP HL'),
    0xE2: (0, 2, 0, 0, False, 'JP PO,{i:04X}h'),
    0xE3: (0, 0, 2, 2, False, 'EX (SP),HL'),
    0xE4: (0, 2, 0, -2, False, 'CALL PO,{i:04X}h'),
    0xE5: (0, 0, 0, -2, False, 'PUSH HL'),
    0xE6: (0, 1, 0, 0, False, 'AND {i:02X}h'),
    0xE7: (0, 0, 0, -2, False, 'RST 20h'),
    0xE8: (0, 0, 2, 0, False, 'RET PE'),
    0xE9: (0, 0, 0, 0, False, 'JP (HL)'),
    0xEA: (0, 2, 0, 0, False, 'JP PE,{i:04X}h'),
    0xEB: (0, 0, 0, 0, False, 'EX DE,HL'),
    0xEC: (0, 2, 0, -2, False, 'CALL PE,{i:04X}h'),
    # 0xED is the extended-instruction prefix (see extended_instructions).
    0xEE: (0, 1, 0, 0, False, 'XOR {i:02X}h'),
    0xEF: (0, 0, 0, -2, False, 'RST 28h'),
    0xF0: (0, 0, 2, 0, False, 'RET P'),
    0xF1: (0, 0, 2, 0, False, 'POP AF'),
    0xF2: (0, 2, 0, 0, False, 'JP P,{i:04X}h'),
    0xF3: (0, 0, 0, 0, False, 'DI'),
    0xF4: (0, 2, 0, -2, False, 'CALL P,{i:04X}h'),
    0xF5: (0, 0, 0, -2, False, 'PUSH AF'),
    0xF6: (0, 1, 0, 0, False, 'OR {i:02X}h'),
    0xF7: (0, 0, 0, -2, False, 'RST 30h'),
    0xF8: (0, 0, 2, 0, False, 'RET M'),
    0xF9: (0, 0, 0, 0, False, 'LD SP,HL'),
    0xFA: (0, 2, 0, 0, False, 'JP M,{i:04X}h'),
    0xFB: (0, 0, 0, 0, False, 'EI'),
    0xFC: (0, 2, 0, -2, False, 'CALL M,{i:04X}h'),
    # 0xFD is the IY prefix (see index_instructions).
    0xFE: (0, 1, 0, 0, False, 'CP {i:02X}h'),
    0xFF: (0, 0, 0, -2, False, 'RST 38h')
}
# Instructions with ED prefix, keyed by the byte following 0xED.
#
# Includes the undocumented duplicate NEG/RETN/IM encodings and the
# undocumented 'IN (C)' / 'OUT (C),0' opcodes. Immediate operands use
# the 'X' (uppercase hexadecimal) format code with an 'h' suffix.
# The block transfer/search/IO instructions at 0xB0-0xBB set the
# repeat flag (see module docstring).
extended_instructions = {
    0x40: (0, 0, 1, 0, False, 'IN B,(C)'),
    0x41: (0, 0, 0, 1, False, 'OUT (C),B'),
    0x42: (0, 0, 0, 0, False, 'SBC HL,BC'),
    0x43: (0, 2, 0, 2, False, 'LD ({i:04X}h),BC'),
    0x44: (0, 0, 0, 0, False, 'NEG'),
    0x45: (0, 0, 2, 0, False, 'RETN'),
    0x46: (0, 0, 0, 0, False, 'IM 0'),
    0x47: (0, 0, 0, 0, False, 'LD I,A'),
    0x48: (0, 0, 1, 0, False, 'IN C,(C)'),
    0x49: (0, 0, 0, 1, False, 'OUT (C),C'),
    0x4A: (0, 0, 0, 0, False, 'ADC HL,BC'),
    0x4B: (0, 2, 2, 0, False, 'LD BC,({i:04X}h)'),
    0x4C: (0, 0, 0, 0, False, 'NEG'),
    0x4D: (0, 0, 2, 0, False, 'RETI'),
    0x4E: (0, 0, 0, 0, False, 'IM 0/1'),
    0x4F: (0, 0, 0, 0, False, 'LD R,A'),
    0x50: (0, 0, 1, 0, False, 'IN D,(C)'),
    0x51: (0, 0, 0, 1, False, 'OUT (C),D'),
    0x52: (0, 0, 0, 0, False, 'SBC HL,DE'),
    0x53: (0, 2, 0, 2, False, 'LD ({i:04X}h),DE'),
    0x54: (0, 0, 0, 0, False, 'NEG'),
    0x55: (0, 0, 2, 0, False, 'RETN'),
    0x56: (0, 0, 0, 0, False, 'IM 1'),
    0x57: (0, 0, 0, 0, False, 'LD A,I'),
    0x58: (0, 0, 1, 0, False, 'IN E,(C)'),
    0x59: (0, 0, 0, 1, False, 'OUT (C),E'),
    0x5A: (0, 0, 0, 0, False, 'ADC HL,DE'),
    0x5B: (0, 2, 2, 0, False, 'LD DE,({i:04X}h)'),
    0x5C: (0, 0, 0, 0, False, 'NEG'),
    0x5D: (0, 0, 2, 0, False, 'RETN'),
    0x5E: (0, 0, 0, 0, False, 'IM 2'),
    0x5F: (0, 0, 0, 0, False, 'LD A,R'),
    0x60: (0, 0, 1, 0, False, 'IN H,(C)'),
    0x61: (0, 0, 0, 1, False, 'OUT (C),H'),
    0x62: (0, 0, 0, 0, False, 'SBC HL,HL'),
    0x63: (0, 2, 0, 2, False, 'LD ({i:04X}h),HL'),
    0x64: (0, 0, 0, 0, False, 'NEG'),
    0x65: (0, 0, 2, 0, False, 'RETN'),
    0x66: (0, 0, 0, 0, False, 'IM 0'),
    0x67: (0, 0, 1, 1, False, 'RRD'),
    0x68: (0, 0, 1, 0, False, 'IN L,(C)'),
    0x69: (0, 0, 0, 1, False, 'OUT (C),L'),
    0x6A: (0, 0, 0, 0, False, 'ADC HL,HL'),
    0x6B: (0, 2, 2, 0, False, 'LD HL,({i:04X}h)'),
    0x6C: (0, 0, 0, 0, False, 'NEG'),
    0x6D: (0, 0, 2, 0, False, 'RETN'),
    0x6E: (0, 0, 0, 0, False, 'IM 0/1'),
    0x6F: (0, 0, 1, 1, False, 'RLD'),
    0x70: (0, 0, 1, 0, False, 'IN (C)'),
    0x71: (0, 0, 0, 1, False, 'OUT (C),0'),
    0x72: (0, 0, 0, 0, False, 'SBC HL,SP'),
    0x73: (0, 2, 0, 2, False, 'LD ({i:04X}h),SP'),
    0x74: (0, 0, 0, 0, False, 'NEG'),
    0x75: (0, 0, 2, 0, False, 'RETN'),
    0x76: (0, 0, 0, 0, False, 'IM 1'),
    0x78: (0, 0, 1, 0, False, 'IN A,(C)'),
    0x79: (0, 0, 0, 1, False, 'OUT (C),A'),
    0x7A: (0, 0, 0, 0, False, 'ADC HL,SP'),
    0x7B: (0, 2, 2, 0, False, 'LD SP,({i:04X}h)'),
    0x7C: (0, 0, 0, 0, False, 'NEG'),
    0x7D: (0, 0, 2, 0, False, 'RETN'),
    0x7E: (0, 0, 0, 0, False, 'IM 2'),
    0xA0: (0, 0, 1, 1, False, 'LDI'),
    0xA1: (0, 0, 1, 0, False, 'CPI'),
    0xA2: (0, 0, 1, 1, False, 'INI'),
    0xA3: (0, 0, 1, 1, False, 'OUTI'),
    0xA8: (0, 0, 1, 1, False, 'LDD'),
    0xA9: (0, 0, 1, 0, False, 'CPD'),
    0xAA: (0, 0, 1, 1, False, 'IND'),
    0xAB: (0, 0, 1, 1, False, 'OUTD'),
    0xB0: (0, 0, 1, 1, True, 'LDIR'),
    0xB1: (0, 0, 1, 0, True, 'CPIR'),
    0xB2: (0, 0, 1, 1, True, 'INIR'),
    0xB3: (0, 0, 1, 1, True, 'OTIR'),
    0xB8: (0, 0, 1, 1, True, 'LDDR'),
    0xB9: (0, 0, 1, 0, True, 'CPDR'),
    0xBA: (0, 0, 1, 1, True, 'INDR'),
    0xBB: (0, 0, 1, 1, True, 'OTDR')
}
# Instructions with CB prefix, keyed by the byte following 0xCB.
# The table covers the full 0x00-0xFF range. The low three bits of the
# opcode select the operand (B, C, D, E, H, L, (HL), A); the (HL)
# variants are the only entries with nonzero memory read/write counts.
# No entry takes a displacement or immediate operand.
bit_instructions = {
# 0x00-0x3F: rotate/shift group, one 8-opcode row per operation
# (RLC, RRC, RL, RR, SLA, SRA, SLL, SRL). SLL is the undocumented
# "shift left logical" (shift left, bit 0 set).
0x00: (0, 0, 0, 0, False, 'RLC B'),
0x01: (0, 0, 0, 0, False, 'RLC C'),
0x02: (0, 0, 0, 0, False, 'RLC D'),
0x03: (0, 0, 0, 0, False, 'RLC E'),
0x04: (0, 0, 0, 0, False, 'RLC H'),
0x05: (0, 0, 0, 0, False, 'RLC L'),
0x06: (0, 0, 1, 1, False, 'RLC (HL)'),
0x07: (0, 0, 0, 0, False, 'RLC A'),
0x08: (0, 0, 0, 0, False, 'RRC B'),
0x09: (0, 0, 0, 0, False, 'RRC C'),
0x0A: (0, 0, 0, 0, False, 'RRC D'),
0x0B: (0, 0, 0, 0, False, 'RRC E'),
0x0C: (0, 0, 0, 0, False, 'RRC H'),
0x0D: (0, 0, 0, 0, False, 'RRC L'),
0x0E: (0, 0, 1, 1, False, 'RRC (HL)'),
0x0F: (0, 0, 0, 0, False, 'RRC A'),
0x10: (0, 0, 0, 0, False, 'RL B'),
0x11: (0, 0, 0, 0, False, 'RL C'),
0x12: (0, 0, 0, 0, False, 'RL D'),
0x13: (0, 0, 0, 0, False, 'RL E'),
0x14: (0, 0, 0, 0, False, 'RL H'),
0x15: (0, 0, 0, 0, False, 'RL L'),
0x16: (0, 0, 1, 1, False, 'RL (HL)'),
0x17: (0, 0, 0, 0, False, 'RL A'),
0x18: (0, 0, 0, 0, False, 'RR B'),
0x19: (0, 0, 0, 0, False, 'RR C'),
0x1A: (0, 0, 0, 0, False, 'RR D'),
0x1B: (0, 0, 0, 0, False, 'RR E'),
0x1C: (0, 0, 0, 0, False, 'RR H'),
0x1D: (0, 0, 0, 0, False, 'RR L'),
0x1E: (0, 0, 1, 1, False, 'RR (HL)'),
0x1F: (0, 0, 0, 0, False, 'RR A'),
0x20: (0, 0, 0, 0, False, 'SLA B'),
0x21: (0, 0, 0, 0, False, 'SLA C'),
0x22: (0, 0, 0, 0, False, 'SLA D'),
0x23: (0, 0, 0, 0, False, 'SLA E'),
0x24: (0, 0, 0, 0, False, 'SLA H'),
0x25: (0, 0, 0, 0, False, 'SLA L'),
0x26: (0, 0, 1, 1, False, 'SLA (HL)'),
0x27: (0, 0, 0, 0, False, 'SLA A'),
0x28: (0, 0, 0, 0, False, 'SRA B'),
0x29: (0, 0, 0, 0, False, 'SRA C'),
0x2A: (0, 0, 0, 0, False, 'SRA D'),
0x2B: (0, 0, 0, 0, False, 'SRA E'),
0x2C: (0, 0, 0, 0, False, 'SRA H'),
0x2D: (0, 0, 0, 0, False, 'SRA L'),
0x2E: (0, 0, 1, 1, False, 'SRA (HL)'),
0x2F: (0, 0, 0, 0, False, 'SRA A'),
0x30: (0, 0, 0, 0, False, 'SLL B'),
0x31: (0, 0, 0, 0, False, 'SLL C'),
0x32: (0, 0, 0, 0, False, 'SLL D'),
0x33: (0, 0, 0, 0, False, 'SLL E'),
0x34: (0, 0, 0, 0, False, 'SLL H'),
0x35: (0, 0, 0, 0, False, 'SLL L'),
0x36: (0, 0, 1, 1, False, 'SLL (HL)'),
0x37: (0, 0, 0, 0, False, 'SLL A'),
0x38: (0, 0, 0, 0, False, 'SRL B'),
0x39: (0, 0, 0, 0, False, 'SRL C'),
0x3A: (0, 0, 0, 0, False, 'SRL D'),
0x3B: (0, 0, 0, 0, False, 'SRL E'),
0x3C: (0, 0, 0, 0, False, 'SRL H'),
0x3D: (0, 0, 0, 0, False, 'SRL L'),
0x3E: (0, 0, 1, 1, False, 'SRL (HL)'),
0x3F: (0, 0, 0, 0, False, 'SRL A'),
# 0x40-0x7F: BIT b,r — test only, so the (HL) variants read but never
# write memory.
0x40: (0, 0, 0, 0, False, 'BIT 0,B'),
0x41: (0, 0, 0, 0, False, 'BIT 0,C'),
0x42: (0, 0, 0, 0, False, 'BIT 0,D'),
0x43: (0, 0, 0, 0, False, 'BIT 0,E'),
0x44: (0, 0, 0, 0, False, 'BIT 0,H'),
0x45: (0, 0, 0, 0, False, 'BIT 0,L'),
0x46: (0, 0, 1, 0, False, 'BIT 0,(HL)'),
0x47: (0, 0, 0, 0, False, 'BIT 0,A'),
0x48: (0, 0, 0, 0, False, 'BIT 1,B'),
0x49: (0, 0, 0, 0, False, 'BIT 1,C'),
0x4A: (0, 0, 0, 0, False, 'BIT 1,D'),
0x4B: (0, 0, 0, 0, False, 'BIT 1,E'),
0x4C: (0, 0, 0, 0, False, 'BIT 1,H'),
0x4D: (0, 0, 0, 0, False, 'BIT 1,L'),
0x4E: (0, 0, 1, 0, False, 'BIT 1,(HL)'),
0x4F: (0, 0, 0, 0, False, 'BIT 1,A'),
0x50: (0, 0, 0, 0, False, 'BIT 2,B'),
0x51: (0, 0, 0, 0, False, 'BIT 2,C'),
0x52: (0, 0, 0, 0, False, 'BIT 2,D'),
0x53: (0, 0, 0, 0, False, 'BIT 2,E'),
0x54: (0, 0, 0, 0, False, 'BIT 2,H'),
0x55: (0, 0, 0, 0, False, 'BIT 2,L'),
0x56: (0, 0, 1, 0, False, 'BIT 2,(HL)'),
0x57: (0, 0, 0, 0, False, 'BIT 2,A'),
0x58: (0, 0, 0, 0, False, 'BIT 3,B'),
0x59: (0, 0, 0, 0, False, 'BIT 3,C'),
0x5A: (0, 0, 0, 0, False, 'BIT 3,D'),
0x5B: (0, 0, 0, 0, False, 'BIT 3,E'),
0x5C: (0, 0, 0, 0, False, 'BIT 3,H'),
0x5D: (0, 0, 0, 0, False, 'BIT 3,L'),
0x5E: (0, 0, 1, 0, False, 'BIT 3,(HL)'),
0x5F: (0, 0, 0, 0, False, 'BIT 3,A'),
0x60: (0, 0, 0, 0, False, 'BIT 4,B'),
0x61: (0, 0, 0, 0, False, 'BIT 4,C'),
0x62: (0, 0, 0, 0, False, 'BIT 4,D'),
0x63: (0, 0, 0, 0, False, 'BIT 4,E'),
0x64: (0, 0, 0, 0, False, 'BIT 4,H'),
0x65: (0, 0, 0, 0, False, 'BIT 4,L'),
0x66: (0, 0, 1, 0, False, 'BIT 4,(HL)'),
0x67: (0, 0, 0, 0, False, 'BIT 4,A'),
0x68: (0, 0, 0, 0, False, 'BIT 5,B'),
0x69: (0, 0, 0, 0, False, 'BIT 5,C'),
0x6A: (0, 0, 0, 0, False, 'BIT 5,D'),
0x6B: (0, 0, 0, 0, False, 'BIT 5,E'),
0x6C: (0, 0, 0, 0, False, 'BIT 5,H'),
0x6D: (0, 0, 0, 0, False, 'BIT 5,L'),
0x6E: (0, 0, 1, 0, False, 'BIT 5,(HL)'),
0x6F: (0, 0, 0, 0, False, 'BIT 5,A'),
0x70: (0, 0, 0, 0, False, 'BIT 6,B'),
0x71: (0, 0, 0, 0, False, 'BIT 6,C'),
0x72: (0, 0, 0, 0, False, 'BIT 6,D'),
0x73: (0, 0, 0, 0, False, 'BIT 6,E'),
0x74: (0, 0, 0, 0, False, 'BIT 6,H'),
0x75: (0, 0, 0, 0, False, 'BIT 6,L'),
0x76: (0, 0, 1, 0, False, 'BIT 6,(HL)'),
0x77: (0, 0, 0, 0, False, 'BIT 6,A'),
0x78: (0, 0, 0, 0, False, 'BIT 7,B'),
0x79: (0, 0, 0, 0, False, 'BIT 7,C'),
0x7A: (0, 0, 0, 0, False, 'BIT 7,D'),
0x7B: (0, 0, 0, 0, False, 'BIT 7,E'),
0x7C: (0, 0, 0, 0, False, 'BIT 7,H'),
0x7D: (0, 0, 0, 0, False, 'BIT 7,L'),
0x7E: (0, 0, 1, 0, False, 'BIT 7,(HL)'),
0x7F: (0, 0, 0, 0, False, 'BIT 7,A'),
# 0x80-0xBF: RES b,r — read-modify-write on (HL).
0x80: (0, 0, 0, 0, False, 'RES 0,B'),
0x81: (0, 0, 0, 0, False, 'RES 0,C'),
0x82: (0, 0, 0, 0, False, 'RES 0,D'),
0x83: (0, 0, 0, 0, False, 'RES 0,E'),
0x84: (0, 0, 0, 0, False, 'RES 0,H'),
0x85: (0, 0, 0, 0, False, 'RES 0,L'),
0x86: (0, 0, 1, 1, False, 'RES 0,(HL)'),
0x87: (0, 0, 0, 0, False, 'RES 0,A'),
0x88: (0, 0, 0, 0, False, 'RES 1,B'),
0x89: (0, 0, 0, 0, False, 'RES 1,C'),
0x8A: (0, 0, 0, 0, False, 'RES 1,D'),
0x8B: (0, 0, 0, 0, False, 'RES 1,E'),
0x8C: (0, 0, 0, 0, False, 'RES 1,H'),
0x8D: (0, 0, 0, 0, False, 'RES 1,L'),
0x8E: (0, 0, 1, 1, False, 'RES 1,(HL)'),
0x8F: (0, 0, 0, 0, False, 'RES 1,A'),
0x90: (0, 0, 0, 0, False, 'RES 2,B'),
0x91: (0, 0, 0, 0, False, 'RES 2,C'),
0x92: (0, 0, 0, 0, False, 'RES 2,D'),
0x93: (0, 0, 0, 0, False, 'RES 2,E'),
0x94: (0, 0, 0, 0, False, 'RES 2,H'),
0x95: (0, 0, 0, 0, False, 'RES 2,L'),
0x96: (0, 0, 1, 1, False, 'RES 2,(HL)'),
0x97: (0, 0, 0, 0, False, 'RES 2,A'),
0x98: (0, 0, 0, 0, False, 'RES 3,B'),
0x99: (0, 0, 0, 0, False, 'RES 3,C'),
0x9A: (0, 0, 0, 0, False, 'RES 3,D'),
0x9B: (0, 0, 0, 0, False, 'RES 3,E'),
0x9C: (0, 0, 0, 0, False, 'RES 3,H'),
0x9D: (0, 0, 0, 0, False, 'RES 3,L'),
0x9E: (0, 0, 1, 1, False, 'RES 3,(HL)'),
0x9F: (0, 0, 0, 0, False, 'RES 3,A'),
0xA0: (0, 0, 0, 0, False, 'RES 4,B'),
0xA1: (0, 0, 0, 0, False, 'RES 4,C'),
0xA2: (0, 0, 0, 0, False, 'RES 4,D'),
0xA3: (0, 0, 0, 0, False, 'RES 4,E'),
0xA4: (0, 0, 0, 0, False, 'RES 4,H'),
0xA5: (0, 0, 0, 0, False, 'RES 4,L'),
0xA6: (0, 0, 1, 1, False, 'RES 4,(HL)'),
0xA7: (0, 0, 0, 0, False, 'RES 4,A'),
0xA8: (0, 0, 0, 0, False, 'RES 5,B'),
0xA9: (0, 0, 0, 0, False, 'RES 5,C'),
0xAA: (0, 0, 0, 0, False, 'RES 5,D'),
0xAB: (0, 0, 0, 0, False, 'RES 5,E'),
0xAC: (0, 0, 0, 0, False, 'RES 5,H'),
0xAD: (0, 0, 0, 0, False, 'RES 5,L'),
0xAE: (0, 0, 1, 1, False, 'RES 5,(HL)'),
0xAF: (0, 0, 0, 0, False, 'RES 5,A'),
0xB0: (0, 0, 0, 0, False, 'RES 6,B'),
0xB1: (0, 0, 0, 0, False, 'RES 6,C'),
0xB2: (0, 0, 0, 0, False, 'RES 6,D'),
0xB3: (0, 0, 0, 0, False, 'RES 6,E'),
0xB4: (0, 0, 0, 0, False, 'RES 6,H'),
0xB5: (0, 0, 0, 0, False, 'RES 6,L'),
0xB6: (0, 0, 1, 1, False, 'RES 6,(HL)'),
0xB7: (0, 0, 0, 0, False, 'RES 6,A'),
0xB8: (0, 0, 0, 0, False, 'RES 7,B'),
0xB9: (0, 0, 0, 0, False, 'RES 7,C'),
0xBA: (0, 0, 0, 0, False, 'RES 7,D'),
0xBB: (0, 0, 0, 0, False, 'RES 7,E'),
0xBC: (0, 0, 0, 0, False, 'RES 7,H'),
0xBD: (0, 0, 0, 0, False, 'RES 7,L'),
0xBE: (0, 0, 1, 1, False, 'RES 7,(HL)'),
0xBF: (0, 0, 0, 0, False, 'RES 7,A'),
# 0xC0-0xFF: SET b,r — read-modify-write on (HL).
0xC0: (0, 0, 0, 0, False, 'SET 0,B'),
0xC1: (0, 0, 0, 0, False, 'SET 0,C'),
0xC2: (0, 0, 0, 0, False, 'SET 0,D'),
0xC3: (0, 0, 0, 0, False, 'SET 0,E'),
0xC4: (0, 0, 0, 0, False, 'SET 0,H'),
0xC5: (0, 0, 0, 0, False, 'SET 0,L'),
0xC6: (0, 0, 1, 1, False, 'SET 0,(HL)'),
0xC7: (0, 0, 0, 0, False, 'SET 0,A'),
0xC8: (0, 0, 0, 0, False, 'SET 1,B'),
0xC9: (0, 0, 0, 0, False, 'SET 1,C'),
0xCA: (0, 0, 0, 0, False, 'SET 1,D'),
0xCB: (0, 0, 0, 0, False, 'SET 1,E'),
0xCC: (0, 0, 0, 0, False, 'SET 1,H'),
0xCD: (0, 0, 0, 0, False, 'SET 1,L'),
0xCE: (0, 0, 1, 1, False, 'SET 1,(HL)'),
0xCF: (0, 0, 0, 0, False, 'SET 1,A'),
0xD0: (0, 0, 0, 0, False, 'SET 2,B'),
0xD1: (0, 0, 0, 0, False, 'SET 2,C'),
0xD2: (0, 0, 0, 0, False, 'SET 2,D'),
0xD3: (0, 0, 0, 0, False, 'SET 2,E'),
0xD4: (0, 0, 0, 0, False, 'SET 2,H'),
0xD5: (0, 0, 0, 0, False, 'SET 2,L'),
0xD6: (0, 0, 1, 1, False, 'SET 2,(HL)'),
0xD7: (0, 0, 0, 0, False, 'SET 2,A'),
0xD8: (0, 0, 0, 0, False, 'SET 3,B'),
0xD9: (0, 0, 0, 0, False, 'SET 3,C'),
0xDA: (0, 0, 0, 0, False, 'SET 3,D'),
0xDB: (0, 0, 0, 0, False, 'SET 3,E'),
0xDC: (0, 0, 0, 0, False, 'SET 3,H'),
0xDD: (0, 0, 0, 0, False, 'SET 3,L'),
0xDE: (0, 0, 1, 1, False, 'SET 3,(HL)'),
0xDF: (0, 0, 0, 0, False, 'SET 3,A'),
0xE0: (0, 0, 0, 0, False, 'SET 4,B'),
0xE1: (0, 0, 0, 0, False, 'SET 4,C'),
0xE2: (0, 0, 0, 0, False, 'SET 4,D'),
0xE3: (0, 0, 0, 0, False, 'SET 4,E'),
0xE4: (0, 0, 0, 0, False, 'SET 4,H'),
0xE5: (0, 0, 0, 0, False, 'SET 4,L'),
0xE6: (0, 0, 1, 1, False, 'SET 4,(HL)'),
0xE7: (0, 0, 0, 0, False, 'SET 4,A'),
0xE8: (0, 0, 0, 0, False, 'SET 5,B'),
0xE9: (0, 0, 0, 0, False, 'SET 5,C'),
0xEA: (0, 0, 0, 0, False, 'SET 5,D'),
0xEB: (0, 0, 0, 0, False, 'SET 5,E'),
0xEC: (0, 0, 0, 0, False, 'SET 5,H'),
0xED: (0, 0, 0, 0, False, 'SET 5,L'),
0xEE: (0, 0, 1, 1, False, 'SET 5,(HL)'),
0xEF: (0, 0, 0, 0, False, 'SET 5,A'),
0xF0: (0, 0, 0, 0, False, 'SET 6,B'),
0xF1: (0, 0, 0, 0, False, 'SET 6,C'),
0xF2: (0, 0, 0, 0, False, 'SET 6,D'),
0xF3: (0, 0, 0, 0, False, 'SET 6,E'),
0xF4: (0, 0, 0, 0, False, 'SET 6,H'),
0xF5: (0, 0, 0, 0, False, 'SET 6,L'),
0xF6: (0, 0, 1, 1, False, 'SET 6,(HL)'),
0xF7: (0, 0, 0, 0, False, 'SET 6,A'),
0xF8: (0, 0, 0, 0, False, 'SET 7,B'),
0xF9: (0, 0, 0, 0, False, 'SET 7,C'),
0xFA: (0, 0, 0, 0, False, 'SET 7,D'),
0xFB: (0, 0, 0, 0, False, 'SET 7,E'),
0xFC: (0, 0, 0, 0, False, 'SET 7,H'),
0xFD: (0, 0, 0, 0, False, 'SET 7,L'),
0xFE: (0, 0, 1, 1, False, 'SET 7,(HL)'),
0xFF: (0, 0, 0, 0, False, 'SET 7,A')
}
# Instructions with DD or FD prefix, keyed by the byte following the
# prefix. The {r} placeholder is filled in by the decoder with 'IX'
# (DD prefix) or 'IY' (FD prefix); {r}h/{r}l denote the undocumented
# high/low half-registers. Displacements render as signed decimal via
# {d:+d}; immediates use the 'X' (uppercase hexadecimal) format code
# with an 'h' suffix.
index_instructions = {
    0x09: (0, 0, 0, 0, False, 'ADD {r},BC'),
    0x19: (0, 0, 0, 0, False, 'ADD {r},DE'),
    0x21: (0, 2, 0, 0, False, 'LD {r},{i:04X}h'),
    0x22: (0, 2, 0, 2, False, 'LD ({i:04X}h),{r}'),
    0x23: (0, 0, 0, 0, False, 'INC {r}'),
    0x24: (0, 0, 0, 0, False, 'INC {r}h'),
    0x25: (0, 0, 0, 0, False, 'DEC {r}h'),
    0x26: (0, 1, 0, 0, False, 'LD {r}h,{i:02X}h'),
    0x29: (0, 0, 0, 0, False, 'ADD {r},{r}'),
    0x2A: (0, 2, 2, 0, False, 'LD {r},({i:04X}h)'),
    0x2B: (0, 0, 0, 0, False, 'DEC {r}'),
    0x2C: (0, 0, 0, 0, False, 'INC {r}l'),
    0x2D: (0, 0, 0, 0, False, 'DEC {r}l'),
    0x2E: (0, 1, 0, 0, False, 'LD {r}l,{i:02X}h'),
    0x34: (1, 0, 1, 1, False, 'INC ({r}{d:+d})'),
    0x35: (1, 0, 1, 1, False, 'DEC ({r}{d:+d})'),
    0x36: (1, 1, 0, 1, False, 'LD ({r}{d:+d}),{i:02X}h'),
    0x39: (0, 0, 0, 0, False, 'ADD {r},SP'),
    0x44: (0, 0, 0, 0, False, 'LD B,{r}h'),
    0x45: (0, 0, 0, 0, False, 'LD B,{r}l'),
    0x46: (1, 0, 1, 0, False, 'LD B,({r}{d:+d})'),
    0x4C: (0, 0, 0, 0, False, 'LD C,{r}h'),
    0x4D: (0, 0, 0, 0, False, 'LD C,{r}l'),
    0x4E: (1, 0, 1, 0, False, 'LD C,({r}{d:+d})'),
    0x54: (0, 0, 0, 0, False, 'LD D,{r}h'),
    0x55: (0, 0, 0, 0, False, 'LD D,{r}l'),
    0x56: (1, 0, 1, 0, False, 'LD D,({r}{d:+d})'),
    0x5C: (0, 0, 0, 0, False, 'LD E,{r}h'),
    0x5D: (0, 0, 0, 0, False, 'LD E,{r}l'),
    0x5E: (1, 0, 1, 0, False, 'LD E,({r}{d:+d})'),
    0x60: (0, 0, 0, 0, False, 'LD {r}h,B'),
    0x61: (0, 0, 0, 0, False, 'LD {r}h,C'),
    0x62: (0, 0, 0, 0, False, 'LD {r}h,D'),
    0x63: (0, 0, 0, 0, False, 'LD {r}h,E'),
    0x64: (0, 0, 0, 0, False, 'LD {r}h,{r}h'),
    0x65: (0, 0, 0, 0, False, 'LD {r}h,{r}l'),
    # The indexed-memory forms use the real H/L registers, not the
    # index-register halves.
    0x66: (1, 0, 1, 0, False, 'LD H,({r}{d:+d})'),
    0x67: (0, 0, 0, 0, False, 'LD {r}h,A'),
    0x68: (0, 0, 0, 0, False, 'LD {r}l,B'),
    0x69: (0, 0, 0, 0, False, 'LD {r}l,C'),
    0x6A: (0, 0, 0, 0, False, 'LD {r}l,D'),
    0x6B: (0, 0, 0, 0, False, 'LD {r}l,E'),
    0x6C: (0, 0, 0, 0, False, 'LD {r}l,{r}h'),
    0x6D: (0, 0, 0, 0, False, 'LD {r}l,{r}l'),
    0x6E: (1, 0, 1, 0, False, 'LD L,({r}{d:+d})'),
    0x6F: (0, 0, 0, 0, False, 'LD {r}l,A'),
    0x70: (1, 0, 0, 1, False, 'LD ({r}{d:+d}),B'),
    0x71: (1, 0, 0, 1, False, 'LD ({r}{d:+d}),C'),
    0x72: (1, 0, 0, 1, False, 'LD ({r}{d:+d}),D'),
    0x73: (1, 0, 0, 1, False, 'LD ({r}{d:+d}),E'),
    0x74: (1, 0, 0, 1, False, 'LD ({r}{d:+d}),H'),
    0x75: (1, 0, 0, 1, False, 'LD ({r}{d:+d}),L'),
    0x77: (1, 0, 0, 1, False, 'LD ({r}{d:+d}),A'),
    0x7C: (0, 0, 0, 0, False, 'LD A,{r}h'),
    0x7D: (0, 0, 0, 0, False, 'LD A,{r}l'),
    0x7E: (1, 0, 1, 0, False, 'LD A,({r}{d:+d})'),
    0x84: (0, 0, 0, 0, False, 'ADD A,{r}h'),
    0x85: (0, 0, 0, 0, False, 'ADD A,{r}l'),
    0x86: (1, 0, 1, 0, False, 'ADD A,({r}{d:+d})'),
    0x8C: (0, 0, 0, 0, False, 'ADC A,{r}h'),
    0x8D: (0, 0, 0, 0, False, 'ADC A,{r}l'),
    0x8E: (1, 0, 1, 0, False, 'ADC A,({r}{d:+d})'),
    0x94: (0, 0, 0, 0, False, 'SUB {r}h'),
    0x95: (0, 0, 0, 0, False, 'SUB {r}l'),
    0x96: (1, 0, 1, 0, False, 'SUB ({r}{d:+d})'),
    0x9C: (0, 0, 0, 0, False, 'SBC A,{r}h'),
    0x9D: (0, 0, 0, 0, False, 'SBC A,{r}l'),
    0x9E: (1, 0, 1, 0, False, 'SBC A,({r}{d:+d})'),
    0xA4: (0, 0, 0, 0, False, 'AND {r}h'),
    0xA5: (0, 0, 0, 0, False, 'AND {r}l'),
    0xA6: (1, 0, 1, 0, False, 'AND ({r}{d:+d})'),
    0xAC: (0, 0, 0, 0, False, 'XOR {r}h'),
    0xAD: (0, 0, 0, 0, False, 'XOR {r}l'),
    0xAE: (1, 0, 1, 0, False, 'XOR ({r}{d:+d})'),
    0xB4: (0, 0, 0, 0, False, 'OR {r}h'),
    0xB5: (0, 0, 0, 0, False, 'OR {r}l'),
    0xB6: (1, 0, 1, 0, False, 'OR ({r}{d:+d})'),
    0xBC: (0, 0, 0, 0, False, 'CP {r}h'),
    0xBD: (0, 0, 0, 0, False, 'CP {r}l'),
    0xBE: (1, 0, 1, 0, False, 'CP ({r}{d:+d})'),
    0xE1: (0, 0, 2, 0, False, 'POP {r}'),
    0xE3: (0, 0, 2, 2, False, 'EX (SP),{r}'),
    0xE5: (0, 0, 0, -2, False, 'PUSH {r}'),
    0xE9: (0, 0, 0, 0, False, 'JP ({r})'),
    0xF9: (0, 0, 0, 0, False, 'LD SP,{r}')
}
# Instructions with DD CB or FD CB prefix.
# For these instructions, the displacement precedes the opcode byte.
# This is handled as a special case in the code, and thus the entries
# in this table specify 0 for the displacement length.
index_bit_instructions = {
0x00: (0, 0, 1, 1, False, 'RLC ({r}{d:+d}),B'),
0x01: (0, 0, 1, 1, False, 'RLC ({r}{d:+d}),C'),
0x02: (0, 0, 1, 1, False, 'RLC ({r}{d:+d}),D'),
0x03: (0, 0, 1, 1, False, 'RLC ({r}{d:+d}),E'),
0x04: (0, 0, 1, 1, False, 'RLC ({r}{d:+d}),H'),
0x05: (0, 0, 1, 1, False, 'RLC ({r}{d:+d}),L'),
0x06: (0, 0, 1, 1, False, 'RLC ({r}{d:+d})'),
0x07: (0, 0, 1, 1, False, 'RLC ({r}{d:+d}),A'),
0x08: (0, 0, 1, 1, False, 'RRC ({r}{d:+d}),B'),
0x09: (0, 0, 1, 1, False, 'RRC ({r}{d:+d}),C'),
0x0A: (0, 0, 1, 1, False, 'RRC ({r}{d:+d}),D'),
0x0B: (0, 0, 1, 1, False, 'RRC ({r}{d:+d}),E'),
0x0C: (0, 0, 1, 1, False, 'RRC ({r}{d:+d}),H'),
0x0D: (0, 0, 1, 1, False, 'RRC ({r}{d:+d}),L'),
0x0E: (0, 0, 1, 1, False, 'RRC ({r}{d:+d})'),
0x0F: (0, 0, 1, 1, False, 'RRC ({r}{d:+d}),A'),
0x10: (0, 0, 1, 1, False, 'RL ({r}{d:+d}),B'),
0x11: (0, 0, 1, 1, False, 'RL ({r}{d:+d}),C'),
0x12: (0, 0, 1, 1, False, 'RL ({r}{d:+d}),D'),
0x13: (0, 0, 1, 1, False, 'RL ({r}{d:+d}),E'),
0x14: (0, 0, 1, 1, False, 'RL ({r}{d:+d}),H'),
0x15: (0, 0, 1, 1, False, 'RL ({r}{d:+d}),L'),
0x16: (0, 0, 1, 1, False, 'RL ({r}{d:+d})'),
0x17: (0, 0, 1, 1, False, 'RL ({r}{d:+d}),A'),
0x18: (0, 0, 1, 1, False, 'RR ({r}{d:+d}),B'),
0x19: (0, 0, 1, 1, False, 'RR ({r}{d:+d}),C'),
0x1A: (0, 0, 1, 1, False, 'RR ({r}{d:+d}),D'),
0x1B: (0, 0, 1, 1, False, 'RR ({r}{d:+d}),E'),
0x1C: (0, 0, 1, 1, False, 'RR ({r}{d:+d}),H'),
0x1D: (0, 0, 1, 1, False, 'RR ({r}{d:+d}),L'),
0x1E: (0, 0, 1, 1, False, 'RR ({r}{d:+d})'),
0x1F: (0, 0, 1, 1, False, 'RR ({r}{d:+d}),A'),
0x20: (0, 0, 1, 1, False, 'SLA ({r}{d:+d}),B'),
0x21: (0, 0, 1, 1, False, 'SLA ({r}{d:+d}),C'),
0x22: (0, 0, 1, 1, False, 'SLA ({r}{d:+d}),D'),
0x23: (0, 0, 1, 1, False, 'SLA ({r}{d:+d}),E'),
0x24: (0, 0, 1, 1, False, 'SLA ({r}{d:+d}),H'),
0x25: (0, 0, 1, 1, False, 'SLA ({r}{d:+d}),L'),
0x26: (0, 0, 1, 1, False, 'SLA ({r}{d:+d})'),
0x27: (0, 0, 1, 1, False, 'SLA ({r}{d:+d}),A'),
0x28: (0, 0, 1, 1, False, 'SRA ({r}{d:+d}),B'),
0x29: (0, 0, 1, 1, False, 'SRA ({r}{d:+d}),C'),
0x2A: (0, 0, 1, 1, False, 'SRA ({r}{d:+d}),D'),
0x2B: (0, 0, 1, 1, False, 'SRA ({r}{d:+d}),E'),
0x2C: (0, 0, 1, 1, False, 'SRA ({r}{d:+d}),H'),
0x2D: (0, 0, 1, 1, False, 'SRA ({r}{d:+d}),L'),
0x2E: (0, 0, 1, 1, False, 'SRA ({r}{d:+d})'),
0x2F: (0, 0, 1, 1, False, 'SRA ({r}{d:+d}),A'),
0x30: (0, 0, 1, 1, False, 'SLL ({r}{d:+d}),B'),
0x31: (0, 0, 1, 1, False, 'SLL ({r}{d:+d}),C'),
0x32: (0, 0, 1, 1, False, 'SLL ({r}{d:+d}),D'),
0x33: (0, 0, 1, 1, False, 'SLL ({r}{d:+d}),E'),
0x34: (0, 0, 1, 1, False, 'SLL ({r}{d:+d}),H'),
0x35: (0, 0, 1, 1, False, 'SLL ({r}{d:+d}),L'),
0x36: (0, 0, 1, 1, False, 'SLL ({r}{d:+d})'),
0x37: (0, 0, 1, 1, False, 'SLL ({r}{d:+d}),A'),
0x38: (0, 0, 1, 1, False, 'SRL ({r}{d:+d}),B'),
0x39: (0, 0, 1, 1, False, 'SRL ({r}{d:+d}),C'),
0x3A: (0, 0, 1, 1, False, 'SRL ({r}{d:+d}),D'),
0x3B: (0, 0, 1, 1, False, 'SRL ({r}{d:+d}),E'),
0x3C: (0, 0, 1, 1, False, 'SRL ({r}{d:+d}),H'),
0x3D: (0, 0, 1, 1, False, 'SRL ({r}{d:+d}),L'),
0x3E: (0, 0, 1, 1, False, 'SRL ({r}{d:+d})'),
0x3F: (0, 0, 1, 1, False, 'SRL ({r}{d:+d}),A'),
0x40: (0, 0, 1, 0, False, 'BIT 0,({r}{d:+d})'),
0x41: (0, 0, 1, 0, False, 'BIT 0,({r}{d:+d})'),
0x42: (0, 0, 1, 0, False, 'BIT 0,({r}{d:+d})'),
0x43: (0, 0, 1, 0, False, 'BIT 0,({r}{d:+d})'),
0x44: (0, 0, 1, 0, False, 'BIT 0,({r}{d:+d})'),
0x45: (0, 0, 1, 0, False, 'BIT 0,({r}{d:+d})'),
0x46: (0, 0, 1, 0, False, 'BIT 0,({r}{d:+d})'),
0x47: (0, 0, 1, 0, False, 'BIT 0,({r}{d:+d})'),
0x48: (0, 0, 1, 0, False, 'BIT 1,({r}{d:+d})'),
0x49: (0, 0, 1, 0, False, 'BIT 1,({r}{d:+d})'),
0x4A: (0, 0, 1, 0, False, 'BIT 1,({r}{d:+d})'),
0x4B: (0, 0, 1, 0, False, 'BIT 1,({r}{d:+d})'),
0x4C: (0, 0, 1, 0, False, 'BIT 1,({r}{d:+d})'),
0x4D: (0, 0, 1, 0, False, 'BIT 1,({r}{d:+d})'),
0x4E: (0, 0, 1, 0, False, 'BIT 1,({r}{d:+d})'),
0x4F: (0, 0, 1, 0, False, 'BIT 1,({r}{d:+d})'),
0x50: (0, 0, 1, 0, False, 'BIT 2,({r}{d:+d})'),
0x51: (0, 0, 1, 0, False, 'BIT 2,({r}{d:+d})'),
0x52: (0, 0, 1, 0, False, 'BIT 2,({r}{d:+d})'),
0x53: (0, 0, 1, 0, False, 'BIT 2,({r}{d:+d})'),
0x54: (0, 0, 1, 0, False, 'BIT 2,({r}{d:+d})'),
0x55: (0, 0, 1, 0, False, 'BIT 2,({r}{d:+d})'),
0x56: (0, 0, 1, 0, False, 'BIT 2,({r}{d:+d})'),
0x57: (0, 0, 1, 0, False, 'BIT 2,({r}{d:+d})'),
0x58: (0, 0, 1, 0, False, 'BIT 3,({r}{d:+d})'),
0x59: (0, 0, 1, 0, False, 'BIT 3,({r}{d:+d})'),
0x5A: (0, 0, 1, 0, False, 'BIT 3,({r}{d:+d})'),
0x5B: (0, 0, 1, 0, False, 'BIT 3,({r}{d:+d})'),
0x5C: (0, 0, 1, 0, False, 'BIT 3,({r}{d:+d})'),
0x5D: (0, 0, 1, 0, False, 'BIT 3,({r}{d:+d})'),
0x5E: (0, 0, 1, 0, False, 'BIT 3,({r}{d:+d})'),
0x5F: (0, 0, 1, 0, False, 'BIT 3,({r}{d:+d})'),
0x60: (0, 0, 1, 0, False, 'BIT 4,({r}{d:+d})'),
0x61: (0, 0, 1, 0, False, 'BIT 4,({r}{d:+d})'),
0x62: (0, 0, 1, 0, False, 'BIT 4,({r}{d:+d})'),
0x63: (0, 0, 1, 0, False, 'BIT 4,({r}{d:+d})'),
0x64: (0, 0, 1, 0, False, 'BIT 4,({r}{d:+d})'),
0x65: (0, 0, 1, 0, False, 'BIT 4,({r}{d:+d})'),
0x66: (0, 0, 1, 0, False, 'BIT 4,({r}{d:+d})'),
0x67: (0, 0, 1, 0, False, 'BIT 4,({r}{d:+d})'),
0x68: (0, 0, 1, 0, False, 'BIT 5,({r}{d:+d})'),
0x69: (0, 0, 1, 0, False, 'BIT 5,({r}{d:+d})'),
0x6A: (0, 0, 1, 0, False, 'BIT 5,({r}{d:+d})'),
0x6B: (0, 0, 1, 0, False, 'BIT 5,({r}{d:+d})'),
0x6C: (0, 0, 1, 0, False, 'BIT 5,({r}{d:+d})'),
0x6D: (0, 0, 1, 0, False, 'BIT 5,({r}{d:+d})'),
0x6E: (0, 0, 1, 0, False, 'BIT 5,({r}{d:+d})'),
0x6F: (0, 0, 1, 0, False, 'BIT 5,({r}{d:+d})'),
0x70: (0, 0, 1, 0, False, 'BIT 6,({r}{d:+d})'),
0x71: (0, 0, 1, 0, False, 'BIT 6,({r}{d:+d})'),
0x72: (0, 0, 1, 0, False, 'BIT 6,({r}{d:+d})'),
0x73: (0, 0, 1, 0, False, 'BIT 6,({r}{d:+d})'),
0x74: (0, 0, 1, 0, False, 'BIT 6,({r}{d:+d})'),
0x75: (0, 0, 1, 0, False, 'BIT 6,({r}{d:+d})'),
0x76: (0, 0, 1, 0, False, 'BIT 6,({r}{d:+d})'),
0x77: (0, 0, 1, 0, False, 'BIT 6,({r}{d:+d})'),
0x78: (0, 0, 1, 0, False, 'BIT 7,({r}{d:+d})'),
0x79: (0, 0, 1, 0, False, 'BIT 7,({r}{d:+d})'),
0x7A: (0, 0, 1, 0, False, 'BIT 7,({r}{d:+d})'),
0x7B: (0, 0, 1, 0, False, 'BIT 7,({r}{d:+d})'),
0x7C: (0, 0, 1, 0, False, 'BIT 7,({r}{d:+d})'),
0x7D: (0, 0, 1, 0, False, 'BIT 7,({r}{d:+d})'),
0x7E: (0, 0, 1, 0, False, 'BIT 7,({r}{d:+d})'),
0x7F: (0, 0, 1, 0, False, 'BIT 7,({r}{d:+d})'),
0x80: (0, 0, 1, 1, False, 'RES 0,({r}{d:+d}),B'),
0x81: (0, 0, 1, 1, False, 'RES 0,({r}{d:+d}),C'),
0x82: (0, 0, 1, 1, False, 'RES 0,({r}{d:+d}),D'),
0x83: (0, 0, 1, 1, False, 'RES 0,({r}{d:+d}),E'),
0x84: (0, 0, 1, 1, False, 'RES 0,({r}{d:+d}),H'),
0x85: (0, 0, 1, 1, False, 'RES 0,({r}{d:+d}),L'),
0x86: (0, 0, 1, 1, False, 'RES 0,({r}{d:+d})'),
0x87: (0, 0, 1, 1, False, 'RES 0,({r}{d:+d}),A'),
0x88: (0, 0, 1, 1, False, 'RES 1,({r}{d:+d}),B'),
0x89: (0, 0, 1, 1, False, 'RES 1,({r}{d:+d}),C'),
0x8A: (0, 0, 1, 1, False, 'RES 1,({r}{d:+d}),D'),
0x8B: (0, 0, 1, 1, False, 'RES 1,({r}{d:+d}),E'),
0x8C: (0, 0, 1, 1, False, 'RES 1,({r}{d:+d}),H'),
0x8D: (0, 0, 1, 1, False, 'RES 1,({r}{d:+d}),L'),
0x8E: (0, 0, 1, 1, False, 'RES 1,({r}{d:+d})'),
0x8F: (0, 0, 1, 1, False, 'RES 1,({r}{d:+d}),A'),
0x90: (0, 0, 1, 1, False, 'RES 2,({r}{d:+d}),B'),
0x91: (0, 0, 1, 1, False, 'RES 2,({r}{d:+d}),C'),
0x92: (0, 0, 1, 1, False, 'RES 2,({r}{d:+d}),D'),
0x93: (0, 0, 1, 1, False, 'RES 2,({r}{d:+d}),E'),
0x94: (0, 0, 1, 1, False, 'RES 2,({r}{d:+d}),H'),
0x95: (0, 0, 1, 1, False, 'RES 2,({r}{d:+d}),L'),
0x96: (0, 0, 1, 1, False, 'RES 2,({r}{d:+d})'),
0x97: (0, 0, 1, 1, False, 'RES 2,({r}{d:+d}),A'),
0x98: (0, 0, 1, 1, False, 'RES 3,({r}{d:+d}),B'),
0x99: (0, 0, 1, 1, False, 'RES 3,({r}{d:+d}),C'),
0x9A: (0, 0, 1, 1, False, 'RES 3,({r}{d:+d}),D'),
0x9B: (0, 0, 1, 1, False, 'RES 3,({r}{d:+d}),E'),
0x9C: (0, 0, 1, 1, False, 'RES 3,({r}{d:+d}),H'),
0x9D: (0, 0, 1, 1, False, 'RES 3,({r}{d:+d}),L'),
0x9E: (0, 0, 1, 1, False, 'RES 3,({r}{d:+d})'),
0x9F: (0, 0, 1, 1, False, 'RES 3,({r}{d:+d}),A'),
0xA0: (0, 0, 1, 1, False, 'RES 4,({r}{d:+d}),B'),
0xA1: (0, 0, 1, 1, False, 'RES 4,({r}{d:+d}),C'),
0xA2: (0, 0, 1, 1, False, 'RES 4,({r}{d:+d}),D'),
0xA3: (0, 0, 1, 1, False, 'RES 4,({r}{d:+d}),E'),
0xA4: (0, 0, 1, 1, False, 'RES 4,({r}{d:+d}),H'),
0xA5: (0, 0, 1, 1, False, 'RES 4,({r}{d:+d}),L'),
0xA6: (0, 0, 1, 1, False, 'RES 4,({r}{d:+d})'),
0xA7: (0, 0, 1, 1, False, 'RES 4,({r}{d:+d}),A'),
0xA8: (0, 0, 1, 1, False, 'RES 5,({r}{d:+d}),B'),
0xA9: (0, 0, 1, 1, False, 'RES 5,({r}{d:+d}),C'),
0xAA: (0, 0, 1, 1, False, 'RES 5,({r}{d:+d}),D'),
0xAB: (0, 0, 1, 1, False, 'RES 5,({r}{d:+d}),E'),
0xAC: (0, 0, 1, 1, False, 'RES 5,({r}{d:+d}),H'),
0xAD: (0, 0, 1, 1, False, 'RES 5,({r}{d:+d}),L'),
0xAE: (0, 0, 1, 1, False, 'RES 5,({r}{d:+d})'),
0xAF: (0, 0, 1, 1, False, 'RES 5,({r}{d:+d}),A'),
0xB0: (0, 0, 1, 1, False, 'RES 6,({r}{d:+d}),B'),
0xB1: (0, 0, 1, 1, False, 'RES 6,({r}{d:+d}),C'),
0xB2: (0, 0, 1, 1, False, 'RES 6,({r}{d:+d}),D'),
0xB3: (0, 0, 1, 1, False, 'RES 6,({r}{d:+d}),E'),
0xB4: (0, 0, 1, 1, False, 'RES 6,({r}{d:+d}),H'),
0xB5: (0, 0, 1, 1, False, 'RES 6,({r}{d:+d}),L'),
0xB6: (0, 0, 1, 1, False, 'RES 6,({r}{d:+d})'),
0xB7: (0, 0, 1, 1, False, 'RES 6,({r}{d:+d}),A'),
0xB8: (0, 0, 1, 1, False, 'RES 7,({r}{d:+d}),B'),
0xB9: (0, 0, 1, 1, False, 'RES 7,({r}{d:+d}),C'),
0xBA: (0, 0, 1, 1, False, 'RES 7,({r}{d:+d}),D'),
0xBB: (0, 0, 1, 1, False, 'RES 7,({r}{d:+d}),E'),
0xBC: (0, 0, 1, 1, False, 'RES 7,({r}{d:+d}),H'),
0xBD: (0, 0, 1, 1, False, 'RES 7,({r}{d:+d}),L'),
0xBE: (0, 0, 1, 1, False, 'RES 7,({r}{d:+d})'),
0xBF: (0, 0, 1, 1, False, 'RES 7,({r}{d:+d}),A'),
0xC0: (0, 0, 1, 1, False, 'SET 0,({r}{d:+d}),B'),
0xC1: (0, 0, 1, 1, False, 'SET 0,({r}{d:+d}),C'),
0xC2: (0, 0, 1, 1, False, 'SET 0,({r}{d:+d}),D'),
0xC3: (0, 0, 1, 1, False, 'SET 0,({r}{d:+d}),E'),
0xC4: (0, 0, 1, 1, False, 'SET 0,({r}{d:+d}),H'),
0xC5: (0, 0, 1, 1, False, 'SET 0,({r}{d:+d}),L'),
0xC6: (0, 0, 1, 1, False, 'SET 0,({r}{d:+d})'),
0xC7: (0, 0, 1, 1, False, 'SET 0,({r}{d:+d}),A'),
0xC8: (0, 0, 1, 1, False, 'SET 1,({r}{d:+d}),B'),
0xC9: (0, 0, 1, 1, False, 'SET 1,({r}{d:+d}),C'),
0xCA: (0, 0, 1, 1, False, 'SET 1,({r}{d:+d}),D'),
0xCB: (0, 0, 1, 1, False, 'SET 1,({r}{d:+d}),E'),
0xCC: (0, 0, 1, 1, False, 'SET 1,({r}{d:+d}),H'),
0xCD: (0, 0, 1, 1, False, 'SET 1,({r}{d:+d}),L'),
0xCE: (0, 0, 1, 1, False, 'SET 1,({r}{d:+d})'),
0xCF: (0, 0, 1, 1, False, 'SET 1,({r}{d:+d}),A'),
0xD0: (0, 0, 1, 1, False, 'SET 2,({r}{d:+d}),B'),
0xD1: (0, 0, 1, 1, False, 'SET 2,({r}{d:+d}),C'),
0xD2: (0, 0, 1, 1, False, 'SET 2,({r}{d:+d}),D'),
0xD3: (0, 0, 1, 1, False, 'SET 2,({r}{d:+d}),E'),
0xD4: (0, 0, 1, 1, False, 'SET 2,({r}{d:+d}),H'),
0xD5: (0, 0, 1, 1, False, 'SET 2,({r}{d:+d}),L'),
0xD6: (0, 0, 1, 1, False, 'SET 2,({r}{d:+d})'),
0xD7: (0, 0, 1, 1, False, 'SET 2,({r}{d:+d}),A'),
0xD8: (0, 0, 1, 1, False, 'SET 3,({r}{d:+d}),B'),
0xD9: (0, 0, 1, 1, False, 'SET 3,({r}{d:+d}),C'),
0xDA: (0, 0, 1, 1, False, 'SET 3,({r}{d:+d}),D'),
0xDB: (0, 0, 1, 1, False, 'SET 3,({r}{d:+d}),E'),
0xDC: (0, 0, 1, 1, False, 'SET 3,({r}{d:+d}),H'),
0xDD: (0, 0, 1, 1, False, 'SET 3,({r}{d:+d}),L'),
0xDE: (0, 0, 1, 1, False, 'SET 3,({r}{d:+d})'),
0xDF: (0, 0, 1, 1, False, 'SET 3,({r}{d:+d}),A'),
0xE0: (0, 0, 1, 1, False, 'SET 4,({r}{d:+d}),B'),
0xE1: (0, 0, 1, 1, False, 'SET 4,({r}{d:+d}),C'),
0xE2: (0, 0, 1, 1, False, 'SET 4,({r}{d:+d}),D'),
0xE3: (0, 0, 1, 1, False, 'SET 4,({r}{d:+d}),E'),
0xE4: (0, 0, 1, 1, False, 'SET 4,({r}{d:+d}),H'),
0xE5: (0, 0, 1, 1, False, 'SET 4,({r}{d:+d}),L'),
0xE6: (0, 0, 1, 1, False, 'SET 4,({r}{d:+d})'),
0xE7: (0, 0, 1, 1, False, 'SET 4,({r}{d:+d}),A'),
0xE8: (0, 0, 1, 1, False, 'SET 5,({r}{d:+d}),B'),
0xE9: (0, 0, 1, 1, False, 'SET 5,({r}{d:+d}),C'),
0xEA: (0, 0, 1, 1, False, 'SET 5,({r}{d:+d}),D'),
0xEB: (0, 0, 1, 1, False, 'SET 5,({r}{d:+d}),E'),
0xEC: (0, 0, 1, 1, False, 'SET 5,({r}{d:+d}),H'),
0xED: (0, 0, 1, 1, False, 'SET 5,({r}{d:+d}),L'),
0xEE: (0, 0, 1, 1, False, 'SET 5,({r}{d:+d})'),
0xEF: (0, 0, 1, 1, False, 'SET 5,({r}{d:+d}),A'),
0xF0: (0, 0, 1, 1, False, 'SET 6,({r}{d:+d}),B'),
0xF1: (0, 0, 1, 1, False, 'SET 6,({r}{d:+d}),C'),
0xF2: (0, 0, 1, 1, False, 'SET 6,({r}{d:+d}),D'),
0xF3: (0, 0, 1, 1, False, 'SET 6,({r}{d:+d}),E'),
0xF4: (0, 0, 1, 1, False, 'SET 6,({r}{d:+d}),H'),
0xF5: (0, 0, 1, 1, False, 'SET 6,({r}{d:+d}),L'),
0xF6: (0, 0, 1, 1, False, 'SET 6,({r}{d:+d})'),
0xF7: (0, 0, 1, 1, False, 'SET 6,({r}{d:+d}),A'),
0xF8: (0, 0, 1, 1, False, 'SET 7,({r}{d:+d}),B'),
0xF9: (0, 0, 1, 1, False, 'SET 7,({r}{d:+d}),C'),
0xFA: (0, 0, 1, 1, False, 'SET 7,({r}{d:+d}),D'),
0xFB: (0, 0, 1, 1, False, 'SET 7,({r}{d:+d}),E'),
0xFC: (0, 0, 1, 1, False, 'SET 7,({r}{d:+d}),H'),
0xFD: (0, 0, 1, 1, False, 'SET 7,({r}{d:+d}),L'),
0xFE: (0, 0, 1, 1, False, 'SET 7,({r}{d:+d})'),
0xFF: (0, 0, 1, 1, False, 'SET 7,({r}{d:+d}),A')
}
# Dispatch table keyed by opcode prefix.  Each value is a pair of
# (instruction table, index register name): the register name is
# substituted for the {r} placeholder in that table's format strings
# (empty for tables whose mnemonics contain no {r}).
instr_table_by_prefix = {
    0:      (main_instructions, ''),        # un-prefixed opcodes
    0xED:   (extended_instructions, ''),
    0xCB:   (bit_instructions, ''),
    0xDD:   (index_instructions, 'IX'),     # IX-indexed variants
    0xFD:   (index_instructions, 'IY'),     # IY-indexed variants
    0xDDCB: (index_bit_instructions, 'IX'), # two-byte prefix: bit ops on (IX+d)
    0xFDCB: (index_bit_instructions, 'IY')  # two-byte prefix: bit ops on (IY+d)
}
|
gpl-3.0
|
weidnem/IntroPython2016
|
students/pvosper/session04/file_lab.py
|
3
|
3886
|
#!/usr/bin/env python3
# === File Lab =====
'''
write a program which prints the full path to all files in the current
directory, one per line
'''
import os
# Show where we are, then enumerate the directory contents.
print('\nFull path to current directory:')
print(os.getcwd())
# create list of all files in current directory
directory_list = os.listdir()
print('\nFiles in current directory:')
for entry in directory_list:
    print("\t", entry)
# Visual separator between exercise 1 and exercise 2.
print('\n'*2)
print('-'*80)
print('\n'*2)
'''
Write a program which copies a file from a source, to a destination (without
using shutil, or the OS copy command)
'''
# Function to create 80 character line of gibberish
def line_of_gibberish():
    """Return one newline-terminated line of 80 pseudo-random characters.

    Each character is drawn uniformly from chr(41)..chr(79) (')' through
    'O', both ends inclusive, matching random.randint semantics).  The
    result is therefore always 81 characters: 80 random ones plus '\n'.

    Returns:
        str: the generated line, ending in a newline.
    """
    import random
    # Build the line in a single pass with str.join instead of the
    # quadratic string concatenation of repeated `line = line + ch`.
    return ''.join(chr(random.randint(41, 79)) for _ in range(80)) + '\n'
# Create file to use as source: 100 lines of random characters.
# `with` guarantees the handle is flushed and closed even on error,
# unlike the original open()/close() pair.
with open('source_file.txt', 'w') as file_object:
    for i in range(100):
        file_object.write(line_of_gibberish())

# Copy source to target line by line.  Iterating the file object yields
# one line at a time (no manual readline()/break loop and no whole-file
# buffering), and both handles are closed by the context managers.
with open('source_file.txt', 'r') as source_file_object, \
        open('target_file.txt', 'w') as target_file_object:
    for line in source_file_object:
        target_file_object.write(line)
'''
Write a little script that reads that file (students.txt), and generates a
list of all the languages that have been used.
Extra credit: keep track of how many students specified each language.
'''
# Create a dictionary from students.txt file
# key = name, value = languages
# NOTE(review): the file handle opened in the for-statement is never
# explicitly closed — consider a `with open(...)` block.
students_dictionary = {}
for line in open('../../../Examples/Session01/students.txt'):
    student_info = []
    student_info = line.split(':')
    if len(student_info) == 2:
        # Normal "name: languages" line; strip the trailing newline.
        students_dictionary[student_info[0]] = student_info[1].replace('\n', ' ')
    else:
        # Line without exactly one ':' separator — no language info.
        students_dictionary[student_info[0]] = '<n/a>'
# Get rid of 'name: languages' header line
# NOTE(review): pop() raises KeyError if the header is absent — verify
# the input always starts with a 'name:' header line.
students_dictionary.pop('name')
# Create a dictionary of languages
# key = language, value = count
# $todo value = list of students who know that language
language_dictionary = {}
language_list = students_dictionary.values()
for entry in language_list:
    # Normalize: lowercase, drop commas, fix two known typos in the data.
    entry = entry.lower().replace(',', '').replace('pyton', 'python').replace('macscript', 'maxscript')
    entry_list = entry.split()
    for language in entry_list:
        if language in language_dictionary:
            language_dictionary[language] = language_dictionary[language] + 1
        else:
            language_dictionary[language] = 1
# Print Languages & number of users
for languages in language_dictionary:
    print("{}\t{:10.0f}".format(languages.ljust(12), language_dictionary[languages]))
# Everything above runs at import time; this guard only adds a banner.
if __name__ == '__main__':
    print('\n=== MAIN ===\n')
'''
=== SAMPLE ===
In [20]: run file_lab.py
Full path to current directory:
/Users/paulvosper/UWPCE_IntroPython2016/IntroPython2016/students/pvosper/session04
Files in current directory:
.DS_Store
dict_lab.py
file_lab.py
kata_14_trigrams.py
lorem_ipsum_generator.py
myfile
sherlock.txt
sherlock_partial.txt
source_file.txt
target_file.txt
temp_file.txt
--------------------------------------------------------------------------------
swift 1
asp 1
r 4
bash 6
cobol 1
ruby 2
sas 1
python 19
perl 3
c 2
maxscript 1
german 1
qbasic 1
actionscript 1
php 4
xml 1
basic 2
sql 12
htl 1
html 7
javascript 1
c# 2
c++ 5
scala 1
java 3
shell 2
=== MAIN ===
'''
|
unlicense
|
mrquim/mrquimrepo
|
script.xbmcbackup/scheduler.py
|
4
|
6840
|
import xbmc
import xbmcvfs
import xbmcgui
import datetime
import time
import os
import resources.lib.utils as utils
from resources.lib.croniter import croniter
from resources.lib.backup import XbmcBackup
class BackupScheduler:
    """Kodi service that runs XbmcBackup jobs on a cron-style schedule.

    Reads its configuration through the addon settings (via utils),
    computes the next run time with croniter, persists that time to
    next_run.txt so missed runs can be caught up after a restart, and
    polls in a loop until Kodi requests shutdown.
    """
    monitor = None           # UpdateMonitor forwarding settings changes
    enabled = "false"        # addon setting value, stored as the string "true"/"false"
    next_run = 0             # next scheduled run, as a Unix timestamp
    next_run_path = None     # file where next_run is persisted
    restore_point = None     # restore archive read from resume.txt, if any

    def __init__(self):
        """Load settings, catch up a missed backup if configured, and schedule."""
        self.monitor = UpdateMonitor(update_method = self.settingsChanged)
        self.enabled = utils.getSetting("enable_scheduler")
        self.next_run_path = xbmc.translatePath(utils.data_dir()) + 'next_run.txt'

        if(self.enabled == "true"):
            nr = 0
            if(xbmcvfs.exists(self.next_run_path)):
                fh = xbmcvfs.File(self.next_run_path)
                try:
                    #check if we saved a run time from the last run
                    nr = float(fh.read())
                except ValueError:
                    # File contents were not a number; treat as no saved time.
                    nr = 0
                fh.close()

            #if we missed and the user wants to play catch-up
            if(0 < nr <= time.time() and utils.getSetting('schedule_miss') == 'true'):
                utils.log("scheduled backup was missed, doing it now...")
                progress_mode = int(utils.getSetting('progress_mode'))
                if(progress_mode == 0):
                    progress_mode = 1 # Kodi just started, don't block it with a foreground progress bar
                self.doScheduledBackup(progress_mode)
            # NOTE(review): indentation reconstructed — setup() is assumed
            # to run only when the scheduler is enabled; confirm upstream.
            self.setup()

    def setup(self):
        """Compute and record the next scheduled run time."""
        #scheduler was turned on, find next run time
        utils.log("scheduler enabled, finding next run time")
        self.findNextRun(time.time())

    def start(self):
        """Main service loop: resume any pending restore, then poll the clock."""
        #check if a backup should be resumed
        resumeRestore = self._resumeCheck()

        if(resumeRestore):
            restore = XbmcBackup()
            restore.selectRestore(self.restore_point)
            #skip the advanced settings check
            restore.skipAdvanced()
            restore.run(XbmcBackup.Restore)

        # Poll every 500 ms until Kodi signals shutdown.
        while(not xbmc.abortRequested):
            if(self.enabled == "true"):
                #scheduler is still on
                now = time.time()

                if(self.next_run <= now):
                    progress_mode = int(utils.getSetting('progress_mode'))
                    self.doScheduledBackup(progress_mode)

                    #check if we should shut the computer down
                    if(utils.getSetting("cron_shutdown") == 'true'):
                        #wait 10 seconds to make sure all backup processes and files are completed
                        time.sleep(10)
                        xbmc.executebuiltin('ShutDown()')
                    else:
                        #find the next run time like normal
                        self.findNextRun(now)

            xbmc.sleep(500)

        #delete monitor to free up memory
        del self.monitor

    def doScheduledBackup(self,progress_mode):
        """Run one backup with the given progress mode (0/1 visible, 2 silent)."""
        if(progress_mode != 2):
            utils.showNotification(utils.getString(30053))

        backup = XbmcBackup()

        if(backup.remoteConfigured()):
            if(int(utils.getSetting('progress_mode')) in [0,1]):
                backup.run(XbmcBackup.Backup,True)
            else:
                backup.run(XbmcBackup.Backup,False)

            #check if this is a "one-off"
            if(int(utils.getSetting("schedule_interval")) == 0):
                #disable the scheduler after this run
                self.enabled = "false"
                utils.setSetting('enable_scheduler','false')
        else:
            # Remote destination not configured; notify instead of running.
            utils.showNotification(utils.getString(30045))

    def findNextRun(self,now):
        """Recompute next_run from the cron schedule and persist it to disk."""
        progress_mode = int(utils.getSetting('progress_mode'))
        #find the cron expression and get the next run time
        cron_exp = self.parseSchedule()

        cron_ob = croniter(cron_exp,datetime.datetime.fromtimestamp(now))
        new_run_time = cron_ob.get_next(float)

        if(new_run_time != self.next_run):
            self.next_run = new_run_time
            utils.log("scheduler will run again on " + datetime.datetime.fromtimestamp(self.next_run).strftime('%m-%d-%Y %H:%M'))

            #write the next time to a file
            fh = xbmcvfs.File(self.next_run_path, 'w')
            fh.write(str(self.next_run))
            fh.close()

            #only show when not in silent mode
            if(progress_mode != 2):
                utils.showNotification(utils.getString(30081) + " " + datetime.datetime.fromtimestamp(self.next_run).strftime('%m-%d-%Y %H:%M'))

    def settingsChanged(self):
        """Callback from UpdateMonitor: react to the enable toggle and reschedule."""
        current_enabled = utils.getSetting("enable_scheduler")

        if(current_enabled == "true" and self.enabled == "false"):
            #scheduler was just turned on
            self.enabled = current_enabled
            self.setup()
        elif (current_enabled == "false" and self.enabled == "true"):
            #schedule was turn off
            self.enabled = current_enabled

        if(self.enabled == "true"):
            #always recheck the next run time after an update
            self.findNextRun(time.time())

    def parseSchedule(self):
        """Translate the addon's schedule settings into a cron expression.

        schedule_interval: 0/1 = daily (0 also means one-off), 2 = weekly,
        3 = first of the month; any other value falls through to the raw
        'cron_schedule' setting.
        """
        schedule_type = int(utils.getSetting("schedule_interval"))
        cron_exp = utils.getSetting("cron_schedule")

        hour_of_day = utils.getSetting("schedule_time")
        hour_of_day = int(hour_of_day[0:2])
        if(schedule_type == 0 or schedule_type == 1):
            #every day
            cron_exp = "0 " + str(hour_of_day) + " * * *"
        elif(schedule_type == 2):
            #once a week
            day_of_week = utils.getSetting("day_of_week")
            cron_exp = "0 " + str(hour_of_day) + " * * " + day_of_week
        elif(schedule_type == 3):
            #first day of month
            cron_exp = "0 " + str(hour_of_day) + " 1 * *"

        return cron_exp

    def _resumeCheck(self):
        """Check for resume.txt; if present, load the restore point, delete the
        marker and ask the user (yes/no dialog) whether to resume the restore."""
        shouldContinue = False

        if(xbmcvfs.exists(xbmc.translatePath(utils.data_dir() + "resume.txt"))):
            rFile = xbmcvfs.File(xbmc.translatePath(utils.data_dir() + "resume.txt"),'r')
            self.restore_point = rFile.read()
            rFile.close()
            xbmcvfs.delete(xbmc.translatePath(utils.data_dir() + "resume.txt"))
            shouldContinue = xbmcgui.Dialog().yesno(utils.getString(30042),utils.getString(30043),utils.getString(30044))

        return shouldContinue
class UpdateMonitor(xbmc.Monitor):
    """Thin xbmc.Monitor subclass that forwards settings-change events
    to the callable passed as the 'update_method' keyword argument."""
    update_method = None

    def __init__(self,*args, **kwargs):
        xbmc.Monitor.__init__(self)
        # Required keyword argument: callable invoked on settings changes.
        self.update_method = kwargs['update_method']

    def onSettingsChanged(self):
        # Delegate straight to the registered callback.
        self.update_method()

# Service entry point: construct the scheduler and block in its poll loop.
BackupScheduler().start()
|
gpl-2.0
|
tejasnikumbh/Algorithms
|
ArraysAndSorting/MarkAndToys.py
|
1
|
1514
|
'''
In place quickSort The quickSort Method
Time Complexity : Best,Avg - O(NlogN) , Worst - O(N^2)
Space Complexity : O(N)
Auxilary Space : O(logN) for the stack frames
'''
def quickSort(a, start, end):
    """Sort a[start:end + 1] in place using quicksort (Lomuto partition).

    The last element of the range is used as the pivot.  The list is
    mutated in place and also returned so calls can be chained.
    """
    # Guard clause: ranges of zero or one element are already sorted.
    if start >= end:
        return a

    pivot = a[end]
    boundary = start                      # next slot for a value < pivot
    for scan in range(start, end + 1):
        if a[scan] < pivot:
            a[scan], a[boundary] = a[boundary], a[scan]
            boundary += 1

    # Place the pivot in its final position.
    a[end], a[boundary] = a[boundary], a[end]

    # Recurse on the two partitions around the pivot.
    quickSort(a, start, boundary - 1)
    quickSort(a, boundary + 1, end)
    return a
'''
Function that returns maximum toys that can be bought. Simple strategy is to
sort the prices array and add as many toys as possible by incrementally adding
up prices from the least to the most until budget is exhausted.
'''
def max_toys(prices, rupees):
    """Return the maximum number of toys purchasable within the budget.

    Greedy strategy: sort the prices ascending and buy the cheapest toys
    until the next one no longer fits in the remaining budget.

    Args:
        prices: list of toy prices; sorted in place as a side effect
            (matching the original implementation's in-place quicksort).
        rupees: total budget available.

    Returns:
        The number of toys that can be bought.
    """
    # Use the built-in in-place sort (Timsort) instead of the hand-rolled
    # recursive quicksort: O(n log n) worst case, and no risk of hitting
    # Python's recursion limit on large or already-sorted inputs.
    prices.sort()

    answer = 0
    remaining = rupees
    for price in prices:
        if remaining < price:
            break  # everything after this is at least as expensive
        remaining -= price
        answer += 1
    return answer
'''
Main function for the program
'''
if __name__ == '__main__':
    # NOTE(review): Python 2 syntax (raw_input, print statement).
    # n (the toy count) is read but unused; len(prices) serves instead.
    n, k = map(int, raw_input().split())
    prices = map(int, raw_input().split())
    print max_toys(prices, k)
|
bsd-2-clause
|
cwyark/v2ex
|
mapreduce/input_readers.py
|
20
|
31679
|
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines input readers for MapReduce."""
# pylint: disable-msg=C6409
import logging
import math
import StringIO
import zipfile
from google.appengine.api import datastore
from mapreduce.lib import blobstore
from google.appengine.ext import db
from mapreduce.lib import key_range
from mapreduce import util
from mapreduce.model import JsonMixin
class Error(Exception):
  """Base-class for exceptions in this module."""


class BadReaderParamsError(Error):
  """The input parameters to a reader were invalid."""
class InputReader(JsonMixin):
  """Abstract base class for input readers.

  InputReaders have the following properties:
   * They are created by using the split_input method to generate a set of
     InputReaders from a MapperSpec.
   * They generate inputs to the mapper via the iterator interface.
   * After creation, they can be serialized and resumed using the JsonMixin
     interface.
   * They are cast to string for a user-readable description; it may be
     valuable to implement __str__.
  """

  # Mapreduce parameters.
  _APP_PARAM = "_app"
  MAPPER_PARAMS = "mapper_params"

  def __iter__(self):
    # Readers are their own iterators; subclasses implement next().
    return self

  def next(self):
    """Returns the next input from this input reader as a key, value pair.

    Returns:
      The next input from this input reader.
    """
    raise NotImplementedError

  @classmethod
  def from_json(cls, input_shard_state):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      input_shard_state: The InputReader state as a dict-like object.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    raise NotImplementedError

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    raise NotImplementedError

  @classmethod
  def split_input(cls, mapper_spec):
    """Returns a list of input readers for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader.

    Returns:
      A list of InputReaders.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    raise NotImplementedError
# TODO(user): Use cursor API as soon as we have it available.
# TODO(user): Use cursor API as soon as we have it available.
class DatastoreInputReader(InputReader):
  """Represents a range in query results.

  DatastoreInputReader yields model instances from the entities in a given key
  range. Iterating over DatastoreInputReader changes its range past consumed
  entries.

  The class shouldn't be instantiated directly. Use the split_input class method
  instead.
  """

  # Number of entities to fetch at once while doing scanning.
  _BATCH_SIZE = 50

  # Maximum number of shards we'll create.
  _MAX_SHARD_COUNT = 256

  # Mapreduce parameters.
  ENTITY_KIND_PARAM = "entity_kind"
  KEYS_ONLY_PARAM = "keys_only"
  BATCH_SIZE_PARAM = "batch_size"
  KEY_RANGE_PARAM = "key_range"

  # TODO(user): Add support for arbitrary queries. It's not possible to
  # support them without cursors since right now you can't even serialize query
  # definition.
  def __init__(self, entity_kind, key_range_param, mapper_params):
    """Create new DatastoreInputReader object.

    This is internal constructor. Use split_query instead.

    Args:
      entity_kind: entity kind as string.
      key_range_param: key range to process as key_range.KeyRange.
      mapper_params: mapper parameters as defined by user.
    """
    self._entity_kind = entity_kind
    self._key_range = key_range_param
    self._mapper_params = mapper_params
    # batch_size from user params overrides the class default.
    self._batch_size = int(self._mapper_params.get(
        self.BATCH_SIZE_PARAM, self._BATCH_SIZE))

  def __iter__(self):
    """Create a generator for model instances for entities.

    Iterating through entities moves query range past the consumed entities.

    Yields:
      next model instance.
    """
    while True:
      # Re-issue the query each batch; advancing the key range makes it
      # resume right after the last consumed entity.
      query = self._key_range.make_ascending_query(
          util.for_name(self._entity_kind))
      results = query.fetch(limit=self._batch_size)
      if not results:
        break
      for model_instance in results:
        key = model_instance.key()
        self._key_range.advance(key)
        yield model_instance

  # TODO(user): use query splitting functionality when it becomes available
  # instead.
  @classmethod
  def _split_input_from_params(cls, app, entity_kind_name,
                               params, shard_count):
    """Return input reader objects. Helper for split_input."""
    raw_entity_kind = util.get_short_name(entity_kind_name)

    # we use datastore.Query instead of ext.db.Query here, because we can't
    # erase ordering on db.Query once we set it.
    ds_query = datastore.Query(kind=raw_entity_kind, _app=app, keys_only=True)
    ds_query.Order("__key__")
    first_entity_key_list = ds_query.Get(1)
    if not first_entity_key_list:
      # No entities of this kind: nothing to shard.
      return []
    first_entity_key = first_entity_key_list[0]
    ds_query.Order(("__key__", datastore.Query.DESCENDING))
    try:
      last_entity_key, = ds_query.Get(1)
    except db.NeedIndexError, e:
      # TODO(user): Show this error in the worker log, not the app logs.
      logging.warning("Cannot create accurate approximation of keyspace, "
                      "guessing instead. Please address this problem: %s", e)
      # TODO(user): Use a key-end hint from the user input parameters
      # in this case, in the event the user has a good way of figuring out
      # the range of the keyspace.
      last_entity_key = key_range.KeyRange.guess_end_key(raw_entity_kind,
                                                         first_entity_key)
    full_keyrange = key_range.KeyRange(
        first_entity_key, last_entity_key, None, True, True, _app=app)
    key_ranges = [full_keyrange]
    # Halve every range floor(log2(shard_count)) times, so the result
    # has the largest power of two <= shard_count ranges.
    number_of_half_splits = int(math.floor(math.log(shard_count, 2)))
    for _ in range(0, number_of_half_splits):
      new_ranges = []
      for r in key_ranges:
        new_ranges += r.split_range(1)
      key_ranges = new_ranges
    return [cls(entity_kind_name, r, params) for r in key_ranges]

  @classmethod
  def split_input(cls, mapper_spec):
    """Splits query into shards without fetching query results.

    Tries as best as it can to split the whole query result set into equal
    shards. Due to difficulty of making the perfect split, resulting shards'
    sizes might differ significantly from each other. The actual number of
    shards might also be less then requested (even 1), though it is never
    greater.

    Current implementation does key-lexicographic order splitting. It requires
    query not to specify any __key__-based ordering. If an index for
    query.order('-__key__') query is not present, an inaccurate guess at
    sharding will be made by splitting the full key range.

    Args:
      mapper_spec: MapperSpec with params containing 'entity_kind'.
        May also have 'batch_size' in the params to specify the number
        of entities to process in each batch.

    Returns:
      A list of InputReader objects of length <= number_of_shards. These
      may be DatastoreInputReader or DatastoreKeyInputReader objects.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Input reader class mismatch")
    params = mapper_spec.params
    if cls.ENTITY_KIND_PARAM not in params:
      raise BadReaderParamsError("Missing mapper parameter 'entity_kind'")

    entity_kind_name = params[cls.ENTITY_KIND_PARAM]
    shard_count = mapper_spec.shard_count
    app = params.get(cls._APP_PARAM)
    # keys_only remains for backwards compatability. It may go away.
    keys_only = util.parse_bool(params.get(cls.KEYS_ONLY_PARAM, False))
    if keys_only:
      raise BadReaderParamsError("The keys_only parameter is obsolete. "
                                 "Use DatastoreKeyInputReader instead.")

    # Fail fast if Model cannot be located.
    util.for_name(entity_kind_name)

    return cls._split_input_from_params(
        app, entity_kind_name, params, shard_count)

  def to_json(self):
    """Serializes all the data in this query range into json form.

    Returns:
      all the data in json-compatible map.
    """
    json_dict = {self.KEY_RANGE_PARAM: self._key_range.to_json(),
                 self.ENTITY_KIND_PARAM: self._entity_kind,
                 self.MAPPER_PARAMS: self._mapper_params}
    return json_dict

  def __str__(self):
    """Returns the string representation of this DatastoreInputReader."""
    return repr(self._key_range)

  @classmethod
  def from_json(cls, json):
    """Create new DatastoreInputReader from the json, encoded by to_json.

    Args:
      json: json map representation of DatastoreInputReader.

    Returns:
      an instance of DatastoreInputReader with all data deserialized from json.
    """
    query_range = cls(json[cls.ENTITY_KIND_PARAM],
                      key_range.KeyRange.from_json(json[cls.KEY_RANGE_PARAM]),
                      json[cls.MAPPER_PARAMS])
    return query_range
class DatastoreKeyInputReader(DatastoreInputReader):
  """An input reader which takes a Kind and yields Keys for that kind."""

  def __iter__(self):
    """Create a generator for keys in the range.

    Iterating through entries moves query range past the consumed entries.

    Yields:
      next entry.
    """
    while True:
      raw_entity_kind = util.get_short_name(self._entity_kind)
      # keys_only avoids fetching entity bodies we would discard anyway.
      query = self._key_range.make_ascending_datastore_query(
          raw_entity_kind, keys_only=True)
      results = query.Get(limit=self._batch_size)
      if not results:
        break
      for key in results:
        self._key_range.advance(key)
        yield key

  @classmethod
  def split_input(cls, mapper_spec):
    """Splits query into shards without fetching query results.

    Tries as best as it can to split the whole query result set into equal
    shards. Due to difficulty of making the perfect split, resulting shards'
    sizes might differ significantly from each other. The actual number of
    shards might also be less then requested (even 1), though it is never
    greater.

    Current implementation does key-lexicographic order splitting. It requires
    query not to specify any __key__-based ordering. If an index for
    query.order('-__key__') query is not present, an inaccurate guess at
    sharding will be made by splitting the full key range.

    Args:
      mapper_spec: MapperSpec with params containing 'entity_kind'.
        May also have 'batch_size' in the params to specify the number
        of entities to process in each batch.

    Returns:
      A list of DatastoreKeyInputReader objects of length <= number_of_shards.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Input reader class mismatch")
    params = mapper_spec.params
    if cls.ENTITY_KIND_PARAM not in params:
      raise BadReaderParamsError("Missing mapper parameter 'entity_kind'")

    entity_kind_name = params[cls.ENTITY_KIND_PARAM]
    shard_count = mapper_spec.shard_count
    app = params.get(cls._APP_PARAM)

    return cls._split_input_from_params(
        app, entity_kind_name, params, shard_count)
class DatastoreEntityInputReader(DatastoreInputReader):
  """An input reader which yields low level datastore entities for a kind."""

  def __iter__(self):
    """Create a generator for low level entities in the range.

    Iterating through entries moves query range past the consumed entries.

    Yields:
      next entry.
    """
    while True:
      raw_entity_kind = util.get_short_name(self._entity_kind)
      # Raw datastore query: yields datastore.Entity, not db.Model instances.
      query = self._key_range.make_ascending_datastore_query(raw_entity_kind)
      results = query.Get(limit=self._batch_size)
      if not results:
        break
      for entity in results:
        self._key_range.advance(entity.key())
        yield entity

  @classmethod
  def split_input(cls, mapper_spec):
    """Splits query into shards without fetching query results.

    Tries as best as it can to split the whole query result set into equal
    shards. Due to difficulty of making the perfect split, resulting shards'
    sizes might differ significantly from each other. The actual number of
    shards might also be less then requested (even 1), though it is never
    greater.

    Current implementation does key-lexicographic order splitting. It requires
    query not to specify any __key__-based ordering. If an index for
    query.order('-__key__') query is not present, an inaccurate guess at
    sharding will be made by splitting the full key range.

    Args:
      mapper_spec: MapperSpec with params containing 'entity_kind'.
        May also have 'batch_size' in the params to specify the number
        of entities to process in each batch.

    Returns:
      List of DatastoreEntityInputReader objects of length <= number_of_shards.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Input reader class mismatch")
    params = mapper_spec.params
    if cls.ENTITY_KIND_PARAM not in params:
      raise BadReaderParamsError("Missing mapper parameter 'entity_kind'")

    entity_kind_name = params[cls.ENTITY_KIND_PARAM]
    shard_count = mapper_spec.shard_count
    app = params.get(cls._APP_PARAM)

    return cls._split_input_from_params(
        app, entity_kind_name, params, shard_count)
class BlobstoreLineInputReader(InputReader):
  """Input reader for a newline delimited blob in Blobstore."""

  # TODO(user): Should we set this based on MAX_BLOB_FETCH_SIZE?
  _BLOB_BUFFER_SIZE = 64000

  # Maximum number of shards to allow.
  _MAX_SHARD_COUNT = 256

  # Maximum number of blobs to allow.
  _MAX_BLOB_KEYS_COUNT = 246

  # Mapreduce parameters.
  BLOB_KEYS_PARAM = "blob_keys"

  # Serialization parameters.
  INITIAL_POSITION_PARAM = "initial_position"
  END_POSITION_PARAM = "end_position"
  BLOB_KEY_PARAM = "blob_key"

  def __init__(self, blob_key, start_position, end_position):
    """Initializes this instance with the given blob key and character range.

    This BlobstoreInputReader will read from the first record starting
    strictly after start_position until the first record ending at or after
    end_position (exclusive). As an exception, if start_position is 0, then
    this InputReader starts reading at the first record.

    Args:
      blob_key: the BlobKey that this input reader is processing.
      start_position: the position to start reading at.
      end_position: a position in the last record to read.
    """
    self._blob_key = blob_key
    self._blob_reader = blobstore.BlobReader(blob_key,
                                             self._BLOB_BUFFER_SIZE,
                                             start_position)
    self._end_position = end_position
    self._has_iterated = False
    # When starting mid-blob, the first (possibly partial) line belongs to
    # the previous shard and must be skipped on the first next() call.
    self._read_before_start = bool(start_position)

  def next(self):
    """Returns the next input from this reader as an (offset, line) tuple.

    Raises:
      StopIteration: when the end of the assigned range is reached.
    """
    self._has_iterated = True

    if self._read_before_start:
      self._blob_reader.readline()
      self._read_before_start = False
    start_position = self._blob_reader.tell()

    if start_position > self._end_position:
      raise StopIteration()

    line = self._blob_reader.readline()

    if not line:
      raise StopIteration()

    return start_position, line.rstrip("\n")

  def to_json(self):
    """Returns a json-compatible input shard spec for remaining inputs."""
    new_pos = self._blob_reader.tell()
    if self._has_iterated:
      # Back up one byte so that, on resume, the skip-first-line logic in
      # __init__/next re-synchronizes on the line boundary we stopped at.
      new_pos -= 1
    return {self.BLOB_KEY_PARAM: self._blob_key,
            self.INITIAL_POSITION_PARAM: new_pos,
            self.END_POSITION_PARAM: self._end_position}

  def __str__(self):
    """Returns the string representation of this BlobstoreLineInputReader."""
    return "blobstore.BlobKey(%r):[%d, %d]" % (
        self._blob_key, self._blob_reader.tell(), self._end_position)

  @classmethod
  def from_json(cls, json):
    """Instantiates an instance of this InputReader for the given shard spec."""
    return cls(json[cls.BLOB_KEY_PARAM],
               json[cls.INITIAL_POSITION_PARAM],
               json[cls.END_POSITION_PARAM])

  @classmethod
  def split_input(cls, mapper_spec):
    """Returns a list of shard_count input_spec_shards for input_spec.

    Args:
      mapper_spec: The mapper specification to split from. Must contain
        'blob_keys' parameter with one or more blob keys.

    Returns:
      A list of BlobstoreInputReaders corresponding to the specified shards.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Mapper input reader class mismatch")
    params = mapper_spec.params
    if cls.BLOB_KEYS_PARAM not in params:
      raise BadReaderParamsError("Must specify 'blob_keys' for mapper input")
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
      # This is a mechanism to allow multiple blob keys (which do not contain
      # commas) in a single string. It may go away.
      blob_keys = blob_keys.split(",")
    if len(blob_keys) > cls._MAX_BLOB_KEYS_COUNT:
      raise BadReaderParamsError("Too many 'blob_keys' for mapper input")
    if not blob_keys:
      raise BadReaderParamsError("No 'blob_keys' specified for mapper input")

    blob_sizes = {}
    for blob_key in blob_keys:
      blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
      if blob_info is None:
        # BlobInfo.get returns None for unknown keys; without this check the
        # .size access below raises AttributeError instead of a reader error.
        raise BadReaderParamsError("Could not find blobinfo for key %s" %
                                   blob_key)
      blob_sizes[blob_key] = blob_info.size

    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)
    shards_per_blob = shard_count // len(blob_keys)
    if shards_per_blob == 0:
      shards_per_blob = 1

    chunks = []
    for blob_key, blob_size in blob_sizes.items():
      blob_chunk_size = blob_size // shards_per_blob
      for i in xrange(shards_per_blob - 1):
        chunks.append(BlobstoreLineInputReader.from_json(
            {cls.BLOB_KEY_PARAM: blob_key,
             cls.INITIAL_POSITION_PARAM: blob_chunk_size * i,
             cls.END_POSITION_PARAM: blob_chunk_size * (i + 1)}))
      # The final shard absorbs any remainder up to the true blob size.
      chunks.append(BlobstoreLineInputReader.from_json(
          {cls.BLOB_KEY_PARAM: blob_key,
           cls.INITIAL_POSITION_PARAM: blob_chunk_size * (shards_per_blob - 1),
           cls.END_POSITION_PARAM: blob_size}))

    return chunks
class BlobstoreZipInputReader(InputReader):
  """Input reader for files from a zip archive stored in the Blobstore.

  Each instance of the reader will read the TOC, from the end of the zip file,
  and then only the contained files which it is responsible for.
  """

  # Maximum number of shards to allow.
  _MAX_SHARD_COUNT = 256

  # Mapreduce parameters.
  BLOB_KEY_PARAM = "blob_key"
  START_INDEX_PARAM = "start_index"
  END_INDEX_PARAM = "end_index"

  def __init__(self, blob_key, start_index, end_index,
               _reader=blobstore.BlobReader):
    """Initializes this instance with the given blob key and file range.

    This BlobstoreZipInputReader will read from the file with index start_index
    up to but not including the file with index end_index.

    Args:
      blob_key: the BlobKey that this input reader is processing.
      start_index: the index of the first file to read.
      end_index: the index of the first file that will not be read.
      _reader: a callable that returns a file-like object for reading blobs.
        Used for dependency injection.
    """
    self._blob_key = blob_key
    self._start_index = start_index
    self._end_index = end_index
    self._reader = _reader
    # The zip file and its entry list are opened lazily on first next() call.
    self._zip = None
    self._entries = None

  def next(self):
    """Returns the next input from this input reader as (ZipInfo, opener) tuple.

    Returns:
      The next input from this input reader, in the form of a 2-tuple.
      The first element of the tuple is a zipfile.ZipInfo object.
      The second element of the tuple is a zero-argument function that, when
      called, returns the complete body of the file.
    """
    if not self._zip:
      self._zip = zipfile.ZipFile(self._reader(self._blob_key))
      # Get a list of entries, reversed so we can pop entries off in order
      self._entries = self._zip.infolist()[self._start_index:self._end_index]
      self._entries.reverse()
    if not self._entries:
      raise StopIteration()
    entry = self._entries.pop()
    # Track progress so that to_json() resumes after the consumed entry.
    self._start_index += 1
    return (entry, lambda: self._zip.read(entry.filename))

  @classmethod
  def from_json(cls, json):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      json: The InputReader state as a dict-like object.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    return cls(json[cls.BLOB_KEY_PARAM],
               json[cls.START_INDEX_PARAM],
               json[cls.END_INDEX_PARAM])

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    return {self.BLOB_KEY_PARAM: self._blob_key,
            self.START_INDEX_PARAM: self._start_index,
            self.END_INDEX_PARAM: self._end_index}

  def __str__(self):
    """Returns the string representation of this BlobstoreZipInputReader."""
    return "blobstore.BlobKey(%r):[%d, %d]" % (
        self._blob_key, self._start_index, self._end_index)

  @classmethod
  def split_input(cls, mapper_spec, _reader=blobstore.BlobReader):
    """Returns a list of input shard states for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader. Must contain
        'blob_key' parameter with one blob key.
      _reader: a callable that returns a file-like object for reading blobs.
        Used for dependency injection.

    Returns:
      A list of InputReaders spanning files within the zip.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Mapper input reader class mismatch")
    params = mapper_spec.params
    if cls.BLOB_KEY_PARAM not in params:
      raise BadReaderParamsError("Must specify 'blob_key' for mapper input")
    blob_key = params[cls.BLOB_KEY_PARAM]
    zip_input = zipfile.ZipFile(_reader(blob_key))
    files = zip_input.infolist()
    total_size = sum(x.file_size for x in files)
    num_shards = min(mapper_spec.shard_count, cls._MAX_SHARD_COUNT)
    size_per_shard = total_size // num_shards

    # Break the list of files into sublists, each of approximately
    # size_per_shard bytes.
    shard_start_indexes = [0]
    current_shard_size = 0
    for i, fileinfo in enumerate(files):
      current_shard_size += fileinfo.file_size
      if current_shard_size >= size_per_shard:
        shard_start_indexes.append(i + 1)
        current_shard_size = 0

    # Make sure the trailing partial shard (if any) is included.
    if shard_start_indexes[-1] != len(files):
      shard_start_indexes.append(len(files))

    # Adjacent index pairs become the [start, end) file range of each shard.
    return [cls(blob_key, start_index, end_index, _reader)
            for start_index, end_index
            in zip(shard_start_indexes, shard_start_indexes[1:])]
class BlobstoreZipLineInputReader(InputReader):
  """Input reader for newline delimited files in zip archives from Blobstore.

  This has the same external interface as the BlobstoreLineInputReader, in that
  it takes a list of blobs as its input and yields lines to the reader.
  However the blobs themselves are expected to be zip archives of line delimited
  files instead of the files themselves.

  This is useful as many line delimited files gain greatly from compression.
  """

  # Maximum number of shards to allow.
  _MAX_SHARD_COUNT = 256

  # Maximum number of blobs to allow.
  _MAX_BLOB_KEYS_COUNT = 246

  # Mapreduce parameters.
  BLOB_KEYS_PARAM = "blob_keys"

  # Serialization parameters.
  BLOB_KEY_PARAM = "blob_key"
  START_FILE_INDEX_PARAM = "start_file_index"
  END_FILE_INDEX_PARAM = "end_file_index"
  OFFSET_PARAM = "offset"

  def __init__(self, blob_key, start_file_index, end_file_index, offset,
               _reader=blobstore.BlobReader):
    """Initializes this instance with the given blob key and file range.

    This BlobstoreZipLineInputReader will read from the file with index
    start_file_index up to but not including the file with index end_file_index.
    It will return lines starting at offset within file[start_file_index]

    Args:
      blob_key: the BlobKey that this input reader is processing.
      start_file_index: the index of the first file to read within the zip.
      end_file_index: the index of the first file that will not be read.
      offset: the byte offset within blob_key.zip[start_file_index] to start
        reading. The reader will continue to the end of the file.
      _reader: a callable that returns a file-like object for reading blobs.
        Used for dependency injection.
    """
    self._blob_key = blob_key
    self._start_file_index = start_file_index
    self._end_file_index = end_file_index
    self._initial_offset = offset
    self._reader = _reader
    # Zip archive, entry list and the current file's stream are opened
    # lazily on the first next() call.
    self._zip = None
    self._entries = None
    self._filestream = None

  @classmethod
  def split_input(cls, mapper_spec, _reader=blobstore.BlobReader):
    """Returns a list of input readers for the input spec.

    Args:
      mapper_spec: The MapperSpec for this InputReader. Must contain
        'blob_keys' parameter with one or more blob keys.
      _reader: a callable that returns a file-like object for reading blobs.
        Used for dependency injection.

    Returns:
      A list of InputReaders spanning the subfiles within the blobs.
      There will be at least one reader per blob, but it will otherwise
      attempt to keep the expanded size even.

    Raises:
      BadReaderParamsError: required parameters are missing or invalid.
    """
    if mapper_spec.input_reader_class() != cls:
      raise BadReaderParamsError("Mapper input reader class mismatch")
    params = mapper_spec.params
    if cls.BLOB_KEYS_PARAM not in params:
      raise BadReaderParamsError("Must specify 'blob_key' for mapper input")
    blob_keys = params[cls.BLOB_KEYS_PARAM]
    if isinstance(blob_keys, basestring):
      # This is a mechanism to allow multiple blob keys (which do not contain
      # commas) in a single string. It may go away.
      blob_keys = blob_keys.split(",")
    if len(blob_keys) > cls._MAX_BLOB_KEYS_COUNT:
      raise BadReaderParamsError("Too many 'blob_keys' for mapper input")
    if not blob_keys:
      raise BadReaderParamsError("No 'blob_keys' specified for mapper input")

    blob_files = {}
    total_size = 0
    for blob_key in blob_keys:
      zip_input = zipfile.ZipFile(_reader(blob_key))
      blob_files[blob_key] = zip_input.infolist()
      total_size += sum(x.file_size for x in blob_files[blob_key])

    shard_count = min(cls._MAX_SHARD_COUNT, mapper_spec.shard_count)

    # We can break on both blob key and file-within-zip boundaries.
    # A shard will span at minimum a single blob key, but may only
    # handle a few files within a blob.

    size_per_shard = total_size // shard_count

    readers = []
    for blob_key in blob_keys:
      files = blob_files[blob_key]
      current_shard_size = 0
      start_file_index = 0
      next_file_index = 0
      for fileinfo in files:
        next_file_index += 1
        current_shard_size += fileinfo.file_size
        if current_shard_size >= size_per_shard:
          readers.append(cls(blob_key, start_file_index, next_file_index, 0,
                             _reader))
          current_shard_size = 0
          start_file_index = next_file_index
      # A trailing partial shard still gets its own reader.
      if current_shard_size != 0:
        readers.append(cls(blob_key, start_file_index, next_file_index, 0,
                           _reader))

    return readers

  def next(self):
    """Returns the next line from this input reader as (lineinfo, line) tuple.

    Returns:
      The next input from this input reader, in the form of a 2-tuple.
      The first element of the tuple describes the source, it is itself
      a tuple (blobkey, filenumber, byteoffset).
      The second element of the tuple is the line found at that offset.
    """
    if not self._filestream:
      if not self._zip:
        self._zip = zipfile.ZipFile(self._reader(self._blob_key))
        # Get a list of entries, reversed so we can pop entries off in order
        self._entries = self._zip.infolist()[self._start_file_index:
                                             self._end_file_index]
        self._entries.reverse()
      if not self._entries:
        raise StopIteration()
      entry = self._entries.pop()
      value = self._zip.read(entry.filename)
      self._filestream = StringIO.StringIO(value)
      if self._initial_offset:
        # Resume mid-file: seek to the saved offset and discard the
        # (possibly partial) line that ends there.
        self._filestream.seek(self._initial_offset)
        self._filestream.readline()

    start_position = self._filestream.tell()
    line = self._filestream.readline()

    if not line:
      # Done with this file in the zip. Move on to the next file.
      self._filestream.close()
      self._filestream = None
      self._start_file_index += 1
      self._initial_offset = 0
      return self.next()

    return ((self._blob_key, self._start_file_index, start_position),
            line.rstrip("\n"))

  def _next_offset(self):
    """Return the offset of the next line to read."""
    if self._filestream:
      offset = self._filestream.tell()
      if offset:
        # Back up one byte so resuming re-synchronizes on the line boundary.
        offset -= 1
    else:
      offset = self._initial_offset

    return offset

  def to_json(self):
    """Returns an input shard state for the remaining inputs.

    Returns:
      A json-izable version of the remaining InputReader.
    """
    return {self.BLOB_KEY_PARAM: self._blob_key,
            self.START_FILE_INDEX_PARAM: self._start_file_index,
            self.END_FILE_INDEX_PARAM: self._end_file_index,
            self.OFFSET_PARAM: self._next_offset()}

  @classmethod
  def from_json(cls, json, _reader=blobstore.BlobReader):
    """Creates an instance of the InputReader for the given input shard state.

    Args:
      json: The InputReader state as a dict-like object.
      _reader: For dependency injection.

    Returns:
      An instance of the InputReader configured using the values of json.
    """
    return cls(json[cls.BLOB_KEY_PARAM],
               json[cls.START_FILE_INDEX_PARAM],
               json[cls.END_FILE_INDEX_PARAM],
               json[cls.OFFSET_PARAM],
               _reader)

  def __str__(self):
    """Returns the string representation of this reader.

    Returns:
      string blobkey:[start file num, end file num]:current offset.
    """
    return "blobstore.BlobKey(%r):[%d, %d]:%d" % (
        self._blob_key, self._start_file_index, self._end_file_index,
        self._next_offset())
|
bsd-3-clause
|
psav/cfme_tests
|
cfme/utils/tests/test_ipappliance.py
|
8
|
1068
|
# -*- coding: utf-8 -*-
import pytest
from cfme.utils.appliance import IPAppliance
def test_ipappliance_from_hostname():
    """An IPAppliance built from a hostname exposes it and derives an https URL."""
    host = '1.2.3.4'
    appliance = IPAppliance(hostname=host)
    assert appliance.hostname == host
    assert appliance.url == 'https://{}/'.format(host)
def test_ipappliance_from_url():
    """from_url() must preserve the URL verbatim and extract the hostname."""
    host = '1.2.3.4'
    full_url = 'http://{}/'.format(host)
    appliance = IPAppliance.from_url(full_url)
    assert appliance.url == full_url
    assert appliance.hostname == host
@pytest.mark.skipif(pytest.config.getoption('--dummy-appliance'),
                    reason="infra_provider cant support dummy instance")
def test_ipappliance_managed_providers(appliance, infra_provider):
    # The provider set up by the infra_provider fixture must show up in the
    # appliance's list of known managed providers.
    assert infra_provider in appliance.managed_known_providers
def test_context_hack(monkeypatch):
    """Exiting the IPAppliance context must not mask the in-flight exception,
    even when the screenshot-capture hook itself blows up."""
    appliance = IPAppliance.from_url('http://127.0.0.2/')

    def exploding_hook(*args):
        raise RuntimeError()

    monkeypatch.setattr(
        appliance, '_screenshot_capture_at_context_leave', exploding_hook)
    with pytest.raises(ValueError):
        with appliance:
            raise ValueError("test")
|
gpl-2.0
|
markneville/nupic
|
tests/unit/nupic/research/temporal_memory_test.py
|
19
|
21695
|
#!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
TODO: Mock out all function calls.
TODO: Make default test TM instance simpler, with 4 cells per column.
"""
import tempfile
import unittest
from nupic.data.generators.pattern_machine import PatternMachine
from nupic.data.generators.sequence_machine import SequenceMachine
from nupic.research.temporal_memory import TemporalMemory
try:
import capnp
except ImportError:
capnp = None
if capnp:
from nupic.proto import TemporalMemoryProto_capnp
class TemporalMemoryTest(unittest.TestCase):
  """Unit tests for the individual phases and helpers of TemporalMemory.

  Most tests build a small Connections graph by hand and assert exact
  cell/segment/synapse ids, so they are tightly coupled to the seeded RNG
  and to segment/synapse creation order.
  """

  def setUp(self):
    # Default-parameter TM; tests needing other parameters build their own.
    self.tm = TemporalMemory()

  def testInitInvalidParams(self):
    """Constructor rejects empty columnDimensions and cellsPerColumn <= 0."""
    # Invalid columnDimensions
    kwargs = {"columnDimensions": [], "cellsPerColumn": 32}
    self.assertRaises(ValueError, TemporalMemory, **kwargs)

    # Invalid cellsPerColumn
    kwargs = {"columnDimensions": [2048], "cellsPerColumn": 0}
    self.assertRaises(ValueError, TemporalMemory, **kwargs)
    kwargs = {"columnDimensions": [2048], "cellsPerColumn": -10}
    self.assertRaises(ValueError, TemporalMemory, **kwargs)

  def testActivateCorrectlyPredictiveCells(self):
    """Previously-predictive cells in active columns become active/winner."""
    tm = self.tm

    prevPredictiveCells = set([0, 237, 1026, 26337, 26339, 55536])
    activeColumns = set([32, 47, 823])
    prevMatchingCells = set()

    (activeCells,
     winnerCells,
     predictedColumns,
     predictedInactiveCells) = tm.activateCorrectlyPredictiveCells(
         prevPredictiveCells, prevMatchingCells, activeColumns)

    self.assertEqual(activeCells, set([1026, 26337, 26339]))
    self.assertEqual(winnerCells, set([1026, 26337, 26339]))
    self.assertEqual(predictedColumns, set([32, 823]))
    self.assertEqual(predictedInactiveCells, set())

  def testActivateCorrectlyPredictiveCellsEmpty(self):
    """Degenerate inputs (no predictions and/or no active columns) yield
    empty results."""
    tm = self.tm

    # No previous predictive cells, no active columns
    prevPredictiveCells = set()
    activeColumns = set()
    prevMatchingCells = set()

    (activeCells,
     winnerCells,
     predictedColumns,
     predictedInactiveCells) = tm.activateCorrectlyPredictiveCells(
         prevPredictiveCells, prevMatchingCells, activeColumns)

    self.assertEqual(activeCells, set())
    self.assertEqual(winnerCells, set())
    self.assertEqual(predictedColumns, set())
    self.assertEqual(predictedInactiveCells, set())

    # No previous predictive cells, with active columns
    prevPredictiveCells = set()
    activeColumns = set([32, 47, 823])
    prevMatchingCells = set()

    (activeCells,
     winnerCells,
     predictedColumns,
     predictedInactiveCells) = tm.activateCorrectlyPredictiveCells(
         prevPredictiveCells, prevMatchingCells, activeColumns)

    self.assertEqual(activeCells, set())
    self.assertEqual(winnerCells, set())
    self.assertEqual(predictedColumns, set())
    self.assertEqual(predictedInactiveCells, set())

    # No active columns, with previously predictive cells
    prevPredictiveCells = set([0, 237, 1026, 26337, 26339, 55536])
    activeColumns = set()
    prevMatchingCells = set()

    (activeCells,
     winnerCells,
     predictedColumns,
     predictedInactiveCells) = tm.activateCorrectlyPredictiveCells(
         prevPredictiveCells, prevMatchingCells, activeColumns)

    self.assertEqual(activeCells, set())
    self.assertEqual(winnerCells, set())
    self.assertEqual(predictedColumns, set())
    self.assertEqual(predictedInactiveCells, set())

  def testActivateCorrectlyPredictiveCellsOrphan(self):
    """With predictedSegmentDecrement on, matching-but-unpredicted cells in
    inactive columns are reported as predictedInactiveCells."""
    tm = self.tm
    tm.predictedSegmentDecrement = 0.001

    prevPredictiveCells = set([])
    activeColumns = set([32, 47, 823])
    prevMatchingCells = set([32, 47])

    (activeCells,
     winnerCells,
     predictedColumns,
     predictedInactiveCells) = tm.activateCorrectlyPredictiveCells(
         prevPredictiveCells, prevMatchingCells, activeColumns)

    self.assertEqual(activeCells, set([]))
    self.assertEqual(winnerCells, set([]))
    self.assertEqual(predictedColumns, set([]))
    self.assertEqual(predictedInactiveCells, set([32, 47]))

  def testBurstColumns(self):
    """Unpredicted active columns burst: all their cells activate, a winner
    cell is picked, and a learning segment is chosen or created."""
    tm = TemporalMemory(
        cellsPerColumn=4,
        connectedPermanence=0.50,
        minThreshold=1,
        seed=42)

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.6)
    connections.createSynapse(0, 37, 0.4)
    connections.createSynapse(0, 477, 0.9)

    connections.createSegment(0)
    connections.createSynapse(1, 49, 0.9)
    connections.createSynapse(1, 3, 0.8)

    connections.createSegment(1)
    connections.createSynapse(2, 733, 0.7)

    connections.createSegment(108)
    connections.createSynapse(3, 486, 0.9)

    activeColumns = set([0, 1, 26])
    predictedColumns = set([26])
    prevActiveCells = set([23, 37, 49, 733])
    prevWinnerCells = set([23, 37, 49, 733])

    (activeCells,
     winnerCells,
     learningSegments) = tm.burstColumns(activeColumns,
                                         predictedColumns,
                                         prevActiveCells,
                                         prevWinnerCells,
                                         connections)

    self.assertEqual(activeCells, set([0, 1, 2, 3, 4, 5, 6, 7]))
    # seed=42 makes the random winner in column 1 deterministic.
    randomWinner = 4
    self.assertEqual(winnerCells, set([0, randomWinner]))  # 4 is randomly chosen cell
    self.assertEqual(learningSegments, set([0, 4]))  # 4 is new segment created

    # Check that new segment was added to winner cell (4) in column 1
    self.assertEqual(connections.segmentsForCell(randomWinner), set([4]))

  def testBurstColumnsEmpty(self):
    """Bursting with no active columns yields empty results."""
    tm = self.tm

    activeColumns = set()
    predictedColumns = set()
    prevActiveCells = set()
    prevWinnerCells = set()
    connections = tm.connections

    (activeCells,
     winnerCells,
     learningSegments) = tm.burstColumns(activeColumns,
                                         predictedColumns,
                                         prevActiveCells,
                                         prevWinnerCells,
                                         connections)

    self.assertEqual(activeCells, set())
    self.assertEqual(winnerCells, set())
    self.assertEqual(learningSegments, set())

  def testLearnOnSegments(self):
    """Permanences are reinforced on previously active segments and new
    synapses are grown on learning segments up to maxNewSynapseCount."""
    tm = TemporalMemory(maxNewSynapseCount=2)

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.6)
    connections.createSynapse(0, 37, 0.4)
    connections.createSynapse(0, 477, 0.9)

    connections.createSegment(1)
    connections.createSynapse(1, 733, 0.7)

    connections.createSegment(8)
    connections.createSynapse(2, 486, 0.9)

    connections.createSegment(100)

    prevActiveSegments = set([0, 2])
    learningSegments = set([1, 3])
    prevActiveCells = set([23, 37, 733])
    winnerCells = set([0])
    prevWinnerCells = set([10, 11, 12, 13, 14])
    predictedInactiveCells = set()
    prevMatchingSegments = set()
    tm.learnOnSegments(prevActiveSegments,
                       learningSegments,
                       prevActiveCells,
                       winnerCells,
                       prevWinnerCells,
                       connections,
                       predictedInactiveCells,
                       prevMatchingSegments)

    # Check segment 0: active presynaptic cells incremented, others decremented
    synapseData = connections.dataForSynapse(0)
    self.assertAlmostEqual(synapseData.permanence, 0.7)

    synapseData = connections.dataForSynapse(1)
    self.assertAlmostEqual(synapseData.permanence, 0.5)

    synapseData = connections.dataForSynapse(2)
    self.assertAlmostEqual(synapseData.permanence, 0.8)

    # Check segment 1
    synapseData = connections.dataForSynapse(3)
    self.assertAlmostEqual(synapseData.permanence, 0.8)

    self.assertEqual(len(connections.synapsesForSegment(1)), 2)

    # Check segment 2
    synapseData = connections.dataForSynapse(4)
    self.assertAlmostEqual(synapseData.permanence, 0.9)

    self.assertEqual(len(connections.synapsesForSegment(2)), 1)

    # Check segment 3
    self.assertEqual(len(connections.synapsesForSegment(3)), 2)

  def testComputePredictiveCells(self):
    """Segments above activationThreshold predict; segments above
    minThreshold merely match."""
    tm = TemporalMemory(activationThreshold=2, minThreshold=2,
                        predictedSegmentDecrement=0.004)

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.6)
    connections.createSynapse(0, 37, 0.5)
    connections.createSynapse(0, 477, 0.9)

    connections.createSegment(1)
    connections.createSynapse(1, 733, 0.7)
    connections.createSynapse(1, 733, 0.4)

    connections.createSegment(1)
    connections.createSynapse(2, 974, 0.9)

    connections.createSegment(8)
    connections.createSynapse(3, 486, 0.9)

    connections.createSegment(100)

    activeCells = set([23, 37, 733, 974])

    (activeSegments,
     predictiveCells,
     matchingSegments,
     matchingCells) = tm.computePredictiveCells(activeCells, connections)

    self.assertEqual(activeSegments, set([0]))
    self.assertEqual(predictiveCells, set([0]))
    self.assertEqual(matchingSegments, set([0, 1]))
    self.assertEqual(matchingCells, set([0, 1]))

  def testBestMatchingCell(self):
    """bestMatchingCell returns (cell, bestSegment), or a random cell with
    no segment when nothing matches in the column."""
    tm = TemporalMemory(
        connectedPermanence=0.50,
        minThreshold=1,
        seed=42)

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.6)
    connections.createSynapse(0, 37, 0.4)
    connections.createSynapse(0, 477, 0.9)

    connections.createSegment(0)
    connections.createSynapse(1, 49, 0.9)
    connections.createSynapse(1, 3, 0.8)

    connections.createSegment(1)
    connections.createSynapse(2, 733, 0.7)

    connections.createSegment(108)
    connections.createSynapse(3, 486, 0.9)

    activeCells = set([23, 37, 49, 733])

    self.assertEqual(tm.bestMatchingCell(tm.cellsForColumn(0),
                                         activeCells,
                                         connections),
                     (0, 0))

    self.assertEqual(tm.bestMatchingCell(tm.cellsForColumn(3),  # column containing cell 108
                                         activeCells,
                                         connections),
                     (103, None))  # Random cell from column

    self.assertEqual(tm.bestMatchingCell(tm.cellsForColumn(999),
                                         activeCells,
                                         connections),
                     (31979, None))  # Random cell from column

  def testBestMatchingCellFewestSegments(self):
    """Tie-breaking must prefer the cell with the fewest segments."""
    tm = TemporalMemory(
        columnDimensions=[2],
        cellsPerColumn=2,
        connectedPermanence=0.50,
        minThreshold=1,
        seed=42)

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 3, 0.3)

    activeSynapsesForSegment = set([])

    for _ in range(100):
      # Never pick cell 0, always pick cell 1
      (cell, _) = tm.bestMatchingCell(tm.cellsForColumn(0),
                                      activeSynapsesForSegment,
                                      connections)
      self.assertEqual(cell, 1)

  def testBestMatchingSegment(self):
    """bestMatchingSegment returns (segment, numActiveSynapses) or
    (None, None) when no segment clears minThreshold."""
    tm = TemporalMemory(
        connectedPermanence=0.50,
        minThreshold=1)

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.6)
    connections.createSynapse(0, 37, 0.4)
    connections.createSynapse(0, 477, 0.9)

    connections.createSegment(0)
    connections.createSynapse(1, 49, 0.9)
    connections.createSynapse(1, 3, 0.8)

    connections.createSegment(1)
    connections.createSynapse(2, 733, 0.7)

    connections.createSegment(8)
    connections.createSynapse(3, 486, 0.9)

    activeCells = set([23, 37, 49, 733])

    self.assertEqual(tm.bestMatchingSegment(0,
                                            activeCells,
                                            connections),
                     (0, 2))

    self.assertEqual(tm.bestMatchingSegment(1,
                                            activeCells,
                                            connections),
                     (2, 1))

    self.assertEqual(tm.bestMatchingSegment(8,
                                            activeCells,
                                            connections),
                     (None, None))

    self.assertEqual(tm.bestMatchingSegment(100,
                                            activeCells,
                                            connections),
                     (None, None))

  def testLeastUsedCell(self):
    """leastUsedCell must avoid the cell that already owns a segment."""
    tm = TemporalMemory(
        columnDimensions=[2],
        cellsPerColumn=2,
        seed=42)

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 3, 0.3)

    for _ in range(100):
      # Never pick cell 0, always pick cell 1
      self.assertEqual(tm.leastUsedCell(tm.cellsForColumn(0),
                                        connections),
                       1)

  def testAdaptSegment(self):
    """adaptSegment increments active synapses and decrements the rest."""
    tm = self.tm

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.6)
    connections.createSynapse(0, 37, 0.4)
    connections.createSynapse(0, 477, 0.9)

    tm.adaptSegment(0, set([0, 1]), connections,
                    tm.permanenceIncrement,
                    tm.permanenceDecrement)

    synapseData = connections.dataForSynapse(0)
    self.assertAlmostEqual(synapseData.permanence, 0.7)

    synapseData = connections.dataForSynapse(1)
    self.assertAlmostEqual(synapseData.permanence, 0.5)

    synapseData = connections.dataForSynapse(2)
    self.assertAlmostEqual(synapseData.permanence, 0.8)

  def testAdaptSegmentToMax(self):
    """Permanence is clamped at 1.0."""
    tm = self.tm

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.9)

    tm.adaptSegment(0, set([0]), connections,
                    tm.permanenceIncrement,
                    tm.permanenceDecrement)
    synapseData = connections.dataForSynapse(0)
    self.assertAlmostEqual(synapseData.permanence, 1.0)

    # Now permanence should be at max
    tm.adaptSegment(0, set([0]), connections,
                    tm.permanenceIncrement,
                    tm.permanenceDecrement)
    synapseData = connections.dataForSynapse(0)
    self.assertAlmostEqual(synapseData.permanence, 1.0)

  def testAdaptSegmentToMin(self):
    """Synapses whose permanence would drop below 0 are destroyed."""
    tm = self.tm

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.1)

    tm.adaptSegment(0, set(), connections,
                    tm.permanenceIncrement,
                    tm.permanenceDecrement)

    synapses = connections.synapsesForSegment(0)
    self.assertFalse(0 in synapses)

  def testPickCellsToLearnOn(self):
    """pickCellsToLearnOn samples at most n winner cells (seeded RNG)."""
    tm = TemporalMemory(seed=42)

    connections = tm.connections
    connections.createSegment(0)

    winnerCells = set([4, 47, 58, 93])

    self.assertEqual(tm.pickCellsToLearnOn(2, 0, winnerCells, connections),
                     set([4, 93]))  # randomly picked

    self.assertEqual(tm.pickCellsToLearnOn(100, 0, winnerCells, connections),
                     set([4, 47, 58, 93]))

    self.assertEqual(tm.pickCellsToLearnOn(0, 0, winnerCells, connections),
                     set())

  def testPickCellsToLearnOnAvoidDuplicates(self):
    """Cells already synapsed onto the segment must not be picked again."""
    tm = TemporalMemory(seed=42)

    connections = tm.connections
    connections.createSegment(0)
    connections.createSynapse(0, 23, 0.6)

    winnerCells = set([23])

    # Ensure that no additional (duplicate) cells were picked
    self.assertEqual(tm.pickCellsToLearnOn(2, 0, winnerCells, connections),
                     set())

  def testColumnForCell1D(self):
    """Cell -> column mapping for a 1D column topology."""
    tm = TemporalMemory(
        columnDimensions=[2048],
        cellsPerColumn=5)
    self.assertEqual(tm.columnForCell(0), 0)
    self.assertEqual(tm.columnForCell(4), 0)
    self.assertEqual(tm.columnForCell(5), 1)
    self.assertEqual(tm.columnForCell(10239), 2047)

  def testColumnForCell2D(self):
    """Cell -> column mapping for a 2D column topology."""
    tm = TemporalMemory(
        columnDimensions=[64, 64],
        cellsPerColumn=4)
    self.assertEqual(tm.columnForCell(0), 0)
    self.assertEqual(tm.columnForCell(3), 0)
    self.assertEqual(tm.columnForCell(4), 1)
    self.assertEqual(tm.columnForCell(16383), 4095)

  def testColumnForCellInvalidCell(self):
    """Out-of-range cell indices raise IndexError; the last valid one doesn't."""
    tm = TemporalMemory(
        columnDimensions=[64, 64],
        cellsPerColumn=4)
    try:
      tm.columnForCell(16383)
    except IndexError:
      self.fail("IndexError raised unexpectedly")

    args = [16384]
    self.assertRaises(IndexError, tm.columnForCell, *args)

    args = [-1]
    self.assertRaises(IndexError, tm.columnForCell, *args)

  def testCellsForColumn1D(self):
    """Column -> cells mapping for a 1D column topology."""
    tm = TemporalMemory(
        columnDimensions=[2048],
        cellsPerColumn=5)
    expectedCells = set([5, 6, 7, 8, 9])
    self.assertEqual(tm.cellsForColumn(1), expectedCells)

  def testCellsForColumn2D(self):
    """Column -> cells mapping for a 2D column topology."""
    tm = TemporalMemory(
        columnDimensions=[64, 64],
        cellsPerColumn=4)
    expectedCells = set([256, 257, 258, 259])
    self.assertEqual(tm.cellsForColumn(64), expectedCells)

  def testCellsForColumnInvalidColumn(self):
    """Out-of-range column indices raise IndexError."""
    tm = TemporalMemory(
        columnDimensions=[64, 64],
        cellsPerColumn=4)

    try:
      tm.cellsForColumn(4095)
    except IndexError:
      self.fail("IndexError raised unexpectedly")

    args = [4096]
    self.assertRaises(IndexError, tm.cellsForColumn, *args)

    args = [-1]
    self.assertRaises(IndexError, tm.cellsForColumn, *args)

  def testNumberOfColumns(self):
    tm = TemporalMemory(
        columnDimensions=[64, 64],
        cellsPerColumn=32)
    self.assertEqual(tm.numberOfColumns(), 64 * 64)

  def testNumberOfCells(self):
    tm = TemporalMemory(
        columnDimensions=[64, 64],
        cellsPerColumn=32)
    self.assertEqual(tm.numberOfCells(), 64 * 64 * 32)

  def testMapCellsToColumns(self):
    """mapCellsToColumns groups a set of cells by their owning column."""
    tm = TemporalMemory(
        columnDimensions=[100],
        cellsPerColumn=4)
    columnsForCells = tm.mapCellsToColumns(set([0, 1, 2, 5, 399]))
    self.assertEqual(columnsForCells[0], set([0, 1, 2]))
    self.assertEqual(columnsForCells[1], set([5]))
    self.assertEqual(columnsForCells[99], set([399]))

  @unittest.skipUnless(
      capnp, "pycapnp is not installed, skipping serialization test.")
  def testWriteRead(self):
    """Cap'n Proto serialization round-trip: state, equality and subsequent
    compute() results must all survive write/read."""
    tm1 = TemporalMemory(
        columnDimensions=[100],
        cellsPerColumn=4,
        activationThreshold=7,
        initialPermanence=0.37,
        connectedPermanence=0.58,
        minThreshold=4,
        maxNewSynapseCount=18,
        permanenceIncrement=0.23,
        permanenceDecrement=0.08,
        seed=91)

    # Run some data through before serializing
    self.patternMachine = PatternMachine(100, 4)
    self.sequenceMachine = SequenceMachine(self.patternMachine)
    sequence = self.sequenceMachine.generateFromNumbers(range(5))
    for _ in range(3):
      for pattern in sequence:
        tm1.compute(pattern)

    proto1 = TemporalMemoryProto_capnp.TemporalMemoryProto.new_message()
    tm1.write(proto1)

    # Write the proto to a temp file and read it back into a new proto
    with tempfile.TemporaryFile() as f:
      proto1.write(f)
      f.seek(0)
      proto2 = TemporalMemoryProto_capnp.TemporalMemoryProto.read(f)

    # Load the deserialized proto
    tm2 = TemporalMemory.read(proto2)

    # Check that the two temporal memory objects have the same attributes
    self.assertEqual(tm1, tm2)

    # Run a couple records through after deserializing and check results match
    tm1.compute(self.patternMachine.get(0))
    tm2.compute(self.patternMachine.get(0))
    self.assertEqual(tm1.activeCells, tm2.activeCells)
    self.assertEqual(tm1.predictiveCells, tm2.predictiveCells)
    self.assertEqual(tm1.winnerCells, tm2.winnerCells)
    self.assertEqual(tm1.connections, tm2.connections)

    tm1.compute(self.patternMachine.get(3))
    tm2.compute(self.patternMachine.get(3))
    self.assertEqual(tm1.activeCells, tm2.activeCells)
    self.assertEqual(tm1.predictiveCells, tm2.predictiveCells)
    self.assertEqual(tm1.winnerCells, tm2.winnerCells)
    self.assertEqual(tm1.connections, tm2.connections)
# Standard unittest entry point when this file is run as a script.
if __name__ == '__main__':
unittest.main()
|
agpl-3.0
|
mancoast/CPythonPyc_test
|
cpython/241_pickletester.py
|
15
|
29752
|
import unittest
import pickle
import cPickle
import pickletools
import copy_reg
from test.test_support import TestFailed, have_unicode, TESTFN
# Tests that try a number of pickle protocols should have a
# for proto in protocols:
# kind of outer loop.
# Sanity check: both implementations must agree on the highest protocol
# (this file was written for protocol 2 as the maximum).
assert pickle.HIGHEST_PROTOCOL == cPickle.HIGHEST_PROTOCOL == 2
# All protocols 0..HIGHEST_PROTOCOL inclusive.
protocols = range(pickle.HIGHEST_PROTOCOL + 1)
# Return True if opcode code appears in the pickle, else False.
def opcode_in_pickle(code, pickle):
    """Return True if the opcode whose character is `code` occurs in `pickle`."""
    return any(opcode.code == code
               for opcode, _arg, _pos in pickletools.genops(pickle))
# Return the number of times opcode code appears in pickle.
def count_opcode(code, pickle):
    """Count how many times the opcode whose character is `code` occurs in `pickle`."""
    return sum(1 for opcode, _arg, _pos in pickletools.genops(pickle)
               if opcode.code == code)
# We can't very well test the extension registry without putting known stuff
# in it, but we have to be careful to restore its original state. Code
# should do this:
#
# e = ExtensionSaver(extension_code)
# try:
# fiddle w/ the extension registry's stuff for extension_code
# finally:
# e.restore()
# RAII-style saver for one slot of the copy_reg extension registry: the
# constructor removes any existing registration for `code`, restore() puts
# the registry back exactly as it was.
class ExtensionSaver:
# Remember current registration for code (if any), and remove it (if
# there is one).
def __init__(self, code):
self.code = code
if code in copy_reg._inverted_registry:
self.pair = copy_reg._inverted_registry[code]
copy_reg.remove_extension(self.pair[0], self.pair[1], code)
else:
# No prior registration; nothing to restore later.
self.pair = None
# Restore previous registration for code.
def restore(self):
code = self.code
# Drop whatever registration the test installed for this code.
curpair = copy_reg._inverted_registry.get(code)
if curpair is not None:
copy_reg.remove_extension(curpair[0], curpair[1], code)
# Re-install the original registration, if there was one.
pair = self.pair
if pair is not None:
copy_reg.add_extension(pair[0], pair[1], code)
# Simple test fixture compared by attribute dict (Python 2 __cmp__ protocol).
class C:
def __cmp__(self, other):
return cmp(self.__dict__, other.__dict__)
# Make C importable as __main__.C so pickles created here name a module
# that also resolves when the canned DATA0/1/2 pickles are loaded.
import __main__
__main__.C = C
C.__module__ = "__main__"
class myint(int):
    """int subclass that remembers the str() of its constructor argument.

    The extra attribute forces the pickler to store instance state in
    addition to the integer value.
    """

    def __init__(self, x):
        self.str = str(x)
# Subclass of C that opts into the __getinitargs__ pickle protocol: on
# unpickling, __init__ is re-run with the returned (a, b) pair.
class initarg(C):
def __init__(self, a, b):
self.a = a
self.b = b
def __getinitargs__(self):
# Must return the exact positional args for __init__.
return self.a, self.b
# Trivial metaclass plus a class using it (Python 2 __metaclass__ syntax),
# used to check that classes with custom metaclasses pickle correctly.
class metaclass(type):
pass
class use_metaclass(object):
__metaclass__ = metaclass
# DATA0 .. DATA2 are the pickles we expect under the various protocols, for
# the object returned by create_data().
# break into multiple strings to avoid confusing font-lock-mode
DATA0 = """(lp1
I0
aL1L
aF2
ac__builtin__
complex
p2
""" + \
"""(F3
F0
tRp3
aI1
aI-1
aI255
aI-255
aI-256
aI65535
aI-65535
aI-65536
aI2147483647
aI-2147483647
aI-2147483648
a""" + \
"""(S'abc'
p4
g4
""" + \
"""(i__main__
C
p5
""" + \
"""(dp6
S'foo'
p7
I1
sS'bar'
p8
I2
sbg5
tp9
ag9
aI5
a.
"""
# Disassembly of DATA0.
DATA0_DIS = """\
0: ( MARK
1: l LIST (MARK at 0)
2: p PUT 1
5: I INT 0
8: a APPEND
9: L LONG 1L
13: a APPEND
14: F FLOAT 2.0
17: a APPEND
18: c GLOBAL '__builtin__ complex'
39: p PUT 2
42: ( MARK
43: F FLOAT 3.0
46: F FLOAT 0.0
49: t TUPLE (MARK at 42)
50: R REDUCE
51: p PUT 3
54: a APPEND
55: I INT 1
58: a APPEND
59: I INT -1
63: a APPEND
64: I INT 255
69: a APPEND
70: I INT -255
76: a APPEND
77: I INT -256
83: a APPEND
84: I INT 65535
91: a APPEND
92: I INT -65535
100: a APPEND
101: I INT -65536
109: a APPEND
110: I INT 2147483647
122: a APPEND
123: I INT -2147483647
136: a APPEND
137: I INT -2147483648
150: a APPEND
151: ( MARK
152: S STRING 'abc'
159: p PUT 4
162: g GET 4
165: ( MARK
166: i INST '__main__ C' (MARK at 165)
178: p PUT 5
181: ( MARK
182: d DICT (MARK at 181)
183: p PUT 6
186: S STRING 'foo'
193: p PUT 7
196: I INT 1
199: s SETITEM
200: S STRING 'bar'
207: p PUT 8
210: I INT 2
213: s SETITEM
214: b BUILD
215: g GET 5
218: t TUPLE (MARK at 151)
219: p PUT 9
222: a APPEND
223: g GET 9
226: a APPEND
227: I INT 5
230: a APPEND
231: . STOP
highest protocol among opcodes = 0
"""
DATA1 = (']q\x01(K\x00L1L\nG@\x00\x00\x00\x00\x00\x00\x00'
'c__builtin__\ncomplex\nq\x02(G@\x08\x00\x00\x00\x00\x00'
'\x00G\x00\x00\x00\x00\x00\x00\x00\x00tRq\x03K\x01J\xff\xff'
'\xff\xffK\xffJ\x01\xff\xff\xffJ\x00\xff\xff\xffM\xff\xff'
'J\x01\x00\xff\xffJ\x00\x00\xff\xffJ\xff\xff\xff\x7fJ\x01\x00'
'\x00\x80J\x00\x00\x00\x80(U\x03abcq\x04h\x04(c__main__\n'
'C\nq\x05oq\x06}q\x07(U\x03fooq\x08K\x01U\x03barq\tK\x02ubh'
'\x06tq\nh\nK\x05e.'
)
# Disassembly of DATA1.
DATA1_DIS = """\
0: ] EMPTY_LIST
1: q BINPUT 1
3: ( MARK
4: K BININT1 0
6: L LONG 1L
10: G BINFLOAT 2.0
19: c GLOBAL '__builtin__ complex'
40: q BINPUT 2
42: ( MARK
43: G BINFLOAT 3.0
52: G BINFLOAT 0.0
61: t TUPLE (MARK at 42)
62: R REDUCE
63: q BINPUT 3
65: K BININT1 1
67: J BININT -1
72: K BININT1 255
74: J BININT -255
79: J BININT -256
84: M BININT2 65535
87: J BININT -65535
92: J BININT -65536
97: J BININT 2147483647
102: J BININT -2147483647
107: J BININT -2147483648
112: ( MARK
113: U SHORT_BINSTRING 'abc'
118: q BINPUT 4
120: h BINGET 4
122: ( MARK
123: c GLOBAL '__main__ C'
135: q BINPUT 5
137: o OBJ (MARK at 122)
138: q BINPUT 6
140: } EMPTY_DICT
141: q BINPUT 7
143: ( MARK
144: U SHORT_BINSTRING 'foo'
149: q BINPUT 8
151: K BININT1 1
153: U SHORT_BINSTRING 'bar'
158: q BINPUT 9
160: K BININT1 2
162: u SETITEMS (MARK at 143)
163: b BUILD
164: h BINGET 6
166: t TUPLE (MARK at 112)
167: q BINPUT 10
169: h BINGET 10
171: K BININT1 5
173: e APPENDS (MARK at 3)
174: . STOP
highest protocol among opcodes = 1
"""
DATA2 = ('\x80\x02]q\x01(K\x00\x8a\x01\x01G@\x00\x00\x00\x00\x00\x00\x00'
'c__builtin__\ncomplex\nq\x02G@\x08\x00\x00\x00\x00\x00\x00G\x00'
'\x00\x00\x00\x00\x00\x00\x00\x86Rq\x03K\x01J\xff\xff\xff\xffK'
'\xffJ\x01\xff\xff\xffJ\x00\xff\xff\xffM\xff\xffJ\x01\x00\xff\xff'
'J\x00\x00\xff\xffJ\xff\xff\xff\x7fJ\x01\x00\x00\x80J\x00\x00\x00'
'\x80(U\x03abcq\x04h\x04(c__main__\nC\nq\x05oq\x06}q\x07(U\x03foo'
'q\x08K\x01U\x03barq\tK\x02ubh\x06tq\nh\nK\x05e.')
# Disassembly of DATA2.
DATA2_DIS = """\
0: \x80 PROTO 2
2: ] EMPTY_LIST
3: q BINPUT 1
5: ( MARK
6: K BININT1 0
8: \x8a LONG1 1L
11: G BINFLOAT 2.0
20: c GLOBAL '__builtin__ complex'
41: q BINPUT 2
43: G BINFLOAT 3.0
52: G BINFLOAT 0.0
61: \x86 TUPLE2
62: R REDUCE
63: q BINPUT 3
65: K BININT1 1
67: J BININT -1
72: K BININT1 255
74: J BININT -255
79: J BININT -256
84: M BININT2 65535
87: J BININT -65535
92: J BININT -65536
97: J BININT 2147483647
102: J BININT -2147483647
107: J BININT -2147483648
112: ( MARK
113: U SHORT_BINSTRING 'abc'
118: q BINPUT 4
120: h BINGET 4
122: ( MARK
123: c GLOBAL '__main__ C'
135: q BINPUT 5
137: o OBJ (MARK at 122)
138: q BINPUT 6
140: } EMPTY_DICT
141: q BINPUT 7
143: ( MARK
144: U SHORT_BINSTRING 'foo'
149: q BINPUT 8
151: K BININT1 1
153: U SHORT_BINSTRING 'bar'
158: q BINPUT 9
160: K BININT1 2
162: u SETITEMS (MARK at 143)
163: b BUILD
164: h BINGET 6
166: t TUPLE (MARK at 112)
167: q BINPUT 10
169: h BINGET 10
171: K BININT1 5
173: e APPENDS (MARK at 5)
174: . STOP
highest protocol among opcodes = 2
"""
def create_data():
# Build the canonical test object graph: ints, a long (1L is deliberate,
# it exercises the LONG opcodes), floats, a complex, boundary integers,
# and a tuple holding shared references (exercises memoization).
c = C()
c.foo = 1
c.bar = 2
x = [0, 1L, 2.0, 3.0+0j]
# Append some integer test cases at cPickle.c's internal size
# cutoffs.
uint1max = 0xff
uint2max = 0xffff
int4max = 0x7fffffff
x.extend([1, -1,
uint1max, -uint1max, -uint1max-1,
uint2max, -uint2max, -uint2max-1,
int4max, -int4max, -int4max-1])
y = ('abc', 'abc', c, c)
# y (which itself shares 'abc' and c) is appended twice so the pickle
# must preserve identity sharing across the whole structure.
x.append(y)
x.append(y)
x.append(5)
return x
class AbstractPickleTests(unittest.TestCase):
# Subclass must define self.dumps, self.loads, self.error.
_testdata = create_data()
def setUp(self):
pass
def test_misc(self):
# test various datatypes not tested by testdata
for proto in protocols:
x = myint(4)
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
x = (1, ())
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
x = initarg(1, x)
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
# XXX test __reduce__ protocol?
def test_roundtrip_equality(self):
expected = self._testdata
for proto in protocols:
s = self.dumps(expected, proto)
got = self.loads(s)
self.assertEqual(expected, got)
def test_load_from_canned_string(self):
expected = self._testdata
for canned in DATA0, DATA1, DATA2:
got = self.loads(canned)
self.assertEqual(expected, got)
# There are gratuitous differences between pickles produced by
# pickle and cPickle, largely because cPickle starts PUT indices at
# 1 and pickle starts them at 0. See XXX comment in cPickle's put2() --
# there's a comment with an exclamation point there whose meaning
# is a mystery. cPickle also suppresses PUT for objects with a refcount
# of 1.
def dont_test_disassembly(self):
from cStringIO import StringIO
from pickletools import dis
for proto, expected in (0, DATA0_DIS), (1, DATA1_DIS):
s = self.dumps(self._testdata, proto)
filelike = StringIO()
dis(s, out=filelike)
got = filelike.getvalue()
self.assertEqual(expected, got)
def test_recursive_list(self):
l = []
l.append(l)
for proto in protocols:
s = self.dumps(l, proto)
x = self.loads(s)
self.assertEqual(len(x), 1)
self.assert_(x is x[0])
def test_recursive_dict(self):
d = {}
d[1] = d
for proto in protocols:
s = self.dumps(d, proto)
x = self.loads(s)
self.assertEqual(x.keys(), [1])
self.assert_(x[1] is x)
def test_recursive_inst(self):
    """A self-referential instance must round-trip at every protocol.

    Bug fix: the original body dumped with a hard-coded protocol 2 inside
    the `for proto in protocols` loop, so protocols 0 and 1 were never
    actually exercised.
    """
    i = C()
    i.attr = i
    for proto in protocols:
        s = self.dumps(i, proto)
        x = self.loads(s)
        self.assertEqual(dir(x), dir(i))
        # The cycle must be reconstructed as a true self-reference.
        self.assert_(x.attr is x)
def test_recursive_multi(self):
l = []
d = {1:l}
i = C()
i.attr = d
l.append(i)
for proto in protocols:
s = self.dumps(l, proto)
x = self.loads(s)
self.assertEqual(len(x), 1)
self.assertEqual(dir(x[0]), dir(i))
self.assertEqual(x[0].attr.keys(), [1])
self.assert_(x[0].attr[1] is x)
def test_garyp(self):
self.assertRaises(self.error, self.loads, 'garyp')
def test_insecure_strings(self):
insecure = ["abc", "2 + 2", # not quoted
#"'abc' + 'def'", # not a single quoted string
"'abc", # quote is not closed
"'abc\"", # open quote and close quote don't match
"'abc' ?", # junk after close quote
"'\\'", # trailing backslash
# some tests of the quoting rules
#"'abc\"\''",
#"'\\\\a\'\'\'\\\'\\\\\''",
]
for s in insecure:
buf = "S" + s + "\012p0\012."
self.assertRaises(ValueError, self.loads, buf)
if have_unicode:
def test_unicode(self):
endcases = [unicode(''), unicode('<\\u>'), unicode('<\\\u1234>'),
unicode('<\n>'), unicode('<\\>')]
for proto in protocols:
for u in endcases:
p = self.dumps(u, proto)
u2 = self.loads(p)
self.assertEqual(u2, u)
def test_ints(self):
import sys
for proto in protocols:
n = sys.maxint
while n:
for expected in (-n, n):
s = self.dumps(expected, proto)
n2 = self.loads(s)
self.assertEqual(expected, n2)
n = n >> 1
def test_maxint64(self):
maxint64 = (1L << 63) - 1
data = 'I' + str(maxint64) + '\n.'
got = self.loads(data)
self.assertEqual(got, maxint64)
# Try too with a bogus literal.
data = 'I' + str(maxint64) + 'JUNK\n.'
self.assertRaises(ValueError, self.loads, data)
def test_long(self):
for proto in protocols:
# 256 bytes is where LONG4 begins.
for nbits in 1, 8, 8*254, 8*255, 8*256, 8*257:
nbase = 1L << nbits
for npos in nbase-1, nbase, nbase+1:
for n in npos, -npos:
pickle = self.dumps(n, proto)
got = self.loads(pickle)
self.assertEqual(n, got)
# Try a monster. This is quadratic-time in protos 0 & 1, so don't
# bother with those.
nbase = long("deadbeeffeedface", 16)
nbase += nbase << 1000000
for n in nbase, -nbase:
p = self.dumps(n, 2)
got = self.loads(p)
self.assertEqual(n, got)
def test_reduce(self):
pass
def test_getinitargs(self):
pass
def test_metaclass(self):
a = use_metaclass()
for proto in protocols:
s = self.dumps(a, proto)
b = self.loads(s)
self.assertEqual(a.__class__, b.__class__)
def test_structseq(self):
import time
import os
t = time.localtime()
for proto in protocols:
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
if hasattr(os, "stat"):
t = os.stat(os.curdir)
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
if hasattr(os, "statvfs"):
t = os.statvfs(os.curdir)
s = self.dumps(t, proto)
u = self.loads(s)
self.assertEqual(t, u)
# Tests for protocol 2
def test_proto(self):
build_none = pickle.NONE + pickle.STOP
for proto in protocols:
expected = build_none
if proto >= 2:
expected = pickle.PROTO + chr(proto) + expected
p = self.dumps(None, proto)
self.assertEqual(p, expected)
oob = protocols[-1] + 1 # a future protocol
badpickle = pickle.PROTO + chr(oob) + build_none
try:
self.loads(badpickle)
except ValueError, detail:
self.failUnless(str(detail).startswith(
"unsupported pickle protocol"))
else:
self.fail("expected bad protocol number to raise ValueError")
def test_long1(self):
x = 12345678910111213141516178920L
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
self.assertEqual(opcode_in_pickle(pickle.LONG1, s), proto >= 2)
def test_long4(self):
x = 12345678910111213141516178920L << (256*8)
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
self.assertEqual(opcode_in_pickle(pickle.LONG4, s), proto >= 2)
def test_short_tuples(self):
# Map (proto, len(tuple)) to expected opcode.
expected_opcode = {(0, 0): pickle.TUPLE,
(0, 1): pickle.TUPLE,
(0, 2): pickle.TUPLE,
(0, 3): pickle.TUPLE,
(0, 4): pickle.TUPLE,
(1, 0): pickle.EMPTY_TUPLE,
(1, 1): pickle.TUPLE,
(1, 2): pickle.TUPLE,
(1, 3): pickle.TUPLE,
(1, 4): pickle.TUPLE,
(2, 0): pickle.EMPTY_TUPLE,
(2, 1): pickle.TUPLE1,
(2, 2): pickle.TUPLE2,
(2, 3): pickle.TUPLE3,
(2, 4): pickle.TUPLE,
}
a = ()
b = (1,)
c = (1, 2)
d = (1, 2, 3)
e = (1, 2, 3, 4)
for proto in protocols:
for x in a, b, c, d, e:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y, (proto, x, s, y))
expected = expected_opcode[proto, len(x)]
self.assertEqual(opcode_in_pickle(expected, s), True)
def test_singletons(self):
# Map (proto, singleton) to expected opcode.
expected_opcode = {(0, None): pickle.NONE,
(1, None): pickle.NONE,
(2, None): pickle.NONE,
(0, True): pickle.INT,
(1, True): pickle.INT,
(2, True): pickle.NEWTRUE,
(0, False): pickle.INT,
(1, False): pickle.INT,
(2, False): pickle.NEWFALSE,
}
for proto in protocols:
for x in None, False, True:
s = self.dumps(x, proto)
y = self.loads(s)
self.assert_(x is y, (proto, x, s, y))
expected = expected_opcode[proto, x]
self.assertEqual(opcode_in_pickle(expected, s), True)
def test_newobj_tuple(self):
x = MyTuple([1, 2, 3])
x.foo = 42
x.bar = "hello"
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(tuple(x), tuple(y))
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_list(self):
x = MyList([1, 2, 3])
x.foo = 42
x.bar = "hello"
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_generic(self):
for proto in protocols:
for C in myclasses:
B = C.__base__
x = C(C.sample)
x.foo = 42
s = self.dumps(x, proto)
y = self.loads(s)
detail = (proto, C, B, x, y, type(y))
self.assertEqual(B(x), B(y), detail)
self.assertEqual(x.__dict__, y.__dict__, detail)
# Register a type with copy_reg, with extension code extcode. Pickle
# an object of that type. Check that the resulting pickle uses opcode
# (EXT[124]) under proto 2, and not in proto 1.
def produce_global_ext(self, extcode, opcode):
e = ExtensionSaver(extcode)
try:
copy_reg.add_extension(__name__, "MyList", extcode)
x = MyList([1, 2, 3])
x.foo = 42
x.bar = "hello"
# Dump using protocol 1 for comparison.
s1 = self.dumps(x, 1)
self.assert_(__name__ in s1)
self.assert_("MyList" in s1)
self.assertEqual(opcode_in_pickle(opcode, s1), False)
y = self.loads(s1)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
# Dump using protocol 2 for test.
s2 = self.dumps(x, 2)
self.assert_(__name__ not in s2)
self.assert_("MyList" not in s2)
self.assertEqual(opcode_in_pickle(opcode, s2), True)
y = self.loads(s2)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
finally:
e.restore()
def test_global_ext1(self):
self.produce_global_ext(0x00000001, pickle.EXT1) # smallest EXT1 code
self.produce_global_ext(0x000000ff, pickle.EXT1) # largest EXT1 code
def test_global_ext2(self):
self.produce_global_ext(0x00000100, pickle.EXT2) # smallest EXT2 code
self.produce_global_ext(0x0000ffff, pickle.EXT2) # largest EXT2 code
self.produce_global_ext(0x0000abcd, pickle.EXT2) # check endianness
def test_global_ext4(self):
self.produce_global_ext(0x00010000, pickle.EXT4) # smallest EXT4 code
self.produce_global_ext(0x7fffffff, pickle.EXT4) # largest EXT4 code
self.produce_global_ext(0x12abcdef, pickle.EXT4) # check endianness
def test_list_chunking(self):
n = 10 # too small to chunk
x = range(n)
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_appends = count_opcode(pickle.APPENDS, s)
self.assertEqual(num_appends, proto > 0)
n = 2500 # expect at least two chunks when proto > 0
x = range(n)
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_appends = count_opcode(pickle.APPENDS, s)
if proto == 0:
self.assertEqual(num_appends, 0)
else:
self.failUnless(num_appends >= 2)
def test_dict_chunking(self):
n = 10 # too small to chunk
x = dict.fromkeys(range(n))
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_setitems = count_opcode(pickle.SETITEMS, s)
self.assertEqual(num_setitems, proto > 0)
n = 2500 # expect at least two chunks when proto > 0
x = dict.fromkeys(range(n))
for proto in protocols:
s = self.dumps(x, proto)
y = self.loads(s)
self.assertEqual(x, y)
num_setitems = count_opcode(pickle.SETITEMS, s)
if proto == 0:
self.assertEqual(num_setitems, 0)
else:
self.failUnless(num_setitems >= 2)
def test_simple_newobj(self):
x = object.__new__(SimpleNewObj) # avoid __init__
x.abc = 666
for proto in protocols:
s = self.dumps(x, proto)
self.assertEqual(opcode_in_pickle(pickle.NEWOBJ, s), proto >= 2)
y = self.loads(s) # will raise TypeError if __init__ called
self.assertEqual(y.abc, 666)
self.assertEqual(x.__dict__, y.__dict__)
def test_newobj_list_slots(self):
x = SlotList([1, 2, 3])
x.foo = 42
x.bar = "hello"
s = self.dumps(x, 2)
y = self.loads(s)
self.assertEqual(list(x), list(y))
self.assertEqual(x.__dict__, y.__dict__)
self.assertEqual(x.foo, y.foo)
self.assertEqual(x.bar, y.bar)
def test_reduce_overrides_default_reduce_ex(self):
for proto in 0, 1, 2:
x = REX_one()
self.assertEqual(x._reduce_called, 0)
s = self.dumps(x, proto)
self.assertEqual(x._reduce_called, 1)
y = self.loads(s)
self.assertEqual(y._reduce_called, 0)
def test_reduce_ex_called(self):
for proto in 0, 1, 2:
x = REX_two()
self.assertEqual(x._proto, None)
s = self.dumps(x, proto)
self.assertEqual(x._proto, proto)
y = self.loads(s)
self.assertEqual(y._proto, None)
def test_reduce_ex_overrides_reduce(self):
for proto in 0, 1, 2:
x = REX_three()
self.assertEqual(x._proto, None)
s = self.dumps(x, proto)
self.assertEqual(x._proto, proto)
y = self.loads(s)
self.assertEqual(y._proto, None)
# Test classes for reduce_ex
class REX_one(object):
    """Defines only __reduce__; __reduce_ex__ is inherited from object."""

    _reduce_called = 0  # class default; set to 1 on the instance when pickled

    def __reduce__(self):
        # Record on the instance that pickling routed through __reduce__.
        self._reduce_called = 1
        return REX_one, ()
class REX_two(object):
    """Defines only __reduce_ex__; __reduce__ is inherited from object."""

    _proto = None  # class default; records the protocol seen during pickling

    def __reduce_ex__(self, proto):
        # Remember which protocol number the pickler passed in.
        self._proto = proto
        return REX_two, ()
# Defines both hooks; __reduce_ex__ must win, so __reduce__ raises if the
# pickler ever calls it. Returning REX_two here is deliberate: the loaded
# object's type proves which hook produced the pickle.
class REX_three(object):
_proto = None
def __reduce_ex__(self, proto):
self._proto = proto
return REX_two, ()
def __reduce__(self):
raise TestFailed, "This __reduce__ shouldn't be called"
# Test classes for newobj
# One subclass per built-in type; each carries a `sample` value used by
# test_newobj_generic to round-trip an instance of every class below.
class MyInt(int):
sample = 1
class MyLong(long):
sample = 1L
class MyFloat(float):
sample = 1.0
class MyComplex(complex):
sample = 1.0 + 0.0j
class MyStr(str):
sample = "hello"
class MyUnicode(unicode):
sample = u"hello \u1234"
class MyTuple(tuple):
sample = (1, 2, 3)
class MyList(list):
sample = [1, 2, 3]
class MyDict(dict):
sample = {"a": 1, "b": 2}
myclasses = [MyInt, MyLong, MyFloat,
MyComplex,
MyStr, MyUnicode,
MyTuple, MyList, MyDict]
# List subclass restricted to __slots__, for the slots pickling test.
class SlotList(MyList):
__slots__ = ["foo"]
# NEWOBJ must bypass __init__ entirely; this __init__ raises to prove it.
class SimpleNewObj(object):
def __init__(self, a, b, c):
# raise an error, to make sure this isn't called
raise TypeError("SimpleNewObj.__init__() didn't expect to get called")
class AbstractPickleModuleTests(unittest.TestCase):
    """Module-level API checks shared by the pickle and cPickle test cases.

    Subclasses must set ``self.module`` to the module under test.
    """

    def test_dump_closed_file(self):
        """dump() to a closed file must raise ValueError."""
        import os
        f = open(TESTFN, "w")
        try:
            f.close()
            self.assertRaises(ValueError, self.module.dump, 123, f)
        finally:
            os.remove(TESTFN)

    def test_load_closed_file(self):
        """load() from a closed file must raise ValueError.

        Bug fix: this test was a copy-paste of test_dump_closed_file and
        called module.dump(), so the load() path was never exercised.
        """
        import os
        f = open(TESTFN, "w")
        try:
            f.close()
            self.assertRaises(ValueError, self.module.load, f)
        finally:
            os.remove(TESTFN)

    def test_highest_protocol(self):
        # Of course this needs to be changed when HIGHEST_PROTOCOL changes.
        self.assertEqual(self.module.HIGHEST_PROTOCOL, 2)

    def test_callapi(self):
        """dump/dumps/Pickler must accept the protocol positionally and by keyword."""
        from cStringIO import StringIO
        f = StringIO()
        # With and without keyword arguments
        self.module.dump(123, f, -1)
        self.module.dump(123, file=f, protocol=-1)
        self.module.dumps(123, -1)
        self.module.dumps(123, protocol=-1)
        self.module.Pickler(f, -1)
        self.module.Pickler(f, protocol=-1)
class AbstractPersistentPicklerTests(unittest.TestCase):
# This class defines persistent_id() and persistent_load()
# functions that should be used by the pickler. All even integers
# are pickled using persistent ids.
def persistent_id(self, object):
if isinstance(object, int) and object % 2 == 0:
self.id_count += 1
return str(object)
else:
return None
def persistent_load(self, oid):
self.load_count += 1
object = int(oid)
assert object % 2 == 0
return object
def test_persistence(self):
self.id_count = 0
self.load_count = 0
L = range(10)
self.assertEqual(self.loads(self.dumps(L)), L)
self.assertEqual(self.id_count, 5)
self.assertEqual(self.load_count, 5)
def test_bin_persistence(self):
self.id_count = 0
self.load_count = 0
L = range(10)
self.assertEqual(self.loads(self.dumps(L, 1)), L)
self.assertEqual(self.id_count, 5)
self.assertEqual(self.load_count, 5)
|
gpl-3.0
|
openmips/stbgui
|
lib/python/Components/ServiceScan.py
|
1
|
9086
|
from enigma import eComponentScan, iDVBFrontend, eTimer
from Components.NimManager import nimmanager as nimmgr
from Tools.Transponder import getChannelNumber
# Drives a multi-pass channel scan via eComponentScan and reports progress
# to the injected UI widgets.
class ServiceScan:
# Scan state machine values.
Idle = 1
Running = 2
Done = 3
Error = 4
DonePartially = 5
# Error codes reported by eComponentScan.getError(), mapped to messages.
Errors = {
0: _("error starting scanning"),
1: _("error while scanning"),
2: _("no resource manager"),
3: _("no channel list")
}
def scanStatusChanged(self):
if self.state == self.Running:
self.progressbar.setValue(self.scan.getProgress())
self.lcd_summary and self.lcd_summary.updateProgress(self.scan.getProgress())
if self.scan.isDone():
errcode = self.scan.getError()
if errcode == 0:
self.state = self.DonePartially
self.servicelist.listAll()
else:
self.state = self.Error
self.errorcode = errcode
self.network.setText("")
self.transponder.setText("")
else:
result = self.foundServices + self.scan.getNumServices()
percentage = self.scan.getProgress()
if percentage > 99:
percentage = 99
#TRANSLATORS: The stb is performing a channel scan, progress percentage is printed in '%d' (and '%%' will show a single '%' symbol)
message = ngettext("Scanning - %d%% completed", "Scanning - %d%% completed", percentage) % percentage
message += ", "
#TRANSLATORS: Intermediate scanning result, '%d' channel(s) have been found so far
message += ngettext("%d channel found", "%d channels found", result) % result
self.text.setText(message)
transponder = self.scan.getCurrentTransponder()
network = ""
tp_text = ""
if transponder:
tp_type = transponder.getSystem()
if tp_type == iDVBFrontend.feSatellite:
network = _("Satellite")
tp = transponder.getDVBS()
orb_pos = tp.orbital_position
try:
sat_name = str(nimmgr.getSatDescription(orb_pos))
except KeyError:
sat_name = ""
if orb_pos > 1800: # west
orb_pos = 3600 - orb_pos
h = _("W")
else:
h = _("E")
if ("%d.%d" % (orb_pos/10, orb_pos%10)) in sat_name:
network = sat_name
else:
network = ("%s %d.%d %s") % (sat_name, orb_pos / 10, orb_pos % 10, h)
tp_text = { tp.System_DVB_S : "DVB-S", tp.System_DVB_S2 : "DVB-S2" }.get(tp.system, "")
if tp_text == "DVB-S2":
tp_text = ("%s %s") % ( tp_text,
{ tp.Modulation_Auto : "Auto", tp.Modulation_QPSK : "QPSK",
tp.Modulation_8PSK : "8PSK", tp.Modulation_QAM16 : "QAM16",
tp.Modulation_16APSK : "16APSK", tp.Modulation_32APSK : "32APSK" }.get(tp.modulation, ""))
tp_text = ("%s %d%c / %d / %s") % ( tp_text, tp.frequency/1000,
{ tp.Polarisation_Horizontal : 'H', tp.Polarisation_Vertical : 'V', tp.Polarisation_CircularLeft : 'L',
tp.Polarisation_CircularRight : 'R' }.get(tp.polarisation, ' '),
tp.symbol_rate/1000,
{ tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3",
tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8",
tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5",
tp.FEC_9_10 : "9/10", tp.FEC_None : "NONE" }.get(tp.fec, ""))
if tp.is_id > -1 and tp.system == tp.System_DVB_S2:
tp_text = ("%s IS %d") % (tp_text, tp.is_id)
elif tp_type == iDVBFrontend.feCable:
network = _("Cable")
tp = transponder.getDVBC()
tp_text = ("DVB-C/C2 %s %d MHz / SR:%d / FEC:%s") %( { tp.Modulation_Auto : "AUTO",
tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM32 : "QAM32",
tp.Modulation_QAM64 : "QAM64", tp.Modulation_QAM128 : "QAM128",
tp.Modulation_QAM256 : "QAM256" }.get(tp.modulation, ""),
tp.frequency/1000,
tp.symbol_rate/1000,
{ tp.FEC_Auto : "AUTO", tp.FEC_1_2 : "1/2", tp.FEC_2_3 : "2/3",
tp.FEC_3_4 : "3/4", tp.FEC_5_6 : "5/6", tp.FEC_7_8 : "7/8",
tp.FEC_8_9 : "8/9", tp.FEC_3_5 : "3/5", tp.FEC_4_5 : "4/5",
tp.FEC_9_10 : "9/10", tp.FEC_6_7 : "6/7", tp.FEC_None : "NONE" }.get(tp.fec_inner, ""))
elif tp_type == iDVBFrontend.feTerrestrial:
network = _("Terrestrial")
tp = transponder.getDVBT()
channel = getChannelNumber(tp.frequency, self.scanList[self.run]["feid"])
if channel:
channel = _("CH") + "%s " % channel
freqMHz = "%0.1f MHz" % (tp.frequency/1000000.)
tp_text = ("%s %s %s %s") %(
{
tp.System_DVB_T_T2 : "DVB-T/T2",
tp.System_DVB_T : "DVB-T",
tp.System_DVB_T2 : "DVB-T2"
}.get(tp.system, ""),
{
tp.Modulation_QPSK : "QPSK",
tp.Modulation_QAM16 : "QAM16", tp.Modulation_QAM64 : "QAM64",
tp.Modulation_Auto : "AUTO", tp.Modulation_QAM256 : "QAM256"
}.get(tp.modulation, ""),
"%s%s" % (channel, freqMHz.replace(".0","")),
{
tp.Bandwidth_8MHz : "Bw 8MHz", tp.Bandwidth_7MHz : "Bw 7MHz", tp.Bandwidth_6MHz : "Bw 6MHz",
tp.Bandwidth_Auto : "Bw Auto", tp.Bandwidth_5MHz : "Bw 5MHz",
tp.Bandwidth_1_712MHz : "Bw 1.712MHz", tp.Bandwidth_10MHz : "Bw 10MHz"
}.get(tp.bandwidth, ""))
elif tp_type == iDVBFrontend.feATSC:
network = _("ATSC")
tp = transponder.getATSC()
freqMHz = "%0.1f MHz" % (tp.frequency/1000000.)
tp_text = ("%s %s %s %s") % (
{
tp.System_ATSC : _("ATSC"),
tp.System_DVB_C_ANNEX_B : _("DVB-C ANNEX B")
}.get(tp.system, ""),
{
tp.Modulation_Auto : _("Auto"),
tp.Modulation_QAM16 : "QAM16",
tp.Modulation_QAM32 : "QAM32",
tp.Modulation_QAM64 : "QAM64",
tp.Modulation_QAM128 : "QAM128",
tp.Modulation_QAM256 : "QAM256",
tp.Modulation_VSB_8 : "8VSB",
tp.Modulation_VSB_16 : "16VSB"
}.get(tp.modulation, ""),
freqMHz.replace(".0",""),
{
tp.Inversion_Off : _("Off"),
tp.Inversion_On :_("On"),
tp.Inversion_Unknown : _("Auto")
}.get(tp.inversion, ""))
else:
print "unknown transponder type in scanStatusChanged"
self.network.setText(network)
self.transponder.setText(tp_text)
if self.state == self.DonePartially:
self.foundServices += self.scan.getNumServices()
self.text.setText(ngettext("Scanning completed, %d channel found", "Scanning completed, %d channels found", self.foundServices) % self.foundServices)
if self.state == self.Error:
self.text.setText(_("ERROR - failed to scan (%s)!") % (self.Errors[self.errorcode]) )
if self.state == self.DonePartially or self.state == self.Error:
self.delaytimer.start(100, True)
def __init__(self, progressbar, text, servicelist, passNumber, scanList, network, transponder, frontendInfo, lcd_summary):
# UI widgets are injected by the owning screen; scanList holds one dict
# per scan pass with keys "feid", "flags", "transponders" and an
# optional "networkid".
self.foundServices = 0
self.progressbar = progressbar
self.text = text
self.servicelist = servicelist
self.passNumber = passNumber
self.scanList = scanList
self.frontendInfo = frontendInfo
self.transponder = transponder
self.network = network
self.run = 0
self.lcd_summary = lcd_summary
self.scan = None
# execEnd is deferred through a timer so teardown happens outside the
# scan's status callback.
self.delaytimer = eTimer()
self.delaytimer.callback.append(self.execEnd)
def doRun(self):
# Create a fresh eComponentScan for the current pass and seed it with
# that pass's initial transponders.
self.scan = eComponentScan()
self.frontendInfo.frontend_source = lambda : self.scan.getFrontend()
self.feid = self.scanList[self.run]["feid"]
self.flags = self.scanList[self.run]["flags"]
self.networkid = 0
if "networkid" in self.scanList[self.run]:
self.networkid = self.scanList[self.run]["networkid"]
self.state = self.Idle
# Push the initial (idle) state to the UI before starting.
self.scanStatusChanged()
for x in self.scanList[self.run]["transponders"]:
self.scan.addInitial(x)
def updatePass(self):
    """Show "pass x/y (tuner name)" in the UI when scanning multiple passes."""
    total = len(self.scanList)
    if total > 1:
        slot = nimmgr.getNim(self.scanList[self.run]["feid"]).slot_name
        self.passNumber.setText("%s %s/%s (%s)" % (_("pass"), self.run + 1, total, slot))
def execBegin(self):
# Start the current scan pass: hook status/service callbacks, clear the
# result list, and kick off the hardware scan.
self.doRun()
self.updatePass()
self.scan.statusChanged.get().append(self.scanStatusChanged)
self.scan.newService.get().append(self.newService)
self.servicelist.clear()
self.state = self.Running
err = self.scan.start(self.feid, self.flags, self.networkid)
self.frontendInfo.updateFrontendData()
if err:
# start() failed immediately; report "error starting scanning".
self.state = self.Error
self.errorcode = 0
self.scanStatusChanged()
def execEnd(self):
# Invoked via the delay timer: tear down the finished pass, then either
# begin the next pass or mark the whole scan Done.
if self.scan is None:
if not self.isDone():
print "*** warning *** scan was not finished!"
return
self.scan.statusChanged.get().remove(self.scanStatusChanged)
self.scan.newService.get().remove(self.newService)
self.scan = None
if self.run != len(self.scanList) - 1:
self.run += 1
self.execBegin()
else:
self.state = self.Done
def isDone(self):
    """Return True once the scan has reached a terminal state (Done or Error)."""
    return self.state in (self.Done, self.Error)
def newService(self):
# Callback from eComponentScan whenever a service is found: append it to
# the on-screen list and mirror it on the LCD summary, if present.
newServiceName = self.scan.getLastServiceName()
newServiceRef = self.scan.getLastServiceRef()
self.servicelist.addItem((newServiceName, newServiceRef))
self.lcd_summary and self.lcd_summary.updateService(newServiceName)
def destroy(self):
    """Detach callbacks and drop the scan object so the component can be disposed."""
    self.state = self.Idle
    if self.scan is None:
        return
    self.scan.statusChanged.get().remove(self.scanStatusChanged)
    self.scan.newService.get().remove(self.newService)
    self.scan = None
|
gpl-2.0
|
MobinRanjbar/hue
|
desktop/core/ext-py/Babel-0.9.6/babel/messages/tests/mofile.py
|
61
|
2585
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://babel.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://babel.edgewall.org/log/.
import doctest
import gettext
import unittest
from StringIO import StringIO
from babel.messages import mofile, Catalog
class WriteMoTestCase(unittest.TestCase):
    """Unit tests for writing GNU .mo files via ``mofile.write_mo``."""

    def test_sorting(self):
        # Ensure the header is sorted to the first entry so that its charset
        # can be applied to all subsequent messages by GNUTranslations
        # (ensuring all messages are safely converted to unicode)
        catalog = Catalog(locale='en_US')
        catalog.add(u'', '''\
"Content-Type: text/plain; charset=utf-8\n"
"Content-Transfer-Encoding: 8bit\n''')
        catalog.add(u'foo', 'Voh')
        catalog.add((u'There is', u'There are'), (u'Es gibt', u'Es gibt'))
        catalog.add(u'Fizz', '')
        catalog.add(('Fuzz', 'Fuzzes'), ('', ''))
        buf = StringIO()
        mofile.write_mo(buf, catalog)
        buf.seek(0)
        translations = gettext.GNUTranslations(fp=buf)
        # Translated messages round-trip and come back as unicode.
        self.assertEqual(u'Voh', translations.ugettext('foo'))
        assert isinstance(translations.ugettext('foo'), unicode)
        self.assertEqual(u'Es gibt', translations.ungettext('There is', 'There are', 1))
        assert isinstance(translations.ungettext('There is', 'There are', 1), unicode)
        # Untranslated messages fall back to the msgid, still as unicode.
        self.assertEqual(u'Fizz', translations.ugettext('Fizz'))
        assert isinstance(translations.ugettext('Fizz'), unicode)
        self.assertEqual(u'Fuzz', translations.ugettext('Fuzz'))
        assert isinstance(translations.ugettext('Fuzz'), unicode)
        self.assertEqual(u'Fuzzes', translations.ugettext('Fuzzes'))
        assert isinstance(translations.ugettext('Fuzzes'), unicode)

    def test_more_plural_forms(self):
        # A catalog carrying more plural forms than the locale default must
        # not crash the writer.
        catalog2 = Catalog(locale='ru_RU')
        catalog2.add(('Fuzz', 'Fuzzes'), ('', '', ''))
        buf = StringIO()
        mofile.write_mo(buf, catalog2)
def suite():
    """Assemble the doctest and unit-test suites for this module."""
    combined = unittest.TestSuite()
    combined.addTest(doctest.DocTestSuite(mofile))
    combined.addTest(unittest.makeSuite(WriteMoTestCase))
    return combined
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
apache-2.0
|
suzukaze/mycli
|
mycli/sqlcompleter.py
|
6
|
17248
|
from __future__ import print_function
from __future__ import unicode_literals
import logging
from prompt_toolkit.completion import Completer, Completion
from .packages.completion_engine import suggest_type
from .packages.parseutils import last_word
from .packages.special.favoritequeries import favoritequeries
from re import compile, escape
from .packages.tabulate import table_formats
try:
from collections import Counter
except ImportError:
# python 2.6
from .packages.counter import Counter
_logger = logging.getLogger(__name__)
class SQLCompleter(Completer):
    """Prompt-toolkit completer for mycli.

    Combines static word lists (SQL keywords, built-in functions, SHOW and
    CHANGE MASTER items) with metadata discovered at runtime (databases,
    tables, views, columns, functions, users) and turns suggestions from
    the completion engine into prompt_toolkit ``Completion`` objects.
    """

    # Static SQL keyword list; extended at runtime via extend_keywords().
    keywords = ['ACCESS', 'ADD', 'ALL', 'ALTER TABLE', 'AND', 'ANY', 'AS',
            'ASC', 'AUDIT', 'BEFORE', 'BETWEEN', 'BINARY', 'BY', 'CASE',
            'CHANGE MASTER TO', 'CHAR', 'CHECK', 'CLUSTER', 'COLUMN',
            'COMMENT', 'COMPRESS', 'CONNECT', 'COPY', 'CREATE', 'CURRENT',
            'DATABASE', 'DATE', 'DECIMAL', 'DEFAULT', 'DELETE FROM',
            'DELIMITER', 'DESC', 'DESCRIBE', 'DISTINCT', 'DROP', 'ELSE',
            'ENCODING', 'ESCAPE', 'EXCLUSIVE', 'EXISTS', 'EXTENSION', 'FILE',
            'FLOAT', 'FOR', 'FORMAT', 'FORCE_QUOTE', 'FORCE_NOT_NULL',
            'FREEZE', 'FROM', 'FULL', 'FUNCTION', 'GRANT', 'GROUP BY',
            'HAVING', 'HEADER', 'HOST', 'IDENTIFIED', 'IMMEDIATE', 'IN',
            'INCREMENT', 'INDEX', 'INITIAL', 'INSERT INTO', 'INTEGER',
            'INTERSECT', 'INTO', 'INTERVAL', 'IS', 'JOIN', 'LEFT', 'LEVEL',
            'LIKE', 'LIMIT', 'LOCK', 'LOG', 'LOGS', 'LONG', 'MASTER', 'MINUS',
            'MODE', 'MODIFY', 'NOAUDIT', 'NOCOMPRESS', 'NOT', 'NOWAIT', 'NULL',
            'NUMBER', 'OIDS', 'OF', 'OFFLINE', 'ON', 'ONLINE', 'OPTION', 'OR',
            'ORDER BY', 'OUTER', 'OWNER', 'PASSWORD', 'PCTFREE', 'PORT',
            'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCESSLIST', 'PURGE', 'QUOTE',
            'RAW', 'RENAME', 'REPAIR', 'RESOURCE', 'RESET', 'REVOKE', 'RIGHT',
            'ROW', 'ROWID', 'ROWNUM', 'ROWS', 'SELECT', 'SESSION', 'SET',
            'SHARE', 'SHOW', 'SIZE', 'SLAVE', 'SLAVES', 'SMALLINT', 'START',
            'STOP', 'SUCCESSFUL', 'SYNONYM', 'SYSDATE', 'TABLE', 'TEMPLATE',
            'THEN', 'TO', 'TRIGGER', 'TRUNCATE', 'UID', 'UNION', 'UNIQUE',
            'UPDATE', 'USE', 'USER', 'USING', 'VALIDATE', 'VALUES', 'VARCHAR',
            'VARCHAR2', 'VIEW', 'WHEN', 'WHENEVER', 'WHERE', 'WITH']

    # Built-in SQL functions, completed with startswith matching.
    functions = ['AVG', 'COUNT', 'DISTINCT', 'FIRST', 'FORMAT', 'LAST',
            'LCASE', 'LEN', 'MAX', 'MIN', 'MID', 'NOW', 'ROUND', 'SUM', 'TOP',
            'UCASE']

    # Items offered after SHOW; populated at runtime by extend_show_items().
    show_items = []

    # Items offered after CHANGE MASTER TO.
    change_items = ['MASTER_BIND', 'MASTER_HOST', 'MASTER_USER',
            'MASTER_PASSWORD', 'MASTER_PORT', 'MASTER_CONNECT_RETRY',
            'MASTER_HEARTBEAT_PERIOD', 'MASTER_LOG_FILE', 'MASTER_LOG_POS',
            'RELAY_LOG_FILE', 'RELAY_LOG_POS', 'MASTER_SSL', 'MASTER_SSL_CA',
            'MASTER_SSL_CAPATH', 'MASTER_SSL_CERT', 'MASTER_SSL_KEY',
            'MASTER_SSL_CIPHER', 'MASTER_SSL_VERIFY_SERVER_CERT',
            'IGNORE_SERVER_IDS']

    # Known user accounts; populated at runtime by extend_users().
    users = []

    def __init__(self, smart_completion=True):
        """Create the completer.

        :param smart_completion: when False, fall back to plain
            startswith matching against all known completions.
        """
        # BUGFIX: name the class explicitly. ``super(self.__class__, self)``
        # recurses infinitely as soon as SQLCompleter is subclassed, because
        # self.__class__ is then the subclass in every frame.
        super(SQLCompleter, self).__init__()
        self.smart_completion = smart_completion
        # Individual words of the multi-word keywords, used to decide
        # whether a name needs backtick quoting.
        self.reserved_words = set()
        for x in self.keywords:
            self.reserved_words.update(x.split())
        # Raw string: '\$' in a plain literal is an invalid escape sequence.
        self.name_pattern = compile(r"^[_a-z][_a-z0-9\$]*$")
        self.special_commands = []
        self.table_formats = table_formats()
        self.reset_completions()

    def escape_name(self, name):
        """Backtick-quote *name* if it is not a plain identifier or clashes
        with a reserved word or built-in function name."""
        if name and ((not self.name_pattern.match(name))
                or (name.upper() in self.reserved_words)
                or (name.upper() in self.functions)):
            name = '`%s`' % name
        return name

    def unescape_name(self, name):
        """ Unquote a string."""
        if name and name[0] == '"' and name[-1] == '"':
            name = name[1:-1]
        return name

    def escaped_names(self, names):
        """Apply escape_name() to every element of *names*."""
        return [self.escape_name(name) for name in names]

    def extend_special_commands(self, special_commands):
        # Special commands are not part of all_completions since they can only
        # be at the beginning of a line.
        self.special_commands.extend(special_commands)

    def extend_database_names(self, databases):
        """Register additional database names."""
        self.databases.extend(databases)

    def extend_keywords(self, additional_keywords):
        """Register additional keywords and include them in the pool of
        generic completions."""
        self.keywords.extend(additional_keywords)
        self.all_completions.update(additional_keywords)

    def extend_show_items(self, show_items):
        # show_items is expected to be an iterable of result rows (tuples).
        for show_item in show_items:
            self.show_items.extend(show_item)
            self.all_completions.update(show_item)

    def extend_change_items(self, change_items):
        # change_items is expected to be an iterable of result rows (tuples).
        for change_item in change_items:
            self.change_items.extend(change_item)
            self.all_completions.update(change_item)

    def extend_users(self, users):
        # users is expected to be an iterable of result rows (tuples).
        for user in users:
            self.users.extend(user)
            self.all_completions.update(user)

    def extend_schemata(self, schema):
        """Register a schema (database) name in all metadata dicts."""
        if schema is None:
            return
        # dbmetadata.values() are the 'tables', 'views' and 'functions'
        # dicts; give each of them an empty entry for the new schema.
        # (The previous explicit 'tables' assignment was redundant with
        # this loop and has been removed.)
        for metadata in self.dbmetadata.values():
            metadata[schema] = {}
        # NOTE(review): set.update() on a plain string adds its individual
        # characters; confirm the intended argument type upstream.
        self.all_completions.update(schema)

    def extend_relations(self, data, kind):
        """ extend metadata for tables or views
        :param data: list of (rel_name, ) tuples
        :param kind: either 'tables' or 'views'
        :return:
        """
        # 'data' is a generator object. It can throw an exception while being
        # consumed. This could happen if the user has launched the app without
        # specifying a database name. This exception must be handled to prevent
        # crashing.
        try:
            data = [self.escaped_names(d) for d in data]
        except Exception:
            data = []
        # dbmetadata['tables'][$schema_name][$table_name] should be a list of
        # column names. Default to an asterisk
        metadata = self.dbmetadata[kind]
        for relname in data:
            try:
                metadata[self.dbname][relname[0]] = ['*']
            except AttributeError:
                _logger.error('%r %r listed in unrecognized schema %r',
                              kind, relname[0], self.dbname)
            self.all_completions.add(relname[0])

    def extend_columns(self, column_data, kind):
        """ extend column metadata
        :param column_data: list of (rel_name, column_name) tuples
        :param kind: either 'tables' or 'views'
        :return:
        """
        # 'column_data' is a generator object. It can throw an exception while
        # being consumed. This could happen if the user has launched the app
        # without specifying a database name. This exception must be handled to
        # prevent crashing.
        try:
            column_data = [self.escaped_names(d) for d in column_data]
        except Exception:
            column_data = []
        metadata = self.dbmetadata[kind]
        for relname, column in column_data:
            metadata[self.dbname][relname].append(column)
            self.all_completions.add(column)

    def extend_functions(self, func_data):
        """Register user-defined function names for the current database."""
        # 'func_data' is a generator object. It can throw an exception while
        # being consumed. This could happen if the user has launched the app
        # without specifying a database name. This exception must be handled to
        # prevent crashing.
        try:
            func_data = [self.escaped_names(d) for d in func_data]
        except Exception:
            func_data = []
        # dbmetadata['functions'][$schema_name][$function_name] should return
        # function metadata.
        metadata = self.dbmetadata['functions']
        for func in func_data:
            metadata[self.dbname][func[0]] = None
            self.all_completions.add(func[0])

    def set_dbname(self, dbname):
        """Remember the currently selected database name."""
        self.dbname = dbname

    def reset_completions(self):
        """Drop all runtime-discovered metadata, keeping only the static
        keyword and function lists."""
        self.databases = []
        self.dbname = ''
        self.dbmetadata = {'tables': {}, 'views': {}, 'functions': {}}
        self.all_completions = set(self.keywords + self.functions)

    @staticmethod
    def find_matches(text, collection, start_only=False, fuzzy=True):
        """Find completion matches for the given text.

        Given the user's input text and a collection of available
        completions, find completions matching the last word of the
        text.

        If `start_only` is True, the text will match an available
        completion only at the beginning. Otherwise, a completion is
        considered a match if the text appears anywhere within it.

        yields prompt_toolkit Completion instances for any matches found
        in the collection of available completions.
        """
        text = last_word(text, include='most_punctuations').lower()
        completions = []
        if fuzzy:
            # Build a subsequence regex, e.g. 'abc' -> 'a.*?b.*?c', and rank
            # matches by match length then match position.
            regex = '.*?'.join(map(escape, text))
            pat = compile('(%s)' % regex)
            for item in sorted(collection):
                r = pat.search(item.lower())
                if r:
                    completions.append((len(r.group()), r.start(), item))
        else:
            match_end_limit = len(text) if start_only else None
            for item in sorted(collection):
                match_point = item.lower().find(text, 0, match_end_limit)
                if match_point >= 0:
                    completions.append((len(text), match_point, item))
        return (Completion(z, -len(text)) for x, y, z in sorted(completions))

    def get_completions(self, document, complete_event, smart_completion=None):
        """Yield Completion objects for the current cursor position.

        Dispatches on the suggestion types produced by the completion
        engine (column, function, table, view, alias, database, keyword,
        show/change items, user, special command, favorite query,
        table format).
        """
        word_before_cursor = document.get_word_before_cursor(WORD=True)
        if smart_completion is None:
            smart_completion = self.smart_completion

        # If smart_completion is off then match any word that starts with
        # 'word_before_cursor'.
        if not smart_completion:
            return self.find_matches(word_before_cursor, self.all_completions,
                                     start_only=True, fuzzy=False)

        completions = []
        suggestions = suggest_type(document.text, document.text_before_cursor)
        for suggestion in suggestions:
            _logger.debug('Suggestion type: %r', suggestion['type'])
            if suggestion['type'] == 'column':
                tables = suggestion['tables']
                _logger.debug("Completion column scope: %r", tables)
                scoped_cols = self.populate_scoped_cols(tables)
                if suggestion.get('drop_unique'):
                    # drop_unique is used for 'tb11 JOIN tbl2 USING (...'
                    # which should suggest only columns that appear in more than
                    # one table
                    scoped_cols = [col for (col, count)
                                   in Counter(scoped_cols).items()
                                   if count > 1 and col != '*']
                cols = self.find_matches(word_before_cursor, scoped_cols)
                completions.extend(cols)
            elif suggestion['type'] == 'function':
                # suggest user-defined functions using substring matching
                funcs = self.populate_schema_objects(suggestion['schema'],
                                                     'functions')
                user_funcs = self.find_matches(word_before_cursor, funcs)
                completions.extend(user_funcs)
                # suggest hardcoded functions using startswith matching only if
                # there is no schema qualifier. If a schema qualifier is
                # present it probably denotes a table.
                # eg: SELECT * FROM users u WHERE u.
                if not suggestion['schema']:
                    predefined_funcs = self.find_matches(word_before_cursor,
                                                         self.functions,
                                                         start_only=True,
                                                         fuzzy=False)
                    completions.extend(predefined_funcs)
            elif suggestion['type'] == 'table':
                tables = self.populate_schema_objects(suggestion['schema'],
                                                      'tables')
                tables = self.find_matches(word_before_cursor, tables)
                completions.extend(tables)
            elif suggestion['type'] == 'view':
                views = self.populate_schema_objects(suggestion['schema'],
                                                     'views')
                views = self.find_matches(word_before_cursor, views)
                completions.extend(views)
            elif suggestion['type'] == 'alias':
                aliases = suggestion['aliases']
                aliases = self.find_matches(word_before_cursor, aliases)
                completions.extend(aliases)
            elif suggestion['type'] == 'database':
                dbs = self.find_matches(word_before_cursor, self.databases)
                completions.extend(dbs)
            elif suggestion['type'] == 'keyword':
                keywords = self.find_matches(word_before_cursor, self.keywords,
                                             start_only=True,
                                             fuzzy=False)
                completions.extend(keywords)
            elif suggestion['type'] == 'show':
                show_items = self.find_matches(word_before_cursor,
                                               self.show_items,
                                               start_only=False,
                                               fuzzy=True)
                completions.extend(show_items)
            elif suggestion['type'] == 'change':
                change_items = self.find_matches(word_before_cursor,
                                                 self.change_items,
                                                 start_only=False,
                                                 fuzzy=True)
                completions.extend(change_items)
            elif suggestion['type'] == 'user':
                users = self.find_matches(word_before_cursor, self.users,
                                          start_only=False,
                                          fuzzy=True)
                completions.extend(users)
            elif suggestion['type'] == 'special':
                special = self.find_matches(word_before_cursor,
                                            self.special_commands,
                                            start_only=True,
                                            fuzzy=False)
                completions.extend(special)
            elif suggestion['type'] == 'favoritequery':
                queries = self.find_matches(word_before_cursor,
                                            favoritequeries.list(),
                                            start_only=False, fuzzy=True)
                completions.extend(queries)
            elif suggestion['type'] == 'table_format':
                formats = self.find_matches(word_before_cursor,
                                            self.table_formats,
                                            start_only=True, fuzzy=False)
                completions.extend(formats)

        return completions

    def populate_scoped_cols(self, scoped_tbls):
        """ Find all columns in a set of scoped_tables
        :param scoped_tbls: list of (schema, table, alias) tuples
        :return: list of column names
        """
        columns = []
        meta = self.dbmetadata
        for tbl in scoped_tbls:
            # A fully qualified schema.relname reference or default_schema
            # DO NOT escape schema names.
            schema = tbl[0] or self.dbname
            relname = tbl[1]
            escaped_relname = self.escape_name(tbl[1])
            # We don't know if schema.relname is a table or view. Since
            # tables and views cannot share the same name, we can check one
            # at a time
            try:
                columns.extend(meta['tables'][schema][relname])
                # Table exists, so don't bother checking for a view
                continue
            except KeyError:
                try:
                    columns.extend(meta['tables'][schema][escaped_relname])
                    # Table exists, so don't bother checking for a view
                    continue
                except KeyError:
                    pass
            try:
                columns.extend(meta['views'][schema][relname])
            except KeyError:
                pass
        return columns

    def populate_schema_objects(self, schema, obj_type):
        """Returns list of tables or functions for a (optional) schema"""
        metadata = self.dbmetadata[obj_type]
        schema = schema or self.dbname
        try:
            objects = metadata[schema].keys()
        except KeyError:
            # schema doesn't exist
            objects = []
        return objects
|
bsd-3-clause
|
hazrpg/calibre
|
src/calibre/utils/mem.py
|
12
|
1508
|
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
'''
Measure memory usage of the current process.
The key function is memory() which returns the current memory usage in MB.
You can pass a number to memory and it will be subtracted from the returned
value.
'''
import gc, os
from calibre.constants import iswindows, islinux
def get_memory():
    'Return memory usage in bytes'
    import psutil
    proc = psutil.Process(os.getpid())
    # Newer psutil exposes memory_info_ex(); fall back to the old name.
    if hasattr(proc, 'memory_info_ex'):
        info = proc.memory_info_ex()
    else:
        info = proc.get_ext_memory_info()
    if iswindows:
        attr = 'wset'
    elif islinux:
        attr = 'data'
    else:
        attr = 'rss'
    return getattr(info, attr)
def memory(since=0.0):
    'Return memory used in MB. The value of since is subtracted from the used memory'
    used_mb = get_memory() / float(1024 ** 2)
    return used_mb - since
def gc_histogram():
    """Returns per-class counts of existing objects."""
    counts = {}
    for obj in gc.get_objects():
        cls = type(obj)
        counts[cls] = counts.get(cls, 0) + 1
    return counts
def diff_hists(h1, h2):
    """Prints differences between two results of gc_histogram()."""
    # NOTE: mutates h2 by inserting zero counts for classes present only in
    # h1; classes present only in h2 are never reported.
    for k in h1:
        if k not in h2:
            h2[k] = 0
        if h1[k] != h2[k]:
            # '+' prefix marks a count that grew between the two snapshots
            print "%s: %d -> %d (%s%d)" % (
                k, h1[k], h2[k], h2[k] > h1[k] and "+" or "", h2[k] - h1[k])
|
gpl-3.0
|
nonhermitian/scipy
|
tools/win32/detect_cpu_extensions_wine.py
|
79
|
7518
|
#!/usr/bin/python
"""
Detect which x86 CPU extension instructions the given scipy install uses.
This file can be used in the release process to check that the nosse installer
does not contain SSE instructions. This has happened before, see for example
ticket #1170.
Is meant to be run on OS X with Wine. Make sure objdump.exe is installed.
See also tools/win32build/misc/x86analysis.py in numpy for a similar script
that checks a single file.
"""
import subprocess
import sys
import os
from optparse import OptionParser
OBJDUMP = os.environ['HOME'] + '/.wine/drive_c/MinGW/bin/objdump.exe'
SCIPY_PY25 = os.environ['HOME'] + '/.wine/drive_c/Python25/Lib/site-packages/scipy/'
SCIPY_PY26 = os.environ['HOME'] + '/.wine/drive_c/Python26/Lib/site-packages/scipy/'
SCIPY_PY27 = os.environ['HOME'] + '/.wine/drive_c/Python27/Lib/site-packages/scipy/'
SCIPY_PY31 = os.environ['HOME'] + '/.wine/drive_c/Python31/Lib/site-packages/scipy/'
NUMPY_PY25 = os.environ['HOME'] + '/.wine/drive_c/Python25/Lib/site-packages/numpy/'
NUMPY_PY26 = os.environ['HOME'] + '/.wine/drive_c/Python26/Lib/site-packages/numpy/'
NUMPY_PY27 = os.environ['HOME'] + '/.wine/drive_c/Python27/Lib/site-packages/numpy/'
NUMPY_PY31 = os.environ['HOME'] + '/.wine/drive_c/Python31/Lib/site-packages/numpy/'
SSE3_LIBS = os.environ['HOME'] + '/.wine/drive_c/local/lib/yop/sse3'
SSE2_LIBS = os.environ['HOME'] + '/.wine/drive_c/local/lib/yop/sse2'
NOSSE_LIBS = os.environ['HOME'] + '/.wine/drive_c/local/lib/yop/nosse'
# The install to check
basepath = SCIPY_PY25
def main():
    """Walk the install tree under *basepath* and summarize every CPU
    extension code found in its binary files."""
    # a set of all unique CPU extension codes found
    found = set()
    for root, _dirs, files in os.walk(basepath):
        for name in files:
            if os.path.splitext(name)[1] in ('.a', '.pyd', '.so'):
                found.update(single_file_checkext(os.path.join(root, name)))
    write_summary(found)
def single_file_checkext(fname, striproot=True):
    """Check one binary file and print the extension codes it uses.

    With striproot=True the leading *basepath* is stripped from the
    printed file name. Returns the list of codes for the file.
    """
    label = fname.replace(basepath, '') if striproot else fname
    sys.stdout.write('%s: ' % label)
    sys.stdout.flush()
    codes = process(path_as_windows(fname))
    sys.stdout.write(" ".join(codes))
    sys.stdout.write("\n")
    return codes
def path_as_windows(fpath):
    """Return the file path as Wine expects it (C:\\ drive prefix)."""
    tail = fpath.split('drive_c')[1]
    return 'C:\\' + tail
def write_summary(allcodes):
    """Write a summary of all found codes to stdout."""
    # The banner strings are flush-left inside the triple quotes on purpose.
    print """\n
----------------------------------------------------------------------------
Checked all binary files for CPU extension codes. Found the following codes:"""
    # One code per line, in set iteration order (unsorted).
    for code in allcodes:
        print code
    print """
----------------------------------------------------------------------------
"""
def process(fn):
    """Disassemble *fn* with objdump under Wine and return the sorted list
    of CPU extension names whose instructions appear in it."""
    p = subprocess.Popen(['wine', OBJDUMP, '-d', fn], stdout=subprocess.PIPE)
    codes = {}
    for line in p.stdout:
        # objdump -d lines look like: address \t raw bytes \t mnemonic args
        r = line.split("\t")
        if len(r) != 3:
            continue
        instr = r[2].split()[0].lower()
        if instr in INSTRS:
            # dict used as a set of extension names seen so far
            codes[INSTRS[instr]] = True
            print instr
    codes = codes.keys()
    codes.sort()
    return codes
#------------------------------------------------------------------------------
# Instruction lists
#------------------------------------------------------------------------------
# x86
EXTS_x86 = dict(
_486='bswap cmpxch cpuid invd invlpg wbinvd xadd',
pentium='cmpxchg8b rdmsr rdtsc wrmsr',
pentium_mmx='rdpmc',
pentium_pro='cmova cmovae cmovb cmovbe cmovc cmove cmovg cmovge cmovl cmovle cmovna cmovnae cmovnb cmovnbe cmovnc cmovne cmovng cmovnge cmovnl cmovnle cmovno cmovnp cmovns cmovnz cmovo cmovp cmovpe cmovpo cmovs cmovz sysenter sysexit rdpmc ud2',
amd_k6_2='syscall sysret',
sse='maskmovq movntps movntq prefetch0 prefetch1 prefetch2 prefetchnta sfence',
sse2='clflush lfence maskmovdqu mfence movntdq movnti movntpd pause',
sse3='lddqu',
sse3_intel='monitor mwait',
intel_vt='vmptrld vmptrst vmclear vmread vmwrite vmcall vmlaunch vmresume vmxoff vmxon',
amd_v='clgi skinit stgi vmload vmmcall vmrun vmsave',
x86_64='cmpxchg16b rdtscp',
sse4a='lzcnt popcnt',
)
# x87
EXTS_x87 = dict(
pentium_pro='fcmovb, fcmovbe, fcmove, fcmovnb, fcmovnbe, fcmovne, fcmovnu, fcmovu fcomi fcomip fucomi fucomip',
sse='fxrstor fxsave',
sse3='fisttp',
undocumented='ffreep',
)
# SIMD
EXTS_simd = dict(
mmx='emms movd movq packssdw packsswb packuswb paddb paddd paddsb paddsw paddusb paddusw paddw pand pandn pcmpeqb pcmpeqd pcmpeqw pcmpgtb pcmpgtd pcmpgtw pmaddwd pmulhw pmullw por pslld psllq psllw psrad psraw psrld psrlq psrlw psubb psubd psubsb psubsw psubusb psubusw psubw punpckhbw punpckhdq punpckhwd punpcklbw punpckldq punpcklwd pxor',
emmx='paveb paddsiw pmagw pdistib psubsiw pmvzb pmulhrw pmvnzb pmvlzb pmvgezb pmulhriw pmachriw',
_3dnow='femms pavgusb pf2id pfacc pfadd pfcmpeq pfcmpge pfcmpgt pfmax pfmin pfmul pfrcp pfrcpit1 pfrcpit2 pfrsqit1 pfrsqrt pfsub pfsubr pi2fd pmulhrw prefetch prefetchw',
_3dnowplus='pf2iw pfnacc pfpnacc pi2fw pswapd',
_3dnowplus_geodegx='pfrsqrtv pfrcpv',
sse='addps addss cmpps cmpss comiss cvtpi2ps cvtps2pi cvtsi2ss cvtss2si cvttps2pi cvttss2si divps divss ldmxcsr maxps maxss minps minss movaps movhlps movhps movlhps movlps movmskps movntps movss movups mulps mulss rcpps rcpss rsqrtps rsqrtss shufps sqrtps sqrtss stmxcsr subps subss ucomiss unpckhps unpcklps andnps andps orps pavgb pavgw pextrw pinsrw pmaxsw pmaxub pminsw pminub pmovmskb pmulhuw psadbw pshufw xorps',
sse2='addpd addsd andnpd andpd cmppd cmpsd comisd cvtdq2pd cvtdq2ps cvtpd2dq cvtpd2pi cvtpd2ps cvtpi2pd cvtps2dq cvtps2pd cvtsd2si cvtsd2ss cvtsi2sd cvtss2sd cvttpd2dq cvttpd2pi cvtps2dq cvttsd2si divpd divsd maxpd maxsd minpd minsd movapd movhpd movlpd movmskpd movsd movupd mulpd mulsd orpd shufpd sqrtpd sqrtsd subpd subsd ucomisd unpckhpd unpcklpd xorpd movdq2q movdqa movdqu movq2dq paddq psubq pmuludq pshufhw pshuflw pshufd pslldq psrldq punpckhqdq punpcklqdq',
sse3='addsubpd addsubps haddpd haddps hsubpd hsubps movddup movshdup movsldup',
ssse3='psignw psignd psignb pshufb pmulhrsw pmaddubsw phsubw phsubsw phsubd phaddw phaddsw phaddd palignr pabsw pabsd pabsb',
sse4_1='mpsadbw phminposuw pmulld pmuldq dpps dppd blendps blendpd blendvps blendvpd pblendvb pblendw pminsb pmaxsb pminuw pmaxuw pminud pmaxud pminsd pmaxsd roundps roundss roundpd roundsd insertps pinsrb pinsrd/pinsrq extractps pextrb pextrw pextrd/pextrq pmovsxbw pmovzxbw pmovsxbd pmovzxbd pmovsxbq pmovzxbq pmovsxwd pmovzxwd pmovsxwq pmovzxwq pmovsxdq pmovzxdq ptest pcmpeqq packusdw movntdqa',
sse4a='extrq insertq movntsd movntss',
sse4_2='crc32 pcmpestri pcmpestrm pcmpistri pcmpistrm pcmpgtq',
fma='vfmaddpd vfmaddps vfmaddsd vfmaddss vfmaddsubpd vfmaddsubps vfmsubaddpd vfmsubaddps vfmsubpd vfmsubps vfmsubsd vfmsubss vfnmaddpd vfnmaddps vfnmaddsd vfnmadss vfnmsubpd vfnmsubps vfnmsubsd vfnmsubss',
)
# Invert the EXTS_* tables: map each instruction mnemonic to the name of the
# CPU extension that introduced it.  Keys beginning with '_' (used only to
# make identifiers such as '_486' valid Python) have the underscore stripped.
INSTRS = dict()
for ext in [EXTS_x86, EXTS_x87, EXTS_simd]:
    for key, value in ext.items():
        if key.startswith('_'):
            key = key[1:]
        for v in value.split():
            INSTRS[v] = key
#------------------------------------------------------------------------------
if __name__ == "__main__": main()
|
bsd-3-clause
|
Forage/Gramps
|
po/update_po.py
|
1
|
21490
|
#! /usr/bin/env python
#
# update_po - a gramps tool to update translations
#
# Copyright (C) 2006-2006 Kees Bakker
# Copyright (C) 2006 Brian Matherly
# Copyright (C) 2008 Stephen George
# Copyright (C) 2012
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
update_po.py for Gramps translations.
Examples:
python update_po.py -t
Tests if 'gettext' and 'python' are well configured.
python update_po.py -h
Calls help and command line interface.
python update_po.py -p
Generates a new template/catalog (gramps.pot).
python update_po.py -m de.po
Merges 'de.po' file with 'gramps.pot'.
python update_po.py -k de.po
Checks 'de.po' file, tests to compile and generates a textual resume.
"""
from __future__ import print_function
import os
import sys
from argparse import ArgumentParser
# Pick the gettext tool commands for the current platform.
# Windows OS
if sys.platform == 'win32':
    # GetText Win 32 obtained from http://gnuwin32.sourceforge.net/packages/gettext.htm
    # ....\gettext\bin\msgmerge.exe needs to be on the path
    msgmergeCmd = os.path.join('C:', 'Program Files(x86)', 'gettext', 'bin', 'msgmerge.exe')
    msgfmtCmd = os.path.join('C:', 'Program Files(x86)', 'gettext', 'bin', 'msgfmt.exe')
    msgattribCmd = os.path.join('C:', 'Program Files(x86)', 'gettext', 'bin', 'msgattrib.exe')
    xgettextCmd = os.path.join('C:', 'Program Files(x86)', 'gettext', 'bin', 'xgettext.exe')
    pythonCmd = os.path.join(sys.prefix, 'bin', 'python.exe')
# Others OS
# NOTE(review): 'linux2' is the Python 2 value of sys.platform; on Python 3
# it is plain 'linux', which would fall into the error branch -- confirm the
# intended interpreter version.
elif sys.platform in ['linux2', 'darwin', 'cygwin']:
    msgmergeCmd = 'msgmerge'
    msgfmtCmd = 'msgfmt'
    msgattribCmd = 'msgattrib'
    xgettextCmd = 'xgettext'
    pythonCmd = os.path.join(sys.prefix, 'bin', 'python')
else:
    print("Found platform %s, OS %s" % (sys.platform, os.name))
    print ("Update PO ERROR: unknown system, don't know msgmerge, ... commands")
    # NOTE(review): exits with status 0 even though this is an error path.
    sys.exit(0)
# List of available languages, useful for grouped actions
# need files with po extension
LANG = [file for file in os.listdir('.') if file.endswith('.po')]
# add a special 'all' argument (for 'check' and 'merge' arguments)
LANG.append("all")
# visual polish on the languages list
LANG.sort()
def tests():
    """
    Testing installed programs.
    We made tests (-t flag) by displaying versions of tools if properly
    installed. Cannot run all commands without 'gettext' and 'python'.
    """
    # The five checks shared an identical try/print/os.system/except pattern;
    # factored into _try_version(). The bare 'except:' clauses were narrowed
    # to 'except Exception' so SystemExit/KeyboardInterrupt are not masked.
    _try_version("\n====='msgmerge'=(merge our translation)================\n",
                 msgmergeCmd,
                 'Please, install %(program)s for updating your translation')
    _try_version("\n==='msgfmt'=(format our translation for installation)==\n",
                 msgfmtCmd,
                 'Please, install %(program)s for checking your translation')
    _try_version("\n===='msgattrib'==(list groups of messages)=============\n",
                 msgattribCmd,
                 'Please, install %(program)s for listing groups of messages')
    _try_version("\n===='xgettext' =(generate a new template)==============\n",
                 xgettextCmd,
                 'Please, install %(program)s for generating a new template')
    _try_version("\n=================='python'=============================\n",
                 pythonCmd,
                 'Please, install python')

def _try_version(banner, program, advice):
    """Print *banner* and run ``program -V``; print *advice* on failure."""
    try:
        print(banner)
        os.system('''%(program)s -V''' % {'program': program})
    except Exception:
        # advice may or may not contain a %(program)s placeholder;
        # %-formatting with a dict is a no-op when it does not.
        print(advice % {'program': program})
def TipsParse(filename, mark):
    """
    Experimental alternative to 'intltool-extract' for 'tips.xml'.
    """
    from xml.etree import ElementTree
    tree = ElementTree.parse(filename)
    root = tree.getroot()
    '''
    <?xml version="1.0" encoding="UTF-8"?>
    <tips>
      <_tip number="1">
        <b>Working with Dates</b>
            <br/>
        A range of dates can be given by using the format "between
        January 4, 2000 and March 20, 2003". You can also indicate
        the level of confidence in a date and even choose between seven
        different calendars. Try the button next to the date field in the
        Events Editor.
      </_tip>
    char *s = N_("<b>Working with Dates</b><br/>A range of dates can be
    given by using the format "between January 4, 2000 and March 20,
    2003". You can also indicate the level of confidence in a date
    and even choose between seven different calendars. Try the button
    next to the date field in the Events Editor.");
    gramps.pot:
    msgid ""
    "<b>Working with Dates</b><br/>A range of dates can be given by using the "
    "format "between January 4, 2000 and March 20, 2003". You can also "
    "indicate the level of confidence in a date and even choose between seven "
    "different calendars. Try the button next to the date field in the Events "
    "Editor."
    '''
    tips = open('../data/tips.xml.in.h', 'w')
    marklist = root.iter(mark)
    for key in marklist:
        # Serialize the whole tip element, then strip the XML wrapper so
        # only the translatable payload remains.
        # NOTE(review): on Python 3, tostring(..., encoding="UTF-8") returns
        # bytes, which would break the str.replace() calls below -- this
        # function assumes Python 2.
        tip = ElementTree.tostring(key, encoding="UTF-8")
        tip = tip.replace("<?xml version='1.0' encoding='UTF-8'?>", "")
        tip = tip.replace('\n<_tip number="%(number)s">' % key.attrib, "")
        tip = tip.replace("<br />", "<br/>")
        #tip = tip.replace("\n</_tip>\n", "</_tip>\n") # special case tip 7
        #tip = tip.replace("\n<b>", "<b>") # special case tip 18
        tip = tip.replace("</_tip>\n\n", "")
        # NOTE(review): this replace is a no-op as written; upstream replaces
        # the '&quot;' entity with '"' -- the entity text appears to have been
        # mangled in this copy. Confirm against the original file.
        tip = tip.replace('"', '"')
        tips.write('char *s = N_("%s");\n' % tip)
    tips.close()
    print ('Wrote ../data/tips.xml.in.h')
    root.clear()
def HolidaysParse(filename, mark):
    """
    Experimental alternative to 'intltool-extract' for 'holidays.xml'.

    Writes a C-style header with one N_("...") entry per element carrying
    the *mark* attribute (typically '_name').
    """
    from xml.etree import ElementTree
    tree = ElementTree.parse(filename)
    root = tree.getroot()
    ellist = root.iter()
    '''
    <?xml version="1.0" encoding="utf-8"?>
    calendar>
      <country _name="Bulgaria">
        ..
      <country _name="Jewish Holidays">
        <date _name="Yom Kippur" value="> passover(y)" offset="172"/>
    char *s = N_("Bulgaria");
    char *s = N_("Jewish Holidays");
    char *s = N_("Yom Kippur");
    gramps.pot:
    msgid "Bulgaria"
    msgid "Jewish Holidays"
    msgid "Yom Kippur"
    '''
    holidays = open('../gramps/plugins/lib/holidays.xml.in.h', 'w')
    for key in ellist:
        if key.attrib.get(mark):
            line = key.attrib
            # mapping via the line dict (_name is the key)
            # (the dead statement 'string = line.items', which bound the
            # method without calling it and was never used, was removed)
            name = 'char *s = N_("%(_name)s");\n' % line
            holidays.write(name)
    holidays.close()
    print ('Wrote ../gramps/plugins/lib/holidays.xml.in.h')
    root.clear()
def XmlParse(filename, mark):
    """
    Experimental alternative to 'intltool-extract' for 'gramps.xml'.
    """
    from xml.etree import ElementTree
    tree = ElementTree.parse(filename)
    root = tree.getroot()
    '''
    <?xml version="1.0" encoding="UTF-8"?>
    <mime-info xmlns="http://www.freedesktop.org/standards/shared-mime-info">
        <mime-type type="application/x-gramps">
        <_comment>Gramps database</_comment>
            <glob pattern="*.grdb"/>
        </mime-type>
        <mime-type type="application/x-gedcom">
        <_comment>GEDCOM</_comment>
            <glob pattern="*.ged"/>
            <glob pattern="*.gedcom"/>
            <glob pattern="*.GED"/>
            <glob pattern="*.GEDCOM"/>
    msgid "Gramps database"
    msgid "GEDCOM"
    '''
    mime = open('../data/gramps.xml.in.h', 'w')
    for key in root.iter():
        # Element tags are namespaced by the shared-mime-info schema, so the
        # marker (e.g. '_comment') must be matched with the URI prefix.
        if key.tag == '{http://www.freedesktop.org/standards/shared-mime-info}%s' % mark:
            comment = 'char *s = N_("%s");\n' % key.text
            mime.write(comment)
    mime.close()
    print ('Wrote ../data/gramps.xml.in.h')
    root.clear()
def DesktopParse(filename):
    """
    Experimental alternative to 'intltool-extract' for 'gramps.desktop'.

    Extracts the value of every '_Key=value' entry into a C-style header
    of N_("...") strings.
    """
    '''
    [Desktop Entry]
    _Name=Gramps
    _GenericName=Genealogy System
    _X-GNOME-FullName=Gramps Genealogy System
    _Comment=Manage genealogical information,
             perform genealogical research and analysis
    msgid "Gramps"
    msgid "Genealogy System"
    msgid "Gramps Genealogy System"
    msgid ""
    "Manage genealogical information,
     perform genealogical research and analysis"
    '''
    desktop = open('../data/gramps.desktop.in.h', 'w')
    f = open(filename)
    lines = [entry.strip() for entry in f]
    f.close()
    for line in lines:
        # startswith() also guards against blank lines, which previously
        # raised IndexError on line[0]
        if line.startswith('_') and '=' in line:
            # split on the first '=' only; a value containing '=' was
            # previously written once per '=' sign in the line
            val = 'char *s = N_("%s");\n' % line.split('=', 1)[1]
            desktop.write(val)
    desktop.close()
    print ('Wrote ../data/gramps.desktop.in.h')
def KeyParse(filename, mark):
"""
Experimental alternative to 'intltool-extract' for 'gramps.keys'.
"""
'''
application/x-gramps-xml:
_description=Gramps XML database
default_action_type=application
short_list_application_ids=gramps
short_list_application_ids_for_novice_user_level=gramps
short_list_application_ids_for_intermediate_user_level=gramps
short_list_application_ids_for_advanced_user_level=gramps
category=Documents/Genealogy
icon-filename=/usr/share/gramps/gramps.png
open=gramps %f
application/x-gedcom:
_description=GEDCOM
default_action_type=application
msgid "Gramps XML database"
msgid "GEDCOM"
'''
key = open('../data/gramps.keys.in.h', 'w')
f = open(filename)
lines = [file for file in f]
f.close()
temp = []
for line in lines:
for i in range(len(line)):
if line[i:i+12] == mark:
temp.append(line.strip())
for t in temp:
for i in range(len(t)):
if t[i] == '=':
val = 'char *s = N_("%s");\n' % t[i+1:len(t)]
key.write(val)
key.close()
print ('Wrote ../data/gramps.keys.in.h')
def main():
    """
    The utility for handling translation stuff.
    What is need by Gramps, nothing more.
    """
    parser = ArgumentParser(
        description='This program generates a new template and '
        'also provides some common features.',
        )
    # NOTE(review): -t defaults to True, so the environment test runs on
    # every invocation — confirm that is intended.
    parser.add_argument("-t", "--test",
        action="store_true", dest="test", default=True,
        help="test if 'python' and 'gettext' are properly installed")
    parser.add_argument("-x", "--xml",
        action="store_true", dest="xml", default=False,
        help="extract messages from xml based file formats")
    parser.add_argument("-g", "--glade",
        action="store_true", dest="glade", default=False,
        help="extract messages from glade file format only")
    parser.add_argument("-c", "--clean",
        action="store_true", dest="clean", default=False,
        help="remove created files")
    parser.add_argument("-p", "--pot",
        action="store_true", dest="catalog", default=False,
        help="create a new catalog")

    # Maintenance of existing lang.po files (merge against the current
    # template, or sanity-check them).
    update = parser.add_argument_group('Update', 'Maintenance around translations')
    # need at least one argument (sv.po, de.po, etc ...)
    # lang.po files maintenance
    update.add_argument("-m", dest="merge",
        choices=LANG,
        help="merge lang.po files with last catalog")
    update.add_argument("-k", dest="check",
        choices=LANG,
        help="check lang.po files")

    # testing stage
    trans = parser.add_argument_group('Translation', 'Display content of translations file')
    # need one argument (eg, de.po); choices are the .po files present in
    # the current directory.
    trans.add_argument("-u", dest="untranslated",
        choices=[file for file in os.listdir('.') if file.endswith('.po')],
        help="list untranslated messages")
    trans.add_argument("-f", dest="fuzzy",
        choices=[file for file in os.listdir('.') if file.endswith('.po')],
        help="list fuzzy messages")

    args = parser.parse_args()
    namespace, extra = parser.parse_known_args()
    # NOTE(review): 'namespace' and 'extra' are never used below.

    if args.test:
        tests()
    if args.xml:
        extract_xml()
    if args.glade:
        create_filesfile()
        extract_glade()
        if os.path.isfile('tmpfiles'):
            os.unlink('tmpfiles')
    if args.catalog:
        retrieve()
    if args.clean:
        clean()
    if args.merge:
        #retrieve() windows os?
        # NOTE(review): the merge/check/untranslated/fuzzy paths read raw
        # sys.argv instead of the parsed 'args' values; they assume the
        # language arguments start at sys.argv[2] — verify.
        if sys.argv[2:] == ['all']:
            sys.argv[2:] = LANG
        merge(sys.argv[2:])
    if args.check:
        #retrieve() windows os?
        if sys.argv[2:] == ['all']:
            sys.argv[2:] = LANG
        check(sys.argv[2:])
    if args.untranslated:
        untranslated(sys.argv[2:])
    if args.fuzzy:
        fuzzy(sys.argv[2:])
def create_filesfile():
    """
    Create a file with all files that we should translate.
    These are all python files not in POTFILES.skip added with those in
    POTFILES.in
    """
    dir = os.getcwd()
    topdir = os.path.normpath(os.path.join(dir, '..', 'gramps'))
    lentopdir = len(topdir)

    # Files explicitly listed for translation; keys are '../'-prefixed
    # relative paths, mapped to None (the dict is used as an ordered set).
    f = open('POTFILES.in')
    infiles = dict(['../' + file.strip(), None] for file in f if file.strip()
                   and not file[0]=='#')
    f.close()

    # Files explicitly excluded from translation.
    f = open('POTFILES.skip')
    notinfiles = dict(['../' + file.strip(), None] for file in f if file
                      and not file[0]=='#')
    f.close()

    for (dirpath, dirnames, filenames) in os.walk(topdir):
        root, subdir = os.path.split(dirpath)
        if subdir.startswith("."):
            #don't continue in this dir
            dirnames[:] = []
            continue
        for dirname in dirnames:
            # Skip hidden and system directories:
            # NOTE(review): removing from 'dirnames' while iterating it can
            # skip the entry that follows each removed one — confirm this
            # is acceptable here.
            if dirname.startswith(".") or dirname in ["po", "locale"]:
                dirnames.remove(dirname)
        #add the files which are python or glade files
        # if the directory does not exist or is a link, do nothing
        if not os.path.isdir(dirpath) or os.path.islink(dirpath):
            continue
        for filename in os.listdir(dirpath):
            name = os.path.split(filename)[1]
            if name.endswith('.py') or name.endswith('.glade'):
                full_filename = os.path.join(dirpath, filename)
                #Skip the file if in POTFILES.skip
                # NOTE(review): this condition looks inconsistent with the
                # comment above — the file is ADDED when its topdir-relative
                # path is a key of 'notinfiles' (whose keys carry a '../'
                # prefix, so the test may never match).  Verify against
                # upstream intent before changing.
                if full_filename[lentopdir:] in notinfiles:
                    infiles['../gramps' + full_filename[lentopdir:]] = None
    #now we write out all the files in form ../gramps/filename
    f = open('tmpfiles', 'w')
    for file in sorted(infiles.keys()):
        f.write(file)
        f.write('\n')
    f.close()
def listing(name, extensionlist):
    """
    List files according to extensions.
    Parsing from a textual file (gramps) is faster and easy for maintenance.
    Like POTFILES.in and POTFILES.skip

    Reads the 'tmpfiles' listing, keeps non-comment entries whose name
    ends with one of *extensionlist*, and writes them to *name*.
    """
    with open('tmpfiles') as source:
        entries = [raw.strip() for raw in source if raw and not raw[0] == '#']
    suffixes = tuple(extensionlist)
    out = open(name, 'w')
    for entry in entries:
        if entry.endswith(suffixes):
            out.write(entry + '\n')
    out.close()
def headers():
    """
    Look at existing C file format headers.
    Generated by 'intltool-extract' but want to get rid of this
    dependency (perl, just a set of tools).

    Returns the subset of known generated *.in.h header paths that
    currently exist on disk, in a fixed order.
    """
    # The original repeated the same isfile/append pair five times; a
    # data-driven loop removes the copy-paste.
    candidates = (
        '../data/tips.xml.in.h',
        '../gramps/plugins/lib/holidays.xml.in.h',
        '../data/gramps.xml.in.h',
        '../data/gramps.desktop.in.h',
        '../data/gramps.keys.in.h',
    )
    return [path for path in candidates if os.path.isfile(path)]
def extract_xml():
    """
    Extract translation strings from XML based, keys, mime and desktop
    files. Own XML files parsing and custom translation marks.
    """
    # Each parser writes a C-format *.in.h header next to its input file;
    # the second argument, where present, is the translation mark searched
    # for in that file format.
    HolidaysParse('../gramps/plugins/lib/holidays.xml.in', '_name')
    TipsParse('../data/tips.xml.in', '_tip')
    XmlParse('../data/gramps.xml.in', '_comment')
    DesktopParse('../data/gramps.desktop.in')
    KeyParse('../data/gramps.keys.in', '_description')
def create_template():
    """
    Create a new, empty 'gramps.pot' template file.

    Only called when the template does not already exist; opening in
    write mode is enough to create it.
    """
    with open('gramps.pot', 'w'):
        pass
def extract_glade():
    """
    Extract messages from a temp file with all .glade

    Ensures the template exists, lists the .glade sources into
    'glade.txt', then joins their messages into 'gramps.pot'.
    """
    if not os.path.isfile('gramps.pot'):
        create_template()
    listing('glade.txt', ['.glade'])
    command = ('%(xgettext)s --add-comments -j -L Glade '
               '--from-code=UTF-8 -o gramps.pot --files-from=glade.txt'
               % {'xgettext': xgettextCmd})
    os.system(command)
def retrieve():
    """
    Extract messages from all files used by Gramps (python, glade, xml)
    """
    # XML/keys/mime/desktop sources first; these produce the *.in.h
    # C-format headers consumed at the end of this function.
    extract_xml()
    if not os.path.isfile('gramps.pot'):
        create_template()
    create_filesfile()
    # Python sources listed in 'tmpfiles' -> 'python.txt'.
    listing('python.txt', ['.py', '.py.in'])
    # -j joins into the existing gramps.pot rather than overwriting it.
    os.system('''%(xgettext)s -j --directory=./ -d gramps '''
              '''-L Python -o gramps.pot --files-from=python.txt '''
              '''--keyword=_ --keyword=ngettext '''
              '''--keyword=sgettext --from-code=UTF-8''' % {'xgettext': xgettextCmd}
              )
    extract_glade()
    # C format header (.h extension) generated by extract_xml() above.
    for h in headers():
        print ('xgettext for %s' % h)
        os.system('''%(xgettext)s --add-comments -j -o gramps.pot '''
                  '''--keyword=N_ --from-code=UTF-8 %(head)s'''
                  % {'xgettext': xgettextCmd, 'head': h}
                  )
    # Remove the intermediate headers and listings once merged.
    clean()
def clean():
    """
    Remove created files (C format headers, temp listings)
    """
    # Generated C-format headers.
    for h in headers():
        if os.path.isfile(h):
            os.unlink(h)
            print ('Remove %(head)s' % {'head': h})
    # Temporary listing files; the original repeated this block three
    # times verbatim.
    for temp in ('python.txt', 'glade.txt', 'tmpfiles'):
        if os.path.isfile(temp):
            os.unlink(temp)
            print ("Remove '%s'" % temp)
def merge(args):
    """
    Merge messages with 'gramps.pot'

    For each language code given (except the pseudo-entry 'all'),
    runs msgmerge and leaves the result in 'updated_<lang>'.
    """
    for lang in args:
        if lang == 'all':
            continue
        print ('Merge %(lang)s with current template' % {'lang': lang})
        os.system('%(msgmerge)s --no-wrap %(lang)s gramps.pot -o updated_%(lang)s'
                  % {'msgmerge': msgmergeCmd, 'lang': lang})
        print ("Updated file: 'updated_%(lang)s'." % {'lang': lang})
def check(args):
    """
    Check the translation file

    Runs the local 'check_po' script (report goes to '<lang>.txt') and
    msgfmt's consistency check for each given lang.po file.
    """
    for po in args:
        if po == 'all':
            continue
        print ("Checked file: '%(lang.po)s'. See '%(txt)s.txt'." \
               % {'lang.po': po, 'txt': po[:-3]})
        os.system('%(python)s ./check_po -s %(lang.po)s > %(lang)s.txt'
                  % {'python': pythonCmd, 'lang.po': po, 'lang': po[:-3]})
        os.system('%(msgfmt)s -c -v %(lang.po)s'
                  % {'msgfmt': msgfmtCmd, 'lang.po': po})
def untranslated(arg):
    """
    List untranslated messages

    *arg* is the remaining command-line argument list; only the first
    entry (a lang.po filename) is used.
    """
    command = '%(msgattrib)s --untranslated %(lang.po)s' % {
        'msgattrib': msgattribCmd, 'lang.po': arg[0]}
    os.system(command)
def fuzzy(arg):
    """
    List fuzzy messages

    *arg* is the remaining command-line argument list; only the first
    entry (a lang.po filename) is used.
    """
    command = '%(msgattrib)s --only-fuzzy --no-obsolete %(lang.po)s' % {
        'msgattrib': msgattribCmd, 'lang.po': arg[0]}
    os.system(command)


if __name__ == "__main__":
    main()
|
gpl-2.0
|
40223125/2015cd_midterm
|
static/Brython3.1.0-20150301-090019/Lib/unittest/signals.py
|
1016
|
2403
|
import signal
import weakref
from functools import wraps
__unittest = True
class _InterruptHandler(object):
def __init__(self, default_handler):
self.called = False
self.original_handler = default_handler
if isinstance(default_handler, int):
if default_handler == signal.SIG_DFL:
# Pretend it's signal.default_int_handler instead.
default_handler = signal.default_int_handler
elif default_handler == signal.SIG_IGN:
# Not quite the same thing as SIG_IGN, but the closest we
# can make it: do nothing.
def default_handler(unused_signum, unused_frame):
pass
else:
raise TypeError("expected SIGINT signal handler to be "
"signal.SIG_IGN, signal.SIG_DFL, or a "
"callable object")
self.default_handler = default_handler
def __call__(self, signum, frame):
installed_handler = signal.getsignal(signal.SIGINT)
if installed_handler is not self:
# if we aren't the installed handler, then delegate immediately
# to the default handler
self.default_handler(signum, frame)
if self.called:
self.default_handler(signum, frame)
self.called = True
for result in _results.keys():
result.stop()
# Registered TestResult objects, mapped to a dummy value.  Weak keys let a
# result be garbage collected without an explicit removeResult() call.
_results = weakref.WeakKeyDictionary()


def registerResult(result):
    """Register *result* so a SIGINT will invoke its stop() method."""
    _results[result] = 1


def removeResult(result):
    """Forget *result*; return True if it was actually registered."""
    return bool(_results.pop(result, None))
# The process-wide handler instance; stays None until installHandler().
_interrupt_handler = None


def installHandler():
    """Install the unittest SIGINT handler, chaining to the previous one.

    Idempotent: a second call while the handler is installed does nothing.
    """
    global _interrupt_handler
    if _interrupt_handler is None:
        previous = signal.getsignal(signal.SIGINT)
        _interrupt_handler = _InterruptHandler(previous)
        signal.signal(signal.SIGINT, _interrupt_handler)
def removeHandler(method=None):
    """Restore the original SIGINT handler.

    Called with no argument, restores it immediately.  Used as a
    decorator, returns a wrapper that uninstalls the unittest handler
    for the duration of the decorated call and reinstates whatever
    handler was active beforehand.
    """
    if method is None:
        # Plain call: put the original handler back (if one was saved).
        global _interrupt_handler
        if _interrupt_handler is not None:
            signal.signal(signal.SIGINT, _interrupt_handler.original_handler)
        return

    # Decorator usage.
    @wraps(method)
    def inner(*args, **kwargs):
        initial = signal.getsignal(signal.SIGINT)
        removeHandler()
        try:
            return method(*args, **kwargs)
        finally:
            signal.signal(signal.SIGINT, initial)
    return inner
|
gpl-3.0
|
SnowDroid/kernel_lge_hammerhead
|
arch/ia64/scripts/unwcheck.py
|
13143
|
1714
|
#!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# NOTE: this script is Python 2 only (print statements, long()/0L below).
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# Allow overriding the readelf binary, e.g. for cross toolchains.
readelf = os.getenv("READELF", "readelf")

# Matches a function header from 'readelf -u',
# e.g. "<func_name>: [0x40000-0x40080]".
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches a region-length annotation, e.g. "... rlen=12".
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    # Verify that the unwind-region lengths reported for a function sum to
    # its instruction-slot count; report a mismatch as an error.
    # Reads the module-level 'start'/'end' globals (set by the parsing
    # loop below) and bumps the global error counter.
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        # Fall back to the address range when the function has no name.
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
# Running totals, updated while parsing the 'readelf -u' output.
num_funcs = 0
num_errors = 0

# State of the function currently being accumulated.
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # New function header: validate the previous function first.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # Each 16-byte ia64 bundle holds 3 instruction slots.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Flush the final function.
check_func(func, slots, rlen_sum)

if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
|
gpl-2.0
|
mogoweb/chromium-crosswalk
|
native_client_sdk/src/build_tools/build_version.py
|
65
|
1797
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Small utility library of python functions used during SDK building.
"""
import os
import sys
# pylint: disable=E0602
# Reuse last change utility code.
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = os.path.dirname(os.path.dirname(os.path.dirname(SCRIPT_DIR)))
# Make src/build/util importable so 'lastchange' below resolves.
sys.path.append(os.path.join(SRC_DIR, 'build/util'))
import lastchange
# Location of chrome's version file.
VERSION_PATH = os.path.join(SRC_DIR, 'chrome', 'VERSION')
def ChromeVersion():
    '''Extract chrome version from src/chrome/VERSION + svn.

    Returns:
      Chrome version string or trunk + svn rev.
    '''
    info = lastchange.FetchVersionInfo(None)
    if not info.url.startswith('/trunk/'):
        return ChromeVersionNoTrunk()
    return 'trunk.%s' % info.revision
def ChromeVersionNoTrunk():
    '''Extract the chrome version from src/chrome/VERSION.
    Ignore whether this is a trunk build.

    Returns:
      Chrome version string.
    '''
    # Executes the VERSION file (KEY=value lines, which happen to be valid
    # Python) to define MAJOR/MINOR/BUILD/PATCH in this scope.
    # NOTE(review): bare exec() into function locals only works reliably on
    # Python 2; on Python 3 these names would not become visible here.
    exec(open(VERSION_PATH).read())
    return '%s.%s.%s.%s' % (MAJOR, MINOR, BUILD, PATCH)
def ChromeMajorVersion():
    '''Extract chrome major version from src/chrome/VERSION.

    Returns:
      Chrome major version.
    '''
    # Same exec()-based trick as ChromeVersionNoTrunk(); see the
    # Python-2-only caveat there.
    exec(open(VERSION_PATH, 'r').read())
    return str(MAJOR)
def ChromeRevision():
    '''Extract chrome revision from svn.

    Returns:
      The Chrome revision as a string. e.g. "12345"
    '''
    info = lastchange.FetchVersionInfo(None)
    return info.revision
def NaClRevision():
    '''Extract NaCl revision from svn.

    Returns:
      The NaCl revision as a string. e.g. "12345"
    '''
    nacl_dir = os.path.join(SRC_DIR, 'native_client')
    info = lastchange.FetchVersionInfo(None, nacl_dir)
    return info.revision
|
bsd-3-clause
|
maelnor/nova
|
nova/network/security_group/openstack_driver.py
|
5
|
1605
|
# Copyright 2013 Nicira, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from oslo.utils import importutils
# Configurable choice of security-group backend; 'nova' selects the
# legacy in-tree implementation.
security_group_opts = [
    cfg.StrOpt('security_group_api',
               default='nova',
               help='The full class name of the security API class'),
]

CONF = cfg.CONF
CONF.register_opts(security_group_opts)

# Import paths of the two bundled driver implementations.
NOVA_DRIVER = ('nova.api.openstack.compute.contrib.security_groups.'
               'NativeNovaSecurityGroupAPI')
NEUTRON_DRIVER = ('nova.api.openstack.compute.contrib.security_groups.'
                  'NativeNeutronSecurityGroupAPI')
def get_openstack_security_group_driver():
    """Instantiate the configured security-group API driver.

    'nova' and 'neutron'/'quantum' select the bundled drivers; any other
    value is treated as a full class path to import directly.
    """
    api = CONF.security_group_api.lower()
    if api == 'nova':
        driver = NOVA_DRIVER
    elif api in ('neutron', 'quantum'):
        driver = NEUTRON_DRIVER
    else:
        driver = CONF.security_group_api
    return importutils.import_object(driver)
def is_neutron_security_groups():
    """Return True when the configured security-group API is neutron."""
    api = CONF.security_group_api.lower()
    return api in ('neutron', 'quantum')
|
apache-2.0
|
JuliBakagianni/CEF-ELRC
|
lib/python2.7/site-packages/django/contrib/gis/maps/google/zoom.py
|
327
|
6628
|
from django.contrib.gis.geos import GEOSGeometry, LinearRing, Polygon, Point
from django.contrib.gis.maps.google.gmap import GoogleMapException
from math import pi, sin, cos, log, exp, atan
# Constants used for degree to radian conversion, and vice-versa.
DTOR = pi / 180.
RTOD = 180. / pi
class GoogleZoom(object):
    """
    GoogleZoom is a utility for performing operations related to the zoom
    levels on Google Maps.

    This class is inspired by the OpenStreetMap Mapnik tile generation routine
    `generate_tiles.py`, and the article "How Big Is the World" (Hack #16) in
    "Google Maps Hacks" by Rich Gibson and Schuyler Erle.

    `generate_tiles.py` may be found at:
      http://trac.openstreetmap.org/browser/applications/rendering/mapnik/generate_tiles.py

    "Google Maps Hacks" may be found at http://safari.oreilly.com/0596101619
    """

    def __init__(self, num_zoom=19, tilesize=256):
        "Initializes the Google Zoom object."
        # Google's tilesize is 256x256, square tiles are assumed.
        self._tilesize = tilesize

        # The number of zoom levels
        self._nzoom = num_zoom

        # Initializing arrays to hold the parameters for each one of the
        # zoom levels.
        self._degpp = []  # Degrees per pixel
        self._radpp = []  # Radians per pixel
        self._npix = []   # 1/2 the number of pixels for a tile at the given zoom level

        # Incrementing through the zoom levels and populating the parameter
        # arrays.  (`xrange` replaced with `range` and `/ 2` with `// 2`
        # for Python 2 + 3 compatibility; behavior under Python 2 is
        # unchanged, since `range` and integer floor division behave
        # identically there.)
        z = tilesize  # The number of pixels per zoom level.
        for i in range(num_zoom):
            # Getting the degrees and radians per pixel, and 1/2 the number
            # of pixels for every zoom level.
            self._degpp.append(z / 360.)      # degrees per pixel
            self._radpp.append(z / (2 * pi))  # radians per pixel
            self._npix.append(z // 2)         # number of pixels to center of tile

            # Multiplying `z` by 2 for the next iteration.
            z *= 2

    def __len__(self):
        "Returns the number of zoom levels."
        return self._nzoom

    def get_lon_lat(self, lonlat):
        "Unpacks longitude, latitude from GEOS Points and 2-tuples."
        if isinstance(lonlat, Point):
            lon, lat = lonlat.coords
        else:
            lon, lat = lonlat
        return lon, lat

    def lonlat_to_pixel(self, lonlat, zoom):
        "Converts a longitude, latitude coordinate pair for the given zoom level."
        # Setting up, unpacking the longitude, latitude values and getting the
        # number of pixels for the given zoom level.
        lon, lat = self.get_lon_lat(lonlat)
        npix = self._npix[zoom]

        # Calculating the pixel x coordinate by multiplying the longitude
        # value with the number of degrees/pixel at the given zoom level.
        px_x = round(npix + (lon * self._degpp[zoom]))

        # Creating the factor, and ensuring that 1 or -1 is not passed in as the
        # base to the logarithm.  Here's why:
        #  if fac = -1, we'll get log(0) which is undefined;
        #  if fac = 1, our logarithm base will be divided by 0, also undefined.
        fac = min(max(sin(DTOR * lat), -0.9999), 0.9999)

        # Calculating the pixel y coordinate.
        px_y = round(npix + (0.5 * log((1 + fac)/(1 - fac)) * (-1.0 * self._radpp[zoom])))

        # Returning the pixel x, y to the caller of the function.
        return (px_x, px_y)

    def pixel_to_lonlat(self, px, zoom):
        "Converts a pixel to a longitude, latitude pair at the given zoom level."
        if len(px) != 2:
            raise TypeError('Pixel should be a sequence of two elements.')

        # Getting the number of pixels for the given zoom level.
        npix = self._npix[zoom]

        # Calculating the longitude value, using the degrees per pixel.
        lon = (px[0] - npix) / self._degpp[zoom]

        # Calculating the latitude value.
        lat = RTOD * ( 2 * atan(exp((px[1] - npix)/ (-1.0 * self._radpp[zoom]))) - 0.5 * pi)

        # Returning the longitude, latitude coordinate pair.
        return (lon, lat)

    def tile(self, lonlat, zoom):
        """
        Returns a Polygon corresponding to the region represented by a fictional
        Google Tile for the given longitude/latitude pair and zoom level. This
        tile is used to determine the size of a tile at the given point.
        """
        # The given lonlat is the center of the tile.  (`// 2` keeps the
        # delta an integer on both Python 2 and 3.)
        delta = self._tilesize // 2

        # Getting the pixel coordinates corresponding to the
        # the longitude/latitude.
        px = self.lonlat_to_pixel(lonlat, zoom)

        # Getting the lower-left and upper-right lat/lon coordinates
        # for the bounding box of the tile.
        ll = self.pixel_to_lonlat((px[0]-delta, px[1]-delta), zoom)
        ur = self.pixel_to_lonlat((px[0]+delta, px[1]+delta), zoom)

        # Constructing the Polygon, representing the tile and returning.
        return Polygon(LinearRing(ll, (ll[0], ur[1]), ur, (ur[0], ll[1]), ll), srid=4326)

    def get_zoom(self, geom):
        "Returns the optimal Zoom level for the given geometry."
        # Checking the input type.
        if not isinstance(geom, GEOSGeometry) or geom.srid != 4326:
            raise TypeError('get_zoom() expects a GEOS Geometry with an SRID of 4326.')

        # Getting the envelope for the geometry, and its associated width, height
        # and centroid.
        env = geom.envelope
        env_w, env_h = self.get_width_height(env.extent)
        center = env.centroid

        for z in range(self._nzoom):
            # Getting the tile at the zoom level.
            tile_w, tile_h = self.get_width_height(self.tile(center, z).extent)

            # When we span more than one tile, this is an approximately good
            # zoom level.
            if (env_w > tile_w) or (env_h > tile_h):
                if z == 0:
                    raise GoogleMapException('Geometry width and height should not exceed that of the Earth.')
                return z-1

        # Otherwise, we've zoomed in to the max.
        return self._nzoom-1

    def get_width_height(self, extent):
        """
        Returns the width and height for the given extent.
        """
        # Getting the lower-left, upper-left, and upper-right
        # coordinates from the extent.
        ll = Point(extent[:2])
        ul = Point(extent[0], extent[3])
        ur = Point(extent[2:])
        # Calculating the width and height.
        height = ll.distance(ul)
        width = ul.distance(ur)
        return width, height
|
bsd-3-clause
|
trishnaguha/ansible
|
lib/ansible/modules/network/vyos/vyos_config.py
|
4
|
9273
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: vyos_config
version_added: "2.2"
author: "Nathaniel Case (@qalthos)"
short_description: Manage VyOS configuration on remote device
description:
- This module provides configuration file management of VyOS
devices. It provides arguments for managing both the
configuration file and state of the active configuration. All
configuration statements are based on `set` and `delete` commands
in the device configuration.
extends_documentation_fragment: vyos
notes:
- Tested against VYOS 1.1.7
- Abbreviated commands are NOT idempotent, see
L(Network FAQ,../network/user_guide/faq.html#why-do-the-config-modules-always-return-changed-true-with-abbreviated-commands).
options:
lines:
description:
- The ordered set of configuration lines to be managed and
compared with the existing configuration on the remote
device.
src:
description:
- The C(src) argument specifies the path to the source config
file to load. The source config file can either be in
bracket format or set format. The source file can include
Jinja2 template variables.
match:
description:
- The C(match) argument controls the method used to match
against the current active configuration. By default, the
desired config is matched against the active config and the
deltas are loaded. If the C(match) argument is set to C(none)
the active configuration is ignored and the configuration is
always loaded.
default: line
choices: ['line', 'none']
backup:
description:
- The C(backup) argument will backup the current devices active
configuration to the Ansible control host prior to making any
changes. The backup file will be located in the backup folder
in the playbook root directory or role root directory, if
playbook is part of an ansible role. If the directory does not
exist, it is created.
type: bool
default: 'no'
comment:
description:
- Allows a commit description to be specified to be included
when the configuration is committed. If the configuration is
not changed or committed, this argument is ignored.
default: 'configured by vyos_config'
config:
description:
- The C(config) argument specifies the base configuration to use
to compare against the desired configuration. If this value
is not specified, the module will automatically retrieve the
current active configuration from the remote device.
save:
description:
- The C(save) argument controls whether or not changes made
to the active configuration are saved to disk. This is
independent of committing the config. When set to True, the
active configuration is saved.
type: bool
default: 'no'
"""
EXAMPLES = """
- name: configure the remote device
vyos_config:
lines:
- set system host-name {{ inventory_hostname }}
- set service lldp
- delete service dhcp-server
- name: backup and load from file
vyos_config:
src: vyos.cfg
backup: yes
- name: render a Jinja2 template onto the VyOS router
vyos_config:
src: vyos_template.j2
- name: for idempotency, use full-form commands
vyos_config:
lines:
# - set int eth eth2 description 'OUTSIDE'
- set interface ethernet eth2 description 'OUTSIDE'
"""
RETURN = """
commands:
description: The list of configuration commands sent to the device
returned: always
type: list
sample: ['...', '...']
filtered:
description: The list of configuration commands removed to avoid a load failure
returned: always
type: list
sample: ['...', '...']
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: str
sample: /playbooks/ansible/backup/vyos_config.2016-07-16@22:28:34
"""
import re
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import ConnectionError
from ansible.module_utils.network.vyos.vyos import load_config, get_config, run_commands
from ansible.module_utils.network.vyos.vyos import vyos_argument_spec, get_connection
# Commit comment used when the caller does not supply one.
DEFAULT_COMMENT = 'configured by vyos_config'

# Lines matching any of these patterns are stripped from the computed diff
# before loading/reporting, so secrets (encrypted passwords) never appear
# in logs or module results.
CONFIG_FILTERS = [
    re.compile(r'set system login user \S+ authentication encrypted-password')
]
def get_candidate(module):
    """Return the desired configuration as one newline-joined string.

    Prefers the 'src' file contents (normalized through format_commands)
    over the 'lines' parameter; the two are mutually exclusive.
    """
    contents = module.params['src'] or module.params['lines']
    if module.params['src']:
        contents = format_commands(contents.splitlines())
    return '\n'.join(contents)
def format_commands(commands):
    """Drop blank and whitespace-only lines, keeping the rest verbatim."""
    return [command for command in commands if command.strip()]
def diff_config(commands, config):
    """Return the subset of *commands* that would change *config*.

    Single quotes are stripped before comparing.  A 'set' line is kept
    when it is absent from the running config; a 'delete' line is kept
    when the config is empty or still contains a matching 'set' entry.
    Raises ValueError for any other command.
    """
    running = [str(entry).replace("'", '') for entry in config.splitlines()]

    updates = []
    seen = set()
    for line in commands:
        stripped = str(line).replace("'", '')
        if stripped.startswith('set'):
            if stripped not in running:
                updates.append(line)
        elif stripped.startswith('delete'):
            if not running:
                updates.append(line)
            else:
                # A delete is relevant when the running config still has a
                # corresponding 'set' entry under the same path.
                as_set = re.sub(r'delete', 'set', stripped)
                for entry in running:
                    if entry.startswith(as_set) and line not in seen:
                        updates.append(line)
                        seen.add(line)
        else:
            raise ValueError('line must start with either `set` or `delete`')

    return list(updates)
def sanitize_config(config, result):
    """Strip sensitive lines from *config* in place.

    Lines matching any pattern in CONFIG_FILTERS are removed from the
    list and recorded under result['filtered'] so callers can warn.
    """
    result['filtered'] = list()
    doomed = list()
    for regex in CONFIG_FILTERS:
        for position, line in enumerate(list(config)):
            if regex.search(line):
                result['filtered'].append(line)
                doomed.append(position)
    # Delete from the highest index down so earlier indexes stay valid.
    for position in sorted(doomed, reverse=True):
        del config[position]
def run(module, result):
    # get the current active config from the node or passed in via
    # the config param
    config = module.params['config'] or get_config(module)

    # create the candidate config object from the arguments
    candidate = get_candidate(module)

    # create loadable config that includes only the configuration updates
    connection = get_connection(module)
    try:
        response = connection.get_diff(candidate=candidate, running=config, diff_match=module.params['match'])
    except ConnectionError as exc:
        module.fail_json(msg=to_text(exc, errors='surrogate_then_replace'))

    commands = response.get('config_diff')
    # Drop sensitive lines (encrypted passwords) before loading/reporting;
    # removed lines are recorded under result['filtered'].
    sanitize_config(commands, result)

    result['commands'] = commands

    # Check mode computes the diff but never commits it.
    commit = not module.check_mode
    comment = module.params['comment']

    diff = None
    if commands:
        diff = load_config(module, commands, commit=commit, comment=comment)

        if result.get('filtered'):
            result['warnings'].append('Some configuration commands were '
                                      'removed, please see the filtered key')

        result['changed'] = True

    # Only expose the prepared diff when Ansible runs with --diff.
    if module._diff:
        result['diff'] = {'prepared': diff}
def main():
    # Module argument schema; mirrors the DOCUMENTATION block above.
    argument_spec = dict(
        src=dict(type='path'),
        lines=dict(type='list'),
        match=dict(default='line', choices=['line', 'none']),
        comment=dict(default=DEFAULT_COMMENT),
        config=dict(),
        backup=dict(type='bool', default=False),
        save=dict(type='bool', default=False),
    )

    argument_spec.update(vyos_argument_spec)

    # 'src' (a config file) and 'lines' (inline commands) are alternatives.
    mutually_exclusive = [('lines', 'src')]

    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=mutually_exclusive,
        supports_check_mode=True
    )

    warnings = list()

    result = dict(changed=False, warnings=warnings)

    # Capture the running config for the backup feature before any change.
    if module.params['backup']:
        result['__backup__'] = get_config(module=module)

    if any((module.params['src'], module.params['lines'])):
        run(module, result)

    # Persist the active config to disk when requested, but only when the
    # saved copy actually differs ('[edit]' alone means no difference).
    if module.params['save']:
        diff = run_commands(module, commands=['configure', 'compare saved'])[1]
        if diff != '[edit]':
            run_commands(module, commands=['save'])
            result['changed'] = True
        run_commands(module, commands=['exit'])

    module.exit_json(**result)


if __name__ == '__main__':
    main()
|
gpl-3.0
|
hhru/ansible
|
plugins/inventory/vagrant.py
|
72
|
3659
|
#!/usr/bin/env python
"""
Vagrant external inventory script. Automatically finds the IP of the booted vagrant vm(s), and
returns it under the host group 'vagrant'
Example Vagrant configuration using this script:
config.vm.provision :ansible do |ansible|
ansible.playbook = "./provision/your_playbook.yml"
ansible.inventory_file = "./provision/inventory/vagrant.py"
ansible.verbose = true
end
"""
# Copyright (C) 2013 Mark Mandel <mark@compoundtheory.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Thanks to the spacewalk.py inventory script for giving me the basic structure
# of this.
#
import sys
import subprocess
import re
import string
from optparse import OptionParser
try:
import json
except:
import simplejson as json
# Options
#------------------------------
# Standard Ansible dynamic-inventory interface: --list for the whole
# grouping, --host <name> for per-host variables.
parser = OptionParser(usage="%prog [options] --list | --host <machine>")
parser.add_option('--list', default=False, dest="list", action="store_true",
                  help="Produce a JSON consumable grouping of Vagrant servers for Ansible")
parser.add_option('--host', default=None, dest="host",
                  help="Generate additional host specific details for given host for Ansible")
(options, args) = parser.parse_args()
#
# helper functions
#
# get all the ssh configs for all boxes in an array of dictionaries.
def get_ssh_config():
    """Return a list of ssh-config dicts, one per running vagrant box."""
    return [get_a_ssh_config(box) for box in list_running_boxes()]
#list all the running boxes
def list_running_boxes():
    """Return the names of all vagrant boxes reported as 'running'."""
    status = subprocess.check_output(["vagrant", "status"]).split('\n')
    matches = (re.search("([^\s]+)[\s]+running \(.+", row) for row in status)
    return [found.group(1) for found in matches if found]
#get the ssh config for a single box
def get_a_ssh_config(box_name):
    """Gives back a map of all the machine's ssh configurations.

    Parses the `vagrant ssh-config <box>` output, which is a series of
    "Key value" lines (possibly indented).
    """
    output = subprocess.check_output(["vagrant", "ssh-config", box_name]).split('\n')
    config = {}
    for line in output:
        if line.strip() != '':
            matcher = re.search(r"( )?([a-zA-Z]+) (.*)", line)
            # Guard against lines that don't look like "Key value"; the old
            # code crashed with AttributeError on a None match.
            if matcher:
                config[matcher.group(2)] = matcher.group(3)
    return config
# List out servers that vagrant has running
#------------------------------
if options.list:
    ssh_config = get_ssh_config()
    hosts = {'vagrant': []}
    for data in ssh_config:
        hosts['vagrant'].append(data['HostName'])
    # print() works identically on Python 2 and 3 for a single argument;
    # the bare print statement was Python-2-only.
    print(json.dumps(hosts))
    # Exit 0: Ansible treats a non-zero exit from an inventory script as a
    # failure, so the old sys.exit(1) made successful runs look broken.
    sys.exit(0)
# Get out the host details
#------------------------------
elif options.host:
    result = {}
    ssh_config = get_ssh_config()
    # A list comprehension instead of filter(): on Python 3 filter() is lazy
    # and len() on it raised TypeError.
    details = [cfg for cfg in ssh_config if cfg['HostName'] == options.host]
    if len(details) > 0:
        # pass through the port, in case it's non standard.
        result = details[0]
        result['ansible_ssh_port'] = result['Port']
    print(json.dumps(result))
    sys.exit(0)
# Print out help
#------------------------------
else:
    parser.print_help()
    sys.exit(1)
|
gpl-3.0
|
BoriBori/cassava
|
cassava/utils.py
|
2
|
8894
|
import re
import collections
def is_ip(indicator):
    """Match *indicator* against the dotted-quad IPv4 pattern (None if no match)."""
    return re.compile(ip_regex).match(indicator)
def is_md5(indicator):
    """Match *indicator* against the 32-hex-char MD5 pattern (None if no match)."""
    return re.compile(md5_regex).match(indicator)
def is_sha1(indicator):
    """Match *indicator* against the 40-hex-char SHA-1 pattern (None if no match)."""
    return re.compile(sha1_regex).match(indicator)
def is_sha256(indicator):
    """Match *indicator* against the 64-hex-char SHA-256 pattern (None if no match).

    Note: this function was previously defined twice with identical bodies;
    the redundant duplicate has been removed.
    """
    return re.compile(sha256_regex).match(indicator)
def is_email(indicator):
    """Match *indicator* against the e-mail pattern (None if no match)."""
    return re.compile(email_regex).match(indicator)
def is_domain(indicator):
    """Match *indicator* against the domain-name pattern (None if no match)."""
    return re.compile(domain_regex).match(indicator)
def is_hash(indicator):
    """Return True if *indicator* parses as an MD5, SHA-1 or SHA-256 hash."""
    return bool(is_md5(indicator) or is_sha1(indicator) or is_sha256(indicator))
def hash_type(indicator):
    """Return the hash algorithm name ('sha256'/'sha1'/'md5') or False.

    Longest digest is tested first because the patterns are not end-anchored,
    so an SHA-256 string would otherwise also satisfy the MD5 pattern.
    """
    for algo_name, predicate in (('sha256', is_sha256),
                                 ('sha1', is_sha1),
                                 ('md5', is_md5)):
        if predicate(indicator):
            return algo_name
    return False
def indicator_type(indicator):
    """Classify *indicator* as a hash-type name, 'ip', 'email' or 'domain'.

    Returns None implicitly when nothing matches.
    """
    kind = hash_type(indicator)
    if kind:
        return kind
    if is_ip(indicator):
        return 'ip'
    if is_email(indicator):
        return 'email'
    if is_domain(indicator):
        return 'domain'
def flatten(d, parent_key='', sep='_'):
    """Flatten a nested mapping into a single-level dict.

    Nested keys are joined with *sep*, e.g. {'a': {'b': 1}} -> {'a_b': 1}.
    """
    # collections.MutableMapping was removed in Python 3.10; import from
    # collections.abc with a fallback for Python 2.
    try:
        from collections.abc import MutableMapping
    except ImportError:
        from collections import MutableMapping
    items = []
    for k, v in d.items():
        new_key = parent_key + sep + k if parent_key else k
        if isinstance(v, MutableMapping):
            items.extend(flatten(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))
    return dict(items)
def chunker(seq, size):
    """Yield successive *size*-length slices of *seq* (the last may be shorter).

    Uses range() instead of the Python-2-only xrange(); behavior is identical
    on both interpreters.
    """
    return (seq[pos:pos + size] for pos in range(0, len(seq), size))
"""
TLDs taken from http://data.iana.org/TLD/tlds-alpha-by-domain.txt
on 30 June 2015
Re-generate the list any time by running:
def get_updated_tlds():
r = requests.get('http://data.iana.org/TLD/tlds-alpha-by-domain.txt')
tlds = r.text.split('\n')[1:-1] # split on lines and strip header
return tlds
print '|'.join(sorted(get_updated_tlds(), key=lambda x: -len(x))).lower()
Note: The list must be sorted like this, or we might match shorter TLDs before
longer ones (e.g., .co will match before .com)
"""
TLDs = '''xn--vermgensberatung-pwb|xn--vermgensberater-ctb|xn--clchc0ea0b2g2a9gcd|xn--mgberp4a5d4ar|xn--xkc2dl3a5ee0h|xn--xkc2al3hye2a|sandvikcoromant|xn--i1b6b1a6a2e|xn--kcrx77d1x4a|xn--lgbbat1ad8j|xn--mgba3a4f16a|xn--mgbc0a9azcg|xn--nqv7fs00ema|cancerresearch|xn--6qq986b3xl|xn--b4w605ferd|xn--fiq228c5hs|xn--mgbaam7a8h|xn--mgbayh7gpa|xn--mgbbh1a71e|xn--mgbx4cd0ab|international|spreadbetting|xn--fpcrj9c3d|xn--fzc2c9e2c|xn--yfro4i67o|xn--ygbi2ammx|construction|scholarships|versicherung|xn--3e0b707e|xn--80adxhks|xn--80asehdb|xn--mgb9awbf|xn--mgbab2bd|xn--mgbpl2fh|xn--ngbc5azd|xn--ogbpf8fl|xn--qcka1pmc|accountants|barclaycard|blackfriday|bridgestone|contractors|engineering|enterprises|investments|motorcycles|photography|productions|williamhill|xn--1qqw23a|xn--3bst00m|xn--3ds443g|xn--45brj9c|xn--55qw42g|xn--6frz82g|xn--80ao21a|xn--czr694b|xn--d1acj3b|xn--estv75g|xn--fjq720a|xn--flw351e|xn--gecrj9c|xn--h2brj9c|xn--hxt814e|xn--imr513n|xn--j6w193g|xn--kprw13d|xn--kpry57d|xn--nyqy26a|xn--pgbs0dh|xn--q9jyb4c|xn--rhqv96g|xn--s9brj9c|xn--ses554g|xn--vuq861b|xn--xhq521b|xn--zfr164b|accountant|apartments|associates|bnpparibas|consulting|creditcard|cuisinella|eurovision|foundation|healthcare|immobilien|industries|management|properties|republican|restaurant|technology|telefonica|university|vistaprint|vlaanderen|xn--30rr7y|xn--45q11c|xn--4gbrim|xn--55qx5d|xn--80aswg|xn--90a3ac|xn--9et52u|xn--cg4bki|xn--czrs0t|xn--czru2d|xn--fiq64b|xn--fiqs8s|xn--fiqz9s|xn--io0a7i|xn--kput3i|xn--mxtq1m|xn--o3cw4h|xn--unup4y|xn--wgbh1c|xn--wgbl6a|xn--y9a3aq|accenture|allfinanz|amsterdam|aquarelle|barcelona|bloomberg|christmas|community|directory|education|equipment|financial|furniture|goldpoint|homedepot|institute|marketing|melbourne|microsoft|montblanc|solutions|vacations|xn--90ais|xn--c1avg|xn--d1alf|xn--j1amh|xn--l1acc|xn--nqv7f|xn--p1acf|xn--vhquv|yodobashi|airforce|attorney|barclays|bargains|boutique|bradesco|brussels|budapest|builders|business|capetown|catering|cleaning|clothing|commbank|comput
er|delivery|democrat|diamonds|discount|download|engineer|everbank|exchange|feedback|firmdale|flsmidth|football|graphics|holdings|infiniti|lighting|marriott|memorial|mortgage|movistar|partners|pharmacy|pictures|plumbing|property|redstone|saarland|services|software|supplies|training|ventures|xn--node|xn--p1ai|yokohama|abogado|academy|android|auction|bauhaus|brother|capital|caravan|careers|cartier|channel|college|cologne|company|cooking|corsica|country|coupons|courses|cricket|cruises|dentist|digital|domains|exposed|express|fashion|finance|fishing|fitness|flights|florist|flowers|forsale|frogans|gallery|genting|guitars|hamburg|hangout|hitachi|holiday|hosting|hoteles|hotmail|jewelry|kitchen|komatsu|lacaixa|lasalle|latrobe|leclerc|liaison|limited|markets|netbank|network|neustar|okinawa|organic|panerai|philips|realtor|recipes|rentals|reviews|samsung|sandvik|schmidt|schwarz|science|shiksha|shriram|singles|spiegel|starhub|statoil|support|surgery|systems|temasek|theater|tickets|toshiba|trading|website|wedding|whoswho|windows|youtube|zuerich|abbott|active|agency|airtel|alsace|bayern|berlin|bharti|broker|camera|career|casino|center|chrome|church|claims|clinic|coffee|condos|credit|dating|datsun|degree|dental|design|direct|doosan|durban|emerck|energy|estate|events|expert|futbol|garden|global|google|gratis|hermes|hiphop|hockey|insure|joburg|juegos|kaufen|lawyer|london|luxury|madrid|maison|market|monash|mormon|moscow|museum|nagoya|nissan|office|online|oracle|otsuka|photos|physio|piaget|pictet|quebec|racing|realty|reisen|repair|report|review|ryukyu|sakura|school|schule|soccer|social|studio|supply|suzuki|swatch|sydney|taipei|tattoo|tennis|tienda|travel|viajes|villas|vision|voting|voyage|walter|webcam|yachts|yandex|actor|adult|archi|audio|autos|azure|bible|bingo|black|boats|build|canon|cards|cheap|chloe|cisco|citic|click|cloud|coach|codes|crown|cymru|dabur|dance|deals|drive|earth|email|epson|faith|forex|forum|gifts|gives|glass|globo|gmail|green|gripe|guide|homes|honda|horse|house|irish
|jetzt|koeln|kyoto|lease|legal|loans|lotte|lotto|lupin|mango|media|miami|money|movie|nadex|nexus|ninja|omega|osaka|paris|parts|party|photo|pizza|place|poker|praxi|press|rehab|reise|ricoh|rocks|rodeo|sener|shoes|skype|solar|space|study|style|sucks|swiss|tatar|tires|tirol|today|tokyo|tools|toray|tours|trade|trust|vegas|video|vista|vodka|wales|watch|works|world|xerox|aero|army|arpa|asia|auto|band|bank|bbva|beer|best|bike|bing|blue|bond|buzz|cafe|camp|care|cars|casa|cash|cern|chat|city|club|cool|coop|cyou|date|dclk|desi|diet|docs|doha|dvag|erni|fail|fans|farm|film|fish|fund|game|gbiz|gent|ggee|gift|gold|golf|goog|guge|guru|haus|help|here|host|icbc|immo|info|java|jobs|jprs|kddi|kiwi|kred|land|lgbt|lidl|life|limo|link|live|loan|love|ltda|luxe|maif|meet|meme|menu|mini|mobi|moda|mtpc|name|navy|news|nico|page|pics|pink|play|plus|pohl|porn|post|prod|prof|qpon|reit|rent|rest|rich|rsvp|ruhr|sale|sarl|saxo|scor|scot|seat|sexy|show|site|sncf|sohu|sony|surf|taxi|team|tech|tips|town|toys|vote|voto|wang|weir|wien|wiki|work|xbox|yoga|zone|abb|ads|aeg|afl|aig|app|axa|bar|bbc|bcn|bid|bio|biz|bmw|bnl|boo|bzh|cab|cal|cat|cba|cbn|ceo|cfa|cfd|com|crs|dad|day|dev|dnp|dog|eat|edu|esq|eus|fan|fit|fly|foo|frl|fyi|gal|gdn|gle|gmo|gmx|goo|gop|gov|hiv|how|ibm|icu|ifm|ing|ink|int|iwc|jcb|jlc|jll|kim|krd|lat|law|lds|lol|mba|men|mil|mma|moe|mov|mtn|nec|net|new|ngo|nhk|nra|nrw|ntt|nyc|one|ong|onl|ooo|org|ovh|pro|pub|red|ren|rio|rip|run|sap|sca|scb|sew|sex|ski|sky|soy|tax|tel|thd|top|tui|uno|uol|vet|wed|win|wme|wtc|wtf|xin|xxx|xyz|zip|ac|ad|ae|af|ag|ai|al|am|an|ao|aq|ar|as|at|au|aw|ax|az|ba|bb|bd|be|bf|bg|bh|bi|bj|bm|bn|bo|br|bs|bt|bv|bw|by|bz|ca|cc|cd|cf|cg|ch|ci|ck|cl|cm|cn|co|cr|cu|cv|cw|cx|cy|cz|de|dj|dk|dm|do|dz|ec|ee|eg|er|es|et|eu|fi|fj|fk|fm|fo|fr|ga|gb|gd|ge|gf|gg|gh|gi|gl|gm|gn|gp|gq|gr|gs|gt|gu|gw|gy|hk|hm|hn|hr|ht|hu|id|ie|il|im|in|io|iq|ir|is|it|je|jm|jo|jp|ke|kg|kh|ki|km|kn|kp|kr|kw|ky|kz|la|lb|lc|li|lk|lr|ls|lt|lu|lv|ly|ma|mc|md|me|mg|mh|mk|ml|mm|mn|mo|mp|mq|mr|ms|mt|mu|mv|mw|mx|my|mz|n
a|nc|ne|nf|ng|ni|nl|no|np|nr|nu|nz|om|pa|pe|pf|pg|ph|pk|pl|pm|pn|pr|ps|pt|pw|py|qa|re|ro|rs|ru|rw|sa|sb|sc|sd|se|sg|sh|si|sj|sk|sl|sm|sn|so|sr|st|su|sv|sx|sy|sz|tc|td|tf|tg|th|tj|tk|tl|tm|tn|to|tr|tt|tv|tw|tz|ua|ug|uk|us|uy|uz|va|vc|ve|vg|vi|vn|vu|wf|ws|ye|yt|za|zm|zw'''
# Indicator-matching patterns.  All are used via re.match(), which anchors
# only at the START of the string; none of them carries a trailing '$', so
# e.g. the md5 pattern also matches the first 32 chars of a longer hex
# string -- hash_type() compensates by testing the longest digest first.
ip_regex = '\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}'
md5_regex = '([a-f]|[0-9]){32}'
sha1_regex = '([a-f]|[0-9]){40}'
sha256_regex = '([a-f]|[0-9]){64}'
# Domains may be written "defanged" with '[.]' separators; the valid TLD
# list (TLDs, defined above) is embedded as the final alternation.
domain_regex = '((?=[a-z0-9-]{1,63}(\.|\[\.\]))(xn--)?[a-z0-9]+(-[a-z0-9]+)*(\.|\[\.\]))+('+TLDs+')'
# Local part followed by '@' (or defanged '[@]') and a domain.
email_regex = '[a-z0-9(\.|\[\.\])_%+-]+(@|\[@\])'+domain_regex
|
mit
|
mavit/ansible
|
lib/ansible/modules/system/hostname.py
|
28
|
25012
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2013, Hiroaki Nakamura <hnakamur@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: hostname
author:
- Adrian Likins (@alikins)
- Hideki Saito (@saito-hideki)
version_added: "1.4"
short_description: Manage hostname
requirements: [ hostname ]
description:
- Set system's hostname, supports most OSs/Distributions, including those using systemd.
- Note, this module does *NOT* modify C(/etc/hosts). You need to modify it yourself using other modules like template or replace.
- Windows, HP-UX and AIX are not currently supported.
options:
name:
description:
- Name of the host
required: true
'''
EXAMPLES = '''
- hostname:
name: web01
'''
import os
import socket
import traceback
from ansible.module_utils.basic import (
AnsibleModule,
get_distribution,
get_distribution_version,
get_platform,
load_platform_subclass,
)
from ansible.module_utils.facts.system.service_mgr import ServiceMgrFactCollector
from ansible.module_utils._text import to_native
class UnimplementedStrategy(object):
    """Fallback strategy: every operation aborts the module with an
    'unsupported platform' error."""
    def __init__(self, module):
        self.module = module
    def update_current_and_permanent_hostname(self):
        self.unimplemented_error()
    def update_current_hostname(self):
        self.unimplemented_error()
    def update_permanent_hostname(self):
        self.unimplemented_error()
    def get_current_hostname(self):
        self.unimplemented_error()
    def set_current_hostname(self, name):
        self.unimplemented_error()
    def get_permanent_hostname(self):
        self.unimplemented_error()
    def set_permanent_hostname(self, name):
        self.unimplemented_error()
    def unimplemented_error(self):
        # Include the distribution in the message when one is known.
        system = get_platform()
        distro = get_distribution()
        msg_platform = '%s (%s)' % (system, distro) if distro is not None else system
        self.module.fail_json(
            msg='hostname module cannot be used on platform %s' % msg_platform)
class Hostname(object):
    """
    This is a generic Hostname manipulation class that is subclassed
    based on platform.
    A subclass may wish to set different strategy instance to self.strategy.
    All subclasses MUST define platform and distribution (which may be None).
    """
    platform = 'Generic'
    distribution = None
    strategy_class = UnimplementedStrategy
    def __new__(cls, *args, **kwargs):
        # Ansible's platform-subclass mechanism: instantiating Hostname
        # actually returns the subclass whose platform/distribution
        # attributes match the running system.
        return load_platform_subclass(Hostname, args, kwargs)
    def __init__(self, module):
        self.module = module
        self.name = module.params['name']
        # systemd-managed Linux always uses hostnamectl, regardless of the
        # distribution-specific strategy_class declared on the subclass.
        if self.platform == 'Linux' and ServiceMgrFactCollector.is_systemd_managed(module):
            self.strategy = SystemdStrategy(module)
        else:
            self.strategy = self.strategy_class(module)
    # All operations below simply delegate to the chosen strategy object.
    def update_current_and_permanent_hostname(self):
        return self.strategy.update_current_and_permanent_hostname()
    def get_current_hostname(self):
        return self.strategy.get_current_hostname()
    def set_current_hostname(self, name):
        self.strategy.set_current_hostname(name)
    def get_permanent_hostname(self):
        return self.strategy.get_permanent_hostname()
    def set_permanent_hostname(self, name):
        self.strategy.set_permanent_hostname(name)
class GenericStrategy(object):
    """
    Base hostname strategy: drives the `hostname` binary for the current
    (transient) name and leaves permanent-name handling to subclasses, which
    override some or all of:
    - get_current_hostname()
    - get_permanent_hostname()
    - set_current_hostname(name)
    - set_permanent_hostname(name)
    """
    def __init__(self, module):
        self.module = module
        self.hostname_cmd = self.module.get_bin_path('hostname', True)
        self.changed = False
    def update_current_and_permanent_hostname(self):
        self.update_current_hostname()
        self.update_permanent_hostname()
        return self.changed
    def update_current_hostname(self):
        wanted = self.module.params['name']
        if self.get_current_hostname() != wanted:
            # In check mode we only report that a change would happen.
            if not self.module.check_mode:
                self.set_current_hostname(wanted)
            self.changed = True
    def update_permanent_hostname(self):
        wanted = self.module.params['name']
        if self.get_permanent_hostname() != wanted:
            if not self.module.check_mode:
                self.set_permanent_hostname(wanted)
            self.changed = True
    def _run(self, cmd):
        # Run a command, aborting the module on any non-zero exit.
        rc, out, err = self.module.run_command(cmd)
        if rc != 0:
            self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" % (rc, out, err))
        return out
    def get_current_hostname(self):
        return to_native(self._run([self.hostname_cmd])).strip()
    def set_current_hostname(self, name):
        self._run([self.hostname_cmd, name])
    def get_permanent_hostname(self):
        # No generic way to persist a hostname; subclasses override.
        return None
    def set_permanent_hostname(self, name):
        pass
class DebianStrategy(GenericStrategy):
    """
    Debian-family strategy: the permanent hostname lives in /etc/hostname.
    """
    HOSTNAME_FILE = '/etc/hostname'
    def get_permanent_hostname(self):
        # Create an empty file on first use so the read below cannot fail
        # with a missing-file error.
        if not os.path.isfile(self.HOSTNAME_FILE):
            try:
                open(self.HOSTNAME_FILE, "a").write("")
            except IOError as e:
                self.module.fail_json(msg="failed to write file: %s" %
                                          to_native(e), exception=traceback.format_exc())
        try:
            with open(self.HOSTNAME_FILE) as hostname_file:
                return hostname_file.read().strip()
        except Exception as e:
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
    def set_permanent_hostname(self, name):
        try:
            with open(self.HOSTNAME_FILE, 'w+') as hostname_file:
                hostname_file.write("%s\n" % name)
        except Exception as e:
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
class SLESStrategy(GenericStrategy):
    """
    SLES strategy: the permanent hostname lives in /etc/HOSTNAME.
    """
    HOSTNAME_FILE = '/etc/HOSTNAME'
    def get_permanent_hostname(self):
        # Create an empty file on first use so the read below cannot fail
        # with a missing-file error.
        if not os.path.isfile(self.HOSTNAME_FILE):
            try:
                open(self.HOSTNAME_FILE, "a").write("")
            except IOError as e:
                self.module.fail_json(msg="failed to write file: %s" %
                                          to_native(e), exception=traceback.format_exc())
        try:
            with open(self.HOSTNAME_FILE) as hostname_file:
                return hostname_file.read().strip()
        except Exception as e:
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
    def set_permanent_hostname(self, name):
        try:
            with open(self.HOSTNAME_FILE, 'w+') as hostname_file:
                hostname_file.write("%s\n" % name)
        except Exception as e:
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
class RedHatStrategy(GenericStrategy):
    """
    Red Hat family strategy: the permanent hostname is the HOSTNAME=
    variable in /etc/sysconfig/network.
    """
    NETWORK_FILE = '/etc/sysconfig/network'
    def get_permanent_hostname(self):
        try:
            # Open in text mode: the previous 'rb' mode yielded bytes lines,
            # making line.startswith('HOSTNAME') raise TypeError on Python 3.
            f = open(self.NETWORK_FILE, 'r')
            try:
                for line in f.readlines():
                    if line.startswith('HOSTNAME'):
                        # Split on the first '=' only, so hostnames/values
                        # containing '=' no longer raise ValueError.
                        k, v = line.split('=', 1)
                        return v.strip()
            finally:
                f.close()
        except Exception as e:
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
    def set_permanent_hostname(self, name):
        try:
            lines = []
            found = False
            # Text mode here too (was 'rb'): startswith() with a str pattern
            # requires str lines on Python 3.
            f = open(self.NETWORK_FILE, 'r')
            try:
                for line in f.readlines():
                    if line.startswith('HOSTNAME'):
                        lines.append("HOSTNAME=%s\n" % name)
                        found = True
                    else:
                        lines.append(line)
            finally:
                f.close()
            if not found:
                lines.append("HOSTNAME=%s\n" % name)
            f = open(self.NETWORK_FILE, 'w+')
            try:
                f.writelines(lines)
            finally:
                f.close()
        except Exception as e:
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
class AlpineStrategy(GenericStrategy):
    """
    Alpine Linux strategy: write /etc/hostname first, then apply it with
    `hostname -F /etc/hostname`.
    """
    HOSTNAME_FILE = '/etc/hostname'
    def update_current_and_permanent_hostname(self):
        # Reversed order relative to the base class: the file must exist
        # before `hostname -F` can read it.
        self.update_permanent_hostname()
        self.update_current_hostname()
        return self.changed
    def get_permanent_hostname(self):
        # Create an empty file on first use so the read below cannot fail
        # with a missing-file error.
        if not os.path.isfile(self.HOSTNAME_FILE):
            try:
                open(self.HOSTNAME_FILE, "a").write("")
            except IOError as e:
                self.module.fail_json(msg="failed to write file: %s" %
                                          to_native(e), exception=traceback.format_exc())
        try:
            with open(self.HOSTNAME_FILE) as hostname_file:
                return hostname_file.read().strip()
        except Exception as e:
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
    def set_permanent_hostname(self, name):
        try:
            with open(self.HOSTNAME_FILE, 'w+') as hostname_file:
                hostname_file.write("%s\n" % name)
        except Exception as e:
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
    def set_current_hostname(self, name):
        # Apply whatever is in HOSTNAME_FILE rather than passing the name
        # directly, mirroring Alpine's init behavior.
        cmd = [self.hostname_cmd, '-F', self.HOSTNAME_FILE]
        rc, out, err = self.module.run_command(cmd)
        if rc != 0:
            self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" % (rc, out, err))
class SystemdStrategy(GenericStrategy):
    """
    Strategy for systemd-managed hosts: all persistent changes go through
    hostnamectl.
    """
    def _run_or_fail(self, cmd):
        # Run a command, aborting the module on any non-zero exit.
        rc, out, err = self.module.run_command(cmd)
        if rc != 0:
            self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" % (rc, out, err))
        return out
    def get_current_hostname(self):
        return to_native(self._run_or_fail(['hostname'])).strip()
    def set_current_hostname(self, name):
        # systemd rejects hostnames longer than 64 characters.
        if len(name) > 64:
            self.module.fail_json(msg="name cannot be longer than 64 characters on systemd servers, try a shorter name")
        self._run_or_fail(['hostnamectl', '--transient', 'set-hostname', name])
    def get_permanent_hostname(self):
        return to_native(self._run_or_fail(['hostnamectl', '--static', 'status'])).strip()
    def set_permanent_hostname(self, name):
        if len(name) > 64:
            self.module.fail_json(msg="name cannot be longer than 64 characters on systemd servers, try a shorter name")
        # Update both the pretty and the static name.
        self._run_or_fail(['hostnamectl', '--pretty', 'set-hostname', name])
        self._run_or_fail(['hostnamectl', '--static', 'set-hostname', name])
class OpenRCStrategy(GenericStrategy):
    """
    Gentoo (OpenRC) strategy: the permanent hostname is the hostname=
    variable in /etc/conf.d/hostname.
    """
    HOSTNAME_FILE = '/etc/conf.d/hostname'
    # 'hostname=' is 9 characters long.  The old code sliced at [10:], which
    # silently dropped the first character of any unquoted value.
    _PREFIX = 'hostname='
    def get_permanent_hostname(self):
        try:
            # with-statement also fixes the old finally-clause, which called
            # f.close() even when open() itself had failed (NameError).
            with open(self.HOSTNAME_FILE, 'r') as f:
                for line in f:
                    line = line.strip()
                    if line.startswith(self._PREFIX):
                        return line[len(self._PREFIX):].strip('"')
        except Exception as e:
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
        return None
    def set_permanent_hostname(self, name):
        try:
            with open(self.HOSTNAME_FILE, 'r') as f:
                lines = [x.strip() for x in f]
            for i, line in enumerate(lines):
                if line.startswith(self._PREFIX):
                    lines[i] = 'hostname="%s"' % name
                    break
            with open(self.HOSTNAME_FILE, 'w') as f:
                f.write('\n'.join(lines) + '\n')
        except Exception as e:
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
class OpenBSDStrategy(GenericStrategy):
    """
    OpenBSD-family strategy: the permanent hostname lives in /etc/myname.
    """
    HOSTNAME_FILE = '/etc/myname'
    def get_permanent_hostname(self):
        # Create an empty file on first use so the read below cannot fail
        # with a missing-file error.
        if not os.path.isfile(self.HOSTNAME_FILE):
            try:
                open(self.HOSTNAME_FILE, "a").write("")
            except IOError as e:
                self.module.fail_json(msg="failed to write file: %s" %
                                          to_native(e), exception=traceback.format_exc())
        try:
            with open(self.HOSTNAME_FILE) as hostname_file:
                return hostname_file.read().strip()
        except Exception as e:
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
    def set_permanent_hostname(self, name):
        try:
            with open(self.HOSTNAME_FILE, 'w+') as hostname_file:
                hostname_file.write("%s\n" % name)
        except Exception as e:
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
class SolarisStrategy(GenericStrategy):
    """
    Solaris 11+ strategy: the hostname command sets both names; the
    permanent value is read back from the SMF identity service.
    """
    def set_current_hostname(self, name):
        # -t changes only the transient (running) hostname.
        rc, out, err = self.module.run_command([self.hostname_cmd, '-t', name])
        if rc != 0:
            self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" % (rc, out, err))
    def get_permanent_hostname(self):
        # Query the config/nodename property of svc:/system/identity:node.
        svc_query = '/usr/sbin/svccfg -s %s listprop -o value %s' % (
            'svc:/system/identity:node', 'config/nodename')
        rc, out, err = self.module.run_command(svc_query, use_unsafe_shell=True)
        if rc != 0:
            self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" % (rc, out, err))
        return to_native(out).strip()
    def set_permanent_hostname(self, name):
        # Plain `hostname <name>` persists the name on Solaris 11+.
        rc, out, err = self.module.run_command([self.hostname_cmd, name])
        if rc != 0:
            self.module.fail_json(msg="Command failed rc=%d, out=%s, err=%s" % (rc, out, err))
class FreeBSDStrategy(GenericStrategy):
    """
    FreeBSD strategy: the permanent hostname is the hostname= variable in
    /etc/rc.conf.d/hostname.
    """
    HOSTNAME_FILE = '/etc/rc.conf.d/hostname'
    # 'hostname=' is 9 characters long.  The old code sliced at [10:], which
    # dropped the first character of unquoted values -- it even returned
    # 'emporarystub' for the stub line this class itself writes below.
    _PREFIX = 'hostname='
    def get_permanent_hostname(self):
        if not os.path.isfile(self.HOSTNAME_FILE):
            try:
                open(self.HOSTNAME_FILE, "a").write("hostname=temporarystub\n")
            except IOError as e:
                self.module.fail_json(msg="failed to write file: %s" %
                                          to_native(e), exception=traceback.format_exc())
        try:
            # with-statement also fixes the old finally-clause, which called
            # f.close() even when open() itself had failed (NameError).
            with open(self.HOSTNAME_FILE, 'r') as f:
                for line in f:
                    line = line.strip()
                    if line.startswith(self._PREFIX):
                        return line[len(self._PREFIX):].strip('"')
        except Exception as e:
            self.module.fail_json(msg="failed to read hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
        return None
    def set_permanent_hostname(self, name):
        try:
            with open(self.HOSTNAME_FILE, 'r') as f:
                lines = [x.strip() for x in f]
            for i, line in enumerate(lines):
                if line.startswith(self._PREFIX):
                    lines[i] = 'hostname="%s"' % name
                    break
            with open(self.HOSTNAME_FILE, 'w') as f:
                f.write('\n'.join(lines) + '\n')
        except Exception as e:
            self.module.fail_json(msg="failed to update hostname: %s" %
                                      to_native(e), exception=traceback.format_exc())
# ---------------------------------------------------------------------------
# Platform/distribution registrations.  Each subclass below only binds a
# (platform, distribution) pair to the strategy class that knows how to
# persist the hostname there; Hostname.__new__ (load_platform_subclass)
# picks the subclass matching the running system.
# ---------------------------------------------------------------------------
class FedoraHostname(Hostname):
    platform = 'Linux'
    distribution = 'Fedora'
    strategy_class = SystemdStrategy
class SLESHostname(Hostname):
    platform = 'Linux'
    distribution = 'Suse linux enterprise server '
    # Only SLES 10-12 use /etc/HOSTNAME; newer/unknown versions fall back to
    # the unimplemented strategy (systemd hosts are caught in __init__).
    try:
        distribution_version = get_distribution_version()
        # cast to float may raise ValueError on non SLES, we use float for a little more safety over int
        if distribution_version and 10 <= float(distribution_version) <= 12:
            strategy_class = SLESStrategy
        else:
            raise ValueError()
    except ValueError:
        strategy_class = UnimplementedStrategy
class OpenSUSEHostname(Hostname):
    platform = 'Linux'
    distribution = 'Opensuse '
    strategy_class = SystemdStrategy
class ArchHostname(Hostname):
    platform = 'Linux'
    distribution = 'Arch'
    strategy_class = SystemdStrategy
class RedHat5Hostname(Hostname):
    platform = 'Linux'
    distribution = 'Redhat'
    strategy_class = RedHatStrategy
class RHELHostname(Hostname):
    platform = 'Linux'
    distribution = 'Red hat enterprise linux'
    strategy_class = RedHatStrategy
class RedHatServerHostname(Hostname):
    platform = 'Linux'
    distribution = 'Red hat enterprise linux server'
    strategy_class = RedHatStrategy
class RedHatWorkstationHostname(Hostname):
    platform = 'Linux'
    distribution = 'Red hat enterprise linux workstation'
    strategy_class = RedHatStrategy
class RedHatAtomicHostname(Hostname):
    platform = 'Linux'
    distribution = 'Red hat enterprise linux atomic host'
    strategy_class = RedHatStrategy
class CentOSHostname(Hostname):
    platform = 'Linux'
    distribution = 'Centos'
    strategy_class = RedHatStrategy
class CentOSLinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Centos linux'
    strategy_class = RedHatStrategy
class CloudlinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Cloudlinux'
    strategy_class = RedHatStrategy
class CloudlinuxServerHostname(Hostname):
    platform = 'Linux'
    distribution = 'Cloudlinux server'
    strategy_class = RedHatStrategy
class ScientificHostname(Hostname):
    platform = 'Linux'
    distribution = 'Scientific'
    strategy_class = RedHatStrategy
class ScientificLinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Scientific linux'
    strategy_class = RedHatStrategy
class ScientificLinuxCERNHostname(Hostname):
    platform = 'Linux'
    distribution = 'Scientific linux cern slc'
    strategy_class = RedHatStrategy
class OracleLinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Oracle linux server'
    strategy_class = RedHatStrategy
class VirtuozzoLinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Virtuozzo linux'
    strategy_class = RedHatStrategy
class AmazonLinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Amazon'
    strategy_class = RedHatStrategy
class SangomaLinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Derived from red hat enterprise linux '
    strategy_class = RedHatStrategy
class DebianHostname(Hostname):
    platform = 'Linux'
    distribution = 'Debian'
    strategy_class = DebianStrategy
class KaliHostname(Hostname):
    platform = 'Linux'
    distribution = 'Kali'
    strategy_class = DebianStrategy
class UbuntuHostname(Hostname):
    platform = 'Linux'
    distribution = 'Ubuntu'
    strategy_class = DebianStrategy
class LinuxmintHostname(Hostname):
    platform = 'Linux'
    distribution = 'Linuxmint'
    strategy_class = DebianStrategy
class LinaroHostname(Hostname):
    platform = 'Linux'
    distribution = 'Linaro'
    strategy_class = DebianStrategy
class DevuanHostname(Hostname):
    platform = 'Linux'
    distribution = 'Devuan'
    strategy_class = DebianStrategy
class GentooHostname(Hostname):
    platform = 'Linux'
    distribution = 'Gentoo base system'
    strategy_class = OpenRCStrategy
class ALTLinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Altlinux'
    strategy_class = RedHatStrategy
class AlpineLinuxHostname(Hostname):
    platform = 'Linux'
    distribution = 'Alpine'
    strategy_class = AlpineStrategy
class OpenBSDHostname(Hostname):
    platform = 'OpenBSD'
    distribution = None
    strategy_class = OpenBSDStrategy
class SolarisHostname(Hostname):
    platform = 'SunOS'
    distribution = None
    strategy_class = SolarisStrategy
class FreeBSDHostname(Hostname):
    platform = 'FreeBSD'
    distribution = None
    strategy_class = FreeBSDStrategy
class NetBSDHostname(Hostname):
    platform = 'NetBSD'
    distribution = None
    strategy_class = FreeBSDStrategy
class NeonHostname(Hostname):
    platform = 'Linux'
    distribution = 'Neon'
    strategy_class = DebianStrategy
def main():
    # Entry point: declare the module interface and enforce the hostname.
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True)
        ),
        supports_check_mode=True,
    )
    hostname = Hostname(module)
    name = module.params['name']
    # Capture the pre-change state so the diff below can report it.
    current_hostname = hostname.get_current_hostname()
    permanent_hostname = hostname.get_permanent_hostname()
    changed = hostname.update_current_and_permanent_hostname()
    # name_before is only consulted when changed is True, in which case one
    # of these branches necessarily matched the pre-update values.
    if name != current_hostname:
        name_before = current_hostname
    elif name != permanent_hostname:
        name_before = permanent_hostname
    kw = dict(changed=changed, name=name,
              ansible_facts=dict(ansible_hostname=name.split('.')[0],
                                 ansible_nodename=name,
                                 ansible_fqdn=socket.getfqdn(),
                                 ansible_domain='.'.join(socket.getfqdn().split('.')[1:])))
    if changed:
        kw['diff'] = {'after': 'hostname = ' + name + '\n',
                      'before': 'hostname = ' + name_before + '\n'}
    module.exit_json(**kw)
if __name__ == '__main__':
main()
|
gpl-3.0
|
Livefyre/kafka-python
|
test/test_conn.py
|
4
|
1741
|
import os
import random
import struct
import unittest2
import kafka.conn
class ConnTest(unittest2.TestCase):
    def _assert_collected(self, hosts):
        # All happy-path input variants should normalize to the same pairs,
        # with 9092 filled in as the default Kafka port.
        resolved = kafka.conn.collect_hosts(hosts)
        self.assertEqual(set(resolved), set([
            ('localhost', 1234),
            ('localhost', 9092),
        ]))

    def test_collect_hosts__happy_path(self):
        self._assert_collected("localhost:1234,localhost")

    def test_collect_hosts__string_list(self):
        self._assert_collected(['localhost:1234', 'localhost'])

    def test_collect_hosts__with_spaces(self):
        self._assert_collected("localhost:1234, localhost")

    @unittest2.skip("Not Implemented")
    def test_send(self):
        pass

    @unittest2.skip("Not Implemented")
    def test_send__reconnects_on_dirty_conn(self):
        pass

    @unittest2.skip("Not Implemented")
    def test_send__failure_sets_dirty_connection(self):
        pass

    @unittest2.skip("Not Implemented")
    def test_recv(self):
        pass

    @unittest2.skip("Not Implemented")
    def test_recv__reconnects_on_dirty_conn(self):
        pass

    @unittest2.skip("Not Implemented")
    def test_recv__failure_sets_dirty_connection(self):
        pass

    @unittest2.skip("Not Implemented")
    def test_recv__doesnt_consume_extra_data_in_stream(self):
        pass

    @unittest2.skip("Not Implemented")
    def test_close__object_is_reusable(self):
        pass
|
apache-2.0
|
direvus/ansible
|
lib/ansible/modules/cloud/amazon/s3_lifecycle.py
|
7
|
20288
|
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: s3_lifecycle
short_description: Manage s3 bucket lifecycle rules in AWS
description:
- Manage s3 bucket lifecycle rules in AWS
version_added: "2.0"
author: "Rob White (@wimnat)"
notes:
- If specifying expiration time as days then transition time must also be specified in days
- If specifying expiration time as a date then transition time must also be specified as a date
requirements:
- python-dateutil
options:
name:
description:
- "Name of the s3 bucket"
required: true
expiration_date:
description:
- >
Indicates the lifetime of the objects that are subject to the rule by the date they will expire. The value must be ISO-8601 format, the time must
be midnight and a GMT timezone must be specified.
expiration_days:
description:
- "Indicates the lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer."
prefix:
description:
- "Prefix identifying one or more objects to which the rule applies. If no prefix is specified, the rule will apply to the whole bucket."
purge_transitions:
description:
      - >
        Whether to replace all the current transition(s) with the new transition(s). When false, the provided transition(s)
        will be added, replacing transitions with the same storage_class. When true, existing transitions will be removed
        and replaced with the new transition(s).
default: true
type: bool
version_added: 2.6
noncurrent_version_expiration_days:
description:
- 'Delete noncurrent versions this many days after they become noncurrent'
required: false
version_added: 2.6
noncurrent_version_storage_class:
description:
- 'Transition noncurrent versions to this storage class'
default: glacier
choices: ['glacier', 'onezone_ia', 'standard_ia']
required: false
version_added: 2.6
noncurrent_version_transition_days:
description:
- 'Transition noncurrent versions this many days after they become noncurrent'
required: false
version_added: 2.6
noncurrent_version_transitions:
description:
- >
A list of transition behaviors to be applied to noncurrent versions for the rule. Each storage class may be used only once. Each transition
behavior contains these elements
I(transition_days)
I(storage_class)
version_added: 2.6
rule_id:
description:
- "Unique identifier for the rule. The value cannot be longer than 255 characters. A unique value for the rule will be generated if no value is provided."
state:
description:
- "Create or remove the lifecycle rule"
default: present
choices: [ 'present', 'absent' ]
status:
description:
- "If 'enabled', the rule is currently being applied. If 'disabled', the rule is not currently being applied."
default: enabled
choices: [ 'enabled', 'disabled' ]
storage_class:
    description:
      - "The storage class to transition to. Currently there are three supported values - 'glacier', 'onezone_ia', or 'standard_ia'."
      - "The 'standard_ia' class is only available from Ansible version 2.2."
default: glacier
choices: [ 'glacier', 'onezone_ia', 'standard_ia']
transition_date:
description:
- >
Indicates the lifetime of the objects that are subject to the rule by the date they will transition to a different storage class.
The value must be ISO-8601 format, the time must be midnight and a GMT timezone must be specified. If transition_days is not specified,
this parameter is required."
transition_days:
description:
- "Indicates when, in days, an object transitions to a different storage class. If transition_date is not specified, this parameter is required."
transitions:
description:
- A list of transition behaviors to be applied to the rule. Each storage class may be used only once. Each transition
behavior may contain these elements
I(transition_days)
I(transition_date)
I(storage_class)
version_added: 2.6
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Configure a lifecycle rule on a bucket to expire (delete) items with a prefix of /logs/ after 30 days
- s3_lifecycle:
name: mybucket
expiration_days: 30
prefix: /logs/
status: enabled
state: present
# Configure a lifecycle rule to transition all items with a prefix of /logs/ to glacier after 7 days and then delete after 90 days
- s3_lifecycle:
name: mybucket
transition_days: 7
expiration_days: 90
prefix: /logs/
status: enabled
state: present
# Configure a lifecycle rule to transition all items with a prefix of /logs/ to glacier on 30 Dec 2020 and then delete on 30 Dec 2030.
# Note that midnight GMT must be specified.
# Be sure to quote your date strings
- s3_lifecycle:
name: mybucket
transition_date: "2020-12-30T00:00:00.000Z"
expiration_date: "2030-12-30T00:00:00.000Z"
prefix: /logs/
status: enabled
state: present
# Disable the rule created above
- s3_lifecycle:
name: mybucket
prefix: /logs/
status: disabled
state: present
# Delete the lifecycle rule created above
- s3_lifecycle:
name: mybucket
prefix: /logs/
state: absent
# Configure a lifecycle rule to transition all backup files older than 31 days in /backups/ to standard infrequent access class.
- s3_lifecycle:
name: mybucket
prefix: /backups/
storage_class: standard_ia
transition_days: 31
state: present
status: enabled
# Configure a lifecycle rule to transition files to infrequent access after 30 days and glacier after 90
- s3_lifecycle:
name: mybucket
prefix: /logs/
state: present
status: enabled
transitions:
- transition_days: 30
storage_class: standard_ia
- transition_days: 90
storage_class: glacier
'''
from copy import deepcopy
import datetime
try:
import dateutil.parser
HAS_DATEUTIL = True
except ImportError:
HAS_DATEUTIL = False
try:
from botocore.exceptions import BotoCoreError, ClientError
except ImportError:
pass # handled by AnsibleAwsModule
from ansible.module_utils.aws.core import AnsibleAWSModule
def create_lifecycle_rule(client, module):
    """Create or update a single lifecycle rule on the named bucket.

    Builds a rule dict from the module parameters, merges it into the
    bucket's current lifecycle configuration (existing rules are matched
    by ID when one is given, otherwise by prefix), writes the merged
    configuration back, and exits the module with the resulting
    ``changed`` flag.
    """
    name = module.params.get("name")
    expiration_date = module.params.get("expiration_date")
    expiration_days = module.params.get("expiration_days")
    noncurrent_version_expiration_days = module.params.get("noncurrent_version_expiration_days")
    noncurrent_version_transition_days = module.params.get("noncurrent_version_transition_days")
    noncurrent_version_transitions = module.params.get("noncurrent_version_transitions")
    noncurrent_version_storage_class = module.params.get("noncurrent_version_storage_class")
    prefix = module.params.get("prefix") or ""
    rule_id = module.params.get("rule_id")
    status = module.params.get("status")
    storage_class = module.params.get("storage_class")
    transition_date = module.params.get("transition_date")
    transition_days = module.params.get("transition_days")
    transitions = module.params.get("transitions")
    purge_transitions = module.params.get("purge_transitions")
    changed = False
    # Get the bucket's current lifecycle rules.  A bucket with no lifecycle
    # configuration raises NoSuchLifecycleConfiguration, which is treated as
    # an empty rule list rather than an error.
    try:
        current_lifecycle = client.get_bucket_lifecycle_configuration(Bucket=name)
        current_lifecycle_rules = current_lifecycle['Rules']
    except ClientError as e:
        if e.response['Error']['Code'] == 'NoSuchLifecycleConfiguration':
            current_lifecycle_rules = []
        else:
            module.fail_json_aws(e)
    except BotoCoreError as e:
        module.fail_json_aws(e)
    # The S3 API expects 'Enabled'/'Disabled', hence .title().
    rule = dict(Filter=dict(Prefix=prefix), Status=status.title())
    if rule_id is not None:
        rule['ID'] = rule_id
    # Create expiration
    if expiration_days is not None:
        rule['Expiration'] = dict(Days=expiration_days)
    elif expiration_date is not None:
        rule['Expiration'] = dict(Date=expiration_date)
    if noncurrent_version_expiration_days is not None:
        rule['NoncurrentVersionExpiration'] = dict(NoncurrentDays=noncurrent_version_expiration_days)
    if transition_days is not None:
        rule['Transitions'] = [dict(Days=transition_days, StorageClass=storage_class.upper()), ]
    elif transition_date is not None:
        rule['Transitions'] = [dict(Date=transition_date, StorageClass=storage_class.upper()), ]
    if transitions is not None:
        if not rule.get('Transitions'):
            rule['Transitions'] = []
        for transition in transitions:
            t_out = dict()
            if transition.get('transition_date'):
                t_out['Date'] = transition['transition_date']
            elif transition.get('transition_days'):
                t_out['Days'] = transition['transition_days']
            if transition.get('storage_class'):
                t_out['StorageClass'] = transition['storage_class'].upper()
            rule['Transitions'].append(t_out)
    if noncurrent_version_transition_days is not None:
        rule['NoncurrentVersionTransitions'] = [dict(NoncurrentDays=noncurrent_version_transition_days,
                                                     StorageClass=noncurrent_version_storage_class.upper()), ]
    if noncurrent_version_transitions is not None:
        if not rule.get('NoncurrentVersionTransitions'):
            rule['NoncurrentVersionTransitions'] = []
        for noncurrent_version_transition in noncurrent_version_transitions:
            t_out = dict()
            t_out['NoncurrentDays'] = noncurrent_version_transition['transition_days']
            if noncurrent_version_transition.get('storage_class'):
                t_out['StorageClass'] = noncurrent_version_transition['storage_class'].upper()
            rule['NoncurrentVersionTransitions'].append(t_out)
    lifecycle_configuration = dict(Rules=[])
    appended = False
    # If current_lifecycle_obj is not None then we have rules to compare, otherwise just add the rule
    if current_lifecycle_rules:
        # If rule ID exists, use that for comparison otherwise compare based on prefix
        for existing_rule in current_lifecycle_rules:
            # NOTE(review): popping the ID here assumes the existing rule has
            # one; a same-ID-different-prefix rule is kept as a separate,
            # now ID-less, rule below — confirm against upstream intent.
            if rule.get('ID') == existing_rule.get('ID') and rule['Filter']['Prefix'] != existing_rule.get('Filter', {}).get('Prefix', ''):
                existing_rule.pop('ID')
            elif rule_id is None and rule['Filter']['Prefix'] == existing_rule.get('Filter', {}).get('Prefix', ''):
                existing_rule.pop('ID')
            if rule.get('ID') == existing_rule.get('ID'):
                changed_, appended_ = update_or_append_rule(rule, existing_rule, purge_transitions, lifecycle_configuration)
                changed = changed_ or changed
                appended = appended_ or appended
            else:
                lifecycle_configuration['Rules'].append(existing_rule)
        # If nothing appended then append now as the rule must not exist
        if not appended:
            lifecycle_configuration['Rules'].append(rule)
            changed = True
    else:
        lifecycle_configuration['Rules'].append(rule)
        changed = True
    # Write lifecycle to bucket
    try:
        client.put_bucket_lifecycle_configuration(Bucket=name, LifecycleConfiguration=lifecycle_configuration)
    except (BotoCoreError, ClientError) as e:
        module.fail_json_aws(e)
    module.exit_json(changed=changed)
def update_or_append_rule(new_rule, existing_rule, purge_transitions, lifecycle_obj):
    """Append new_rule to lifecycle_obj['Rules'], reconciling it against
    the matching existing_rule.

    Returns a (changed, appended) pair; appended is always True since the
    rule always ends up in the configuration.
    """
    if existing_rule['Status'] != new_rule['Status']:
        # Status flip: carry over any sub-policies the caller did not
        # respecify so enabling/disabling does not drop them.
        for key in ('Transitions', 'Expiration', 'NoncurrentVersionExpiration'):
            if not new_rule.get(key) and existing_rule.get(key):
                new_rule[key] = existing_rule[key]
        lifecycle_obj['Rules'].append(new_rule)
        return True, True
    # Same status: optionally fold the existing transitions into the new
    # rule, then report a change only when the result actually differs.
    if not purge_transitions:
        merge_transitions(new_rule, existing_rule)
    lifecycle_obj['Rules'].append(new_rule)
    changed = not compare_rule(new_rule, existing_rule, purge_transitions)
    return changed, True
def compare_rule(rule_a, rule_b, purge_transitions):
    """Return True when the two lifecycle rules are equivalent.

    With purge_transitions the comparison is strict equality; without it,
    rule_a's (noncurrent-version) transitions only need to be a subset of
    rule_b's, and the remaining fields must match exactly.  The inputs are
    deep-copied and never mutated.
    """
    copy_a = deepcopy(rule_a)
    copy_b = deepcopy(rule_b)
    if purge_transitions:
        return copy_a == copy_b
    transitions_a = copy_a.pop('Transitions', [])
    transitions_b = copy_b.pop('Transitions', [])
    ncv_transitions_a = copy_a.pop('NoncurrentVersionTransitions', [])
    ncv_transitions_b = copy_b.pop('NoncurrentVersionTransitions', [])
    if copy_a != copy_b:
        return False
    return (all(t in transitions_b for t in transitions_a) and
            all(t in ncv_transitions_b for t in ncv_transitions_a))
def merge_transitions(updated_rule, updating_rule):
    """Merge updating_rule's transitions into updated_rule in place.

    S3 allows at most one transition per storage class, so transitions are
    keyed by storage class and only classes absent from updated_rule are
    copied over.
    """
    updated_transitions = {}
    updating_transitions = {}
    for transition in updated_rule.get('Transitions', []):
        updated_transitions[transition['StorageClass']] = transition
    for transition in updating_rule.get('Transitions', []):
        updating_transitions[transition['StorageClass']] = transition
    for storage_class, transition in updating_transitions.items():
        if updated_transitions.get(storage_class) is None:
            # BUG FIX: previously updated_rule['Transitions'].append(...)
            # raised KeyError when updated_rule had no 'Transitions' key.
            updated_rule.setdefault('Transitions', []).append(transition)
def destroy_lifecycle_rule(client, module):
    """Remove a lifecycle rule (matched by rule_id, or by prefix) from the bucket.

    Rewrites the bucket's lifecycle configuration without the matching
    rule(s); if no rules remain, the whole lifecycle configuration is
    deleted.  Exits the module with ``changed`` set accordingly.
    """
    name = module.params.get("name")
    prefix = module.params.get("prefix")
    rule_id = module.params.get("rule_id")
    changed = False
    if prefix is None:
        prefix = ""
    # Get the bucket's current lifecycle rules
    try:
        current_lifecycle_rules = client.get_bucket_lifecycle_configuration(Bucket=name)['Rules']
    except ClientError as e:
        # No configuration at all simply means there is nothing to delete.
        if e.response['Error']['Code'] == 'NoSuchLifecycleConfiguration':
            current_lifecycle_rules = []
        else:
            module.fail_json_aws(e)
    except BotoCoreError as e:
        module.fail_json_aws(e)
    # Create lifecycle
    lifecycle_obj = dict(Rules=[])
    # Check if rule exists
    # If an ID exists, use that otherwise compare based on prefix
    if rule_id is not None:
        for existing_rule in current_lifecycle_rules:
            if rule_id == existing_rule['ID']:
                # We're not keeping the rule (i.e. deleting) so mark as changed
                changed = True
            else:
                lifecycle_obj['Rules'].append(existing_rule)
    else:
        for existing_rule in current_lifecycle_rules:
            # NOTE(review): assumes every existing rule carries an ID /
            # Filter.Prefix; rules created with another filter shape would
            # raise KeyError here — confirm against real bucket data.
            if prefix == existing_rule['Filter']['Prefix']:
                # We're not keeping the rule (i.e. deleting) so mark as changed
                changed = True
            else:
                lifecycle_obj['Rules'].append(existing_rule)
    # Write lifecycle to bucket or, if there no rules left, delete lifecycle configuration
    try:
        if lifecycle_obj['Rules']:
            client.put_bucket_lifecycle_configuration(Bucket=name, LifecycleConfiguration=lifecycle_obj)
        elif current_lifecycle_rules:
            changed = True
            client.delete_bucket_lifecycle(Bucket=name)
    except (ClientError, BotoCoreError) as e:
        module.fail_json_aws(e)
    module.exit_json(changed=changed)
def main():
    """Module entry point: validate parameters and dispatch to create/destroy."""
    argument_spec = dict(
        name=dict(required=True, type='str'),
        expiration_days=dict(type='int'),
        expiration_date=dict(),
        noncurrent_version_expiration_days=dict(type='int'),
        noncurrent_version_storage_class=dict(default='glacier', type='str', choices=['glacier', 'onezone_ia', 'standard_ia']),
        noncurrent_version_transition_days=dict(type='int'),
        noncurrent_version_transitions=dict(type='list'),
        prefix=dict(),
        # NOTE(review): requester_pays is accepted but never read by this
        # module; kept for backward compatibility with existing playbooks.
        requester_pays=dict(default='no', type='bool'),
        rule_id=dict(),
        state=dict(default='present', choices=['present', 'absent']),
        status=dict(default='enabled', choices=['enabled', 'disabled']),
        storage_class=dict(default='glacier', type='str', choices=['glacier', 'onezone_ia', 'standard_ia']),
        transition_days=dict(type='int'),
        transition_date=dict(),
        transitions=dict(type='list'),
        purge_transitions=dict(default='yes', type='bool')
    )
    module = AnsibleAWSModule(argument_spec=argument_spec,
                              mutually_exclusive=[
                                  ['expiration_days', 'expiration_date'],
                                  ['expiration_days', 'transition_date'],
                                  ['transition_days', 'transition_date'],
                                  ['transition_days', 'expiration_date'],
                                  ['transition_days', 'transitions'],
                                  ['transition_date', 'transitions'],
                                  ['noncurrent_version_transition_days', 'noncurrent_version_transitions'],
                              ],)
    if not HAS_DATEUTIL:
        module.fail_json(msg='dateutil required for this module')
    client = module.client('s3')
    expiration_date = module.params.get("expiration_date")
    transition_date = module.params.get("transition_date")
    state = module.params.get("state")
    if state == 'present' and module.params["status"] == "enabled":  # allow deleting/disabling a rule by id/prefix
        required_when_present = ('expiration_date', 'expiration_days', 'transition_date',
                                 'transition_days', 'transitions', 'noncurrent_version_expiration_days',
                                 'noncurrent_version_transition_days',
                                 'noncurrent_version_transitions')
        for param in required_when_present:
            if module.params.get(param):
                break
        else:
            msg = "one of the following is required when 'state' is 'present': %s" % ', '.join(required_when_present)
            module.fail_json(msg=msg)
    # If a date was given, check the string is valid ISO-8601 at midnight GMT.
    if expiration_date is not None:
        try:
            datetime.datetime.strptime(expiration_date, "%Y-%m-%dT%H:%M:%S.000Z")
        except ValueError:
            module.fail_json(msg="expiration_date is not a valid ISO-8601 format. The time must be midnight and a timezone of GMT must be included")
    if transition_date is not None:
        try:
            datetime.datetime.strptime(transition_date, "%Y-%m-%dT%H:%M:%S.000Z")
        except ValueError:
            # BUG FIX: this failure message previously said "expiration_date",
            # misreporting which parameter was invalid.
            module.fail_json(msg="transition_date is not a valid ISO-8601 format. The time must be midnight and a timezone of GMT must be included")
    if state == 'present':
        create_lifecycle_rule(client, module)
    elif state == 'absent':
        destroy_lifecycle_rule(client, module)
|
gpl-3.0
|
donbixler/xhtml2pdf
|
xhtml2pdf/parser.py
|
1
|
24988
|
# -*- coding: utf-8 -*-
# Copyright 2010 Dirk Holtwick, holtwick.it
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from html5lib import treebuilders, inputstream
from xhtml2pdf.default import TAGS, STRING, INT, BOOL, SIZE, COLOR, FILE
from xhtml2pdf.default import BOX, POS, MUST, FONT
from xhtml2pdf.util import getSize, getBool, toList, getColor, getAlign
from xhtml2pdf.util import getBox, getPos, pisaTempFile
from reportlab.platypus.doctemplate import NextPageTemplate, FrameBreak
from reportlab.platypus.flowables import PageBreak, KeepInFrame
from xhtml2pdf.xhtml2pdf_reportlab import PmlRightPageBreak, PmlLeftPageBreak
from xhtml2pdf.tags import * # TODO: Kill wild import!
from xhtml2pdf.tables import * # TODO: Kill wild import!
from xhtml2pdf.util import * # TODO: Kill wild import!
from xml.dom import Node
import copy
import html5lib
import logging
import re
import types
import xhtml2pdf.w3c.cssDOMElementInterface as cssDOMElementInterface
import xml.dom.minidom
CSSAttrCache = {}
log = logging.getLogger("xhtml2pdf")
rxhttpstrip = re.compile("https?://[^/]+(.*)", re.M | re.I)
class AttrContainer(dict):
    """Dict whose entries are also readable as attributes (e.g. attr.media).

    Missing names raise KeyError, matching the original behavior.
    """

    def __getattr__(self, name):
        # The original wrapped a call to dict.__getattr__ in a bare except,
        # but dict has no __getattr__, so that path always raised and fell
        # through to the item lookup below — do the lookup directly.
        return self[name]
def pisaGetAttributes(c, tag, attributes):
    """Normalize the raw HTML attributes of *tag* against the TAGS schema.

    For known tags, every schema-declared attribute is coerced to its
    declared type (enum list, BOOL, SIZE, BOX, POS, INT, COLOR, FILE,
    FONT), defaults are applied, and missing/invalid values are reported
    through the context *c* as warnings.  Returns an AttrContainer so the
    result can be read as items or as attributes.
    """
    global TAGS
    attrs = {}
    if attributes:
        for k, v in attributes.items():
            try:
                attrs[str(k)] = str(v)  # XXX no Unicode! Reportlab fails with template names
            except:
                attrs[k] = v
    nattrs = {}
    if tag in TAGS:
        block, adef = TAGS[tag]
        adef["id"] = STRING
        # print block, adef
        for k, v in adef.iteritems():
            nattrs[k] = None
            # print k, v
            # defaults, if present (tuple entries are (type, default))
            if type(v) == types.TupleType:
                if v[1] == MUST:
                    # Required attribute: warn and leave it as None when missing.
                    if k not in attrs:
                        log.warn(c.warning("Attribute '%s' must be set!", k))
                        nattrs[k] = None
                        continue
                nv = attrs.get(k, v[1])
                dfl = v[1]
                v = v[0]
            else:
                nv = attrs.get(k, None)
                dfl = None
            if nv is not None:
                # Coerce the raw string value to the declared attribute type.
                if type(v) == types.ListType:
                    nv = nv.strip().lower()
                    if nv not in v:
                        #~ raise PML_EXCEPTION, "attribute '%s' of wrong value, allowed is one of: %s" % (k, repr(v))
                        log.warn(c.warning("Attribute '%s' of wrong value, allowed is one of: %s", k, repr(v)))
                        nv = dfl
                elif v == BOOL:
                    nv = nv.strip().lower()
                    nv = nv in ("1", "y", "yes", "true", str(k))
                elif v == SIZE:
                    try:
                        nv = getSize(nv)
                    except:
                        log.warn(c.warning("Attribute '%s' expects a size value", k))
                elif v == BOX:
                    nv = getBox(nv, c.pageSize)
                elif v == POS:
                    nv = getPos(nv, c.pageSize)
                elif v == INT:
                    nv = int(nv)
                elif v == COLOR:
                    nv = getColor(nv)
                elif v == FILE:
                    nv = c.getFile(nv)
                elif v == FONT:
                    nv = c.getFontName(nv)
                nattrs[k] = nv
    return AttrContainer(nattrs)
attrNames = '''
color
font-family
font-size
font-weight
font-style
text-decoration
line-height
letter-spacing
background-color
display
margin-left
margin-right
margin-top
margin-bottom
padding-left
padding-right
padding-top
padding-bottom
border-top-color
border-top-style
border-top-width
border-bottom-color
border-bottom-style
border-bottom-width
border-left-color
border-left-style
border-left-width
border-right-color
border-right-style
border-right-width
text-align
vertical-align
width
height
zoom
page-break-after
page-break-before
list-style-type
list-style-image
white-space
text-indent
-pdf-page-break
-pdf-frame-break
-pdf-next-page
-pdf-keep-with-next
-pdf-outline
-pdf-outline-level
-pdf-outline-open
-pdf-line-spacing
-pdf-keep-in-frame-mode
-pdf-word-wrap
'''.strip().split()
def getCSSAttr(self, cssCascade, attrName, default=NotImplemented):
    """Resolve *attrName* for this DOM element via cascade, inline style and
    inheritance; results are memoized on ``self.cssAttrs``.

    Raises LookupError for an unresolvable 'inherit' value when no
    *default* was supplied.  Monkeypatched onto minidom Element below.
    """
    if attrName in self.cssAttrs:
        return self.cssAttrs[attrName]
    try:
        result = cssCascade.findStyleFor(self.cssElement, attrName, default)
    except LookupError:
        result = None
    # XXX Workaround for inline styles
    try:
        style = self.cssStyle
    except:
        style = self.cssStyle = cssCascade.parser.parseInline(self.cssElement.getStyleAttr() or '')[0]
    # Inline style wins over the cascade result.
    if attrName in style:
        result = style[attrName]
    if result == 'inherit':
        if hasattr(self.parentNode, 'getCSSAttr'):
            result = self.parentNode.getCSSAttr(cssCascade, attrName, default)
        elif default is not NotImplemented:
            return default
        # NOTE(review): this raise also fires after a successful parent
        # lookup above — it looks like it belongs in an else branch;
        # confirm against upstream before changing behavior.
        raise LookupError("Could not find inherited CSS attribute value for '%s'" % (attrName,))
    if result is not None:
        self.cssAttrs[attrName] = result
    return result


# TODO: Monkeypatching standard lib should go away.
xml.dom.minidom.Element.getCSSAttr = getCSSAttr
# Aliasing table for legacy, non-standard HTML attributes that browsers
# tolerate: maps each nonstandard name to its standard CSS property name.
nonStandardAttrNames = {
    'bgcolor': 'background-color',
}


def mapNonStandardAttrs(c, n, attrList):
    """Copy recognized legacy attributes from *attrList* into the CSS dict
    *c* under their standard names, never overwriting an existing value.
    *n* (the node) is unused but kept for signature compatibility.
    Returns *c*."""
    for legacy_name, standard_name in nonStandardAttrNames.items():
        if legacy_name in attrList and standard_name not in c:
            c[standard_name] = attrList[legacy_name]
    return c
def getCSSAttrCacheKey(node):
    """Build the CSSAttrCache key for *node*: parent identity, lowercased
    tag name, and the element's class/id/style attribute values, joined
    with '#'."""
    found = {'class': '', 'id': '', 'style': ''}
    for attr_name, attr_value in node.attributes.items():
        if attr_name in found:
            found[attr_name] = attr_value
    return "%s#%s#%s#%s#%s" % (id(node.parentNode), node.tagName.lower(),
                               found['class'], found['id'], found['style'])
def CSSCollect(node, c):
    """Compute (and cache) the effective CSS attributes for *node*.

    Results are cached in the module-level CSSAttrCache keyed by
    getCSSAttrCacheKey; direct children of <html> are never served from
    the cache.  Stores the result on node.cssAttrs and returns it.
    Returns None when the context has no CSS at all.
    """
    #node.cssAttrs = {}
    #return node.cssAttrs
    if c.css:
        _key = getCSSAttrCacheKey(node)
        if hasattr(node.parentNode, "tagName"):
            if node.parentNode.tagName.lower() != "html":
                CachedCSSAttr = CSSAttrCache.get(_key, None)
                if CachedCSSAttr is not None:
                    node.cssAttrs = CachedCSSAttr
                    return CachedCSSAttr
        node.cssElement = cssDOMElementInterface.CSSDOMElementInterface(node)
        node.cssAttrs = {}
        # node.cssElement.onCSSParserVisit(c.cssCascade.parser)
        # NOTE(review): cssAttrMap is populated but never read; node.cssAttrs
        # is filled as a side effect of node.getCSSAttr's memoization.
        cssAttrMap = {}
        for cssAttrName in attrNames:
            try:
                cssAttrMap[cssAttrName] = node.getCSSAttr(c.cssCascade, cssAttrName)
            #except LookupError:
            #    pass
            except Exception:  # TODO: Kill this catch-all!
                log.debug("CSS error '%s'", cssAttrName, exc_info=1)
        CSSAttrCache[_key] = node.cssAttrs
        return node.cssAttrs
def CSS2Frag(c, kw, isBlock):
    """Map the computed CSS attributes (c.cssAttr) onto ReportLab fragment
    properties on c.frag.

    *kw* carries the accumulated margin state; its "margin-left" /
    "margin-right" entries are updated in place so nested blocks indent
    cumulatively.  *isBlock* gates the margin, padding and border
    handling, which applies only to block-level elements.
    """
    # COLORS
    if "color" in c.cssAttr:
        c.frag.textColor = getColor(c.cssAttr["color"])
    if "background-color" in c.cssAttr:
        c.frag.backColor = getColor(c.cssAttr["background-color"])
    # FONT SIZE, STYLE, WEIGHT
    if "font-family" in c.cssAttr:
        c.frag.fontName = c.getFontName(c.cssAttr["font-family"])
    if "font-size" in c.cssAttr:
        # XXX inherit
        c.frag.fontSize = max(getSize("".join(c.cssAttr["font-size"]), c.frag.fontSize, c.baseFontSize), 1.0)
    if "line-height" in c.cssAttr:
        leading = "".join(c.cssAttr["line-height"])
        c.frag.leading = getSize(leading, c.frag.fontSize)
        c.frag.leadingSource = leading
    else:
        # Re-derive leading from the inherited source for the (possibly
        # changed) font size.
        c.frag.leading = getSize(c.frag.leadingSource, c.frag.fontSize)
    if "letter-spacing" in c.cssAttr:
        c.frag.letterSpacing = c.cssAttr["letter-spacing"]
    if "-pdf-line-spacing" in c.cssAttr:
        c.frag.leadingSpace = getSize("".join(c.cssAttr["-pdf-line-spacing"]))
    # print "line-spacing", c.cssAttr["-pdf-line-spacing"], c.frag.leading
    if "font-weight" in c.cssAttr:
        value = c.cssAttr["font-weight"].lower()
        if value in ("bold", "bolder", "500", "600", "700", "800", "900"):
            c.frag.bold = 1
        else:
            c.frag.bold = 0
    for value in toList(c.cssAttr.get("text-decoration", "")):
        if "underline" in value:
            c.frag.underline = 1
        if "line-through" in value:
            c.frag.strike = 1
        if "none" in value:
            c.frag.underline = 0
            c.frag.strike = 0
    if "font-style" in c.cssAttr:
        value = c.cssAttr["font-style"].lower()
        if value in ("italic", "oblique"):
            c.frag.italic = 1
        else:
            c.frag.italic = 0
    if "white-space" in c.cssAttr:
        # normal | pre | nowrap
        c.frag.whiteSpace = str(c.cssAttr["white-space"]).lower()
    # ALIGN & VALIGN
    if "text-align" in c.cssAttr:
        c.frag.alignment = getAlign(c.cssAttr["text-align"])
    if "vertical-align" in c.cssAttr:
        c.frag.vAlign = c.cssAttr["vertical-align"]
    # HEIGHT & WIDTH ("auto" is normalized to None)
    if "height" in c.cssAttr:
        c.frag.height = "".join(toList(c.cssAttr["height"]))  # XXX Relative is not correct!
        if c.frag.height in ("auto",):
            c.frag.height = None
    if "width" in c.cssAttr:
        c.frag.width = "".join(toList(c.cssAttr["width"]))  # XXX Relative is not correct!
        if c.frag.width in ("auto",):
            c.frag.width = None
    # ZOOM
    if "zoom" in c.cssAttr:
        zoom = "".join(toList(c.cssAttr["zoom"]))  # XXX Relative is not correct!
        if zoom.endswith("%"):
            zoom = float(zoom[: - 1]) / 100.0
        c.frag.zoom = float(zoom)
    # MARGINS & LIST INDENT, STYLE
    if isBlock:
        if "margin-top" in c.cssAttr:
            c.frag.spaceBefore = getSize(c.cssAttr["margin-top"], c.frag.fontSize)
        if "margin-bottom" in c.cssAttr:
            c.frag.spaceAfter = getSize(c.cssAttr["margin-bottom"], c.frag.fontSize)
        if "margin-left" in c.cssAttr:
            c.frag.bulletIndent = kw["margin-left"]  # For lists
            kw["margin-left"] += getSize(c.cssAttr["margin-left"], c.frag.fontSize)
            c.frag.leftIndent = kw["margin-left"]
        if "margin-right" in c.cssAttr:
            kw["margin-right"] += getSize(c.cssAttr["margin-right"], c.frag.fontSize)
            c.frag.rightIndent = kw["margin-right"]
        if "text-indent" in c.cssAttr:
            c.frag.firstLineIndent = getSize(c.cssAttr["text-indent"], c.frag.fontSize)
        if "list-style-type" in c.cssAttr:
            c.frag.listStyleType = str(c.cssAttr["list-style-type"]).lower()
        if "list-style-image" in c.cssAttr:
            c.frag.listStyleImage = c.getFile(c.cssAttr["list-style-image"])
    # PADDINGS
    if isBlock:
        if "padding-top" in c.cssAttr:
            c.frag.paddingTop = getSize(c.cssAttr["padding-top"], c.frag.fontSize)
        if "padding-bottom" in c.cssAttr:
            c.frag.paddingBottom = getSize(c.cssAttr["padding-bottom"], c.frag.fontSize)
        if "padding-left" in c.cssAttr:
            c.frag.paddingLeft = getSize(c.cssAttr["padding-left"], c.frag.fontSize)
        if "padding-right" in c.cssAttr:
            c.frag.paddingRight = getSize(c.cssAttr["padding-right"], c.frag.fontSize)
    # BORDERS
    if isBlock:
        if "border-top-width" in c.cssAttr:
            c.frag.borderTopWidth = getSize(c.cssAttr["border-top-width"], c.frag.fontSize)
        if "border-bottom-width" in c.cssAttr:
            c.frag.borderBottomWidth = getSize(c.cssAttr["border-bottom-width"], c.frag.fontSize)
        if "border-left-width" in c.cssAttr:
            c.frag.borderLeftWidth = getSize(c.cssAttr["border-left-width"], c.frag.fontSize)
        if "border-right-width" in c.cssAttr:
            c.frag.borderRightWidth = getSize(c.cssAttr["border-right-width"], c.frag.fontSize)
        if "border-top-style" in c.cssAttr:
            c.frag.borderTopStyle = c.cssAttr["border-top-style"]
        if "border-bottom-style" in c.cssAttr:
            c.frag.borderBottomStyle = c.cssAttr["border-bottom-style"]
        if "border-left-style" in c.cssAttr:
            c.frag.borderLeftStyle = c.cssAttr["border-left-style"]
        if "border-right-style" in c.cssAttr:
            c.frag.borderRightStyle = c.cssAttr["border-right-style"]
        if "border-top-color" in c.cssAttr:
            c.frag.borderTopColor = getColor(c.cssAttr["border-top-color"])
        if "border-bottom-color" in c.cssAttr:
            c.frag.borderBottomColor = getColor(c.cssAttr["border-bottom-color"])
        if "border-left-color" in c.cssAttr:
            c.frag.borderLeftColor = getColor(c.cssAttr["border-left-color"])
        if "border-right-color" in c.cssAttr:
            c.frag.borderRightColor = getColor(c.cssAttr["border-right-color"])
def pisaPreLoop(node, context, collect=False):
    """
    Collect all CSS definitions

    Walks the DOM depth-first.  <style> element contents and matching
    <link rel="stylesheet"> hrefs are handed to context.addCSS; only
    styles with no media, or media "all"/"print"/"pdf", are considered.
    When *collect* is true, text-node data is accumulated and returned.
    """
    data = u""
    if node.nodeType == Node.TEXT_NODE and collect:
        data = node.data
    elif node.nodeType == Node.ELEMENT_NODE:
        name = node.tagName.lower()
        if name in ("style", "link"):
            attr = pisaGetAttributes(context, name, node.attributes)
            media = [x.strip() for x in attr.media.lower().split(",") if x.strip()]
            if attr.get("type", "").lower() in ("", "text/css") and \
                    (not media or "all" in media or "print" in media or "pdf" in media):
                if name == "style":
                    # Gather the stylesheet text from the element's children
                    # and register it; nothing to propagate to the caller.
                    # (The loop variable shadows *node*, but this branch
                    # returns before the generic recursion below.)
                    for node in node.childNodes:
                        data += pisaPreLoop(node, context, collect=True)
                    context.addCSS(data)
                    return u""
                if name == "link" and attr.href and attr.rel.lower() == "stylesheet":
                    # print "CSS LINK", attr
                    context.addCSS('\n@import "%s" %s;' % (attr.href, ",".join(media)))
    for node in node.childNodes:
        result = pisaPreLoop(node, context, collect=collect)
        if collect:
            data += result
    return data
def pisaLoop(node, context, path=None, **kw):
    """
    Recursively walk the DOM and translate it into the reportlab story.

    node    -- current minidom node (text or element)
    context -- pisa context object accumulating frags, paragraphs and story
    path    -- list of ancestor tag names (used for CSS matching)
    kw      -- inherited layout state (margins), copied per recursion level
    """
    if path is None:
        path = []
    # Initialize inherited keyword state on the outermost call only.
    if not kw:
        kw = {
            "margin-top": 0,
            "margin-bottom": 0,
            "margin-left": 0,
            "margin-right": 0,
        }
    else:
        kw = copy.copy(kw)
    # TEXT
    if node.nodeType == Node.TEXT_NODE:
        context.addFrag(node.data)
    # ELEMENT
    elif node.nodeType == Node.ELEMENT_NODE:
        node.tagName = node.tagName.replace(":", "").lower()
        if node.tagName in ("style", "script"):
            # Handled in pisaPreLoop / not rendered at all.
            return
        path = copy.copy(path) + [node.tagName]
        # Prepare attributes
        attr = pisaGetAttributes(context, node.tagName, node.attributes)
        # Calculate styles
        context.cssAttr = CSSCollect(node, context)
        context.cssAttr = mapNonStandardAttrs(context.cssAttr, node, attr)
        context.node = node
        # Block?
        PAGE_BREAK = 1
        PAGE_BREAK_RIGHT = 2
        PAGE_BREAK_LEFT = 3
        pageBreakAfter = False
        frameBreakAfter = False
        display = context.cssAttr.get("display", "inline").lower()
        isBlock = (display == "block")
        if isBlock:
            context.addPara()
            # Page break by CSS
            if "-pdf-next-page" in context.cssAttr:
                context.addStory(NextPageTemplate(str(context.cssAttr["-pdf-next-page"])))
            if "-pdf-page-break" in context.cssAttr:
                if str(context.cssAttr["-pdf-page-break"]).lower() == "before":
                    context.addStory(PageBreak())
            if "-pdf-frame-break" in context.cssAttr:
                if str(context.cssAttr["-pdf-frame-break"]).lower() == "before":
                    context.addStory(FrameBreak())
                if str(context.cssAttr["-pdf-frame-break"]).lower() == "after":
                    frameBreakAfter = True
            if "page-break-before" in context.cssAttr:
                if str(context.cssAttr["page-break-before"]).lower() == "always":
                    context.addStory(PageBreak())
                if str(context.cssAttr["page-break-before"]).lower() == "right":
                    context.addStory(PageBreak())
                    context.addStory(PmlRightPageBreak())
                if str(context.cssAttr["page-break-before"]).lower() == "left":
                    context.addStory(PageBreak())
                    context.addStory(PmlLeftPageBreak())
            if "page-break-after" in context.cssAttr:
                if str(context.cssAttr["page-break-after"]).lower() == "always":
                    pageBreakAfter = PAGE_BREAK
                if str(context.cssAttr["page-break-after"]).lower() == "right":
                    pageBreakAfter = PAGE_BREAK_RIGHT
                if str(context.cssAttr["page-break-after"]).lower() == "left":
                    pageBreakAfter = PAGE_BREAK_LEFT
        if display == "none":
            # Hidden subtree: render nothing at all.
            return
        # Translate CSS to frags
        # Save previous frag styles
        context.pushFrag()
        # Map styles to Reportlab fragment properties
        CSS2Frag(context, kw, isBlock)
        # EXTRAS
        if "-pdf-keep-with-next" in context.cssAttr:
            context.frag.keepWithNext = getBool(context.cssAttr["-pdf-keep-with-next"])
        if "-pdf-outline" in context.cssAttr:
            context.frag.outline = getBool(context.cssAttr["-pdf-outline"])
        if "-pdf-outline-level" in context.cssAttr:
            context.frag.outlineLevel = int(context.cssAttr["-pdf-outline-level"])
        if "-pdf-outline-open" in context.cssAttr:
            context.frag.outlineOpen = getBool(context.cssAttr["-pdf-outline-open"])
        if "-pdf-word-wrap" in context.cssAttr:
            context.frag.wordWrap = context.cssAttr["-pdf-word-wrap"]
        # handle keep-in-frame
        keepInFrameMode = None
        keepInFrameMaxWidth = 0
        keepInFrameMaxHeight = 0
        if "-pdf-keep-in-frame-mode" in context.cssAttr:
            value = str(context.cssAttr["-pdf-keep-in-frame-mode"]).strip().lower()
            # These are exactly the modes accepted by reportlab's KeepInFrame.
            if value in ("shrink", "error", "overflow", "truncate"):
                keepInFrameMode = value
        if "-pdf-keep-in-frame-max-width" in context.cssAttr:
            keepInFrameMaxWidth = getSize("".join(context.cssAttr["-pdf-keep-in-frame-max-width"]))
        if "-pdf-keep-in-frame-max-height" in context.cssAttr:
            keepInFrameMaxHeight = getSize("".join(context.cssAttr["-pdf-keep-in-frame-max-height"]))
        # ignore nested keep-in-frames, tables have their own KIF handling
        keepInFrame = keepInFrameMode is not None and context.keepInFrameIndex is None
        if keepInFrame:
            # keep track of current story index, so we can wrap everything
            # added after this point in a KeepInFrame
            context.keepInFrameIndex = len(context.story)
        # BEGIN tag
        klass = globals().get("pisaTag%s" % node.tagName.replace(":", "").upper(), None)
        obj = None
        # Static block
        elementId = attr.get("id", None)
        staticFrame = context.frameStatic.get(elementId, None)
        if staticFrame:
            context.frag.insideStaticFrame += 1
            oldStory = context.swapStory()
        # Tag specific operations
        if klass is not None:
            obj = klass(node, attr)
            obj.start(context)
        # Visit child nodes
        context.fragBlock = fragBlock = copy.copy(context.frag)
        for nnode in node.childNodes:
            pisaLoop(nnode, context, path, **kw)
        context.fragBlock = fragBlock
        # END tag
        if obj:
            obj.end(context)
        # Block?
        if isBlock:
            context.addPara()
            # XXX Buggy!
            # Page break by CSS
            if pageBreakAfter:
                context.addStory(PageBreak())
                if pageBreakAfter == PAGE_BREAK_RIGHT:
                    context.addStory(PmlRightPageBreak())
                if pageBreakAfter == PAGE_BREAK_LEFT:
                    context.addStory(PmlLeftPageBreak())
            if frameBreakAfter:
                context.addStory(FrameBreak())
        if keepInFrame:
            # get all content added after start of -pdf-keep-in-frame and wrap
            # it in a KeepInFrame
            substory = context.story[context.keepInFrameIndex:]
            context.story = context.story[:context.keepInFrameIndex]
            context.story.append(
                KeepInFrame(
                    content=substory,
                    maxWidth=keepInFrameMaxWidth,
                    maxHeight=keepInFrameMaxHeight,
                    # FIX: keepInFrameMode was parsed and validated above but
                    # never used, so "-pdf-keep-in-frame-mode" had no effect;
                    # pass it through to reportlab (its default is "shrink").
                    mode=keepInFrameMode))
            context.keepInFrameIndex = None
        # Static block, END
        if staticFrame:
            context.addPara()
            for frame in staticFrame:
                frame.pisaStaticStory = context.story
            context.swapStory(oldStory)
            context.frag.insideStaticFrame -= 1
        # Reset frag style
        context.pullFrag()
    # Unknown or not handled
    else:
        # Loop over children
        for child in node.childNodes:
            pisaLoop(child, context, path, **kw)
def pisaParser(src, context, default_css="", xhtml=False, encoding=None, xml_output=None):
    """
    - Parse HTML and get miniDOM
    - Extract CSS informations, add default CSS, parse CSS
    - Handle the document DOM itself and build reportlab story
    - Return Context object

    src         -- HTML source: a (unicode) string or a file-like object
    context     -- pisa context object that collects CSS and the story
    default_css -- extra CSS text prepended before the document's own CSS
    xhtml       -- parse as XHTML instead of HTML (see TODO below)
    encoding    -- source encoding name; validated against html5lib below
    xml_output  -- optional file-like object receiving the pretty-printed DOM
    """
    global CSSAttrCache
    # Reset the per-document CSS attribute cache for every parse run.
    CSSAttrCache = {}
    if xhtml:
        #TODO: XHTMLParser doesn't see to exist...
        parser = html5lib.XHTMLParser(tree=treebuilders.getTreeBuilder("dom"))
    else:
        parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder("dom"))
    # NOTE(review): types.StringTypes / types.UnicodeType exist only on
    # Python 2 -- this module targets Python 2.
    if type(src) in types.StringTypes:
        if type(src) is types.UnicodeType:
            # If an encoding was provided, do not change it.
            if not encoding:
                encoding = "utf-8"
            src = src.encode(encoding)
        # Wrap raw bytes in a file-like object for html5lib.
        src = pisaTempFile(src, capacity=context.capacity)
    # Test for the restrictions of html5lib
    if encoding:
        # Workaround for html5lib<0.11.1
        if hasattr(inputstream, "isValidEncoding"):
            # html5lib only knows the canonical spelling "utf-8".
            if encoding.strip().lower() == "utf8":
                encoding = "utf-8"
            if not inputstream.isValidEncoding(encoding):
                log.error("%r is not a valid encoding e.g. 'utf8' is not valid but 'utf-8' is!", encoding)
        else:
            if inputstream.codecName(encoding) is None:
                log.error("%r is not a valid encoding", encoding)
    document = parser.parse(
        src,
        encoding=encoding)
    if xml_output:
        if encoding:
            xml_output.write(document.toprettyxml(encoding=encoding))
        else:
            xml_output.write(document.toprettyxml(encoding="utf8"))
    if default_css:
        context.addCSS(default_css)
    # First pass: collect CSS from <style>/<link> elements.
    pisaPreLoop(document, context)
    #try:
    context.parseCSS()
    #except:
    #    context.cssText = DEFAULT_CSS
    #    context.parseCSS()
    # context.debug(9, pprint.pformat(context.css))
    # Second pass: build the reportlab story from the DOM.
    pisaLoop(document, context)
    return context
# Shortcuts

# Alias: parse HTML source into a pisa context.
HTML2PDF = pisaParser
def XHTML2PDF(*a, **kw):
    """Same as HTML2PDF, but force XHTML parsing (sets xhtml=True)."""
    kw["xhtml"] = True
    return HTML2PDF(*a, **kw)
# Alias: XML input is handled by the XHTML parser.
XML2PDF = XHTML2PDF
|
apache-2.0
|
rixrix/servo
|
tests/wpt/css-tests/tools/webdriver/webdriver/webelement.py
|
251
|
1846
|
"""Element-level WebDriver operations."""
import searchcontext
class WebElement(searchcontext.SearchContext):
    """Corresponds to a DOM element in the current page."""

    def __init__(self, driver, id):
        self._driver = driver
        self._id = id
        # Set value of mode used by SearchContext
        self.mode = driver.mode

    @property
    def id(self):
        """The opaque WebDriver identifier of this element."""
        return self._id

    def execute(self, method, path, name, body=None):
        """Execute a command against this WebElement."""
        return self._driver.execute(
            method, '/element/%s%s' % (self._id, path), name, body)

    def is_displayed(self):
        """Is this element displayed?"""
        return self.execute('GET', '/displayed', 'isDisplayed')

    def is_selected(self):
        """Is this checkbox, radio button, or option selected?"""
        return self.execute('GET', '/selected', 'isSelected')

    def get_attribute(self, name):
        """Get the value of an element property or attribute."""
        return self.execute('GET', '/attribute/%s' % name, 'getElementAttribute')

    @property
    def text(self):
        """Get the visible text for this element."""
        return self.execute('GET', '/text', 'text')

    @property
    def tag_name(self):
        """Get the tag name for this element."""
        return self.execute('GET', '/name', 'getElementTagName')

    def click(self):
        """Click on this element."""
        return self.execute('POST', '/click', 'click')

    def clear(self):
        """Clear the contents of this text input."""
        self.execute('POST', '/clear', 'clear')

    def send_keys(self, keys):
        """Send keys to this text input or body element."""
        if isinstance(keys, str):
            keys = [keys]
        self.execute('POST', '/value', 'sendKeys', {'value': keys})

    def to_json(self):
        """Serialize to the JSON wire protocol element representation."""
        # BUG FIX: this previously returned {'ELEMENT': self.id}, but no
        # ``id`` attribute existed (__init__ stores it as ``self._id``),
        # so to_json() always raised AttributeError.  The ``id`` property
        # above also exposes the identifier publicly.
        return {'ELEMENT': self._id}
|
mpl-2.0
|
40223245/2015cd_midterm
|
static/Brython3.1.1-20150328-091302/Lib/unittest/test/test_loader.py
|
738
|
49593
|
import sys
import types
import unittest
class Test_TestLoader(unittest.TestCase):
### Tests for TestLoader.loadTestsFromTestCase
################################################################
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
def test_loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# Make sure it does the right thing even if no tests were found
def test_loadTestsFromTestCase__no_matches(self):
class Foo(unittest.TestCase):
def foo_bar(self): pass
empty_suite = unittest.TestSuite()
loader = unittest.TestLoader()
self.assertEqual(loader.loadTestsFromTestCase(Foo), empty_suite)
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# What happens if loadTestsFromTestCase() is given an object
# that isn't a subclass of TestCase? Specifically, what happens
# if testCaseClass is a subclass of TestSuite?
#
# This is checked for specifically in the code, so we better add a
# test for it.
    def test_loadTestsFromTestCase__TestSuite_subclass(self):
        """loadTestsFromTestCase() must reject TestSuite subclasses with TypeError."""
        class NotATestCase(unittest.TestSuite):
            pass
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromTestCase(NotATestCase)
        except TypeError:
            pass
        else:
            self.fail('Should raise TypeError')
# "Return a suite of all tests cases contained in the TestCase-derived
# class testCaseClass"
#
# Make sure loadTestsFromTestCase() picks up the default test method
# name (as specified by TestCase), even though the method name does
# not match the default TestLoader.testMethodPrefix string
    def test_loadTestsFromTestCase__default_method_name(self):
        """The default 'runTest' method is found despite not matching testMethodPrefix."""
        class Foo(unittest.TestCase):
            def runTest(self):
                pass
        loader = unittest.TestLoader()
        # This has to be false for the test to succeed
        self.assertFalse('runTest'.startswith(loader.testMethodPrefix))
        suite = loader.loadTestsFromTestCase(Foo)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [Foo('runTest')])
################################################################
### /Tests for TestLoader.loadTestsFromTestCase
### Tests for TestLoader.loadTestsFromModule
################################################################
# "This method searches `module` for classes derived from TestCase"
    def test_loadTestsFromModule__TestCase_subclass(self):
        """loadTestsFromModule() finds TestCase subclasses bound as module attributes."""
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testcase_1 = MyTestCase
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromModule(m)
        self.assertIsInstance(suite, loader.suiteClass)
        expected = [loader.suiteClass([MyTestCase('test')])]
        self.assertEqual(list(suite), expected)
# "This method searches `module` for classes derived from TestCase"
#
# What happens if no tests are found (no TestCase instances)?
def test_loadTestsFromModule__no_TestCase_instances(self):
m = types.ModuleType('m')
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [])
# "This method searches `module` for classes derived from TestCase"
#
# What happens if no tests are found (TestCases instances, but no tests)?
    def test_loadTestsFromModule__no_TestCase_tests(self):
        """TestCase subclasses without test methods contribute empty sub-suites."""
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            pass
        m.testcase_1 = MyTestCase
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromModule(m)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [loader.suiteClass()])
# "This method searches `module` for classes derived from TestCase"s
#
# What happens if loadTestsFromModule() is given something other
# than a module?
#
# XXX Currently, it succeeds anyway. This flexibility
# should either be documented or loadTestsFromModule() should
# raise a TypeError
#
# XXX Certain people are using this behaviour. We'll add a test for it
    def test_loadTestsFromModule__not_a_module(self):
        """Non-module objects are accepted and scanned for TestCase attributes."""
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        class NotAModule(object):
            test_2 = MyTestCase
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromModule(NotAModule)
        reference = [unittest.TestSuite([MyTestCase('test')])]
        self.assertEqual(list(suite), reference)
# Check that loadTestsFromModule honors (or not) a module
# with a load_tests function.
    def test_loadTestsFromModule__load_tests(self):
        """A module-level load_tests(loader, tests, pattern) hook is honored unless disabled."""
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testcase_1 = MyTestCase
        load_tests_args = []
        def load_tests(loader, tests, pattern):
            self.assertIsInstance(tests, unittest.TestSuite)
            load_tests_args.extend((loader, tests, pattern))
            return tests
        m.load_tests = load_tests
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromModule(m)
        self.assertIsInstance(suite, unittest.TestSuite)
        # The hook must have been called exactly once with (loader, suite, None).
        self.assertEqual(load_tests_args, [loader, suite, None])
        load_tests_args = []
        # NOTE(review): use_load_tests was removed from loadTestsFromModule in
        # newer Python versions -- this file targets an older unittest.
        suite = loader.loadTestsFromModule(m, use_load_tests=False)
        self.assertEqual(load_tests_args, [])
    def test_loadTestsFromModule__faulty_load_tests(self):
        """An exception raised inside load_tests becomes a single synthetic failing test."""
        m = types.ModuleType('m')
        def load_tests(loader, tests, pattern):
            raise TypeError('some failure')
        m.load_tests = load_tests
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromModule(m)
        self.assertIsInstance(suite, unittest.TestSuite)
        self.assertEqual(suite.countTestCases(), 1)
        # The synthetic test re-raises the original error when run; its method
        # is named after the module ('m').
        test = list(suite)[0]
        self.assertRaisesRegex(TypeError, "some failure", test.m)
################################################################
### /Tests for TestLoader.loadTestsFromModule()
### Tests for TestLoader.loadTestsFromName()
################################################################
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Is ValueError raised in response to an empty name?
    def test_loadTestsFromName__empty_name(self):
        """An empty name raises ValueError('Empty module name')."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromName('')
        except ValueError as e:
            self.assertEqual(str(e), "Empty module name")
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the name contains invalid characters?
    def test_loadTestsFromName__malformed_name(self):
        """Malformed dotted names raise ValueError or ImportError."""
        loader = unittest.TestLoader()
        # XXX Should this raise ValueError or ImportError?
        try:
            loader.loadTestsFromName('abc () //')
        except ValueError:
            pass
        except ImportError:
            pass
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve ... to a
# module"
#
# What happens when a module by that name can't be found?
    def test_loadTestsFromName__unknown_module_name(self):
        """Unknown top-level module names raise ImportError."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromName('sdasfasfasdf')
        except ImportError as e:
            self.assertEqual(str(e), "No module named 'sdasfasfasdf'")
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise ImportError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the module is found, but the attribute can't?
    def test_loadTestsFromName__unknown_attr_name(self):
        """Unknown attributes on a resolvable module raise AttributeError."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromName('unittest.sdasfasfasdf')
        except AttributeError as e:
            self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when we provide the module, but the attribute can't be
# found?
    def test_loadTestsFromName__relative_unknown_name(self):
        """Unknown names relative to a provided module raise AttributeError."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromName('sdasfasfasdf', unittest)
        except AttributeError as e:
            self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromName raise ValueError when passed an empty
# name relative to a provided module?
#
# XXX Should probably raise a ValueError instead of an AttributeError
    def test_loadTestsFromName__relative_empty_name(self):
        """An empty name relative to a module currently raises AttributeError."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromName('', unittest)
        except AttributeError as e:
            pass
        else:
            self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when an impossible name is given, relative to the provided
# `module`?
    def test_loadTestsFromName__relative_malformed_name(self):
        """Malformed names relative to a module raise ValueError or AttributeError."""
        loader = unittest.TestLoader()
        # XXX Should this raise AttributeError or ValueError?
        try:
            loader.loadTestsFromName('abc () //', unittest)
        except ValueError:
            pass
        except AttributeError:
            pass
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise ValueError")
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromName raise TypeError when the `module` argument
# isn't a module object?
#
# XXX Accepts the not-a-module object, ignorning the object's type
# This should raise an exception or the method name should be changed
#
# XXX Some people are relying on this, so keep it for now
    def test_loadTestsFromName__relative_not_a_module(self):
        """A non-module 'module' argument is tolerated for attribute lookup."""
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        class NotAModule(object):
            test_2 = MyTestCase
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromName('test_2', NotAModule)
        reference = [MyTestCase('test')]
        self.assertEqual(list(suite), reference)
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does it raise an exception if the name resolves to an invalid
# object?
    def test_loadTestsFromName__relative_bad_object(self):
        """A name resolving to a non-test object raises TypeError."""
        m = types.ModuleType('m')
        m.testcase_1 = object()
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromName('testcase_1', m)
        except TypeError:
            pass
        else:
            self.fail("Should have raised TypeError")
# "The specifier name is a ``dotted name'' that may
# resolve either to ... a test case class"
    def test_loadTestsFromName__relative_TestCase_subclass(self):
        """A name resolving to a TestCase subclass loads its tests."""
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testcase_1 = MyTestCase
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromName('testcase_1', m)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
    def test_loadTestsFromName__relative_TestSuite(self):
        """A name resolving to a TestSuite instance is returned wrapped in suiteClass."""
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testsuite = unittest.TestSuite([MyTestCase('test')])
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromName('testsuite', m)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test method within a test case class"
    def test_loadTestsFromName__relative_testmethod(self):
        """A dotted name may address a single test method on a TestCase."""
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testcase_1 = MyTestCase
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromName('testcase_1.test', m)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does loadTestsFromName() raise the proper exception when trying to
# resolve "a test method within a test case class" that doesn't exist
# for the given name (relative to a provided module)?
    def test_loadTestsFromName__relative_invalid_testmethod(self):
        """A missing test method on a TestCase raises AttributeError."""
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testcase_1 = MyTestCase
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromName('testcase_1.testfoo', m)
        except AttributeError as e:
            self.assertEqual(str(e), "type object 'MyTestCase' has no attribute 'testfoo'")
        else:
            self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a ... TestSuite instance"
    def test_loadTestsFromName__callable__TestSuite(self):
        """A callable returning a TestSuite is invoked and its result wrapped."""
        m = types.ModuleType('m')
        testcase_1 = unittest.FunctionTestCase(lambda: None)
        testcase_2 = unittest.FunctionTestCase(lambda: None)
        def return_TestSuite():
            return unittest.TestSuite([testcase_1, testcase_2])
        m.return_TestSuite = return_TestSuite
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromName('return_TestSuite', m)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [testcase_1, testcase_2])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
    def test_loadTestsFromName__callable__TestCase_instance(self):
        """A callable returning a TestCase instance is invoked and wrapped."""
        m = types.ModuleType('m')
        testcase_1 = unittest.FunctionTestCase(lambda: None)
        def return_TestCase():
            return testcase_1
        m.return_TestCase = return_TestCase
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromName('return_TestCase', m)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [testcase_1])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
#*****************************************************************
#Override the suiteClass attribute to ensure that the suiteClass
#attribute is used
    def test_loadTestsFromName__callable__TestCase_instance_ProperSuiteClass(self):
        """The loader's custom suiteClass wraps results from callables."""
        class SubTestSuite(unittest.TestSuite):
            pass
        m = types.ModuleType('m')
        testcase_1 = unittest.FunctionTestCase(lambda: None)
        def return_TestCase():
            return testcase_1
        m.return_TestCase = return_TestCase
        loader = unittest.TestLoader()
        loader.suiteClass = SubTestSuite
        suite = loader.loadTestsFromName('return_TestCase', m)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [testcase_1])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test method within a test case class"
#*****************************************************************
#Override the suiteClass attribute to ensure that the suiteClass
#attribute is used
    def test_loadTestsFromName__relative_testmethod_ProperSuiteClass(self):
        """The loader's custom suiteClass wraps single-method lookups."""
        class SubTestSuite(unittest.TestSuite):
            pass
        m = types.ModuleType('m')
        class MyTestCase(unittest.TestCase):
            def test(self):
                pass
        m.testcase_1 = MyTestCase
        loader = unittest.TestLoader()
        loader.suiteClass=SubTestSuite
        suite = loader.loadTestsFromName('testcase_1.test', m)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [MyTestCase('test')])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# What happens if the callable returns something else?
    def test_loadTestsFromName__callable__wrong_type(self):
        """A callable returning a non-test object raises TypeError."""
        m = types.ModuleType('m')
        def return_wrong():
            return 6
        m.return_wrong = return_wrong
        loader = unittest.TestLoader()
        try:
            suite = loader.loadTestsFromName('return_wrong', m)
        except TypeError:
            pass
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise TypeError")
# "The specifier can refer to modules and packages which have not been
# imported; they will be imported as a side-effect"
    def test_loadTestsFromName__module_not_loaded(self):
        """Name resolution imports not-yet-loaded modules as a side effect."""
        # We're going to try to load this module as a side-effect, so it
        # better not be loaded before we try.
        #
        module_name = 'unittest.test.dummy'
        sys.modules.pop(module_name, None)
        loader = unittest.TestLoader()
        try:
            suite = loader.loadTestsFromName(module_name)
            self.assertIsInstance(suite, loader.suiteClass)
            self.assertEqual(list(suite), [])
            # module should now be loaded, thanks to loadTestsFromName()
            self.assertIn(module_name, sys.modules)
        finally:
            if module_name in sys.modules:
                del sys.modules[module_name]
################################################################
### Tests for TestLoader.loadTestsFromName()
### Tests for TestLoader.loadTestsFromNames()
################################################################
# "Similar to loadTestsFromName(), but takes a sequence of names rather
# than a single name."
#
# What happens if that sequence of names is empty?
def test_loadTestsFromNames__empty_name_list(self):
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames([])
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [])
# "Similar to loadTestsFromName(), but takes a sequence of names rather
# than a single name."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens if that sequence of names is empty?
#
# XXX Should this raise a ValueError or just return an empty TestSuite?
    def test_loadTestsFromNames__relative_empty_name_list(self):
        """An empty name list relative to a module still yields an empty suite."""
        loader = unittest.TestLoader()
        suite = loader.loadTestsFromNames([], unittest)
        self.assertIsInstance(suite, loader.suiteClass)
        self.assertEqual(list(suite), [])
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Is ValueError raised in response to an empty name?
    def test_loadTestsFromNames__empty_name(self):
        """An empty name in the list raises ValueError('Empty module name')."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromNames([''])
        except ValueError as e:
            self.assertEqual(str(e), "Empty module name")
        else:
            self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when presented with an impossible module name?
    def test_loadTestsFromNames__malformed_name(self):
        """Malformed names in the list raise ValueError or ImportError."""
        loader = unittest.TestLoader()
        # XXX Should this raise ValueError or ImportError?
        try:
            loader.loadTestsFromNames(['abc () //'])
        except ValueError:
            pass
        except ImportError:
            pass
        else:
            self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when no module can be found for the given name?
    def test_loadTestsFromNames__unknown_module_name(self):
        """Unknown module names in the list raise ImportError."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromNames(['sdasfasfasdf'])
        except ImportError as e:
            self.assertEqual(str(e), "No module named 'sdasfasfasdf'")
        else:
            self.fail("TestLoader.loadTestsFromNames failed to raise ImportError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# What happens when the module can be found, but not the attribute?
    def test_loadTestsFromNames__unknown_attr_name(self):
        """Unknown attributes raise AttributeError even alongside valid names."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromNames(['unittest.sdasfasfasdf', 'unittest'])
        except AttributeError as e:
            self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
        else:
            self.fail("TestLoader.loadTestsFromNames failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when given an unknown attribute on a specified `module`
# argument?
    def test_loadTestsFromNames__unknown_name_relative_1(self):
        """Unknown names relative to a provided module raise AttributeError."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromNames(['sdasfasfasdf'], unittest)
        except AttributeError as e:
            self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# Do unknown attributes (relative to a provided module) still raise an
# exception even in the presence of valid attribute names?
    def test_loadTestsFromNames__unknown_name_relative_2(self):
        """A bad name raises even when earlier names in the list are valid."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromNames(['TestCase', 'sdasfasfasdf'], unittest)
        except AttributeError as e:
            self.assertEqual(str(e), "'module' object has no attribute 'sdasfasfasdf'")
        else:
            self.fail("TestLoader.loadTestsFromName failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when faced with the empty string?
#
# XXX This currently raises AttributeError, though ValueError is probably
# more appropriate
    def test_loadTestsFromNames__relative_empty_name(self):
        """An empty relative name currently raises AttributeError (not ValueError)."""
        loader = unittest.TestLoader()
        try:
            loader.loadTestsFromNames([''], unittest)
        except AttributeError:
            pass
        else:
            self.fail("Failed to raise ValueError")
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
# ...
# "The method optionally resolves name relative to the given module"
#
# What happens when presented with an impossible attribute name?
def test_loadTestsFromNames__relative_malformed_name(self):
loader = unittest.TestLoader()
# XXX Should this raise AttributeError or ValueError?
try:
loader.loadTestsFromNames(['abc () //'], unittest)
except AttributeError:
pass
except ValueError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise ValueError")
# "The method optionally resolves name relative to the given module"
#
# Does loadTestsFromNames() make sure the provided `module` is in fact
# a module?
#
# XXX This validation is currently not done. This flexibility should
# either be documented or a TypeError should be raised.
def test_loadTestsFromNames__relative_not_a_module(self):
class MyTestCase(unittest.TestCase):
def test(self):
pass
class NotAModule(object):
test_2 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['test_2'], NotAModule)
reference = [unittest.TestSuite([MyTestCase('test')])]
self.assertEqual(list(suite), reference)
# "The specifier name is a ``dotted name'' that may resolve either to
# a module, a test case class, a TestSuite instance, a test method
# within a test case class, or a callable object which returns a
# TestCase or TestSuite instance."
#
# Does it raise an exception if the name resolves to an invalid
# object?
def test_loadTestsFromNames__relative_bad_object(self):
m = types.ModuleType('m')
m.testcase_1 = object()
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['testcase_1'], m)
except TypeError:
pass
else:
self.fail("Should have raised TypeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a test case class"
def test_loadTestsFromNames__relative_TestCase_subclass(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testcase_1'], m)
self.assertIsInstance(suite, loader.suiteClass)
expected = loader.suiteClass([MyTestCase('test')])
self.assertEqual(list(suite), [expected])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a TestSuite instance"
def test_loadTestsFromNames__relative_TestSuite(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testsuite = unittest.TestSuite([MyTestCase('test')])
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testsuite'], m)
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [m.testsuite])
# "The specifier name is a ``dotted name'' that may resolve ... to ... a
# test method within a test case class"
def test_loadTestsFromNames__relative_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['testcase_1.test'], m)
self.assertIsInstance(suite, loader.suiteClass)
ref_suite = unittest.TestSuite([MyTestCase('test')])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to ... a
# test method within a test case class"
#
# Does the method gracefully handle names that initially look like they
# resolve to "a test method within a test case class" but don't?
def test_loadTestsFromNames__relative_invalid_testmethod(self):
m = types.ModuleType('m')
class MyTestCase(unittest.TestCase):
def test(self):
pass
m.testcase_1 = MyTestCase
loader = unittest.TestLoader()
try:
loader.loadTestsFromNames(['testcase_1.testfoo'], m)
except AttributeError as e:
self.assertEqual(str(e), "type object 'MyTestCase' has no attribute 'testfoo'")
else:
self.fail("Failed to raise AttributeError")
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a ... TestSuite instance"
def test_loadTestsFromNames__callable__TestSuite(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
testcase_2 = unittest.FunctionTestCase(lambda: None)
def return_TestSuite():
return unittest.TestSuite([testcase_1, testcase_2])
m.return_TestSuite = return_TestSuite
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['return_TestSuite'], m)
self.assertIsInstance(suite, loader.suiteClass)
expected = unittest.TestSuite([testcase_1, testcase_2])
self.assertEqual(list(suite), [expected])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase ... instance"
def test_loadTestsFromNames__callable__TestCase_instance(self):
m = types.ModuleType('m')
testcase_1 = unittest.FunctionTestCase(lambda: None)
def return_TestCase():
return testcase_1
m.return_TestCase = return_TestCase
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['return_TestCase'], m)
self.assertIsInstance(suite, loader.suiteClass)
ref_suite = unittest.TestSuite([testcase_1])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# Are staticmethods handled correctly?
def test_loadTestsFromNames__callable__call_staticmethod(self):
m = types.ModuleType('m')
class Test1(unittest.TestCase):
def test(self):
pass
testcase_1 = Test1('test')
class Foo(unittest.TestCase):
@staticmethod
def foo():
return testcase_1
m.Foo = Foo
loader = unittest.TestLoader()
suite = loader.loadTestsFromNames(['Foo.foo'], m)
self.assertIsInstance(suite, loader.suiteClass)
ref_suite = unittest.TestSuite([testcase_1])
self.assertEqual(list(suite), [ref_suite])
# "The specifier name is a ``dotted name'' that may resolve ... to
# ... a callable object which returns a TestCase or TestSuite instance"
#
# What happens when the callable returns something else?
def test_loadTestsFromNames__callable__wrong_type(self):
m = types.ModuleType('m')
def return_wrong():
return 6
m.return_wrong = return_wrong
loader = unittest.TestLoader()
try:
suite = loader.loadTestsFromNames(['return_wrong'], m)
except TypeError:
pass
else:
self.fail("TestLoader.loadTestsFromNames failed to raise TypeError")
# "The specifier can refer to modules and packages which have not been
# imported; they will be imported as a side-effect"
def test_loadTestsFromNames__module_not_loaded(self):
# We're going to try to load this module as a side-effect, so it
# better not be loaded before we try.
#
module_name = 'unittest.test.dummy'
sys.modules.pop(module_name, None)
loader = unittest.TestLoader()
try:
suite = loader.loadTestsFromNames([module_name])
self.assertIsInstance(suite, loader.suiteClass)
self.assertEqual(list(suite), [unittest.TestSuite()])
# module should now be loaded, thanks to loadTestsFromName()
self.assertIn(module_name, sys.modules)
finally:
if module_name in sys.modules:
del sys.modules[module_name]
################################################################
### /Tests for TestLoader.loadTestsFromNames()
### Tests for TestLoader.getTestCaseNames()
################################################################
# "Return a sorted sequence of method names found within testCaseClass"
#
# Test.foobar is defined to make sure getTestCaseNames() respects
# loader.testMethodPrefix
def test_getTestCaseNames(self):
class Test(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foobar(self): pass
loader = unittest.TestLoader()
self.assertEqual(loader.getTestCaseNames(Test), ['test_1', 'test_2'])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Does getTestCaseNames() behave appropriately if no tests are found?
def test_getTestCaseNames__no_tests(self):
class Test(unittest.TestCase):
def foobar(self): pass
loader = unittest.TestLoader()
self.assertEqual(loader.getTestCaseNames(Test), [])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Are not-TestCases handled gracefully?
#
# XXX This should raise a TypeError, not return a list
#
# XXX It's too late in the 2.5 release cycle to fix this, but it should
# probably be revisited for 2.6
def test_getTestCaseNames__not_a_TestCase(self):
class BadCase(int):
def test_foo(self):
pass
loader = unittest.TestLoader()
names = loader.getTestCaseNames(BadCase)
self.assertEqual(names, ['test_foo'])
# "Return a sorted sequence of method names found within testCaseClass"
#
# Make sure inherited names are handled.
#
# TestP.foobar is defined to make sure getTestCaseNames() respects
# loader.testMethodPrefix
def test_getTestCaseNames__inheritance(self):
class TestP(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foobar(self): pass
class TestC(TestP):
def test_1(self): pass
def test_3(self): pass
loader = unittest.TestLoader()
names = ['test_1', 'test_2', 'test_3']
self.assertEqual(loader.getTestCaseNames(TestC), names)
################################################################
### /Tests for TestLoader.getTestCaseNames()
### Tests for TestLoader.testMethodPrefix
################################################################
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests_1 = unittest.TestSuite([Foo('foo_bar')])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromModule(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = [unittest.TestSuite([Foo('foo_bar')])]
tests_2 = [unittest.TestSuite([Foo('test_1'), Foo('test_2')])]
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(list(loader.loadTestsFromModule(m)), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(list(loader.loadTestsFromModule(m)), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromName(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = unittest.TestSuite([Foo('foo_bar')])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromName('Foo', m), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromName('Foo', m), tests_2)
# "String giving the prefix of method names which will be interpreted as
# test methods"
#
# Implicit in the documentation is that testMethodPrefix is respected by
# all loadTestsFrom* methods.
def test_testMethodPrefix__loadTestsFromNames(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests_1 = unittest.TestSuite([unittest.TestSuite([Foo('foo_bar')])])
tests_2 = unittest.TestSuite([Foo('test_1'), Foo('test_2')])
tests_2 = unittest.TestSuite([tests_2])
loader = unittest.TestLoader()
loader.testMethodPrefix = 'foo'
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests_1)
loader.testMethodPrefix = 'test'
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests_2)
# "The default value is 'test'"
def test_testMethodPrefix__default_value(self):
loader = unittest.TestLoader()
self.assertEqual(loader.testMethodPrefix, 'test')
################################################################
### /Tests for TestLoader.testMethodPrefix
### Tests for TestLoader.sortTestMethodsUsing
################################################################
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromTestCase(self):
def reversed_cmp(x, y):
return -((x > y) - (x < y))
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = loader.suiteClass([Foo('test_2'), Foo('test_1')])
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromModule(self):
def reversed_cmp(x, y):
return -((x > y) - (x < y))
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = [loader.suiteClass([Foo('test_2'), Foo('test_1')])]
self.assertEqual(list(loader.loadTestsFromModule(m)), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromName(self):
def reversed_cmp(x, y):
return -((x > y) - (x < y))
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = loader.suiteClass([Foo('test_2'), Foo('test_1')])
self.assertEqual(loader.loadTestsFromName('Foo', m), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames() and all the loadTestsFromX() methods"
def test_sortTestMethodsUsing__loadTestsFromNames(self):
def reversed_cmp(x, y):
return -((x > y) - (x < y))
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
m.Foo = Foo
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
tests = [loader.suiteClass([Foo('test_2'), Foo('test_1')])]
self.assertEqual(list(loader.loadTestsFromNames(['Foo'], m)), tests)
# "Function to be used to compare method names when sorting them in
# getTestCaseNames()"
#
# Does it actually affect getTestCaseNames()?
def test_sortTestMethodsUsing__getTestCaseNames(self):
def reversed_cmp(x, y):
return -((x > y) - (x < y))
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = reversed_cmp
test_names = ['test_2', 'test_1']
self.assertEqual(loader.getTestCaseNames(Foo), test_names)
# "The default value is the built-in cmp() function"
# Since cmp is now defunct, we simply verify that the results
# occur in the same order as they would with the default sort.
def test_sortTestMethodsUsing__default_value(self):
loader = unittest.TestLoader()
class Foo(unittest.TestCase):
def test_2(self): pass
def test_3(self): pass
def test_1(self): pass
test_names = ['test_2', 'test_3', 'test_1']
self.assertEqual(loader.getTestCaseNames(Foo), sorted(test_names))
# "it can be set to None to disable the sort."
#
# XXX How is this different from reassigning cmp? Are the tests returned
# in a random order or something? This behaviour should die
def test_sortTestMethodsUsing__None(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
loader = unittest.TestLoader()
loader.sortTestMethodsUsing = None
test_names = ['test_2', 'test_1']
self.assertEqual(set(loader.getTestCaseNames(Foo)), set(test_names))
################################################################
### /Tests for TestLoader.sortTestMethodsUsing
### Tests for TestLoader.suiteClass
################################################################
# "Callable object that constructs a test suite from a list of tests."
def test_suiteClass__loadTestsFromTestCase(self):
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
tests = [Foo('test_1'), Foo('test_2')]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromTestCase(Foo), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromModule(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [[Foo('test_1'), Foo('test_2')]]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromModule(m), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromName(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [Foo('test_1'), Foo('test_2')]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromName('Foo', m), tests)
# It is implicit in the documentation for TestLoader.suiteClass that
# all TestLoader.loadTestsFrom* methods respect it. Let's make sure
def test_suiteClass__loadTestsFromNames(self):
m = types.ModuleType('m')
class Foo(unittest.TestCase):
def test_1(self): pass
def test_2(self): pass
def foo_bar(self): pass
m.Foo = Foo
tests = [[Foo('test_1'), Foo('test_2')]]
loader = unittest.TestLoader()
loader.suiteClass = list
self.assertEqual(loader.loadTestsFromNames(['Foo'], m), tests)
# "The default value is the TestSuite class"
def test_suiteClass__default_value(self):
loader = unittest.TestLoader()
self.assertTrue(loader.suiteClass is unittest.TestSuite)
|
gpl-3.0
|
arthru/OpenUpgrade
|
addons/hr_applicant_document/models/hr_applicant.py
|
385
|
1210
|
# -*- coding: utf-8 -*-
from openerp.osv import fields, osv
class hr_applicant(osv.Model):
    """Extend hr.applicant with a searchable index of its attachments.

    Adds a functional ``index_content`` field that concatenates the
    ``index_content`` of every ``ir.attachment`` linked to an applicant,
    so applicants can be searched by the text extracted from their
    documents (CVs, cover letters, ...).
    """
    _inherit = 'hr.applicant'

    def _get_index_content(self, cr, uid, ids, fields, args, context=None):
        """Functional-field getter: for each applicant id, concatenate the
        indexed text of all attachments pointing at it.

        :return: dict mapping applicant id -> concatenated index text
        """
        res = dict.fromkeys(ids, '')
        Attachment = self.pool.get('ir.attachment')
        attachment_ids = Attachment.search(
            cr, uid,
            [('res_model', '=', 'hr.applicant'), ('res_id', 'in', ids)],
            context=context)
        for attachment in Attachment.browse(cr, uid, attachment_ids, context=context):
            res[attachment.res_id] += attachment.index_content or ''
        return res

    def _content_search(self, cr, user, obj, name, args, context=None):
        """fnct_search for ``index_content``: translate search criteria on
        the field into a domain on the applicants owning matching
        attachments.

        Fix: the original pre-bound ``record_ids = set()`` and immediately
        overwrote it below; the dead assignment is removed.
        """
        Attachment = self.pool.get('ir.attachment')
        # Restrict the caller's criteria to applicant attachments.
        args = ['&'] + args + [('res_model', '=', 'hr.applicant')]
        att_ids = Attachment.search(cr, user, args, context=context)
        record_ids = set(att.res_id
                         for att in Attachment.browse(cr, user, att_ids, context=context))
        return [('id', 'in', list(record_ids))]

    _columns = {
        'index_content': fields.function(
            _get_index_content, fnct_search=_content_search,
            string='Index Content', type="text"),
    }
|
agpl-3.0
|
andyclymer/ControlBoard
|
ControlBoard.roboFontExt/lib/modules/pyFirmata-master/pyfirmata/mockup.py
|
4
|
3946
|
from collections import deque
import pyfirmata
class MockupSerial(deque):
    """
    A Mockup object for python's Serial. Functions as a fifo-stack. Push to
    it with ``write``, read from it with ``read``.
    """
    def __init__(self, port, baudrate, timeout=0.02):
        super(MockupSerial, self).__init__()
        self.port = port or 'somewhere'
        # Kept only for API compatibility with serial.Serial; the mockup
        # has no real line speed or timing.
        self.baudrate = baudrate
        self.timeout = timeout

    def read(self, count=1):
        """Pop up to ``count`` bytes and return them as a bytearray.

        Returns an empty bytearray when the queue is exhausted, like a
        serial read that timed out.
        """
        if count > 1:
            val = []
            for i in range(count):
                try:
                    val.append(self.popleft())
                except IndexError:
                    break
        else:
            try:
                val = self.popleft()
            except IndexError:
                val = bytearray()
        val = [val] if not hasattr(val, '__iter__') else val
        return bytearray(val)

    def write(self, value):
        """
        Appends bytes flat to the deque. So iterables will be unpacked.

        The throwaway ``bytearray(...)`` conversions validate that the
        value is byte-compatible (they raise for values outside 0..255)
        before anything is queued; their result is intentionally unused.
        """
        if hasattr(value, '__iter__'):
            bytearray(value)  # validation only
            self.extend(value)
        else:
            bytearray([value])  # validation only
            self.append(value)

    def close(self):
        """Drop any queued data."""
        self.clear()

    def inWaiting(self):
        """Number of bytes waiting to be read."""
        return len(self)
class MockupBoard(pyfirmata.Board):
    """A pyfirmata Board whose serial link is a :class:`MockupSerial`."""

    def __init__(self, port, layout, values_dict={}):
        # NOTE(review): the mutable default argument is shared between
        # calls; preserved here for identical behaviour.
        self.sp = MockupSerial(port, 57600)
        self.setup_layout(layout)
        self.values_dict = values_dict
        self.id = 1

    def reset_taken(self):
        """Mark every analog and digital pin as available again."""
        for pin_type in ('analog', 'digital'):
            for key in self.taken[pin_type]:
                self.taken[pin_type][key] = False

    def update_values_dict(self):
        """Propagate the board's values_dict to all ports and pins."""
        for digital_port in self.digital_ports:
            digital_port.values_dict = self.values_dict
            digital_port.update_values_dict()
        for analog_pin in self.analog:
            analog_pin.values_dict = self.values_dict
class MockupPort(pyfirmata.Port):
    """A digital port of eight :class:`MockupPin` objects.

    Does not call ``pyfirmata.Port.__init__``; it builds its pin list
    directly so nothing is written to the (mock) serial connection.
    """

    def __init__(self, board, port_number):
        self.board = board
        self.port_number = port_number
        self.reporting = False

        self.pins = []
        for i in range(8):
            # Global pin number = offset within this 8-pin port.
            pin_nr = i + self.port_number * 8
            self.pins.append(MockupPin(self.board, pin_nr, type=pyfirmata.DIGITAL, port=self))

    def update_values_dict(self):
        # Share the board-supplied values_dict with every pin on the port.
        for pin in self.pins:
            pin.values_dict = self.values_dict
class MockupPin(pyfirmata.Pin):
    """A pyfirmata Pin that can be pre-loaded with values for tests.

    ``values_dict`` maps ``'a'`` (analog) and ``'d'`` (digital) to
    per-pin-number value tables which ``read`` falls back to when no
    value has been written yet.
    """

    def __init__(self, *args, **kwargs):
        # kwargs.pop replaces the original get-then-del dance.
        self.values_dict = kwargs.pop('values_dict', {})
        super(MockupPin, self).__init__(*args, **kwargs)

    def read(self):
        """Return the written value, else the preset from values_dict."""
        if self.value is None:
            try:
                # Digital pins (those with a port) live under 'd',
                # analog pins under 'a'.
                type = self.port and 'd' or 'a'
                return self.values_dict[type][self.pin_number]
            except KeyError:
                return None
        else:
            return self.value

    def get_in_output(self):
        if not self.port and not self.mode:  # analog input
            return 'i'
        else:
            return 'o'

    def set_active(self, active):
        self.is_active = active

    def get_active(self):
        return self.is_active

    def write(self, value):
        """Store ``value`` on the pin, enforcing pin-mode rules."""
        if self.mode == pyfirmata.UNAVAILABLE:
            # Bug fix: the original message said "Cannot read from pin"
            # even though this is the write path.
            raise IOError("Cannot write to pin {0}".format(self.pin_number))
        if self.mode == pyfirmata.INPUT:
            raise IOError("{0} pin {1} is not an output".format(
                self.port and "Digital" or "Analog", self.get_pin_number()))
        if not self.port:
            raise AttributeError("AnalogPin instance has no attribute 'write'")
        # if value != self.read():
        self.value = value
class Iterator(object):
    """Do-nothing stand-in for pyfirmata's background iterator thread."""

    def __init__(self, *args, **kwargs):
        pass

    def start(self):
        """No background thread is started in the mockup."""
        pass

    def stop(self):
        """Nothing to stop."""
        pass
if __name__ == '__main__':
import doctest
doctest.testmod()
# TODO make these unittests as this doesn't work due to relative imports
|
mit
|
manipopopo/tensorflow
|
tensorflow/python/util/decorator_utils_test.py
|
139
|
4197
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""decorator_utils tests."""
# pylint: disable=unused-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import decorator_utils
def _test_function(unused_arg=0):
pass
class GetQualifiedNameTest(test.TestCase):
  """Tests for decorator_utils.get_qualified_name."""

  def test_method(self):
    # Methods are qualified with the name of their defining class.
    self.assertEqual(
        "GetQualifiedNameTest.test_method",
        decorator_utils.get_qualified_name(GetQualifiedNameTest.test_method))

  def test_function(self):
    # Plain module-level functions are returned by bare name.
    self.assertEqual("_test_function",
                     decorator_utils.get_qualified_name(_test_function))
class AddNoticeToDocstringTest(test.TestCase):
  """Tests for decorator_utils.add_notice_to_docstring.

  Every case uses the same notice ("Go away"), instructions, suffix and
  no-docstring fallback; only the shape of the input docstring varies.
  """

  def _check(self, doc, expected):
    # Helper: run add_notice_to_docstring with fixed boilerplate args.
    self.assertEqual(
        decorator_utils.add_notice_to_docstring(
            doc=doc,
            instructions="Instructions",
            no_doc_str="Nothing here",
            suffix_str="(suffix)",
            notice=["Go away"]),
        expected)

  def test_regular(self):
    # All indentation variants must normalize to the same output.
    expected = ("Brief (suffix)\n\nGo away\nInstructions\n\nDocstring\n\n"
                "Args:\n arg1: desc")
    # No indent for main docstring
    self._check("Brief\n\nDocstring\n\nArgs:\n arg1: desc", expected)
    # 2 space indent for main docstring, blank lines not indented
    self._check("Brief\n\n Docstring\n\n Args:\n arg1: desc", expected)
    # 2 space indent for main docstring, blank lines indented as well.
    self._check("Brief\n \n Docstring\n \n Args:\n arg1: desc", expected)
    # No indent for main docstring, first line blank.
    self._check("\n Brief\n \n Docstring\n \n Args:\n arg1: desc",
                expected)
    # 2 space indent, first line blank.
    self._check("\n Brief\n \n Docstring\n \n Args:\n arg1: desc",
                expected)

  def test_brief_only(self):
    # A one-line docstring still gets the suffix and the notice appended.
    expected = "Brief (suffix)\n\nGo away\nInstructions"
    self._check("Brief", expected)
    self._check("Brief\n", expected)
    self._check("Brief\n ", expected)
    self._check("\nBrief\n ", expected)
    self._check("\n Brief\n ", expected)

  def test_no_docstring(self):
    # None or empty input falls back to no_doc_str.
    expected = "Nothing here\n\nGo away\nInstructions"
    self._check(None, expected)
    self._check("", expected)

  def test_no_empty_line(self):
    # Docstrings without a blank line after the brief summary.
    expected = "Brief (suffix)\n\nGo away\nInstructions\n\nDocstring"
    # No second line indent
    self._check("Brief\nDocstring", expected)
    # 2 space second line indent
    self._check("Brief\n Docstring", expected)
    # No second line indent, first line blank
    self._check("\nBrief\nDocstring", expected)
    # 2 space second line indent, first line blank
    self._check("\n Brief\n Docstring", expected)
class ValidateCallableTest(test.TestCase):
  """validate_callable must accept any callable and reject the rest."""

  def test_function(self):
    decorator_utils.validate_callable(_test_function, "test")

  def test_method(self):
    # Bound methods are callable too.
    decorator_utils.validate_callable(self.test_method, "test")

  def test_callable(self):
    # Instances implementing __call__ count as callables.
    class TestClass(object):

      def __call__(self):
        pass

    decorator_utils.validate_callable(TestClass(), "test")

  def test_partial(self):
    # functools.partial objects are callable but are not functions.
    partial = functools.partial(_test_function, unused_arg=7)
    decorator_utils.validate_callable(partial, "test")

  def test_fail_non_callable(self):
    # A plain int is not callable and must be rejected with ValueError.
    x = 0
    self.assertRaises(ValueError, decorator_utils.validate_callable, x, "test")
if __name__ == "__main__":
test.main()
|
apache-2.0
|
nzavagli/UnrealPy
|
UnrealPyEmbed/Source/Python/Lib/python27/plat-mac/applesingle.py
|
42
|
4957
|
r"""Routines to decode AppleSingle files
"""
from warnings import warnpy3k
warnpy3k("In 3.x, the applesingle module is removed.", stacklevel=2)
import struct
import sys
try:
    import MacOS
    import Carbon.File
except:
    # Non-Mac fallback: minimal stand-ins so this module stays importable
    # on other platforms. MacOS.openrf emulates a resource fork by using
    # a separate '<path>.rsrc' file.
    class MacOS:
        def openrf(path, mode):
            return open(path + '.rsrc', mode)
        openrf = classmethod(openrf)

    # Empty placeholder types matching the names decode() tests with
    # isinstance(); none of them can actually be constructed usefully.
    class Carbon:
        class File:
            class FSSpec:
                pass
            class FSRef:
                pass
            class Alias:
                pass
# all of the errors in this module are really errors in the input
# so I think it should test positive against ValueError.
class Error(ValueError):
    """Raised for any malformed or truncated AppleSingle input."""
    pass
# File header format: magic, version, unused, number of entries
AS_HEADER_FORMAT=">LL16sh"
AS_HEADER_LENGTH=26
# The flag words for AppleSingle
AS_MAGIC=0x00051600
AS_VERSION=0x00020000
# Entry header format: id, offset, length
AS_ENTRY_FORMAT=">lll"
AS_ENTRY_LENGTH=12
# The id values
AS_DATAFORK=1
AS_RESOURCEFORK=2
AS_IGNORE=(3,4,5,6,8,9,10,11,12,13,14,15)
class AppleSingle(object):
datafork = None
resourcefork = None
def __init__(self, fileobj, verbose=False):
header = fileobj.read(AS_HEADER_LENGTH)
try:
magic, version, ig, nentry = struct.unpack(AS_HEADER_FORMAT, header)
except ValueError, arg:
raise Error, "Unpack header error: %s" % (arg,)
if verbose:
print 'Magic: 0x%8.8x' % (magic,)
print 'Version: 0x%8.8x' % (version,)
print 'Entries: %d' % (nentry,)
if magic != AS_MAGIC:
raise Error, "Unknown AppleSingle magic number 0x%8.8x" % (magic,)
if version != AS_VERSION:
raise Error, "Unknown AppleSingle version number 0x%8.8x" % (version,)
if nentry <= 0:
raise Error, "AppleSingle file contains no forks"
headers = [fileobj.read(AS_ENTRY_LENGTH) for i in xrange(nentry)]
self.forks = []
for hdr in headers:
try:
restype, offset, length = struct.unpack(AS_ENTRY_FORMAT, hdr)
except ValueError, arg:
raise Error, "Unpack entry error: %s" % (arg,)
if verbose:
print "Fork %d, offset %d, length %d" % (restype, offset, length)
fileobj.seek(offset)
data = fileobj.read(length)
if len(data) != length:
raise Error, "Short read: expected %d bytes got %d" % (length, len(data))
self.forks.append((restype, data))
if restype == AS_DATAFORK:
self.datafork = data
elif restype == AS_RESOURCEFORK:
self.resourcefork = data
def tofile(self, path, resonly=False):
outfile = open(path, 'wb')
data = False
if resonly:
if self.resourcefork is None:
raise Error, "No resource fork found"
fp = open(path, 'wb')
fp.write(self.resourcefork)
fp.close()
elif (self.resourcefork is None and self.datafork is None):
raise Error, "No useful forks found"
else:
if self.datafork is not None:
fp = open(path, 'wb')
fp.write(self.datafork)
fp.close()
if self.resourcefork is not None:
fp = MacOS.openrf(path, '*wb')
fp.write(self.resourcefork)
fp.close()
def decode(infile, outpath, resonly=False, verbose=False):
    """decode(infile, outpath [, resonly=False, verbose=False])
    Creates a decoded file from an AppleSingle encoded file.
    If resonly is True, then it will create a regular file at
    outpath containing only the resource fork from infile.
    Otherwise it will create an AppleDouble file at outpath
    with the data and resource forks from infile. On platforms
    without the MacOS module, it will create inpath and inpath+'.rsrc'
    with the data and resource forks respectively.
    """
    if not hasattr(infile, 'read'):
        # Accept aliases, FSSpec/FSRef objects or path strings in
        # addition to already-open file objects.
        if isinstance(infile, Carbon.File.Alias):
            infile = infile.ResolveAlias()[0]

        # Older Carbon builds expose FSSpec; newer ones only FSRef.
        if hasattr(Carbon.File, "FSSpec"):
            if isinstance(infile, (Carbon.File.FSSpec, Carbon.File.FSRef)):
                infile = infile.as_pathname()
        else:
            if isinstance(infile, Carbon.File.FSRef):
                infile = infile.as_pathname()
        infile = open(infile, 'rb')

    asfile = AppleSingle(infile, verbose=verbose)
    asfile.tofile(outpath, resonly=resonly)
def _test():
    """Command line entry point: applesingle.py [-r] infile outfile."""
    # Precedence note: parsed as `len < 3 or (argv[1] == '-r' and len != 4)`,
    # which is the intended usage check for both forms.
    if len(sys.argv) < 3 or sys.argv[1] == '-r' and len(sys.argv) != 4:
        print 'Usage: applesingle.py [-r] applesinglefile decodedfile'
        sys.exit(1)
    if sys.argv[1] == '-r':
        resonly = True
        del sys.argv[1]
    else:
        resonly = False
    decode(sys.argv[1], sys.argv[2], resonly=resonly)
if __name__ == '__main__':
_test()
|
mit
|
Poorchop/hexchat-scripts
|
adfilter.py
|
2
|
4869
|
import re
import hexchat
__module_name__ = "AdFilter"
__module_author__ = "Poorchop"
__module_version__ = "0.4"
__module_description__ = "Move fserve advertisements to a separate tab"
# Add channels from which you would like to filter ads, e.g. channels = ("#channel", "#topsecret")
channels = ()
# Customize the name of the tab to your liking
tab_name = "(Ads)"
bwi_regex = re.compile("^(\[BWI\])\sType\s+\W[\w\\\[\]{}^`|-]+\s+to\sget\sthe\slist\sof\s+[\d,]+\s+files\s\(["
"\d\.]+\s+[A-Z]+\)\.\s+Updated\son\s+[\d+-]+\s+[\d:]+\.?\s+Total\sSent\(channel\):\s+["
"\d,]+\s+\([\d\.]+\s+[A-Z]+(\))$")
irssi_fserve_regex = re.compile("^(\(FServe Online\))\s+Note:\(Type\s+\W[\w\\\[\]{}^`|-]+\s+for\s+filelist\)\s+Trigger:"
"\(/ctcp\s+.*?\)\s+On\s+FServe:\(.*?\)\s+Sends:\(")
iterati_regex = re.compile(".*?Type\s+\W[\w\\\[\]{}^`|-]+\s+to\sget\smy\slist(\s|\.)?.*?\s+(Upd|(C|c)re)ated\son\s"
"+[SMTWF][a-z]{2}\s+[A-Z][a-z]+\s+[\d\s:]+(\.).*?")
ns_fserve_regex = re.compile("^(Type)\s+\W.*?for\smy\stiny\slist.*?[\d,]+\s+book(s)?\sadded\son\s+[\d\.]+\s+:\s+[\w"
"\W]+")
omenserve_regex = re.compile(".*?Type:\s+\W[\w\\\[\]{}^`|-]+\s+For\sMy\sList\sOf:\s+[\d,]+\s+Files\s+.*?Slots:\s+\d+"
"/\d+\s+.*?Queued:\s+\d+\s+.*?Speed:\s+[\d,]+cps\s+.*?Next:\s+\w+\s+.*?Served:\s+[\d,]+"
"\s+.*?List:\s+([A-Z][a-z]+\s+\w+\s+.*?Search:\s+[A-Z]{2,3})?(\s+.*?Mode:\s+\w+\s+.*?$)"
"?")
os_limits_regex = re.compile("^(\s+)?(Sent:)\s+.*?To:\s+.*?Total\s+Sent:\s+[\d,]+\s+Files.*?Yesterday:\s[\d,]+\s+Fil"
"es.*?Today.*?:\s+[\d,]+\s+Files.*?OS-Limits\s+(v[\d\.]+)$")
single_file_regex = re.compile("^(\s+)?Type:\s+\W[\w\\\[\]{}^`|-]+.*?To\sGet\sThis\s+.*?(File|MP3)$")
unknown_1_regex = re.compile("^(Type)\s+\W[\w\\\[\]{}^`|-]+\s+for\smy\slist\sof\s+\([\d,]+\)\s+Ebooks\screated\son\s"
"+[\d-]+\s+([\d:]+)$")
unknown_2_regex = re.compile("^(\s*((\(\)\(\)\()|(<><><)|(€æ{@}æ€)))\s+.*?\s+(((\)\(\)\(\))|(><><>)|(€æ{@}æ€))\s*)$")
unknown_3_regex = re.compile(".*?For\sMy\sList.*?\([\w:-]+\)(\s\([\w\d:\.]+\))?\sand\sDCC\sStatus,\stype\s[@\w\\\[\]"
"{}^`|-]+\sand\s[@\w\\\[\]{}^`|-]+\.\s.*?Slots.*?Ques\sTaken.*?Next\sSend:.*?CPS\sin\sU"
"se:.*?Highest\sCps\sRecord:.*?Total\sFile('s)?\sServed:.*?")
upp_regex = re.compile("^(<File\sServer\sOnline>)\sTrigger:.*?(\.<UPP>\.)$")
ad_lst = (bwi_regex, irssi_fserve_regex, iterati_regex, ns_fserve_regex,
omenserve_regex, os_limits_regex, single_file_regex, unknown_1_regex,
unknown_2_regex, unknown_3_regex, upp_regex)
server_nicks = []
moved = False
def find_adtab():
    """Return the context of the ad tab, creating the tab if it is missing."""
    context = hexchat.find_context(channel=tab_name)
    if context is not None:
        return context
    # The tab does not exist yet; open it as a disconnected server tab.
    hexchat.command("NEWSERVER -noconnect {0}".format(tab_name))
    return hexchat.find_context(channel=tab_name)
def adfilter_cb(word, word_eol, userdata):
    """Move fileserver ads posted in watched channels into the ad tab.

    Returns hexchat.EAT_ALL when the message was redirected, else None.
    """
    global server_nicks
    # Pad the event word list out to four entries so indexing is safe.
    word = (list(word) + [""] * 4)[:4]
    channel = hexchat.get_info("channel")
    stripped_msg = hexchat.strip(word[1], -1, 3)
    if channel in channels and any(ad.match(stripped_msg) for ad in ad_lst):
        # Keep a list of server nicknames in case they are needed for
        # filtering purposes.
        if word[0] not in server_nicks:
            server_nicks.append(word[0])
        find_adtab().prnt("{0}\t\00318<{4}{3}{1}>\00399 {2}".format(channel, *word))
        return hexchat.EAT_ALL
def ctcpfilter_cb(word, word_eol, userdata):
    """Redirect SLOTS/MP3 CTCP announcements in watched channels to the ad tab.

    word: [ctcp message, nick, channel]. Returns hexchat.EAT_ALL when the
    event was redirected, else None.
    """
    global moved
    # Re-entrancy guard: emit_print below re-fires this same hook.
    if moved:
        return
    # BUG FIX: the original condition
    #   a == "SLOTS" or a == "MP3" and word[2] in channels
    # parsed as a == "SLOTS" or (a == "MP3" and ...), so SLOTS announcements
    # were redirected regardless of the channel whitelist. Both CTCP types
    # are now gated on the channel. Also guard against an empty payload,
    # which previously raised IndexError on split()[0].
    parts = word[0].split()
    if parts and parts[0] in ("SLOTS", "MP3") and word[2] in channels:
        ad_context = find_adtab()
        moved = True
        ad_context.emit_print("CTCP Generic to Channel", *word)
        moved = False
        return hexchat.EAT_ALL
def unload_cb(userdata):
    """Close the ad tab if it is open, then announce the unload."""
    for chan in hexchat.get_list("channels"):
        # type == 1 is a server tab; only our private ad tab matches.
        if chan.type != 1 or chan.channel != tab_name:
            continue
        hexchat.find_context(channel=tab_name).command("CLOSE")
    hexchat.prnt(__module_name__ + " version " + __module_version__ + " unloaded")
# Run the ad filter before normal display wins (high priority); also watch
# channel-directed CTCPs and clean up the ad tab on unload.
hexchat.hook_print("Channel Message", adfilter_cb, priority=hexchat.PRI_HIGH)
hexchat.hook_print("CTCP Generic to Channel", ctcpfilter_cb)
hexchat.hook_unload(unload_cb)
hexchat.prnt(__module_name__ + " version " + __module_version__ + " loaded")
|
mit
|
mcking49/apache-flask
|
Python/Lib/site-packages/wrapt/importer.py
|
8
|
7727
|
"""This module implements a post import hook mechanism styled after what is
described in PEP-369. Note that it doesn't cope with modules being reloaded.
"""
import sys
import threading
# Python 2/3 compatibility shims.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
if PY3:
    import importlib
    string_types = str,
else:
    string_types = basestring,
from .decorators import synchronized
# The dictionary registering any post import hooks to be triggered once
# the target module has been imported. Once a module has been imported
# and the hooks fired, the list of hooks recorded against the target
# module will be truncated but the list left in the dictionary. This
# acts as a flag to indicate that the module had already been imported.
_post_import_hooks = {}
# Whether ImportHookFinder has been installed on sys.meta_path yet.
_post_import_hooks_init = False
# Guards all access to the registry above (via @synchronized).
_post_import_hooks_lock = threading.RLock()
# Register a new post import hook for the target module name. This
# differs from the PEP-369 implementation in that it also allows the
# hook function to be specified as a string consisting of the name of
# the callback in the form 'module:function'. This will result in a
# proxy callback being registered which will defer loading of the
# specified module containing the callback function until required.
def _create_import_hook_from_string(name):
    """Return a deferred import hook for a callback named 'module:attr.path'.

    The callback's module is only imported when the hook actually fires.
    """
    def import_hook(module):
        module_name, function = name.split(':')
        __import__(module_name)
        target = sys.modules[module_name]
        for attr in function.split('.'):
            target = getattr(target, attr)
        return target(module)
    return import_hook
@synchronized(_post_import_hooks_lock)
def register_post_import_hook(hook, name):
    """Register *hook* to run once the module called *name* is imported.

    *hook* may be a callable taking the imported module, or a string of
    the form 'module:function' naming a callback to be resolved lazily.
    If the target module is already imported, the hook fires immediately.
    """
    # Create a deferred import hook if hook is a string name rather than
    # a callable function.
    if isinstance(hook, string_types):
        hook = _create_import_hook_from_string(hook)
    # Automatically install the import hook finder if it has not already
    # been installed.
    global _post_import_hooks_init
    if not _post_import_hooks_init:
        _post_import_hooks_init = True
        sys.meta_path.insert(0, ImportHookFinder())
    # Determine if any prior registration of a post import hook for
    # the target modules has occurred and act appropriately.
    hooks = _post_import_hooks.get(name, None)
    if hooks is None:
        # No prior registration of post import hooks for the target
        # module. We need to check whether the module has already been
        # imported. If it has we fire the hook immediately and add an
        # empty list to the registry to indicate that the module has
        # already been imported and hooks have fired. Otherwise add
        # the post import hook to the registry.
        module = sys.modules.get(name, None)
        if module is not None:
            _post_import_hooks[name] = []
            hook(module)
        else:
            _post_import_hooks[name] = [hook]
    elif hooks == []:
        # A prior registration of post import hooks for the target
        # module was done and the hooks already fired. Fire the hook
        # immediately.
        module = sys.modules[name]
        hook(module)
    else:
        # A prior registration of post import hooks for the target
        # module was done but the module has not yet been imported.
        _post_import_hooks[name].append(hook)
# Register post import hooks defined as package entry points.
def _create_import_hook_from_entrypoint(entrypoint):
    """Return a deferred import hook built from a pkg_resources entry point.

    The entry point's module is imported only when the hook fires.
    """
    def import_hook(module):
        __import__(entrypoint.module_name)
        target = sys.modules[entrypoint.module_name]
        for attr in entrypoint.attrs:
            target = getattr(target, attr)
        return target(module)
    return import_hook
def discover_post_import_hooks(group):
    """Register hooks advertised as setuptools entry points under *group*.

    Silently does nothing when pkg_resources is unavailable.
    """
    try:
        import pkg_resources
    except ImportError:
        return
    for entrypoint in pkg_resources.iter_entry_points(group=group):
        register_post_import_hook(
            _create_import_hook_from_entrypoint(entrypoint), entrypoint.name)
# Indicate that a module has been loaded. Any post import hooks which
# were registered against the target module will be invoked. If an
# exception is raised in any of the post import hooks, that will cause
# the import of the target module to fail.
@synchronized(_post_import_hooks_lock)
def notify_module_loaded(module):
    """Invoke any post import hooks registered against *module*.

    The hook list is emptied (but kept in the registry) so it doubles as
    a flag that the module has already been imported.
    """
    name = getattr(module, '__name__', None)
    pending = _post_import_hooks.get(name)
    if not pending:
        return
    _post_import_hooks[name] = []
    for callback in pending:
        callback(module)
# A custom module import finder. This intercepts attempts to import
# modules and watches out for attempts to import target modules of
# interest. When a module of interest is imported, then any post import
# hooks which are registered will be invoked.
class _ImportHookLoader:
    """Loader returned on Python 2, where __import__() in the finder has
    already imported the module; it just fires the post import hooks."""

    def load_module(self, fullname):
        loaded = sys.modules[fullname]
        notify_module_loaded(loaded)
        return loaded
class _ImportHookChainedLoader:
    """Wraps the real loader so the post import hooks fire after loading."""

    def __init__(self, loader):
        self.loader = loader

    def load_module(self, fullname):
        loaded = self.loader.load_module(fullname)
        notify_module_loaded(loaded)
        return loaded
class ImportHookFinder:
    """sys.meta_path finder that fires post import hooks for watched modules."""

    def __init__(self):
        # Modules currently being imported through us; prevents recursion.
        self.in_progress = {}
    @synchronized(_post_import_hooks_lock)
    def find_module(self, fullname, path=None):
        # If the module being imported is not one we have registered
        # post import hooks for, we can return immediately. We will
        # take no further part in the importing of this module.
        if not fullname in _post_import_hooks:
            return None
        # When we are interested in a specific module, we will call back
        # into the import system a second time to defer to the import
        # finder that is supposed to handle the importing of the module.
        # We set an in progress flag for the target module so that on
        # the second time through we don't trigger another call back
        # into the import system and cause a infinite loop.
        if fullname in self.in_progress:
            return None
        self.in_progress[fullname] = True
        # Now call back into the import system again.
        try:
            if PY3:
                # For Python 3 we need to use find_loader() from
                # the importlib module. It doesn't actually
                # import the target module and only finds the
                # loader. If a loader is found, we need to return
                # our own loader which will then in turn call the
                # real loader to import the module and invoke the
                # post import hooks.
                loader = importlib.find_loader(fullname, path)
                if loader:
                    return _ImportHookChainedLoader(loader)
            else:
                # For Python 2 we don't have much choice but to
                # call back in to __import__(). This will
                # actually cause the module to be imported. If no
                # module could be found then ImportError will be
                # raised. Otherwise we return a loader which
                # returns the already loaded module and invokes
                # the post import hooks.
                __import__(fullname)
                return _ImportHookLoader()
        finally:
            del self.in_progress[fullname]
# Decorator for marking that a function should be called as a post
# import hook when the target module is imported.
def when_imported(name):
    """Decorator: run the decorated function once module *name* is imported."""
    def decorator(func):
        register_post_import_hook(func, name)
        return func
    return decorator
|
mit
|
robinro/ansible-modules-extras
|
windows/win_timezone.py
|
71
|
1421
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Phil Schwartz <schwartzmx@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_timezone
version_added: "2.1"
short_description: Sets Windows machine timezone
description:
- Sets machine time to the specified timezone, the module will check if the provided timezone is supported on the machine.
options:
timezone:
description:
- Timezone to set to. Example Central Standard Time
required: true
default: null
aliases: []
author: Phil Schwartz
'''
EXAMPLES = '''
# Set machine's timezone to Central Standard Time
win_timezone:
timezone: "Central Standard Time"
'''
RETURN = '''# '''
|
gpl-3.0
|
theguardian/LazyLibrarian_Old
|
cherrypy/test/logtest.py
|
12
|
6611
|
"""logtest, a unittest.TestCase helper for testing log output."""
import sys
import time
import cherrypy
# Platform-specific single-character reader used by the interactive
# log-error prompt below.
try:
    # On Windows, msvcrt.getch reads a single char without output.
    import msvcrt
    def getchar():
        return msvcrt.getch()
except ImportError:
    # Unix getchr
    import tty, termios
    def getchar():
        # Switch the terminal to raw mode, read one byte, then restore
        # the previous settings so the shell is left usable.
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(sys.stdin.fileno())
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
class LogCase(object):
    """unittest.TestCase mixin for testing log messages.
    logfile: a filename for the desired log. Yes, I know modes are evil,
    but it makes the test functions so much cleaner to set this once.
    lastmarker: the last marker in the log. This can be used to search for
    messages since the last marker.
    markerPrefix: a string with which to prefix log markers. This should be
    unique enough from normal log output to use for marker identification.
    """
    logfile = None
    lastmarker = None
    markerPrefix = "test suite marker: "
    def _handleLogError(self, msg, data, marker, pattern):
        # Report a log assertion failure; in interactive mode, offer a
        # small console menu to inspect the log before deciding.
        # NOTE(review): self.interactive and self.console_height are not
        # defined on this mixin -- presumably supplied by the test harness
        # or a subclass; confirm before reuse.
        print("")
        print(" ERROR: %s" % msg)
        if not self.interactive:
            raise self.failureException(msg)
        p = " Show: [L]og [M]arker [P]attern; [I]gnore, [R]aise, or sys.e[X]it >> "
        print p,
        # ARGH
        sys.stdout.flush()
        while True:
            i = getchar().upper()
            if i not in "MPLIRX":
                continue
            print(i.upper()) # Also prints new line
            if i == "L":
                for x, line in enumerate(data):
                    if (x + 1) % self.console_height == 0:
                        # The \r and comma should make the next line overwrite
                        print "<-- More -->\r",
                        m = getchar().lower()
                        # Erase our "More" prompt
                        print " \r",
                        if m == "q":
                            break
                    print(line.rstrip())
            elif i == "M":
                print(repr(marker or self.lastmarker))
            elif i == "P":
                print(repr(pattern))
            elif i == "I":
                # return without raising the normal exception
                return
            elif i == "R":
                raise self.failureException(msg)
            elif i == "X":
                self.exit()
            print p,
    def exit(self):
        # Bail out of the whole test run.
        sys.exit()
    def emptyLog(self):
        """Overwrite self.logfile with 0 bytes."""
        open(self.logfile, 'wb').write("")
    def markLog(self, key=None):
        """Insert a marker line into the log and set self.lastmarker."""
        if key is None:
            key = str(time.time())
        self.lastmarker = key
        open(self.logfile, 'ab+').write("%s%s\n" % (self.markerPrefix, key))
    def _read_marked_region(self, marker=None):
        """Return lines from self.logfile in the marked region.
        If marker is None, self.lastmarker is used. If the log hasn't
        been marked (using self.markLog), the entire log will be returned.
        """
##        # Give the logger time to finish writing?
##        time.sleep(0.5)
        logfile = self.logfile
        marker = marker or self.lastmarker
        if marker is None:
            return open(logfile, 'rb').readlines()
        # Collect lines between our marker and the next marker line.
        data = []
        in_region = False
        for line in open(logfile, 'rb'):
            if in_region:
                if (line.startswith(self.markerPrefix) and not marker in line):
                    break
                else:
                    data.append(line)
            elif marker in line:
                in_region = True
        return data
    def assertInLog(self, line, marker=None):
        """Fail if the given (partial) line is not in the log.
        The log will be searched from the given marker to the next marker.
        If marker is None, self.lastmarker is used. If the log hasn't
        been marked (using self.markLog), the entire log will be searched.
        """
        data = self._read_marked_region(marker)
        for logline in data:
            if line in logline:
                return
        msg = "%r not found in log" % line
        self._handleLogError(msg, data, marker, line)
    def assertNotInLog(self, line, marker=None):
        """Fail if the given (partial) line is in the log.
        The log will be searched from the given marker to the next marker.
        If marker is None, self.lastmarker is used. If the log hasn't
        been marked (using self.markLog), the entire log will be searched.
        """
        data = self._read_marked_region(marker)
        for logline in data:
            if line in logline:
                msg = "%r found in log" % line
                self._handleLogError(msg, data, marker, line)
    def assertLog(self, sliceargs, lines, marker=None):
        """Fail if log.readlines()[sliceargs] is not contained in 'lines'.
        The log will be searched from the given marker to the next marker.
        If marker is None, self.lastmarker is used. If the log hasn't
        been marked (using self.markLog), the entire log will be searched.
        """
        data = self._read_marked_region(marker)
        if isinstance(sliceargs, int):
            # Single arg. Use __getitem__ and allow lines to be str or list.
            if isinstance(lines, (tuple, list)):
                lines = lines[0]
            if lines not in data[sliceargs]:
                msg = "%r not found on log line %r" % (lines, sliceargs)
                self._handleLogError(msg, [data[sliceargs]], marker, lines)
        else:
            # Multiple args. Use __getslice__ and require lines to be list.
            if isinstance(lines, tuple):
                lines = list(lines)
            elif isinstance(lines, basestring):
                raise TypeError("The 'lines' arg must be a list when "
                                "'sliceargs' is a tuple.")
            start, stop = sliceargs
            for line, logline in zip(lines, data[start:stop]):
                if line not in logline:
                    msg = "%r not found in log" % line
                    self._handleLogError(msg, data[start:stop], marker, line)
|
gpl-3.0
|
wavicles/fossasia-pslab
|
PSL/digital_channel.py
|
2
|
3131
|
from __future__ import print_function
import numpy as np
# Human-readable names of the logic-analyzer input channels.
digital_channel_names=['ID1','ID2','ID3','ID4','SEN','EXT','CNTR']
class digital_channel:
    """Holds captured edge timestamps for one logic-analyzer input and
    converts them into step-plot (x, y) arrays for display.

    NOTE(review): timestamps appear to be raw instrument counts that the
    prescaler table scales into time units -- confirm the unit with the
    acquisition code that calls load_data().
    """
    # Capture modes; higher rising-edge modes subsample the edge stream.
    EVERY_SIXTEENTH_RISING_EDGE = 5
    EVERY_FOURTH_RISING_EDGE = 4
    EVERY_RISING_EDGE = 3
    EVERY_FALLING_EDGE = 2
    EVERY_EDGE = 1
    DISABLED = 0
    def __init__(self,a):
        # a: channel number, used as index into digital_channel_names.
        self.gain=0
        self.channel_number=a
        self.digital_channel_names=digital_channel_names
        self.name=self.digital_channel_names[a]
        # Pre-allocated plot buffers; plot_length tracks the used portion.
        self.xaxis=np.zeros(20000)
        self.yaxis=np.zeros(20000)
        self.timestamps=np.zeros(10000)
        self.length=100
        self.initial_state=0
        self.prescaler = 0
        self.datatype='int'
        self.trigger=0
        self.dlength=0
        self.plot_length = 0
        self.maximum_time =0
        self.maxT = 0
        self.initial_state_override = False
        self.mode=self.EVERY_EDGE
    def set_params(self,**keys):
        # Optional keyword overrides for channel number and display name.
        self.channel_number = keys.get('channel_number',self.channel_number)
        self.name = keys.get('name','ErrOr')
    def load_data(self,initial_state,timestamps):
        # initial_state: dict mapping channel name -> logic level at t=0.
        if self.initial_state_override:
            # NOTE(review): this only yields True when the override was set
            # to 2; confirm the intended encoding with the code that sets
            # initial_state_override.
            self.initial_state = (self.initial_state_override-1)==1
            self.initial_state_override = False
        else: self.initial_state = initial_state[self.name]
        self.timestamps=timestamps
        self.dlength = len(self.timestamps)
        #print('dchan.py',self.channel_number,self.name,initial_state,self.initial_state)
        # Scale raw counts into time units according to the prescaler.
        self.timestamps = np.array(self.timestamps)*[1./64,1./8,1.,4.][self.prescaler]
        if self.dlength:self.maxT=self.timestamps[-1]
        else: self.maxT=0
    def generate_axes(self):
        """Build the step-plot arrays in self.xaxis/self.yaxis from the
        loaded timestamps, according to the capture mode."""
        HIGH = 1#(4-self.channel_number)*(3)
        LOW = 0#HIGH - 2.5
        state = HIGH if self.initial_state else LOW
        if self.mode==self.DISABLED:
            # Just a single point at the initial level.
            self.xaxis[0]=0; self.yaxis[0]=state
            n=1
            self.plot_length = n
        elif self.mode==self.EVERY_EDGE:
            # Each timestamp toggles the level: emit the old and new level
            # at the same x to draw a vertical step.
            self.xaxis[0]=0; self.yaxis[0]=state
            n=1
            for a in range(self.dlength):
                self.xaxis[n] = self.timestamps[a]
                self.yaxis[n] = state
                state = LOW if state==HIGH else HIGH
                n+=1
                self.xaxis[n] = self.timestamps[a]
                self.yaxis[n] = state
                n+=1
            self.plot_length = n
        elif self.mode==self.EVERY_FALLING_EDGE:
            # Each timestamp is a falling edge: draw a narrow low pulse.
            self.xaxis[0]=0; self.yaxis[0]=HIGH
            n=1
            for a in range(self.dlength):
                self.xaxis[n] = self.timestamps[a]
                self.yaxis[n] = HIGH
                n+=1
                self.xaxis[n] = self.timestamps[a]
                self.yaxis[n] = LOW
                n+=1
                self.xaxis[n] = self.timestamps[a]
                self.yaxis[n] = HIGH
                n+=1
                state=HIGH
            self.plot_length = n
        elif self.mode==self.EVERY_RISING_EDGE or self.mode==self.EVERY_FOURTH_RISING_EDGE or self.mode==self.EVERY_SIXTEENTH_RISING_EDGE:
            # Each (possibly subsampled) timestamp is a rising edge: draw a
            # narrow high pulse.
            self.xaxis[0]=0; self.yaxis[0]=LOW
            n=1
            for a in range(self.dlength):
                self.xaxis[n] = self.timestamps[a]
                self.yaxis[n] = LOW
                n+=1
                self.xaxis[n] = self.timestamps[a]
                self.yaxis[n] = HIGH
                n+=1
                self.xaxis[n] = self.timestamps[a]
                self.yaxis[n] = LOW
                n+=1
                state = LOW
            self.plot_length = n
        #print(self.channel_number,self.dlength,self.mode,len(self.yaxis),self.plot_length)
    def get_xaxis(self):
        # Only the populated prefix of the pre-allocated buffer.
        return self.xaxis[:self.plot_length]
    def get_yaxis(self):
        # Only the populated prefix of the pre-allocated buffer.
        return self.yaxis[:self.plot_length]
|
gpl-3.0
|
pradyu1993/scikit-learn
|
sklearn/datasets/tests/test_lfw.py
|
2
|
6778
|
"""This test for the LFW require medium-size data dowloading and processing
If the data has not been already downloaded by runnning the examples,
the tests won't run (skipped).
If the test are run, the first execution will be long (typically a bit
more than a couple of minutes) but as the dataset loader is leveraging
joblib, successive runs will be fast (less than 200ms).
"""
import random
import os
import shutil
import tempfile
import numpy as np
try:
try:
from scipy.misc import imsave
except ImportError:
from scipy.misc.pilutil import imsave
except ImportError:
imsave = None
from sklearn.datasets import load_lfw_pairs
from sklearn.datasets import load_lfw_people
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import SkipTest
from sklearn.utils.testing import raises
# Temporary data_home directories: one populated with a fake LFW layout by
# setup_module, one intentionally left empty to exercise the IOError paths.
SCIKIT_LEARN_DATA = tempfile.mkdtemp(prefix="scikit_learn_lfw_test_")
SCIKIT_LEARN_EMPTY_DATA = tempfile.mkdtemp(prefix="scikit_learn_empty_test_")
LFW_HOME = os.path.join(SCIKIT_LEARN_DATA, 'lfw_home')
# Synthetic person names used to generate the fake dataset.
FAKE_NAMES = [
    'Abdelatif_Smith',
    'Abhati_Kepler',
    'Camara_Alvaro',
    'Chen_Dupont',
    'John_Lee',
    'Lin_Bauman',
    'Onur_Lopez',
]
def setup_module():
    """Test fixture run once and common to all tests of this module"""
    # NOTE(review): Python 2 only -- uses dict.iteritems, writes str to
    # files opened in binary mode, and random.sample(range(...)).
    if imsave is None:
        raise SkipTest
    if not os.path.exists(LFW_HOME):
        os.makedirs(LFW_HOME)
    # Fixed seeds keep the generated dataset (and test expectations) stable.
    random_state = random.Random(42)
    np_rng = np.random.RandomState(42)
    # generate some random jpeg files for each person
    counts = {}
    for name in FAKE_NAMES:
        folder_name = os.path.join(LFW_HOME, 'lfw_funneled', name)
        if not os.path.exists(folder_name):
            os.makedirs(folder_name)
        n_faces = np_rng.randint(1, 5)
        counts[name] = n_faces
        for i in range(n_faces):
            file_path = os.path.join(folder_name, name + '_%04d.jpg' % i)
            uniface = np_rng.randint(0, 255, size=(250, 250, 3))
            try:
                imsave(file_path, uniface)
            except ImportError:
                # PIL is not properly installed, skip those tests
                raise SkipTest
    # add some random file pollution to test robustness
    with open(os.path.join(LFW_HOME, 'lfw_funneled', '.test.swp'), 'wb') as f:
        f.write('Text file to be ignored by the dataset loader.')
    # generate some pairing metadata files using the same format as LFW
    with open(os.path.join(LFW_HOME, 'pairsDevTrain.txt'), 'wb') as f:
        f.write("10\n")
        more_than_two = [name for name, count in counts.iteritems()
                         if count >= 2]
        # Five "same person" pairs...
        for i in range(5):
            name = random_state.choice(more_than_two)
            first, second = random_state.sample(range(counts[name]), 2)
            f.write('%s\t%d\t%d\n' % (name, first, second))
        # ...and five "different person" pairs.
        for i in range(5):
            first_name, second_name = random_state.sample(FAKE_NAMES, 2)
            first_index = random_state.choice(range(counts[first_name]))
            second_index = random_state.choice(range(counts[second_name]))
            f.write('%s\t%d\t%s\t%d\n' % (first_name, first_index,
                                          second_name, second_index))
    with open(os.path.join(LFW_HOME, 'pairsDevTest.txt'), 'wb') as f:
        f.write("Fake place holder that won't be tested")
    with open(os.path.join(LFW_HOME, 'pairs.txt'), 'wb') as f:
        f.write("Fake place holder that won't be tested")
def teardown_module():
    """Test fixture (clean up) run once after all tests of this module"""
    for tmp_dir in (SCIKIT_LEARN_DATA, SCIKIT_LEARN_EMPTY_DATA):
        if os.path.isdir(tmp_dir):
            shutil.rmtree(tmp_dir)
@raises(IOError)
def test_load_empty_lfw_people():
    # A data_home with no LFW download must raise IOError.
    load_lfw_people(data_home=SCIKIT_LEARN_EMPTY_DATA)
def test_load_fake_lfw_people():
    # Only people with at least 3 generated faces pass this threshold.
    lfw_people = load_lfw_people(data_home=SCIKIT_LEARN_DATA,
                                 min_faces_per_person=3)
    # The data is cropped around the center as a rectangular bounding box
    # around the face. Colors are converted to gray levels:
    assert_equal(lfw_people.images.shape, (10, 62, 47))
    assert_equal(lfw_people.data.shape, (10, 2914))
    # the target is array of person integer ids
    assert_array_equal(lfw_people.target, [2, 0, 1, 0, 2, 0, 2, 1, 1, 2])
    # names of the persons can be found using the target_names array
    expected_classes = ['Abdelatif Smith', 'Abhati Kepler', 'Onur Lopez']
    assert_array_equal(lfw_people.target_names, expected_classes)
    # It is possible to ask for the original data without any cropping or
    # color conversion and no limit on the number of pictures per person
    lfw_people = load_lfw_people(data_home=SCIKIT_LEARN_DATA,
                                 resize=None, slice_=None, color=True)
    assert_equal(lfw_people.images.shape, (17, 250, 250, 3))
    # the ids and class names are the same as previously
    assert_array_equal(lfw_people.target,
                       [0, 0, 1, 6, 5, 6, 3, 6, 0, 3, 6, 1, 2, 4, 5, 1, 2])
    assert_array_equal(lfw_people.target_names,
                       ['Abdelatif Smith', 'Abhati Kepler', 'Camara Alvaro',
                        'Chen Dupont', 'John Lee', 'Lin Bauman', 'Onur Lopez'])
@raises(ValueError)
def test_load_fake_lfw_people_too_restrictive():
    # No fake person has 100 faces, so the filter must reject everything.
    load_lfw_people(data_home=SCIKIT_LEARN_DATA, min_faces_per_person=100)
@raises(IOError)
def test_load_empty_lfw_pairs():
    # A data_home with no LFW download must raise IOError.
    load_lfw_pairs(data_home=SCIKIT_LEARN_EMPTY_DATA)
def test_load_fake_lfw_pairs():
    lfw_pairs_train = load_lfw_pairs(data_home=SCIKIT_LEARN_DATA)
    # The data is cropped around the center as a rectangular bounding box
    # around the face. Colors are converted to gray levels:
    assert_equal(lfw_pairs_train.pairs.shape, (10, 2, 62, 47))
    # the target is whether the person is the same or not
    assert_array_equal(lfw_pairs_train.target, [1, 1, 1, 1, 1, 0, 0, 0, 0, 0])
    # names of the persons can be found using the target_names array
    expected_classes = ['Different persons', 'Same person']
    assert_array_equal(lfw_pairs_train.target_names, expected_classes)
    # It is possible to ask for the original data without any cropping or
    # color conversion
    lfw_pairs_train = load_lfw_pairs(data_home=SCIKIT_LEARN_DATA,
                                     resize=None, slice_=None, color=True)
    assert_equal(lfw_pairs_train.pairs.shape, (10, 2, 250, 250, 3))
    # the ids and class names are the same as previously
    assert_array_equal(lfw_pairs_train.target, [1, 1, 1, 1, 1, 0, 0, 0, 0, 0])
    assert_array_equal(lfw_pairs_train.target_names, expected_classes)
|
bsd-3-clause
|
komsas/OpenUpgrade
|
openerp/addons/base/res/ir_property.py
|
33
|
7509
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv, fields
from openerp.osv.orm import browse_record, browse_null
from openerp.tools.misc import attrgetter
# -------------------------------------------------------------------------
# Properties
# -------------------------------------------------------------------------
class ir_property(osv.osv):
    """Typed property store: per-company (and optionally per-record)
    values attached to a model field. Values are stored in one of several
    typed columns depending on the 'type' selection."""
    _name = 'ir.property'
    _columns = {
        'name': fields.char('Name', size=128, select=1),
        'res_id': fields.char('Resource', size=128, help="If not set, acts as a default value for new resources", select=1),
        'company_id': fields.many2one('res.company', 'Company', select=1),
        'fields_id': fields.many2one('ir.model.fields', 'Field', ondelete='cascade', required=True, select=1),
        'value_float' : fields.float('Value'),
        'value_integer' : fields.integer('Value'),
        'value_text' : fields.text('Value'), # will contain (char, text)
        'value_binary' : fields.binary('Value'),
        'value_reference': fields.char('Value', size=128),
        'value_datetime' : fields.datetime('Value'),
        'type' : fields.selection([('char', 'Char'),
                                   ('float', 'Float'),
                                   ('boolean', 'Boolean'),
                                   ('integer', 'Integer'),
                                   ('text', 'Text'),
                                   ('binary', 'Binary'),
                                   ('many2one', 'Many2One'),
                                   ('date', 'Date'),
                                   ('datetime', 'DateTime'),
                                   ('selection', 'Selection'),
                                   ],
                                  'Type',
                                  required=True,
                                  select=1),
    }
    _defaults = {
        'type': 'many2one',
    }
    def _update_values(self, cr, uid, ids, values):
        """Move the generic 'value' key in *values* into the typed
        value_* column matching the property's type. Returns *values*."""
        value = values.pop('value', None)
        # Falsy values are simply dropped; the typed column keeps its old
        # (or default) content.
        if not value:
            return values
        prop = None
        type_ = values.get('type')
        if not type_:
            # No explicit type: reuse the first record's type, or fall
            # back to the model default.
            if ids:
                prop = self.browse(cr, uid, ids[0])
                type_ = prop.type
            else:
                type_ = self._defaults['type']
        # Maps each property type to its storage column.
        type2field = {
            'char': 'value_text',
            'float': 'value_float',
            'boolean' : 'value_integer',
            'integer': 'value_integer',
            'text': 'value_text',
            'binary': 'value_binary',
            'many2one': 'value_reference',
            'date' : 'value_datetime',
            'datetime' : 'value_datetime',
            'selection': 'value_text',
        }
        field = type2field.get(type_)
        if not field:
            raise osv.except_osv('Error', 'Invalid type')
        if field == 'value_reference':
            # many2one values are stored as a 'model,id' string; accept
            # either a browse_record or a bare id (resolved through the
            # relation of fields_id).
            if isinstance(value, browse_record):
                value = '%s,%d' % (value._name, value.id)
            elif isinstance(value, (int, long)):
                field_id = values.get('fields_id')
                if not field_id:
                    if not prop:
                        raise ValueError()
                    field_id = prop.fields_id
                else:
                    field_id = self.pool.get('ir.model.fields').browse(cr, uid, field_id)
                value = '%s,%d' % (field_id.relation, value)
        values[field] = value
        return values
    def write(self, cr, uid, ids, values, context=None):
        # Route the generic 'value' into the proper typed column first.
        return super(ir_property, self).write(cr, uid, ids, self._update_values(cr, uid, ids, values), context=context)
    def create(self, cr, uid, values, context=None):
        # Route the generic 'value' into the proper typed column first.
        return super(ir_property, self).create(cr, uid, self._update_values(cr, uid, None, values), context=context)
    def get_by_record(self, cr, uid, record, context=None):
        """Return the Python value of a property record, decoded from the
        typed column matching record.type."""
        if record.type in ('char', 'text', 'selection'):
            return record.value_text
        elif record.type == 'float':
            return record.value_float
        elif record.type == 'boolean':
            return bool(record.value_integer)
        elif record.type == 'integer':
            return record.value_integer
        elif record.type == 'binary':
            return record.value_binary
        elif record.type == 'many2one':
            if not record.value_reference:
                return browse_null()
            model, resource_id = record.value_reference.split(',')
            return self.pool.get(model).browse(cr, uid, int(resource_id), context=context)
        elif record.type == 'datetime':
            return record.value_datetime
        elif record.type == 'date':
            # Dates share the datetime column; strip the time part.
            if not record.value_datetime:
                return False
            return time.strftime('%Y-%m-%d', time.strptime(record.value_datetime, '%Y-%m-%d %H:%M:%S'))
        return False
    def get(self, cr, uid, name, model, res_id=False, context=None):
        """Return the value of property *name* on *model* for *res_id*
        (or the model-level default when res_id is False)."""
        domain = self._get_domain(cr, uid, name, model, context=context)
        if domain is not None:
            domain = [('res_id', '=', res_id)] + domain
            #make the search with company_id asc to make sure that properties specific to a company are given first
            nid = self.search(cr, uid, domain, limit=1, order='company_id asc', context=context)
            if not nid: return False
            record = self.browse(cr, uid, nid[0], context=context)
            return self.get_by_record(cr, uid, record, context=context)
        return False
    def _get_domain_default(self, cr, uid, prop_name, model, context=None):
        # Domain matching only the default (record-less) property values.
        domain = self._get_domain(cr, uid, prop_name, model, context=context)
        if domain is None:
            return None
        return ['&', ('res_id', '=', False)] + domain
    def _get_domain(self, cr, uid, prop_name, model, context=None):
        """Return a search domain for property *prop_name* on *model*,
        restricted to the current (or forced) company, or None when the
        field does not exist."""
        context = context or {}
        cr.execute('select id from ir_model_fields where name=%s and model=%s', (prop_name, model))
        res = cr.fetchone()
        if not res:
            return None
        if 'force_company' in context and context['force_company']:
            cid = context['force_company']
        else:
            company = self.pool.get('res.company')
            cid = company._company_default_get(cr, uid, model, res[0], context=context)
        domain = ['&', ('fields_id', '=', res[0]),
                  '|', ('company_id', '=', cid), ('company_id', '=', False)]
        return domain
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
jhauswald/keras
|
tests/manual/check_save_weights.py
|
95
|
1367
|
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD
import sys
sys.setrecursionlimit(10000) # to be able to pickle Theano compiled functions
import pickle, numpy
def create_model():
    """Build the reference MLP: 256 -> 4 x (2048 relu + dropout) -> 256 linear."""
    model = Sequential()
    dims = [256, 2048, 2048, 2048, 2048]
    # Hidden stack: relu layers each followed by dropout.
    for n_in, n_out in zip(dims[:-1], dims[1:]):
        model.add(Dense(n_in, n_out, init='uniform', activation='relu'))
        model.add(Dropout(0.5))
    # Linear output layer back down to 256 units.
    model.add(Dense(2048, 256, init='uniform', activation='linear'))
    return model
# Train-free round-trip check: pickle the model, save its weights to HDF5,
# load them into a freshly built model, and verify every layer matches.
model = create_model()
sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
model.compile(loss='mse', optimizer=sgd)
pickle.dump(model, open('/tmp/model.pkl', 'wb'))
model.save_weights('/tmp/model_weights.hdf5')
model_loaded = create_model()
model_loaded.load_weights('/tmp/model_weights.hdf5')
for k in range(len(model.layers)):
    weights_orig = model.layers[k].get_weights()
    weights_loaded = model_loaded.layers[k].get_weights()
    for x, y in zip(weights_orig, weights_loaded):
        # Any element-wise mismatch means the HDF5 round trip is broken.
        if numpy.any(x != y):
            raise ValueError('Loaded weights are different from pickled weights!')
|
mit
|
CitizenB/ansible
|
contrib/inventory/ssh_config.py
|
160
|
3979
|
#!/usr/bin/env python
# (c) 2014, Tomas Karasek <tomas.karasek@digile.fi>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Dynamic inventory script which lets you use aliases from ~/.ssh/config.
#
# There were some issues with various Paramiko versions. I took a deeper look
# and tested heavily. Now, ansible parses this alright with Paramiko versions
# 1.7.2 to 1.15.2.
#
# It prints inventory based on parsed ~/.ssh/config. You can refer to hosts
# with their alias, rather than with the IP or hostname. It takes advantage
# of the ansible_ssh_{host,port,user,private_key_file}.
#
# If you have in your .ssh/config:
# Host git
# HostName git.domain.org
# User tkarasek
# IdentityFile /home/tomk/keys/thekey
#
# You can do
# $ ansible git -m ping
#
# Example invocation:
# ssh_config.py --list
# ssh_config.py --host <alias>
import argparse
import os.path
import sys
import paramiko
try:
import json
except ImportError:
import simplejson as json
SSH_CONF = '~/.ssh/config'
_key = 'ssh_config'
_ssh_to_ansible = [('user', 'ansible_ssh_user'),
('hostname', 'ansible_ssh_host'),
('identityfile', 'ansible_ssh_private_key_file'),
('port', 'ansible_ssh_port')]
def get_config():
    """Parse ~/.ssh/config into a dict of {alias: {option: value}}.

    Wildcard host patterns (containing '*' or '?') are skipped because they
    do not name a single concrete host. Relies on paramiko's *private*
    SSHConfig._config attribute, whose layout differs between paramiko
    versions — hence the list-vs-string 'host' handling and the optional
    nested 'config' key below.
    """
    if not os.path.isfile(os.path.expanduser(SSH_CONF)):
        return {}
    with open(os.path.expanduser(SSH_CONF)) as f:
        cfg = paramiko.SSHConfig()
        cfg.parse(f)
    ret_dict = {}
    for d in cfg._config:
        # newer paramiko versions store 'host' as a list of patterns
        if type(d['host']) is list:
            alias = d['host'][0]
        else:
            alias = d['host']
        if ('?' in alias) or ('*' in alias):
            # wildcard pattern, not a concrete host alias
            continue
        _copy = dict(d)
        del _copy['host']
        # some paramiko versions nest the options under a 'config' key
        if 'config' in _copy:
            ret_dict[alias] = _copy['config']
        else:
            ret_dict[alias] = _copy
    return ret_dict
def print_list():
    """Print the whole inventory as JSON: one group (named by ``_key``)
    listing every alias, plus ``_meta.hostvars`` carrying the translated
    ansible_ssh_* variables for each alias."""
    config = get_config()
    hostvars = {}
    for alias, attributes in config.items():
        translated = {}
        for ssh_opt, ans_opt in _ssh_to_ansible:
            if ssh_opt not in attributes:
                continue
            # If the attribute is a list, just take the first element.
            # Private key is returned in a list for some reason.
            attr = attributes[ssh_opt]
            if type(attr) is list:
                attr = attr[0]
            translated[ans_opt] = attr
        if translated:
            hostvars[alias] = translated
    meta = {'hostvars': hostvars}
    print(json.dumps({_key: list(set(hostvars.keys())), '_meta': meta}))
def print_host(host):
    """Print the raw ssh_config options for a single alias as JSON."""
    print(json.dumps(get_config()[host]))
def get_args(args_list):
    """Parse CLI arguments: exactly one of --list or --host is required."""
    parser = argparse.ArgumentParser(
        description='ansible inventory script parsing .ssh/config')
    mutex_group = parser.add_mutually_exclusive_group(required=True)
    mutex_group.add_argument(
        '--list', action='store_true',
        help='list all hosts from .ssh/config inventory')
    mutex_group.add_argument(
        '--host',
        help='display variables for a host')
    return parser.parse_args(args_list)
def main(args_list):
    """Entry point: dispatch to the --list or --host handler."""
    parsed = get_args(args_list)
    if parsed.list:
        print_list()
    if parsed.host:
        print_host(parsed.host)
# Run as a CLI script: forward argv (minus the program name) to main().
if __name__ == '__main__':
    main(sys.argv[1:])
|
gpl-3.0
|
Marcusz97/CILP_Facilitatore_Audacity
|
lib-src/lv2/sord/waflib/Tools/gcc.py
|
64
|
2730
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys
from waflib import Configure,Options,Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@conf
def find_gcc(conf):
	# Locate a gcc-compatible C compiler on PATH ('gcc' preferred, generic
	# 'cc' as fallback), probe its version, and record it in the env.
	cc=conf.find_program(['gcc','cc'],var='CC')
	cc=conf.cmd_to_list(cc)
	conf.get_cc_version(cc,gcc=True)
	conf.env.CC_NAME='gcc'
	conf.env.CC=cc
@conf
def gcc_common_flags(conf):
	# Baseline flag templates shared by all gcc-like toolchains; the
	# platform-specific gcc_modifier_* functions override these afterwards.
	v=conf.env
	# compile / link command fragments
	v['CC_SRC_F']=[]
	v['CC_TGT_F']=['-c','-o']
	if not v['LINK_CC']:v['LINK_CC']=v['CC']
	v['CCLNK_SRC_F']=[]
	v['CCLNK_TGT_F']=['-o']
	# printf-style templates for include paths, defines and libraries
	v['CPPPATH_ST']='-I%s'
	v['DEFINES_ST']='-D%s'
	v['LIB_ST']='-l%s'
	v['LIBPATH_ST']='-L%s'
	v['STLIB_ST']='-l%s'
	v['STLIBPATH_ST']='-L%s'
	v['RPATH_ST']='-Wl,-rpath,%s'
	v['SONAME_ST']='-Wl,-h,%s'
	# markers toggling dynamic/static linking of subsequent libs
	v['SHLIB_MARKER']='-Wl,-Bdynamic'
	v['STLIB_MARKER']='-Wl,-Bstatic'
	# output file name patterns per target kind (ELF defaults)
	v['cprogram_PATTERN']='%s'
	v['CFLAGS_cshlib']=['-fPIC']
	v['LINKFLAGS_cshlib']=['-shared']
	v['cshlib_PATTERN']='lib%s.so'
	v['LINKFLAGS_cstlib']=['-Wl,-Bstatic']
	v['cstlib_PATTERN']='lib%s.a'
	# macOS bundle support
	v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup']
	v['CFLAGS_MACBUNDLE']=['-fPIC']
	v['macbundle_PATTERN']='%s.bundle'
@conf
def gcc_modifier_win32(conf):
	# Windows (MinGW) overrides: .exe/.dll naming, import libraries, and
	# no -fPIC (position independence is implicit on PE targets).
	v=conf.env
	v['cprogram_PATTERN']='%s.exe'
	v['cshlib_PATTERN']='%s.dll'
	v['implib_PATTERN']='lib%s.dll.a'
	v['IMPLIB_ST']='-Wl,--out-implib,%s'
	v['CFLAGS_cshlib']=[]
	v.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
@conf
def gcc_modifier_cygwin(conf):
	# Cygwin behaves like win32 except for the 'cyg' DLL prefix and
	# auto-image-base linking.
	gcc_modifier_win32(conf)
	v=conf.env
	v['cshlib_PATTERN']='cyg%s.dll'
	v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base'])
	v['CFLAGS_cshlib']=[]
@conf
def gcc_modifier_darwin(conf):
	# macOS overrides: Mach-O dylibs, framework flags, and no GNU-ld
	# Bstatic/Bdynamic or soname support.
	v=conf.env
	v['CFLAGS_cshlib']=['-fPIC','-compatibility_version','1','-current_version','1']
	v['LINKFLAGS_cshlib']=['-dynamiclib']
	v['cshlib_PATTERN']='lib%s.dylib'
	v['FRAMEWORKPATH_ST']='-F%s'
	v['FRAMEWORK_ST']=['-framework']
	v['ARCH_ST']=['-arch']
	v['LINKFLAGS_cstlib']=[]
	v['SHLIB_MARKER']=[]
	v['STLIB_MARKER']=[]
	v['SONAME_ST']=[]
@conf
def gcc_modifier_aix(conf):
	# AIX overrides: run-time linking (-brtl) and full symbol export.
	v=conf.env
	v['LINKFLAGS_cprogram']=['-Wl,-brtl']
	v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull']
	v['SHLIB_MARKER']=[]
@conf
def gcc_modifier_hpux(conf):
	# HP-UX overrides: .sl shared libraries and HP-specific static marker.
	v=conf.env
	v['SHLIB_MARKER']=[]
	v['STLIB_MARKER']='-Bstatic'
	v['CFLAGS_cshlib']=['-fPIC','-DPIC']
	v['cshlib_PATTERN']='lib%s.sl'
@conf
def gcc_modifier_platform(conf):
	# Dispatch to the gcc_modifier_<DEST_OS> function if one exists for
	# the target platform; silently do nothing otherwise.
	gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None)
	if gcc_modifier_func:
		gcc_modifier_func()
def configure(conf):
	# Standard waf tool entry point: detect gcc and ar, install the common
	# flag templates, then apply platform tweaks and the generic C hooks.
	conf.find_gcc()
	conf.find_ar()
	conf.gcc_common_flags()
	conf.gcc_modifier_platform()
	conf.cc_load_tools()
	conf.cc_add_flags()
	conf.link_add_flags()
|
gpl-2.0
|
rottenbytes/Sick-Beard
|
lib/subliminal/__init__.py
|
49
|
1370
|
# -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from .api import list_subtitles, download_subtitles
from .async import Pool
from .core import (SERVICES, LANGUAGE_INDEX, SERVICE_INDEX, SERVICE_CONFIDENCE,
MATCHING_CONFIDENCE)
from .infos import __version__
import logging
try:
    from logging import NullHandler
except ImportError:
    # logging.NullHandler only exists on Python >= 2.7; provide a minimal
    # stand-in for older interpreters.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

__all__ = ['SERVICES', 'LANGUAGE_INDEX', 'SERVICE_INDEX', 'SERVICE_CONFIDENCE',
           'MATCHING_CONFIDENCE', 'list_subtitles', 'download_subtitles', 'Pool']

# Library convention: attach a no-op handler so applications that import
# subliminal without configuring logging don't get "no handlers could be
# found" warnings.
logging.getLogger("subliminal").addHandler(NullHandler())
|
gpl-3.0
|
gavinmcgimpsey/deckofcards
|
deck/urls.py
|
2
|
1035
|
from django.conf.urls import patterns, url
# URL routes for the deck app. The bare /new, /shuffle and /draw routes are
# deprecated aliases kept for backward compatibility with early API clients;
# the canonical routes all live under /deck/.
# NOTE(review): patterns() with string view references was deprecated in
# Django 1.8 and removed in 1.10 — confirm the target Django version.
urlpatterns = patterns('',
    url(r'^new/$', 'deck.views.new_deck', name='new_deck_d'), #a week in and I am already deprecating things...
    url(r'^shuffle/$', 'deck.views.shuffle', name='shuffle_d'), #deprecated - May 18, 2015
    url(r'^shuffle/(?P<key>\w+)/$', 'deck.views.shuffle', name='shuffle_key_d'),#deprecated - May 18, 2015
    url(r'^draw/(?P<key>\w+)/$', 'deck.views.draw', name='draw_d'),#deprecated - May 18, 2015
    url(r'^deck/new/$', 'deck.views.new_deck', name='new_deck'),
    url(r'^deck/new/shuffle/$', 'deck.views.shuffle', name='shuffle'),
    url(r'^deck/(?P<key>\w+)/shuffle/$', 'deck.views.shuffle', name='shuffle_key'),
    url(r'^deck/new/draw/$', 'deck.views.draw', name='new_draw'),
    url(r'^deck/(?P<key>\w+)/draw/$', 'deck.views.draw', name='draw'),
    url(r'^deck/(?P<key>\w+)/pile/(?P<pile>\w+)/add/$', 'deck.views.add_to_pile', name='add'),
    url(r'^deck/(?P<key>\w+)/pile/(?P<pile>\w+)/draw/$', 'deck.views.draw_from_pile', name='draw_pile'),
)
|
mit
|
Dioptas/Dioptas
|
dioptas/model/util/BackgroundExtraction.py
|
1
|
2873
|
# -*- coding: utf-8 -*-
# Dioptas - GUI program for fast processing of 2D X-ray diffraction data
# Principal author: Clemens Prescher (clemens.prescher@gmail.com)
# Copyright (C) 2014-2019 GSECARS, University of Chicago, USA
# Copyright (C) 2015-2018 Institute for Geology and Mineralogy, University of Cologne, Germany
# Copyright (C) 2019-2020 DESY, Hamburg, Germany
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
logger = logging.getLogger(__name__)
import numpy as np
# Pick the fastest available smooth_bruckner implementation, in order of
# preference: compiled Fortran extension -> prebuilt Cython extension ->
# Cython compiled on the fly via pyximport -> pure-Python fallback.
try:
    from .smooth_bruckner import smooth_bruckner
except ImportError:
    try:
        from .smooth_bruckner_cython import smooth_bruckner
    except ImportError:
        try:
            # last compiled option: build the Cython module at import time
            import pyximport
            pyximport.install(language_level=3)
            from .smooth_bruckner_cython import smooth_bruckner
        except ImportError as e:
            print(e)
            logger.warning(
                "Could not import the Fortran or Cython version of smooth_bruckner. Using python implementation instead. Please"
                " run 'f2py -c -m smooth_bruckner smooth_bruckner.f95' in the model/util folder for faster"
                " implementation")
            from .smooth_bruckner_python import smooth_bruckner
def extract_background(x, y, smooth_width=0.1, iterations=50, cheb_order=50):
    """
    Performs a background subtraction using bruckner smoothing and a chebyshev polynomial.
    Standard parameters are found to be optimal for synchrotron XRD.

    :param x: x-data of pattern
    :param y: y-data of pattern
    :param smooth_width: width of the window in x-units used for bruckner smoothing
    :param iterations: number of iterations for the bruckner smoothing
    :param cheb_order: order of the fitted chebyshev polynomial
    :return: vector of extracted y background
    """
    # convert the smoothing window from x-units to a number of data points,
    # assuming an equidistant x grid
    step = x[1] - x[0]
    smooth_points = int(float(smooth_width) / step)
    y_smooth = smooth_bruckner(y, smooth_points, iterations)

    # map x onto [-1, 1] (the chebyshev domain) and fit the smoothed curve
    x_cheb = 2. * (x - x[0]) / (x[-1] - x[0]) - 1.
    coefficients = np.polynomial.chebyshev.chebfit(x_cheb, y_smooth, cheb_order)
    return np.polynomial.chebyshev.chebval(x_cheb, coefficients)
|
gpl-3.0
|
quru/wagtail
|
wagtail/wagtailimages/models.py
|
1
|
17958
|
from __future__ import absolute_import, unicode_literals
import hashlib
import os.path
from collections import OrderedDict
from contextlib import contextmanager
import django
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files import File
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import pre_delete, pre_save
from django.dispatch.dispatcher import receiver
from django.forms.widgets import flatatt
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.safestring import mark_safe
from django.utils.six import BytesIO, string_types, text_type
from django.utils.translation import ugettext_lazy as _
from taggit.managers import TaggableManager
from unidecode import unidecode
from willow.image import Image as WillowImage
from wagtail.wagtailadmin.taggable import TagSearchable
from wagtail.wagtailadmin.utils import get_object_usage
from wagtail.wagtailcore import hooks
from wagtail.wagtailcore.models import CollectionMember
from wagtail.wagtailimages.exceptions import InvalidFilterSpecError
from wagtail.wagtailimages.rect import Rect
from wagtail.wagtailsearch import index
from wagtail.wagtailsearch.queryset import SearchableQuerySetMixin
class SourceImageIOError(IOError):
    """
    Custom exception to distinguish IOErrors that were thrown while opening the source image
    (as opposed to IOErrors raised elsewhere, e.g. while writing a rendition).
    """
    pass
class ImageQuerySet(SearchableQuerySetMixin, models.QuerySet):
    """QuerySet for images; search support comes from SearchableQuerySetMixin."""
    pass
def get_upload_to(instance, filename):
    """
    Obtain a valid upload path for an image file.

    This needs to be a module-level function so that it can be referenced within migrations,
    but simply delegates to the `get_upload_to` method of the instance, so that AbstractImage
    subclasses can override it.
    """
    delegate = instance.get_upload_to
    return delegate(filename)
def get_rendition_upload_to(instance, filename):
    """
    Obtain a valid upload path for an image rendition file.

    This needs to be a module-level function so that it can be referenced within migrations,
    but simply delegates to the `get_upload_to` method of the instance, so that AbstractRendition
    subclasses can override it.
    """
    delegate = instance.get_upload_to
    return delegate(filename)
@python_2_unicode_compatible
class AbstractImage(CollectionMember, TagSearchable):
    """
    Base class for Wagtail image models: stores the uploaded file plus its
    dimensions, ownership, tags and an optional focal point, and knows how
    to generate and cache renditions via Filter specs.
    """
    title = models.CharField(max_length=255, verbose_name=_('title'))
    file = models.ImageField(
        verbose_name=_('file'), upload_to=get_upload_to, width_field='width', height_field='height'
    )
    width = models.IntegerField(verbose_name=_('width'), editable=False)
    height = models.IntegerField(verbose_name=_('height'), editable=False)
    created_at = models.DateTimeField(verbose_name=_('created at'), auto_now_add=True, db_index=True)
    uploaded_by_user = models.ForeignKey(
        settings.AUTH_USER_MODEL, verbose_name=_('uploaded by user'),
        null=True, blank=True, editable=False, on_delete=models.SET_NULL
    )
    tags = TaggableManager(help_text=None, blank=True, verbose_name=_('tags'))
    # Optional focal point (centre coordinates + size, in pixels) used by
    # cropping operations; all four are set/cleared together.
    focal_point_x = models.PositiveIntegerField(null=True, blank=True)
    focal_point_y = models.PositiveIntegerField(null=True, blank=True)
    focal_point_width = models.PositiveIntegerField(null=True, blank=True)
    focal_point_height = models.PositiveIntegerField(null=True, blank=True)
    # Cached file size in bytes; filled in lazily by get_file_size().
    file_size = models.PositiveIntegerField(null=True, editable=False)

    objects = ImageQuerySet.as_manager()

    def is_stored_locally(self):
        """
        Returns True if the image is hosted on the local filesystem
        """
        try:
            self.file.path
            return True
        except NotImplementedError:
            # remote storage backends raise NotImplementedError for .path
            return False

    def get_file_size(self):
        """Return the file size in bytes, caching it on first access.

        Returns None (without caching) if the underlying file is missing.
        """
        if self.file_size is None:
            try:
                self.file_size = self.file.size
            except OSError:
                # File doesn't exist
                return

            self.save(update_fields=['file_size'])

        return self.file_size

    def get_upload_to(self, filename):
        """Build the storage path for a newly uploaded original image."""
        folder_name = 'original_images'
        filename = self.file.field.storage.get_valid_name(filename)

        # do a unidecode in the filename and then
        # replace non-ascii characters in filename with _ , to sidestep issues with filesystem encoding
        filename = "".join((i if ord(i) < 128 else '_') for i in unidecode(filename))

        # Truncate filename so it fits in the 100 character limit
        # https://code.djangoproject.com/ticket/9893
        # (drops one character from the stem per iteration until it fits)
        while len(os.path.join(folder_name, filename)) >= 95:
            prefix, dot, extension = filename.rpartition('.')
            filename = prefix[:-1] + dot + extension
        return os.path.join(folder_name, filename)

    def get_usage(self):
        """Return the objects that reference this image."""
        return get_object_usage(self)

    @property
    def usage_url(self):
        # admin URL listing this image's usages
        return reverse('wagtailimages:image_usage',
                       args=(self.id,))

    search_fields = TagSearchable.search_fields + CollectionMember.search_fields + [
        index.FilterField('uploaded_by_user'),
    ]

    def __str__(self):
        return self.title

    @contextmanager
    def get_willow_image(self):
        """Context manager yielding a Willow image for this file, reopening
        (and later closing) the underlying file if necessary."""
        # Open file if it is closed
        close_file = False
        try:
            image_file = self.file

            if self.file.closed:
                # Reopen the file
                if self.is_stored_locally():
                    self.file.open('rb')
                else:
                    # Some external storage backends don't allow reopening
                    # the file. Get a fresh file instance. #1397
                    storage = self._meta.get_field('file').storage
                    image_file = storage.open(self.file.name, 'rb')

                close_file = True
        except IOError as e:
            # re-throw this as a SourceImageIOError so that calling code can distinguish
            # these from IOErrors elsewhere in the process
            raise SourceImageIOError(text_type(e))

        # Seek to beginning
        image_file.seek(0)

        try:
            yield WillowImage.open(image_file)
        finally:
            if close_file:
                image_file.close()

    def get_rect(self):
        """Return the whole image area as a Rect."""
        return Rect(0, 0, self.width, self.height)

    def get_focal_point(self):
        """Return the focal point as a Rect, or None if not set."""
        if self.focal_point_x is not None and \
           self.focal_point_y is not None and \
           self.focal_point_width is not None and \
           self.focal_point_height is not None:
            return Rect.from_point(
                self.focal_point_x,
                self.focal_point_y,
                self.focal_point_width,
                self.focal_point_height,
            )

    def has_focal_point(self):
        return self.get_focal_point() is not None

    def set_focal_point(self, rect):
        """Set (or clear, when rect is None) the four focal point fields."""
        if rect is not None:
            self.focal_point_x = rect.centroid_x
            self.focal_point_y = rect.centroid_y
            self.focal_point_width = rect.width
            self.focal_point_height = rect.height
        else:
            self.focal_point_x = None
            self.focal_point_y = None
            self.focal_point_width = None
            self.focal_point_height = None

    def get_suggested_focal_point(self):
        """Suggest a focal point from face detection (preferred) or feature
        detection; returns None if neither finds anything."""
        with self.get_willow_image() as willow:
            faces = willow.detect_faces()

            if faces:
                # Create a bounding box around all faces
                left = min(face[0] for face in faces)
                top = min(face[1] for face in faces)
                right = max(face[2] for face in faces)
                bottom = max(face[3] for face in faces)
                focal_point = Rect(left, top, right, bottom)
            else:
                features = willow.detect_features()
                if features:
                    # Create a bounding box around all features
                    left = min(feature[0] for feature in features)
                    top = min(feature[1] for feature in features)
                    right = max(feature[0] for feature in features)
                    bottom = max(feature[1] for feature in features)
                    focal_point = Rect(left, top, right, bottom)
                else:
                    return None

        # Add 20% to width and height and give it a minimum size
        x, y = focal_point.centroid
        width, height = focal_point.size

        width *= 1.20
        height *= 1.20

        width = max(width, 100)
        height = max(height, 100)

        return Rect.from_point(x, y, width, height)

    @classmethod
    def get_rendition_model(cls):
        """ Get the Rendition model for this Image model """
        if django.VERSION >= (1, 9):
            return cls.renditions.rel.related_model
        else:
            # Django < 1.9 exposes the relation differently
            return cls.renditions.related.related_model

    def get_rendition(self, filter):
        """Return (creating and caching if necessary) the rendition of this
        image for *filter* — either a Filter instance or a spec string."""
        if isinstance(filter, string_types):
            filter, created = Filter.objects.get_or_create(spec=filter)

        cache_key = filter.get_cache_key(self)
        Rendition = self.get_rendition_model()

        try:
            rendition = self.renditions.get(
                filter=filter,
                focal_point_key=cache_key,
            )
        except Rendition.DoesNotExist:
            # Generate the rendition image
            generated_image = filter.run(self, BytesIO())

            # Generate filename
            input_filename = os.path.basename(self.file.name)
            input_filename_without_extension, input_extension = os.path.splitext(input_filename)

            # A mapping of image formats to extensions
            FORMAT_EXTENSIONS = {
                'jpeg': '.jpg',
                'png': '.png',
                'gif': '.gif',
            }

            output_extension = filter.spec.replace('|', '.') + FORMAT_EXTENSIONS[generated_image.format_name]
            if cache_key:
                output_extension = cache_key + '.' + output_extension

            # Truncate filename to prevent it going over 60 chars
            output_filename_without_extension = input_filename_without_extension[:(59 - len(output_extension))]
            output_filename = output_filename_without_extension + '.' + output_extension

            rendition, created = self.renditions.get_or_create(
                filter=filter,
                focal_point_key=cache_key,
                defaults={'file': File(generated_image.f, name=output_filename)}
            )

        return rendition

    def is_portrait(self):
        return (self.width < self.height)

    def is_landscape(self):
        return (self.height < self.width)

    @property
    def filename(self):
        return os.path.basename(self.file.name)

    @property
    def default_alt_text(self):
        # by default the alt text field (used in rich text insertion) is populated
        # from the title. Subclasses might provide a separate alt field, and
        # override this
        return self.title

    def is_editable_by_user(self, user):
        from wagtail.wagtailimages.permissions import permission_policy
        return permission_policy.user_has_permission_for_instance(user, 'change', self)

    class Meta:
        abstract = True
class Image(AbstractImage):
    """Concrete default image model; only declares the admin form fields."""
    admin_form_fields = (
        'title',
        'file',
        'collection',
        'tags',
        'focal_point_x',
        'focal_point_y',
        'focal_point_width',
        'focal_point_height',
    )
# Do smartcropping calculations when user saves an image without a focal point
@receiver(pre_save, sender=Image)
def image_feature_detection(sender, instance, **kwargs):
    """pre_save: auto-suggest a focal point when the feature is enabled."""
    if getattr(settings, 'WAGTAILIMAGES_FEATURE_DETECTION_ENABLED', False):
        # Make sure the image doesn't already have a focal point
        if not instance.has_focal_point():
            # Set the focal point
            instance.set_focal_point(instance.get_suggested_focal_point())
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Image)
def image_delete(sender, instance, **kwargs):
    """pre_delete: remove the image file from storage alongside the row."""
    # Pass false so FileField doesn't save the model.
    instance.file.delete(False)
def get_image_model():
    """Return the image model configured by WAGTAILIMAGES_IMAGE_MODEL,
    falling back to the built-in Image model when the setting is absent."""
    from django.conf import settings
    from django.apps import apps

    try:
        app_label, model_name = settings.WAGTAILIMAGES_IMAGE_MODEL.split('.')
    except AttributeError:
        # setting not defined: use the default model
        return Image
    except ValueError:
        raise ImproperlyConfigured("WAGTAILIMAGES_IMAGE_MODEL must be of the form 'app_label.model_name'")

    model = apps.get_model(app_label, model_name)
    if model is None:
        raise ImproperlyConfigured(
            "WAGTAILIMAGES_IMAGE_MODEL refers to model '%s' that has not been installed" %
            settings.WAGTAILIMAGES_IMAGE_MODEL
        )
    return model
class Filter(models.Model):
    """
    Represents one or more operations that can be applied to an Image to produce a rendition
    appropriate for final display on the website. Usually this would be a resize operation,
    but could potentially involve colour processing, etc.
    """

    # The spec pattern is operation1-var1-var2|operation2-var1
    spec = models.CharField(max_length=255, unique=True)

    @cached_property
    def operations(self):
        """Parse self.spec into a list of operation objects (computed once)."""
        # Search for operations
        self._search_for_operations()

        # Build list of operation objects
        operations = []
        for op_spec in self.spec.split('|'):
            op_spec_parts = op_spec.split('-')

            if op_spec_parts[0] not in self._registered_operations:
                raise InvalidFilterSpecError("Unrecognised operation: %s" % op_spec_parts[0])

            op_class = self._registered_operations[op_spec_parts[0]]
            operations.append(op_class(*op_spec_parts))
        return operations

    def run(self, image, output):
        """Apply every operation to *image* and write the result to *output*
        (a file-like object); returns whatever the Willow save call returns."""
        with image.get_willow_image() as willow:
            original_format = willow.format_name

            # Fix orientation of image
            willow = willow.auto_orient()

            for operation in self.operations:
                # operations may return None to mean "image unchanged"
                willow = operation.run(willow, image) or willow

            if original_format == 'jpeg':
                # Allow changing of JPEG compression quality
                if hasattr(settings, 'WAGTAILIMAGES_JPEG_QUALITY'):
                    quality = settings.WAGTAILIMAGES_JPEG_QUALITY
                else:
                    quality = 85

                return willow.save_as_jpeg(output, quality=quality)
            elif original_format == 'gif':
                # Convert image to PNG if it's not animated
                if not willow.has_animation():
                    return willow.save_as_png(output)
                else:
                    return willow.save_as_gif(output)
            elif original_format == 'bmp':
                # Convert to PNG
                return willow.save_as_png(output)
            else:
                return willow.save(original_format, output)

    def get_cache_key(self, image):
        """Return a short sha1 of the image fields this filter's operations
        declare in 'vary_fields', or '' when nothing varies."""
        vary_parts = []

        for operation in self.operations:
            for field in getattr(operation, 'vary_fields', []):
                value = getattr(image, field, '')
                vary_parts.append(str(value))

        vary_string = '-'.join(vary_parts)

        # Return blank string if there are no vary fields
        if not vary_string:
            return ''

        return hashlib.sha1(vary_string.encode('utf-8')).hexdigest()[:8]

    # class-level registry of {operation name: operation class}; populated
    # lazily by _search_for_operations()
    _registered_operations = None

    @classmethod
    def _search_for_operations(cls):
        """Populate the operation registry from the
        'register_image_operations' hooks (runs at most once)."""
        if cls._registered_operations is not None:
            return

        operations = []
        for fn in hooks.get_hooks('register_image_operations'):
            operations.extend(fn())

        cls._registered_operations = dict(operations)
class AbstractRendition(models.Model):
    """Base class for renditions: a generated image file tied to a Filter
    and (optionally) the focal point state it was generated with."""
    filter = models.ForeignKey(Filter, related_name='+')
    file = models.ImageField(upload_to=get_rendition_upload_to, width_field='width', height_field='height')
    width = models.IntegerField(editable=False)
    height = models.IntegerField(editable=False)
    # hash of the vary fields at generation time; '' when the filter has none
    focal_point_key = models.CharField(max_length=255, blank=True, default='', editable=False)

    @property
    def url(self):
        return self.file.url

    @property
    def alt(self):
        # alt text comes from the source image's title
        return self.image.title

    @property
    def attrs(self):
        """
        The src, width, height, and alt attributes for an <img> tag, as a HTML
        string
        """
        return flatatt(self.attrs_dict)

    @property
    def attrs_dict(self):
        """
        A dict of the src, width, height, and alt attributes for an <img> tag.
        """
        return OrderedDict([
            ('src', self.url),
            ('width', self.width),
            ('height', self.height),
            ('alt', self.alt),
        ])

    # NOTE(review): the mutable default is safe here because extra_attributes
    # is only read (attrs is a copy), but a None default would be cleaner.
    def img_tag(self, extra_attributes={}):
        """Render a complete <img> tag, with extra_attributes overriding the
        default src/width/height/alt attributes."""
        attrs = self.attrs_dict.copy()
        attrs.update(extra_attributes)
        return mark_safe('<img{}>'.format(flatatt(attrs)))

    def __html__(self):
        # allows renditions to be embedded directly in templates
        return self.img_tag()

    def get_upload_to(self, filename):
        """Build the storage path for a rendition file."""
        folder_name = 'images'
        filename = self.file.field.storage.get_valid_name(filename)
        return os.path.join(folder_name, filename)

    class Meta:
        abstract = True
class Rendition(AbstractRendition):
    """Concrete rendition model for the default Image model."""
    image = models.ForeignKey(Image, related_name='renditions')

    class Meta:
        # one cached rendition per (image, filter, focal point state)
        unique_together = (
            ('image', 'filter', 'focal_point_key'),
        )
# Receive the pre_delete signal and delete the file associated with the model instance.
@receiver(pre_delete, sender=Rendition)
def rendition_delete(sender, instance, **kwargs):
    """pre_delete: remove the rendition file from storage with the row."""
    # Pass false so FileField doesn't save the model.
    instance.file.delete(False)
|
bsd-3-clause
|
windofthesky/ansible
|
v1/ansible/runner/action_plugins/assemble.py
|
109
|
6150
|
# (c) 2013-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Stephen Fromm <sfromm@gmail.com>
# Brian Coca <briancoca+dev@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
import os
import os.path
import pipes
import shutil
import tempfile
import base64
import re
from ansible import utils
from ansible.runner.return_data import ReturnData
class ActionModule(object):
    """Action plugin for the 'assemble' module: concatenate a directory of
    file fragments into one file and copy the result to the target host."""

    TRANSFERS_FILES = True

    def __init__(self, runner):
        self.runner = runner

    def _assemble_from_fragments(self, src_path, delimiter=None, compiled_regexp=None):
        ''' assemble a file from a directory of fragments '''
        tmpfd, temp_path = tempfile.mkstemp()
        tmp = os.fdopen(tmpfd,'w')
        delimit_me = False
        add_newline = False

        for f in sorted(os.listdir(src_path)):
            # honour the optional filename filter
            if compiled_regexp and not compiled_regexp.search(f):
                continue
            fragment = "%s/%s" % (src_path, f)
            if not os.path.isfile(fragment):
                continue
            # use open() in a context manager instead of the Python-2-only
            # file() builtin — also fixes a file descriptor leak (the old
            # handle was never closed)
            with open(fragment) as fragment_fh:
                fragment_content = fragment_fh.read()

            # always put a newline between fragments if the previous fragment didn't end with a newline.
            if add_newline:
                tmp.write('\n')

            # delimiters should only appear between fragments
            if delimit_me:
                if delimiter:
                    # un-escape anything like newlines
                    # NOTE(review): str.decode() only exists on Python 2;
                    # this needs codecs.decode() for Python 3 — confirm the
                    # supported interpreter versions
                    delimiter = delimiter.decode('unicode-escape')
                    tmp.write(delimiter)
                    # always make sure there's a newline after the
                    # delimiter, so lines don't run together
                    if delimiter[-1] != '\n':
                        tmp.write('\n')

            tmp.write(fragment_content)
            delimit_me = True
            if fragment_content.endswith('\n'):
                add_newline = False
            else:
                add_newline = True

        tmp.close()
        return temp_path

    def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
        """Assemble fragments locally (unless remote_src) and push the
        result to *dest* on the target, honouring check and diff modes."""

        # load up options
        options = {}
        if complex_args:
            options.update(complex_args)
        options.update(utils.parse_kv(module_args))

        src = options.get('src', None)
        dest = options.get('dest', None)
        delimiter = options.get('delimiter', None)
        remote_src = utils.boolean(options.get('remote_src', 'yes'))
        regexp = options.get('regexp', None)

        if src is None or dest is None:
            result = dict(failed=True, msg="src and dest are required")
            return ReturnData(conn=conn, comm_ok=False, result=result)

        if remote_src:
            # fragments already live on the target; delegate entirely
            return self.runner._execute_module(conn, tmp, 'assemble', module_args, inject=inject, complex_args=complex_args)
        elif '_original_file' in inject:
            src = utils.path_dwim_relative(inject['_original_file'], 'files', src, self.runner.basedir)
        else:
            # the source is local, so expand it here
            src = os.path.expanduser(src)

        _re = None
        if regexp is not None:
            _re = re.compile(regexp)

        # Does all work assembling the file
        path = self._assemble_from_fragments(src, delimiter, _re)
        path_checksum = utils.checksum_s(path)
        dest = self.runner._remote_expand_user(conn, dest, tmp)
        remote_checksum = self.runner._remote_checksum(conn, tmp, dest, inject)

        if path_checksum != remote_checksum:
            # content differs: transfer the assembled file and run 'copy'
            with open(path) as assembled_fh:
                resultant = assembled_fh.read()
            if self.runner.diff:
                dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, inject=inject, persist_files=True)
                if 'content' in dest_result.result:
                    dest_contents = dest_result.result['content']
                    if dest_result.result['encoding'] == 'base64':
                        dest_contents = base64.b64decode(dest_contents)
                    else:
                        raise Exception("unknown encoding, failed: %s" % dest_result.result)
            xfered = self.runner._transfer_str(conn, tmp, 'src', resultant)

            # fix file permissions when the copy is done as a different user
            if self.runner.become and self.runner.become_user != 'root':
                self.runner._remote_chmod(conn, 'a+r', xfered, tmp)

            # run the copy module
            new_module_args = dict(
                src=xfered,
                dest=dest,
                original_basename=os.path.basename(src),
            )
            module_args_tmp = utils.merge_module_args(module_args, new_module_args)

            if self.runner.noop_on_check(inject):
                return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True), diff=dict(before_header=dest, after_header=src, after=resultant))
            else:
                res = self.runner._execute_module(conn, tmp, 'copy', module_args_tmp, inject=inject)
                res.diff = dict(after=resultant)
                return res
        else:
            # content already matches: only manage attributes via the 'file'
            # module. Bug fix: the old code passed src=xfered here, but
            # xfered is only assigned in the changed-content branch above,
            # so an unchanged file always raised NameError.
            new_module_args = dict(
                dest=dest,
                original_basename=os.path.basename(src),
            )

            # make sure checkmod is passed on correctly
            if self.runner.noop_on_check(inject):
                new_module_args['CHECKMODE'] = True

            module_args_tmp = utils.merge_module_args(module_args, new_module_args)
            return self.runner._execute_module(conn, tmp, 'file', module_args_tmp, inject=inject)
|
gpl-3.0
|
rperier/linux
|
tools/perf/tests/attr.py
|
532
|
11651
|
# SPDX-License-Identifier: GPL-2.0
from __future__ import print_function
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
try:
import configparser
except ImportError:
import ConfigParser as configparser
def data_equal(a, b):
    """Compare two event term values; '|' separates alternative values on
    either side, and '*' matches anything."""
    # Allow multiple values in assignment separated by '|'
    for left in a.split('|'):
        for right in b.split('|'):
            if left == right or left == '*' or right == '*':
                return True
    return False
class Fail(Exception):
    """Raised when a test's observed events differ from its expectations."""

    def __init__(self, test, msg):
        self.test = test
        self.msg = msg

    def getMsg(self):
        return "'%s' - %s" % (self.test.path, self.msg)
class Notest(Exception):
    """Raised when a test is excluded for the current architecture."""

    def __init__(self, test, arch):
        self.test = test
        self.arch = arch

    def getMsg(self):
        return "[%s] '%s'" % (self.arch, self.test.path)
class Unsup(Exception):
    """Raised when the tested feature is unsupported on this system."""

    def __init__(self, test):
        self.test = test

    def getMsg(self):
        return "'%s'" % self.test.path
class Event(dict):
    """One perf event: a dict mapping perf_event_attr term names to their
    string values, plus a name and an (initially empty) group."""

    # All perf_event_attr terms a test description may assert on.
    terms = [
        'cpu',
        'flags',
        'type',
        'size',
        'config',
        'sample_period',
        'sample_type',
        'read_format',
        'disabled',
        'inherit',
        'pinned',
        'exclusive',
        'exclude_user',
        'exclude_kernel',
        'exclude_hv',
        'exclude_idle',
        'mmap',
        'comm',
        'freq',
        'inherit_stat',
        'enable_on_exec',
        'task',
        'watermark',
        'precise_ip',
        'mmap_data',
        'sample_id_all',
        'exclude_host',
        'exclude_guest',
        'exclude_callchain_kernel',
        'exclude_callchain_user',
        'wakeup_events',
        'bp_type',
        'config1',
        'config2',
        'branch_sample_type',
        'sample_regs_user',
        'sample_stack_user',
    ]

    def __init__(self, name, data, base):
        log.debug(" Event %s" % name)
        self.name = name
        self.group = ''
        # base values first; the event's own values then override them
        self.add(base)
        self.add(data)

    def add(self, data):
        """Merge an iterable of (key, value) pairs into this event."""
        for key, val in data:
            log.debug(" %s = %s" % (key, val))
            self[key] = val

    def equal(self, other):
        """Return True when both events carry matching values for every term.

        A term missing from either side makes the events unequal.
        """
        for t in Event.terms:
            # Bug fix: membership must be checked *before* the debug line —
            # the old code indexed self[t]/other[t] in the %-format first,
            # raising KeyError for any absent term even with debug logging
            # disabled (the format expression is evaluated unconditionally).
            if t not in self or t not in other:
                return False
            log.debug(" [%s] %s %s" % (t, self[t], other[t]))
            if not data_equal(self[t], other[t]):
                return False
        return True

    def optional(self):
        """True when the event is marked optional ('optional' = '1')."""
        if 'optional' in self and self['optional'] == '1':
            return True
        return False

    def diff(self, other):
        """Log every term whose values differ between self and other."""
        for t in Event.terms:
            if t not in self or t not in other:
                continue
            if not data_equal(self[t], other[t]):
                log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
# 'arch' - architecture specific test (optional)
# comma separated list, ! at the beginning
# negates it.
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
class Test(object):
    """One test-file run: executes a perf command and matches the
    perf_event_attr values it produced against the expected events.

    NOTE: configparser.SafeConfigParser was deprecated in Python 3.2 and
    removed in 3.12; ConfigParser is the drop-in replacement used here.
    """

    def __init__(self, path, options):
        parser = configparser.ConfigParser()
        parser.read(path)
        log.warning("running '%s'" % path)
        self.path = path
        self.test_dir = options.test_dir
        self.perf = options.perf
        self.command = parser.get('config', 'command')
        self.args = parser.get('config', 'args')
        try:
            self.ret = parser.get('config', 'ret')
        except:
            # 'ret' is optional; default to success.
            self.ret = 0
        try:
            self.arch = parser.get('config', 'arch')
            log.warning("test limitation '%s'" % self.arch)
        except:
            # No 'arch' restriction: run everywhere.
            self.arch = ''
        self.expect = {}
        self.result = {}
        log.debug(" loading expected events")
        self.load_events(path, self.expect)

    def is_event(self, name):
        """Return True for section names describing an event record."""
        if name.find("event") == -1:
            return False
        else:
            return True

    def skip_test(self, myarch):
        """Return True when this test must be skipped on *myarch*."""
        # If architecture not set always run test
        if self.arch == '':
            # log.warning("test for arch %s is ok" % myarch)
            return False
        # Allow multiple values in assignment separated by ','
        arch_list = self.arch.split(',')
        # Handle negated list such as !s390x,ppc
        if arch_list[0][0] == '!':
            arch_list[0] = arch_list[0][1:]
            log.warning("excluded architecture list %s" % arch_list)
            for arch_item in arch_list:
                # log.warning("test for %s arch is %s" % (arch_item, myarch))
                if arch_item == myarch:
                    return True
            return False
        for arch_item in arch_list:
            # log.warning("test for architecture '%s' current '%s'" % (arch_item, myarch))
            if arch_item == myarch:
                return False
        return True

    def load_events(self, path, events):
        """Parse *path* and store one Event per event section in *events*."""
        parser_event = configparser.ConfigParser()
        parser_event.read(path)
        # The event record section header contains 'event' word,
        # optionally followed by ':' allowing to load 'parent
        # event' first as a base
        for section in filter(self.is_event, parser_event.sections()):
            parser_items = parser_event.items(section)
            base_items = {}
            # Read parent event if there's any
            if (':' in section):
                base = section[section.index(':') + 1:]
                parser_base = configparser.ConfigParser()
                parser_base.read(self.test_dir + '/' + base)
                base_items = parser_base.items('event')
            e = Event(section, parser_items, base_items)
            events[section] = e

    def run_cmd(self, tempdir):
        """Run the perf command; raise Notest/Unsup on skip/failure."""
        junk1, junk2, junk3, junk4, myarch = (os.uname())
        if self.skip_test(myarch):
            raise Notest(self, myarch)
        cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
              self.perf, self.command, tempdir, self.args)
        ret = os.WEXITSTATUS(os.system(cmd))
        log.info(" '%s' ret '%s', expected '%s'" % (cmd, str(ret), str(self.ret)))
        if not data_equal(str(ret), str(self.ret)):
            raise Unsup(self)

    def compare(self, expect, result):
        """Match every event in *expect* against *result*; raise Fail
        on a missing match or a group-leader mismatch."""
        match = {}
        log.debug(" compare")
        # For each expected event find all matching
        # events in result. Fail if there's not any.
        for exp_name, exp_event in expect.items():
            exp_list = []
            res_event = {}
            log.debug(" matching [%s]" % exp_name)
            for res_name, res_event in result.items():
                log.debug(" to [%s]" % res_name)
                if (exp_event.equal(res_event)):
                    exp_list.append(res_name)
                    log.debug(" ->OK")
                else:
                    log.debug(" ->FAIL")
            log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list)))
            # we did not find any matching event - fail
            if not exp_list:
                if exp_event.optional():
                    log.debug(" %s does not match, but is optional" % exp_name)
                else:
                    if not res_event:
                        log.debug(" res_event is empty")
                    else:
                        exp_event.diff(res_event)
                    raise Fail(self, 'match failure')
            match[exp_name] = exp_list
        # For each defined group in the expected events
        # check we match the same group in the result.
        for exp_name, exp_event in expect.items():
            group = exp_event.group
            if (group == ''):
                continue
            for res_name in match[exp_name]:
                res_group = result[res_name].group
                if res_group not in match[group]:
                    raise Fail(self, 'group failure')
                log.debug(" group: [%s] matches group leader %s" %
                          (exp_name, str(match[group])))
        log.debug(" matched")

    def resolve_groups(self, events):
        """Replace numeric group_fd references with event names."""
        for name, event in events.items():
            group_fd = event['group_fd']
            if group_fd == '-1':
                continue
            for iname, ievent in events.items():
                if (ievent['fd'] == group_fd):
                    event.group = iname
                    log.debug('[%s] has group leader [%s]' % (name, iname))
                    break

    def run(self):
        """Execute the test end-to-end in a throw-away temp directory."""
        tempdir = tempfile.mkdtemp()
        try:
            # run the test script
            self.run_cmd(tempdir)
            # load events expectation for the test
            log.debug(" loading result events")
            for f in glob.glob(tempdir + '/event*'):
                self.load_events(f, self.result)
            # resolve group_fd to event names
            self.resolve_groups(self.expect)
            self.resolve_groups(self.result)
            # do the expectation - results matching - both ways
            self.compare(self.expect, self.result)
            self.compare(self.result, self.expect)
        finally:
            # cleanup
            shutil.rmtree(tempdir)
def run_tests(options):
    """Run every test file matching options.test under options.test_dir.

    Unsupported and architecture-skipped tests are logged and do not
    abort the run; a Fail exception propagates to the caller.
    """
    pattern = options.test_dir + '/' + options.test
    for path in glob.glob(pattern):
        try:
            Test(path, options).run()
        except Unsup as obj:
            log.warning("unsupp %s" % obj.getMsg())
        except Notest as obj:
            log.warning("skipped %s" % obj.getMsg())
def setup_log(verbose):
    """Initialize the module-wide 'test' logger.

    Verbosity mapping: 0 -> CRITICAL, 1 -> WARNING, 2 -> INFO,
    3 or more -> DEBUG. Messages go to stderr via a StreamHandler.
    """
    global log
    if verbose >= 3:
        level = logging.DEBUG
    elif verbose == 2:
        level = logging.INFO
    elif verbose == 1:
        level = logging.WARNING
    else:
        level = logging.CRITICAL
    log = logging.getLogger('test')
    log.setLevel(level)
    handler = logging.StreamHandler()
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter('%(message)s'))
    log.addHandler(handler)
# Command-line help text shown by optparse; the %s placeholder is filled
# with the script name (sys.argv[0]) at import time.
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
    """Parse command-line options, configure logging and run the suite.

    Exits with -1 on failure or missing -d option, 0 on success.
    """
    parser = optparse.OptionParser(usage=USAGE)
    # Register all options from one table instead of repeated calls.
    for short, full, kwargs in (
            ("-t", "--test",
             dict(action="store", type="string", dest="test")),
            ("-d", "--test-dir",
             dict(action="store", type="string", dest="test_dir")),
            ("-p", "--perf",
             dict(action="store", type="string", dest="perf")),
            ("-v", "--verbose",
             dict(default=0, action="count", dest="verbose"))):
        parser.add_option(short, full, **kwargs)
    options, args = parser.parse_args()
    if args:
        # parser.error() raises SystemExit; the return is never reached
        # but kept for parity with the original control flow.
        parser.error('FAILED wrong arguments %s' % ' '.join(args))
        return -1
    setup_log(options.verbose)
    if not options.test_dir:
        print('FAILED no -d option specified')
        sys.exit(-1)
    if not options.test:
        options.test = 'test*'
    try:
        run_tests(options)
    except Fail as obj:
        print("FAILED %s" % obj.getMsg())
        sys.exit(-1)
    sys.exit(0)

if __name__ == '__main__':
    main()
|
gpl-2.0
|
kragniz/searchlight
|
tools/colorizer.py
|
21
|
11688
|
#!/usr/bin/env python
# Copyright (c) 2013, Nebula, Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Colorizer Code is borrowed from Twisted:
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Display a subunit stream through a colorized unittest test runner."""
import heapq
import sys
import unittest
import subunit
import testtools
class _AnsiColorizer(object):
"""A colorizer is an object that loosely wraps around a stream.
That allows callers to write text to the stream in a particular color.
Colorizer classes must implement C{supported()} and C{write(text, color)}.
"""
_colors = dict(black=30, red=31, green=32, yellow=33,
blue=34, magenta=35, cyan=36, white=37)
def __init__(self, stream):
self.stream = stream
@staticmethod
def supported(stream=sys.stdout):
"""Method that checks if the current terminal supports coloring.
Returns True or False.
"""
if not stream.isatty():
return False # auto color only on TTYs
try:
import curses
except ImportError:
return False
else:
try:
try:
return curses.tigetnum("colors") > 2
except curses.error:
curses.setupterm()
return curses.tigetnum("colors") > 2
except Exception:
# guess false in case of error
return False
def write(self, text, color):
"""Write the given text to the stream in the given color.
@param text: Text to be written to the stream.
@param color: A string label for a color. e.g. 'red', 'white'.
"""
color = self._colors[color]
self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text))
class _Win32Colorizer(object):
    """See _AnsiColorizer docstring."""
    def __init__(self, stream):
        # win32console is only available with pywin32 on Windows; the
        # import is local so this class merely existing costs nothing
        # on other platforms.
        import win32console
        red, green, blue, bold = (win32console.FOREGROUND_RED,
                                  win32console.FOREGROUND_GREEN,
                                  win32console.FOREGROUND_BLUE,
                                  win32console.FOREGROUND_INTENSITY)
        self.stream = stream
        self.screenBuffer = win32console.GetStdHandle(
            win32console.STD_OUT_HANDLE)
        # Compose the basic console attributes into the same named
        # palette the ANSI colorizer exposes.
        self._colors = {
            'normal': red | green | blue,
            'red': red | bold,
            'green': green | bold,
            'blue': blue | bold,
            'yellow': red | green | bold,
            'magenta': red | blue | bold,
            'cyan': green | blue | bold,
            'white': red | green | blue | bold
        }
    @staticmethod
    def supported(stream=sys.stdout):
        # Probe by actually setting an attribute on the console buffer;
        # any pywintypes error (e.g. stream redirected) means "no color".
        try:
            import win32console
            screenBuffer = win32console.GetStdHandle(
                win32console.STD_OUT_HANDLE)
        except ImportError:
            return False
        import pywintypes
        try:
            screenBuffer.SetConsoleTextAttribute(
                win32console.FOREGROUND_RED |
                win32console.FOREGROUND_GREEN |
                win32console.FOREGROUND_BLUE)
        except pywintypes.error:
            return False
        else:
            return True
    def write(self, text, color):
        # Switch the console color, emit the text, then restore 'normal'
        # so later plain writes are unaffected.
        color = self._colors[color]
        self.screenBuffer.SetConsoleTextAttribute(color)
        self.stream.write(text)
        self.screenBuffer.SetConsoleTextAttribute(self._colors['normal'])
class _NullColorizer(object):
"""See _AnsiColorizer docstring."""
def __init__(self, stream):
self.stream = stream
@staticmethod
def supported(stream=sys.stdout):
return True
def write(self, text, color):
self.stream.write(text)
def get_elapsed_time_color(elapsed_time):
    """Map a test duration in seconds to a traffic-light color name.

    > 1.0s is 'red', > 0.25s is 'yellow', anything else is 'green'.
    """
    if elapsed_time > 1.0:
        return 'red'
    if elapsed_time > 0.25:
        return 'yellow'
    return 'green'
class SubunitTestResult(testtools.TestResult):
    """TestResult that renders a subunit stream with colors and timings,
    grouped by test class, and reports the slowest tests at the end."""

    def __init__(self, stream, descriptions, verbosity):
        super(SubunitTestResult, self).__init__()
        self.stream = stream
        self.showAll = verbosity > 1
        self.num_slow_tests = 10
        self.slow_tests = []  # this is a fixed-sized heap
        self.colorizer = None
        # NOTE(vish): reset stdout for the terminal check
        stdout = sys.stdout
        sys.stdout = sys.__stdout__
        for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]:
            if colorizer.supported():
                self.colorizer = colorizer(self.stream)
                break
        sys.stdout = stdout
        self.start_time = None
        self.last_time = {}
        self.results = {}
        self.last_written = None

    def _writeElapsedTime(self, elapsed):
        """Print the elapsed seconds colored by how slow they are."""
        color = get_elapsed_time_color(elapsed)
        self.colorizer.write(" %.2f" % elapsed, color)

    def _addResult(self, test, *args):
        """Record one finished test, track it in the slow-test heap and
        flush any class sections that are ready to print."""
        try:
            name = test.id()
        except AttributeError:
            name = 'Unknown.unknown'
        test_class, test_name = name.rsplit('.', 1)
        elapsed = (self._now() - self.start_time).total_seconds()
        item = (elapsed, test_class, test_name)
        # Keep only the num_slow_tests slowest entries.
        if len(self.slow_tests) >= self.num_slow_tests:
            heapq.heappushpop(self.slow_tests, item)
        else:
            heapq.heappush(self.slow_tests, item)
        self.results.setdefault(test_class, [])
        self.results[test_class].append((test_name, elapsed) + args)
        self.last_time[test_class] = self._now()
        self.writeTests()

    def _writeResult(self, test_name, elapsed, long_result, color,
                     short_result, success):
        """Emit one result line (verbose) or one status char (quiet)."""
        if self.showAll:
            self.stream.write('    %s' % str(test_name).ljust(66))
            self.colorizer.write(long_result, color)
            if success:
                self._writeElapsedTime(elapsed)
            self.stream.writeln()
        else:
            self.colorizer.write(short_result, color)

    def addSuccess(self, test):
        super(SubunitTestResult, self).addSuccess(test)
        self._addResult(test, 'OK', 'green', '.', True)

    def addFailure(self, test, err):
        if test.id() == 'process-returncode':
            return
        super(SubunitTestResult, self).addFailure(test, err)
        self._addResult(test, 'FAIL', 'red', 'F', False)

    def addError(self, test, err):
        # BUGFIX: the original called super().addFailure() here, which
        # counted errors as failures and left self.errors empty.
        super(SubunitTestResult, self).addError(test, err)
        self._addResult(test, 'ERROR', 'red', 'E', False)

    def addSkip(self, test, reason=None, details=None):
        super(SubunitTestResult, self).addSkip(test, reason, details)
        self._addResult(test, 'SKIP', 'blue', 'S', True)

    def startTest(self, test):
        self.start_time = self._now()
        super(SubunitTestResult, self).startTest(test)

    def writeTestCase(self, cls):
        """Flush all buffered results for one test class to the stream."""
        if not self.results.get(cls):
            return
        if cls != self.last_written:
            self.colorizer.write(cls, 'white')
            self.stream.writeln()
        for result in self.results[cls]:
            self._writeResult(*result)
        del self.results[cls]
        self.stream.flush()
        self.last_written = cls

    def writeTests(self):
        """Flush classes whose results have been quiet for > 2 seconds."""
        time = self.last_time.get(self.last_written, self._now())
        if not self.last_written or (self._now() - time).total_seconds() > 2.0:
            diff = 3.0
            while diff > 2.0:
                classes = self.results.keys()
                # Guard: writeTestCase() removes entries, so the dict can
                # empty out mid-loop; min() on an empty view would raise.
                if not classes:
                    break
                oldest = min(classes, key=lambda x: self.last_time[x])
                diff = (self._now() - self.last_time[oldest]).total_seconds()
                self.writeTestCase(oldest)
        else:
            self.writeTestCase(self.last_written)

    def done(self):
        self.stopTestRun()

    def stopTestRun(self):
        # BUGFIX: dict.iterkeys() is Python 2 only; iterate the dict
        # directly (copied to a list because writeTestCase mutates it).
        for cls in list(self.results):
            self.writeTestCase(cls)
        self.stream.writeln()
        self.writeSlowTests()

    def writeSlowTests(self):
        """Print the slowest non-green tests, grouped by class."""
        # Pare out 'fast' tests
        slow_tests = [item for item in self.slow_tests
                      if get_elapsed_time_color(item[0]) != 'green']
        if slow_tests:
            slow_total_time = sum(item[0] for item in slow_tests)
            slow = ("Slowest %i tests took %.2f secs:"
                    % (len(slow_tests), slow_total_time))
            self.colorizer.write(slow, 'yellow')
            self.stream.writeln()
            last_cls = None
            # sort by name
            for elapsed, cls, name in sorted(slow_tests,
                                             key=lambda x: x[1] + x[2]):
                if cls != last_cls:
                    self.colorizer.write(cls, 'white')
                    self.stream.writeln()
                    last_cls = cls
                self.stream.write('    %s' % str(name).ljust(68))
                self._writeElapsedTime(elapsed)
                self.stream.writeln()

    def printErrors(self):
        if self.showAll:
            self.stream.writeln()
        self.printErrorList('ERROR', self.errors)
        self.printErrorList('FAIL', self.failures)

    def printErrorList(self, flavor, errors):
        for test, err in errors:
            self.colorizer.write("=" * 70, 'red')
            self.stream.writeln()
            self.colorizer.write(flavor, 'red')
            self.stream.writeln(": %s" % test.id())
            self.colorizer.write("-" * 70, 'red')
            self.stream.writeln()
            self.stream.writeln("%s" % err)
# Parse the subunit stream arriving on stdin; passthrough=None keeps
# non-subunit chatter out of the report.
test = subunit.ProtocolTestCase(sys.stdin, passthrough=None)
if sys.version_info[0:2] <= (2, 6):
    # unittest on Python <= 2.6 has no resultclass parameter.
    runner = unittest.TextTestRunner(verbosity=2)
else:
    runner = unittest.TextTestRunner(
        verbosity=2, resultclass=SubunitTestResult)
# Exit 0 on a fully successful stream, 1 otherwise.
if runner.run(test).wasSuccessful():
    exit_code = 0
else:
    exit_code = 1
sys.exit(exit_code)
|
apache-2.0
|
hehongliang/tensorflow
|
tensorflow/python/keras/optimizer_v2/ftrl_test.py
|
1
|
17276
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for Ftrl operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.keras.optimizer_v2 import ftrl
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import adagrad
from tensorflow.python.training import gradient_descent
class FtrlOptimizerTest(test.TestCase):
  """Tests for the Keras v2 FTRL optimizer.

  The expected numeric values are golden outputs of the reference
  implementation; tolerances are dtype-dependent via
  assertAllCloseAccordingToType.
  """

  def doTestFtrlwithoutRegularization(self, use_resource=False):
    # Shared body for the resource/non-resource variable variants below.
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session() as sess:
        if use_resource:
          var0 = resource_variable_ops.ResourceVariable([0.0, 0.0], dtype=dtype)
          var1 = resource_variable_ops.ResourceVariable([0.0, 0.0], dtype=dtype)
        else:
          var0 = variables.Variable([0.0, 0.0], dtype=dtype)
          var1 = variables.Variable([0.0, 0.0], dtype=dtype)
        grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
        grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
        opt = ftrl.Ftrl(
            3.0,
            initial_accumulator_value=0.1,
            l1_regularization_strength=0.0,
            l2_regularization_strength=0.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllClose([0.0, 0.0], v0_val)
        self.assertAllClose([0.0, 0.0], v1_val)
        # Run 3 steps FTRL
        for _ in range(3):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(
            np.array([-2.60260963, -4.29698515]), v0_val)
        self.assertAllCloseAccordingToType(
            np.array([-0.28432083, -0.56694895]), v1_val)

  def testFtrlWithoutRegularization(self):
    self.doTestFtrlwithoutRegularization(use_resource=False)

  def testResourceFtrlWithoutRegularization(self):
    self.doTestFtrlwithoutRegularization(use_resource=True)

  def testFtrlwithoutRegularization2(self):
    # Same as above but with non-zero initial variable values.
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session() as sess:
        var0 = variables.Variable([1.0, 2.0], dtype=dtype)
        var1 = variables.Variable([4.0, 3.0], dtype=dtype)
        grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
        grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
        opt = ftrl.Ftrl(
            3.0,
            initial_accumulator_value=0.1,
            l1_regularization_strength=0.0,
            l2_regularization_strength=0.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
        self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
        # Run 3 steps FTRL
        for _ in range(3):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(
            np.array([-2.55607247, -3.98729396]), v0_val)
        self.assertAllCloseAccordingToType(
            np.array([-0.28232238, -0.56096673]), v1_val)

  def testMinimizeSparseResourceVariable(self):
    for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
      with self.cached_session():
        var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype)
        x = constant_op.constant([[4.0], [5.0]], dtype=dtype)
        # embedding_lookup produces sparse (IndexedSlices) gradients.
        pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x)
        loss = pred * pred
        sgd_op = ftrl.Ftrl(1.0).minimize(loss, var_list=[var0])
        variables.global_variables_initializer().run()
        # Fetch params to validate initial values
        self.assertAllCloseAccordingToType([[1.0, 2.0]], self.evaluate(var0))
        # Run 1 step of sgd
        sgd_op.run()
        # Validate updated params
        self.assertAllCloseAccordingToType([[0, 1]],
                                           self.evaluate(var0),
                                           atol=0.01)

  def testFtrlWithL1(self):
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session() as sess:
        var0 = variables.Variable([1.0, 2.0], dtype=dtype)
        var1 = variables.Variable([4.0, 3.0], dtype=dtype)
        grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
        grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
        opt = ftrl.Ftrl(
            3.0,
            initial_accumulator_value=0.1,
            l1_regularization_strength=0.001,
            l2_regularization_strength=0.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
        self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
        # Run 10 steps FTRL
        for _ in range(10):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(
            np.array([-7.66718769, -10.91273689]), v0_val)
        self.assertAllCloseAccordingToType(
            np.array([-0.93460727, -1.86147261]), v1_val)

  def testFtrlWithL1_L2(self):
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session() as sess:
        var0 = variables.Variable([1.0, 2.0], dtype=dtype)
        var1 = variables.Variable([4.0, 3.0], dtype=dtype)
        grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
        grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
        opt = ftrl.Ftrl(
            3.0,
            initial_accumulator_value=0.1,
            l1_regularization_strength=0.001,
            l2_regularization_strength=2.0)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
        self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
        # Run 10 steps FTRL
        for _ in range(10):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(
            np.array([-0.24059935, -0.46829352]), v0_val)
        self.assertAllCloseAccordingToType(
            np.array([-0.02406147, -0.04830509]), v1_val)

  def testFtrlWithL1_L2_L2Shrinkage(self):
    """Test the new FTRL op with support for l2 shrinkage.

    The addition of this parameter which places a constant pressure on weights
    towards the origin causes the gradient descent trajectory to differ. The
    weights will tend to have smaller magnitudes with this parameter set.
    """
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session() as sess:
        var0 = variables.Variable([1.0, 2.0], dtype=dtype)
        var1 = variables.Variable([4.0, 3.0], dtype=dtype)
        grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
        grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
        opt = ftrl.Ftrl(
            3.0,
            initial_accumulator_value=0.1,
            l1_regularization_strength=0.001,
            l2_regularization_strength=2.0,
            l2_shrinkage_regularization_strength=0.1)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
        self.assertAllCloseAccordingToType([4.0, 3.0], v1_val)
        # Run 10 steps FTRL
        for _ in range(10):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType(
            np.array([-0.22578995, -0.44345796]), v0_val)
        self.assertAllCloseAccordingToType(
            np.array([-0.14378493, -0.13229476]), v1_val)

  def testFtrlWithL1_L2_L2ShrinkageSparse(self):
    """Tests the new FTRL op with support for l2 shrinkage on sparse grads."""
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session() as sess:
        var0 = variables.Variable([[1.0], [2.0]], dtype=dtype)
        var1 = variables.Variable([[4.0], [3.0]], dtype=dtype)
        # Each gradient touches only one row, so the untouched row of
        # each variable must stay at its initial value.
        grads0 = ops.IndexedSlices(
            constant_op.constant([0.1], shape=[1, 1], dtype=dtype),
            constant_op.constant([0]), constant_op.constant([2, 1]))
        grads1 = ops.IndexedSlices(
            constant_op.constant([0.02], shape=[1, 1], dtype=dtype),
            constant_op.constant([1]), constant_op.constant([2, 1]))
        opt = ftrl.Ftrl(
            3.0,
            initial_accumulator_value=0.1,
            l1_regularization_strength=0.001,
            l2_regularization_strength=2.0,
            l2_shrinkage_regularization_strength=0.1)
        update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
        variables.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([[1.0], [2.0]], v0_val)
        self.assertAllCloseAccordingToType([[4.0], [3.0]], v1_val)
        # Run 10 steps FTRL
        for _ in range(10):
          update.run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([[-0.22578995], [2.]], v0_val)
        self.assertAllCloseAccordingToType([[4.], [-0.13229476]], v1_val)

  def testFtrlWithL2ShrinkageDoesNotChangeLrSchedule(self):
    """Verifies that l2 shrinkage in FTRL does not change lr schedule."""
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session() as sess:
        var0 = variables.Variable([1.0, 2.0], dtype=dtype)
        var1 = variables.Variable([1.0, 2.0], dtype=dtype)
        grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
        grads1 = constant_op.constant([0.1, 0.2], dtype=dtype)
        opt0 = ftrl.Ftrl(
            3.0,
            initial_accumulator_value=0.1,
            l1_regularization_strength=0.001,
            l2_regularization_strength=2.0,
            l2_shrinkage_regularization_strength=0.1)
        opt1 = ftrl.Ftrl(
            3.0,
            initial_accumulator_value=0.1,
            l1_regularization_strength=0.001,
            l2_regularization_strength=2.0)
        update0 = opt0.apply_gradients([(grads0, var0)])
        update1 = opt1.apply_gradients([(grads1, var1)])
        variables.global_variables_initializer().run()
        v0_val, v1_val = sess.run([var0, var1])
        self.assertAllCloseAccordingToType([1.0, 2.0], v0_val)
        self.assertAllCloseAccordingToType([1.0, 2.0], v1_val)
        # Run 10 steps FTRL
        for _ in range(10):
          update0.run()
          update1.run()
        v0_val, v1_val = sess.run([var0, var1])
        # var0 is experiencing L2 shrinkage so it should be smaller than var1
        # in magnitude.
        self.assertTrue((v0_val**2 < v1_val**2).all())
        accum0 = sess.run(opt0.get_slot(var0, "accumulator"))
        accum1 = sess.run(opt1.get_slot(var1, "accumulator"))
        # L2 shrinkage should not change how we update grad accumulator.
        self.assertAllCloseAccordingToType(accum0, accum1)

  def applyOptimizer(self, opt, dtype, steps=5, is_sparse=False):
    # Helper: run *opt* for *steps* iterations starting from zero-valued
    # variables and return the final variable values.
    if is_sparse:
      var0 = variables.Variable([[0.0], [0.0]], dtype=dtype)
      var1 = variables.Variable([[0.0], [0.0]], dtype=dtype)
      grads0 = ops.IndexedSlices(
          constant_op.constant([0.1], shape=[1, 1], dtype=dtype),
          constant_op.constant([0]), constant_op.constant([2, 1]))
      grads1 = ops.IndexedSlices(
          constant_op.constant([0.02], shape=[1, 1], dtype=dtype),
          constant_op.constant([1]), constant_op.constant([2, 1]))
    else:
      var0 = variables.Variable([0.0, 0.0], dtype=dtype)
      var1 = variables.Variable([0.0, 0.0], dtype=dtype)
      grads0 = constant_op.constant([0.1, 0.2], dtype=dtype)
      grads1 = constant_op.constant([0.01, 0.02], dtype=dtype)
    update = opt.apply_gradients(zip([grads0, grads1], [var0, var1]))
    variables.global_variables_initializer().run()
    sess = ops.get_default_session()
    v0_val, v1_val = sess.run([var0, var1])
    if is_sparse:
      self.assertAllCloseAccordingToType([[0.0], [0.0]], v0_val)
      self.assertAllCloseAccordingToType([[0.0], [0.0]], v1_val)
    else:
      self.assertAllCloseAccordingToType([0.0, 0.0], v0_val)
      self.assertAllCloseAccordingToType([0.0, 0.0], v1_val)
    # Run Ftrl for a few steps
    for _ in range(steps):
      update.run()
    v0_val, v1_val = sess.run([var0, var1])
    return v0_val, v1_val

  # When variables are initialized with Zero, FTRL-Proximal has two properties:
  # 1. Without L1&L2 but with fixed learning rate, FTRL-Proximal is identical
  # with GradientDescent.
  # 2. Without L1&L2 but with adaptive learning rate, FTRL-Proximal is identical
  # with Adagrad.
  # So, basing on these two properties, we test if our implementation of
  # FTRL-Proximal performs same updates as Adagrad or GradientDescent.
  def testEquivAdagradwithoutRegularization(self):
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session():
        val0, val1 = self.applyOptimizer(
            ftrl.Ftrl(
                3.0,
                # Adagrad learning rate
                learning_rate_power=-0.5,
                initial_accumulator_value=0.1,
                l1_regularization_strength=0.0,
                l2_regularization_strength=0.0),
            dtype)
      with self.cached_session():
        val2, val3 = self.applyOptimizer(
            adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1), dtype)
      self.assertAllCloseAccordingToType(val0, val2)
      self.assertAllCloseAccordingToType(val1, val3)

  def testEquivSparseAdagradwithoutRegularization(self):
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session():
        val0, val1 = self.applyOptimizer(
            ftrl.Ftrl(
                3.0,
                # Adagrad learning rate
                learning_rate_power=-0.5,
                initial_accumulator_value=0.1,
                l1_regularization_strength=0.0,
                l2_regularization_strength=0.0),
            dtype,
            is_sparse=True)
      with self.cached_session():
        val2, val3 = self.applyOptimizer(
            adagrad.AdagradOptimizer(3.0, initial_accumulator_value=0.1),
            dtype,
            is_sparse=True)
      self.assertAllCloseAccordingToType(val0, val2)
      self.assertAllCloseAccordingToType(val1, val3)

  def testEquivSparseGradientDescentwithoutRegularization(self):
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session():
        val0, val1 = self.applyOptimizer(
            ftrl.Ftrl(
                3.0,
                # Fixed learning rate
                learning_rate_power=-0.0,
                initial_accumulator_value=0.1,
                l1_regularization_strength=0.0,
                l2_regularization_strength=0.0),
            dtype,
            is_sparse=True)
      with self.cached_session():
        val2, val3 = self.applyOptimizer(
            gradient_descent.GradientDescentOptimizer(3.0),
            dtype,
            is_sparse=True)
      self.assertAllCloseAccordingToType(val0, val2)
      self.assertAllCloseAccordingToType(val1, val3)

  def testEquivGradientDescentwithoutRegularization(self):
    for dtype in [dtypes.half, dtypes.float32]:
      with self.cached_session():
        val0, val1 = self.applyOptimizer(
            ftrl.Ftrl(
                3.0,
                # Fixed learning rate
                learning_rate_power=-0.0,
                initial_accumulator_value=0.1,
                l1_regularization_strength=0.0,
                l2_regularization_strength=0.0),
            dtype)
      with self.cached_session():
        val2, val3 = self.applyOptimizer(
            gradient_descent.GradientDescentOptimizer(3.0), dtype)
      self.assertAllCloseAccordingToType(val0, val2)
      self.assertAllCloseAccordingToType(val1, val3)
if __name__ == "__main__":
test.main()
|
apache-2.0
|
maxtangli/sonico
|
language/python/teabreak/final_hint.py
|
1
|
1056
|
def intelligent_data_source_factory(*data):
    """Return an int()-lookalike that converts string arguments normally
    but answers every non-string call with the next value from *data*,
    cycling endlessly."""
    import itertools
    feed = itertools.cycle(data)
    real_int = int  # bind whatever 'int' currently names

    def source(value):
        if isinstance(value, str):
            return real_int(value)
        return next(feed)

    return source
# Shadow the builtin ``int`` with the trick data source: string arguments
# still convert normally, while any non-string argument yields the next
# canned value (1985, 33067, 84, repeating).  NOTE(review): shadowing a
# builtin is deliberate puzzle obfuscation here, not a pattern to copy.
int = intelligent_data_source_factory(1985, 33067, 84)
# int = intelligent_data_source_factory(2012, 9, 30) # invalid
# int = intelligent_data_source_factory(2012, 9, 16) # invalid
# int = intelligent_data_source_factory(84, 100, 114) # invalid
def range_check(func):
    """Decorator: call *func* only when 0 <= m < n; otherwise — or when
    *func* returns a falsy value — produce '' instead."""
    def guarded(m, e, n, c):
        result = (0 <= m < n) and func(m, e, n, c)
        return result or ''
    return guarded
@range_check
def f(m, e, n, c):
    """Return str(m) when m**e is congruent to c modulo n, else ''.

    Uses three-argument pow() so the modular exponentiation never
    materializes the astronomically large intermediate m**e (e is on
    the order of 2000 in this puzzle); identical result for the
    non-negative m enforced by the range_check guard.
    """
    return str(m) if pow(m, e, n) == c else ''
if __name__ == '__main__':
    # for i in range(1000000):
    #     # if f(i, 17, 3569, 915) == str(i):
    #     if f(i, 1985, 33067, 84) == str(i):
    #         print(i)  # 25202
    #
    # print(25202 % 1985, 25202 % 33067, 25202 % 84) # invalid
    # print(25202 % 17, 25202 % 3569, 25202 % 915) # invalid
    # Brute-force search.  Note the int() calls below go through the
    # shadowed data source: the literal arguments 17/3569/915 are not
    # strings, so they are ignored and replaced by the cycled values
    # 1985/33067/84.
    for i in range(1000000):
        if f(i, int(17), int(3569), int(915)) == str(i):
            print(i) # 25202 -> 20252(invalid)
|
mit
|
khushboo9293/mailman
|
src/mailman/utilities/tests/test_wrap.py
|
7
|
3478
|
# Copyright (C) 2011-2015 by the Free Software Foundation, Inc.
#
# This file is part of GNU Mailman.
#
# GNU Mailman is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# GNU Mailman is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# GNU Mailman. If not, see <http://www.gnu.org/licenses/>.
"""Test text wrapping."""
__all__ = [
'TestWrap',
]
import unittest
from mailman.utilities.string import wrap
class TestWrap(unittest.TestCase):
    """Test text wrapping."""
    def test_simple_wrap(self):
        # A single hard-wrapped paragraph is refilled to the default width.
        text = """\
This is a single
paragraph. It consists
of several sentences
none of
which are
very long.
"""
        self.assertEqual(wrap(text), """\
This is a single paragraph. It consists of several sentences none of
which are very long.""")
    def test_two_paragraphs(self):
        # Paragraph boundaries are preserved; each paragraph is refilled
        # independently.
        text = """\
This is a single
paragraph. It consists
of several sentences
none of
which are
very long.
And here is a second paragraph which
also consists
of several sentences. None of
these are very long
either.
"""
        self.assertEqual(wrap(text), """\
This is a single paragraph. It consists of several sentences none of
which are very long.
And here is a second paragraph which also consists of several
sentences. None of these are very long either.""")
    def test_honor_ws(self):
        # By default, paragraphs with leading whitespace are kept verbatim
        # (not refilled), while surrounding paragraphs are still wrapped.
        text = """\
This is a single
paragraph. It consists
of several sentences
none of
which are
very long.
This paragraph is
indented so it
won't be filled.
And here is a second paragraph which
also consists
of several sentences. None of
these are very long
either.
"""
        self.assertEqual(wrap(text), """\
This is a single paragraph. It consists of several sentences none of
which are very long.
This paragraph is
indented so it
won't be filled.
And here is a second paragraph which also consists of several
sentences. None of these are very long either.""")
    def test_dont_honor_ws(self):
        # With honor_leading_ws=False, even indented paragraphs get refilled.
        text = """\
This is a single
paragraph. It consists
of several sentences
none of
which are
very long.
This paragraph is
indented but we don't
honor whitespace so it
will be filled.
And here is a second paragraph which
also consists
of several sentences. None of
these are very long
either.
"""
        self.assertEqual(wrap(text, honor_leading_ws=False), """\
This is a single paragraph. It consists of several sentences none of
which are very long.
This paragraph is indented but we don't honor whitespace so it
will be filled.
And here is a second paragraph which also consists of several
sentences. None of these are very long either.""")
    def test_indentation_boundary(self):
        # A change in indentation starts a new paragraph even without a
        # blank separator line.
        text = """\
This is a single paragraph
that consists of one sentence.
And another one that breaks
because it is indented.
Followed by one more paragraph.
"""
        self.assertEqual(wrap(text), """\
This is a single paragraph that consists of one sentence.
And another one that breaks
because it is indented.
Followed by one more paragraph.""")
|
gpl-3.0
|
wfs/MyCPPAndTDD
|
Line_Test/lib/gtest/googletest/test/gtest_color_test.py
|
3259
|
4911
|
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly determines whether to use colors."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# True when running on Windows; used to skip TERM-based checks there.
# BUG FIX: the original read `IS_WINDOWS = os.name = 'nt'` -- a chained
# assignment that *overwrote* os.name with 'nt' (and made IS_WINDOWS
# unconditionally truthy) instead of comparing against it.
IS_WINDOWS = os.name == 'nt'

# Environment variable and command-line flag that control gtest's coloring.
COLOR_ENV_VAR = 'GTEST_COLOR'
COLOR_FLAG = 'gtest_color'

# Path of the helper binary exercised by these tests.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_color_test_')
def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""
  if value is None:
    # Remove the variable if present; absent variables are a no-op.
    os.environ.pop(env_var, None)
  else:
    os.environ[env_var] = value
def UsesColor(term, color_env_var, color_flag):
  """Runs gtest_color_test_ and returns its exit code."""
  # Configure the environment the child process will observe.
  SetEnvVar('TERM', term)
  SetEnvVar(COLOR_ENV_VAR, color_env_var)
  # Build the flag list: empty when no --gtest_color value is requested.
  flag_args = [] if color_flag is None else ['--%s=%s' % (COLOR_FLAG, color_flag)]
  proc = gtest_test_utils.Subprocess([COMMAND] + flag_args)
  # Truthy when the child crashed or exited non-zero.
  return not proc.exited or proc.exit_code
class GTestColorTest(gtest_test_utils.TestCase):
  # Each assertion spawns the helper binary via UsesColor() with a given
  # TERM value, GTEST_COLOR env var, and --gtest_color flag.
  def testNoEnvVarNoFlag(self):
    """Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
    if not IS_WINDOWS:
      # Terminals without color capability must not get colored output.
      self.assert_(not UsesColor('dumb', None, None))
      self.assert_(not UsesColor('emacs', None, None))
      self.assert_(not UsesColor('xterm-mono', None, None))
      self.assert_(not UsesColor('unknown', None, None))
      self.assert_(not UsesColor(None, None, None))
    self.assert_(UsesColor('linux', None, None))
    self.assert_(UsesColor('cygwin', None, None))
    self.assert_(UsesColor('xterm', None, None))
    self.assert_(UsesColor('xterm-color', None, None))
    self.assert_(UsesColor('xterm-256color', None, None))
  def testFlagOnly(self):
    """Tests the case when there's --gtest_color but not GTEST_COLOR."""
    # An explicit 'no'/'yes' flag overrides the terminal type; 'auto'
    # defers to the TERM capability check.
    self.assert_(not UsesColor('dumb', None, 'no'))
    self.assert_(not UsesColor('xterm-color', None, 'no'))
    if not IS_WINDOWS:
      self.assert_(not UsesColor('emacs', None, 'auto'))
      self.assert_(UsesColor('xterm', None, 'auto'))
    self.assert_(UsesColor('dumb', None, 'yes'))
    self.assert_(UsesColor('xterm', None, 'yes'))
  def testEnvVarOnly(self):
    """Tests the case when there's GTEST_COLOR but not --gtest_color."""
    self.assert_(not UsesColor('dumb', 'no', None))
    self.assert_(not UsesColor('xterm-color', 'no', None))
    if not IS_WINDOWS:
      self.assert_(not UsesColor('dumb', 'auto', None))
      self.assert_(UsesColor('xterm-color', 'auto', None))
    self.assert_(UsesColor('dumb', 'yes', None))
    self.assert_(UsesColor('xterm-color', 'yes', None))
  def testEnvVarAndFlag(self):
    """Tests the case when there are both GTEST_COLOR and --gtest_color."""
    # The command-line flag takes precedence over the environment variable.
    self.assert_(not UsesColor('xterm-color', 'no', 'no'))
    self.assert_(UsesColor('dumb', 'no', 'yes'))
    self.assert_(UsesColor('xterm-color', 'no', 'auto'))
  def testAliasesOfYesAndNo(self):
    """Tests using aliases in specifying --gtest_color."""
    self.assert_(UsesColor('dumb', None, 'true'))
    self.assert_(UsesColor('dumb', None, 'YES'))
    self.assert_(UsesColor('dumb', None, 'T'))
    self.assert_(UsesColor('dumb', None, '1'))
    # Any value that is not a 'yes' alias is treated as 'no'.
    self.assert_(not UsesColor('xterm', None, 'f'))
    self.assert_(not UsesColor('xterm', None, 'false'))
    self.assert_(not UsesColor('xterm', None, '0'))
    self.assert_(not UsesColor('xterm', None, 'unknown'))
if __name__ == '__main__':
  gtest_test_utils.Main()
|
mit
|
hyperspy/hyperspyUI
|
hyperspyui/plugins/mva.py
|
2
|
15334
|
# -*- coding: utf-8 -*-
# Copyright 2014-2016 The HyperSpyUI developers
#
# This file is part of HyperSpyUI.
#
# HyperSpyUI is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpyUI is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpyUI. If not, see <http://www.gnu.org/licenses/>.
"""
Created on Fri Dec 12 23:44:01 2014
@author: Vidar Tonaas Fauske
"""
from hyperspyui.plugins.plugin import Plugin
from qtpy import QtCore, QtWidgets
from qtpy.QtWidgets import QDialog, QDialogButtonBox, QLineEdit, QLabel
from hyperspy.learn.mva import LearningResults
from hyperspyui.util import win2sig, fig2win, Namespace
from hyperspyui.threaded import ProgressThreaded, ProcessCanceled
from hyperspyui.widgets.extendedqwidgets import ExToolWindow
def tr(text):
    """Translate *text* within the "MVA" context via Qt's translation system."""
    translated = QtCore.QCoreApplication.translate("MVA", text)
    return translated
def align_yaxis(ax1, v1, ax2, v2):
    """adjust ax2 ylimit so that v2 in ax2 is aligned to v1 in ax1"""
    # Display (pixel) y-coordinates of the two anchor values.
    _, y1 = ax1.transData.transform((0, v1))
    _, y2 = ax2.transData.transform((0, v2))
    # Scale factor (<= 1) relating the two pixel positions.
    if y2 > y1:
        ratio = y1 / y2
    else:
        ratio = y2 / y1
    inv = ax2.transData.inverted()
    # Data-space offset on ax2 corresponding to the pixel gap y1 - y2.
    # (transform() returns an array, so the subtraction is element-wise.)
    _, dy = inv.transform((0, 0)) - inv.transform((0, y1 - y2))
    # Shift and rescale ax2's limits so v2 lands on the same pixel row as v1.
    miny2, maxy2 = ax2.get_ylim()
    ax2.set_ylim((miny2 + dy) / ratio, (maxy2 + dy) / ratio)
def make_advanced_dialog(ui, algorithms=None):
    """Build the "advanced" decomposition parameter dialog.

    Parameters
    ----------
    ui :
        Parent window for the tool dialog.
    algorithms : iterable of str, optional
        When given, the dialog shows a combo box listing these algorithm
        names. Otherwise it shows a line edit for a comma-separated list
        of component indices.

    Returns
    -------
    diag : ExToolWindow
        The dialog. `diag.algorithm()` is valid when `algorithms` was
        given; `diag.components()` is valid otherwise (calling the
        accessor for the branch that was not built raises NameError).
    """
    diag = ExToolWindow(ui)
    diag.setWindowTitle("Decomposition parameters")
    vbox = QtWidgets.QVBoxLayout()
    if algorithms:
        lbl_algo = QLabel(tr("Choose algorithm:"))
        # BUG FIX: was `QLineEdit.QComboBox()` -- QLineEdit has no such
        # attribute, so this raised AttributeError. The combo box class
        # lives in QtWidgets.
        cbo_algo = QtWidgets.QComboBox()
        cbo_algo.addItems(algorithms)
        vbox.addWidget(lbl_algo)
        vbox.addWidget(cbo_algo)
    else:
        lbl_comp = QLabel(tr(
            "Enter a comma-separated list of component numbers to use for "
            "the model:"))
        txt_comp = QLineEdit()
        vbox.addWidget(lbl_comp)
        vbox.addWidget(txt_comp)
    btns = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel,
                            QtCore.Qt.Horizontal)
    btns.accepted.connect(diag.accept)
    btns.rejected.connect(diag.reject)
    vbox.addWidget(btns)
    diag.setLayout(vbox)
    # Accessors close over the widget built in the branch taken above.
    diag.algorithm = lambda: cbo_algo.currentText()
    diag.components = lambda: [int(s) for s in txt_comp.text().split(',')]
    return diag
class MVA_Plugin(Plugin):
    """
    Implements MVA decomposition utilities.
    """
    name = 'MVA'  # Used for settings groups etc
    coc_values = {'convert': tr("Convert"),
                  'copy': tr("Copy")}

    # ----------- Plugin interface -----------
    def create_actions(self):
        """Register the decomposition-related UI actions."""
        self.settings.set_default('convert_or_copy', None)
        self.settings.set_enum_hint('convert_or_copy',
                                    self.coc_values.keys())
        self.add_action('plot_decomposition_results',
                        tr("Decompose"),
                        self.plot_decomposition_results,
                        icon='pca.svg',
                        tip=tr("Decompose signal using Principle Component "
                               "analysis"),
                        selection_callback=self.selection_rules)
        self.add_action('pca', tr("Decomposition model"), self.pca,
                        icon='pca.svg',
                        tip=tr("Create a Principal Component Analysis "
                               "decomposition model"),
                        selection_callback=self.selection_rules)
        self.add_action('bss', tr("BSS"), self.bss,
                        icon='bss.svg',
                        tip=tr("Run Blind Source Separation"),
                        selection_callback=self.selection_rules)
        self.add_action('bss_model', tr("BSS model"), self.bss_model,
                        icon='bss.svg',
                        tip=tr("Create a Blind Source Separation "
                               "decomposition model"),
                        selection_callback=self.selection_rules)
        self.add_action('clear', tr("Clear"), self.clear,
                        tip=tr("Clear decomposition cache"),
                        selection_callback=self.selection_rules)

    def create_menu(self):
        """Add all decomposition actions to the 'Decomposition' menu."""
        self.add_menuitem('Decomposition',
                          self.ui.actions['plot_decomposition_results'])
        self.add_menuitem('Decomposition', self.ui.actions['pca'])
        self.add_menuitem('Decomposition', self.ui.actions['bss'])
        self.add_menuitem('Decomposition', self.ui.actions['bss_model'])
        self.add_menuitem('Decomposition', self.ui.actions['clear'])

    def create_toolbars(self):
        """Add PCA and BSS shortcuts to the Decomposition toolbar."""
        self.add_toolbar_button("Decomposition", self.ui.actions['pca'])
        self.add_toolbar_button("Decomposition", self.ui.actions['bss'])

    def selection_rules(self, win, action):
        """
        Callback to determine if action is valid for the passed window.
        Decomposition requires a signal with at least 2D data.
        """
        s = win2sig(win, self.ui.signals)
        if s is None or s.signal.data.ndim <= 1:
            action.setEnabled(False)
        else:
            action.setEnabled(True)

    # ------------ Action implementations --------------
    def _get_signal(self, signal):
        """
        Get a valid signal. If the signal is None, it uses the currently
        selected one. If the signal type is not float, it either converts it,
        or gets a copy of the correct type, depending on the 'convert_or_copy'
        setting (prompting the user when unset).

        Returns the (possibly converted/copied) hyperspy signal and its
        wrapper.

        Raises ProcessCanceled if the user cancels the prompt.
        """
        if signal is None:
            signal = self.ui.get_selected_wrapper()
        s = signal.signal
        if s.data.dtype.char not in ['e', 'f', 'd']:  # If not float
            cc = self.settings.get_or_prompt(
                'convert_or_copy',
                [kv for kv in self.coc_values.items()],
                title=tr("Convert or copy"),
                descr=tr(
                    "Signal data has the wrong data type (float needed)." +
                    "Would you like to convert the current signal, or " +
                    "perform the decomposition on a copy?"))
            if cc is None:
                # User canceled
                raise ProcessCanceled()
            if cc == 'copy':
                s = s.deepcopy()
                s.metadata.General.title = signal.name + "[float]"
                s.plot()
            s.change_dtype(float)
        return s, signal

    def _do_decomposition(self, s, force=False, algorithm=None):
        """
        Makes sure we have decomposition results. If results already are
        available, it will only recalculate if the `force` parameter is True.
        An explicit `algorithm` always triggers a fresh decomposition.
        """
        if algorithm:
            s.decomposition(algorithm=algorithm)
        elif force or s.learning_results.explained_variance_ratio is None:
            s.decomposition()
        return s

    def _do_bss(self, s, n_components, algorithm=None):
        """
        Run blind source separation on the first `n_components`
        decomposition components, optionally with an explicit algorithm.
        (Docstring fixed: this method has no `force` parameter and always
        recomputes.)
        """
        if algorithm:
            s.blind_source_separation(n_components, algorithm=algorithm)
        else:
            s.blind_source_separation(n_components)

    def get_bss_results(self, signal):
        """Return the BSS (loadings, factors) pair, sharing the nav axis."""
        factors = signal.get_bss_factors()
        loadings = signal.get_bss_loadings()
        factors.axes_manager._axes[0] = loadings.axes_manager._axes[0]
        return loadings, factors

    def _record(self, autosig, model, signal, n_components):
        # Record the equivalent script line for the action, with or without
        # an explicit signal argument.
        if autosig:
            self.record_code(r"<p>.{0}(n_components={1})".format(
                model, n_components))
        else:
            self.record_code(r"<p>.{0}({1}, n_components={2})".format(
                model, signal, n_components))

    def _decompose_threaded(self, callback, label, signal=None,
                            algorithm=None, ns=None):
        """Run the decomposition on a worker thread, then invoke callback(ns)."""
        if ns is None:
            ns = Namespace()
        ns.autosig = signal is None
        ns.s, signal = self._get_signal(signal)

        def do_threaded():
            ns.s = self._do_decomposition(ns.s, algorithm=algorithm)

        def on_error(message=None):
            em = QtWidgets.QErrorMessage(self.ui)
            msg = tr("An error occurred during decomposition")
            if message:
                msg += ":\n" + message
            em.setWindowTitle(tr("Decomposition error"))
            em.showMessage(msg)

        t = ProgressThreaded(self.ui, do_threaded, lambda: callback(ns),
                             label=label)
        t.worker.error[str].connect(on_error)
        t.run()

    def _perform_model(self, ns, n_components):
        """Build and plot the model for `ns.model` with n_components."""
        # Num comp. picked, get model, wrap new signal and plot
        if ns.model == 'pca':
            sc = ns.s.get_decomposition_model(n_components)
            sc.metadata.General.title = ns.signal.name + "[PCA-model]"
            sc.plot()
        elif ns.model == 'bss' or ns.model.startswith('bss.'):
            if ns.model.startswith('bss.'):
                algorithm = ns.model[len('bss.'):]
                self._do_bss(ns.s, n_components, algorithm=algorithm)
            else:
                self._do_bss(ns.s, n_components)
            # NOTE(review): get_bss_results() returns (loadings, factors),
            # so `f` below holds loadings yet is titled "[BSS-Factors]" --
            # verify the intended title pairing upstream.
            f, o = self.get_bss_results(ns.s)
            o.metadata.add_dictionary(ns.s.metadata.as_dictionary())
            f.metadata.General.title = ns.signal.name + "[BSS-Factors]"
            o.metadata.General.title = ns.signal.name + "[BSS-Loadings]"
            f.plot()
            o.plot()
        elif ns.model == 'bss_model':
            # Here we have to assume the user has actually performed the BSS
            # decomposition first!
            sc = ns.s.get_bss_model(n_components)
            sc.metadata.General.title = ns.signal.name + "[BSS-model]"
            sc.plot()
        if not ns.recorded:
            self._record(ns.autosig, ns.model, ns.signal, n_components)

    def _show_scree(self, ns, callback):
        """Show the scree plot and call callback(ns, n) on user click."""
        ax = ns.s.plot_explained_variance_ratio()
        # Clean up plot and present, allow user to select components
        # by picker
        ax.set_title("")
        scree = ax.get_figure().canvas
        scree.draw()
        scree.setWindowTitle("Pick number of components")

        def clicked(event):
            n_components = int(round(event.xdata))
            # Close scree plot
            w = fig2win(scree.figure, self.ui.figures)
            w.close()
            callback(ns, n_components)
        scree.mpl_connect('button_press_event', clicked)

    def do_after_scree(self, model, signal=None, n_components=None):
        """
        Performs decomposition, then plots the scree for the user to select
        the number of components to use for a decomposition model. The
        selection is made by clicking on the scree, which closes the scree
        and creates the model.
        """
        ns = Namespace()
        ns.autosig = signal is None
        ns.model = model
        ns.s, ns.signal = self._get_signal(signal)
        if n_components is not None:
            self._record(ns.autosig, ns.model, ns.signal, n_components)
            ns.recorded = True
        else:
            ns.recorded = False

        def on_complete(ns):
            if n_components is None:
                self._show_scree(ns, self._perform_model)
            else:
                self._perform_model(ns, n_components)
        # BUG FIX: `n_components` was previously passed positionally here as
        # the `signal` argument of _decompose_threaded(), which then crashed
        # in _get_signal() (an int has no `.signal`) whenever an explicit
        # component count was given. Forward the original `signal` instead.
        self._decompose_threaded(on_complete, "Performing %s" % model.upper(),
                                 signal, ns=ns)

    def plot_decomposition_results(self, signal=None, advanced=False):
        """
        Performs decomposition if necessary, then plots the decomposition
        results according to the hyperspy implementation.
        """
        def on_complete(ns):
            ns.s.plot_decomposition_results()
            # Somewhat speculative workaround to HSPY not adding metadata
            sd = self.ui.hspy_signals[-1]
            sd.metadata.add_dictionary(ns.s.metadata.as_dictionary())

        if advanced:
            diag = make_advanced_dialog(
                self.ui, ['svd', 'fast_svd', 'mlpca', 'fast_mlpca', 'nmf',
                          'sparse_pca', 'mini_batch_sparse_pca'])
            dr = diag.exec_()
            if dr == QDialog.Accepted:
                self._decompose_threaded(
                    on_complete, "Decomposing signal",
                    algorithm=diag.algorithm())
        else:
            self._decompose_threaded(on_complete, "Decomposing signal")

    def pca(self, signal=None, n_components=None, advanced=False):
        """
        Performs decomposition, then plots the scree for the user to select
        the number of components to use for a decomposition model. The
        selection is made by clicking on the scree, which closes the scree
        and creates the model.
        """
        if advanced:
            diag = make_advanced_dialog(self.ui)
            dr = diag.exec_()
            if dr == QDialog.Accepted:
                self.do_after_scree(
                    'pca', signal, n_components=diag.components())
        else:
            self.do_after_scree('pca', signal, n_components)

    def bss(self, signal=None, n_components=None, advanced=False):
        """
        Performs decomposition if neccessary, then plots the scree for the user
        to select the number of components to use for a blind source
        separation. The selection is made by clicking on the scree, which
        closes the scree and creates the model.
        """
        if advanced:
            diag = make_advanced_dialog(
                self.ui, ['orthomax', 'sklearn_fastica', 'FastICA', 'JADE',
                          'CuBICA', 'TDSEP'])
            dr = diag.exec_()
            if dr == QDialog.Accepted:
                model = 'bss.' + diag.algorithm()
                self.do_after_scree(model, signal, n_components)
        else:
            self.do_after_scree('bss', signal, n_components)

    def bss_model(self, signal=None, n_components=None, advanced=False):
        """
        Performs decomposition if neccessary, then plots the scree for the user
        to select the number of components to use for a blind source
        separation model. The selection is made by clicking on the scree, which
        closes the scree and creates the model.
        """
        if advanced:
            diag = make_advanced_dialog(self.ui)
            dr = diag.exec_()
            if dr == QDialog.Accepted:
                self.do_after_scree(
                    'bss_model', signal, n_components=diag.components())
        else:
            self.do_after_scree('bss_model', signal, n_components)

    def clear(self, signal=None):
        """
        Clears the learning results from the signal.
        """
        if signal is None:
            signal = self.ui.get_selected_signal()
        signal.learning_results = LearningResults()
|
gpl-3.0
|
ltilve/ChromiumGStreamerBackend
|
tools/telemetry/third_party/gsutilz/third_party/boto/tests/unit/s3/test_uri.py
|
114
|
12504
|
#!/usr/bin/env python
# Copyright (c) 2013 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import boto
import tempfile
from boto.exception import InvalidUriError
from boto import storage_uri
from boto.compat import urllib
from boto.s3.keyfile import KeyFile
from tests.integration.s3.mock_storage_service import MockBucket
from tests.integration.s3.mock_storage_service import MockBucketStorageUri
from tests.integration.s3.mock_storage_service import MockConnection
from tests.unit import unittest
"""Unit tests for StorageUri interface."""
class UriTest(unittest.TestCase):
    # Exercises boto.storage_uri() parsing for gs://, s3://, and file URIs:
    # scheme/bucket/object splitting, version/generation extraction, and the
    # names_*() classification predicates.
    def test_provider_uri(self):
        # A bare scheme ('gs://' / 's3://') names the provider only.
        for prov in ('gs', 's3'):
            uri_str = '%s://' % prov
            uri = boto.storage_uri(uri_str, validate=False,
                                   suppress_consec_slashes=False)
            self.assertEqual(prov, uri.scheme)
            self.assertEqual(uri_str, uri.uri)
            self.assertFalse(hasattr(uri, 'versionless_uri'))
            self.assertEqual('', uri.bucket_name)
            self.assertEqual('', uri.object_name)
            self.assertEqual(None, uri.version_id)
            self.assertEqual(None, uri.generation)
            self.assertEqual(uri.names_provider(), True)
            self.assertEqual(uri.names_container(), True)
            self.assertEqual(uri.names_bucket(), False)
            self.assertEqual(uri.names_object(), False)
            self.assertEqual(uri.names_directory(), False)
            self.assertEqual(uri.names_file(), False)
            self.assertEqual(uri.is_stream(), False)
            self.assertEqual(uri.is_version_specific, False)
    def test_bucket_uri_no_trailing_slash(self):
        # A trailing slash is normalized onto bucket-only URIs.
        for prov in ('gs', 's3'):
            uri_str = '%s://bucket' % prov
            uri = boto.storage_uri(uri_str, validate=False,
                                   suppress_consec_slashes=False)
            self.assertEqual(prov, uri.scheme)
            self.assertEqual('%s/' % uri_str, uri.uri)
            self.assertFalse(hasattr(uri, 'versionless_uri'))
            self.assertEqual('bucket', uri.bucket_name)
            self.assertEqual('', uri.object_name)
            self.assertEqual(None, uri.version_id)
            self.assertEqual(None, uri.generation)
            self.assertEqual(uri.names_provider(), False)
            self.assertEqual(uri.names_container(), True)
            self.assertEqual(uri.names_bucket(), True)
            self.assertEqual(uri.names_object(), False)
            self.assertEqual(uri.names_directory(), False)
            self.assertEqual(uri.names_file(), False)
            self.assertEqual(uri.is_stream(), False)
            self.assertEqual(uri.is_version_specific, False)
    def test_bucket_uri_with_trailing_slash(self):
        for prov in ('gs', 's3'):
            uri_str = '%s://bucket/' % prov
            uri = boto.storage_uri(uri_str, validate=False,
                                   suppress_consec_slashes=False)
            self.assertEqual(prov, uri.scheme)
            self.assertEqual(uri_str, uri.uri)
            self.assertFalse(hasattr(uri, 'versionless_uri'))
            self.assertEqual('bucket', uri.bucket_name)
            self.assertEqual('', uri.object_name)
            self.assertEqual(None, uri.version_id)
            self.assertEqual(None, uri.generation)
            self.assertEqual(uri.names_provider(), False)
            self.assertEqual(uri.names_container(), True)
            self.assertEqual(uri.names_bucket(), True)
            self.assertEqual(uri.names_object(), False)
            self.assertEqual(uri.names_directory(), False)
            self.assertEqual(uri.names_file(), False)
            self.assertEqual(uri.is_stream(), False)
            self.assertEqual(uri.is_version_specific, False)
    def test_non_versioned_object_uri(self):
        for prov in ('gs', 's3'):
            uri_str = '%s://bucket/obj/a/b' % prov
            uri = boto.storage_uri(uri_str, validate=False,
                                   suppress_consec_slashes=False)
            self.assertEqual(prov, uri.scheme)
            self.assertEqual(uri_str, uri.uri)
            self.assertEqual(uri_str, uri.versionless_uri)
            self.assertEqual('bucket', uri.bucket_name)
            self.assertEqual('obj/a/b', uri.object_name)
            self.assertEqual(None, uri.version_id)
            self.assertEqual(None, uri.generation)
            self.assertEqual(uri.names_provider(), False)
            self.assertEqual(uri.names_container(), False)
            self.assertEqual(uri.names_bucket(), False)
            self.assertEqual(uri.names_object(), True)
            self.assertEqual(uri.names_directory(), False)
            self.assertEqual(uri.names_file(), False)
            self.assertEqual(uri.is_stream(), False)
            self.assertEqual(uri.is_version_specific, False)
    def test_versioned_gs_object_uri(self):
        # '#<digits>' after the object name is parsed as a GS generation.
        uri_str = 'gs://bucket/obj/a/b#1359908801674000'
        uri = boto.storage_uri(uri_str, validate=False,
                               suppress_consec_slashes=False)
        self.assertEqual('gs', uri.scheme)
        self.assertEqual(uri_str, uri.uri)
        self.assertEqual('gs://bucket/obj/a/b', uri.versionless_uri)
        self.assertEqual('bucket', uri.bucket_name)
        self.assertEqual('obj/a/b', uri.object_name)
        self.assertEqual(None, uri.version_id)
        self.assertEqual(1359908801674000, uri.generation)
        self.assertEqual(uri.names_provider(), False)
        self.assertEqual(uri.names_container(), False)
        self.assertEqual(uri.names_bucket(), False)
        self.assertEqual(uri.names_object(), True)
        self.assertEqual(uri.names_directory(), False)
        self.assertEqual(uri.names_file(), False)
        self.assertEqual(uri.is_stream(), False)
        self.assertEqual(uri.is_version_specific, True)
    def test_versioned_gs_object_uri_with_legacy_generation_value(self):
        # Short numeric generations (pre-microsecond format) still parse.
        uri_str = 'gs://bucket/obj/a/b#1'
        uri = boto.storage_uri(uri_str, validate=False,
                               suppress_consec_slashes=False)
        self.assertEqual('gs', uri.scheme)
        self.assertEqual(uri_str, uri.uri)
        self.assertEqual('gs://bucket/obj/a/b', uri.versionless_uri)
        self.assertEqual('bucket', uri.bucket_name)
        self.assertEqual('obj/a/b', uri.object_name)
        self.assertEqual(None, uri.version_id)
        self.assertEqual(1, uri.generation)
        self.assertEqual(uri.names_provider(), False)
        self.assertEqual(uri.names_container(), False)
        self.assertEqual(uri.names_bucket(), False)
        self.assertEqual(uri.names_object(), True)
        self.assertEqual(uri.names_directory(), False)
        self.assertEqual(uri.names_file(), False)
        self.assertEqual(uri.is_stream(), False)
        self.assertEqual(uri.is_version_specific, True)
    def test_roundtrip_versioned_gs_object_uri_parsed(self):
        # Re-parsing a parsed URI's .uri must reproduce it exactly.
        uri_str = 'gs://bucket/obj#1359908801674000'
        uri = boto.storage_uri(uri_str, validate=False,
                               suppress_consec_slashes=False)
        roundtrip_uri = boto.storage_uri(uri.uri, validate=False,
                                         suppress_consec_slashes=False)
        self.assertEqual(uri.uri, roundtrip_uri.uri)
        self.assertEqual(uri.is_version_specific, True)
    def test_versioned_s3_object_uri(self):
        # S3 uses opaque string version IDs rather than numeric generations.
        uri_str = 's3://bucket/obj/a/b#eMuM0J15HkJ9QHlktfNP5MfA.oYR2q6S'
        uri = boto.storage_uri(uri_str, validate=False,
                               suppress_consec_slashes=False)
        self.assertEqual('s3', uri.scheme)
        self.assertEqual(uri_str, uri.uri)
        self.assertEqual('s3://bucket/obj/a/b', uri.versionless_uri)
        self.assertEqual('bucket', uri.bucket_name)
        self.assertEqual('obj/a/b', uri.object_name)
        self.assertEqual('eMuM0J15HkJ9QHlktfNP5MfA.oYR2q6S', uri.version_id)
        self.assertEqual(None, uri.generation)
        self.assertEqual(uri.names_provider(), False)
        self.assertEqual(uri.names_container(), False)
        self.assertEqual(uri.names_bucket(), False)
        self.assertEqual(uri.names_object(), True)
        self.assertEqual(uri.names_directory(), False)
        self.assertEqual(uri.names_file(), False)
        self.assertEqual(uri.is_stream(), False)
        self.assertEqual(uri.is_version_specific, True)
    def test_explicit_file_uri(self):
        tmp_dir = tempfile.tempdir or ''
        uri_str = 'file://%s' % urllib.request.pathname2url(tmp_dir)
        uri = boto.storage_uri(uri_str, validate=False,
                               suppress_consec_slashes=False)
        self.assertEqual('file', uri.scheme)
        self.assertEqual(uri_str, uri.uri)
        self.assertFalse(hasattr(uri, 'versionless_uri'))
        self.assertEqual('', uri.bucket_name)
        self.assertEqual(tmp_dir, uri.object_name)
        self.assertFalse(hasattr(uri, 'version_id'))
        self.assertFalse(hasattr(uri, 'generation'))
        self.assertFalse(hasattr(uri, 'is_version_specific'))
        self.assertEqual(uri.names_provider(), False)
        self.assertEqual(uri.names_bucket(), False)
        # Don't check uri.names_container(), uri.names_directory(),
        # uri.names_file(), or uri.names_object(), because for file URIs these
        # functions look at the file system and apparently unit tests run
        # chroot'd.
        self.assertEqual(uri.is_stream(), False)
    def test_implicit_file_uri(self):
        # A bare path (no scheme) is treated as a file:// URI.
        tmp_dir = tempfile.tempdir or ''
        uri_str = '%s' % urllib.request.pathname2url(tmp_dir)
        uri = boto.storage_uri(uri_str, validate=False,
                               suppress_consec_slashes=False)
        self.assertEqual('file', uri.scheme)
        self.assertEqual('file://%s' % tmp_dir, uri.uri)
        self.assertFalse(hasattr(uri, 'versionless_uri'))
        self.assertEqual('', uri.bucket_name)
        self.assertEqual(tmp_dir, uri.object_name)
        self.assertFalse(hasattr(uri, 'version_id'))
        self.assertFalse(hasattr(uri, 'generation'))
        self.assertFalse(hasattr(uri, 'is_version_specific'))
        self.assertEqual(uri.names_provider(), False)
        self.assertEqual(uri.names_bucket(), False)
        # Don't check uri.names_container(), uri.names_directory(),
        # uri.names_file(), or uri.names_object(), because for file URIs these
        # functions look at the file system and apparently unit tests run
        # chroot'd.
        self.assertEqual(uri.is_stream(), False)
    def test_gs_object_uri_contains_sharp_not_matching_version_syntax(self):
        # '#' followed by non-generation text stays part of the object name.
        uri_str = 'gs://bucket/obj#13a990880167400'
        uri = boto.storage_uri(uri_str, validate=False,
                               suppress_consec_slashes=False)
        self.assertEqual('gs', uri.scheme)
        self.assertEqual(uri_str, uri.uri)
        self.assertEqual('gs://bucket/obj#13a990880167400',
                         uri.versionless_uri)
        self.assertEqual('bucket', uri.bucket_name)
        self.assertEqual('obj#13a990880167400', uri.object_name)
        self.assertEqual(None, uri.version_id)
        self.assertEqual(None, uri.generation)
        self.assertEqual(uri.names_provider(), False)
        self.assertEqual(uri.names_container(), False)
        self.assertEqual(uri.names_bucket(), False)
        self.assertEqual(uri.names_object(), True)
        self.assertEqual(uri.names_directory(), False)
        self.assertEqual(uri.names_file(), False)
        self.assertEqual(uri.is_stream(), False)
        self.assertEqual(uri.is_version_specific, False)
    def test_file_containing_colon(self):
        # A colon without '//' does not make a scheme; it's a file path.
        uri_str = 'abc:def'
        uri = boto.storage_uri(uri_str, validate=False,
                               suppress_consec_slashes=False)
        self.assertEqual('file', uri.scheme)
        self.assertEqual('file://%s' % uri_str, uri.uri)
    def test_invalid_scheme(self):
        # NOTE(review): if no exception is raised this test silently passes;
        # consider assertRaises(InvalidUriError, ...) instead.
        uri_str = 'mars://bucket/object'
        try:
            boto.storage_uri(uri_str, validate=False,
                             suppress_consec_slashes=False)
        except InvalidUriError as e:
            self.assertIn('Unrecognized scheme', e.message)
if __name__ == '__main__':
    unittest.main()
|
bsd-3-clause
|
mory0tiki/pack-llama
|
views.py
|
1
|
1220
|
from django.core.files.base import ContentFile
from django.shortcuts import render
from django.http.response import HttpResponse
from django.views.generic import base
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from django.conf import settings
import ast
import json
import uuid
import models
import utils
class SavePackView(base.View):
    # Accepts a raw POST body and stores it as a new Pack message file.
    # NOTE(review): this module uses Python 2 print statements and implicit
    # relative imports -- it is Python 2 only.
    @method_decorator(csrf_exempt)
    def dispatch(self, request, *args, **kwargs):
        # CSRF is exempted because packs are posted by external agents,
        # not by browser forms carrying a CSRF token.
        return super(SavePackView, self).dispatch(request, *args, **kwargs)
    def post(self, request, *args, **kwargs):
        # Returns JSON {"result": true/false}; any exception is swallowed
        # (best-effort) and reported as result=false.
        try:
            result = {"result" : False}
            if request.body:
                pack = models.Pack()
                pack.receive_from = request.META["REMOTE_ADDR"]
                # NOTE(review): queue is hard-coded to the 'Test' queue --
                # confirm whether this should be configurable.
                pack.queue_id = models.Queue.objects.get(name='Test').id
                if settings.DEBUG:
                    print request.body
                    print "++++++++++++++++++++++++"
                # FileField.save() persists the model row as well (save=True
                # by default), so no explicit pack.save() is needed.
                pack.message.save(str(uuid.uuid4()),ContentFile(request.body))
                result["result"] = True
        except Exception as ex:
            print str(ex)
        return HttpResponse(json.dumps(result))
|
apache-2.0
|
BehavioralInsightsTeam/edx-platform
|
lms/lib/courseware_search/test/test_lms_filter_generator.py
|
12
|
5721
|
"""
Tests for the lms_filter_generator
"""
from mock import Mock, patch
from lms.lib.courseware_search.lms_filter_generator import LmsSearchFilterGenerator
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class LmsSearchFilterGeneratorTestCase(ModuleStoreTestCase):
    """ Tests for search result processor """
    shard = 5
    def build_courses(self):
        """
        Build up a course tree with multiple test courses
        """
        # Two courses in different orgs so org-based filtering can be exercised.
        self.courses = [
            CourseFactory.create(
                org='ElasticsearchFiltering',
                course='ES101F',
                run='test_run',
                display_name='Elasticsearch Filtering test course',
            ),
            CourseFactory.create(
                org='FilterTest',
                course='FT101',
                run='test_run',
                display_name='FilterTest test course',
            )
        ]
        # One published chapter per course so each course has indexable content.
        self.chapter = ItemFactory.create(
            parent_location=self.courses[0].location,
            category='chapter',
            display_name="Week 1",
            publish_item=True,
        )
        self.chapter2 = ItemFactory.create(
            parent_location=self.courses[1].location,
            category='chapter',
            display_name="Week 1",
            publish_item=True,
        )
    def setUp(self):
        """Create the courses and a user enrolled in both of them."""
        super(LmsSearchFilterGeneratorTestCase, self).setUp()
        self.build_courses()
        self.user = UserFactory.create(username="jack", email="jack@fake.edx.org", password='test')
        for course in self.courses:
            CourseEnrollment.enroll(self.user, course.location.course_key)
    def test_course_id_not_provided(self):
        """
        Tests that we get the list of IDs of courses the user is enrolled in when the course ID is null or not provided
        """
        field_dictionary, filter_dictionary, _ = LmsSearchFilterGenerator.generate_field_filters(user=self.user)
        self.assertIn('start_date', filter_dictionary)
        self.assertIn(unicode(self.courses[0].id), field_dictionary['course'])
        self.assertIn(unicode(self.courses[1].id), field_dictionary['course'])
    def test_course_id_provided(self):
        """
        Tests that we get the course ID when the course ID is provided
        """
        field_dictionary, filter_dictionary, _ = LmsSearchFilterGenerator.generate_field_filters(
            user=self.user,
            course_id=unicode(self.courses[0].id)
        )
        self.assertIn('start_date', filter_dictionary)
        self.assertEqual(unicode(self.courses[0].id), field_dictionary['course'])
    def test_user_not_provided(self):
        """
        Tests that we get empty list of courses in case the user is not provided
        """
        field_dictionary, filter_dictionary, _ = LmsSearchFilterGenerator.generate_field_filters()
        self.assertIn('start_date', filter_dictionary)
        self.assertEqual(0, len(field_dictionary['course']))
    def test_excludes_site_org(self):
        """
        By default site orgs not belonging to current site org should be excluded.
        """
        # The two expected org names presumably come from the test settings'
        # site configurations — TODO confirm against the settings fixtures.
        _, _, exclude_dictionary = LmsSearchFilterGenerator.generate_field_filters(user=self.user)
        self.assertIn('org', exclude_dictionary)
        exclude_orgs = exclude_dictionary['org']
        self.assertEqual(2, len(exclude_orgs))
        self.assertEqual('LogistrationX', exclude_orgs[0])
        self.assertEqual('TestSiteX', exclude_orgs[1])
    @patch('openedx.core.djangoapps.site_configuration.helpers.get_all_orgs', Mock(return_value=[]))
    def test_no_excludes_with_no_orgs(self):
        """ Test when no org is present - nothing to exclude """
        _, _, exclude_dictionary = LmsSearchFilterGenerator.generate_field_filters(user=self.user)
        self.assertNotIn('org', exclude_dictionary)
    @patch('openedx.core.djangoapps.site_configuration.helpers.get_value', Mock(return_value='TestSiteX'))
    def test_excludes_org_within(self):
        """When the current site declares an org, filter to it instead of excluding."""
        field_dictionary, _, exclude_dictionary = LmsSearchFilterGenerator.generate_field_filters(user=self.user)
        self.assertNotIn('org', exclude_dictionary)
        self.assertIn('org', field_dictionary)
        self.assertEqual(['TestSiteX'], field_dictionary['org'])
    @patch(
        'openedx.core.djangoapps.site_configuration.helpers.get_all_orgs',
        Mock(return_value=["TestSite1", "TestSite2", "TestSite3", "TestSite4"])
    )
    def test_excludes_multi_orgs(self):
        """All site orgs are excluded when the current site has no org of its own."""
        _, _, exclude_dictionary = LmsSearchFilterGenerator.generate_field_filters(user=self.user)
        self.assertIn('org', exclude_dictionary)
        exclude_orgs = exclude_dictionary['org']
        self.assertEqual(4, len(exclude_orgs))
        self.assertIn('TestSite1', exclude_orgs)
        self.assertIn('TestSite2', exclude_orgs)
        self.assertIn('TestSite3', exclude_orgs)
        self.assertIn('TestSite4', exclude_orgs)
    @patch(
        'openedx.core.djangoapps.site_configuration.helpers.get_all_orgs',
        Mock(return_value=["TestSite1", "TestSite2", "TestSite3", "TestSite4"])
    )
    @patch('openedx.core.djangoapps.site_configuration.helpers.get_value', Mock(return_value='TestSite3'))
    def test_excludes_multi_orgs_within(self):
        """The current site's own org is filtered-to, not excluded, among many orgs."""
        field_dictionary, _, exclude_dictionary = LmsSearchFilterGenerator.generate_field_filters(user=self.user)
        self.assertNotIn('org', exclude_dictionary)
        self.assertIn('org', field_dictionary)
        self.assertEqual(['TestSite3'], field_dictionary['org'])
|
agpl-3.0
|
pigworlds/asuswrttest
|
release/src/router/lighttpd-1.4.29/external_file/js/davclient.js/jsbase/conftest.py
|
42
|
5960
|
# JS unit test support for py.test - (c) 2007 Guido Wesdorp. All rights
# reserved
#
# This software is distributed under the terms of the JSBase
# License. See LICENSE.txt for license text.
import py
# Directory containing this conftest; used to locate the JS support files
# (exception.js, testing.js, misclib.js, testing/*.js) fed to the JS shell.
here = py.magic.autopath().dirpath()
class JSTest(py.test.collect.Item):
    """A single JS test, executed by spawning the SpiderMonkey 'js' shell.

    The test name has the form '<file>.js/<testname>'; a temporary driver
    script (__testbase_temp.js) is generated from testing/testbase.js, run
    with the 'js' binary, and its line-oriented output parsed for results.
    """
    def run(self):
        # Run one named test from the JS file and fail with a formatted
        # traceback if the shell reports a failure.
        path = self.fspath
        test = self.name.split('/')[-1]
        # Support files loaded alongside the test file itself.
        paths = [path.strpath,
                 (here / 'exception.js').strpath,
                 (here / 'testing.js').strpath,
                 (here / 'misclib.js').strpath,
                ]
        testjs = (here / 'testing/testbase.js').read() % (
                    paths, test, '__main__')
        curdir = str(py.path.local('.'))
        # chdir so relative paths inside the JS test resolve next to the file.
        py.std.os.chdir(str(self.fspath.dirpath()))
        try:
            jspath = self.fspath.new(basename='__testbase_temp.js')
            try:
                jspath.write(testjs)
                pipe = py.std.os.popen('js "%s"' % (jspath,))
                try:
                    data = {}
                    for line in pipe:
                        done = self._handle_line(line, data)
                        if done:
                            # 'end test' seen: any accumulated lines are errors.
                            errdata = data[data['current']]
                            if errdata:
                                self.fail(errdata)
                finally:
                    pipe.close()
            finally:
                jspath.remove()
        finally:
            py.std.os.chdir(curdir)
    def fail(self, errdata):
        # errdata[0] is the failure message, the rest is the JS traceback.
        py.test.fail(
            '\nJS traceback (most recent last): \n%s\n%s\n' % (
                (errdata[1:] and
                    self._format_tb(errdata[1:-5]) or
                    'no traceback available'
                ),
                errdata[0],
            )
        )
    # True while lines between 'traceback' and 'end traceback' are being read.
    _handling_traceback = False
    def _handle_line(self, line, data):
        """Parse one line of shell output into `data`; return True at test end.

        NOTE(review): while _handling_traceback is set, a traceback line is
        appended and then still falls through to the startswith checks below —
        presumably harmless because traceback lines never match those
        prefixes, but worth confirming.
        """
        line = line[:-1]
        if line.startswith('end test'):
            return True
        if self._handling_traceback and line != 'end traceback':
            data[data['current']].append(line)
        if line.startswith('PRINTED: '):
            print line[9:]
        elif line.startswith('running test '):
            testname = line[13:]
            data['current'] = testname
            data[testname] = []
        elif line.startswith('success'):
            pass
        elif line.startswith('failure: '):
            data[data['current']].append(line[9:])
        elif line.startswith('traceback'):
            self._handling_traceback = True
        elif line.startswith('end traceback'):
            self._handling_traceback = False
    def _format_tb(self, tb):
        # Render JS 'funcsig@file:line' traceback lines Python-style,
        # most recent call last.
        tb.reverse()
        ret = []
        for line in tb:
            line = line.strip()
            if not line:
                continue
            funcsig, lineinfo = line.split('@', 1)
            fpath, lineno = lineinfo.rsplit(':', 1)
            fname = py.path.local(fpath).basename
            # XXX might filter out too much... but it's better than leaving it
            # all in (since it adds a couple of lines to the end of the tb,
            # making it harder to find the problem line)
            if fname in ['__testbase_temp.js', '__testbase_find.js',
                            'exception.js']:
                continue
            lineno = int(lineno)
            if lineno == 0:
                fname = "<unknown>"
            ret.append('File "%s", line %s, in %s' % (
                        fname, lineno, funcsig or '?'))
            if lineno > 0:
                # Quote the offending source line when the location is known.
                line = py.path.local(fpath).readlines()[lineno - 1]
                ret.append('  %s' % (line.strip(),))
        return '\n'.join(['  %s' % (r,) for r in ret])
class JSChecker(py.test.collect.Module):
    """Collector for one .js file: discovers its tests via findtests.js."""
    def __repr__(self):
        return py.test.collect.Collector.__repr__(self)
    def setup(self):
        # No Python module to import for a JS file.
        pass
    def teardown(self):
        pass
    def run(self):
        # Generate a temporary __findtests.js that prints one test name per
        # line, run it with the 'js' shell, and collect the names.
        findjs = here.join('testing/findtests.js').read() % (
            self.fspath.strpath, '__main__')
        curdir = str(py.path.local('.'))
        py.std.os.chdir(str(self.fspath.dirpath()))
        tests = []
        try:
            jspath = self.fspath.new(basename='__findtests.js')
            try:
                jspath.write(findjs)
                stdin, pipe, stderr = py.std.os.popen3('js "%s"' % (jspath,))
                try:
                    # Any stderr output means the shell itself failed.
                    error = stderr.next()
                    print 'Error read:', error
                except StopIteration:
                    pass
                else:
                    if error.find('command not found') > -1:
                        # SpiderMonkey isn't installed: skip, don't fail.
                        py.test.skip(
                            'error running "js" (SpiderMonkey), which is '
                            'required to run JS tests')
                    else:
                        py.test.fail(error)
                    return
                try:
                    for line in pipe:
                        tests.append(line.strip())
                finally:
                    # NOTE(review): restoring sys.stdout here looks like a
                    # leftover safety net — nothing above redirects it.
                    py.std.sys.stdout = py.std.sys.__stdout__
                    pipe.close()
            finally:
                jspath.remove()
        finally:
            py.std.os.chdir(curdir)
        # Item names are '<file>.js/<testname>', consumed by join() below.
        return ['%s/%s' % (self.fspath.basename, test) for test in tests]
    def join(self, name):
        if py.path.local(name).dirpath().strpath.endswith('.js'):
            return JSTest(name, self)
        return super(JSChecker, self).join(name)
class Directory(py.test.collect.Directory):
    """Directory collector that also picks up test_*.js files."""
    def run(self):
        """In the conftest's own directory, list test_* .py and .js files."""
        if self.fspath != here:
            return super(Directory, self).run()
        collected = []
        for candidate in self.fspath.listdir('test_*'):
            if candidate.ext in ['.py', '.js']:
                collected.append(candidate.basename)
        return collected
    def join(self, name):
        """Wrap .js files in a JSChecker; defer everything else to the base."""
        if name.endswith('.js'):
            target = self.fspath.join(name)
            if target.check(file=1):
                return JSChecker(target, parent=self)
            return None
        return super(Directory, self).join(name)
|
gpl-2.0
|
ryandougherty/mwa-capstone
|
MWA_Tools/build/matplotlib/examples/misc/rasterization_demo.py
|
6
|
1257
|
# Demo: selectively rasterizing individual artists (or everything below a
# given zorder) when saving a vector-format figure.
import numpy as np
import matplotlib.pyplot as plt
# A 10x10 value grid on a rotated 11x11 coordinate mesh.
d = np.arange(100).reshape(10, 10)
x, y = np.meshgrid(np.arange(11), np.arange(11))
theta = 0.25*np.pi
xx = x*np.cos(theta) - y*np.sin(theta)
yy = x*np.sin(theta) + y*np.cos(theta)
# Top-left: fully vector output.
ax1 = plt.subplot(221)
ax1.set_aspect(1)
ax1.pcolormesh(xx, yy, d)
ax1.set_title("No Rasterization")
# Top-right: only the mesh artist is rasterized.
ax2 = plt.subplot(222)
ax2.set_aspect(1)
ax2.set_title("Rasterization")
m = ax2.pcolormesh(xx, yy, d)
m.set_rasterized(True)
# Bottom-left: vector mesh with translucent text on top.
ax3 = plt.subplot(223)
ax3.set_aspect(1)
ax3.pcolormesh(xx, yy, d)
ax3.text(0.5, 0.5, "Text", alpha=0.2,
         va="center", ha="center", size=50, transform=ax3.transAxes)
ax3.set_title("No Rasterization")
# Bottom-right: everything with zorder < -10 (mesh and text) is rasterized
# as a group via set_rasterization_zorder.
ax4 = plt.subplot(224)
ax4.set_aspect(1)
m = ax4.pcolormesh(xx, yy, d)
m.set_zorder(-20)
ax4.text(0.5, 0.5, "Text", alpha=0.2,
         zorder=-15,
         va="center", ha="center", size=50, transform=ax4.transAxes)
ax4.set_rasterization_zorder(-10)
ax4.set_title("Rasterization z$<-10$")
# ax2.title.set_rasterized(True) # should display a warning
plt.savefig("test_rasterization.pdf", dpi=150)
plt.savefig("test_rasterization.eps", dpi=150)
if not plt.rcParams["text.usetex"]:
    plt.savefig("test_rasterization.svg", dpi=150)
    # svg backend currently ignores the dpi
|
gpl-2.0
|
Hubert51/AutoGrading
|
learning/web_Haotian/venv/Lib/shutil.py
|
23
|
40227
|
"""Utility functions for copying and archiving files and directory trees.
XXX The functions here don't copy the resource fork or other metadata on Mac.
"""
import os
import sys
import stat
import fnmatch
import collections
import errno
try:
import zlib
del zlib
_ZLIB_SUPPORTED = True
except ImportError:
_ZLIB_SUPPORTED = False
try:
import bz2
del bz2
_BZ2_SUPPORTED = True
except ImportError:
_BZ2_SUPPORTED = False
try:
import lzma
del lzma
_LZMA_SUPPORTED = True
except ImportError:
_LZMA_SUPPORTED = False
try:
from pwd import getpwnam
except ImportError:
getpwnam = None
try:
from grp import getgrnam
except ImportError:
getgrnam = None
__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2",
"copytree", "move", "rmtree", "Error", "SpecialFileError",
"ExecError", "make_archive", "get_archive_formats",
"register_archive_format", "unregister_archive_format",
"get_unpack_formats", "register_unpack_format",
"unregister_unpack_format", "unpack_archive",
"ignore_patterns", "chown", "which", "get_terminal_size",
"SameFileError"]
# disk_usage is added later, if available on the platform
# Exception hierarchy for this module; Error is the base for copy failures.
class Error(OSError):
    """Base class for shutil-specific errors."""
    pass
class SameFileError(Error):
    """Raised when source and destination are the same file."""
class SpecialFileError(OSError):
    """Raised when trying to do a kind of operation (e.g. copying) which is
    not supported on a special file (e.g. a named pipe)"""
class ExecError(OSError):
    """Raised when a command could not be executed"""
class ReadError(OSError):
    """Raised when an archive cannot be read"""
class RegistryError(Exception):
    """Raised when a registry operation with the archiving
    and unpacking registries fails"""
def copyfileobj(fsrc, fdst, length=16*1024):
    """Copy data from file-like object fsrc to file-like object fdst.

    Reads `length`-sized chunks until fsrc is exhausted; works on both
    text- and binary-mode objects.
    """
    while True:
        chunk = fsrc.read(length)
        if not chunk:
            return
        fdst.write(chunk)
def _samefile(src, dst):
# Macintosh, Unix.
if hasattr(os.path, 'samefile'):
try:
return os.path.samefile(src, dst)
except OSError:
return False
# All other platforms: check for same pathname.
return (os.path.normcase(os.path.abspath(src)) ==
os.path.normcase(os.path.abspath(dst)))
def copyfile(src, dst, *, follow_symlinks=True):
    """Copy data from src to dst; return dst.

    If follow_symlinks is not set and src is a symbolic link, a new
    symlink will be created instead of copying the file it points to.

    Raises SameFileError if src and dst are the same file, and
    SpecialFileError if either endpoint is a named pipe.
    """
    if _samefile(src, dst):
        raise SameFileError("{!r} and {!r} are the same file".format(src, dst))
    for fn in [src, dst]:
        try:
            st = os.stat(fn)
        except OSError:
            # File most likely does not exist
            pass
        else:
            # XXX What about other special files? (sockets, devices...)
            if stat.S_ISFIFO(st.st_mode):
                raise SpecialFileError("`%s` is a named pipe" % fn)
    if not follow_symlinks and os.path.islink(src):
        # Recreate the symlink itself instead of copying its target.
        os.symlink(os.readlink(src), dst)
    else:
        with open(src, 'rb') as fsrc:
            with open(dst, 'wb') as fdst:
                copyfileobj(fsrc, fdst)
    return dst
def copymode(src, dst, *, follow_symlinks=True):
    """Copy mode bits from src to dst.

    If follow_symlinks is not set, symlinks aren't followed if and only
    if both `src` and `dst` are symlinks.  If `lchmod` isn't available
    (e.g. Linux) this method does nothing.
    """
    if not follow_symlinks and os.path.islink(src) and os.path.islink(dst):
        if hasattr(os, 'lchmod'):
            # Operate on the links themselves, not their targets.
            stat_func, chmod_func = os.lstat, os.lchmod
        else:
            return
    elif hasattr(os, 'chmod'):
        stat_func, chmod_func = os.stat, os.chmod
    else:
        return
    st = stat_func(src)
    chmod_func(dst, stat.S_IMODE(st.st_mode))
# Extended-attribute copying is only possible where os.listxattr exists;
# elsewhere _copyxattr degrades to a no-op so callers need not check.
if hasattr(os, 'listxattr'):
    def _copyxattr(src, dst, *, follow_symlinks=True):
        """Copy extended filesystem attributes from `src` to `dst`.

        Overwrite existing attributes.

        If `follow_symlinks` is false, symlinks won't be followed.
        """
        try:
            names = os.listxattr(src, follow_symlinks=follow_symlinks)
        except OSError as e:
            # Filesystem doesn't support xattrs (or src has none): nothing to do.
            if e.errno not in (errno.ENOTSUP, errno.ENODATA):
                raise
            return
        for name in names:
            try:
                value = os.getxattr(src, name, follow_symlinks=follow_symlinks)
                os.setxattr(dst, name, value, follow_symlinks=follow_symlinks)
            except OSError as e:
                # Skip attributes the destination cannot accept.
                if e.errno not in (errno.EPERM, errno.ENOTSUP, errno.ENODATA):
                    raise
else:
    def _copyxattr(*args, **kwargs):
        """No-op fallback when the platform has no xattr support."""
        pass
def copystat(src, dst, *, follow_symlinks=True):
    """Copy all stat info (mode bits, atime, mtime, flags) from src to dst.

    If the optional flag `follow_symlinks` is not set, symlinks aren't followed if and
    only if both `src` and `dst` are symlinks.
    """
    def _nop(*args, ns=None, follow_symlinks=None):
        # Stand-in for an os function that is missing on this platform.
        pass
    # follow symlinks (aka don't not follow symlinks)
    follow = follow_symlinks or not (os.path.islink(src) and os.path.islink(dst))
    if follow:
        # use the real function if it exists
        def lookup(name):
            return getattr(os, name, _nop)
    else:
        # use the real function only if it exists
        # *and* it supports follow_symlinks
        def lookup(name):
            fn = getattr(os, name, _nop)
            if fn in os.supports_follow_symlinks:
                return fn
            return _nop
    st = lookup("stat")(src, follow_symlinks=follow)
    mode = stat.S_IMODE(st.st_mode)
    # Copy timestamps with nanosecond precision.
    lookup("utime")(dst, ns=(st.st_atime_ns, st.st_mtime_ns),
        follow_symlinks=follow)
    try:
        lookup("chmod")(dst, mode, follow_symlinks=follow)
    except NotImplementedError:
        # if we got a NotImplementedError, it's because
        #   * follow_symlinks=False,
        #   * lchown() is unavailable, and
        #   * either
        #       * fchownat() is unavailable or
        #       * fchownat() doesn't implement AT_SYMLINK_NOFOLLOW.
        #         (it returned ENOSUP.)
        # therefore we're out of options--we simply cannot chown the
        # symlink.  give up, suppress the error.
        # (which is what shutil always did in this circumstance.)
        pass
    if hasattr(st, 'st_flags'):
        try:
            lookup("chflags")(dst, st.st_flags, follow_symlinks=follow)
        except OSError as why:
            # Ignore "not supported" errors, which vary by platform name.
            for err in 'EOPNOTSUPP', 'ENOTSUP':
                if hasattr(errno, err) and why.errno == getattr(errno, err):
                    break
            else:
                raise
    _copyxattr(src, dst, follow_symlinks=follow)
def copy(src, dst, *, follow_symlinks=True):
    """Copy data and mode bits ("cp src dst"). Return the file's destination.

    The destination may be a directory.

    If follow_symlinks is false, symlinks won't be followed. This
    resembles GNU's "cp -P src dst".

    If source and destination are the same file, a SameFileError will be
    raised.
    """
    if os.path.isdir(dst):
        # Copy into the directory under the source's basename.
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst, follow_symlinks=follow_symlinks)
    copymode(src, dst, follow_symlinks=follow_symlinks)
    return dst
def copy2(src, dst, *, follow_symlinks=True):
    """Copy data and all stat info ("cp -p src dst"). Return the file's
    destination.

    The destination may be a directory.

    If follow_symlinks is false, symlinks won't be followed. This
    resembles GNU's "cp -P src dst".
    """
    if os.path.isdir(dst):
        dst = os.path.join(dst, os.path.basename(src))
    copyfile(src, dst, follow_symlinks=follow_symlinks)
    # Unlike copy(), also replicates timestamps/flags via copystat().
    copystat(src, dst, follow_symlinks=follow_symlinks)
    return dst
def ignore_patterns(*patterns):
    """Function that can be used as copytree() ignore parameter.

    Patterns is a sequence of glob-style patterns
    that are used to exclude files"""
    def _ignore_patterns(path, names):
        # Names matching any pattern are ignored; duplicates collapse in the set.
        return {name
                for pattern in patterns
                for name in fnmatch.filter(names, pattern)}
    return _ignore_patterns
def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2,
             ignore_dangling_symlinks=False):
    """Recursively copy a directory tree; return dst.

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied. If the file pointed by the symlink doesn't
    exist, an exception will be added in the list of errors raised in
    an Error exception at the end of the copy process.

    You can set the optional ignore_dangling_symlinks flag to true if you
    want to silence this exception. Notice that this has no effect on
    platforms that don't support os.symlink.

    The optional ignore argument is a callable. If given, it
    is called with the `src` parameter, which is the directory
    being visited by copytree(), and `names` which is the list of
    `src` contents, as returned by os.listdir():

        callable(src, names) -> ignored_names

    Since copytree() is called recursively, the callable will be
    called once for each directory that is copied. It returns a
    list of names relative to the `src` directory that should
    not be copied.

    The optional copy_function argument is a callable that will be used
    to copy each file. It will be called with the source path and the
    destination path as arguments. By default, copy2() is used, but any
    function that supports the same signature (like copy()) can be used.
    """
    names = os.listdir(src)
    if ignore is not None:
        ignored_names = ignore(src, names)
    else:
        ignored_names = set()
    os.makedirs(dst)
    # Errors are accumulated so one bad entry doesn't abort the whole tree.
    errors = []
    for name in names:
        if name in ignored_names:
            continue
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if os.path.islink(srcname):
                linkto = os.readlink(srcname)
                if symlinks:
                    # We can't just leave it to `copy_function` because legacy
                    # code with a custom `copy_function` may rely on copytree
                    # doing the right thing.
                    os.symlink(linkto, dstname)
                    copystat(srcname, dstname, follow_symlinks=not symlinks)
                else:
                    # ignore dangling symlink if the flag is on
                    if not os.path.exists(linkto) and ignore_dangling_symlinks:
                        continue
                    # otherwise let the copy occurs. copy2 will raise an error
                    if os.path.isdir(srcname):
                        copytree(srcname, dstname, symlinks, ignore,
                                 copy_function)
                    else:
                        copy_function(srcname, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks, ignore, copy_function)
            else:
                # Will raise a SpecialFileError for unsupported file types
                copy_function(srcname, dstname)
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
        except OSError as why:
            errors.append((srcname, dstname, str(why)))
    try:
        copystat(src, dst)
    except OSError as why:
        # Copying file access times may fail on Windows
        if getattr(why, 'winerror', None) is None:
            errors.append((src, dst, str(why)))
    if errors:
        raise Error(errors)
    return dst
# version vulnerable to race conditions
def _rmtree_unsafe(path, onerror):
    """Path-based rmtree helper; every failure is routed through onerror."""
    try:
        if os.path.islink(path):
            # symlinks to directories are forbidden, see bug #1669
            raise OSError("Cannot call rmtree on a symbolic link")
    except OSError:
        onerror(os.path.islink, path, sys.exc_info())
        # can't continue even if onerror hook returns
        return
    names = []
    try:
        names = os.listdir(path)
    except OSError:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            mode = os.lstat(fullname).st_mode
        except OSError:
            # Treat unstat-able entries as plain files below.
            mode = 0
        if stat.S_ISDIR(mode):
            _rmtree_unsafe(fullname, onerror)
        else:
            try:
                os.unlink(fullname)
            except OSError:
                onerror(os.unlink, fullname, sys.exc_info())
    try:
        os.rmdir(path)
    except OSError:
        onerror(os.rmdir, path, sys.exc_info())
# Version using fd-based APIs to protect against races
def _rmtree_safe_fd(topfd, path, onerror):
    """fd-based rmtree helper: operates via dir_fd so a concurrent rename
    or symlink swap cannot redirect the deletion outside the tree."""
    names = []
    try:
        names = os.listdir(topfd)
    except OSError as err:
        err.filename = path
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        # `fullname` is only for error reporting; syscalls use name + dir_fd.
        fullname = os.path.join(path, name)
        try:
            orig_st = os.stat(name, dir_fd=topfd, follow_symlinks=False)
            mode = orig_st.st_mode
        except OSError:
            mode = 0
        if stat.S_ISDIR(mode):
            try:
                dirfd = os.open(name, os.O_RDONLY, dir_fd=topfd)
            except OSError:
                onerror(os.open, fullname, sys.exc_info())
            else:
                try:
                    # Verify the opened fd is still the entry we stat'ed.
                    if os.path.samestat(orig_st, os.fstat(dirfd)):
                        _rmtree_safe_fd(dirfd, fullname, onerror)
                        try:
                            os.rmdir(name, dir_fd=topfd)
                        except OSError:
                            onerror(os.rmdir, fullname, sys.exc_info())
                    else:
                        try:
                            # This can only happen if someone replaces
                            # a directory with a symlink after the call to
                            # stat.S_ISDIR above.
                            raise OSError("Cannot call rmtree on a symbolic "
                                          "link")
                        except OSError:
                            onerror(os.path.islink, fullname, sys.exc_info())
                finally:
                    os.close(dirfd)
        else:
            try:
                os.unlink(name, dir_fd=topfd)
            except OSError:
                onerror(os.unlink, fullname, sys.exc_info())
# True when the platform supplies every dir_fd/fd-based call needed by the
# race-free implementation (_rmtree_safe_fd); otherwise _rmtree_unsafe is used.
_use_fd_functions = ({os.open, os.stat, os.unlink, os.rmdir} <=
                     os.supports_dir_fd and
                     os.listdir in os.supports_fd and
                     os.stat in os.supports_follow_symlinks)
def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is platform and implementation dependent;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
    is false and onerror is None, an exception is raised.
    """
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            # Re-raise the exception currently being handled.
            raise
    if _use_fd_functions:
        # While the unsafe rmtree works fine on bytes, the fd based does not.
        if isinstance(path, bytes):
            path = os.fsdecode(path)
        # Note: To guard against symlink races, we use the standard
        # lstat()/open()/fstat() trick.
        try:
            orig_st = os.lstat(path)
        except Exception:
            onerror(os.lstat, path, sys.exc_info())
            return
        try:
            fd = os.open(path, os.O_RDONLY)
        except Exception:
            # Bug fix: report os.open — the call that actually failed — to
            # the onerror hook (previously os.lstat was passed here).
            onerror(os.open, path, sys.exc_info())
            return
        try:
            if os.path.samestat(orig_st, os.fstat(fd)):
                _rmtree_safe_fd(fd, path, onerror)
                try:
                    os.rmdir(path)
                except OSError:
                    onerror(os.rmdir, path, sys.exc_info())
            else:
                try:
                    # symlinks to directories are forbidden, see bug #1669
                    raise OSError("Cannot call rmtree on a symbolic link")
                except OSError:
                    onerror(os.path.islink, path, sys.exc_info())
        finally:
            os.close(fd)
    else:
        return _rmtree_unsafe(path, onerror)
# Allow introspection of whether or not the hardening against symlink
# attacks is supported on the current platform
rmtree.avoids_symlink_attacks = _use_fd_functions
def _basename(path):
# A basename() variant which first strips the trailing slash, if present.
# Thus we always get the last component of the path, even for directories.
sep = os.path.sep + (os.path.altsep or '')
return os.path.basename(path.rstrip(sep))
def move(src, dst, copy_function=copy2):
    """Recursively move a file or directory to another location. This is
    similar to the Unix "mv" command. Return the file or directory's
    destination.

    If the destination is a directory or a symlink to a directory, the source
    is moved inside the directory. The destination path must not already
    exist.

    If the destination already exists but is not a directory, it may be
    overwritten depending on os.rename() semantics.

    If the destination is on our current filesystem, then rename() is used.
    Otherwise, src is copied to the destination and then removed. Symlinks are
    recreated under the new name if os.rename() fails because of cross
    filesystem renames.

    The optional `copy_function` argument is a callable that will be used
    to copy the source or it will be delegated to `copytree`.
    By default, copy2() is used, but any function that supports the same
    signature (like copy()) can be used.

    A lot more could be done here...  A look at a mv.c shows a lot of
    the issues this implementation glosses over.
    """
    real_dst = dst
    if os.path.isdir(dst):
        if _samefile(src, dst):
            # We might be on a case insensitive filesystem,
            # perform the rename anyway.
            os.rename(src, dst)
            return
        real_dst = os.path.join(dst, _basename(src))
        if os.path.exists(real_dst):
            raise Error("Destination path '%s' already exists" % real_dst)
    try:
        # Fast path: same filesystem.
        os.rename(src, real_dst)
    except OSError:
        # Cross-filesystem move: copy then delete the original.
        if os.path.islink(src):
            linkto = os.readlink(src)
            os.symlink(linkto, real_dst)
            os.unlink(src)
        elif os.path.isdir(src):
            if _destinsrc(src, dst):
                raise Error("Cannot move a directory '%s' into itself"
                            " '%s'." % (src, dst))
            copytree(src, real_dst, copy_function=copy_function,
                     symlinks=True)
            rmtree(src)
        else:
            copy_function(src, real_dst)
            os.unlink(src)
    return real_dst
def _destinsrc(src, dst):
src = os.path.abspath(src)
dst = os.path.abspath(dst)
if not src.endswith(os.path.sep):
src += os.path.sep
if not dst.endswith(os.path.sep):
dst += os.path.sep
return dst.startswith(src)
def _get_gid(name):
"""Returns a gid, given a group name."""
if getgrnam is None or name is None:
return None
try:
result = getgrnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _get_uid(name):
"""Returns an uid, given a user name."""
if getpwnam is None or name is None:
return None
try:
result = getpwnam(name)
except KeyError:
result = None
if result is not None:
return result[2]
return None
def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0,
                  owner=None, group=None, logger=None):
    """Create a (possibly compressed) tar file from all the files under
    'base_dir'.

    'compress' must be "gzip" (the default), "bzip2", "xz", or None.

    'owner' and 'group' can be used to define an owner and a group for the
    archive that is being built. If not provided, the current owner and group
    will be used.

    The output tar file will be named 'base_name' +  ".tar", possibly plus
    the appropriate compression extension (".gz", ".bz2", or ".xz").

    Returns the output filename.

    Raises ValueError if the requested compression codec was not compiled
    into this Python build.
    """
    if compress is None:
        tar_compression = ''
    elif _ZLIB_SUPPORTED and compress == 'gzip':
        tar_compression = 'gz'
    elif _BZ2_SUPPORTED and compress == 'bzip2':
        tar_compression = 'bz2'
    elif _LZMA_SUPPORTED and compress == 'xz':
        tar_compression = 'xz'
    else:
        raise ValueError("bad value for 'compress', or compression format not "
                         "supported : {0}".format(compress))
    import tarfile  # late import for breaking circular dependency
    compress_ext = '.' + tar_compression if compress else ''
    archive_name = base_name + '.tar' + compress_ext
    archive_dir = os.path.dirname(archive_name)
    if archive_dir and not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)
    # creating the tarball
    if logger is not None:
        logger.info('Creating tar archive')
    uid = _get_uid(owner)
    gid = _get_gid(group)
    def _set_uid_gid(tarinfo):
        # tarfile filter: force the requested ownership onto every member.
        if gid is not None:
            tarinfo.gid = gid
            tarinfo.gname = group
        if uid is not None:
            tarinfo.uid = uid
            tarinfo.uname = owner
        return tarinfo
    if not dry_run:
        # 'w|%s' opens a non-seekable compressed stream for writing.
        tar = tarfile.open(archive_name, 'w|%s' % tar_compression)
        try:
            tar.add(base_dir, filter=_set_uid_gid)
        finally:
            tar.close()
    return archive_name
def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None):
    """Create a zip file from all the files under 'base_dir'.

    The output zip file will be named 'base_name' + ".zip".  Returns the
    name of the output zip file.
    """
    import zipfile  # late import for breaking circular dependency
    zip_filename = base_name + ".zip"
    archive_dir = os.path.dirname(base_name)
    if archive_dir and not os.path.exists(archive_dir):
        if logger is not None:
            logger.info("creating %s", archive_dir)
        if not dry_run:
            os.makedirs(archive_dir)
    if logger is not None:
        logger.info("creating '%s' and adding '%s' to it",
                    zip_filename, base_dir)
    if not dry_run:
        with zipfile.ZipFile(zip_filename, "w",
                             compression=zipfile.ZIP_DEFLATED) as zf:
            path = os.path.normpath(base_dir)
            # Record base_dir itself (unless it is '.', which zip can't store).
            if path != os.curdir:
                zf.write(path, path)
                if logger is not None:
                    logger.info("adding '%s'", path)
            for dirpath, dirnames, filenames in os.walk(base_dir):
                # Directory entries first (sorted for reproducible archives),
                # then the regular files they contain.
                for name in sorted(dirnames):
                    path = os.path.normpath(os.path.join(dirpath, name))
                    zf.write(path, path)
                    if logger is not None:
                        logger.info("adding '%s'", path)
                for name in filenames:
                    path = os.path.normpath(os.path.join(dirpath, name))
                    if os.path.isfile(path):
                        zf.write(path, path)
                        if logger is not None:
                            logger.info("adding '%s'", path)
    return zip_filename
# Registry of archive formats: name -> (function, extra_args, description).
# Compressed variants are only registered when the codec module imported
# successfully at module load time.
_ARCHIVE_FORMATS = {
    'tar':   (_make_tarball, [('compress', None)], "uncompressed tar file"),
}
if _ZLIB_SUPPORTED:
    _ARCHIVE_FORMATS['gztar'] = (_make_tarball, [('compress', 'gzip')],
                                "gzip'ed tar-file")
    # zipfile requires zlib for ZIP_DEFLATED compression.
    _ARCHIVE_FORMATS['zip'] = (_make_zipfile, [], "ZIP file")
if _BZ2_SUPPORTED:
    _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')],
                                "bzip2'ed tar-file")
if _LZMA_SUPPORTED:
    _ARCHIVE_FORMATS['xztar'] = (_make_tarball, [('compress', 'xz')],
                                "xz'ed tar-file")
def get_archive_formats():
    """Returns a list of supported formats for archiving and unarchiving.

    Each element of the returned sequence is a tuple (name, description),
    sorted by name.
    """
    return sorted((name, registry[2])
                  for name, registry in _ARCHIVE_FORMATS.items())
def register_archive_format(name, function, extra_args=None, description=''):
    """Registers an archive format.

    name is the name of the format. function is the callable that will be
    used to create archives. If provided, extra_args is a sequence of
    (name, value) tuples that will be passed as arguments to the callable.
    description can be provided to describe the format, and will be returned
    by the get_archive_formats() function.

    Raises TypeError if function is not callable or extra_args is not a
    sequence of two-item sequences.
    """
    if extra_args is None:
        extra_args = []
    if not callable(function):
        raise TypeError('The %s object is not callable' % function)
    if not isinstance(extra_args, (tuple, list)):
        raise TypeError('extra_args needs to be a sequence')
    for element in extra_args:
        if not isinstance(element, (tuple, list)) or len(element) !=2:
            raise TypeError('extra_args elements are : (arg_name, value)')
    _ARCHIVE_FORMATS[name] = (function, extra_args, description)
def unregister_archive_format(name):
    """Remove the archive format *name* from the registry."""
    del _ARCHIVE_FORMATS[name]
def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0,
                 dry_run=0, owner=None, group=None, logger=None):
    """Create an archive file (eg. zip or tar).

    'base_name' is the name of the file to create, minus any format-specific
    extension; 'format' is the archive format: one of "zip", "tar", "gztar",
    "bztar", or "xztar".  Or any other registered format.

    'root_dir' is a directory that will be the root directory of the
    archive; ie. we typically chdir into 'root_dir' before creating the
    archive.  'base_dir' is the directory where we start archiving from;
    ie. 'base_dir' will be the common prefix of all files and
    directories in the archive.  'root_dir' and 'base_dir' both default
    to the current directory.  Returns the name of the archive file.

    'owner' and 'group' are used when creating a tar archive. By default,
    uses the current owner and group.

    Raises ValueError for an unknown format.
    """
    # The chdir below affects the whole process; restore it in the finally.
    save_cwd = os.getcwd()
    if root_dir is not None:
        if logger is not None:
            logger.debug("changing into '%s'", root_dir)
        # Resolve base_name before chdir so a relative name stays anchored
        # to the caller's original working directory.
        base_name = os.path.abspath(base_name)
        if not dry_run:
            os.chdir(root_dir)
    if base_dir is None:
        base_dir = os.curdir
    kwargs = {'dry_run': dry_run, 'logger': logger}
    try:
        format_info = _ARCHIVE_FORMATS[format]
    except KeyError:
        raise ValueError("unknown archive format '%s'" % format)
    func = format_info[0]
    for arg, val in format_info[1]:
        kwargs[arg] = val
    if format != 'zip':
        # Only the tar-based writers understand ownership options.
        kwargs['owner'] = owner
        kwargs['group'] = group
    try:
        filename = func(base_name, base_dir, **kwargs)
    finally:
        if root_dir is not None:
            if logger is not None:
                logger.debug("changing back to '%s'", save_cwd)
            os.chdir(save_cwd)
    return filename
def get_unpack_formats():
    """Returns a list of supported formats for unpacking.

    Each element of the returned sequence is a tuple
    (name, extensions, description), sorted by name.
    """
    # _UNPACK_FORMATS maps name -> (extensions, function, extra_args, description).
    formats = [(name, info[0], info[3]) for name, info in
               _UNPACK_FORMATS.items()]
    formats.sort()
    return formats
def _check_unpack_options(extensions, function, extra_args):
    """Checks what gets registered as an unpacker."""
    # Map every extension already claimed by a registered format back to
    # that format's name, so collisions can be reported precisely.
    existing_extensions = {
        ext: name
        for name, info in _UNPACK_FORMATS.items()
        for ext in info[0]
    }

    for extension in extensions:
        if extension in existing_extensions:
            raise RegistryError(
                '%s is already registered for "%s"'
                % (extension, existing_extensions[extension]))

    if not callable(function):
        raise TypeError('The registered function must be a callable')
def register_unpack_format(name, extensions, function, extra_args=None,
                           description=''):
    """Registers an unpack format.

    `name` is the name of the format and `extensions` is a list of
    extensions corresponding to the format.

    `function` is the callable used to unpack archives; it receives the
    archives to unpack and must raise a ReadError exception when it is
    unable to handle one.

    If provided, `extra_args` is a sequence of (name, value) tuples that
    will be passed as arguments to the callable.  `description` can be
    provided to describe the format and is returned by the
    get_unpack_formats() function.
    """
    extra_args = [] if extra_args is None else extra_args
    _check_unpack_options(extensions, function, extra_args)
    _UNPACK_FORMATS[name] = (extensions, function, extra_args, description)
def unregister_unpack_format(name):
    """Removes the pack format from the registry.

    Raises KeyError if the format was never registered.
    """
    _UNPACK_FORMATS.pop(name)
def _ensure_directory(path):
"""Ensure that the parent directory of `path` exists"""
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
def _unpack_zipfile(filename, extract_dir):
    """Unpack zip `filename` to `extract_dir`."""
    import zipfile  # late import for breaking circular dependency

    if not zipfile.is_zipfile(filename):
        raise ReadError("%s is not a zip file" % filename)

    # Context managers guarantee the archive and each output file are
    # closed even when a read/write fails part-way through (the original
    # try/finally only protected the archive handle, not the output file).
    with zipfile.ZipFile(filename) as zf:
        for info in zf.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            if name.startswith('/') or '..' in name:
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            if not target:
                continue

            _ensure_directory(target)
            if not name.endswith('/'):
                # entry is a file (directory entries end with '/')
                with open(target, 'wb') as f:
                    f.write(zf.read(info.filename))
def _unpack_tarfile(filename, extract_dir):
"""Unpack tar/tar.gz/tar.bz2/tar.xz `filename` to `extract_dir`
"""
import tarfile # late import for breaking circular dependency
try:
tarobj = tarfile.open(filename)
except tarfile.TarError:
raise ReadError(
"%s is not a compressed or uncompressed tar file" % filename)
try:
tarobj.extractall(extract_dir)
finally:
tarobj.close()
# Built-in unpack registry: maps a format name to a tuple of
# (extensions, unpack_function, extra_args, description).
_UNPACK_FORMATS = {
    'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"),
    'zip': (['.zip'], _unpack_zipfile, [], "ZIP file"),
}

# Compressed tar variants are registered only when the matching compression
# module was importable (the _*_SUPPORTED flags are set earlier in this
# module when zlib/bz2/lzma are probed).
if _ZLIB_SUPPORTED:
    _UNPACK_FORMATS['gztar'] = (['.tar.gz', '.tgz'], _unpack_tarfile, [],
                                "gzip'ed tar-file")

if _BZ2_SUPPORTED:
    _UNPACK_FORMATS['bztar'] = (['.tar.bz2', '.tbz2'], _unpack_tarfile, [],
                                "bzip2'ed tar-file")

if _LZMA_SUPPORTED:
    _UNPACK_FORMATS['xztar'] = (['.tar.xz', '.txz'], _unpack_tarfile, [],
                                "xz'ed tar-file")
def _find_unpack_format(filename):
    """Return the name of the first registered format one of whose
    extensions matches `filename`, or None when no unpacker claims it."""
    # str.endswith accepts a tuple, replacing the inner extension loop.
    for name, info in _UNPACK_FORMATS.items():
        if filename.endswith(tuple(info[0])):
            return name
    return None
def unpack_archive(filename, extract_dir=None, format=None):
    """Unpack an archive.

    `filename` is the name of the archive.

    `extract_dir` is the name of the target directory, where the archive
    is unpacked. If not provided, the current working directory is used.

    `format` is the archive format: one of "zip", "tar", "gztar", "bztar",
    or "xztar". Or any other registered format. If not provided,
    unpack_archive will use the filename extension and see if an unpacker
    was registered for that extension.

    In case none is found, a ValueError is raised.
    """
    if extract_dir is None:
        extract_dir = os.getcwd()

    if format is not None:
        # Explicit format: an unknown name is a caller error (ValueError).
        try:
            format_info = _UNPACK_FORMATS[format]
        except KeyError:
            raise ValueError("Unknown unpack format '{0}'".format(format))

        func = format_info[1]
        func(filename, extract_dir, **dict(format_info[2]))
    else:
        # we need to look at the registered unpackers supported extensions
        format = _find_unpack_format(filename)
        if format is None:
            # No registered extension matched: a file problem (ReadError).
            raise ReadError("Unknown archive format '{0}'".format(filename))

        func = _UNPACK_FORMATS[format][1]
        kwargs = dict(_UNPACK_FORMATS[format][2])
        func(filename, extract_dir, **kwargs)
# disk_usage() is only defined on platforms that can supply the numbers:
# POSIX (via os.statvfs) or Windows (via the private nt helper).
if hasattr(os, 'statvfs'):
    __all__.append('disk_usage')
    _ntuple_diskusage = collections.namedtuple('usage', 'total used free')
    _ntuple_diskusage.total.__doc__ = 'Total space in bytes'
    _ntuple_diskusage.used.__doc__ = 'Used space in bytes'
    _ntuple_diskusage.free.__doc__ = 'Free space in bytes'

    def disk_usage(path):
        """Return disk usage statistics about the given path.

        Returned value is a named tuple with attributes 'total', 'used' and
        'free', which are the amount of total, used and free space, in bytes.
        """
        st = os.statvfs(path)
        # Counts are in units of the fundamental block size (f_frsize).
        # 'free' uses f_bavail (blocks available to unprivileged users),
        # while 'used' is derived from f_bfree (all free blocks), so
        # total != used + free on filesystems with reserved blocks.
        free = st.f_bavail * st.f_frsize
        total = st.f_blocks * st.f_frsize
        used = (st.f_blocks - st.f_bfree) * st.f_frsize
        return _ntuple_diskusage(total, used, free)
elif os.name == 'nt':
    # Windows: delegate to the internal nt._getdiskusage() helper.
    import nt
    __all__.append('disk_usage')
    _ntuple_diskusage = collections.namedtuple('usage', 'total used free')

    def disk_usage(path):
        """Return disk usage statistics about the given path.

        Returned value is a named tuple with attributes 'total', 'used' and
        'free', which are the amount of total, used and free space, in bytes.
        """
        total, free = nt._getdiskusage(path)
        used = total - free
        return _ntuple_diskusage(total, used, free)
def chown(path, user=None, group=None):
    """Change owner user and group of the given path.

    user and group can be the uid/gid or the user/group names, and in
    that case, they are converted to their respective uid/gid.
    """
    if user is None and group is None:
        raise ValueError("user and/or group must be set")

    # os.chown() interprets -1 as "leave this id unchanged".
    uid = -1
    gid = -1

    if isinstance(user, str):
        # System username: resolve to a uid.
        uid = _get_uid(user)
        if uid is None:
            raise LookupError("no such user: {!r}".format(user))
    elif user is not None:
        # Already a numeric uid.
        uid = user

    if group is not None:
        if isinstance(group, int):
            gid = group
        else:
            gid = _get_gid(group)
            if gid is None:
                raise LookupError("no such group: {!r}".format(group))

    os.chown(path, uid, gid)
def get_terminal_size(fallback=(80, 24)):
    """Get the size of the terminal window.

    For each of the two dimensions, the environment variable (COLUMNS or
    LINES respectively) wins when it is set to a positive integer.
    Otherwise the terminal attached to sys.__stdout__ is queried via
    os.get_terminal_size(); when that fails (no terminal, closed or
    detached stdout, unsupported platform) the `fallback` value is used.
    Fallback defaults to (80, 24), the size assumed by many terminal
    emulators.

    The value returned is a named tuple of type os.terminal_size.
    """
    def _env_dimension(name):
        # Missing or non-integer values are treated as "not set" (0).
        try:
            return int(os.environ[name])
        except (KeyError, ValueError):
            return 0

    columns = _env_dimension('COLUMNS')
    lines = _env_dimension('LINES')

    # Query the terminal only when at least one dimension is unresolved.
    if columns <= 0 or lines <= 0:
        try:
            size = os.get_terminal_size(sys.__stdout__.fileno())
        except (AttributeError, ValueError, OSError):
            # stdout is None, closed, detached, or not a terminal, or
            # os.get_terminal_size() is unsupported
            size = os.terminal_size(fallback)
        if columns <= 0:
            columns = size.columns
        if lines <= 0:
            lines = size.lines

    return os.terminal_size((columns, lines))
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
    """Given a command, mode, and a PATH string, return the path which
    conforms to the given mode on the PATH, or None if there is no such
    file.

    `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
    of os.environ.get("PATH"), or can be overridden with a custom search
    path.
    """
    # Check that a given file can be accessed with the correct mode.
    # Additionally check that `file` is not a directory, as on Windows
    # directories pass the os.access check.
    def _access_check(fn, mode):
        return (os.path.exists(fn) and os.access(fn, mode)
                and not os.path.isdir(fn))

    # If we're given a path with a directory part, look it up directly rather
    # than referring to PATH directories. This includes checking relative to the
    # current directory, e.g. ./script
    if os.path.dirname(cmd):
        if _access_check(cmd, mode):
            return cmd
        return None

    if path is None:
        path = os.environ.get("PATH", os.defpath)
    if not path:
        # Empty PATH: nothing to search.
        return None
    path = path.split(os.pathsep)

    if sys.platform == "win32":
        # The current directory takes precedence on Windows.
        if not os.curdir in path:
            path.insert(0, os.curdir)

        # PATHEXT is necessary to check on Windows.
        pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
        # See if the given file matches any of the expected path extensions.
        # This will allow us to short circuit when given "python.exe".
        # If it does match, only test that one, otherwise we have to try
        # others.
        if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
            files = [cmd]
        else:
            files = [cmd + ext for ext in pathext]
    else:
        # On other platforms you don't have things like PATHEXT to tell you
        # what file suffixes are executable, so just pass on cmd as-is.
        files = [cmd]

    # Walk PATH in order; the first candidate passing the access check
    # wins.  `seen` skips duplicate PATH entries (after case folding).
    seen = set()
    for dir in path:
        normdir = os.path.normcase(dir)
        if not normdir in seen:
            seen.add(normdir)
            for thefile in files:
                name = os.path.join(dir, thefile)
                if _access_check(name, mode):
                    return name
    return None
|
mit
|
orgito/ansible
|
lib/ansible/plugins/test/core.py
|
12
|
6562
|
# (c) 2012, Jeroen Hoekx <jeroen@hoekx.be>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import operator as py_operator
from distutils.version import LooseVersion, StrictVersion
from ansible import errors
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSequence
from ansible.utils.display import Display
display = Display()
def failed(result):
    '''Return True when a task result dictionary reports failure.'''
    if isinstance(result, MutableMapping):
        return result.get('failed', False)
    raise errors.AnsibleFilterError("The 'failed' test expects a dictionary")
def success(result):
    '''Inverse of the `failed` test.'''
    if failed(result):
        return False
    return True
def unreachable(result):
    '''Return True when a task result reports the host was unreachable.'''
    if isinstance(result, MutableMapping):
        return result.get('unreachable', False)
    raise errors.AnsibleFilterError("The 'unreachable' test expects a dictionary")
def reachable(result):
    '''Inverse of the `unreachable` test.'''
    if unreachable(result):
        return False
    return True
def changed(result):
    '''Test if task result yields changed.

    Handles both a top-level 'changed' key and the per-item 'results'
    list returned by looped modules.
    '''
    if not isinstance(result, MutableMapping):
        raise errors.AnsibleFilterError("The 'changed' test expects a dictionary")
    if 'changed' in result:
        return result.get('changed', False)
    # Some modules return a 'results' key with one dict per loop item;
    # the task counts as changed when any item changed.  Guard against an
    # *empty* list -- the previous code indexed results[0] unconditionally
    # and raised IndexError.
    results = result.get('results')
    if (isinstance(results, MutableSequence) and results
            and isinstance(results[0], MutableMapping)):
        return any(res.get('changed', False) for res in results)
    return False
def skipped(result):
    '''Return True when a task result reports that the task was skipped.'''
    if isinstance(result, MutableMapping):
        return result.get('skipped', False)
    raise errors.AnsibleFilterError("The 'skipped' test expects a dictionary")
def started(result):
    '''Report whether an async task has started.'''
    if not isinstance(result, MutableMapping):
        raise errors.AnsibleFilterError("The 'started' test expects a dictionary")
    if 'started' not in result:
        # For non-async tasks, warn user, but return as if started
        display.warning("The 'started' test expects an async task, but a non-async task was tested")
        return True
    # NOTE: The value of started is 0 or 1, not False or True :-/
    return result.get('started', 0) == 1
def finished(result):
    '''Report whether an async task has finished.'''
    if not isinstance(result, MutableMapping):
        raise errors.AnsibleFilterError("The 'finished' test expects a dictionary")
    if 'finished' not in result:
        # For non-async tasks, warn user, but return as if finished
        display.warning("The 'finished' test expects an async task, but a non-async task was tested")
        return True
    # NOTE: The value of finished is 0 or 1, not False or True :-/
    return result.get('finished', 0) == 1
def regex(value='', pattern='', ignorecase=False, multiline=False, match_type='search'):
    ''' Expose `re` as a boolean filter using the `search` method by default.
        This is likely only useful for `search` and `match` which already
        have their own filters.

        An unknown `match_type` falls back to `search`.
    '''
    flags = 0
    if ignorecase:
        flags |= re.I
    if multiline:
        flags |= re.M
    _re = re.compile(pattern, flags=flags)
    # Two fixes over the previous version:
    # - `__builtins__` is a dict in imported modules but a *module* in
    #   __main__, so `__builtins__.get('bool')` could raise AttributeError;
    #   the builtin name is always directly available.
    # - getattr's default was the string 'search', which is not callable;
    #   fall back to the bound search method instead.
    return bool(getattr(_re, match_type, _re.search)(value))
def match(value, pattern='', ignorecase=False, multiline=False):
    '''Boolean `re.match` test, delegating to the generic regex filter.'''
    return regex(value, pattern, ignorecase, multiline, match_type='match')
def search(value, pattern='', ignorecase=False, multiline=False):
    '''Boolean `re.search` test, delegating to the generic regex filter.'''
    return regex(value, pattern, ignorecase, multiline, match_type='search')
def version_compare(value, version, operator='eq', strict=False):
    ''' Perform a version comparison on a value '''
    # Normalize symbolic spellings ('>=', '!=', '<>', ...) onto the
    # operator-module function names.
    op_map = {
        '==': 'eq', '=': 'eq', 'eq': 'eq',
        '<': 'lt', 'lt': 'lt',
        '<=': 'le', 'le': 'le',
        '>': 'gt', 'gt': 'gt',
        '>=': 'ge', 'ge': 'ge',
        '!=': 'ne', '<>': 'ne', 'ne': 'ne'
    }

    if operator not in op_map:
        raise errors.AnsibleFilterError('Invalid operator type')
    operator = op_map[operator]

    Version = StrictVersion if strict else LooseVersion

    try:
        return getattr(py_operator, operator)(Version(str(value)),
                                              Version(str(version)))
    except Exception as e:
        raise errors.AnsibleFilterError('Version comparison: %s' % e)
class TestModule(object):
    ''' Ansible core jinja2 tests '''

    def tests(self):
        """Return the mapping of jinja2 test names to their callables.

        Several entries are aliases for the same function (e.g. 'failed'
        and 'failure') to keep playbook wording flexible.
        """
        return {
            # failure testing
            'failed': failed,
            'failure': failed,
            'succeeded': success,
            'success': success,
            'successful': success,
            'reachable': reachable,
            'unreachable': unreachable,

            # changed testing
            'changed': changed,
            'change': changed,

            # skip testing
            'skipped': skipped,
            'skip': skipped,

            # async testing
            'finished': finished,
            'started': started,

            # regex
            'match': match,
            'search': search,
            'regex': regex,

            # version comparison
            'version_compare': version_compare,
            'version': version_compare,

            # lists
            'any': any,
            'all': all,
        }
|
gpl-3.0
|
fzalkow/scikit-learn
|
examples/plot_kernel_approximation.py
|
262
|
8004
|
"""
==================================================
Explicit feature map approximation for RBF kernels
==================================================
An example illustrating the approximation of the feature map
of an RBF kernel.
.. currentmodule:: sklearn.kernel_approximation
It shows how to use :class:`RBFSampler` and :class:`Nystroem` to
approximate the feature map of an RBF kernel for classification with an SVM on
the digits dataset. Results using a linear SVM in the original space, a linear
SVM using the approximate mappings and using a kernelized SVM are compared.
Timings and accuracy for varying amounts of Monte Carlo samplings (in the case
of :class:`RBFSampler`, which uses random Fourier features) and different sized
subsets of the training set (for :class:`Nystroem`) for the approximate mapping
are shown.
Please note that the dataset here is not large enough to show the benefits
of kernel approximation, as the exact SVM is still reasonably fast.
Sampling more dimensions clearly leads to better classification results, but
comes at a greater cost. This means there is a tradeoff between runtime and
accuracy, given by the parameter n_components. Note that solving the Linear
SVM and also the approximate kernel SVM could be greatly accelerated by using
stochastic gradient descent via :class:`sklearn.linear_model.SGDClassifier`.
This is not easily possible for the case of the kernelized SVM.
The second plot visualized the decision surfaces of the RBF kernel SVM and
the linear SVM with approximate kernel maps.
The plot shows decision surfaces of the classifiers projected onto
the first two principal components of the data. This visualization should
be taken with a grain of salt since it is just an interesting slice through
the decision surface in 64 dimensions. In particular note that
a datapoint (represented as a dot) does not necessarily be classified
into the region it is lying in, since it will not lie on the plane
that the first two principal components span.
The usage of :class:`RBFSampler` and :class:`Nystroem` is described in detail
in :ref:`kernel_approximation`.
"""
print(__doc__)
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Andreas Mueller <amueller@ais.uni-bonn.de>
# License: BSD 3 clause
# Standard scientific Python imports
import matplotlib.pyplot as plt
import numpy as np
from time import time
# Import datasets, classifiers and performance metrics
from sklearn import datasets, svm, pipeline
from sklearn.kernel_approximation import (RBFSampler,
Nystroem)
from sklearn.decomposition import PCA
# The digits dataset
digits = datasets.load_digits(n_class=9)
# To apply an classifier on this data, we need to flatten the image, to
# turn the data in a (samples, feature) matrix:
n_samples = len(digits.data)
data = digits.data / 16.
data -= data.mean(axis=0)
# We learn the digits on the first half of the digits
data_train, targets_train = data[:n_samples / 2], digits.target[:n_samples / 2]
# Now predict the value of the digit on the second half:
data_test, targets_test = data[n_samples / 2:], digits.target[n_samples / 2:]
#data_test = scaler.transform(data_test)
# Create a classifier: a support vector classifier
kernel_svm = svm.SVC(gamma=.2)
linear_svm = svm.LinearSVC()
# create pipeline from kernel approximation
# and linear svm
feature_map_fourier = RBFSampler(gamma=.2, random_state=1)
feature_map_nystroem = Nystroem(gamma=.2, random_state=1)
fourier_approx_svm = pipeline.Pipeline([("feature_map", feature_map_fourier),
("svm", svm.LinearSVC())])
nystroem_approx_svm = pipeline.Pipeline([("feature_map", feature_map_nystroem),
("svm", svm.LinearSVC())])
# fit and predict using linear and kernel svm:
kernel_svm_time = time()
kernel_svm.fit(data_train, targets_train)
kernel_svm_score = kernel_svm.score(data_test, targets_test)
kernel_svm_time = time() - kernel_svm_time
linear_svm_time = time()
linear_svm.fit(data_train, targets_train)
linear_svm_score = linear_svm.score(data_test, targets_test)
linear_svm_time = time() - linear_svm_time
sample_sizes = 30 * np.arange(1, 10)
fourier_scores = []
nystroem_scores = []
fourier_times = []
nystroem_times = []
for D in sample_sizes:
fourier_approx_svm.set_params(feature_map__n_components=D)
nystroem_approx_svm.set_params(feature_map__n_components=D)
start = time()
nystroem_approx_svm.fit(data_train, targets_train)
nystroem_times.append(time() - start)
start = time()
fourier_approx_svm.fit(data_train, targets_train)
fourier_times.append(time() - start)
fourier_score = fourier_approx_svm.score(data_test, targets_test)
nystroem_score = nystroem_approx_svm.score(data_test, targets_test)
nystroem_scores.append(nystroem_score)
fourier_scores.append(fourier_score)
# plot the results:
plt.figure(figsize=(8, 8))
accuracy = plt.subplot(211)
# second y axis for timeings
timescale = plt.subplot(212)
accuracy.plot(sample_sizes, nystroem_scores, label="Nystroem approx. kernel")
timescale.plot(sample_sizes, nystroem_times, '--',
label='Nystroem approx. kernel')
accuracy.plot(sample_sizes, fourier_scores, label="Fourier approx. kernel")
timescale.plot(sample_sizes, fourier_times, '--',
label='Fourier approx. kernel')
# horizontal lines for exact rbf and linear kernels:
accuracy.plot([sample_sizes[0], sample_sizes[-1]],
[linear_svm_score, linear_svm_score], label="linear svm")
timescale.plot([sample_sizes[0], sample_sizes[-1]],
[linear_svm_time, linear_svm_time], '--', label='linear svm')
accuracy.plot([sample_sizes[0], sample_sizes[-1]],
[kernel_svm_score, kernel_svm_score], label="rbf svm")
timescale.plot([sample_sizes[0], sample_sizes[-1]],
[kernel_svm_time, kernel_svm_time], '--', label='rbf svm')
# vertical line for dataset dimensionality = 64
accuracy.plot([64, 64], [0.7, 1], label="n_features")
# legends and labels
accuracy.set_title("Classification accuracy")
timescale.set_title("Training times")
accuracy.set_xlim(sample_sizes[0], sample_sizes[-1])
accuracy.set_xticks(())
accuracy.set_ylim(np.min(fourier_scores), 1)
timescale.set_xlabel("Sampling steps = transformed feature dimension")
accuracy.set_ylabel("Classification accuracy")
timescale.set_ylabel("Training time in seconds")
accuracy.legend(loc='best')
timescale.legend(loc='best')
# visualize the decision surface, projected down to the first
# two principal components of the dataset
pca = PCA(n_components=8).fit(data_train)
X = pca.transform(data_train)
# Gemerate grid along first two principal components
multiples = np.arange(-2, 2, 0.1)
# steps along first component
first = multiples[:, np.newaxis] * pca.components_[0, :]
# steps along second component
second = multiples[:, np.newaxis] * pca.components_[1, :]
# combine
grid = first[np.newaxis, :, :] + second[:, np.newaxis, :]
flat_grid = grid.reshape(-1, data.shape[1])
# title for the plots
titles = ['SVC with rbf kernel',
'SVC (linear kernel)\n with Fourier rbf feature map\n'
'n_components=100',
'SVC (linear kernel)\n with Nystroem rbf feature map\n'
'n_components=100']
plt.tight_layout()
plt.figure(figsize=(12, 5))
# predict and plot
for i, clf in enumerate((kernel_svm, nystroem_approx_svm,
fourier_approx_svm)):
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
plt.subplot(1, 3, i + 1)
Z = clf.predict(flat_grid)
# Put the result into a color plot
Z = Z.reshape(grid.shape[:-1])
plt.contourf(multiples, multiples, Z, cmap=plt.cm.Paired)
plt.axis('off')
# Plot also the training points
plt.scatter(X[:, 0], X[:, 1], c=targets_train, cmap=plt.cm.Paired)
plt.title(titles[i])
plt.tight_layout()
plt.show()
|
bsd-3-clause
|
cjh1/VTK
|
Filters/Core/Testing/Python/tubeComb.py
|
17
|
1879
|
#!/usr/bin/env python
import vtk
from vtk.util.misc import vtkGetDataRoot

# create planes
# Create the RenderWindow, Renderer
#
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer( ren )
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)

# create pipeline
#
# Read the PLOT3D combustor dataset (structured grid + solution file)
# and pick scalar/vector functions to load from the Q file.
pl3d = vtk.vtkMultiBlockPLOT3DReader()
pl3d.SetXYZFileName( vtkGetDataRoot() + '/Data/combxyz.bin' )
pl3d.SetQFileName( vtkGetDataRoot() + '/Data/combq.bin' )
pl3d.SetScalarFunctionNumber( 100 )
pl3d.SetVectorFunctionNumber( 202 )
pl3d.Update()
pl3d_output = pl3d.GetOutput().GetBlock(0)

# Outline of the structured grid, for spatial context.
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputData(pl3d_output)
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)

# Rake of seed points from which the streamlines are traced.
seeds = vtk.vtkLineSource()
seeds.SetPoint1(15, -5, 32)
seeds.SetPoint2(15, 5, 32)
seeds.SetResolution(10)

# Trace streamlines backward from the seeds with 4th-order Runge-Kutta.
integ = vtk.vtkRungeKutta4()
sl = vtk.vtkStreamLine()
sl.SetIntegrator(integ)
sl.SetInputData(pl3d_output)
sl.SetSourceConnection(seeds.GetOutputPort())
sl.SetMaximumPropagationTime(0.1)
sl.SetIntegrationStepLength(0.1)
sl.SetIntegrationDirectionToBackward()
sl.SetStepLength(0.001)

# Wrap the streamlines in tubes so they are visible as 3D geometry.
tube = vtk.vtkTubeFilter()
tube.SetInputConnection(sl.GetOutputPort())
tube.SetRadius(0.1)
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(tube.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)

# Also show the seed line itself.
mmapper = vtk.vtkPolyDataMapper()
mmapper.SetInputConnection(seeds.GetOutputPort())
mactor = vtk.vtkActor()
mactor.SetMapper(mmapper)
ren.AddActor(mactor)

ren.AddActor(actor)
ren.AddActor(outlineActor)

# Fixed camera parameters make the rendered view reproducible.
cam=ren.GetActiveCamera()
cam.SetClippingRange( 3.95297, 50 )
cam.SetFocalPoint( 8.88908, 0.595038, 29.3342 )
cam.SetPosition( -12.3332, 31.7479, 41.2387 )
cam.SetViewUp( 0.060772, -0.319905, 0.945498 )
renWin.Render()
|
bsd-3-clause
|
wgprojects/ardupilot
|
Tools/autotest/pysim/fdpexpect.py
|
264
|
2488
|
"""This is like pexpect, but will work on any file descriptor that you pass it.
So you are responsible for opening and closing the file descriptor.
$Id: fdpexpect.py 505 2007-12-26 21:33:50Z noah $
"""
from pexpect import *
import os
__all__ = ['fdspawn']
class fdspawn (spawn):

    """This is like pexpect.spawn but allows you to supply your own open file
    descriptor. For example, you could use it to read through a file looking
    for patterns, or to control a modem or serial device. """

    def __init__ (self, fd, args=[], timeout=30, maxread=2000, searchwindowsize=None, logfile=None):

        """This takes a file descriptor (an int) or an object that support the
        fileno() method (returning an int). All Python file-like objects
        support fileno(). """

        ### TODO: Add better handling of trying to use fdspawn in place of spawn
        ### TODO: (overload to allow fdspawn to also handle commands as spawn does.

        # Accept anything exposing fileno() (all file-like objects do).
        if not isinstance(fd, int) and hasattr(fd, 'fileno'):
            fd = fd.fileno()

        if not isinstance(fd, int):
            # Parenthesized raise works on both Python 2 and 3; the old
            # `raise E, 'msg'` statement form is Python-2-only syntax.
            raise ExceptionPexpect('The fd argument is not an int. If this is a command string then maybe you want to use pexpect.spawn.')

        try: # make sure fd is a valid file descriptor
            os.fstat(fd)
        except OSError:
            raise ExceptionPexpect('The fd argument is not a valid file descriptor.')

        self.args = None
        self.command = None
        spawn.__init__(self, None, args, timeout, maxread, searchwindowsize, logfile)
        self.child_fd = fd
        self.own_fd = False
        self.closed = False
        self.name = '<file descriptor %d>' % fd

    def __del__ (self):
        # The caller owns the descriptor; never close it on garbage collection.
        return

    def close (self):
        """Flush and close the wrapped descriptor (idempotent)."""
        if self.child_fd == -1:
            return
        # BUG FIX: the original called `self.close(self)` when own_fd was
        # set -- a recursive call with a bogus extra argument that raised
        # TypeError.  Flushing and closing the fd is correct in both cases.
        self.flush()
        os.close(self.child_fd)
        self.child_fd = -1
        self.closed = True

    def isalive (self):

        """This checks if the file descriptor is still valid. If os.fstat()
        does not raise an exception then we assume it is alive. """

        if self.child_fd == -1:
            return False
        try:
            os.fstat(self.child_fd)
            return True
        except OSError:
            # fstat failed, so the descriptor is no longer valid.
            return False

    def terminate (self, force=False):
        # There is no child process behind a bare descriptor.
        raise ExceptionPexpect('This method is not valid for file descriptors.')

    def kill (self, sig):
        # No child process to signal; silently ignore.
        return
|
gpl-3.0
|
JackCloudman/Youtube-music
|
download.py
|
1
|
1342
|
#Program to download Youtube music
#Author: Jack Cloudman
import pafy,os,shutil
from pydub import AudioSegment as convert

# Create songs.txt on first run so the user has somewhere to paste URLs.
if not os.path.exists('songs.txt'):
    print("Creating songs.txt....")
    with open('songs.txt','w'):
        pass
    print("Paste yours songs in songs.txt")

# Ensure music/ and the music/temp scratch area exist.
os.makedirs('music/temp', exist_ok=True)

# One URL per line.
with open('songs.txt','r') as document:
    music_list = document.readlines()

error_list = []
print("Download music....")
for music in music_list:
    try:
        video = pafy.new(music)
        bestaudio = video.getbestaudio()
        bestaudio.download(filepath="music/temp/")
    except Exception:
        # Best-effort: record the failure and keep downloading the rest.
        # (Narrowed from a bare `except:` so Ctrl-C still aborts the run.)
        error_list.append("Error download: "+music)

print("Converting to mp3.....")
for filename in os.listdir('music/temp/'):
    # BUG FIX: compute `name` before the try block -- the except handler
    # uses it, and previously a failure in from_file() on the first file
    # raised NameError instead of being recorded.
    name = os.path.splitext(filename)
    try:
        audio = convert.from_file('music/temp/'+filename)
        audio.export('music/'+name[0]+'.mp3',format="mp3",bitrate="160k")
    except Exception:
        error_list.append("Error convert: "+name[0])

# Remove the scratch area and report any per-file failures.
shutil.rmtree("music/temp")
for error in error_list:
    print(error)
print("Finished!")
|
gpl-3.0
|
xiaoshaozi52/ansible
|
lib/ansible/plugins/lookup/redis_kv.py
|
251
|
2433
|
# (c) 2012, Jan-Piet Mens <jpmens(at)gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
HAVE_REDIS=False
try:
import redis # https://github.com/andymccurdy/redis-py/
HAVE_REDIS=True
except ImportError:
pass
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
# ==============================================================
# REDISGET: Obtain value from a GET on a Redis key. Terms
# expected: 0 = URL, 1 = Key
# URL may be empty, in which case redis://localhost:6379 assumed
# --------------------------------------------------------------
class LookupModule(LookupBase):

    def run(self, terms, variables, **kwargs):
        """Look up each term of the form 'url,key' with a Redis GET.

        An empty URL defaults to redis://localhost:6379.  Connection
        failures and missing keys yield '' for that term rather than an
        error.
        """
        if not HAVE_REDIS:
            raise AnsibleError("Can't LOOKUP(redis_kv): module redis is not installed")

        ret = []
        for term in terms:
            (url, key) = term.split(',')
            if url == "":
                url = 'redis://localhost:6379'

            # urlsplit on Python 2.6.1 is broken. Hmm. Probably also the reason
            # Redis' from_url() doesn't work here.
            p = '(?P<scheme>[^:]+)://?(?P<host>[^:/ ]+).?(?P<port>[0-9]*).*'

            try:
                m = re.search(p, url)
                host = m.group('host')
                port = int(m.group('port'))
            except AttributeError:
                raise AnsibleError("Bad URI in redis lookup")

            try:
                conn = redis.Redis(host=host, port=port)
                res = conn.get(key)
                if res is None:
                    res = ""
                ret.append(res)
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit still propagate; a connection failure or a
                # missing key degrades to an empty string.
                ret.append("")
        return ret
|
gpl-3.0
|
jfalkner/Efficient-Django-QuerySet-Use
|
demo-optimized/example/utils.py
|
1
|
3812
|
from django.utils.timezone import utc
from django_db_utils import pg_bulk_update
from example.models import Sample, SampleStatus
def now():
    """Return the current time as a timezone-aware UTC datetime."""
    from datetime import datetime
    return datetime.utcnow().replace(tzinfo=utc)
def make_fake_data(samples_to_make=100000, batch_threshold=100000, delete_existing=True, make_statuses=True, years=5):
    """Makes mock data for testing performance. Optionally, resets db.

    samples_to_make -- total Sample rows to create
    batch_threshold -- rows per bulk_create() batch (bounds memory use)
    delete_existing -- wipe the Sample table first
    make_statuses -- also create SampleStatus history rows
    years -- notional time span the generated data covers
    """
    if delete_existing:
        Sample.objects.all().delete()
        print "Deleted existing"
    # Samples below `offset` are marked COMPLETE; the remainder (roughly one
    # week's slice of the notional span -- presumably "recent" work) stay in
    # the LAB state.
    offset = samples_to_make - samples_to_make/52/years
    # Create all the samples.
    samples = []
    barcodes = range(samples_to_make)
    for barcode in barcodes:
        sample = Sample()
        sample.barcode = str(barcode)
        sample.created = now()
        sample.status_created = sample.created
        if barcode < offset:
            sample.status_code = SampleStatus.COMPLETE
        else:
            sample.status_code = SampleStatus.LAB
        sample.production = True
        samples.append(sample)
        # Flush to the db in batches to bound memory use.
        if len(samples) >= batch_threshold:
            Sample.objects.bulk_create(samples)
            del samples[:]
            print "Made %s samples." % Sample.objects.count()
    if samples:
        Sample.objects.bulk_create(samples)
    print "Finished making %s samples." % Sample.objects.count()
    if not make_statuses:
        return
    # Pull all ids for samples.
    sample_ids = Sample.objects.values_list('id', flat=True)
    # Create all the statuses.
    offset = len(sample_ids)-len(sample_ids)/52/years
    statuses = []
    # Completed samples get the full RECEIVED -> LAB -> COMPLETE history.
    for sample in sample_ids[:offset]:
        statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.RECEIVED, created=now()))
        statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.LAB, created=now()))
        statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.COMPLETE, created=now()))
        if len(statuses) >= batch_threshold:
            SampleStatus.objects.bulk_create(statuses)
            del statuses[:]
    # The rest are still in progress: RECEIVED -> LAB only.
    for sample in sample_ids[offset:]:
        statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.RECEIVED, created=now()))
        statuses.append(SampleStatus(sample_id=sample, status_code=SampleStatus.LAB, created=now()))
        if len(statuses) >= batch_threshold:
            SampleStatus.objects.bulk_create(statuses)
            del statuses[:]
    print "Made %s statuses."%SampleStatus.objects.count()
    if statuses:
        SampleStatus.objects.bulk_create(statuses)
    print "Finished making %s statuses."%SampleStatus.objects.count()
    # Make all the denormalized status_code vars match.
    sync_status(limit=batch_threshold)
    print "Statuses synchronized"
def sync_status(limit=100000):
    """Synchronize each Sample's denormalized ``status_code`` column with
    the status_code of its latest related SampleStatus row.

    Samples are processed in batches of ``limit`` to bound memory use.
    (Python 2 module: uses print statements.)
    """
    # Stream through all samples.
    sample_count = Sample.objects.count()
    for index in range(0, sample_count, limit):
        # One row per sample: ordering by id then '-statuses__status_code'
        # combined with distinct('id') keeps the highest status_code per
        # sample (PostgreSQL DISTINCT ON semantics -- see pg_bulk_update).
        vals = Sample.objects.order_by('id', '-statuses__status_code').distinct('id').values_list('id', 'status_code', 'statuses__id', 'statuses__status_code')[index:index+limit]
        # Pull all mismatching values.
        ids = []
        status_codes = []
        # status_ids = []
        for sample_id, status_code, status_id, latest_status_code in vals:
            if status_code != latest_status_code:
                ids.append(sample_id)
                status_codes.append(latest_status_code)
                # status_ids.append(status_id)
        # Sync using a bulk update.
        if ids:
            pg_bulk_update(Sample, 'id', 'status_code', list(ids), list(status_codes))
            # pg_bulk_update(Sample, 'id', 'status_id', list(ids), list(status_ids))
        print 'Synced %s out of %s samples at %s'%(len(ids), limit, index)
|
mit
|
cloudbase/coriolis
|
coriolis/wsman.py
|
1
|
6173
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
import base64
from oslo_log import log as logging
import requests
from winrm import protocol
from winrm import exceptions as winrm_exceptions
from coriolis import exception
from coriolis import utils
# Supported WinRM authentication mechanisms.
AUTH_BASIC = "basic"
AUTH_KERBEROS = "kerberos"
AUTH_CERTIFICATE = "certificate"

# Windows codepage identifier for UTF-8 console output.
CODEPAGE_UTF8 = 65001

# Default per-operation timeout, in seconds.
DEFAULT_TIMEOUT = 3600

LOG = logging.getLogger(__name__)
class WSManConnection(object):
    """Remote Windows command execution over WinRM (WS-Management)."""

    # Line terminator used by Windows command output; exec_ps_command
    # strips one trailing EOL from PowerShell output.
    EOL = "\r\n"

    def __init__(self, timeout=None):
        self._protocol = None
        # Timeout (seconds) applied to every operation unless overridden.
        self._conn_timeout = int(timeout or DEFAULT_TIMEOUT)

    @utils.retry_on_error()
    def connect(self, url, username, auth=None, password=None,
                cert_pem=None, cert_key_pem=None):
        """Open a WinRM protocol session against ``url``.

        When ``auth`` is not given, certificate auth is selected if a
        client certificate was provided, otherwise basic auth.
        """
        if not auth:
            if cert_pem:
                auth = AUTH_CERTIFICATE
            else:
                auth = AUTH_BASIC
        # Map our auth constants to pywinrm transport names.
        auth_transport_map = {AUTH_BASIC: 'plaintext',
                              AUTH_KERBEROS: 'kerberos',
                              AUTH_CERTIFICATE: 'ssl'}
        self._protocol = protocol.Protocol(
            endpoint=url,
            transport=auth_transport_map[auth],
            username=username,
            password=password,
            cert_pem=cert_pem,
            cert_key_pem=cert_key_pem)

    @classmethod
    def from_connection_info(cls, connection_info, timeout=DEFAULT_TIMEOUT):
        """ Returns a wsman.WSManConnection object for the provided conn info. """
        if not isinstance(connection_info, dict):
            raise ValueError(
                "WSMan connection must be a dict. Got type '%s', value: %s" % (
                    type(connection_info), connection_info))

        required_keys = ["ip", "username", "password"]
        missing = [key for key in required_keys if key not in connection_info]
        if missing:
            raise ValueError(
                "The following keys were missing from WSMan connection info %s. "
                "Got: %s" % (missing, connection_info))

        host = connection_info["ip"]
        port = connection_info.get("port", 5986)
        username = connection_info["username"]
        password = connection_info.get("password")
        cert_pem = connection_info.get("cert_pem")
        cert_key_pem = connection_info.get("cert_key_pem")
        url = "https://%s:%s/wsman" % (host, port)

        # Security fix: never log credentials/private keys at INFO level.
        sanitized_info = {k: v for k, v in connection_info.items()
                          if k not in ('password', 'cert_pem', 'cert_key_pem')}
        LOG.info("Connection info: %s", sanitized_info)

        LOG.info("Waiting for connectivity on host: %(host)s:%(port)s",
                 {"host": host, "port": port})
        utils.wait_for_port_connectivity(host, port)

        conn = cls(timeout)
        conn.connect(url=url, username=username, password=password,
                     cert_pem=cert_pem, cert_key_pem=cert_key_pem)
        return conn

    def disconnect(self):
        self._protocol = None

    def set_timeout(self, timeout):
        """Apply ``timeout`` (seconds) to both the WinRM operation and the
        underlying HTTP transport."""
        if timeout:
            self._protocol.timeout = timeout
            self._protocol.transport.timeout = timeout

    @utils.retry_on_error(
        terminal_exceptions=[winrm_exceptions.InvalidCredentialsError,
                             exception.OSMorphingWinRMOperationTimeout])
    def _exec_command(self, cmd, args=None, timeout=None):
        """Run ``cmd`` with ``args`` in a fresh WinRM shell.

        :returns: tuple of (stdout, stderr, exit_code).
        :raises exception.OSMorphingWinRMOperationTimeout: when fetching
            the command output exceeds ``timeout``.
        """
        # Fix: avoid the shared mutable-default-argument pitfall.
        args = args or []
        timeout = int(timeout or self._conn_timeout)
        self.set_timeout(timeout)
        shell_id = self._protocol.open_shell(codepage=CODEPAGE_UTF8)
        try:
            command_id = self._protocol.run_command(shell_id, cmd, args)
            try:
                (std_out,
                 std_err,
                 exit_code) = self._protocol.get_command_output(
                    shell_id, command_id)
            except requests.exceptions.ReadTimeout:
                raise exception.OSMorphingWinRMOperationTimeout(
                    cmd=("%s %s" % (cmd, " ".join(args))), timeout=timeout)
            finally:
                self._protocol.cleanup_command(shell_id, command_id)

            return (std_out, std_err, exit_code)
        finally:
            self._protocol.close_shell(shell_id)

    def exec_command(self, cmd, args=None, timeout=None):
        """Run a remote command and return its stdout.

        :raises exception.CoriolisException: on non-zero exit code.
        """
        # Fix: avoid the shared mutable-default-argument pitfall.
        args = args or []
        LOG.debug("Executing WSMAN command: %s", str([cmd] + args))
        std_out, std_err, exit_code = self._exec_command(
            cmd, args, timeout=timeout)

        if exit_code:
            raise exception.CoriolisException(
                "Command \"%s\" failed with exit code: %s\n"
                "stdout: %s\nstd_err: %s" %
                (str([cmd] + args), exit_code, std_out, std_err))

        return std_out

    def exec_ps_command(self, cmd, ignore_stdout=False, timeout=None):
        """Run a PowerShell command (base64-encoded) and return its output
        minus the trailing EOL ("\\r\\n").

        NOTE: ``ignore_stdout`` is accepted for backward compatibility but
        is currently unused -- output is always fetched and returned.
        """
        LOG.debug("Executing PS command: %s", cmd)
        base64_cmd = base64.b64encode(cmd.encode('utf-16le')).decode()
        return self.exec_command(
            "powershell.exe", ["-EncodedCommand", base64_cmd],
            timeout=timeout)[:-2]

    def test_path(self, remote_path):
        """Return True if ``remote_path`` exists on the remote host."""
        ret_val = self.exec_ps_command("Test-Path -Path \"%s\"" % remote_path)
        return ret_val == "True"

    def download_file(self, url, remote_path):
        """Download ``url`` to ``remote_path`` on the remote host."""
        LOG.debug("Downloading: \"%(url)s\" to \"%(path)s\"",
                  {"url": url, "path": remote_path})
        # Nano Server does not have Invoke-WebRequest and additionally
        # this is also faster
        self.exec_ps_command(
            "[Net.ServicePointManager]::SecurityProtocol = "
            "[Net.SecurityProtocolType]::Tls12;"
            "if(!([System.Management.Automation.PSTypeName]'"
            "System.Net.Http.HttpClient').Type) {$assembly = "
            "[System.Reflection.Assembly]::LoadWithPartialName("
            "'System.Net.Http')}; (new-object System.Net.Http.HttpClient)."
            "GetStreamAsync('%(url)s').Result.CopyTo("
            "(New-Object IO.FileStream '%(outfile)s', Create, Write, None), "
            "1MB)" % {"url": url, "outfile": remote_path},
            ignore_stdout=True)

    def write_file(self, remote_path, content):
        """Write ``content`` (bytes) to ``remote_path`` on the remote host
        by shipping it base64-encoded through PowerShell."""
        self.exec_ps_command(
            "[IO.File]::WriteAllBytes('%s', [Convert]::FromBase64String('%s'))"
            % (remote_path, base64.b64encode(content).decode()),
            ignore_stdout=True)
|
agpl-3.0
|
gmarke/erpnext
|
erpnext/patches/v4_0/countrywise_coa.py
|
119
|
1034
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
    """Migration patch: normalize legacy Account.account_type values and
    backfill report_type / balance_must_be from pre-v4 columns."""
    frappe.reload_doc("setup", 'doctype', "company")
    frappe.reload_doc("accounts", 'doctype', "account")

    # Legacy 'Bank or Cash' accounts named Cash / Cash In Hand become 'Cash'.
    frappe.db.sql("""update tabAccount set account_type='Cash'
        where account_type='Bank or Cash' and account_name in ('Cash', 'Cash In Hand')""")

    frappe.db.sql("""update tabAccount set account_type='Stock'
        where account_name = 'Stock Assets'""")

    # Rename the remaining legacy account_type values.
    ac_types = {"Fixed Asset Account": "Fixed Asset", "Bank or Cash": "Bank"}
    for old, new in ac_types.items():
        frappe.db.sql("""update tabAccount set account_type=%s
            where account_type=%s""", (new, old))

    try:
        frappe.db.sql("""update `tabAccount` set report_type =
            if(is_pl_account='Yes', 'Profit and Loss', 'Balance Sheet')""")

        frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
            where ifnull(allow_negative_balance, 0) = 0""")
    except Exception:
        # Fix: was a bare ``except:`` (also swallowed SystemExit and
        # KeyboardInterrupt). Kept best-effort: the legacy columns
        # (is_pl_account / debit_or_credit) may no longer exist.
        pass
|
agpl-3.0
|
jmartu/testing
|
venv/lib/python3.6/site-packages/pip/_vendor/requests/packages/__init__.py
|
838
|
1384
|
'''
Debian and other distributions "unbundle" requests' vendored dependencies, and
rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
The problem with this is that not only requests itself imports those
dependencies, but third-party code outside of the distros' control too.
In reaction to these problems, the distro maintainers replaced
``requests.packages`` with a magical "stub module" that imports the correct
modules. The implementations were varying in quality and all had severe
problems. For example, a symlink (or hardlink) that links the correct modules
into place introduces problems regarding object identity, since you now have
two modules in `sys.modules` with the same API, but different identities::
requests.packages.urllib3 is not urllib3
With version ``2.5.2``, requests started to maintain its own stub, so that
distro-specific breakage would be reduced to a minimum, even though the whole
issue is not requests' fault in the first place. See
https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
request.
'''
from __future__ import absolute_import
import sys

# Prefer the vendored copy; on "unbundled" (e.g. Debian) installs fall back
# to the system-wide package and alias it under the vendored module name so
# ``requests.packages.urllib3``-style imports keep working.
try:
    from . import urllib3
except ImportError:
    import urllib3
    sys.modules['%s.urllib3' % __name__] = urllib3

try:
    from . import chardet
except ImportError:
    import chardet
    sys.modules['%s.chardet' % __name__] = chardet
|
mit
|
michael-dev2rights/ansible
|
lib/ansible/modules/cloud/ovirt/ovirt_quotas_facts.py
|
73
|
3955
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_quotas_facts
short_description: Retrieve facts about one or more oVirt/RHV quotas
version_added: "2.3"
author: "Red Hat"
description:
- "Retrieve facts about one or more oVirt/RHV quotas."
notes:
- "This module creates a new top-level C(ovirt_quotas) fact, which
contains a list of quotas."
options:
data_center:
description:
- "Name of the datacenter where quota resides."
required: true
name:
description:
- "Name of the quota, can be used as glob expression."
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about quota named C<myquota> in Default datacenter:
- ovirt_quotas_facts:
data_center: Default
name: myquota
- debug:
var: ovirt_quotas
'''
RETURN = '''
ovirt_quotas:
description: "List of dictionaries describing the quotas. Quota attribues are mapped to dictionary keys,
all quotas attributes can be found at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/quota."
returned: On success.
type: list
'''
import fnmatch
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
search_by_name,
)
def main():
    """Entry point: gather facts about oVirt/RHV quotas in a datacenter."""
    argument_spec = ovirt_facts_full_argument_spec(
        data_center=dict(required=True),
        name=dict(default=None),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    # Fix: initialize before the try block -- previously an exception raised
    # before these assignments (e.g. in create_connection) caused a NameError
    # in the finally clause, masking the original error.
    auth = None
    connection = None
    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        datacenters_service = connection.system_service().data_centers_service()
        dc_name = module.params['data_center']
        dc = search_by_name(datacenters_service, dc_name)
        if dc is None:
            raise Exception("Datacenter '%s' was not found." % dc_name)

        quotas_service = datacenters_service.service(dc.id).quotas_service()
        if module.params['name']:
            # Treat the requested name as a glob pattern.
            quotas = [
                e for e in quotas_service.list()
                if fnmatch.fnmatch(e.name, module.params['name'])
            ]
        else:
            quotas = quotas_service.list()

        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_quotas=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in quotas
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Only close if the connection was actually established.
        if connection is not None:
            connection.close(logout=auth.get('token') is None)


if __name__ == '__main__':
    main()
|
gpl-3.0
|
jbhsieh/incubator-airflow
|
tests/ti_deps/deps/pool_has_space_dep.py
|
20
|
1194
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from airflow.ti_deps.deps.pool_has_space_dep import PoolHasSpaceDep
from fake_models import FakeTI
class PoolHasSpaceDepTest(unittest.TestCase):
def test_pool_full(self):
"""
Full pools should fail this dep
"""
ti = FakeTI(pool="fake_pool", pool_filled=True)
self.assertFalse(PoolHasSpaceDep().is_met(ti=ti, dep_context=None))
def test_not_skipped(self):
"""
Pools with room should pass this dep
"""
ti = FakeTI(pool="fake_pool", pool_filled=False)
self.assertTrue(PoolHasSpaceDep().is_met(ti=ti, dep_context=None))
|
apache-2.0
|
kirienko/gourmet
|
src/gourmet/importers/plaintext_importer.py
|
1
|
4803
|
import re
from gourmet import check_encodings
from gourmet.gdebug import debug
from gourmet.i18n import _
from gourmet.importers import importer
class TextImporter (importer.Importer):
    """Base class for importers that parse recipes out of plain-text files."""

    # Maps plain-text header labels to recipe attribute names.
    ATTR_DICT = {'Recipe By':'source',
                 'Serving Size':'servings',
                 'Preparation Time':'preptime',
                 'Categories':'category',
                 }

    # Lines shorter than this are treated as paragraph ends when unwrapping.
    end_of_paragraph_length = 60

    def __init__ (self, filename, conv=None):
        self.fn = filename
        self.rec = {}
        self.ing = {}
        self.compile_regexps()
        importer.Importer.__init__(self,conv=conv)

    def pre_run (self):
        # Read the whole file up front so do_run can report progress.
        self.lines = check_encodings.get_file(self.fn)
        self.total_lines = len(self.lines)
        print('we have ',self.total_lines,'lines in file',self.fn)

    def do_run (self):
        if not hasattr(self,'lines'):
            raise Exception("pre_run has not been run!")
        for n in range(self.total_lines):
            l=self.lines[n]
            if n % 15 == 0:
                # Emit a progress signal every 15 lines.
                prog = float(n)/float(self.total_lines)
                msg = _("Imported %s recipes.")%(len(self.added_recs))
                self.emit('progress',prog,msg)
            self.handle_line(l)
        # commit the last rec if need be
        if self.rec:
            self.commit_rec()
        importer.Importer.do_run(self)

    def handle_line (self, l):
        # Subclasses must implement per-line parsing.
        raise NotImplementedError

    def compile_regexps (self):
        self.blank_matcher = re.compile(r"^\s*$")
        # out unwrap regexp looks for a line with no meaningful characters, or a line that starts in
        # ALLCAPS or a line that is only space. (we use this with .split() to break text up into
        # paragraph breaks.
        self.unwrap_matcher = re.compile(r'\n\W*\n')
        self.find_header_breaks_matcher = re.compile(r'\s+(?=[A-Z][A-Z][A-Z]+:.*)')

    def unwrap_lines (self, blob):
        # NOTE(review): the empty-string literals below look like a
        # paragraph-marker character lost in transcoding: find("") is
        # always 0 (so this branch always triggers) and split("") raises
        # ValueError. Confirm the intended marker against upstream.
        if blob.find("") >= 0:
            debug('Using built-in paragraph markers',1)
            # then we have paragraph markers in the text already
            outblob = " ".join(blob.split("\n")) # get rid of line breaks
            lines = outblob.split("") # split text up into paragraphs
            outblob = "\n".join(lines) # insert linebreaks where paragraphs were
            return outblob
        outblob = ""
        newline = True
        for l in blob.split('\n'):
            debug('examining %s'%l,3)
            if re.match(r'^\W*$',l):
                # ignore repeated nonword characters (hyphens, stars, etc.)
                outblob += "\n"
                continue
            # if we have a non-word character at the start of the line,
            # we assume we need to keep the newline.
            if len(l)>=3 and re.match(r'(\W|[0-9])',l[2]):
                debug('Match non-word character; add newline before: %s'%l,4)
                outblob += "\n"
                outblob += l
                newline = False
                continue
            # if we are continuing an old line, we add a space
            # (because we're generally stripping all spaces when
            # we write)
            if not newline: outblob += " "
            hmatch = self.find_header_breaks_matcher.search(l)
            if hmatch:
                # if there's a header in the middle, we go ahead
                # and start a new line
                debug('Splitting at header in line: %s'%l,4)
                outblob += l[:hmatch.start()]
                outblob += "\n"
                outblob += l[hmatch.start():]
                continue
            #else...
            outblob += l.strip()
            if len(l) < self.end_of_paragraph_length: #60 is our hard-coded end-o-paragraph length
                debug('line < %s characters, adding newline.'%self.end_of_paragraph_length,4)
                outblob += "\n"
                newline = True
            else:
                newline = False
        return outblob
class Tester (importer.Tester):
    """Decides whether a file looks like a plain-text recipe file."""

    def __init__ (self):
        # NOTE(review): MASTERCOOK_START_REGEXP is not defined in this
        # module -- presumably meant to be imported from the MasterCook
        # importer; confirm against upstream.
        importer.Tester.__init__(self,regexp=MASTERCOOK_START_REGEXP)
        # Lines matching this pattern identify XML-based export formats
        # (mx2, RcpE, RTxt) that this plain-text tester must reject.
        self.not_me = "<[?]?(xml|mx2|RcpE|RTxt)[^>]*>"

    def test (self, filename):
        """Test file named filename.

        filename can also be a file object.
        """
        if not hasattr(self,'matcher'):
            self.matcher=re.compile(self.regexp)
            self.not_matcher = re.compile(self.not_me)
        # Fix: previously ``self.ofi`` was checked before it was ever
        # assigned, raising AttributeError. Accept either a path or an
        # already-open file object.
        self.ofi = filename
        if isinstance(self.ofi, str):
            self.ofi = open(filename,'r')
        l = self.ofi.readline()
        while l:
            if self.not_matcher.match(l):
                self.ofi.close()
                return False
            if self.matcher.match(l):
                self.ofi.close()
                return True
            l = self.ofi.readline()
        self.ofi.close()
|
gpl-2.0
|
houzhenggang/hiwifi-openwrt-HC5661-HC5761
|
package/goagent/files/etc/goagent/dnsproxy.py
|
10
|
14082
|
#!/usr/bin/env python
# coding:utf-8
__version__ = '1.0'
import sys
import os
import sysconfig
sys.path += [os.path.abspath(os.path.join(__file__, '../packages.egg/%s' % x)) for x in ('noarch', sysconfig.get_platform().split('-')[0])]
import gevent
import gevent.server
import gevent.timeout
import gevent.monkey
gevent.monkey.patch_all(subprocess=True)
import re
import time
import logging
import heapq
import socket
import select
import struct
import errno
import thread
import dnslib
import Queue
import pygeoip
# Matcher for loopback and RFC 1918 private IPv4 addresses (optionally
# wrapped in an ISATAP-style IPv6 prefix); used to tell intranet DNS
# servers apart from public ones.
is_local_addr = re.compile(r'(?i)(?:[0-9a-f:]+0:5efe:)?(?:127(?:\.\d+){3}|10(?:\.\d+){3}|192\.168(?:\.\d+){2}|172\.(?:1[6-9]|2\d|3[01])(?:\.\d+){2})').match
def get_dnsserver_list():
    """Return the system-configured DNS server IPs (best effort).

    Python 2 module: the Windows branch uses ``xrange``.
    """
    if os.name == 'nt':
        # Windows: call DnsQueryConfig() from dnsapi.dll via ctypes.
        import ctypes, ctypes.wintypes, struct, socket
        DNS_CONFIG_DNS_SERVER_LIST = 6
        buf = ctypes.create_string_buffer(2048)
        ctypes.windll.dnsapi.DnsQueryConfig(DNS_CONFIG_DNS_SERVER_LIST, 0, None, None, ctypes.byref(buf), ctypes.byref(ctypes.wintypes.DWORD(len(buf))))
        # First DWORD of the buffer is the entry count, followed by the
        # packed 4-byte IPv4 addresses.
        ipcount = struct.unpack('I', buf[0:4])[0]
        iplist = [socket.inet_ntoa(buf[i:i+4]) for i in xrange(4, ipcount*4+4, 4)]
        return iplist
    elif os.path.isfile('/etc/resolv.conf'):
        # POSIX: parse "nameserver <ip>" entries from resolv.conf.
        with open('/etc/resolv.conf', 'rb') as fp:
            return re.findall(r'(?m)^nameserver\s+(\S+)', fp.read())
    else:
        logging.warning("get_dnsserver_list failed: unsupport platform '%s-%s'", sys.platform, os.name)
        return []
def parse_hostport(host, default_port=80):
    """Split a ``"host#port"`` string into a ``(host, port)`` tuple.

    Square brackets around IPv6 literals are stripped. When no ``#port``
    suffix is present, ``default_port`` is used.
    """
    matched = re.match(r'(.+)[#](\d+)$', host)
    if matched is None:
        return host.strip('[]'), default_port
    name, port = matched.groups()
    return name.strip('[]'), int(port)
class ExpireCache(object):
    """ A dictionary-like object, supporting expire semantics."""

    def __init__(self, max_size=1024):
        # Bound on the number of live entries; cleanup() enforces it.
        self.__maxsize = max_size
        self.__values = {}
        self.__expire_times = {}
        # Min-heap of (expire_time, key) pairs, mirrored in __expire_times.
        self.__expire_heap = []

    def size(self):
        return len(self.__values)

    def clear(self):
        self.__values.clear()
        self.__expire_times.clear()
        del self.__expire_heap[:]

    def exists(self, key):
        return key in self.__values

    def __remove_heap_entry(self, et, key):
        # Drop (et, key) from the heap and restore the heap invariant.
        pos = self.__expire_heap.index((et, key))
        del self.__expire_heap[pos]
        if pos < len(self.__expire_heap):
            heapq._siftup(self.__expire_heap, pos)

    def set(self, key, value, expire):
        """Store ``value`` under ``key``, expiring ``expire`` seconds from now."""
        try:
            # Replace any stale heap entry for this key first.
            self.__remove_heap_entry(self.__expire_times[key], key)
        except KeyError:
            pass
        deadline = int(time.time() + expire)
        self.__expire_times[key] = deadline
        heapq.heappush(self.__expire_heap, (deadline, key))
        self.__values[key] = value
        self.cleanup()

    def get(self, key):
        """Return the live value for ``key``; raise KeyError if missing or expired."""
        if self.__expire_times[key] < time.time():
            self.cleanup()
            raise KeyError(key)
        return self.__values[key]

    def delete(self, key):
        deadline = self.__expire_times.pop(key)
        self.__remove_heap_entry(deadline, key)
        del self.__values[key]

    def cleanup(self):
        """Evict every expired entry, then shrink back under max_size."""
        now = int(time.time())
        heap = self.__expire_heap
        deadlines = self.__expire_times
        values = self.__values
        limit = self.__maxsize
        pop = heapq.heappop
        while heap and heap[0][0] <= now or len(values) > limit:
            _, key = pop(heap)
            del values[key], deadlines[key]
def dnslib_resolve_over_udp(query, dnsservers, timeout, **kwargs):
    """Resolve ``query`` over UDP against ``dnsservers``, discarding replies
    that contain blacklisted IPs (DNS-poisoning countermeasure). Empty
    error answers are only accepted from servers in ``turstservers``.

    http://gfwrev.blogspot.com/2009/11/gfwdns.html
    http://zh.wikipedia.org/wiki/%E5%9F%9F%E5%90%8D%E6%9C%8D%E5%8A%A1%E5%99%A8%E7%BC%93%E5%AD%98%E6%B1%A1%E6%9F%93
    http://support.microsoft.com/kb/241352
    """
    if not isinstance(query, (basestring, dnslib.DNSRecord)):
        raise TypeError('query argument requires string/DNSRecord')
    blacklist = kwargs.get('blacklist', ())
    turstservers = kwargs.get('turstservers', ())
    # Split servers by address family; IPv6 addresses contain ':'.
    dns_v4_servers = [x for x in dnsservers if ':' not in x]
    dns_v6_servers = [x for x in dnsservers if ':' in x]
    sock_v4 = sock_v6 = None
    socks = []
    if dns_v4_servers:
        sock_v4 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        socks.append(sock_v4)
    if dns_v6_servers:
        sock_v6 = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
        socks.append(sock_v6)
    timeout_at = time.time() + timeout
    try:
        # Retry the full send/receive cycle up to 4 times.
        for _ in xrange(4):
            try:
                for dnsserver in dns_v4_servers:
                    if isinstance(query, basestring):
                        query = dnslib.DNSRecord(q=dnslib.DNSQuestion(query))
                    query_data = query.pack()
                    sock_v4.sendto(query_data, parse_hostport(dnsserver, 53))
                for dnsserver in dns_v6_servers:
                    if isinstance(query, basestring):
                        query = dnslib.DNSRecord(q=dnslib.DNSQuestion(query, qtype=dnslib.QTYPE.AAAA))
                    query_data = query.pack()
                    sock_v6.sendto(query_data, parse_hostport(dnsserver, 53))
                while time.time() < timeout_at:
                    ins, _, _ = select.select(socks, [], [], 0.1)
                    for sock in ins:
                        reply_data, reply_address = sock.recvfrom(512)
                        reply_server = reply_address[0]
                        record = dnslib.DNSRecord.parse(reply_data)
                        # rtype 1=A, 28=AAAA, 255=ANY
                        iplist = [str(x.rdata) for x in record.rr if x.rtype in (1, 28, 255)]
                        if any(x in blacklist for x in iplist):
                            # Poisoned answer: log and keep waiting.
                            logging.warning('query=%r dnsservers=%r record bad iplist=%r', query, dnsservers, iplist)
                        elif record.header.rcode and not iplist and reply_server in turstservers:
                            # Error answer (e.g. NXDOMAIN) from a trusted server.
                            logging.info('query=%r trust reply_server=%r record rcode=%s', query, reply_server, record.header.rcode)
                            return record
                        elif iplist:
                            logging.debug('query=%r reply_server=%r record iplist=%s', query, reply_server, iplist)
                            return record
                        else:
                            logging.debug('query=%r reply_server=%r record null iplist=%s', query, reply_server, iplist)
                            continue
            except socket.error as e:
                logging.warning('handle dns query=%s socket: %r', query, e)
        raise socket.gaierror(11004, 'getaddrinfo %r from %r failed' % (query, dnsservers))
    finally:
        for sock in socks:
            sock.close()
def dnslib_resolve_over_tcp(query, dnsservers, timeout, **kwargs):
    """dns query over tcp

    Queries every server in ``dnsservers`` concurrently and returns the
    first reply whose answers contain no blacklisted IP. Raises
    ``socket.gaierror`` when every server fails or times out.
    """
    if not isinstance(query, (basestring, dnslib.DNSRecord)):
        raise TypeError('query argument requires string/DNSRecord')
    blacklist = kwargs.get('blacklist', ())
    def do_resolve(query, dnsserver, timeout, queobj):
        # Worker thread: query one server; push the DNSRecord (or the
        # error) onto the shared queue.
        if isinstance(query, basestring):
            qtype = dnslib.QTYPE.AAAA if ':' in dnsserver else dnslib.QTYPE.A
            query = dnslib.DNSRecord(q=dnslib.DNSQuestion(query, qtype=qtype))
        query_data = query.pack()
        sock_family = socket.AF_INET6 if ':' in dnsserver else socket.AF_INET
        sock = socket.socket(sock_family)
        rfile = None
        try:
            sock.settimeout(timeout or None)
            sock.connect(parse_hostport(dnsserver, 53))
            # TCP DNS messages carry a 2-byte big-endian length prefix.
            sock.send(struct.pack('>h', len(query_data)) + query_data)
            rfile = sock.makefile('r', 1024)
            reply_data_length = rfile.read(2)
            if len(reply_data_length) < 2:
                raise socket.gaierror(11004, 'getaddrinfo %r from %r failed' % (query, dnsserver))
            reply_data = rfile.read(struct.unpack('>h', reply_data_length)[0])
            record = dnslib.DNSRecord.parse(reply_data)
            # rtype 1=A, 28=AAAA, 255=ANY
            iplist = [str(x.rdata) for x in record.rr if x.rtype in (1, 28, 255)]
            if any(x in blacklist for x in iplist):
                logging.debug('query=%r dnsserver=%r record bad iplist=%r', query, dnsserver, iplist)
                raise socket.gaierror(11004, 'getaddrinfo %r from %r failed' % (query, dnsserver))
            else:
                logging.debug('query=%r dnsserver=%r record iplist=%s', query, dnsserver, iplist)
                queobj.put(record)
        except socket.error as e:
            logging.debug('query=%r dnsserver=%r failed %r', query, dnsserver, e)
            queobj.put(e)
        finally:
            if rfile:
                rfile.close()
            sock.close()
    queobj = Queue.Queue()
    for dnsserver in dnsservers:
        thread.start_new_thread(do_resolve, (query, dnsserver, timeout, queobj))
    for i in range(len(dnsservers)):
        try:
            # Fix: Queue.get()'s first positional argument is ``block``,
            # not ``timeout`` -- the original ``queobj.get(timeout)``
            # blocked forever, never raising Queue.Empty.
            result = queobj.get(timeout=timeout)
        except Queue.Empty:
            raise socket.gaierror(11004, 'getaddrinfo %r from %r failed' % (query, dnsservers))
        if result and not isinstance(result, Exception):
            return result
        elif i == len(dnsservers) - 1:
            logging.warning('dnslib_resolve_over_tcp %r with %s return %r', query, dnsservers, result)
    raise socket.gaierror(11004, 'getaddrinfo %r from %r failed' % (query, dnsservers))
class DNSServer(gevent.server.DatagramServer):
    """DNS Proxy based on gevent/dnslib"""

    def __init__(self, *args, **kwargs):
        # Pop our custom options before delegating to DatagramServer.
        dns_blacklist = kwargs.pop('dns_blacklist')
        dns_servers = kwargs.pop('dns_servers')
        dns_tcpover = kwargs.pop('dns_tcpover', [])
        dns_timeout = kwargs.pop('dns_timeout', 2)
        super(self.__class__, self).__init__(*args, **kwargs)
        self.dns_servers = list(dns_servers)
        # Domain suffixes whose lookups are forced over TCP.
        self.dns_tcpover = tuple(dns_tcpover)
        # Intranet servers handle unqualified (dot-less) hostnames.
        self.dns_intranet_servers = [x for x in self.dns_servers if is_local_addr(x)]
        self.dns_blacklist = set(dns_blacklist)
        self.dns_timeout = int(dns_timeout)
        self.dns_cache = ExpireCache(max_size=65536)
        # Servers trusted to return authoritative error (rcode) answers.
        self.dns_trust_servers = set(['8.8.8.8', '8.8.4.4', '2001:4860:4860::8888', '2001:4860:4860::8844'])
        for dirname in ('.', '/usr/share/GeoIP/', '/usr/local/share/GeoIP/'):
            filename = os.path.join(dirname, 'GeoIP.dat')
            if os.path.isfile(filename):
                # If a GeoIP database is available, also trust any
                # configured non-Chinese IPv4 server.
                geoip = pygeoip.GeoIP(filename)
                for dnsserver in self.dns_servers:
                    if ':' not in dnsserver and geoip.country_name_by_addr(parse_hostport(dnsserver, 53)[0]) not in ('China',):
                        self.dns_trust_servers.add(dnsserver)
                break

    def do_read(self):
        try:
            return gevent.server.DatagramServer.do_read(self)
        except socket.error as e:
            # Swallow transient client disconnect errors only.
            # NOTE(review): indexing the exception (e[0]) is Python 2 only.
            if e[0] not in (errno.ECONNABORTED, errno.ECONNRESET, errno.EPIPE):
                raise

    def get_reply_record(self, data):
        """Parse a raw DNS request and return the DNSRecord to answer with."""
        request = dnslib.DNSRecord.parse(data)
        qname = str(request.q.qname).lower()
        qtype = request.q.qtype
        dnsservers = self.dns_servers
        if qname.endswith('.in-addr.arpa'):
            # Reverse lookup: answer locally with the embedded address.
            ipaddr = '.'.join(reversed(qname[:-13].split('.')))
            record = dnslib.DNSRecord(header=dnslib.DNSHeader(id=request.header.id, qr=1,aa=1,ra=1), a=dnslib.RR(qname, rdata=dnslib.A(ipaddr)))
            return record
        if 'USERDNSDOMAIN' in os.environ:
            # Windows intranet suffix: strip it, and route bare hostnames
            # to the intranet DNS servers (re-appending the suffix).
            user_dnsdomain = '.' + os.environ['USERDNSDOMAIN'].lower()
            if qname.endswith(user_dnsdomain):
                qname = qname[:-len(user_dnsdomain)]
                if '.' not in qname:
                    if not self.dns_intranet_servers:
                        logging.warning('qname=%r is a plain hostname, need intranet dns server!!!', qname)
                        return dnslib.DNSRecord(header=dnslib.DNSHeader(id=request.header.id, rcode=3))
                    qname += user_dnsdomain
                    dnsservers = self.dns_intranet_servers
        try:
            return self.dns_cache.get((qname, qtype))
        except KeyError:
            pass
        try:
            dns_resolve = dnslib_resolve_over_tcp if qname.endswith(self.dns_tcpover) else dnslib_resolve_over_udp
            kwargs = {'blacklist': self.dns_blacklist, 'turstservers': self.dns_trust_servers}
            record = dns_resolve(request, dnsservers, self.dns_timeout, **kwargs)
            # Cache for twice the largest answer TTL (600s when no answers).
            ttl = max(x.ttl for x in record.rr) if record.rr else 600
            self.dns_cache.set((qname, qtype), record, ttl * 2)
            return record
        except socket.gaierror as e:
            logging.warning('resolve %r failed: %r', qname, e)
            # rcode=3 is NXDOMAIN.
            return dnslib.DNSRecord(header=dnslib.DNSHeader(id=request.header.id, rcode=3))

    def handle(self, data, address):
        logging.debug('receive from %r data=%r', address, data)
        record = self.get_reply_record(data)
        # Keep the client's 2-byte transaction id, replace the rest.
        return self.sendto(data[:2] + record.pack()[2:], address)
def test():
    """Run a standalone DNS proxy on UDP port 53 with a built-in
    poisoned-IP blacklist and a few TCP-only domain suffixes."""
    logging.basicConfig(level=logging.INFO, format='%(levelname)s - %(asctime)s %(message)s', datefmt='[%b %d %H:%M:%S]')
    dns_servers = ['114.114.114.114', '114.114.115.115', '8.8.8.8', '8.8.4.4']
    # Known poisoned/fake reply addresses.
    dns_blacklist = '1.1.1.1|255.255.255.255|74.125.127.102|74.125.155.102|74.125.39.102|74.125.39.113|209.85.229.138|4.36.66.178|8.7.198.45|37.61.54.158|46.82.174.68|59.24.3.173|64.33.88.161|64.33.99.47|64.66.163.251|65.104.202.252|65.160.219.113|66.45.252.237|72.14.205.104|72.14.205.99|78.16.49.15|93.46.8.89|128.121.126.139|159.106.121.75|169.132.13.103|192.67.198.6|202.106.1.2|202.181.7.85|203.161.230.171|203.98.7.65|207.12.88.98|208.56.31.43|209.145.54.50|209.220.30.174|209.36.73.33|209.85.229.138|211.94.66.147|213.169.251.35|216.221.188.182|216.234.179.13|243.185.187.3|243.185.187.39'.split('|')
    dns_tcpover = ['.youtube.com', '.googlevideo.com']
    logging.info('serving at port 53...')
    DNSServer(('', 53), dns_servers=dns_servers, dns_blacklist=dns_blacklist, dns_tcpover=dns_tcpover).serve_forever()

if __name__ == '__main__':
    # Run the standalone DNS proxy when executed directly.
    test()
|
gpl-2.0
|
rajul/tvb-framework
|
tvb/tests/framework/adapters/visualizers/ica_test.py
|
1
|
3631
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Bogdan Neacsa <bogdan.neacsa@codemart.ro>
"""
import unittest
from tvb.core.entities.file.files_helper import FilesHelper
from tvb.adapters.visualizers.ica import ICA
from tvb.datatypes.connectivity import Connectivity
from tvb.tests.framework.core.test_factory import TestFactory
from tvb.tests.framework.datatypes.datatypes_factory import DatatypesFactory
from tvb.tests.framework.core.base_testcase import TransactionalTestCase
class ICATest(TransactionalTestCase):
    """
    Unit-tests for ICA Viewer.
    """

    def setUp(self):
        """
        Sets up the environment for running the tests;
        creates a test user, a test project, a connectivity and a surface;
        imports a CFF data-set
        """
        self.datatypeFactory = DatatypesFactory()
        self.test_project = self.datatypeFactory.get_project()
        self.test_user = self.datatypeFactory.get_user()

        TestFactory.import_cff(test_user=self.test_user, test_project=self.test_project)
        self.connectivity = TestFactory.get_entity(self.test_project, Connectivity())
        # Sanity check: the CFF import must have produced a connectivity.
        self.assertTrue(self.connectivity is not None)

    def tearDown(self):
        """
        Clean-up tests data
        """
        FilesHelper().remove_project_structure(self.test_project.name)

    def test_launch(self):
        """
        Check that all required keys are present in output from BrainViewer launch.
        """
        time_series = self.datatypeFactory.create_timeseries(self.connectivity)
        conn_measure = self.datatypeFactory.create_ICA(time_series)
        viewer = ICA()
        result = viewer.launch(conn_measure)
        # Keys the ICA display template expects in the launch result.
        expected_keys = ['matrix_strides', 'matrix_shape', 'matrix_data', 'mainContent', 'isAdapter']
        for key in expected_keys:
            self.assertTrue(key in result)
def suite():
    """
    Gather all the tests in a test suite.
    """
    ica_suite = unittest.TestSuite()
    ica_suite.addTest(unittest.makeSuite(ICATest))
    return ica_suite
if __name__ == "__main__":
    #So you can run tests from this package individually.
    TEST_RUNNER = unittest.TextTestRunner()
    TEST_SUITE = suite()
    TEST_RUNNER.run(TEST_SUITE)
|
gpl-2.0
|
seeschloss/grammalecte
|
ContextMenu.py
|
1
|
6974
|
# -*- coding: utf8 -*-
# Grammalecte - Lexicographe
# by Olivier R. License: MPL 2
import uno
import unohelper
import traceback
from com.sun.star.task import XJob
from com.sun.star.ui import XContextMenuInterceptor
from com.sun.star.ui.ContextMenuInterceptorAction import IGNORED
from com.sun.star.ui.ContextMenuInterceptorAction import EXECUTE_MODIFIED
import grammalecte.fr.lexicographe as lxg
# Shared UNO/Grammalecte singletons, initialised lazily by JobExecutor.__init__
# and read by MyContextMenuInterceptor.
xDesktop = None
oDict = None
oLexicographe = None
def printServices (o):
    """Debug helper: print each UNO service name exposed by *o*."""
    for sServiceName in o.getAvailableServiceNames():
        print(' >' + sServiceName)
def getConfigSetting (sNodeConfig, bUpdate):
    """Return a configuration access object for the node *sNodeConfig*.

    When *bUpdate* is true an updatable access is created, otherwise a
    read-only one.
    Example: aSettings = getConfigSetting("/org.openoffice.Office.Common/Path/Current", false)
    """
    xServiceManager = uno.getComponentContext().ServiceManager
    xProvider = xServiceManager.createInstanceWithContext("com.sun.star.configuration.ConfigurationProvider", uno.getComponentContext())
    xNodePath = uno.createUnoStruct("com.sun.star.beans.PropertyValue")
    xNodePath.Name = "nodepath"
    xNodePath.Value = sNodeConfig
    sService = ("com.sun.star.configuration.ConfigurationUpdateAccess" if bUpdate
                else "com.sun.star.configuration.ConfigurationAccess")
    return xProvider.createInstanceWithArguments(sService, (xNodePath,))
class MyContextMenuInterceptor (XContextMenuInterceptor, unohelper.Base):
    """Writer context-menu interceptor: appends morphological information
    and conjugation links for the word under the cursor."""

    def __init__ (self, ctx):
        self.ctx = ctx

    def notifyContextMenuExecute (self, xEvent):
        """Called by the controller before a context menu is shown.

        Returns EXECUTE_MODIFIED when entries were added for the current
        word, IGNORED otherwise (including on any error)."""
        sWord = self._getWord()
        try:
            aItem, aVerb = oLexicographe.analyzeWord(sWord)
            if not aItem:
                #return uno.Enum("com.sun.star.ui.ContextMenuInterceptorAction", "IGNORED") # don’t work on AOO, have to import the value
                return IGNORED
            xContextMenu = xEvent.ActionTriggerContainer
            if xContextMenu:
                # entries index: append after the existing entries
                i = xContextMenu.Count
                nUnoConstantLine = uno.getConstantByName("com.sun.star.ui.ActionTriggerSeparatorType.LINE")
                i = self._addItemToContextMenu(xContextMenu, i, "ActionTriggerSeparator", SeparatorType=nUnoConstantLine)
                for item in aItem:
                    if isinstance(item, str):
                        # plain informational entry
                        i = self._addItemToContextMenu(xContextMenu, i, "ActionTrigger", Text=item)
                    elif isinstance(item, tuple):
                        sRoot, lMorph = item
                        # submenu listing the morphological analyses
                        xSubMenuContainer = xContextMenu.createInstance("com.sun.star.ui.ActionTriggerContainer")
                        for j, s in enumerate(lMorph):
                            self._addItemToContextMenu(xSubMenuContainer, j, "ActionTrigger", Text=s)
                        # create root menu entry
                        i = self._addItemToContextMenu(xContextMenu, i, "ActionTrigger", Text=sRoot, SubContainer=xSubMenuContainer)
                    else:
                        i = self._addItemToContextMenu(xContextMenu, i, "ActionTrigger", Text="# erreur : {}".format(item))
                # Links to Conjugueur
                if aVerb:
                    i = self._addItemToContextMenu(xContextMenu, i, "ActionTriggerSeparator", SeparatorType=nUnoConstantLine)
                    for sVerb in aVerb:
                        i = self._addItemToContextMenu(xContextMenu, i, "ActionTrigger", Text="Conjuguer “{}”…".format(sVerb),
                                                       CommandURL="service:net.grammalecte.AppLauncher?CJ/"+sVerb)
            # The controller should execute the modified context menu and stop notifying other interceptors.
            #return uno.Enum("com.sun.star.ui.ContextMenuInterceptorAction", "EXECUTE_MODIFIED") # don’t work on AOO, have to import the value
            return EXECUTE_MODIFIED
        except:
            traceback.print_exc()
            #return uno.Enum("com.sun.star.ui.ContextMenuInterceptorAction", "IGNORED") # don’t work on AOO, have to import the value
            return IGNORED

    def _addItemToContextMenu (self, xContextMenu, i, sType, **args):
        """Insert one entry of UNO type *sType* at index *i* with the given
        properties; returns the next free index (i + 1)."""
        xMenuItem = xContextMenu.createInstance("com.sun.star.ui."+sType)
        for k, v in args.items():
            xMenuItem.setPropertyValue(k, v)
        xContextMenu.insertByIndex(i, xMenuItem)
        return i + 1

    def _getWord (self):
        """Return the word under the view cursor (trailing dots stripped),
        or "" when the text is not French or on any failure."""
        try:
            xDoc = xDesktop.getCurrentComponent()
            xViewCursor = xDoc.CurrentController.ViewCursor
            if xViewCursor.CharLocale.Language != "fr":
                return ""
            xText = xViewCursor.Text
            xCursor = xText.createTextCursorByRange(xViewCursor)
            xCursor.gotoStartOfWord(False)
            xCursor.gotoEndOfWord(True)
        except:
            traceback.print_exc()
            # BUG FIX: bail out here — previously execution fell through to
            # the return below, raising UnboundLocalError whenever the
            # exception occurred before xCursor was bound.
            return ""
        return xCursor.String.strip('.')
class JobExecutor (XJob, unohelper.Base):
    """UNO job triggered on document events: lazily builds the shared
    desktop / dictionary / lexicographe singletons, then registers the
    context-menu interceptor on Writer documents."""
    def __init__ (self, ctx):
        self.ctx = ctx
        global xDesktop
        global oDict
        global oLexicographe
        # Create each shared singleton only once per office session.
        if not xDesktop:
            xDesktop = self.ctx.getServiceManager().createInstanceWithContext('com.sun.star.frame.Desktop', self.ctx)
        if not oDict:
            xCurCtx = uno.getComponentContext()
            # The Hunspell-style dictionary is obtained from the Grammalecte
            # Lightproof component already registered with the office.
            oGC = xCurCtx.ServiceManager.createInstanceWithContext("org.openoffice.comp.pyuno.Lightproof.grammalecte", xCurCtx)
            oDict = oGC.getDictionary()
        if not oLexicographe:
            oLexicographe = lxg.Lexicographe(oDict)
    def execute (self, args):
        """XJob entry point: on a DOCUMENTEVENT for a text document, attach
        MyContextMenuInterceptor to its current controller.

        Does nothing on OpenOffice.org or LibreOffice versions below 4."""
        if not args:
            return
        # what version of the software?
        xSettings = getConfigSetting("org.openoffice.Setup/Product", False)
        sProdName = xSettings.getByName("ooName")
        sVersion = xSettings.getByName("ooSetupVersion")
        # NOTE(review): lexicographic string comparison — assumes single-digit
        # major versions sort correctly; verify for "10"+ style versions.
        if (sProdName == "LibreOffice" and sVersion < "4") or sProdName == "OpenOffice.org":
            return
        # what event? Scan the Environment named values for the event type
        # and the document model.
        bCorrectEvent = False
        for arg in args:
            if arg.Name == "Environment":
                for v in arg.Value:
                    if v.Name == "EnvType" and v.Value == "DOCUMENTEVENT":
                        bCorrectEvent = True
                    elif v.Name == "EventName":
                        pass
                        # check is correct event
                        #print "Event: %s" % v.Value
                    elif v.Name == "Model":
                        model = v.Value
        if bCorrectEvent:
            # Only Writer (text) documents get the lexicographic menu.
            if model.supportsService("com.sun.star.text.TextDocument"):
                xController = model.getCurrentController()
                if xController:
                    xController.registerContextMenuInterceptor(MyContextMenuInterceptor(self.ctx))
# Register JobExecutor as a UNO component so the office can instantiate it
# under the "grammalecte.ContextMenuHandler" service name.
g_ImplementationHelper = unohelper.ImplementationHelper()
g_ImplementationHelper.addImplementation(JobExecutor, "grammalecte.ContextMenuHandler", ("grammalecte.ContextMenuHandler",),)
|
gpl-3.0
|
fullscale/pypes
|
ui/pypesvds/tests/functional/test_filters.py
|
4
|
1351
|
from pypesvds.tests import *
class TestFiltersController(TestController):
    """Route smoke tests for the RESTful 'filters' controller: each action
    is invoked once to check that the mapping responds; response bodies are
    not inspected."""

    def test_index(self):
        resp = self.app.get(url('filters'))
        # route resolution only; content untested

    def test_index_as_xml(self):
        resp = self.app.get(url('formatted_filters', format='xml'))

    def test_create(self):
        resp = self.app.post(url('filters'))

    def test_new(self):
        resp = self.app.get(url('new_filter'))

    def test_new_as_xml(self):
        resp = self.app.get(url('formatted_new_filter', format='xml'))

    def test_update(self):
        resp = self.app.put(url('filter', id=1))

    def test_update_browser_fakeout(self):
        # Browsers cannot issue PUT; emulate it via a POST override param.
        resp = self.app.post(url('filter', id=1), params={'_method': 'put'})

    def test_delete(self):
        resp = self.app.delete(url('filter', id=1))

    def test_delete_browser_fakeout(self):
        # Browsers cannot issue DELETE; emulate it via a POST override param.
        resp = self.app.post(url('filter', id=1), params={'_method': 'delete'})

    def test_show(self):
        resp = self.app.get(url('filter', id=1))

    def test_show_as_xml(self):
        resp = self.app.get(url('formatted_filter', id=1, format='xml'))

    def test_edit(self):
        resp = self.app.get(url('edit_filter', id=1))

    def test_edit_as_xml(self):
        resp = self.app.get(url('formatted_edit_filter', id=1, format='xml'))
|
apache-2.0
|
JKarathiya/Lean
|
Algorithm.Python/IndicatorWarmupAlgorithm.py
|
3
|
6802
|
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Indicators")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import *
from QuantConnect.Data import *
from QuantConnect.Indicators import *
from QuantConnect.Orders import *
from QuantConnect.Securities import *
### <summary>
### Regression test for history and warm up using the data available in open source.
### </summary>
### <meta name="tag" content="history and warm up" />
### <meta name="tag" content="history" />
### <meta name="tag" content="regression test" />
### <meta name="tag" content="warm up" />
class IndicatorWarmupAlgorithm(QCAlgorithm):
    # Regression algorithm: spools per-symbol indicators through SetWarmup()
    # and only starts trading once IsWarmingUp turns False.
    def Initialize(self):
        '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must initialized.'''
        self.SetStartDate(2013, 10, 8)   #Set Start Date
        self.SetEndDate(2013, 10, 11)    #Set End Date
        self.SetCash(1000000)            #Set Strategy Cash
        # Find more symbols here: http://quantconnect.com/data
        self.AddEquity("SPY")
        self.AddEquity("IBM")
        self.AddEquity("BAC")
        self.AddEquity("GOOG", Resolution.Daily)
        self.AddEquity("GOOGL", Resolution.Daily)
        # One SymbolData bundle (indicators + entry/exit logic) per security.
        self.__sd = { }
        for security in self.Securities:
            self.__sd[security.Key] = self.SymbolData(security.Key, self)
        # we want to warm up our algorithm
        self.SetWarmup(self.SymbolData.RequiredBarsWarmup)
    def OnData(self, data):
        '''OnData event is the primary entry point for your algorithm. Each new data point will be pumped in here.
        Arguments:
            data: Slice object keyed by symbol containing the stock data
        '''
        # we are only using warmup for indicator spooling, so wait for us to be warm then continue
        if self.IsWarmingUp: return
        for sd in self.__sd.values():
            lastPriceTime = sd.Close.Current.Time
            # only act on bar boundaries (midnight for daily subscriptions)
            if self.RoundDown(lastPriceTime, sd.Security.SubscriptionDataConfig.Increment):
                sd.Update()
    def OnOrderEvent(self, fill):
        # Forward the fill to the SymbolData that owns this symbol, if any.
        sd = self.__sd.get(fill.Symbol, None)
        if sd is not None:
            sd.OnOrderEvent(fill)
    def RoundDown(self, time, increment):
        # True when *time* lies exactly on an *increment* boundary:
        # midnight for day-sized increments, a whole minute otherwise.
        if increment.days != 0:
            return time.hour == 0 and time.minute == 0 and time.second == 0
        else:
            return time.second == 0
    class SymbolData:
        # Tunables shared by all instances.
        RequiredBarsWarmup = 40        # bars to spool before trading
        PercentTolerance = 0.001       # trend-detection tolerance band
        PercentGlobalStopLoss = 0.01   # stop-loss distance from fill price
        LotSize = 10                   # order size in shares
        def __init__(self, symbol, algorithm):
            self.Symbol = symbol
            self.__algorithm = algorithm       # back-reference used to place orders
            self.__currentStopLoss = None      # ticket of the active stop-loss order
            self.Security = algorithm.Securities[symbol]
            # Indicators driving the trend detection.
            self.Close = algorithm.Identity(symbol)
            self.ADX = algorithm.ADX(symbol, 14)
            self.EMA = algorithm.EMA(symbol, 14)
            self.MACD = algorithm.MACD(symbol, 12, 26, 9)
            self.IsReady = self.Close.IsReady and self.ADX.IsReady and self.EMA.IsReady and self.MACD.IsReady
            self.IsUptrend = False
            self.IsDowntrend = False
        def Update(self):
            # Refresh readiness and trend flags, then try to trade.
            self.IsReady = self.Close.IsReady and self.ADX.IsReady and self.EMA.IsReady and self.MACD.IsReady
            tolerance = 1 - self.PercentTolerance
            self.IsUptrend = self.MACD.Signal.Current.Value > self.MACD.Current.Value * tolerance and\
                self.EMA.Current.Value > self.Close.Current.Value * tolerance
            self.IsDowntrend = self.MACD.Signal.Current.Value < self.MACD.Current.Value * tolerance and\
                self.EMA.Current.Value < self.Close.Current.Value * tolerance
            self.TryEnter()
            self.TryExit()
        def TryEnter(self):
            # can't enter if we're already in
            if self.Security.Invested: return False
            qty = 0
            limit = 0.0
            if self.IsUptrend:
                # go long one lot at the bar's low
                qty = self.LotSize
                limit = self.Security.Low
            elif self.IsDowntrend:
                # go short one lot at the bar's high
                qty = -self.LotSize
                limit = self.Security.High
            if qty != 0:
                ticket = self.__algorithm.LimitOrder(self.Symbol, qty, limit, "TryEnter at: {0}".format(limit))
        def TryExit(self):
            # can't exit if we haven't entered
            if not self.Security.Invested: return
            limit = 0
            qty = self.Security.Holdings.Quantity
            exitTolerance = 1 + 2 * self.PercentTolerance
            # Exit when price crosses back through the EMA (with tolerance).
            if self.Security.Holdings.IsLong and self.Close.Current.Value * exitTolerance < self.EMA.Current.Value:
                limit = self.Security.High
            elif self.Security.Holdings.IsShort and self.Close.Current.Value > self.EMA.Current.Value * exitTolerance:
                limit = self.Security.Low
            if limit != 0:
                ticket = self.__algorithm.LimitOrder(self.Symbol, -qty, limit, "TryExit at: {0}".format(limit))
        def OnOrderEvent(self, fill):
            if fill.Status != OrderStatus.Filled: return
            qty = self.Security.Holdings.Quantity
            # if we just finished entering, place a stop loss as well
            if self.Security.Invested:
                stop = fill.FillPrice*(1 - self.PercentGlobalStopLoss) if self.Security.Holdings.IsLong \
                    else fill.FillPrice*(1 + self.PercentGlobalStopLoss)
                self.__currentStopLoss = self.__algorithm.StopMarketOrder(self.Symbol, -qty, stop, "StopLoss at: {0}".format(stop))
            # check for an exit, cancel the stop loss
            elif (self.__currentStopLoss is not None and self.__currentStopLoss.Status is not OrderStatus.Filled):
                # cancel our current stop loss
                self.__currentStopLoss.Cancel("Exited position")
                self.__currentStopLoss = None
|
apache-2.0
|
mark-me/Pi-Jukebox
|
venv/Lib/site-packages/pygame/ftfont.py
|
1
|
6239
|
"""pygame module for loading and rendering fonts (freetype alternative)"""
__all__ = ['Font', 'init', 'quit', 'get_default_font', 'get_init', 'SysFont']
from pygame._freetype import init, Font as _Font, get_default_resolution
from pygame._freetype import quit, get_default_font, get_init as _get_init
from pygame._freetype import __PYGAMEinit__
from pygame.sysfont import match_font, get_fonts, SysFont as _SysFont
from pygame import encode_file_path
from pygame.compat import bytes_, unicode_, as_unicode, as_bytes
from pygame import Surface as _Surface, Color as _Color, SRCALPHA as _SRCALPHA
class Font(_Font):
    """Font(filename, size) -> Font
    Font(object, size) -> Font
    create a new Font object from a file (freetype alternative)

    This Font type differs from font.Font in that it can render glyphs
    for Unicode code points in the supplementary planes (> 0xFFFF).
    """

    # Private aliases so instances keep working after the module-level
    # helper names are deleted at the bottom of this module.
    __encode_file_path = staticmethod(encode_file_path)
    __get_default_resolution = staticmethod(get_default_resolution)
    __default_font = encode_file_path(get_default_font())

    # Null characters are rejected by render(); precomputed in both flavours.
    __unull = as_unicode(r"\x00")
    __bnull = as_bytes("\x00")

    def __init__(self, file, size=-1):
        """Load a font face from *file* (path, file object, or None for the
        builtin default font) at point *size* (clamped to at least 1)."""
        if size <= 1:
            size = 1
        if isinstance(file, unicode_):
            try:
                bfile = self.__encode_file_path(file, ValueError)
            except ValueError:
                bfile = ''
        else:
            bfile = file
        # Requests for the default font by path are normalized to None.
        if isinstance(bfile, bytes_) and bfile == self.__default_font:
            file = None
        if file is None:
            # Scale the default resolution to mimic pygame.font sizing.
            resolution = int(self.__get_default_resolution() * 0.6875)
            if resolution == 0:
                # BUG FIX: this branch previously did `kwds['resolution'] = 1`
                # on an undefined name `kwds`, raising NameError whenever the
                # scaled resolution rounded down to zero; clamp instead.
                resolution = 1
        else:
            resolution = 0
        super(Font, self).__init__(file, size=size, resolution=resolution)
        # Defaults chosen to mimic classic pygame.font.Font rendering.
        self.strength = 1.0 / 12.0
        self.kerning = False
        self.origin = True
        self.pad = True
        self.ucs4 = True
        self.underline_adjustment = 1.0

    def render(self, text, antialias, color, background=None):
        """render(text, antialias, color, background=None) -> Surface
        draw text on a new Surface"""
        if text is None:
            text = ""
        # Embedded NUL characters are rejected, matching pygame.font.
        if (isinstance(text, unicode_) and  # conditional and
            self.__unull in text):
            raise ValueError("A null character was found in the text")
        if (isinstance(text, bytes_) and  # conditional and
            self.__bnull in text):
            raise ValueError("A null character was found in the text")
        save_antialiased = self.antialiased
        self.antialiased = bool(antialias)
        try:
            s, r = super(Font, self).render(text, color, background)
            return s
        finally:
            # Always restore the previous antialiasing state.
            self.antialiased = save_antialiased

    def set_bold(self, value):
        """set_bold(bool) -> None
        enable fake rendering of bold text"""
        self.wide = bool(value)

    def get_bold(self):
        """get_bold() -> bool
        check if text will be rendered bold"""
        return self.wide

    def set_italic(self, value):
        """set_italic(bool) -> None
        enable fake rendering of italic text"""
        self.oblique = bool(value)

    def get_italic(self):
        """get_italic() -> bool
        check if the text will be rendered italic"""
        return self.oblique

    def set_underline(self, value):
        """set_underline(bool) -> None
        control if text is rendered with an underline"""
        self.underline = bool(value)

    def get_underline(self):
        """get_underline() -> bool
        check if text will be rendered with an underline"""
        # DOC FIX: docstring previously duplicated set_bold's.
        return self.underline

    def metrics(self, text):
        """metrics(text) -> list
        Gets the metrics for each character in the passed string."""
        return self.get_metrics(text)

    def get_ascent(self):
        """get_ascent() -> int
        get the ascent of the font"""
        return self.get_sized_ascender()

    def get_descent(self):
        """get_descent() -> int
        get the descent of the font"""
        return self.get_sized_descender()

    def get_height(self):
        """get_height() -> int
        get the height of the font"""
        return self.get_sized_ascender() - self.get_sized_descender() + 1

    def get_linesize(self):
        """get_linesize() -> int
        get the line space of the font text"""
        return self.get_sized_height()

    def size(self, text):
        """size(text) -> (width, height)
        determine the amount of space needed to render text"""
        return self.get_rect(text).size
FontType = Font  # alias kept for API parity with pygame.font.FontType
def get_init():
    """get_init() -> bool
    true if the font module is initialized"""
    # Thin wrapper around the aliased pygame._freetype.get_init.
    return _get_init()
def SysFont(name, size, bold=0, italic=0, constructor=None):
    """pygame.ftfont.SysFont(name, size, bold=False, italic=False, constructor=None) -> Font

    Create a Font from the system font resources (freetype alternative).

    The system fonts are searched for *name*, which may be a comma
    separated list of names tried in order; a small set of common aliases
    is understood, so a reasonable alternative may be substituted when the
    exact face is missing. Bold and italic styles are selected when a
    matching system face exists. A valid Font object is always returned,
    falling back on the builtin pygame font when nothing matches.

    If *constructor* is given it must be a callable with signature
    constructor(fontpath, size, bold, italic) returning a Font instance;
    when None, a pygame.ftfont.Font object is created.
    """
    if constructor is None:
        def _default_constructor(fontpath, size, bold, italic):
            font = Font(fontpath, size)
            font.set_bold(bold)
            font.set_italic(italic)
            return font
        constructor = _default_constructor
    return _SysFont(name, size, bold, italic, constructor)
# Drop the private helper names from the public module namespace.
del _Font, get_default_resolution, encode_file_path, as_unicode, as_bytes
|
agpl-3.0
|
Vivek-anand-jain/Implementation-of-BLUE-in-ns-3
|
src/traffic-control/bindings/modulegen__gcc_LP64.py
|
38
|
388133
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Lenient pybindgen error handler: report wrapper-generation failures
    as warnings and keep generating the remaining wrappers."""

    def handle_error(self, wrapper, exception, traceback_):
        message = "exception %r in wrapper %s" % (exception, wrapper)
        warnings.warn(message)
        # Returning True tells pybindgen the error was handled.
        return True
# Install the lenient handler globally before any wrapper generation runs.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create the root pybindgen Module for ns.traffic_control
    (C++ namespace ::ns3)."""
    return Module('ns.traffic_control', cpp_namespace='::ns3')
def register_types(module):
    """Register every wrapped C++ type with *module* for the traffic-control bindings.

    NOTE(review): this file appears to be pybindgen auto-generated output
    (modulegen); registrations below are left byte-identical, since later
    entries reference earlier ones through ``root_module[...]`` lookups and
    the exact order is significant.
    """
    root_module = module.get_root()
    ## address.h (module 'network'): ns3::Address [class]
    module.add_class('Address', import_from_module='ns.network')
    ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
    module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    ## buffer.h (module 'network'): ns3::Buffer [class]
    module.add_class('Buffer', import_from_module='ns.network')
    ## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
    ## packet.h (module 'network'): ns3::ByteTagIterator [class]
    module.add_class('ByteTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
    module.add_class('ByteTagList', import_from_module='ns.network')
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## data-rate.h (module 'network'): ns3::DataRate [class]
    module.add_class('DataRate', import_from_module='ns.network')
    ## event-id.h (module 'core'): ns3::EventId [class]
    module.add_class('EventId', import_from_module='ns.core')
    ## hash.h (module 'core'): ns3::Hasher [class]
    module.add_class('Hasher', import_from_module='ns.core')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    module.add_class('Ipv4Address', import_from_module='ns.network')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
    module.add_class('Ipv4Mask', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    module.add_class('Ipv6Address', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
    module.add_class('Ipv6Prefix', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    module.add_class('Mac48Address', import_from_module='ns.network')
    ## mac48-address.h (module 'network'): ns3::Mac48Address [class]
    root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
    module.add_class('NetDeviceContainer', import_from_module='ns.network')
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    ## object-factory.h (module 'core'): ns3::ObjectFactory [class]
    module.add_class('ObjectFactory', import_from_module='ns.core')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
    module.add_class('PacketMetadata', import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
    module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
    module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet.h (module 'network'): ns3::PacketTagIterator [class]
    module.add_class('PacketTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
    module.add_class('PacketTagList', import_from_module='ns.network')
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
    module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData_e [enumeration]
    module.add_enum('TagData_e', ['MAX_SIZE'], outer_class=root_module['ns3::PacketTagList::TagData'], import_from_module='ns.network')
    ## queue-disc-container.h (module 'traffic-control'): ns3::QueueDiscContainer [class]
    module.add_class('QueueDiscContainer')
    ## traffic-control-helper.h (module 'traffic-control'): ns3::QueueDiscFactory [class]
    module.add_class('QueueDiscFactory')
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simulator.h (module 'core'): ns3::Simulator [class]
    module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
    ## tag.h (module 'network'): ns3::Tag [class]
    module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
    module.add_class('TagBuffer', import_from_module='ns.network')
    ## nstime.h (module 'core'): ns3::TimeWithUnit [class]
    module.add_class('TimeWithUnit', import_from_module='ns.core')
    ## traced-value.h (module 'core'): ns3::TracedValue<bool> [class]
    module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['bool'])
    ## traced-value.h (module 'core'): ns3::TracedValue<unsigned int> [class]
    module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['unsigned int'])
    ## traffic-control-helper.h (module 'traffic-control'): ns3::TrafficControlHelper [class]
    module.add_class('TrafficControlHelper')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
    module.add_class('int64x64_t', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t::impl_type [enumeration]
    module.add_enum('impl_type', ['int128_impl', 'cairo_impl', 'ld_impl'], outer_class=root_module['ns3::int64x64_t'], import_from_module='ns.core')
    ## chunk.h (module 'network'): ns3::Chunk [class]
    module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## header.h (module 'network'): ns3::Header [class]
    module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    ## packet-filter.h (module 'traffic-control'): ns3::PacketFilter [class]
    module.add_class('PacketFilter', parent=root_module['ns3::Object'])
    ## queue.h (module 'network'): ns3::Queue [class]
    module.add_class('Queue', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## queue.h (module 'network'): ns3::Queue::QueueMode [enumeration]
    module.add_enum('QueueMode', ['QUEUE_MODE_PACKETS', 'QUEUE_MODE_BYTES'], outer_class=root_module['ns3::Queue'], import_from_module='ns.network')
    ## queue-disc.h (module 'traffic-control'): ns3::QueueDisc [class]
    module.add_class('QueueDisc', parent=root_module['ns3::Object'])
    ## queue-disc.h (module 'traffic-control'): ns3::QueueDisc::WakeMode [enumeration]
    module.add_enum('WakeMode', ['WAKE_ROOT', 'WAKE_CHILD'], outer_class=root_module['ns3::QueueDisc'])
    ## queue-disc.h (module 'traffic-control'): ns3::QueueDiscClass [class]
    module.add_class('QueueDiscClass', parent=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class]
    module.add_class('RandomVariableStream', import_from_module='ns.core', parent=root_module['ns3::Object'])
    ## red-queue-disc.h (module 'traffic-control'): ns3::RedQueueDisc [class]
    module.add_class('RedQueueDisc', parent=root_module['ns3::QueueDisc'])
    ## red-queue-disc.h (module 'traffic-control'): ns3::RedQueueDisc [enumeration]
    module.add_enum('', ['DTYPE_NONE', 'DTYPE_FORCED', 'DTYPE_UNFORCED'], outer_class=root_module['ns3::RedQueueDisc'])
    ## red-queue-disc.h (module 'traffic-control'): ns3::RedQueueDisc::Stats [struct]
    module.add_class('Stats', outer_class=root_module['ns3::RedQueueDisc'])
    ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class]
    module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NetDeviceQueue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NetDeviceQueue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::QueueItem', 'ns3::empty', 'ns3::DefaultDeleter<ns3::QueueItem>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## nstime.h (module 'core'): ns3::Time [class]
    module.add_class('Time', import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
    module.add_enum('Unit', ['Y', 'D', 'H', 'MIN', 'S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time [class]
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## traced-value.h (module 'core'): ns3::TracedValue<ns3::Time> [class]
    module.add_class('TracedValue', import_from_module='ns.core', template_parameters=['ns3::Time'])
    ## traced-value.h (module 'core'): ns3::TracedValue<ns3::Time> [class]
    root_module['ns3::TracedValue< ns3::Time >'].implicitly_converts_to(root_module['ns3::Time'])
    ## traffic-control-layer.h (module 'traffic-control'): ns3::TrafficControlLayer [class]
    module.add_class('TrafficControlLayer', parent=root_module['ns3::Object'])
    ## trailer.h (module 'network'): ns3::Trailer [class]
    module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class]
    module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class]
    module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class]
    module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable [class]
    module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class]
    module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## boolean.h (module 'core'): ns3::BooleanChecker [class]
    module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## boolean.h (module 'core'): ns3::BooleanValue [class]
    module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## codel-queue-disc.h (module 'traffic-control'): ns3::CoDelQueueDisc [class]
    module.add_class('CoDelQueueDisc', parent=root_module['ns3::QueueDisc'])
    ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable [class]
    module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## data-rate.h (module 'network'): ns3::DataRateChecker [class]
    module.add_class('DataRateChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## data-rate.h (module 'network'): ns3::DataRateValue [class]
    module.add_class('DataRateValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class]
    module.add_class('DeterministicRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## double.h (module 'core'): ns3::DoubleValue [class]
    module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class]
    module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## enum.h (module 'core'): ns3::EnumChecker [class]
    module.add_class('EnumChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## enum.h (module 'core'): ns3::EnumValue [class]
    module.add_class('EnumValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class]
    module.add_class('ErlangRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## event-impl.h (module 'core'): ns3::EventImpl [class]
    module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class]
    module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class]
    module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## integer.h (module 'core'): ns3::IntegerValue [class]
    module.add_class('IntegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
    module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
    module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
    module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
    module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
    module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
    module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
    module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
    module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class]
    module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
    module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
    module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## net-device.h (module 'network'): ns3::NetDevice [class]
    module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
    module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
    ## net-device.h (module 'network'): ns3::NetDeviceQueue [class]
    module.add_class('NetDeviceQueue', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >'])
    ## net-device.h (module 'network'): ns3::NetDeviceQueueInterface [class]
    module.add_class('NetDeviceQueueInterface', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## nix-vector.h (module 'network'): ns3::NixVector [class]
    module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    ## node.h (module 'network'): ns3::Node [class]
    module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class]
    module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
    module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
    module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## packet.h (module 'network'): ns3::Packet [class]
    module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class]
    module.add_class('ParetoRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): ns3::PfifoFastQueueDisc [class]
    module.add_class('PfifoFastQueueDisc', parent=root_module['ns3::QueueDisc'])
    ## net-device.h (module 'network'): ns3::QueueItem [class]
    module.add_class('QueueItem', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    ## string.h (module 'core'): ns3::StringChecker [class]
    module.add_class('StringChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## string.h (module 'core'): ns3::StringValue [class]
    module.add_class('StringValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## uinteger.h (module 'core'): ns3::UintegerValue [class]
    module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## address.h (module 'network'): ns3::AddressChecker [class]
    module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## address.h (module 'network'): ns3::AddressValue [class]
    module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## queue-disc.h (module 'traffic-control'): ns3::QueueDiscItem [class]
    module.add_class('QueueDiscItem', parent=root_module['ns3::QueueItem'])
    module.add_container('std::vector< ns3::Ptr< ns3::QueueDisc > >', 'ns3::Ptr< ns3::QueueDisc >', container_type=u'vector')
    module.add_container('std::vector< short unsigned int >', 'short unsigned int', container_type=u'vector')
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
    ## Register a nested module for the namespace TracedValueCallback
    nested_module = module.add_cpp_namespace('TracedValueCallback')
    register_types_ns3_TracedValueCallback(nested_module)
    ## Register a nested module for the namespace internal
    nested_module = module.add_cpp_namespace('internal')
    register_types_ns3_internal(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types for the ns3::FatalImpl namespace (currently none)."""
    root_module = module.get_root()
def register_types_ns3_Hash(module):
    """Register types and function-pointer aliases for the ns3::Hash namespace."""
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    # Aliases for the 32/64-bit hash function pointer typedefs (value, pointer
    # and reference forms, as pybindgen requires each spelling separately).
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash32Function_ptr&')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *', u'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) **', u'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, size_t ) *&', u'ns3::Hash::Hash64Function_ptr&')
    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
    """Register the concrete hash implementations in ns3::Hash::Function."""
    root_module = module.get_root()
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
    module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
    module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
    module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
    module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_types_ns3_TracedValueCallback(module):
    """Register the ns3::TracedValueCallback function-pointer aliases."""
    root_module = module.get_root()
    # Each traced-value callback type gets three aliases: the function
    # pointer itself, a pointer to it ('*') and a reference to it ('&').
    # The (C type, alias) pairs are listed in the generator's original order.
    for ctype, alias in (('bool', 'Bool'),
                         ('double', 'Double'),
                         ('uint8_t', 'Uint8'),
                         ('uint32_t', 'Uint32'),
                         ('ns3::Time', 'Time'),
                         ('int16_t', 'Int16'),
                         ('int32_t', 'Int32'),
                         ('int8_t', 'Int8'),
                         ('uint16_t', 'Uint16')):
        fnptr_type = u'void ( * ) ( %s, %s ) *' % (ctype, ctype)
        alias_name = u'ns3::TracedValueCallback::%s' % alias
        for type_suffix, alias_suffix in (('', ''), ('*', '*'), ('&', '&')):
            typehandlers.add_type_alias(fnptr_type + type_suffix, alias_name + alias_suffix)
def register_types_ns3_internal(module):
    """Register types for the ns3::internal namespace (no types to add)."""
    # Generator emits the get_root() call even though no types follow; keep it.
    module.get_root()
def register_methods(root_module):
    """Register the method bindings for every class known to the module.

    Dispatches to one ``register_*_methods`` helper per class, passing the
    class wrapper previously created during type registration.

    NOTE(review): this function is PyBindGen generator output; the call
    order mirrors what the generator emitted — confirm with the generator
    before reordering any of these calls.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3DataRate_methods(root_module, root_module['ns3::DataRate'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3QueueDiscContainer_methods(root_module, root_module['ns3::QueueDiscContainer'])
    register_Ns3QueueDiscFactory_methods(root_module, root_module['ns3::QueueDiscFactory'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TimeWithUnit_methods(root_module, root_module['ns3::TimeWithUnit'])
    register_Ns3TracedValue__Bool_methods(root_module, root_module['ns3::TracedValue< bool >'])
    register_Ns3TracedValue__Unsigned_int_methods(root_module, root_module['ns3::TracedValue< unsigned int >'])
    register_Ns3TrafficControlHelper_methods(root_module, root_module['ns3::TrafficControlHelper'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3PacketFilter_methods(root_module, root_module['ns3::PacketFilter'])
    register_Ns3Queue_methods(root_module, root_module['ns3::Queue'])
    register_Ns3QueueDisc_methods(root_module, root_module['ns3::QueueDisc'])
    register_Ns3QueueDiscClass_methods(root_module, root_module['ns3::QueueDiscClass'])
    register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream'])
    register_Ns3RedQueueDisc_methods(root_module, root_module['ns3::RedQueueDisc'])
    register_Ns3RedQueueDiscStats_methods(root_module, root_module['ns3::RedQueueDisc::Stats'])
    register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3NetDeviceQueue_Ns3Empty_Ns3DefaultDeleter__lt__ns3NetDeviceQueue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter<ns3::NetDeviceQueue> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter<ns3::QueueItem> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3TracedValue__Ns3Time_methods(root_module, root_module['ns3::TracedValue< ns3::Time >'])
    register_Ns3TrafficControlLayer_methods(root_module, root_module['ns3::TrafficControlLayer'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable'])
    register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable'])
    register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable'])
    register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable'])
    register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3BooleanChecker_methods(root_module, root_module['ns3::BooleanChecker'])
    register_Ns3BooleanValue_methods(root_module, root_module['ns3::BooleanValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3CoDelQueueDisc_methods(root_module, root_module['ns3::CoDelQueueDisc'])
    register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable'])
    register_Ns3DataRateChecker_methods(root_module, root_module['ns3::DataRateChecker'])
    register_Ns3DataRateValue_methods(root_module, root_module['ns3::DataRateValue'])
    register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable'])
    register_Ns3DoubleValue_methods(root_module, root_module['ns3::DoubleValue'])
    register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3EnumChecker_methods(root_module, root_module['ns3::EnumChecker'])
    register_Ns3EnumValue_methods(root_module, root_module['ns3::EnumValue'])
    register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable'])
    register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable'])
    register_Ns3IntegerValue_methods(root_module, root_module['ns3::IntegerValue'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NetDeviceQueue_methods(root_module, root_module['ns3::NetDeviceQueue'])
    register_Ns3NetDeviceQueueInterface_methods(root_module, root_module['ns3::NetDeviceQueueInterface'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
    register_Ns3PfifoFastQueueDisc_methods(root_module, root_module['ns3::PfifoFastQueueDisc'])
    register_Ns3QueueItem_methods(root_module, root_module['ns3::QueueItem'])
    register_Ns3StringChecker_methods(root_module, root_module['ns3::StringChecker'])
    register_Ns3StringValue_methods(root_module, root_module['ns3::StringValue'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3UintegerValue_methods(root_module, root_module['ns3::UintegerValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3QueueDiscItem_methods(root_module, root_module['ns3::QueueDiscItem'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Bind ns3::Address (address.h, module 'network')."""
    # Operators, in the generator's original order.
    for op in ('<', '!='):
        cls.add_binary_comparison_operator(op)
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, (type, buffer, len), and copy.
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'type'),
                         param('uint8_t const *', 'buffer'),
                         param('uint8_t', 'len')])
    cls.add_constructor([param('ns3::Address const &', 'address')])
    # Member functions as (name, return type, ((arg type, arg name), ...),
    # extra add_method kwargs), in the generator's original order.
    address_methods = (
        ('CheckCompatible', 'bool', (('uint8_t', 'type'), ('uint8_t', 'len')), {'is_const': True}),
        ('CopyAllFrom', 'uint32_t', (('uint8_t const *', 'buffer'), ('uint8_t', 'len')), {}),
        ('CopyAllTo', 'uint32_t', (('uint8_t *', 'buffer'), ('uint8_t', 'len')), {'is_const': True}),
        ('CopyFrom', 'uint32_t', (('uint8_t const *', 'buffer'), ('uint8_t', 'len')), {}),
        ('CopyTo', 'uint32_t', (('uint8_t *', 'buffer'),), {'is_const': True}),
        ('Deserialize', 'void', (('ns3::TagBuffer', 'buffer'),), {}),
        ('GetLength', 'uint8_t', (), {'is_const': True}),
        ('GetSerializedSize', 'uint32_t', (), {'is_const': True}),
        ('IsInvalid', 'bool', (), {'is_const': True}),
        ('IsMatchingType', 'bool', (('uint8_t', 'type'),), {'is_const': True}),
        ('Register', 'uint8_t', (), {'is_static': True}),
        ('Serialize', 'void', (('ns3::TagBuffer', 'buffer'),), {'is_const': True}),
    )
    for name, ret, args, extra in address_methods:
        cls.add_method(name, ret, [param(t, n) for t, n in args], **extra)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Bind ns3::AttributeConstructionList (attribute-construction-list.h, module 'core')."""
    # Constructors: copy first (as emitted by the generator), then default.
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    cls.add_constructor([])
    # void Add(std::string, Ptr<const AttributeChecker>, Ptr<AttributeValue>)
    cls.add_method('Add', 'void',
                   [param('std::string', 'name'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'),
                    param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    # Begin()/End() both return a const list iterator over Item entries.
    for iterator_method in ('Begin', 'End'):
        cls.add_method(iterator_method,
                       'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
                       [],
                       is_const=True)
    # Ptr<AttributeValue> Find(Ptr<const AttributeChecker>) const
    cls.add_method('Find', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Bind ns3::AttributeConstructionList::Item (attribute-construction-list.h, module 'core')."""
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    # Public data members, in the generator's original order.
    for attr_name, attr_type in (('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
                                 ('name', 'std::string'),
                                 ('value', 'ns3::Ptr< ns3::AttributeValue >')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3Buffer_methods(root_module, cls):
    """Bind ns3::Buffer (buffer.h, module 'network')."""
    # Constructors: default, sized, sized + initialize flag, copy.
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'dataSize')])
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    # Member functions as (name, return type, ((arg type, arg name), ...),
    # extra add_method kwargs), in the generator's original order.
    buffer_methods = (
        ('AddAtEnd', 'void', (('uint32_t', 'end'),), {}),
        ('AddAtEnd', 'void', (('ns3::Buffer const &', 'o'),), {}),
        ('AddAtStart', 'void', (('uint32_t', 'start'),), {}),
        ('Begin', 'ns3::Buffer::Iterator', (), {'is_const': True}),
        ('CopyData', 'void', (('std::ostream *', 'os'), ('uint32_t', 'size')), {'is_const': True}),
        ('CopyData', 'uint32_t', (('uint8_t *', 'buffer'), ('uint32_t', 'size')), {'is_const': True}),
        ('CreateFragment', 'ns3::Buffer', (('uint32_t', 'start'), ('uint32_t', 'length')), {'is_const': True}),
        ('Deserialize', 'uint32_t', (('uint8_t const *', 'buffer'), ('uint32_t', 'size')), {}),
        ('End', 'ns3::Buffer::Iterator', (), {'is_const': True}),
        ('GetSerializedSize', 'uint32_t', (), {'is_const': True}),
        ('GetSize', 'uint32_t', (), {'is_const': True}),
        ('PeekData', 'uint8_t const *', (), {'is_const': True}),
        ('RemoveAtEnd', 'void', (('uint32_t', 'end'),), {}),
        ('RemoveAtStart', 'void', (('uint32_t', 'start'),), {}),
        ('Serialize', 'uint32_t', (('uint8_t *', 'buffer'), ('uint32_t', 'maxSize')), {'is_const': True}),
    )
    for name, ret, args, extra in buffer_methods:
        cls.add_method(name, ret, [param(t, n) for t, n in args], **extra)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Bind ns3::Buffer::Iterator (buffer.h, module 'network')."""
    # Constructors: copy first (as emitted by the generator), then default.
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    cls.add_constructor([])
    # uint16_t CalculateIpChecksum(uint16_t size [, uint32_t initialChecksum])
    cls.add_method('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size')])
    cls.add_method('CalculateIpChecksum', 'uint16_t',
                   [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    # Const query helpers.
    cls.add_method('GetDistanceFrom', 'uint32_t',
                   [param('ns3::Buffer::Iterator const &', 'o')], is_const=True)
    for query_name, query_ret in (('GetSize', 'uint32_t'),
                                  ('IsEnd', 'bool'),
                                  ('IsStart', 'bool')):
        cls.add_method(query_name, query_ret, [], is_const=True)
    # Cursor movement and single-byte peek, in the generator's original order.
    cls.add_method('Next', 'void', [])
    cls.add_method('Next', 'void', [param('uint32_t', 'delta')])
    cls.add_method('PeekU8', 'uint8_t', [])
    cls.add_method('Prev', 'void', [])
    cls.add_method('Prev', 'void', [param('uint32_t', 'delta')])
    # Bulk reads: into a raw byte buffer, or starting at another iterator.
    cls.add_method('Read', 'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('Read', 'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')])
    # Scalar reads: the ReadLsbtohU*/ReadNtohU*/ReadU* families each cover
    # 16/32/64-bit widths; order matches the generator's output.
    for read_prefix in ('ReadLsbtohU', 'ReadNtohU', 'ReadU'):
        for bits in (16, 32, 64):
            cls.add_method('%s%d' % (read_prefix, bits), 'uint%d_t' % bits, [])
    cls.add_method('ReadU8', 'uint8_t', [])
    # Bulk writes: from a raw byte buffer, or an iterator range.
    cls.add_method('Write', 'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('Write', 'void',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    # Scalar writes: WriteHtolsbU*/WriteHtonU*/WriteU* families, 16/32/64-bit.
    for write_prefix in ('WriteHtolsbU', 'WriteHtonU', 'WriteU'):
        for bits in (16, 32, 64):
            cls.add_method('%s%d' % (write_prefix, bits), 'void',
                           [param('uint%d_t' % bits, 'data')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'data')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagIterator (packet.h, module 'network')."""
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    # bool HasNext() const; Item Next().
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::ByteTagIterator::Item', [])
    return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagIterator::Item (packet.h, module 'network')."""
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    # Const accessors for the tagged byte range and tag metadata.
    cls.add_method('GetEnd', 'uint32_t', [], is_const=True)
    cls.add_method('GetStart', 'uint32_t', [], is_const=True)
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3ByteTagList_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagList (byte-tag-list.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    # Overloaded Add: new tag over a byte range, or merge of another list.
    cls.add_method('Add', 'ns3::TagBuffer',
                   [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'),
                    param('int32_t', 'start'), param('int32_t', 'end')])
    cls.add_method('Add', 'void', [param('ns3::ByteTagList const &', 'o')])
    # Offset bookkeeping used when bytes are appended/prepended to a packet.
    cls.add_method('AddAtEnd', 'void', [param('int32_t', 'appendOffset')])
    cls.add_method('AddAtStart', 'void', [param('int32_t', 'prependOffset')])
    cls.add_method('Adjust', 'void', [param('int32_t', 'adjustment')])
    cls.add_method('Begin', 'ns3::ByteTagList::Iterator',
                   [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
                   is_const=True)
    cls.add_method('RemoveAll', 'void', [])
    return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagList::Iterator (byte-tag-list.h, module 'network')."""
    # Copy constructor.
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    # Iteration protocol: position, availability, next item.
    cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True)
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', [])
    return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register Python bindings for ns3::ByteTagList::Iterator::Item (byte-tag-list.h, module 'network')."""
    # Copy constructor, plus a constructor from a TagBuffer.
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    # Public data members exposed as mutable instance attributes.
    cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
    cls.add_instance_attribute('end', 'int32_t', is_const=False)
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    cls.add_instance_attribute('start', 'int32_t', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register Python bindings for ns3::CallbackBase (callback.h, module 'core')."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [], is_const=True)
    # Protected constructor taking the callback implementation pointer.
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return
def register_Ns3DataRate_methods(root_module, cls):
    """Register Python bindings for ns3::DataRate (data-rate.h, module 'network')."""
    cls.add_output_stream_operator()
    # Full set of comparison operators, registered in the original order.
    for comparison_op in ('!=', '<', '<=', '==', '>', '>='):
        cls.add_binary_comparison_operator(comparison_op)
    # Constructors: copy, default, from bits-per-second, from a rate string.
    cls.add_constructor([param('ns3::DataRate const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint64_t', 'bps')])
    cls.add_constructor([param('std::string', 'rate')])
    # Transmission-time calculators.
    cls.add_method('CalculateBitsTxTime', 'ns3::Time',
                   [param('uint32_t', 'bits')], is_const=True)
    cls.add_method('CalculateBytesTxTime', 'ns3::Time',
                   [param('uint32_t', 'bytes')], is_const=True)
    # Deprecated in the C++ API; kept for backward compatibility.
    cls.add_method('CalculateTxTime', 'double',
                   [param('uint32_t', 'bytes')], deprecated=True, is_const=True)
    cls.add_method('GetBitRate', 'uint64_t', [], is_const=True)
    return
def register_Ns3EventId_methods(root_module, cls):
    """Register Python bindings for ns3::EventId (event-id.h, module 'core')."""
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, and fully-specified (impl, ts, context, uid).
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'),
                         param('uint64_t', 'ts'), param('uint32_t', 'context'),
                         param('uint32_t', 'uid')])
    cls.add_method('Cancel', 'void', [])
    # Const accessors for the event's identifying fields and state.
    cls.add_method('GetContext', 'uint32_t', [], is_const=True)
    cls.add_method('GetTs', 'uint64_t', [], is_const=True)
    cls.add_method('GetUid', 'uint32_t', [], is_const=True)
    cls.add_method('IsExpired', 'bool', [], is_const=True)
    cls.add_method('IsRunning', 'bool', [], is_const=True)
    cls.add_method('PeekEventImpl', 'ns3::EventImpl *', [], is_const=True)
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Register Python bindings for ns3::Hasher (hash.h, module 'core')."""
    # Constructors: copy, default, and from a hash implementation pointer.
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    # GetHash32/GetHash64, each overloaded for raw buffers and std::string.
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    cls.add_method('GetHash32', 'uint32_t', [param('std::string const', 's')])
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    cls.add_method('GetHash64', 'uint64_t', [param('std::string const', 's')])
    # clear() returns the Hasher itself (fluent reset).
    cls.add_method('clear', 'ns3::Hasher &', [])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Address (ipv4-address.h, module 'network')."""
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from a 32-bit host value, from a dotted string.
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'address')])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_method('CombineMask', 'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    cls.add_method('ConvertFrom', 'ns3::Ipv4Address',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('Deserialize', 'ns3::Ipv4Address',
                   [param('uint8_t const *', 'buf')], is_static=True)
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    # Static well-known addresses.
    cls.add_method('GetAny', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetBroadcast', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetLoopback', 'ns3::Ipv4Address', [], is_static=True)
    cls.add_method('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv4Address', [], is_static=True)
    # Classification predicates.
    cls.add_method('IsAny', 'bool', [], is_const=True)
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv4Address const &', 'other')], is_const=True)
    cls.add_method('IsLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSubnetDirectedBroadcast', 'bool',
                   [param('ns3::Ipv4Mask const &', 'mask')], is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Set is overloaded for a 32-bit value and a dotted string.
    cls.add_method('Set', 'void', [param('uint32_t', 'address')])
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Mask (ipv4-address.h, module 'network')."""
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from a 32-bit mask, from a dotted string.
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'mask')])
    cls.add_constructor([param('char const *', 'mask')])
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    cls.add_method('GetInverse', 'uint32_t', [], is_const=True)
    # Static well-known masks.
    cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv4Mask', 'other')], is_const=True)
    cls.add_method('IsMatch', 'bool',
                   [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
                   is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Set', 'void', [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Address (ipv6-address.h, module 'network')."""
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, from string, from raw bytes, copy, from pointer.
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_constructor([param('uint8_t *', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    cls.add_method('CombinePrefix', 'ns3::Ipv6Address',
                   [param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_method('ConvertFrom', 'ns3::Ipv6Address',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('Deserialize', 'ns3::Ipv6Address',
                   [param('uint8_t const *', 'buf')], is_static=True)
    # Static well-known addresses.
    cls.add_method('GetAllHostsMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAllNodesMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAny', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    cls.add_method('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetLoopback', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetZero', 'ns3::Ipv6Address', [], is_static=True)
    # Classification predicates (IsAllHostsMulticast is deprecated upstream).
    cls.add_method('IsAllHostsMulticast', 'bool', [], deprecated=True, is_const=True)
    cls.add_method('IsAllNodesMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAllRoutersMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAny', 'bool', [], is_const=True)
    cls.add_method('IsDocumentation', 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv6Address const &', 'other')], is_const=True)
    cls.add_method('IsIpv4MappedAddress', 'bool', [], is_const=True)
    cls.add_method('IsLinkLocal', 'bool', [], is_const=True)
    cls.add_method('IsLinkLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSolicitedMulticast', 'bool', [], is_const=True)
    # Autoconfiguration helpers, overloaded for 16/48/64-bit MAC addresses.
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac16Address', 'mac')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'mac')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address',
                   [param('ns3::Mac64Address', 'mac')], is_static=True)
    cls.add_method('MakeIpv4MappedAddress', 'ns3::Ipv6Address',
                   [param('ns3::Ipv4Address', 'addr')], is_static=True)
    cls.add_method('MakeSolicitedAddress', 'ns3::Ipv6Address',
                   [param('ns3::Ipv6Address', 'addr')], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Set is overloaded for a string and raw bytes.
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    cls.add_method('Set', 'void', [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Prefix (ipv6-address.h, module 'network')."""
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: default, from raw bytes, from string, from prefix length,
    # copy, and from pointer.
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t *', 'prefix')])
    cls.add_constructor([param('char const *', 'prefix')])
    cls.add_constructor([param('uint8_t', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    # Static well-known prefixes.
    cls.add_method('GetLoopback', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint8_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ipv6Prefix const &', 'other')], is_const=True)
    cls.add_method('IsMatch', 'bool',
                   [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
                   is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    return
def register_Ns3Mac48Address_methods(root_module, cls):
    """Register Python bindings for ns3::Mac48Address (mac48-address.h, module 'network')."""
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: copy, default, from a textual MAC address.
    cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'str')])
    cls.add_method('Allocate', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('ConvertFrom', 'ns3::Mac48Address',
                   [param('ns3::Address const &', 'address')], is_static=True)
    # Raw byte-buffer conversion in both directions.
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'buffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'buffer')], is_const=True)
    cls.add_method('GetBroadcast', 'ns3::Mac48Address', [], is_static=True)
    # GetMulticast is overloaded for IPv4 and IPv6 group addresses.
    cls.add_method('GetMulticast', 'ns3::Mac48Address',
                   [param('ns3::Ipv4Address', 'address')], is_static=True)
    cls.add_method('GetMulticast', 'ns3::Mac48Address',
                   [param('ns3::Ipv6Address', 'address')], is_static=True)
    cls.add_method('GetMulticast6Prefix', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('GetMulticastPrefix', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    cls.add_method('IsGroup', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool',
                   [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3NetDeviceContainer_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::NetDeviceContainer
    (net-device-container.h, module 'network')."""
    # Begin()/End() both return the same const vector iterator type.
    dev_iter = ('__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, '
                'std::vector< ns3::Ptr< ns3::NetDevice > > >')
    # Constructors: copy, default, single-device, by-name, and concatenation.
    for ctor_params in ([param('ns3::NetDeviceContainer const &', 'arg0')],
                        [],
                        [param('ns3::Ptr< ns3::NetDevice >', 'dev')],
                        [param('std::string', 'devName')],
                        [param('ns3::NetDeviceContainer const &', 'a'),
                         param('ns3::NetDeviceContainer const &', 'b')]):
        cls.add_constructor(ctor_params)
    # Add() overloads (container, device pointer, device name).
    cls.add_method('Add', 'void', [param('ns3::NetDeviceContainer', 'other')])
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    cls.add_method('Add', 'void', [param('std::string', 'deviceName')])
    # Const accessors over the underlying std::vector.
    cls.add_method('Begin', dev_iter, [], is_const=True)
    cls.add_method('End', dev_iter, [], is_const=True)
    cls.add_method('Get', 'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::ObjectBase
    (object-base.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    # Attribute getters (const); value is returned through the reference arg.
    cls.add_method('GetAttribute', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    cls.add_method('GetAttributeFailSafe', 'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    # Run-time type information: pure-virtual instance hook plus static GetTypeId.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Attribute setters.
    cls.add_method('SetAttribute', 'void',
                   [param('std::string', 'name'),
                    param('ns3::AttributeValue const &', 'value')])
    cls.add_method('SetAttributeFailSafe', 'bool',
                   [param('std::string', 'name'),
                    param('ns3::AttributeValue const &', 'value')])
    # Trace-source (dis)connection helpers, each reporting success as bool.
    cls.add_method('TraceConnect', 'bool',
                   [param('std::string', 'name'), param('std::string', 'context'),
                    param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceConnectWithoutContext', 'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnect', 'bool',
                   [param('std::string', 'name'), param('std::string', 'context'),
                    param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnectWithoutContext', 'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    # Protected construction machinery.
    cls.add_method('ConstructSelf', 'void',
                   [param('ns3::AttributeConstructionList const &', 'attributes')],
                   visibility='protected')
    cls.add_method('NotifyConstructionCompleted', 'void', [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::ObjectDeleter
    (object.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    # Static Delete(ns3::Object *) member function.
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
def register_Ns3ObjectFactory_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::ObjectFactory
    (object-factory.h, module 'core')."""
    # operator<< support.
    cls.add_output_stream_operator()
    # Copy, default, and by-type-name constructors.
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('std::string', 'typeId')])
    # Const accessors.
    cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    # Attribute setter.
    cls.add_method('Set', 'void',
                   [param('std::string', 'name'),
                    param('ns3::AttributeValue const &', 'value')])
    # The three SetTypeId overloads (TypeId, C string, std::string).
    for tid_param in (param('ns3::TypeId', 'tid'),
                      param('char const *', 'tid'),
                      param('std::string', 'tid')):
        cls.add_method('SetTypeId', 'void', [tid_param])
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::PacketMetadata
    (packet-metadata.h, module 'network')."""
    # Constructors: (uid, size) and copy.
    cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
    cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
    # Add* mutators.
    cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')])
    cls.add_method('AddHeader', 'void',
                   [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddTrailer', 'void',
                   [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    # Const inspection helpers.
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator',
                   [param('ns3::Buffer', 'buffer')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::PacketMetadata',
                   [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True)
    cls.add_method('Deserialize', 'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Static enable switches.
    cls.add_method('Enable', 'void', [], is_static=True)
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # Remove* mutators, mirroring the Add* operations above.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('RemoveHeader', 'void',
                   [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('RemoveTrailer', 'void',
                   [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    cls.add_method('Serialize', 'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::PacketMetadata::Item
    (packet-metadata.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    for attr_name, attr_type in (('current', 'ns3::Buffer::Iterator'),
                                 ('currentSize', 'uint32_t'),
                                 ('currentTrimedFromEnd', 'uint32_t'),
                                 ('currentTrimedFromStart', 'uint32_t'),
                                 ('isFragment', 'bool'),
                                 ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::PacketMetadata::ItemIterator
    (packet-metadata.h, module 'network')."""
    # Copy constructor and the (metadata, buffer) constructor.
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'),
                         param('ns3::Buffer', 'buffer')])
    # HasNext()/Next() iteration interface.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::PacketTagIterator
    (packet.h, module 'network')."""
    # Copy constructor only.
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    # HasNext()/Next() iteration interface.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::PacketTagIterator::Item
    (packet.h, module 'network')."""
    # Copy constructor only.
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    # Const accessors: tag payload (via reference arg) and its TypeId.
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::PacketTagList
    (packet-tag-list.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    # Const operations (note: Add is declared const in the C++ API).
    cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True)
    cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    # Mutating operations.
    cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveAll', 'void', [])
    cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::PacketTagList::TagData
    (packet-tag-list.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    for attr_name, attr_type in (('count', 'uint32_t'),
                                 ('data', 'uint8_t [ 21 ]'),
                                 ('next', 'ns3::PacketTagList::TagData *'),
                                 ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(attr_name, attr_type, is_const=False)
    return
def register_Ns3QueueDiscContainer_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::QueueDiscContainer
    (queue-disc-container.h, module 'traffic-control')."""
    # Begin()/End() both return the same const vector iterator type.
    qdisc_iter = ('__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::QueueDisc > const, '
                  'std::vector< ns3::Ptr< ns3::QueueDisc > > >')
    # Constructors: copy, default, and single-queue-disc.
    cls.add_constructor([param('ns3::QueueDiscContainer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::QueueDisc >', 'qDisc')])
    # Add() overloads (container, queue-disc pointer).
    cls.add_method('Add', 'void', [param('ns3::QueueDiscContainer', 'other')])
    cls.add_method('Add', 'void', [param('ns3::Ptr< ns3::QueueDisc >', 'qDisc')])
    # Const accessors over the underlying std::vector.
    cls.add_method('Begin', qdisc_iter, [], is_const=True)
    cls.add_method('End', qdisc_iter, [], is_const=True)
    cls.add_method('Get', 'ns3::Ptr< ns3::QueueDisc >',
                   [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    return
def register_Ns3QueueDiscFactory_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::QueueDiscFactory
    (traffic-control-helper.h, module 'traffic-control')."""
    # Copy constructor and the ObjectFactory-based constructor.
    cls.add_constructor([param('ns3::QueueDiscFactory const &', 'arg0')])
    cls.add_constructor([param('ns3::ObjectFactory', 'factory')])
    # Component registration hooks, each taking an ObjectFactory.
    cls.add_method('AddInternalQueue', 'void', [param('ns3::ObjectFactory', 'factory')])
    cls.add_method('AddPacketFilter', 'void', [param('ns3::ObjectFactory', 'factory')])
    cls.add_method('AddQueueDiscClass', 'uint16_t', [param('ns3::ObjectFactory', 'factory')])
    # Queue-disc creation and class/handle wiring.
    cls.add_method('CreateQueueDisc', 'ns3::Ptr< ns3::QueueDisc >',
                   [param('std::vector< ns3::Ptr< ns3::QueueDisc > > const &', 'queueDiscs')])
    cls.add_method('SetChildQueueDisc', 'void',
                   [param('uint16_t', 'classId'), param('uint16_t', 'handle')])
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register the pybindgen bindings for the
    SimpleRefCount<Object, ObjectBase, ObjectDeleter> template instantiation
    (simple-ref-count.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    # Static Cleanup() entry point.
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3Simulator_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::Simulator
    (simulator.h, module 'core')."""
    # Copy constructor.
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    # Every registered member function on Simulator is static; keep them in a
    # single dispatch table (registration order matches the original scan).
    static_methods = (
        ('Cancel', 'void', [param('ns3::EventId const &', 'id')]),
        ('Destroy', 'void', []),
        ('GetContext', 'uint32_t', []),
        ('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')]),
        ('GetImplementation', 'ns3::Ptr< ns3::SimulatorImpl >', []),
        ('GetMaximumSimulationTime', 'ns3::Time', []),
        ('GetSystemId', 'uint32_t', []),
        ('IsExpired', 'bool', [param('ns3::EventId const &', 'id')]),
        ('IsFinished', 'bool', []),
        ('Now', 'ns3::Time', []),
        ('Remove', 'void', [param('ns3::EventId const &', 'id')]),
        ('SetImplementation', 'void', [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')]),
        ('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')]),
        ('Stop', 'void', []),
        ('Stop', 'void', [param('ns3::Time const &', 'delay')]),
    )
    for method_name, return_type, method_params in static_methods:
        cls.add_method(method_name, return_type, method_params, is_static=True)
    return
def register_Ns3Tag_methods(root_module, cls):
    """Register the pybindgen bindings for the abstract ns3::Tag base class
    (tag.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    # Pure-virtual serialization interface implemented by concrete tags.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # Static run-time type information.
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::TagBuffer
    (tag-buffer.h, module 'network')."""
    # Copy constructor and the (start, end) raw-pointer constructor.
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    # Bulk read into a caller-supplied buffer.
    cls.add_method('Read', 'void',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    # Typed readers (ReadDouble/ReadU16/ReadU32/ReadU64/ReadU8), all niladic;
    # registration order matches the original scan.
    for suffix, value_type in (('Double', 'double'), ('U16', 'uint16_t'),
                               ('U32', 'uint32_t'), ('U64', 'uint64_t'),
                               ('U8', 'uint8_t')):
        cls.add_method('Read' + suffix, value_type, [])
    cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    # Bulk write from a caller-supplied buffer.
    cls.add_method('Write', 'void',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Typed writers; parameter names ('v' vs 'data') mirror the C++ declarations.
    for suffix, value_type, arg_name in (('Double', 'double', 'v'),
                                         ('U16', 'uint16_t', 'data'),
                                         ('U32', 'uint32_t', 'data'),
                                         ('U64', 'uint64_t', 'v'),
                                         ('U8', 'uint8_t', 'v')):
        cls.add_method('Write' + suffix, 'void', [param(value_type, arg_name)])
    return
def register_Ns3TimeWithUnit_methods(root_module, cls):
    """Register the pybindgen bindings for ns3::TimeWithUnit
    (nstime.h, module 'core')."""
    # operator<< support.
    cls.add_output_stream_operator()
    # Copy constructor and the (time, unit) constructor.
    cls.add_constructor([param('ns3::TimeWithUnit const &', 'arg0')])
    cls.add_constructor([param('ns3::Time const', 'time'),
                         param('ns3::Time::Unit const', 'unit')])
    return
def register_Ns3TracedValue__Bool_methods(root_module, cls):
    """Register Python bindings for ns3::TracedValue<bool> (traced-value.h, module 'core')."""
    # Default, copy, and value constructors, in generator emission order.
    for ctor_args in ([],
                      [param('ns3::TracedValue< bool > const &', 'o')],
                      [param('bool const &', 'v')]):
        cls.add_constructor(ctor_args)
    # Trace (dis)connection methods: the plain variants take a callback plus a
    # context path string; the *WithoutContext variants take only the callback.
    for method_name, takes_path in (('Connect', True),
                                    ('ConnectWithoutContext', False),
                                    ('Disconnect', True),
                                    ('DisconnectWithoutContext', False)):
        arg_list = [param('ns3::CallbackBase const &', 'cb')]
        if takes_path:
            arg_list.append(param('std::string', 'path'))
        cls.add_method(method_name, 'void', arg_list)
    # Accessors for the wrapped bool value.
    cls.add_method('Get', 'bool', [], is_const=True)
    cls.add_method('Set', 'void', [param('bool const &', 'v')])
    return
def register_Ns3TracedValue__Unsigned_int_methods(root_module, cls):
    """Register Python bindings for ns3::TracedValue<unsigned int> (traced-value.h, module 'core')."""
    # Default, copy, and value constructors, in generator emission order.
    for ctor_args in ([],
                      [param('ns3::TracedValue< unsigned int > const &', 'o')],
                      [param('unsigned int const &', 'v')]):
        cls.add_constructor(ctor_args)
    # Trace (dis)connection methods: the plain variants take a callback plus a
    # context path string; the *WithoutContext variants take only the callback.
    for method_name, takes_path in (('Connect', True),
                                    ('ConnectWithoutContext', False),
                                    ('Disconnect', True),
                                    ('DisconnectWithoutContext', False)):
        arg_list = [param('ns3::CallbackBase const &', 'cb')]
        if takes_path:
            arg_list.append(param('std::string', 'path'))
        cls.add_method(method_name, 'void', arg_list)
    # Accessors for the wrapped unsigned int value.
    cls.add_method('Get', 'unsigned int', [], is_const=True)
    cls.add_method('Set', 'void', [param('unsigned int const &', 'v')])
    return
def register_Ns3TrafficControlHelper_methods(root_module, cls):
    """Register Python bindings for ns3::TrafficControlHelper (traffic-control-helper.h, module 'traffic-control').

    Auto-generated pybindgen registration: constructors, queue-disc tree
    construction helpers (AddChildQueueDisc(s), AddInternalQueues,
    AddPacketFilter, AddQueueDiscClasses, SetRootQueueDisc), the static
    Default() factory, and Install/Uninstall on devices and containers.
    """
    ## traffic-control-helper.h (module 'traffic-control'): ns3::TrafficControlHelper::TrafficControlHelper(ns3::TrafficControlHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TrafficControlHelper const &', 'arg0')])
    ## traffic-control-helper.h (module 'traffic-control'): ns3::TrafficControlHelper::TrafficControlHelper() [constructor]
    cls.add_constructor([])
    ## traffic-control-helper.h (module 'traffic-control'): uint16_t ns3::TrafficControlHelper::AddChildQueueDisc(uint16_t handle, uint16_t classId, std::string type, std::string n01="", ns3::AttributeValue const & v01=ns3::EmptyAttributeValue(), std::string n02="", ns3::AttributeValue const & v02=ns3::EmptyAttributeValue(), std::string n03="", ns3::AttributeValue const & v03=ns3::EmptyAttributeValue(), std::string n04="", ns3::AttributeValue const & v04=ns3::EmptyAttributeValue(), std::string n05="", ns3::AttributeValue const & v05=ns3::EmptyAttributeValue(), std::string n06="", ns3::AttributeValue const & v06=ns3::EmptyAttributeValue(), std::string n07="", ns3::AttributeValue const & v07=ns3::EmptyAttributeValue(), std::string n08="", ns3::AttributeValue const & v08=ns3::EmptyAttributeValue(), std::string n09="", ns3::AttributeValue const & v09=ns3::EmptyAttributeValue(), std::string n10="", ns3::AttributeValue const & v10=ns3::EmptyAttributeValue(), std::string n11="", ns3::AttributeValue const & v11=ns3::EmptyAttributeValue(), std::string n12="", ns3::AttributeValue const & v12=ns3::EmptyAttributeValue(), std::string n13="", ns3::AttributeValue const & v13=ns3::EmptyAttributeValue(), std::string n14="", ns3::AttributeValue const & v14=ns3::EmptyAttributeValue(), std::string n15="", ns3::AttributeValue const & v15=ns3::EmptyAttributeValue()) [member function]
    cls.add_method('AddChildQueueDisc', 
                   'uint16_t', 
                   [param('uint16_t', 'handle'), param('uint16_t', 'classId'), param('std::string', 'type'), param('std::string', 'n01', default_value='""'), param('ns3::AttributeValue const &', 'v01', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n02', default_value='""'), param('ns3::AttributeValue const &', 'v02', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n03', default_value='""'), param('ns3::AttributeValue const &', 'v03', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n04', default_value='""'), param('ns3::AttributeValue const &', 'v04', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n05', default_value='""'), param('ns3::AttributeValue const &', 'v05', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n06', default_value='""'), param('ns3::AttributeValue const &', 'v06', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n07', default_value='""'), param('ns3::AttributeValue const &', 'v07', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n08', default_value='""'), param('ns3::AttributeValue const &', 'v08', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n09', default_value='""'), param('ns3::AttributeValue const &', 'v09', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n10', default_value='""'), param('ns3::AttributeValue const &', 'v10', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n11', default_value='""'), param('ns3::AttributeValue const &', 'v11', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n12', default_value='""'), param('ns3::AttributeValue const &', 'v12', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n13', default_value='""'), param('ns3::AttributeValue const &', 'v13', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n14', default_value='""'), param('ns3::AttributeValue const &', 'v14', 
default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n15', default_value='""'), param('ns3::AttributeValue const &', 'v15', default_value='ns3::EmptyAttributeValue()')])
    ## traffic-control-helper.h (module 'traffic-control'): std::vector<short unsigned int,std::allocator<short unsigned int> > ns3::TrafficControlHelper::AddChildQueueDiscs(uint16_t handle, std::vector<short unsigned int,std::allocator<short unsigned int> > const & classes, std::string type, std::string n01="", ns3::AttributeValue const & v01=ns3::EmptyAttributeValue(), std::string n02="", ns3::AttributeValue const & v02=ns3::EmptyAttributeValue(), std::string n03="", ns3::AttributeValue const & v03=ns3::EmptyAttributeValue(), std::string n04="", ns3::AttributeValue const & v04=ns3::EmptyAttributeValue(), std::string n05="", ns3::AttributeValue const & v05=ns3::EmptyAttributeValue(), std::string n06="", ns3::AttributeValue const & v06=ns3::EmptyAttributeValue(), std::string n07="", ns3::AttributeValue const & v07=ns3::EmptyAttributeValue(), std::string n08="", ns3::AttributeValue const & v08=ns3::EmptyAttributeValue(), std::string n09="", ns3::AttributeValue const & v09=ns3::EmptyAttributeValue(), std::string n10="", ns3::AttributeValue const & v10=ns3::EmptyAttributeValue(), std::string n11="", ns3::AttributeValue const & v11=ns3::EmptyAttributeValue(), std::string n12="", ns3::AttributeValue const & v12=ns3::EmptyAttributeValue(), std::string n13="", ns3::AttributeValue const & v13=ns3::EmptyAttributeValue(), std::string n14="", ns3::AttributeValue const & v14=ns3::EmptyAttributeValue(), std::string n15="", ns3::AttributeValue const & v15=ns3::EmptyAttributeValue()) [member function]
    cls.add_method('AddChildQueueDiscs', 
                   'std::vector< short unsigned int >', 
                   [param('uint16_t', 'handle'), param('std::vector< short unsigned int > const &', 'classes'), param('std::string', 'type'), param('std::string', 'n01', default_value='""'), param('ns3::AttributeValue const &', 'v01', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n02', default_value='""'), param('ns3::AttributeValue const &', 'v02', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n03', default_value='""'), param('ns3::AttributeValue const &', 'v03', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n04', default_value='""'), param('ns3::AttributeValue const &', 'v04', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n05', default_value='""'), param('ns3::AttributeValue const &', 'v05', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n06', default_value='""'), param('ns3::AttributeValue const &', 'v06', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n07', default_value='""'), param('ns3::AttributeValue const &', 'v07', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n08', default_value='""'), param('ns3::AttributeValue const &', 'v08', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n09', default_value='""'), param('ns3::AttributeValue const &', 'v09', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n10', default_value='""'), param('ns3::AttributeValue const &', 'v10', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n11', default_value='""'), param('ns3::AttributeValue const &', 'v11', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n12', default_value='""'), param('ns3::AttributeValue const &', 'v12', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n13', default_value='""'), param('ns3::AttributeValue const &', 'v13', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n14', default_value='""'), param('ns3::AttributeValue const 
&', 'v14', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n15', default_value='""'), param('ns3::AttributeValue const &', 'v15', default_value='ns3::EmptyAttributeValue()')])
    ## traffic-control-helper.h (module 'traffic-control'): void ns3::TrafficControlHelper::AddInternalQueues(uint16_t handle, uint16_t count, std::string type, std::string n01="", ns3::AttributeValue const & v01=ns3::EmptyAttributeValue(), std::string n02="", ns3::AttributeValue const & v02=ns3::EmptyAttributeValue(), std::string n03="", ns3::AttributeValue const & v03=ns3::EmptyAttributeValue(), std::string n04="", ns3::AttributeValue const & v04=ns3::EmptyAttributeValue(), std::string n05="", ns3::AttributeValue const & v05=ns3::EmptyAttributeValue(), std::string n06="", ns3::AttributeValue const & v06=ns3::EmptyAttributeValue(), std::string n07="", ns3::AttributeValue const & v07=ns3::EmptyAttributeValue(), std::string n08="", ns3::AttributeValue const & v08=ns3::EmptyAttributeValue()) [member function]
    cls.add_method('AddInternalQueues', 
                   'void', 
                   [param('uint16_t', 'handle'), param('uint16_t', 'count'), param('std::string', 'type'), param('std::string', 'n01', default_value='""'), param('ns3::AttributeValue const &', 'v01', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n02', default_value='""'), param('ns3::AttributeValue const &', 'v02', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n03', default_value='""'), param('ns3::AttributeValue const &', 'v03', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n04', default_value='""'), param('ns3::AttributeValue const &', 'v04', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n05', default_value='""'), param('ns3::AttributeValue const &', 'v05', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n06', default_value='""'), param('ns3::AttributeValue const &', 'v06', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n07', default_value='""'), param('ns3::AttributeValue const &', 'v07', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n08', default_value='""'), param('ns3::AttributeValue const &', 'v08', default_value='ns3::EmptyAttributeValue()')])
    ## traffic-control-helper.h (module 'traffic-control'): void ns3::TrafficControlHelper::AddPacketFilter(uint16_t handle, std::string type, std::string n01="", ns3::AttributeValue const & v01=ns3::EmptyAttributeValue(), std::string n02="", ns3::AttributeValue const & v02=ns3::EmptyAttributeValue(), std::string n03="", ns3::AttributeValue const & v03=ns3::EmptyAttributeValue(), std::string n04="", ns3::AttributeValue const & v04=ns3::EmptyAttributeValue(), std::string n05="", ns3::AttributeValue const & v05=ns3::EmptyAttributeValue(), std::string n06="", ns3::AttributeValue const & v06=ns3::EmptyAttributeValue(), std::string n07="", ns3::AttributeValue const & v07=ns3::EmptyAttributeValue(), std::string n08="", ns3::AttributeValue const & v08=ns3::EmptyAttributeValue()) [member function]
    cls.add_method('AddPacketFilter', 
                   'void', 
                   [param('uint16_t', 'handle'), param('std::string', 'type'), param('std::string', 'n01', default_value='""'), param('ns3::AttributeValue const &', 'v01', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n02', default_value='""'), param('ns3::AttributeValue const &', 'v02', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n03', default_value='""'), param('ns3::AttributeValue const &', 'v03', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n04', default_value='""'), param('ns3::AttributeValue const &', 'v04', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n05', default_value='""'), param('ns3::AttributeValue const &', 'v05', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n06', default_value='""'), param('ns3::AttributeValue const &', 'v06', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n07', default_value='""'), param('ns3::AttributeValue const &', 'v07', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n08', default_value='""'), param('ns3::AttributeValue const &', 'v08', default_value='ns3::EmptyAttributeValue()')])
    ## traffic-control-helper.h (module 'traffic-control'): std::vector<short unsigned int,std::allocator<short unsigned int> > ns3::TrafficControlHelper::AddQueueDiscClasses(uint16_t handle, uint16_t count, std::string type, std::string n01="", ns3::AttributeValue const & v01=ns3::EmptyAttributeValue(), std::string n02="", ns3::AttributeValue const & v02=ns3::EmptyAttributeValue(), std::string n03="", ns3::AttributeValue const & v03=ns3::EmptyAttributeValue(), std::string n04="", ns3::AttributeValue const & v04=ns3::EmptyAttributeValue(), std::string n05="", ns3::AttributeValue const & v05=ns3::EmptyAttributeValue(), std::string n06="", ns3::AttributeValue const & v06=ns3::EmptyAttributeValue(), std::string n07="", ns3::AttributeValue const & v07=ns3::EmptyAttributeValue(), std::string n08="", ns3::AttributeValue const & v08=ns3::EmptyAttributeValue()) [member function]
    cls.add_method('AddQueueDiscClasses', 
                   'std::vector< short unsigned int >', 
                   [param('uint16_t', 'handle'), param('uint16_t', 'count'), param('std::string', 'type'), param('std::string', 'n01', default_value='""'), param('ns3::AttributeValue const &', 'v01', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n02', default_value='""'), param('ns3::AttributeValue const &', 'v02', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n03', default_value='""'), param('ns3::AttributeValue const &', 'v03', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n04', default_value='""'), param('ns3::AttributeValue const &', 'v04', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n05', default_value='""'), param('ns3::AttributeValue const &', 'v05', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n06', default_value='""'), param('ns3::AttributeValue const &', 'v06', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n07', default_value='""'), param('ns3::AttributeValue const &', 'v07', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n08', default_value='""'), param('ns3::AttributeValue const &', 'v08', default_value='ns3::EmptyAttributeValue()')])
    ## traffic-control-helper.h (module 'traffic-control'): static ns3::TrafficControlHelper ns3::TrafficControlHelper::Default() [member function]
    cls.add_method('Default', 
                   'ns3::TrafficControlHelper', 
                   [], 
                   is_static=True)
    ## traffic-control-helper.h (module 'traffic-control'): ns3::QueueDiscContainer ns3::TrafficControlHelper::Install(ns3::NetDeviceContainer c) [member function]
    cls.add_method('Install', 
                   'ns3::QueueDiscContainer', 
                   [param('ns3::NetDeviceContainer', 'c')])
    ## traffic-control-helper.h (module 'traffic-control'): ns3::QueueDiscContainer ns3::TrafficControlHelper::Install(ns3::Ptr<ns3::NetDevice> d) [member function]
    cls.add_method('Install', 
                   'ns3::QueueDiscContainer', 
                   [param('ns3::Ptr< ns3::NetDevice >', 'd')])
    ## traffic-control-helper.h (module 'traffic-control'): uint16_t ns3::TrafficControlHelper::SetRootQueueDisc(std::string type, std::string n01="", ns3::AttributeValue const & v01=ns3::EmptyAttributeValue(), std::string n02="", ns3::AttributeValue const & v02=ns3::EmptyAttributeValue(), std::string n03="", ns3::AttributeValue const & v03=ns3::EmptyAttributeValue(), std::string n04="", ns3::AttributeValue const & v04=ns3::EmptyAttributeValue(), std::string n05="", ns3::AttributeValue const & v05=ns3::EmptyAttributeValue(), std::string n06="", ns3::AttributeValue const & v06=ns3::EmptyAttributeValue(), std::string n07="", ns3::AttributeValue const & v07=ns3::EmptyAttributeValue(), std::string n08="", ns3::AttributeValue const & v08=ns3::EmptyAttributeValue(), std::string n09="", ns3::AttributeValue const & v09=ns3::EmptyAttributeValue(), std::string n10="", ns3::AttributeValue const & v10=ns3::EmptyAttributeValue(), std::string n11="", ns3::AttributeValue const & v11=ns3::EmptyAttributeValue(), std::string n12="", ns3::AttributeValue const & v12=ns3::EmptyAttributeValue(), std::string n13="", ns3::AttributeValue const & v13=ns3::EmptyAttributeValue(), std::string n14="", ns3::AttributeValue const & v14=ns3::EmptyAttributeValue(), std::string n15="", ns3::AttributeValue const & v15=ns3::EmptyAttributeValue()) [member function]
    cls.add_method('SetRootQueueDisc', 
                   'uint16_t', 
                   [param('std::string', 'type'), param('std::string', 'n01', default_value='""'), param('ns3::AttributeValue const &', 'v01', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n02', default_value='""'), param('ns3::AttributeValue const &', 'v02', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n03', default_value='""'), param('ns3::AttributeValue const &', 'v03', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n04', default_value='""'), param('ns3::AttributeValue const &', 'v04', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n05', default_value='""'), param('ns3::AttributeValue const &', 'v05', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n06', default_value='""'), param('ns3::AttributeValue const &', 'v06', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n07', default_value='""'), param('ns3::AttributeValue const &', 'v07', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n08', default_value='""'), param('ns3::AttributeValue const &', 'v08', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n09', default_value='""'), param('ns3::AttributeValue const &', 'v09', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n10', default_value='""'), param('ns3::AttributeValue const &', 'v10', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n11', default_value='""'), param('ns3::AttributeValue const &', 'v11', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n12', default_value='""'), param('ns3::AttributeValue const &', 'v12', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n13', default_value='""'), param('ns3::AttributeValue const &', 'v13', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n14', default_value='""'), param('ns3::AttributeValue const &', 'v14', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n15', 
default_value='""'), param('ns3::AttributeValue const &', 'v15', default_value='ns3::EmptyAttributeValue()')])
    ## traffic-control-helper.h (module 'traffic-control'): void ns3::TrafficControlHelper::Uninstall(ns3::NetDeviceContainer c) [member function]
    cls.add_method('Uninstall', 
                   'void', 
                   [param('ns3::NetDeviceContainer', 'c')])
    ## traffic-control-helper.h (module 'traffic-control'): void ns3::TrafficControlHelper::Uninstall(ns3::Ptr<ns3::NetDevice> d) [member function]
    cls.add_method('Uninstall', 
                   'void', 
                   [param('ns3::Ptr< ns3::NetDevice >', 'd')])
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register Python bindings for ns3::TypeId (type-id.h, module 'core').

    Auto-generated pybindgen registration: comparison/stream operators,
    constructors, attribute and trace-source registration and lookup,
    metadata getters (name, hash, uid, parent, size), and the Set* mutators.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
    cls.add_constructor([param('char const *', 'name')])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute', 
                   'ns3::TypeId', 
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('AddAttribute', 
                   'ns3::TypeId', 
                   [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
    cls.add_method('AddTraceSource', 
                   'ns3::TypeId', 
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], 
                   deprecated=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor, std::string callback) [member function]
    cls.add_method('AddTraceSource', 
                   'ns3::TypeId', 
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
    cls.add_method('GetAttribute', 
                   'ns3::TypeId::AttributeInformation', 
                   [param('uint32_t', 'i')], 
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
    cls.add_method('GetAttributeFullName', 
                   'std::string', 
                   [param('uint32_t', 'i')], 
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
    cls.add_method('GetAttributeN', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
    cls.add_method('GetConstructor', 
                   'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
    cls.add_method('GetGroupName', 
                   'std::string', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetHash() const [member function]
    cls.add_method('GetHash', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
    cls.add_method('GetName', 
                   'std::string', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
    cls.add_method('GetParent', 
                   'ns3::TypeId', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
    cls.add_method('GetRegistered', 
                   'ns3::TypeId', 
                   [param('uint32_t', 'i')], 
                   is_static=True)
    ## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
    cls.add_method('GetRegisteredN', 
                   'uint32_t', 
                   [], 
                   is_static=True)
    ## type-id.h (module 'core'): std::size_t ns3::TypeId::GetSize() const [member function]
    cls.add_method('GetSize', 
                   'std::size_t', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
    cls.add_method('GetTraceSource', 
                   'ns3::TypeId::TraceSourceInformation', 
                   [param('uint32_t', 'i')], 
                   is_const=True)
    ## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
    cls.add_method('GetTraceSourceN', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
    cls.add_method('GetUid', 
                   'uint16_t', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
    cls.add_method('HasConstructor', 
                   'bool', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
    cls.add_method('HasParent', 
                   'bool', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
    cls.add_method('HideFromDocumentation', 
                   'ns3::TypeId', 
                   [])
    ## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
    cls.add_method('IsChildOf', 
                   'bool', 
                   [param('ns3::TypeId', 'other')], 
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
    cls.add_method('LookupAttributeByName', 
                   'bool', 
                   [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], 
                   is_const=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByHash(uint32_t hash) [member function]
    cls.add_method('LookupByHash', 
                   'ns3::TypeId', 
                   [param('uint32_t', 'hash')], 
                   is_static=True)
    ## type-id.h (module 'core'): static bool ns3::TypeId::LookupByHashFailSafe(uint32_t hash, ns3::TypeId * tid) [member function]
    cls.add_method('LookupByHashFailSafe', 
                   'bool', 
                   [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], 
                   is_static=True)
    ## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
    cls.add_method('LookupByName', 
                   'ns3::TypeId', 
                   [param('std::string', 'name')], 
                   is_static=True)
    ## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
    cls.add_method('LookupTraceSourceByName', 
                   'ns3::Ptr< ns3::TraceSourceAccessor const >', 
                   [param('std::string', 'name')], 
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
    cls.add_method('MustHideFromDocumentation', 
                   'bool', 
                   [], 
                   is_const=True)
    ## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
    cls.add_method('SetAttributeInitialValue', 
                   'bool', 
                   [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
    cls.add_method('SetGroupName', 
                   'ns3::TypeId', 
                   [param('std::string', 'groupName')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
    cls.add_method('SetParent', 
                   'ns3::TypeId', 
                   [param('ns3::TypeId', 'tid')])
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetSize(std::size_t size) [member function]
    cls.add_method('SetSize', 
                   'ns3::TypeId', 
                   [param('std::size_t', 'size')])
    ## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
    cls.add_method('SetUid', 
                   'void', 
                   [param('uint16_t', 'tid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register Python bindings for ns3::TypeId::AttributeInformation (type-id.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    # Public data members of the struct, exposed as writable instance
    # attributes in the same order the binding generator emits them.
    member_table = (
        ('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >'),
        ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
        ('flags', 'uint32_t'),
        ('help', 'std::string'),
        ('initialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
        ('name', 'std::string'),
        ('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >'),
    )
    for member_name, cpp_type in member_table:
        cls.add_instance_attribute(member_name, cpp_type, is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register bindings for ns3::TypeId::TraceSourceInformation (type-id.h, module 'core')."""
    # Default constructor, then the copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    # Public data members of the struct; all writable from Python (is_const=False).
    members = (
        ('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >'),
        ('callback', 'std::string'),
        ('help', 'std::string'),
        ('name', 'std::string'),
    )
    for member_name, cpp_type in members:
        cls.add_instance_attribute(member_name, cpp_type, is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::empty (empty.h, module 'core'): default and copy constructors."""
    for ctor_args in ([], [param('ns3::empty const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register bindings for ns3::int64x64_t (int64x64-double.h, module 'core')."""
    fixed = root_module['ns3::int64x64_t']
    # Binary arithmetic operators returning a new int64x64_t.
    for op in ('*', '+', '-'):
        cls.add_binary_numeric_operator(op, fixed, fixed, param('ns3::int64x64_t const &', u'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('/', fixed, fixed, param('ns3::int64x64_t const &', u'right'))
    # Comparison operators (first group, as emitted by the generator).
    for op in ('<', '>', '!='):
        cls.add_binary_comparison_operator(op)
    # In-place arithmetic operators.
    for op in ('*=', '+=', '-=', '/='):
        cls.add_inplace_numeric_operator(op, param('ns3::int64x64_t const &', u'right'))
    cls.add_output_stream_operator()
    # Remaining comparison operators.
    for op in ('<=', '==', '>='):
        cls.add_binary_comparison_operator(op)
    # Constructors: default, one per native numeric type, the (hi, lo)
    # fixed-point pair, and finally the copy constructor.
    cls.add_constructor([])
    for numeric_type in ('double', 'long double', 'int', 'long int',
                         'long long int', 'unsigned int', 'long unsigned int',
                         'long long unsigned int'):
        cls.add_constructor([param(numeric_type, 'v')])
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    # Accessors and arithmetic helpers.
    cls.add_method('GetDouble', 'double', [], is_const=True)
    cls.add_method('GetHigh', 'int64_t', [], is_const=True)
    cls.add_method('GetLow', 'uint64_t', [], is_const=True)
    cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')], is_static=True)
    cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')])
    # Static member describing which backend implements the type.
    cls.add_static_attribute('implementation', 'ns3::int64x64_t::impl_type const', is_const=True)
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register bindings for the abstract ns3::Chunk base class (chunk.h, module 'network')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    # uint32_t Deserialize(Buffer::Iterator) -- pure virtual.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    # static TypeId GetTypeId().
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Print(std::ostream&) const -- pure virtual.
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Header_methods(root_module, cls):
    """Register bindings for the abstract ns3::Header base class (header.h, module 'network')."""
    cls.add_output_stream_operator()
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    # uint32_t Deserialize(Buffer::Iterator) -- pure virtual.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    # uint32_t GetSerializedSize() const -- pure virtual.
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # static TypeId GetTypeId().
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Print(std::ostream&) const -- pure virtual.
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # void Serialize(Buffer::Iterator) const -- pure virtual.
    cls.add_method('Serialize', 'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Object_methods(root_module, cls):
    """Register bindings for ns3::Object (object.h, module 'core')."""
    cls.add_constructor([])
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    cls.add_method('Dispose', 'void', [])
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Initialize', 'void', [])
    # The copy constructor is protected in C++; mirror that here.
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    # Protected virtual lifecycle hooks, all void with no arguments.
    for hook in ('DoDispose', 'DoInitialize', 'NotifyNewAggregate'):
        cls.add_method(hook, 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register bindings for ns3::Object::AggregateIterator (object.h, module 'core')."""
    # Copy constructor first (matching the generated registration order), then default.
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    cls.add_constructor([])
    # bool HasNext() const.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # Ptr<Object const> Next().
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3PacketFilter_methods(root_module, cls):
    """Register bindings for ns3::PacketFilter (packet-filter.h, module 'traffic-control')."""
    cls.add_constructor([param('ns3::PacketFilter const &', 'arg0')])
    cls.add_constructor([])
    # int32_t Classify(Ptr<QueueDiscItem>) const.
    cls.add_method('Classify', 'int32_t',
                   [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Static sentinel returned when no filter matches.
    cls.add_static_attribute('PF_NO_MATCH', 'int const', is_const=True)
    # Pure-virtual private hooks implemented by concrete filters.
    cls.add_method('CheckProtocol', 'bool',
                   [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    cls.add_method('DoClassify', 'int32_t',
                   [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3Queue_methods(root_module, cls):
    """Register bindings for ns3::Queue (queue.h, module 'network')."""
    cls.add_constructor([param('ns3::Queue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Dequeue', 'ns3::Ptr< ns3::QueueItem >', [])
    cls.add_method('DequeueAll', 'void', [])
    cls.add_method('Enqueue', 'bool', [param('ns3::Ptr< ns3::QueueItem >', 'item')])
    cls.add_method('GetMaxBytes', 'uint32_t', [], is_const=True)
    cls.add_method('GetMaxPackets', 'uint32_t', [], is_const=True)
    cls.add_method('GetMode', 'ns3::Queue::QueueMode', [], is_const=True)
    # Size and statistics accessors; every one returns uint32_t and is const.
    for getter in ('GetNBytes', 'GetNPackets', 'GetTotalDroppedBytes',
                   'GetTotalDroppedPackets', 'GetTotalReceivedBytes',
                   'GetTotalReceivedPackets'):
        cls.add_method(getter, 'uint32_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('IsEmpty', 'bool', [], is_const=True)
    cls.add_method('Peek', 'ns3::Ptr< ns3::QueueItem const >', [], is_const=True)
    cls.add_method('ResetStatistics', 'void', [])
    cls.add_method('SetMaxBytes', 'void', [param('uint32_t', 'maxBytes')])
    cls.add_method('SetMaxPackets', 'void', [param('uint32_t', 'maxPackets')])
    cls.add_method('SetMode', 'void', [param('ns3::Queue::QueueMode', 'mode')])
    # Protected drop hook available to subclasses.
    cls.add_method('Drop', 'void', [param('ns3::Ptr< ns3::Packet >', 'p')],
                   visibility='protected')
    # Pure-virtual private hooks implemented by concrete queue types.
    cls.add_method('DoDequeue', 'ns3::Ptr< ns3::QueueItem >', [],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    cls.add_method('DoEnqueue', 'bool',
                   [param('ns3::Ptr< ns3::QueueItem >', 'item')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    cls.add_method('DoPeek', 'ns3::Ptr< ns3::QueueItem const >', [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3QueueDisc_methods(root_module, cls):
    """Register bindings for ns3::QueueDisc (queue-disc.h, module 'traffic-control')."""
    cls.add_constructor([param('ns3::QueueDisc const &', 'arg0')])
    cls.add_constructor([])
    # Child-component registration helpers.
    cls.add_method('AddInternalQueue', 'void', [param('ns3::Ptr< ns3::Queue >', 'queue')])
    cls.add_method('AddPacketFilter', 'void', [param('ns3::Ptr< ns3::PacketFilter >', 'filter')])
    cls.add_method('AddQueueDiscClass', 'void', [param('ns3::Ptr< ns3::QueueDiscClass >', 'qdClass')])
    cls.add_method('Classify', 'int32_t', [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')])
    cls.add_method('Dequeue', 'ns3::Ptr< ns3::QueueDiscItem >', [])
    cls.add_method('Enqueue', 'bool', [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')])
    cls.add_method('GetInternalQueue', 'ns3::Ptr< ns3::Queue >',
                   [param('uint32_t', 'i')], is_const=True)
    # Counting accessors; each returns uint32_t and is const.
    for counter in ('GetNBytes', 'GetNInternalQueues', 'GetNPacketFilters',
                    'GetNPackets', 'GetNQueueDiscClasses'):
        cls.add_method(counter, 'uint32_t', [], is_const=True)
    cls.add_method('GetNetDevice', 'ns3::Ptr< ns3::NetDevice >', [], is_const=True)
    cls.add_method('GetPacketFilter', 'ns3::Ptr< ns3::PacketFilter >',
                   [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetQueueDiscClass', 'ns3::Ptr< ns3::QueueDiscClass >',
                   [param('uint32_t', 'i')], is_const=True)
    # uint32_t GetQuota() const -- virtual, overridable by subclasses.
    cls.add_method('GetQuota', 'uint32_t', [], is_const=True, is_virtual=True)
    # Aggregate statistics accessors; each returns uint32_t and is const.
    for stat in ('GetTotalDroppedBytes', 'GetTotalDroppedPackets',
                 'GetTotalReceivedBytes', 'GetTotalReceivedPackets',
                 'GetTotalRequeuedBytes', 'GetTotalRequeuedPackets'):
        cls.add_method(stat, 'uint32_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('GetWakeMode', 'ns3::QueueDisc::WakeMode', [])
    cls.add_method('Peek', 'ns3::Ptr< ns3::QueueDiscItem const >', [], is_const=True)
    cls.add_method('Run', 'void', [])
    cls.add_method('SetNetDevice', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    cls.add_method('SetQuota', 'void', [param('uint32_t const', 'quota')], is_virtual=True)
    # Protected lifecycle overrides and the drop hook.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('Drop', 'void', [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   visibility='protected')
    # Pure-virtual private hooks implemented by concrete queue discs.
    cls.add_method('CheckConfig', 'bool', [],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    cls.add_method('DoDequeue', 'ns3::Ptr< ns3::QueueDiscItem >', [],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    cls.add_method('DoEnqueue', 'bool',
                   [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    cls.add_method('DoPeek', 'ns3::Ptr< ns3::QueueDiscItem const >', [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    cls.add_method('InitializeParams', 'void', [],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return
def register_Ns3QueueDiscClass_methods(root_module, cls):
    """Register bindings for ns3::QueueDiscClass (queue-disc.h, module 'traffic-control')."""
    cls.add_constructor([param('ns3::QueueDiscClass const &', 'arg0')])
    cls.add_constructor([])
    # Accessor / mutator for the attached queue disc.
    cls.add_method('GetQueueDisc', 'ns3::Ptr< ns3::QueueDisc >', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetQueueDisc', 'void', [param('ns3::Ptr< ns3::QueueDisc >', 'qd')])
    # Protected lifecycle override.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3RandomVariableStream_methods(root_module, cls):
    """Register bindings for ns3::RandomVariableStream (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Stream-number accessor pair.
    cls.add_method('SetStream', 'void', [param('int64_t', 'stream')])
    cls.add_method('GetStream', 'int64_t', [], is_const=True)
    # Antithetic-variate flag accessor pair.
    cls.add_method('SetAntithetic', 'void', [param('bool', 'isAntithetic')])
    cls.add_method('IsAntithetic', 'bool', [], is_const=True)
    # Pure-virtual draws implemented by concrete distributions.
    cls.add_method('GetValue', 'double', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_pure_virtual=True, is_virtual=True)
    # Protected access to the underlying RngStream.
    cls.add_method('Peek', 'ns3::RngStream *', [], is_const=True, visibility='protected')
    return
def register_Ns3RedQueueDisc_methods(root_module, cls):
    """Register bindings for ns3::RedQueueDisc (red-queue-disc.h, module 'traffic-control')."""
    cls.add_constructor([param('ns3::RedQueueDisc const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('AssignStreams', 'int64_t', [param('int64_t', 'stream')])
    # Adaptive-RED parameter getters.
    cls.add_method('GetAredAlpha', 'double', [])
    cls.add_method('GetAredBeta', 'double', [])
    cls.add_method('GetMode', 'ns3::Queue::QueueMode', [])
    cls.add_method('GetQueueSize', 'uint32_t', [])
    cls.add_method('GetStats', 'ns3::RedQueueDisc::Stats', [])
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Adaptive-RED parameter setters and queue configuration.
    cls.add_method('SetAredAlpha', 'void', [param('double', 'alpha')])
    cls.add_method('SetAredBeta', 'void', [param('double', 'beta')])
    cls.add_method('SetMode', 'void', [param('ns3::Queue::QueueMode', 'mode')])
    cls.add_method('SetQueueLimit', 'void', [param('uint32_t', 'lim')])
    cls.add_method('SetTh', 'void', [param('double', 'minTh'), param('double', 'maxTh')])
    # Protected lifecycle override.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    # Private virtual overrides of the QueueDisc hooks (concrete here, so not pure).
    cls.add_method('CheckConfig', 'bool', [], visibility='private', is_virtual=True)
    cls.add_method('DoDequeue', 'ns3::Ptr< ns3::QueueDiscItem >', [],
                   visibility='private', is_virtual=True)
    cls.add_method('DoEnqueue', 'bool',
                   [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   visibility='private', is_virtual=True)
    cls.add_method('DoPeek', 'ns3::Ptr< ns3::QueueDiscItem const >', [],
                   is_const=True, visibility='private', is_virtual=True)
    cls.add_method('InitializeParams', 'void', [], visibility='private', is_virtual=True)
    return
def register_Ns3RedQueueDiscStats_methods(root_module, cls):
    """Register bindings for ns3::RedQueueDisc::Stats (red-queue-disc.h, module 'traffic-control')."""
    # Default constructor, then the copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::RedQueueDisc::Stats const &', 'arg0')])
    # Drop counters; all uint32_t and writable from Python (is_const=False).
    for counter in ('forcedDrop', 'qLimDrop', 'unforcedDrop'):
        cls.add_instance_attribute(counter, 'uint32_t', is_const=False)
    return
def register_Ns3SequentialRandomVariable_methods(root_module, cls):
    """Bind ns3::SequentialRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])  # SequentialRandomVariable()
    # Const accessors for the sequence parameters.
    cls.add_method('GetMin', 'double', [], is_const=True)
    cls.add_method('GetMax', 'double', [], is_const=True)
    cls.add_method('GetIncrement', 'ns3::Ptr< ns3::RandomVariableStream >', [], is_const=True)
    cls.add_method('GetConsecutive', 'uint32_t', [], is_const=True)
    # Virtual draws inherited from RandomVariableStream.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::AttributeAccessor> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::AttributeChecker> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::AttributeValue> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::CallbackImplBase> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::EventImpl> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::Hash::Implementation> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3NetDeviceQueue_Ns3Empty_Ns3DefaultDeleter__lt__ns3NetDeviceQueue__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::NetDeviceQueue> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NetDeviceQueue, ns3::empty, ns3::DefaultDeleter< ns3::NetDeviceQueue > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::NixVector> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::Packet> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3QueueItem_Ns3Empty_Ns3DefaultDeleter__lt__ns3QueueItem__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::QueueItem> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::QueueItem, ns3::empty, ns3::DefaultDeleter< ns3::QueueItem > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Bind SimpleRefCount<ns3::TraceSourceAccessor> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])  # copy constructor
    cls.add_method('Cleanup', 'void', [], is_static=True)  # static Cleanup()
    return
def register_Ns3Time_methods(root_module, cls):
    """Bind ns3::Time (nstime.h, module 'core'): operators, constructors, accessors."""
    # Arithmetic operators; * and / take an int64_t scalar, + and - another Time.
    cls.add_binary_numeric_operator('*', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', u'right'))
    cls.add_binary_numeric_operator('/', root_module['ns3::Time'], root_module['ns3::Time'], param('int64_t const &', u'right'))
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', u'right'))
    cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', u'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    # Constructors: default, copy, every integer/float width, int64x64_t, and string.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time const &', 'o')])
    cls.add_constructor([param('double', 'v')])
    cls.add_constructor([param('int', 'v')])
    cls.add_constructor([param('long int', 'v')])
    cls.add_constructor([param('long long int', 'v')])
    cls.add_constructor([param('unsigned int', 'v')])
    cls.add_constructor([param('long unsigned int', 'v')])
    cls.add_constructor([param('long long unsigned int', 'v')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'v')])
    cls.add_constructor([param('std::string const &', 's')])
    # Unit-aware views and comparison.
    cls.add_method('As', 'ns3::TimeWithUnit', [param('ns3::Time::Unit const', 'unit')], is_const=True)
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    # Static factory functions.
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    cls.add_method('FromDouble', 'ns3::Time', [param('double', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    cls.add_method('FromInteger', 'ns3::Time', [param('uint64_t', 'value'), param('ns3::Time::Unit', 'unit')], is_static=True)
    # Const accessors returning the value in a fixed unit.
    cls.add_method('GetDays', 'double', [], is_const=True)
    cls.add_method('GetDouble', 'double', [], is_const=True)
    cls.add_method('GetFemtoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetHours', 'double', [], is_const=True)
    cls.add_method('GetInteger', 'int64_t', [], is_const=True)
    cls.add_method('GetMicroSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetMilliSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetMinutes', 'double', [], is_const=True)
    cls.add_method('GetNanoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetPicoSeconds', 'int64_t', [], is_const=True)
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    cls.add_method('GetYears', 'double', [], is_const=True)
    # Sign/zero predicates.
    cls.add_method('IsNegative', 'bool', [], is_const=True)
    cls.add_method('IsPositive', 'bool', [], is_const=True)
    cls.add_method('IsStrictlyNegative', 'bool', [], is_const=True)
    cls.add_method('IsStrictlyPositive', 'bool', [], is_const=True)
    cls.add_method('IsZero', 'bool', [], is_const=True)
    # Static extrema, resolution control, and one-time init.
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    # Unit conversions.
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'unit')], is_const=True)
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'unit')], is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Bind ns3::TraceSourceAccessor (trace-source-accessor.h, module 'core')."""
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])  # copy constructor
    cls.add_constructor([])  # default constructor
    # Pure-virtual connect/disconnect interface; obj pointers are borrowed.
    cls.add_method('Connect', 'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('ConnectWithoutContext', 'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Disconnect', 'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('DisconnectWithoutContext', 'bool',
                   [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TracedValue__Ns3Time_methods(root_module, cls):
    """Bind ns3::TracedValue<ns3::Time> (traced-value.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::TracedValue< ns3::Time > const &', 'o')])  # copy constructor
    cls.add_constructor([param('ns3::Time const &', 'v')])  # from a Time value
    # Trace-callback wiring.
    cls.add_method('Connect', 'void', [param('ns3::CallbackBase const &', 'cb'), param('std::string', 'path')])
    cls.add_method('ConnectWithoutContext', 'void', [param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('Disconnect', 'void', [param('ns3::CallbackBase const &', 'cb'), param('std::string', 'path')])
    cls.add_method('DisconnectWithoutContext', 'void', [param('ns3::CallbackBase const &', 'cb')])
    # Value access.
    cls.add_method('Get', 'ns3::Time', [], is_const=True)
    cls.add_method('Set', 'void', [param('ns3::Time const &', 'v')])
    return
def register_Ns3TrafficControlLayer_methods(root_module, cls):
    """Bind ns3::TrafficControlLayer (traffic-control-layer.h, module 'traffic-control')."""
    cls.add_constructor([])  # TrafficControlLayer()
    # Root queue-disc management per NetDevice.
    cls.add_method('DeleteRootQueueDiscOnDevice', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetRootQueueDiscOnDevice', 'ns3::Ptr< ns3::QueueDisc >',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Packet path: inbound Receive and outbound Send.
    cls.add_method('Receive', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')],
                   is_virtual=True)
    cls.add_method('RegisterProtocolHandler', 'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device')])
    cls.add_method('Send', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   is_virtual=True)
    # Setup helpers.
    cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')])
    cls.add_method('SetRootQueueDiscOnDevice', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::QueueDisc >', 'qDisc')],
                   is_virtual=True)
    cls.add_method('SetupDevice', 'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    # Protected Object lifecycle hooks.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Bind ns3::Trailer (trailer.h, module 'network'): abstract serialization interface."""
    cls.add_output_stream_operator()
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])  # copy constructor
    # Pure-virtual serialization contract; Deserialize takes an end iterator.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'end')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TriangularRandomVariable_methods(root_module, cls):
    """Bind ns3::TriangularRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])  # TriangularRandomVariable()
    # Const accessors for the distribution parameters.
    cls.add_method('GetMean', 'double', [], is_const=True)
    cls.add_method('GetMin', 'double', [], is_const=True)
    cls.add_method('GetMax', 'double', [], is_const=True)
    # Parameterized draws.
    cls.add_method('GetValue', 'double',
                   [param('double', 'mean'), param('double', 'min'), param('double', 'max')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')])
    # Virtual draws inherited from RandomVariableStream.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3UniformRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::UniformRandomVariable (random-variable-stream.h, core)."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # Const getters for the range bounds, both returning double.
    for accessor in ('GetMin', 'GetMax'):
        cls.add_method(accessor, 'double', [], is_const=True)
    # Parameterized draws: GetValue(min, max) / GetInteger(min, max).
    cls.add_method('GetValue', 'double',
                   [param('double', 'min'), param('double', 'max')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'min'), param('uint32_t', 'max')])
    # Virtual no-argument draws overriding RandomVariableStream.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3WeibullRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::WeibullRandomVariable (random-variable-stream.h, core)."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # Const getters for the distribution parameters, all returning double.
    for accessor in ('GetScale', 'GetShape', 'GetBound'):
        cls.add_method(accessor, 'double', [], is_const=True)
    # Parameterized draws: GetValue(scale, shape, bound) / GetInteger(scale, shape, bound).
    cls.add_method('GetValue', 'double',
                   [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    # Virtual no-argument draws overriding RandomVariableStream.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3ZetaRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::ZetaRandomVariable (random-variable-stream.h, core)."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # double GetAlpha() const -- the distribution parameter.
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    # Parameterized draws: GetValue(alpha) / GetInteger(alpha).
    cls.add_method('GetValue', 'double', [param('double', 'alpha')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha')])
    # Virtual no-argument draws overriding RandomVariableStream.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3ZipfRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::ZipfRandomVariable (random-variable-stream.h, core)."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # Const getters: uint32_t GetN() and double GetAlpha().
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    # Parameterized draws: GetValue(n, alpha) / GetInteger(n, alpha).
    cls.add_method('GetValue', 'double',
                   [param('uint32_t', 'n'), param('double', 'alpha')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'n'), param('uint32_t', 'alpha')])
    # Virtual no-argument draws overriding RandomVariableStream.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register bindings for ns3::AttributeAccessor (attribute.h, core)."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    cls.add_constructor([])
    # bool Get(ObjectBase const *, AttributeValue &) const -- pure virtual.
    cls.add_method('Get', 'bool',
                   [param('ns3::ObjectBase const *', 'object'),
                    param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # Capability probes: HasGetter() / HasSetter() -- pure virtual const.
    for probe in ('HasGetter', 'HasSetter'):
        cls.add_method(probe, 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool Set(ObjectBase *, AttributeValue const &) const -- pure virtual;
    # the object pointer is borrowed, not owned.
    cls.add_method('Set', 'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False),
                    param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register bindings for ns3::AttributeChecker (attribute.h, core)."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    cls.add_constructor([])
    # bool Check(AttributeValue const &) const -- pure virtual.
    cls.add_method('Check', 'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool Copy(source, destination) const -- pure virtual.
    cls.add_method('Copy', 'bool',
                   [param('ns3::AttributeValue const &', 'source'),
                    param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # Ptr<AttributeValue> Create() const -- pure virtual factory.
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # Ptr<AttributeValue> CreateValidValue(AttributeValue const &) const -- non-virtual.
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    # Pure-virtual const string queries on the checked type.
    for query in ('GetUnderlyingTypeInformation', 'GetValueTypeName'):
        cls.add_method(query, 'std::string', [],
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool HasUnderlyingTypeInformation() const -- pure virtual.
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register bindings for ns3::AttributeValue (attribute.h, core)."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    cls.add_constructor([])
    # Ptr<AttributeValue> Copy() const -- pure virtual.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool DeserializeFromString(string, Ptr<AttributeChecker const>) -- pure virtual.
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    # string SerializeToString(Ptr<AttributeChecker const>) const -- pure virtual.
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3BooleanChecker_methods(root_module, cls):
    """Register bindings for ns3::BooleanChecker (boolean.h, core): its two constructors."""
    # Default constructor followed by the copy constructor.
    for ctor_args in ([], [param('ns3::BooleanChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3BooleanValue_methods(root_module, cls):
    """Register bindings for ns3::BooleanValue (boolean.h, core)."""
    # operator<< support for printing the value.
    cls.add_output_stream_operator()
    # Constructors: copy, default, and from a plain bool.
    for ctor_args in ([param('ns3::BooleanValue const &', 'arg0')], [], [param('bool', 'value')]):
        cls.add_constructor(ctor_args)
    # Ptr<AttributeValue> Copy() const -- virtual override.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(string, Ptr<AttributeChecker const>) -- virtual override.
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # bool Get() const -- read the stored value.
    cls.add_method('Get', 'bool', [], is_const=True)
    # string SerializeToString(Ptr<AttributeChecker const>) const -- virtual override.
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(bool) -- overwrite the stored value.
    cls.add_method('Set', 'void', [param('bool', 'value')])
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register bindings for ns3::CallbackChecker (callback.h, core): its two constructors."""
    # Default constructor followed by the copy constructor.
    for ctor_args in ([], [param('ns3::CallbackChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register bindings for ns3::CallbackImplBase (callback.h, core)."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    # string GetTypeid() const -- pure virtual.
    cls.add_method('GetTypeid', 'std::string', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # bool IsEqual(Ptr<CallbackImplBase const>) const -- pure virtual.
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # static string Demangle(string const &) -- protected helper.
    cls.add_method('Demangle', 'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register bindings for ns3::CallbackValue (callback.h, core)."""
    # Constructors: copy, default, and from a CallbackBase.
    for ctor_args in ([param('ns3::CallbackValue const &', 'arg0')], [],
                      [param('ns3::CallbackBase const &', 'base')]):
        cls.add_constructor(ctor_args)
    # Ptr<AttributeValue> Copy() const -- virtual override.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(string, Ptr<AttributeChecker const>) -- virtual override.
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # string SerializeToString(Ptr<AttributeChecker const>) const -- virtual override.
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(CallbackBase) -- note: takes the base by value.
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3CoDelQueueDisc_methods(root_module, cls):
    """Register bindings for ns3::CoDelQueueDisc (codel-queue-disc.h, traffic-control)."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::CoDelQueueDisc const &', 'arg0')])
    cls.add_constructor([])
    # Non-const statistics/parameter getters.
    cls.add_method('GetDropCount', 'uint32_t', [])
    cls.add_method('GetDropNext', 'uint32_t', [])
    cls.add_method('GetDropOverLimit', 'uint32_t', [])
    cls.add_method('GetInterval', 'ns3::Time', [])
    cls.add_method('GetMode', 'ns3::Queue::QueueMode', [])
    cls.add_method('GetQueueSize', 'uint32_t', [])
    cls.add_method('GetTarget', 'ns3::Time', [])
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void SetMode(Queue::QueueMode)
    cls.add_method('SetMode', 'void', [param('ns3::Queue::QueueMode', 'mode')])
    # Private virtual QueueDisc hooks.
    cls.add_method('CheckConfig', 'bool', [],
                   visibility='private', is_virtual=True)
    cls.add_method('DoDequeue', 'ns3::Ptr< ns3::QueueDiscItem >', [],
                   visibility='private', is_virtual=True)
    cls.add_method('DoEnqueue', 'bool',
                   [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   visibility='private', is_virtual=True)
    cls.add_method('DoPeek', 'ns3::Ptr< ns3::QueueDiscItem const >', [],
                   is_const=True, visibility='private', is_virtual=True)
    cls.add_method('InitializeParams', 'void', [],
                   visibility='private', is_virtual=True)
    return
def register_Ns3ConstantRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::ConstantRandomVariable (random-variable-stream.h, core)."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # double GetConstant() const -- the fixed value.
    cls.add_method('GetConstant', 'double', [], is_const=True)
    # Parameterized draws: GetValue(constant) / GetInteger(constant).
    cls.add_method('GetValue', 'double', [param('double', 'constant')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'constant')])
    # Virtual no-argument draws overriding RandomVariableStream.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3DataRateChecker_methods(root_module, cls):
    """Register bindings for ns3::DataRateChecker (data-rate.h, network): its two constructors."""
    # Default constructor followed by the copy constructor.
    for ctor_args in ([], [param('ns3::DataRateChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3DataRateValue_methods(root_module, cls):
    """Register bindings for ns3::DataRateValue (data-rate.h, network)."""
    # Constructors: default, copy, and from a DataRate.
    for ctor_args in ([], [param('ns3::DataRateValue const &', 'arg0')],
                      [param('ns3::DataRate const &', 'value')]):
        cls.add_constructor(ctor_args)
    # Ptr<AttributeValue> Copy() const -- virtual override.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(string, Ptr<AttributeChecker const>) -- virtual override.
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # DataRate Get() const -- read the stored value.
    cls.add_method('Get', 'ns3::DataRate', [], is_const=True)
    # string SerializeToString(Ptr<AttributeChecker const>) const -- virtual override.
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(DataRate const &) -- overwrite the stored value.
    cls.add_method('Set', 'void', [param('ns3::DataRate const &', 'value')])
    return
def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::DeterministicRandomVariable (random-variable-stream.h, core)."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # void SetValueArray(double *, uint64_t) -- load the sequence to replay.
    cls.add_method('SetValueArray', 'void',
                   [param('double *', 'values'), param('uint64_t', 'length')])
    # Virtual no-argument draws overriding RandomVariableStream.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3DoubleValue_methods(root_module, cls):
    """Register bindings for ns3::DoubleValue (double.h, core)."""
    # Constructors: default, copy, and from a double.
    for ctor_args in ([], [param('ns3::DoubleValue const &', 'arg0')],
                      [param('double const &', 'value')]):
        cls.add_constructor(ctor_args)
    # Ptr<AttributeValue> Copy() const -- virtual override.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(string, Ptr<AttributeChecker const>) -- virtual override.
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # double Get() const -- read the stored value.
    cls.add_method('Get', 'double', [], is_const=True)
    # string SerializeToString(Ptr<AttributeChecker const>) const -- virtual override.
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(double const &) -- overwrite the stored value.
    cls.add_method('Set', 'void', [param('double const &', 'value')])
    return
def register_Ns3EmpiricalRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::EmpiricalRandomVariable (random-variable-stream.h, core)."""
    # Default constructor.
    cls.add_constructor([])
    # void CDF(double v, double c) -- add one point of the empirical CDF.
    cls.add_method('CDF', 'void', [param('double', 'v'), param('double', 'c')])
    # uint32_t GetInteger() -- virtual override.
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # double GetValue() -- virtual override.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    # double Interpolate(c1, c2, v1, v2, r) -- private virtual helper.
    cls.add_method('Interpolate', 'double',
                   [param('double', 'c1'), param('double', 'c2'),
                    param('double', 'v1'), param('double', 'v2'),
                    param('double', 'r')],
                   visibility='private', is_virtual=True)
    # void Validate() -- private virtual CDF sanity check.
    cls.add_method('Validate', 'void', [],
                   visibility='private', is_virtual=True)
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register bindings for ns3::EmptyAttributeValue (attribute.h, core)."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    cls.add_constructor([])
    # Ptr<AttributeValue> Copy() const -- private virtual override.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, visibility='private', is_virtual=True)
    # bool DeserializeFromString(string, Ptr<AttributeChecker const>) -- private virtual.
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    # string SerializeToString(Ptr<AttributeChecker const>) const -- private virtual.
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3EnumChecker_methods(root_module, cls):
    """Register bindings for ns3::EnumChecker (enum.h, core)."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::EnumChecker const &', 'arg0')])
    cls.add_constructor([])
    # void Add(int, string) / void AddDefault(int, string) -- register enum entries.
    for adder in ('Add', 'AddDefault'):
        cls.add_method(adder, 'void',
                       [param('int', 'value'), param('std::string', 'name')])
    # bool Check(AttributeValue const &) const -- virtual override.
    cls.add_method('Check', 'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    # bool Copy(src, dst) const -- virtual override.
    cls.add_method('Copy', 'bool',
                   [param('ns3::AttributeValue const &', 'src'),
                    param('ns3::AttributeValue &', 'dst')],
                   is_const=True, is_virtual=True)
    # Ptr<AttributeValue> Create() const -- virtual factory override.
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # Const string queries on the checked type -- virtual overrides.
    for query in ('GetUnderlyingTypeInformation', 'GetValueTypeName'):
        cls.add_method(query, 'std::string', [], is_const=True, is_virtual=True)
    # bool HasUnderlyingTypeInformation() const -- virtual override.
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [],
                   is_const=True, is_virtual=True)
    return
def register_Ns3EnumValue_methods(root_module, cls):
    """Register bindings for ns3::EnumValue (enum.h, core)."""
    # Constructors: copy, default, and from an int.
    for ctor_args in ([param('ns3::EnumValue const &', 'arg0')], [], [param('int', 'value')]):
        cls.add_constructor(ctor_args)
    # Ptr<AttributeValue> Copy() const -- virtual override.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(string, Ptr<AttributeChecker const>) -- virtual override.
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # int Get() const -- read the stored value.
    cls.add_method('Get', 'int', [], is_const=True)
    # string SerializeToString(Ptr<AttributeChecker const>) const -- virtual override.
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(int) -- overwrite the stored value.
    cls.add_method('Set', 'void', [param('int', 'value')])
    return
def register_Ns3ErlangRandomVariable_methods(root_module, cls):
    """Register constructor and member-function bindings of
    ns3::ErlangRandomVariable on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ErlangRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable::ErlangRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetK() const [member function]
    cls.add_method('GetK',
                   'uint32_t',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetLambda() const [member function]
    cls.add_method('GetLambda',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue(uint32_t k, double lambda) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('uint32_t', 'k'), param('double', 'lambda')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger(uint32_t k, uint32_t lambda) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'k'), param('uint32_t', 'lambda')])
    ## random-variable-stream.h (module 'core'): double ns3::ErlangRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ErlangRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
def register_Ns3EventImpl_methods(root_module, cls):
    """Register constructors and member-function bindings of ns3::EventImpl
    on *cls*.  Generated binding code; *root_module* is unused here."""
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
    cls.add_constructor([])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
    cls.add_method('Cancel',
                   'void',
                   [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
    cls.add_method('Invoke',
                   'void',
                   [])
    ## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
    cls.add_method('IsCancelled',
                   'bool',
                   [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
    # Notify is the only protected, pure-virtual hook of EventImpl.
    cls.add_method('Notify',
                   'void',
                   [],
                   is_pure_virtual=True, visibility='protected', is_virtual=True)
    return
def register_Ns3ExponentialRandomVariable_methods(root_module, cls):
    """Register constructor and member-function bindings of
    ns3::ExponentialRandomVariable on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ExponentialRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable::ExponentialRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue(double mean, double bound) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'mean'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger(uint32_t mean, uint32_t bound) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
def register_Ns3GammaRandomVariable_methods(root_module, cls):
    """Register constructor and member-function bindings of
    ns3::GammaRandomVariable on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::GammaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable::GammaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetBeta() const [member function]
    cls.add_method('GetBeta',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue(double alpha, double beta) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'alpha'), param('double', 'beta')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger(uint32_t alpha, uint32_t beta) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'alpha'), param('uint32_t', 'beta')])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
def register_Ns3IntegerValue_methods(root_module, cls):
    """Register constructors and member-function bindings of ns3::IntegerValue
    on *cls*.  Generated binding code; *root_module* is unused here."""
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue() [constructor]
    cls.add_constructor([])
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(ns3::IntegerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntegerValue const &', 'arg0')])
    ## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(int64_t const & value) [constructor]
    cls.add_constructor([param('int64_t const &', 'value')])
    ## integer.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::IntegerValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## integer.h (module 'core'): bool ns3::IntegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## integer.h (module 'core'): int64_t ns3::IntegerValue::Get() const [member function]
    cls.add_method('Get',
                   'int64_t',
                   [],
                   is_const=True)
    ## integer.h (module 'core'): std::string ns3::IntegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## integer.h (module 'core'): void ns3::IntegerValue::Set(int64_t const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('int64_t const &', 'value')])
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register the default and copy constructors of ns3::Ipv4AddressChecker
    on *cls*.  Generated binding code; *root_module* is unused here."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register constructors and member-function bindings of
    ns3::Ipv4AddressValue on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Register the default and copy constructors of ns3::Ipv4MaskChecker
    on *cls*.  Generated binding code; *root_module* is unused here."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Register constructors and member-function bindings of
    ns3::Ipv4MaskValue on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Mask',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register the default and copy constructors of ns3::Ipv6AddressChecker
    on *cls*.  Generated binding code; *root_module* is unused here."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register constructors and member-function bindings of
    ns3::Ipv6AddressValue on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register the default and copy constructors of ns3::Ipv6PrefixChecker
    on *cls*.  Generated binding code; *root_module* is unused here."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register constructors and member-function bindings of
    ns3::Ipv6PrefixValue on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv6Prefix',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
    """Register constructor and member-function bindings of
    ns3::LogNormalRandomVariable on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::LogNormalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable::LogNormalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetMu() const [member function]
    cls.add_method('GetMu',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetSigma() const [member function]
    cls.add_method('GetSigma',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue(double mu, double sigma) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'mu'), param('double', 'sigma')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger(uint32_t mu, uint32_t sigma) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'mu'), param('uint32_t', 'sigma')])
    ## random-variable-stream.h (module 'core'): double ns3::LogNormalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::LogNormalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Register the default and copy constructors of ns3::Mac48AddressChecker
    on *cls*.  Generated binding code; *root_module* is unused here."""
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker() [constructor]
    cls.add_constructor([])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker(ns3::Mac48AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])
    return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Register constructors and member-function bindings of
    ns3::Mac48AddressValue on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor]
    cls.add_constructor([])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor]
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    ## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Mac48Address',
                   [],
                   is_const=True)
    ## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Mac48Address const &', 'value')])
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register constructors and member-function bindings of ns3::NetDevice
    (the abstract network-device interface) on *cls*.  Most methods are
    pure virtual.  Generated binding code; *root_module* is unused here."""
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    ## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    # GetMulticast is overloaded: one Ipv4Address and one Ipv6Address variant.
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,const ns3::Address&,ns3::NetDevice::PacketType,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool,ns3::Ptr<ns3::NetDevice>,ns3::Ptr<const ns3::Packet>,short unsigned int,const ns3::Address&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3NetDeviceQueue_methods(root_module, cls):
    """Register constructors and member-function bindings of
    ns3::NetDeviceQueue on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## net-device.h (module 'network'): ns3::NetDeviceQueue::NetDeviceQueue(ns3::NetDeviceQueue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NetDeviceQueue const &', 'arg0')])
    ## net-device.h (module 'network'): ns3::NetDeviceQueue::NetDeviceQueue() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): bool ns3::NetDeviceQueue::HasWakeCallbackSet() const [member function]
    cls.add_method('HasWakeCallbackSet',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDeviceQueue::IsStopped() const [member function]
    cls.add_method('IsStopped',
                   'bool',
                   [],
                   is_const=True)
    ## net-device.h (module 'network'): void ns3::NetDeviceQueue::SetWakeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetWakeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDeviceQueue::Start() [member function]
    cls.add_method('Start',
                   'void',
                   [],
                   is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDeviceQueue::Stop() [member function]
    cls.add_method('Stop',
                   'void',
                   [],
                   is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDeviceQueue::Wake() [member function]
    cls.add_method('Wake',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3NetDeviceQueueInterface_methods(root_module, cls):
    """Register constructors and member-function bindings of
    ns3::NetDeviceQueueInterface on *cls*.  Generated binding code;
    *root_module* is unused here."""
    ## net-device.h (module 'network'): ns3::NetDeviceQueueInterface::NetDeviceQueueInterface(ns3::NetDeviceQueueInterface const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NetDeviceQueueInterface const &', 'arg0')])
    ## net-device.h (module 'network'): ns3::NetDeviceQueueInterface::NetDeviceQueueInterface() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): uint8_t ns3::NetDeviceQueueInterface::GetSelectedQueue(ns3::Ptr<ns3::QueueItem> item) const [member function]
    cls.add_method('GetSelectedQueue',
                   'uint8_t',
                   [param('ns3::Ptr< ns3::QueueItem >', 'item')],
                   is_const=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::NetDeviceQueue> ns3::NetDeviceQueueInterface::GetTxQueue(uint8_t i) const [member function]
    cls.add_method('GetTxQueue',
                   'ns3::Ptr< ns3::NetDeviceQueue >',
                   [param('uint8_t', 'i')],
                   is_const=True)
    ## net-device.h (module 'network'): uint8_t ns3::NetDeviceQueueInterface::GetTxQueuesN() const [member function]
    cls.add_method('GetTxQueuesN',
                   'uint8_t',
                   [],
                   is_const=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDeviceQueueInterface::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDeviceQueueInterface::IsQueueDiscInstalled() const [member function]
    cls.add_method('IsQueueDiscInstalled',
                   'bool',
                   [],
                   is_const=True)
    ## net-device.h (module 'network'): void ns3::NetDeviceQueueInterface::SetQueueDiscInstalled(bool installed) [member function]
    cls.add_method('SetQueueDiscInstalled',
                   'void',
                   [param('bool', 'installed')])
    ## net-device.h (module 'network'): void ns3::NetDeviceQueueInterface::SetSelectQueueCallback(ns3::Callback<unsigned char, ns3::Ptr<ns3::QueueItem>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
    cls.add_method('SetSelectQueueCallback',
                   'void',
                   [param('ns3::Callback< unsigned char, ns3::Ptr< ns3::QueueItem >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')])
    ## net-device.h (module 'network'): void ns3::NetDeviceQueueInterface::SetTxQueuesN(uint8_t numTxQueues) [member function]
    cls.add_method('SetTxQueuesN',
                   'void',
                   [param('uint8_t', 'numTxQueues')])
    ## net-device.h (module 'network'): void ns3::NetDeviceQueueInterface::DoDispose() [member function]
    # DoDispose is the only protected override registered for this class.
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3NixVector_methods(root_module, cls):
    """Register constructors, the output-stream operator and member-function
    bindings of ns3::NixVector on *cls*.  Generated binding code;
    *root_module* is unused here."""
    # Maps C++ operator<<(std::ostream&, ...) so Python str()/print work.
    cls.add_output_stream_operator()
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
    cls.add_constructor([])
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    ## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
    cls.add_method('AddNeighborIndex',
                   'void',
                   [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
    cls.add_method('BitCount',
                   'uint32_t',
                   [param('uint32_t', 'numberOfNeighbors')],
                   is_const=True)
    ## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
    cls.add_method('ExtractNeighborIndex',
                   'uint32_t',
                   [param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
    cls.add_method('GetRemainingBits',
                   'uint32_t',
                   [])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register Python bindings for ns3::Node on the class wrapper ``cls``.

    Adds constructors, device/application accessors, the protocol-handler and
    device-addition listener (un)registration callbacks, and the protected
    virtual DoDispose/DoInitialize hooks.
    """
    ## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    ## node.h (module 'network'): ns3::Node::Node() [constructor]
    cls.add_constructor([])
    ## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
    cls.add_constructor([param('uint32_t', 'systemId')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
    cls.add_method('AddApplication',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::Application >', 'application')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddDevice',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
    cls.add_method('ChecksumEnabled',
                   'bool',
                   [],
                   is_static=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
    cls.add_method('GetApplication',
                   'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
    cls.add_method('GetId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): ns3::Time ns3::Node::GetLocalTime() const [member function]
    cls.add_method('GetLocalTime',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
    cls.add_method('GetNApplications',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
    cls.add_method('GetNDevices',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
    cls.add_method('RegisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
    cls.add_method('RegisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
    ## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
    cls.add_method('UnregisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
    cls.add_method('UnregisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
    ## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## node.h (module 'network'): void ns3::Node::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3NormalRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::NormalRandomVariable on ``cls``.

    Exposes the INFINITE_VALUE static attribute, the default constructor,
    distribution-parameter getters, and both the parameterized and virtual
    no-argument GetValue/GetInteger overloads.
    """
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::INFINITE_VALUE [variable]
    cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True)
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::NormalRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable::NormalRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetVariance() const [member function]
    cls.add_method('GetVariance',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue(double mean, double variance, double bound=ns3::NormalRandomVariable::INFINITE_VALUE) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'mean'), param('double', 'variance'), param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger(uint32_t mean, uint32_t variance, uint32_t bound) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'variance'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::NormalRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::NormalRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectFactoryChecker on ``cls``.

    Only the default and copy constructors are exposed.
    """
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
    return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectFactoryValue on ``cls``.

    Standard AttributeValue surface: constructors, Copy, Get/Set, and the
    string (de)serialization virtuals.
    """
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
    cls.add_constructor([])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
    cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
    ## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::ObjectFactory',
                   [],
                   is_const=True)
    ## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::ObjectFactory const &', 'value')])
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register Python bindings for ns3::Packet on the class wrapper ``cls``.

    Exposes the full Packet API: constructors, header/trailer and tag
    manipulation, fragment/copy operations, (de)serialization, NixVector
    accessors, and the printing helpers.
    """
    cls.add_output_stream_operator()
    ## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
    cls.add_constructor([])
    ## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
    cls.add_constructor([param('ns3::Packet const &', 'o')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
    cls.add_constructor([param('uint32_t', 'size')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'packet')])
    ## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddByteTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
    cls.add_method('AddHeader',
                   'void',
                   [param('ns3::Header const &', 'header')])
    ## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddPacketTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
    cls.add_method('AddPaddingAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
    cls.add_method('AddTrailer',
                   'void',
                   [param('ns3::Trailer const &', 'trailer')])
    ## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
    cls.add_method('BeginItem',
                   'ns3::PacketMetadata::ItemIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
    cls.add_method('EnableChecking',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
    cls.add_method('EnablePrinting',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
    cls.add_method('FindFirstMatchingByteTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
    cls.add_method('GetByteTagIterator',
                   'ns3::ByteTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
    cls.add_method('GetNixVector',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
    cls.add_method('GetPacketTagIterator',
                   'ns3::PacketTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint64_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')],
                   is_const=True)
    ## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
    cls.add_method('PeekPacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('PeekTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
    cls.add_method('PrintByteTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
    cls.add_method('PrintPacketTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
    cls.add_method('RemoveAllByteTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
    cls.add_method('RemoveAllPacketTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')])
    ## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('RemovePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('RemoveTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('ReplacePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function]
    cls.add_method('SetNixVector',
                   'void',
                   [param('ns3::Ptr< ns3::NixVector >', 'nixVector')])
    ## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function]
    cls.add_method('ToString',
                   'std::string',
                   [],
                   is_const=True)
    return
def register_Ns3ParetoRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::ParetoRandomVariable on ``cls``.

    Exposes GetTypeId, the default constructor, distribution-parameter
    getters, and both the parameterized and virtual no-argument
    GetValue/GetInteger overloads.
    """
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ParetoRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable::ParetoRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetShape() const [member function]
    cls.add_method('GetShape',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue(double mean, double shape, double bound) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'mean'), param('double', 'shape'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger(uint32_t mean, uint32_t shape, uint32_t bound) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::ParetoRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ParetoRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
def register_Ns3PfifoFastQueueDisc_methods(root_module, cls):
    """Register Python bindings for ns3::PfifoFastQueueDisc on ``cls``.

    Exposes constructors, GetTypeId, and the private virtual queue-disc
    hooks (CheckConfig, DoDequeue, DoEnqueue, DoPeek, InitializeParams).
    """
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): ns3::PfifoFastQueueDisc::PfifoFastQueueDisc(ns3::PfifoFastQueueDisc const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::PfifoFastQueueDisc const &', 'arg0')])
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): ns3::PfifoFastQueueDisc::PfifoFastQueueDisc() [constructor]
    cls.add_constructor([])
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): static ns3::TypeId ns3::PfifoFastQueueDisc::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): bool ns3::PfifoFastQueueDisc::CheckConfig() [member function]
    cls.add_method('CheckConfig',
                   'bool',
                   [],
                   visibility='private', is_virtual=True)
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): ns3::Ptr<ns3::QueueDiscItem> ns3::PfifoFastQueueDisc::DoDequeue() [member function]
    cls.add_method('DoDequeue',
                   'ns3::Ptr< ns3::QueueDiscItem >',
                   [],
                   visibility='private', is_virtual=True)
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): bool ns3::PfifoFastQueueDisc::DoEnqueue(ns3::Ptr<ns3::QueueDiscItem> item) [member function]
    cls.add_method('DoEnqueue',
                   'bool',
                   [param('ns3::Ptr< ns3::QueueDiscItem >', 'item')],
                   visibility='private', is_virtual=True)
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): ns3::Ptr<const ns3::QueueDiscItem> ns3::PfifoFastQueueDisc::DoPeek() const [member function]
    cls.add_method('DoPeek',
                   'ns3::Ptr< ns3::QueueDiscItem const >',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## pfifo-fast-queue-disc.h (module 'traffic-control'): void ns3::PfifoFastQueueDisc::InitializeParams() [member function]
    cls.add_method('InitializeParams',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    return
def register_Ns3QueueItem_methods(root_module, cls):
    """Register Python bindings for ns3::QueueItem on the class wrapper ``cls``.

    Adds the output stream operator, the packet-wrapping constructor, packet
    accessors, and the virtual Print method.
    """
    cls.add_output_stream_operator()
    ## net-device.h (module 'network'): ns3::QueueItem::QueueItem(ns3::Ptr<ns3::Packet> p) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Packet >', 'p')])
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::QueueItem::GetPacket() const [member function]
    cls.add_method('GetPacket',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## net-device.h (module 'network'): uint32_t ns3::QueueItem::GetPacketSize() const [member function]
    cls.add_method('GetPacketSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::QueueItem::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3StringChecker_methods(root_module, cls):
    """Register Python bindings for ns3::StringChecker on ``cls``.

    Only the default and copy constructors are exposed.
    """
    ## string.h (module 'core'): ns3::StringChecker::StringChecker() [constructor]
    cls.add_constructor([])
    ## string.h (module 'core'): ns3::StringChecker::StringChecker(ns3::StringChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::StringChecker const &', 'arg0')])
    return
def register_Ns3StringValue_methods(root_module, cls):
    """Register Python bindings for ns3::StringValue on ``cls``.

    Standard AttributeValue surface: constructors, Copy, Get/Set, and the
    string (de)serialization virtuals.
    """
    ## string.h (module 'core'): ns3::StringValue::StringValue() [constructor]
    cls.add_constructor([])
    ## string.h (module 'core'): ns3::StringValue::StringValue(ns3::StringValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::StringValue const &', 'arg0')])
    ## string.h (module 'core'): ns3::StringValue::StringValue(std::string const & value) [constructor]
    cls.add_constructor([param('std::string const &', 'value')])
    ## string.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::StringValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## string.h (module 'core'): bool ns3::StringValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## string.h (module 'core'): std::string ns3::StringValue::Get() const [member function]
    cls.add_method('Get',
                   'std::string',
                   [],
                   is_const=True)
    ## string.h (module 'core'): std::string ns3::StringValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## string.h (module 'core'): void ns3::StringValue::Set(std::string const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('std::string const &', 'value')])
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Register Python bindings for ns3::TimeValue on ``cls``.

    Standard AttributeValue surface: constructors, Copy, Get/Set, and the
    string (de)serialization virtuals.
    """
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
    cls.add_constructor([])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
    ## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
    cls.add_constructor([param('ns3::Time const &', 'value')])
    ## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Time const &', 'value')])
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register Python bindings for ns3::TypeIdChecker on ``cls``.

    Only the default and copy constructors are exposed.
    """
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register Python bindings for ns3::TypeIdValue on ``cls``.

    Standard AttributeValue surface: constructors, Copy, Get/Set, and the
    string (de)serialization virtuals.
    """
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
    cls.add_constructor([param('ns3::TypeId const &', 'value')])
    ## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    ## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3UintegerValue_methods(root_module, cls):
    """Register Python bindings for ns3::UintegerValue on ``cls``.

    Standard AttributeValue surface: constructors, Copy, Get/Set, and the
    string (de)serialization virtuals.
    """
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue() [constructor]
    cls.add_constructor([])
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(ns3::UintegerValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::UintegerValue const &', 'arg0')])
    ## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(uint64_t const & value) [constructor]
    cls.add_constructor([param('uint64_t const &', 'value')])
    ## uinteger.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::UintegerValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## uinteger.h (module 'core'): bool ns3::UintegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## uinteger.h (module 'core'): uint64_t ns3::UintegerValue::Get() const [member function]
    cls.add_method('Get',
                   'uint64_t',
                   [],
                   is_const=True)
    ## uinteger.h (module 'core'): std::string ns3::UintegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## uinteger.h (module 'core'): void ns3::UintegerValue::Set(uint64_t const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint64_t const &', 'value')])
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register Python bindings for ns3::AddressChecker on ``cls``.

    Only the default and copy constructors are exposed.
    """
    ## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register Python bindings for ns3::AddressValue on ``cls``.

    Standard AttributeValue surface: constructors, Copy, Get/Set, and the
    string (de)serialization virtuals.
    """
    ## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
    ## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
    cls.add_constructor([param('ns3::Address const &', 'value')])
    ## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Address',
                   [],
                   is_const=True)
    ## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Address const &', 'value')])
    return
def register_Ns3QueueDiscItem_methods(root_module, cls):
    """Register Python bindings for ns3::QueueDiscItem on ``cls``.

    Exposes the packet/address/protocol constructor, accessors for address,
    protocol and transmit-queue index, the pure-virtual AddHeader, and the
    virtual Print method.
    """
    ## queue-disc.h (module 'traffic-control'): ns3::QueueDiscItem::QueueDiscItem(ns3::Ptr<ns3::Packet> p, ns3::Address const & addr, uint16_t protocol) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Address const &', 'addr'), param('uint16_t', 'protocol')])
    ## queue-disc.h (module 'traffic-control'): ns3::Address ns3::QueueDiscItem::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_const=True)
    ## queue-disc.h (module 'traffic-control'): uint16_t ns3::QueueDiscItem::GetProtocol() const [member function]
    cls.add_method('GetProtocol',
                   'uint16_t',
                   [],
                   is_const=True)
    ## queue-disc.h (module 'traffic-control'): uint8_t ns3::QueueDiscItem::GetTxQueueIndex() const [member function]
    cls.add_method('GetTxQueueIndex',
                   'uint8_t',
                   [],
                   is_const=True)
    ## queue-disc.h (module 'traffic-control'): void ns3::QueueDiscItem::SetTxQueueIndex(uint8_t txq) [member function]
    cls.add_method('SetTxQueueIndex',
                   'void',
                   [param('uint8_t', 'txq')])
    ## queue-disc.h (module 'traffic-control'): void ns3::QueueDiscItem::AddHeader() [member function]
    cls.add_method('AddHeader',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## queue-disc.h (module 'traffic-control'): void ns3::QueueDiscItem::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register bindings for the abstract ns3::Hash::Implementation (hash-function.h, core)."""
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
    cls.add_constructor([])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Fnv1a (hash-fnv.h, core)."""
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
    cls.add_constructor([])
    ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash32, a 32-bit hash-pointer wrapper."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash64, a 64-bit hash-pointer wrapper."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Murmur3 (hash-murmur3.h, core)."""
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
    cls.add_constructor([])
    ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_functions(root_module):
    """Register free functions for every nested ns-3 namespace submodule."""
    module = root_module
    register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
    register_functions_ns3_Hash(module.get_submodule('Hash'), root_module)
    register_functions_ns3_TracedValueCallback(module.get_submodule('TracedValueCallback'), root_module)
    register_functions_ns3_internal(module.get_submodule('internal'), root_module)
    return
# Per-namespace hooks below are emitted even for namespaces that contribute
# no free functions, so that register_functions() can call them unconditionally.
def register_functions_ns3_FatalImpl(module, root_module):
    return
def register_functions_ns3_Hash(module, root_module):
    # Hash has a nested 'Function' namespace that gets its own hook.
    register_functions_ns3_Hash_Function(module.get_submodule('Function'), root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    return
def register_functions_ns3_TracedValueCallback(module, root_module):
    return
def register_functions_ns3_internal(module, root_module):
    return
def main():
    """Emit the generated binding code for the whole module tree to stdout."""
    out = FileCodeSink(sys.stdout)
    root_module = module_init()
    # Registration must run types -> methods -> functions, matching the
    # dependency order of the generated declarations.
    register_types(root_module)
    register_methods(root_module)
    register_functions(root_module)
    root_module.generate(out)
if __name__ == '__main__':
    main()
|
gpl-2.0
|
MotorolaMobilityLLC/external-chromium_org
|
chrome/test/functional/webrtc_call.py
|
29
|
8808
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import time
# This little construct ensures we can run even if we have a bad version of
# psutil installed. If so, we'll just skip the test that needs it.
_HAS_CORRECT_PSUTIL_VERSION = False
try:
import psutil
if 'version_info' in dir(psutil):
# If psutil has any version info at all, it's recent enough.
_HAS_CORRECT_PSUTIL_VERSION = True
except ImportError, e:
pass
# Note: pyauto_functional must come before pyauto.
import pyauto_functional
import pyauto
import pyauto_utils
import webrtc_test_base
class WebrtcCallTest(webrtc_test_base.WebrtcTestBase):
  """Test we can set up a WebRTC call and disconnect it.
  Prerequisites: This test case must run on a machine with a webcam, either
  fake or real, and with some kind of audio device. You must make the
  peerconnection_server target before you run.
  The test case will launch a custom binary
  (peerconnection_server) which will allow two WebRTC clients to find each
  other. For more details, see the source code which is available at the site
  http://code.google.com/p/libjingle/source/browse/ (make sure to browse to
  trunk/talk/examples/peerconnection/server).
  """
  def setUp(self):
    """Starts the peerconnection_server the tabs use to find each other."""
    pyauto.PyUITest.setUp(self)
    self.StartPeerConnectionServer()
  def tearDown(self):
    """Stops the server; fails the test on any browser errors or crashes."""
    self.StopPeerConnectionServer()
    pyauto.PyUITest.tearDown(self)
    self.assertEquals('', self.CheckErrorsAndCrashes())
  def _SimpleWebrtcCall(self, request_video, request_audio, duration_seconds=0):
    """Tests we can call and hang up with WebRTC.
    This test exercises pretty much the whole happy-case for the WebRTC
    JavaScript API. Currently, it exercises a normal call setup using the API
    defined at http://dev.w3.org/2011/webrtc/editor/webrtc.html. The API is
    still evolving.
    The test will load the supplied HTML file, which in turn will load different
    javascript files depending on which version of the signaling protocol
    we are running.
    The supplied HTML file will be loaded in two tabs and tell the web
    pages to start up WebRTC, which will acquire video and audio devices on the
    system. This will launch a dialog in Chrome which we click past using the
    automation controller. Then, we will order both tabs to connect the server,
    which will make the two tabs aware of each other. Once that is done we order
    one tab to call the other.
    We make sure that the javascript tells us that the call succeeded, lets it
    run for a while and try to hang up the call after that. We verify video is
    playing by using the video detector.
    Args:
      request_video: Whether to request video.
      request_audio: Whether to request audio.
      duration_seconds: The number of seconds to keep the call up before
        shutting it down.
    """
    self._SetupCall(request_video=request_video, request_audio=request_audio)
    if duration_seconds:
      print 'Call up: sleeping %d seconds...' % duration_seconds
      time.sleep(duration_seconds);
    # The hang-up will automatically propagate to the second tab.
    self.HangUp(from_tab_with_index=0)
    self.WaitUntilHangUpVerified(tab_index=1)
    self.Disconnect(tab_index=0)
    self.Disconnect(tab_index=1)
    # Ensure we didn't miss any errors.
    self.AssertNoFailures(tab_index=0)
    self.AssertNoFailures(tab_index=1)
  def testWebrtcCall(self):
    """Full audio+video call between two tabs."""
    self.LoadTestPageInTwoTabs()
    self._SimpleWebrtcCall(request_video=True, request_audio=True)
  def testWebrtcVideoOnlyCall(self):
    """Video-only call (no audio requested)."""
    self.LoadTestPageInTwoTabs()
    self._SimpleWebrtcCall(request_video=True, request_audio=False)
  def testWebrtcAudioOnlyCall(self):
    """Audio-only call (no video requested)."""
    self.LoadTestPageInTwoTabs()
    self._SimpleWebrtcCall(request_video=False, request_audio=True)
  def testWebrtcJsep01CallAndMeasureCpu20Seconds(self):
    """Measures renderer CPU and memory across a 20-second jsep01 call."""
    if not _HAS_CORRECT_PSUTIL_VERSION:
      print ('WARNING: Can not run cpu/mem measurements with this version of '
             'psutil. You must have at least psutil 0.4.1 installed for the '
             'version of python you are running this test with.')
      return
    self.LoadTestPageInTwoTabs(test_page='webrtc_jsep01_test.html')
    # Prepare CPU measurements.
    renderer_process = self._GetChromeRendererProcess(tab_index=0)
    # First get_cpu_percent() call primes psutil's interval measurement;
    # the post-call reading below is relative to this point.
    renderer_process.get_cpu_percent()
    self._SimpleWebrtcCall(request_video=True,
                           request_audio=True,
                           duration_seconds=20)
    cpu_usage = renderer_process.get_cpu_percent(interval=0)
    mem_usage_bytes = renderer_process.get_memory_info()[0]
    mem_usage_kb = float(mem_usage_bytes) / 1024
    pyauto_utils.PrintPerfResult('cpu', 'jsep01_call', cpu_usage, '%')
    pyauto_utils.PrintPerfResult('memory', 'jsep01_call', mem_usage_kb, 'KiB')
  def testLocalPreview(self):
    """Brings up a local preview and ensures video is playing.
    This test will launch a window with a single tab and run a getUserMedia call
    which will give us access to the webcam and microphone. Then the javascript
    code will hook up the webcam data to the local-view video tag. We will
    detect video in that tag using the video detector, and if we see video
    moving the test passes.
    """
    self.LoadTestPageInOneTab()
    self.assertEquals('ok-got-stream', self.GetUserMedia(tab_index=0))
    self._StartDetectingVideo(tab_index=0, video_element='local-view')
    self._WaitForVideo(tab_index=0, expect_playing=True)
  def testHandlesNewGetUserMediaRequestSeparately(self):
    """Ensures WebRTC doesn't allow new requests to piggy-back on old ones."""
    self.LoadTestPageInTwoTabs()
    self.GetUserMedia(tab_index=0)
    self.GetUserMedia(tab_index=1)
    self.Connect("user_1", tab_index=0)
    self.Connect("user_2", tab_index=1)
    self.CreatePeerConnection(tab_index=0)
    self.AddUserMediaLocalStream(tab_index=0)
    self.EstablishCall(from_tab_with_index=0, to_tab_with_index=1)
    # A fresh getUserMedia request must prompt again; cancelling/dismissing
    # it should report PERMISSION_DENIED even though a call is up.
    self.assertEquals('failed-with-error-PERMISSION_DENIED',
                      self.GetUserMedia(tab_index=0, action='cancel'))
    self.assertEquals('failed-with-error-PERMISSION_DENIED',
                      self.GetUserMedia(tab_index=0, action='dismiss'))
  def _SetupCall(self, request_video, request_audio):
    """Gets user media and establishes a call.
    Assumes that two tabs are already opened with a suitable test page.
    Args:
      request_video: Whether to request video.
      request_audio: Whether to request audio.
    """
    self.assertEquals('ok-got-stream', self.GetUserMedia(
        tab_index=0, request_video=request_video, request_audio=request_audio))
    self.assertEquals('ok-got-stream', self.GetUserMedia(
        tab_index=1, request_video=request_video, request_audio=request_audio))
    self.Connect('user_1', tab_index=0)
    self.Connect('user_2', tab_index=1)
    self.CreatePeerConnection(tab_index=0)
    self.AddUserMediaLocalStream(tab_index=0)
    self.EstablishCall(from_tab_with_index=0, to_tab_with_index=1)
    if request_video:
      self._StartDetectingVideo(tab_index=0, video_element='remote-view')
      self._StartDetectingVideo(tab_index=1, video_element='remote-view')
      self._WaitForVideo(tab_index=0, expect_playing=True)
      self._WaitForVideo(tab_index=1, expect_playing=True)
  def _StartDetectingVideo(self, tab_index, video_element):
    """Hooks the JS frame-buffer video detector up to video_element."""
    self.assertEquals('ok-started', self.ExecuteJavascript(
        'startDetection("%s", "frame-buffer", 320, 240)' % video_element,
        tab_index=tab_index));
  def _WaitForVideo(self, tab_index, expect_playing):
    """Polls the page until video is (or is not) playing, per expect_playing."""
    # TODO(phoglund): Remove this hack if we manage to get a more stable Linux
    # bot to run these tests.
    if self.IsLinux():
      print "Linux; pretending to wait for video..."
      time.sleep(1)
      return
    expect_retval='video-playing' if expect_playing else 'video-not-playing'
    video_playing = self.WaitUntil(
        function=lambda: self.ExecuteJavascript('isVideoPlaying()',
                                                tab_index=tab_index),
        expect_retval=expect_retval)
    self.assertTrue(video_playing,
                    msg= 'Timed out while waiting for isVideoPlaying to ' +
                         'return ' + expect_retval + '.')
  def _GetChromeRendererProcess(self, tab_index):
    """Returns the Chrome renderer process as a psutil process wrapper."""
    tab_info = self.GetBrowserInfo()['windows'][0]['tabs'][tab_index]
    renderer_id = tab_info['renderer_pid']
    if not renderer_id:
      self.fail('Can not find the tab renderer process.')
    return psutil.Process(renderer_id)
# Run under the pyauto functional test harness when invoked directly.
if __name__ == '__main__':
  pyauto_functional.Main()
|
bsd-3-clause
|
google/ffn
|
ffn/utils/vector_pb2.py
|
1
|
15524
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: utils/vector.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='utils/vector.proto',
package='ffn.proto',
syntax='proto2',
serialized_pb=_b('\n\x12utils/vector.proto\x12\tffn.proto\" \n\x08Vector2d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\" \n\x08Vector2i\x12\t\n\x01x\x18\x01 \x01(\x05\x12\t\n\x01y\x18\x02 \x01(\x05\"+\n\x08Vector3d\x12\t\n\x01x\x18\x01 \x01(\x01\x12\t\n\x01y\x18\x02 \x01(\x01\x12\t\n\x01z\x18\x03 \x01(\x01\"+\n\x08Vector3f\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\"+\n\x08Vector3j\x12\t\n\x01x\x18\x01 \x01(\x03\x12\t\n\x01y\x18\x02 \x01(\x03\x12\t\n\x01z\x18\x03 \x01(\x03\"4\n\x0cVector2dList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector2d\"4\n\x0cVector2iList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector2i\"4\n\x0cVector3dList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3d\"4\n\x0cVector3fList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3f\"4\n\x0cVector3jList\x12$\n\x07vectors\x18\x01 \x03(\x0b\x32\x13.ffn.proto.Vector3j')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_VECTOR2D = _descriptor.Descriptor(
name='Vector2d',
full_name='ffn.proto.Vector2d',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector2d.x', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector2d.y', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=33,
serialized_end=65,
)
_VECTOR2I = _descriptor.Descriptor(
name='Vector2i',
full_name='ffn.proto.Vector2i',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector2i.x', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector2i.y', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=67,
serialized_end=99,
)
_VECTOR3D = _descriptor.Descriptor(
name='Vector3d',
full_name='ffn.proto.Vector3d',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector3d.x', index=0,
number=1, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector3d.y', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='z', full_name='ffn.proto.Vector3d.z', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=101,
serialized_end=144,
)
_VECTOR3F = _descriptor.Descriptor(
name='Vector3f',
full_name='ffn.proto.Vector3f',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector3f.x', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector3f.y', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='z', full_name='ffn.proto.Vector3f.z', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=146,
serialized_end=189,
)
_VECTOR3J = _descriptor.Descriptor(
name='Vector3j',
full_name='ffn.proto.Vector3j',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='ffn.proto.Vector3j.x', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='ffn.proto.Vector3j.y', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='z', full_name='ffn.proto.Vector3j.z', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=191,
serialized_end=234,
)
_VECTOR2DLIST = _descriptor.Descriptor(
name='Vector2dList',
full_name='ffn.proto.Vector2dList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector2dList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=236,
serialized_end=288,
)
_VECTOR2ILIST = _descriptor.Descriptor(
name='Vector2iList',
full_name='ffn.proto.Vector2iList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector2iList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=290,
serialized_end=342,
)
_VECTOR3DLIST = _descriptor.Descriptor(
name='Vector3dList',
full_name='ffn.proto.Vector3dList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector3dList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=344,
serialized_end=396,
)
_VECTOR3FLIST = _descriptor.Descriptor(
name='Vector3fList',
full_name='ffn.proto.Vector3fList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector3fList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=398,
serialized_end=450,
)
_VECTOR3JLIST = _descriptor.Descriptor(
name='Vector3jList',
full_name='ffn.proto.Vector3jList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='vectors', full_name='ffn.proto.Vector3jList.vectors', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=452,
serialized_end=504,
)
_VECTOR2DLIST.fields_by_name['vectors'].message_type = _VECTOR2D
_VECTOR2ILIST.fields_by_name['vectors'].message_type = _VECTOR2I
_VECTOR3DLIST.fields_by_name['vectors'].message_type = _VECTOR3D
_VECTOR3FLIST.fields_by_name['vectors'].message_type = _VECTOR3F
_VECTOR3JLIST.fields_by_name['vectors'].message_type = _VECTOR3J
DESCRIPTOR.message_types_by_name['Vector2d'] = _VECTOR2D
DESCRIPTOR.message_types_by_name['Vector2i'] = _VECTOR2I
DESCRIPTOR.message_types_by_name['Vector3d'] = _VECTOR3D
DESCRIPTOR.message_types_by_name['Vector3f'] = _VECTOR3F
DESCRIPTOR.message_types_by_name['Vector3j'] = _VECTOR3J
DESCRIPTOR.message_types_by_name['Vector2dList'] = _VECTOR2DLIST
DESCRIPTOR.message_types_by_name['Vector2iList'] = _VECTOR2ILIST
DESCRIPTOR.message_types_by_name['Vector3dList'] = _VECTOR3DLIST
DESCRIPTOR.message_types_by_name['Vector3fList'] = _VECTOR3FLIST
DESCRIPTOR.message_types_by_name['Vector3jList'] = _VECTOR3JLIST
Vector2d = _reflection.GeneratedProtocolMessageType('Vector2d', (_message.Message,), dict(
DESCRIPTOR = _VECTOR2D,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector2d)
))
_sym_db.RegisterMessage(Vector2d)
Vector2i = _reflection.GeneratedProtocolMessageType('Vector2i', (_message.Message,), dict(
DESCRIPTOR = _VECTOR2I,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector2i)
))
_sym_db.RegisterMessage(Vector2i)
Vector3d = _reflection.GeneratedProtocolMessageType('Vector3d', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3D,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3d)
))
_sym_db.RegisterMessage(Vector3d)
Vector3f = _reflection.GeneratedProtocolMessageType('Vector3f', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3F,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3f)
))
_sym_db.RegisterMessage(Vector3f)
Vector3j = _reflection.GeneratedProtocolMessageType('Vector3j', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3J,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3j)
))
_sym_db.RegisterMessage(Vector3j)
Vector2dList = _reflection.GeneratedProtocolMessageType('Vector2dList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR2DLIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector2dList)
))
_sym_db.RegisterMessage(Vector2dList)
Vector2iList = _reflection.GeneratedProtocolMessageType('Vector2iList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR2ILIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector2iList)
))
_sym_db.RegisterMessage(Vector2iList)
Vector3dList = _reflection.GeneratedProtocolMessageType('Vector3dList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3DLIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3dList)
))
_sym_db.RegisterMessage(Vector3dList)
Vector3fList = _reflection.GeneratedProtocolMessageType('Vector3fList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3FLIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3fList)
))
_sym_db.RegisterMessage(Vector3fList)
Vector3jList = _reflection.GeneratedProtocolMessageType('Vector3jList', (_message.Message,), dict(
DESCRIPTOR = _VECTOR3JLIST,
__module__ = 'utils.vector_pb2'
# @@protoc_insertion_point(class_scope:ffn.proto.Vector3jList)
))
_sym_db.RegisterMessage(Vector3jList)
# @@protoc_insertion_point(module_scope)
|
apache-2.0
|
koppa/gr-air-modes
|
python/mlat_client.py
|
5
|
2900
|
#!/usr/bin/env python
#
# Copyright 2012 Nick Foster
#
# This file is part of gr-air-modes
#
# gr-air-modes is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# gr-air-modes is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with gr-air-modes; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
#multilateration client
#outputs stamps to server, receives multilaterated outputs back
import socket, pickle, time, sys
import air_modes
from gnuradio import gr
# Pickle protocol 0 (ASCII). NOTE(review): pickle_prot is never actually
# passed to pickle.dumps() in this file -- the default protocol is used;
# the commented alternative would select the most compact binary encoding.
pickle_prot = 0
#pickle_prot = pickle.HIGHEST_PROTOCOL
class client_info:
    """Handshake record pickled and sent to the mlat server on connect."""
    def __init__(self):
        self.name = ""  # client hostname (filled in by mlat_client)
        self.position = []  # receiver position; presumably [lat, lon(, alt)] -- confirm with server
        self.offset_secs = 0  # whole-second clock offset
        self.offset_frac_secs = 0.0  # fractional-second clock offset
        self.time_source = None  # "gpsdo" or None (see mlat_client.__init__)
class mlat_client:
    """Multilateration client.

    Sends timestamped reports upstream to the mlat server and receives
    multilaterated position lines back, pushing each complete line onto a
    GNU Radio message queue for the main application to consume.
    """
    def __init__(self, queue, position, server_addr, time_source):
        # queue: gr msg queue that receives complete server lines.
        self._queue = queue
        self._pos = position
        self._name = socket.gethostname()
        #connect to server
        self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._sock.setblocking(1)
        # 19005 is the mlat server's well-known listening port.
        self._sock.connect((server_addr, 19005))
        info = client_info()
        info.name = self._name
        info.position = self._pos
        info.time_source = time_source #"gpsdo" or None
        self._sock.send(pickle.dumps(info))
        reply = self._sock.recv(1024)
        if reply != "HELO": #i know, shut up
            raise Exception("Invalid reply from server: %s" % reply)
        # Switch to non-blocking: get_mlat_positions() polls from the GUI
        # update path and must never stall it.
        self._sock.setblocking(0)
        # Buffers a trailing partial line between recv() calls.
        self._remnant = None
    def __del__(self):
        # Best-effort close when the client object is garbage-collected.
        self._sock.close()
    #send a stamped report to the server
    def output(self, message):
        self._sock.send(message+"\n")
    #this is called from the update() method list of the main app thread
    def get_mlat_positions(self):
        """Drain pending server data; queue each complete newline-ended line."""
        msg = None
        try:
            msg = self._sock.recv(1024)
        except socket.error:
            # Non-blocking socket: nothing available right now.
            pass
        if msg:
            for line in msg.splitlines(True):
                if line.endswith("\n"):
                    # Prepend any partial line left over from the last read.
                    if self._remnant:
                        line = self._remnant + line
                        self._remnant = None
                    self._queue.insert_tail(gr.message_from_string(line))
                else:
                    # Only the final fragment of a recv() may be incomplete;
                    # two remnants in a row means the stream is corrupt.
                    if self._remnant is not None:
                        raise Exception("Malformed data: " + line)
                    else:
                        self._remnant = line
|
gpl-3.0
|
mikehulluk/ProcessManager
|
www/js/brython/Lib/test/test_weakset.py
|
23
|
15326
|
import unittest
from test import support
from weakref import proxy, ref, WeakSet
import operator
import copy
import string
import os
from random import randrange, shuffle
import sys
import warnings
import collections
from collections import UserString as ustr
import gc
import contextlib
class Foo:
    """Trivial instance type; exists only to be weakly referenced in tests."""
class RefCycle:
    # Instance that forms a reference cycle (self.cycle -> self), so it is
    # only reclaimed by the cyclic garbage collector, not by refcounting.
    def __init__(self):
        self.cycle = self
class TestWeakSet(unittest.TestCase):
    """Exercise weakref.WeakSet: construction, set algebra, in-place
    mutation, comparisons, and the interaction between iteration and
    garbage collection of weakly-held elements."""
    def setUp(self):
        # need to keep references to them
        self.items = [ustr(c) for c in ('a', 'b', 'c')]
        self.items2 = [ustr(c) for c in ('x', 'y', 'z')]
        self.ab_items = [ustr(c) for c in 'ab']
        self.abcde_items = [ustr(c) for c in 'abcde']
        self.def_items = [ustr(c) for c in 'def']
        self.ab_weakset = WeakSet(self.ab_items)
        self.abcde_weakset = WeakSet(self.abcde_items)
        self.def_weakset = WeakSet(self.def_items)
        self.letters = [ustr(c) for c in string.ascii_letters]
        self.s = WeakSet(self.items)
        self.d = dict.fromkeys(self.items)
        self.obj = ustr('F')
        self.fs = WeakSet([self.obj])
    def test_methods(self):
        # WeakSet should expose the same public API as the builtin set.
        weaksetmethods = dir(WeakSet)
        for method in dir(set):
            if method == 'test_c_api' or method.startswith('_'):
                continue
            self.assertIn(method, weaksetmethods,
                          "WeakSet missing method " + method)
    def test_new_or_init(self):
        self.assertRaises(TypeError, WeakSet, [], 2)
    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))
        self.assertEqual(len(self.fs), 1)
        # Dropping the last strong reference must shrink the set.
        del self.obj
        self.assertEqual(len(self.fs), 0)
    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        # 1 is not weakref'able, but that TypeError is caught by __contains__
        self.assertNotIn(1, self.s)
        self.assertIn(self.obj, self.fs)
        del self.obj
        self.assertNotIn(ustr('F'), self.fs)
    def test_union(self):
        u = self.s.union(self.items2)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(u), WeakSet)
        self.assertRaises(TypeError, self.s.union, [[]])
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet(self.items + self.items2)
            c = C(self.items2)
            self.assertEqual(self.s.union(c), x)
            del c
        self.assertEqual(len(u), len(self.items) + len(self.items2))
        self.items2.pop()
        gc.collect()
        # The union result weakly tracks its elements too.
        self.assertEqual(len(u), len(self.items) + len(self.items2))
    def test_or(self):
        i = self.s.union(self.items2)
        self.assertEqual(self.s | set(self.items2), i)
        self.assertEqual(self.s | frozenset(self.items2), i)
    def test_intersection(self):
        s = WeakSet(self.letters)
        i = s.intersection(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.items2 and c in self.letters)
        self.assertEqual(s, WeakSet(self.letters))
        self.assertEqual(type(i), WeakSet)
        for C in set, frozenset, dict.fromkeys, list, tuple:
            x = WeakSet([])
            self.assertEqual(i.intersection(C(self.items)), x)
        self.assertEqual(len(i), len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(i), len(self.items2))
    def test_isdisjoint(self):
        self.assertTrue(self.s.isdisjoint(WeakSet(self.items2)))
        self.assertTrue(not self.s.isdisjoint(WeakSet(self.letters)))
    def test_and(self):
        i = self.s.intersection(self.items2)
        self.assertEqual(self.s & set(self.items2), i)
        self.assertEqual(self.s & frozenset(self.items2), i)
    def test_difference(self):
        i = self.s.difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.items2)
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.difference, [[]])
    def test_sub(self):
        i = self.s.difference(self.items2)
        self.assertEqual(self.s - set(self.items2), i)
        self.assertEqual(self.s - frozenset(self.items2), i)
    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.items2)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.items2))
        self.assertEqual(self.s, WeakSet(self.items))
        self.assertEqual(type(i), WeakSet)
        self.assertRaises(TypeError, self.s.symmetric_difference, [[]])
        self.assertEqual(len(i), len(self.items) + len(self.items2))
        self.items2.pop()
        gc.collect()
        self.assertEqual(len(i), len(self.items) + len(self.items2))
    def test_xor(self):
        i = self.s.symmetric_difference(self.items2)
        self.assertEqual(self.s ^ set(self.items2), i)
        self.assertEqual(self.s ^ frozenset(self.items2), i)
    def test_sub_and_super(self):
        self.assertTrue(self.ab_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset <= self.abcde_weakset)
        self.assertTrue(self.abcde_weakset >= self.ab_weakset)
        self.assertFalse(self.abcde_weakset <= self.def_weakset)
        self.assertFalse(self.abcde_weakset >= self.def_weakset)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))
    def test_lt(self):
        self.assertTrue(self.ab_weakset < self.abcde_weakset)
        self.assertFalse(self.abcde_weakset < self.def_weakset)
        self.assertFalse(self.ab_weakset < self.ab_weakset)
        self.assertFalse(WeakSet() < WeakSet())
    def test_gt(self):
        self.assertTrue(self.abcde_weakset > self.ab_weakset)
        self.assertFalse(self.abcde_weakset > self.def_weakset)
        self.assertFalse(self.ab_weakset > self.ab_weakset)
        self.assertFalse(WeakSet() > WeakSet())
    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        s = WeakSet(Foo() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = WeakSet([elem])
    def test_subclass_with_custom_hash(self):
        # Bug #1257731
        class H(WeakSet):
            def __hash__(self):
                return int(id(self) & 0x7fffffff)
        s=H()
        f=set()
        f.add(s)
        self.assertIn(s, f)
        f.remove(s)
        f.add(s)
        f.discard(s)
    def test_init(self):
        s = WeakSet()
        s.__init__(self.items)
        self.assertEqual(s, self.s)
        s.__init__(self.items2)
        self.assertEqual(s, WeakSet(self.items2))
        self.assertRaises(TypeError, s.__init__, s, 2);
        self.assertRaises(TypeError, s.__init__, 1);
    def test_constructor_identity(self):
        s = WeakSet(self.items)
        t = WeakSet(s)
        self.assertNotEqual(id(s), id(t))
    def test_hash(self):
        # WeakSets are mutable and therefore unhashable.
        self.assertRaises(TypeError, hash, self.s)
    def test_clear(self):
        self.s.clear()
        self.assertEqual(self.s, WeakSet([]))
        self.assertEqual(len(self.s), 0)
    def test_copy(self):
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))
    def test_add(self):
        x = ustr('Q')
        self.s.add(x)
        self.assertIn(x, self.s)
        dup = self.s.copy()
        self.s.add(x)
        self.assertEqual(self.s, dup)
        self.assertRaises(TypeError, self.s.add, [])
        self.fs.add(Foo())
        # The Foo() above had no strong reference, so it vanished again.
        self.assertTrue(len(self.fs) == 1)
        self.fs.add(self.obj)
        self.assertTrue(len(self.fs) == 1)
    def test_remove(self):
        x = ustr('a')
        self.s.remove(x)
        self.assertNotIn(x, self.s)
        self.assertRaises(KeyError, self.s.remove, x)
        self.assertRaises(TypeError, self.s.remove, [])
    def test_discard(self):
        a, q = ustr('a'), ustr('Q')
        self.s.discard(a)
        self.assertNotIn(a, self.s)
        self.s.discard(q)
        self.assertRaises(TypeError, self.s.discard, [])
    def test_pop(self):
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(KeyError, self.s.pop)
    def test_update(self):
        retval = self.s.update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
        self.assertRaises(TypeError, self.s.update, [[]])
    def test_update_set(self):
        self.s.update(set(self.items2))
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
    def test_ior(self):
        self.s |= set(self.items2)
        for c in (self.items + self.items2):
            self.assertIn(c, self.s)
    def test_intersection_update(self):
        retval = self.s.intersection_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.intersection_update, [[]])
    def test_iand(self):
        self.s &= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items2 and c in self.items:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
    def test_difference_update(self):
        retval = self.s.difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.difference_update, [[]])
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
    def test_isub(self):
        self.s -= set(self.items2)
        for c in (self.items + self.items2):
            if c in self.items and c not in self.items2:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
    def test_symmetric_difference_update(self):
        retval = self.s.symmetric_difference_update(self.items2)
        self.assertEqual(retval, None)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, [[]])
    def test_ixor(self):
        self.s ^= set(self.items2)
        for c in (self.items + self.items2):
            if (c in self.items) ^ (c in self.items2):
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
    def test_inplace_on_self(self):
        t = self.s.copy()
        t |= t
        self.assertEqual(t, self.s)
        t &= t
        self.assertEqual(t, self.s)
        t -= t
        self.assertEqual(t, WeakSet())
        t = self.s.copy()
        t ^= t
        self.assertEqual(t, WeakSet())
    def test_eq(self):
        # issue 5964
        self.assertTrue(self.s == self.s)
        self.assertTrue(self.s == WeakSet(self.items))
        self.assertFalse(self.s == set(self.items))
        self.assertFalse(self.s == list(self.items))
        self.assertFalse(self.s == tuple(self.items))
        self.assertFalse(self.s == WeakSet([Foo]))
        self.assertFalse(self.s == 1)
    def test_ne(self):
        self.assertTrue(self.s != set(self.items))
        s1 = WeakSet()
        s2 = WeakSet()
        self.assertFalse(s1 != s2)
    def test_weak_destroy_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        # Create new items to be sure no-one else holds a reference
        items = [ustr(c) for c in ('a', 'b', 'c')]
        s = WeakSet(items)
        it = iter(s)
        next(it)             # Trigger internal iteration
        # Destroy an item
        del items[-1]
        gc.collect()    # just in case
        # We have removed either the first consumed items, or another one
        self.assertIn(len(list(it)), [len(items), len(items) - 1])
        del it
        # The removal has been committed
        self.assertEqual(len(s), len(items))
    def test_weak_destroy_and_mutate_while_iterating(self):
        # Issue #7105: iterators shouldn't crash when a key is implicitly removed
        items = [ustr(c) for c in string.ascii_letters]
        s = WeakSet(items)
        @contextlib.contextmanager
        def testcontext():
            try:
                it = iter(s)
                next(it)
                del it
                # Schedule an item for removal and recreate it
                u = ustr(str(items.pop()))
                gc.collect()      # just in case
                yield u
            finally:
                it = None           # should commit all removals
        with testcontext() as u:
            self.assertNotIn(u, s)
        with testcontext() as u:
            self.assertRaises(KeyError, s.remove, u)
            self.assertNotIn(u, s)
        with testcontext() as u:
            s.add(u)
            self.assertIn(u, s)
        t = s.copy()
        with testcontext() as u:
            s.update(t)
        self.assertEqual(len(s), len(t))
        with testcontext() as u:
            s.clear()
        self.assertEqual(len(s), 0)
    def test_len_cycles(self):
        N = 20
        items = [RefCycle() for i in range(N)]
        s = WeakSet(items)
        del items
        it = iter(s)
        try:
            next(it)
        except StopIteration:
            pass
        gc.collect()
        n1 = len(s)
        del it
        gc.collect()
        n2 = len(s)
        # one item may be kept alive inside the iterator
        self.assertIn(n1, (0, 1))
        self.assertEqual(n2, 0)
    def test_len_race(self):
        # Extended sanity checks for len() in the face of cyclic collection
        self.addCleanup(gc.set_threshold, *gc.get_threshold())
        for th in range(1, 100):
            N = 20
            gc.collect(0)
            gc.set_threshold(th, th, th)
            items = [RefCycle() for i in range(N)]
            s = WeakSet(items)
            del items
            # All items will be collected at next garbage collection pass
            it = iter(s)
            try:
                next(it)
            except StopIteration:
                pass
            n1 = len(s)
            del it
            n2 = len(s)
            self.assertGreaterEqual(n1, 0)
            self.assertLessEqual(n1, N)
            self.assertGreaterEqual(n2, 0)
            self.assertLessEqual(n2, n1)
def test_main(verbose=None):
    # Standard CPython test-suite entry point for this module.
    support.run_unittest(TestWeakSet)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    test_main(verbose=True)
|
bsd-2-clause
|
ruziniu/v2ex
|
v2ex/babel/l10n/messages/en.py
|
16
|
4841
|
# coding=utf-8
# English (en) localization strings for the v2ex UI. Each module-level name
# is looked up by templates/handlers, so names and string values are part of
# the public interface of this module.
# Messages on top bar
home = 'Home'
images = 'Images'
mentions = 'Mentions'
workspace = 'Workspace'
notes = 'Notes'
nearby = 'Nearby'
settings = 'Settings'
backstage = 'Backstage'
signin = 'Sign In'
signup = 'Sign Up'
signout = 'Sign Out'
planes = 'Planes'
# Messages shared by forms
chevron = '<span class="chevron"> › </span>'
resolve_errors = "Please resolve the following errors."
failed_signin = "Username/password not matched"
delete = 'Delete'
submit = 'Submit'
save = 'Save'
create = 'Create'
back_to = 'Back to'
yes = 'Yes'
no = 'No'
# Messages on /signin
password = "Password"
remember_1_year = "Cookie will be remembered for a year."
forgot_password = "I Forgot Password"
existing_member = 'Existing member please <a href="/signin">sign in</a>'
# Messages on /signup
username_empty = 'Please input your username'
username_too_long = 'Username should not be longer than 32 characters'
username_too_short = 'Username should have at least 3 characters'
username_invalid = 'Username can only use 0-9, a-z, A-Z and underscore'
username_taken = 'Sorry, this username is taken by other'
# Messages on /signout
signin_again = 'Sign In Again'
# Messages on /
signup_now = "Sign Up Now"
hottest_nodes = 'Hottest Nodes'
new_nodes = 'New Nodes'
community_stats = 'Community Stats'
total_members = 'Total Members'
total_topics = 'Total Topics'
total_replies = 'Total Replies'
recent_visited_nodes = 'Recent Visited Nodes'
rss_subscribe = 'RSS'
more_recent_topics = 'More Recent Topics'
nodes_portal = 'Nodes Portal'
# Messages on /t/
no_reply = 'No Reply Yet'
add_a_reply_now = 'Add a Reply Now'
send = 'Send'
better_to_block = "It's better to block someone than to start a flame war."
reply_content_cannot_be_empty = "Reply content can't be empty"
newer_replies_first = "Newer Replies First"
earlier_replies_first = "Earlier Replies First"
show_all = "Show All"
just_topic_creator = "Just Topic Creator"
replies = "replies"
till = "Till"
sure_to_delete = "Sure to delete?"
no_pointless = "➥ Please avoid posting pointless replies, it's always great to save everyone's precious time."
for_good = "All for a better world."
# Messages on /go/
create_new_topic = "Create New Topic"
# NOTE(review): 'next' shadows the builtin within this module; harmless for a
# constants-only file, but renaming would break template lookups — leave as-is.
next = "Next"
previous = "Previous"
header_replies = "Replies"
header_subject = "Subject"
header_author = "Author"
header_last_replied = "Last Replied"
# Messages on /settings
username = "Username"
email = "Email"
website = "Website"
twitter = "Twitter"
location = "Location"
tagline = "Tagline"
bio = "Bio"
language = "Language"
twitter_sync = "Twitter Sync"
save_settings = "Save Settings"
avatar = "Avatar"
current_avatar = "Current Avatar"
upload_new_avatar = "Upload New Avatar"
gravatar_support = '<a href="http://www.gravatar.com/" target="_blank">Gravatar</a> is also supported'
security = "Security"
leave_blank = "If you don't want to change your passwords, please leave these fields blank."
current_password = "Current"
new_password = "New"
change_password = "Change Password"
account_status = "Status"
twitter_not_linked = "Twitter Account Not Linked"
twitter_link_explained = "Link your Twitter account securely via OAuth, then you can sync your topics/replies."
personal_records = "Personal Records"
blocked = "Blocked"
updated_every_4_hours = "Updated Every 4 Hours"
show_home_top = "Show Site Header"
show_quick_post = "Show Quick Post"
do_show = "Show"
do_not_show = "Do Not Show"
# Messages on /member/
recent_topics = 'Recent Topics'
recent_participated_topics = 'Recent Participated Topics'
# Messages on /backstage
site_settings = 'Site Settings'
site_settings_explained = 'Site name, slogan, description and other details. Who can create topic and who can reply.'
topic_settings = 'Topic Settings'
topic_settings_explained = 'Configure topic types.'
sections = 'Sections'
add_new_section = 'Add New Section'
minisites = 'Minisites'
add_new_minisite = 'Add New Minisite'
data_management = 'Data Management'
delete_specified_cached_item = 'Delete Specified Cached Item'
delete_specified_cached_item_explained = 'You can clear specified cached item by entering its name here.'
mapreduce_explained = 'MapReduce is a very powerful tool for tidying all data, use with caution.'
latest_joined = 'Latest Joined'
sure_to_delete_minisite = 'Sure to delete this minisite and all its contents?'
# Messages on /backstage/site
site_name = 'Site Name'
site_slogan = 'Site Slogan'
site_domain = 'Site Domain'
site_description = 'Description'
site_home_categories = 'Home Categories'
one_category_per_line = 'One featured category per line'
site_settings_updated = 'Site settings updated successfully'
site_theme = 'Theme'
# Messages on /backstage/topic
enable_topic_types = 'Enable Topic Types'
topic_types = 'Topic Types'
topic_types_explained = 'One type per line, use colon to separate name and color'
|
bsd-3-clause
|
68foxboris/enigma2-openpli-vuplus
|
lib/python/Components/Converter/TemplatedMultiContent.py
|
80
|
2879
|
from Components.Converter.StringList import StringList
class TemplatedMultiContent(StringList):
    """Turns a python tuple list into a multi-content list which can be used in a listbox renderer."""
    def __init__(self, args):
        StringList.__init__(self, args)
        # These imports exist solely so their names are visible to the
        # eval() of the skin template expression below (via locals()).
        from enigma import eListboxPythonMultiContent, gFont, RT_HALIGN_LEFT, RT_HALIGN_CENTER, RT_HALIGN_RIGHT, RT_VALIGN_TOP, RT_VALIGN_CENTER, RT_VALIGN_BOTTOM, RT_WRAP
        from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmap, MultiContentEntryPixmapAlphaTest, MultiContentEntryPixmapAlphaBlend, MultiContentTemplateColor, MultiContentEntryProgress
        l = locals()
        del l["self"] # cleanup locals a bit
        del l["args"]
        self.active_style = None
        # NOTE(review): the template string comes straight from the skin and
        # is eval()'d — skins must be trusted input.
        self.template = eval(args, {}, l)
        assert "fonts" in self.template
        assert "itemHeight" in self.template
        assert "template" in self.template or "templates" in self.template
        assert "template" in self.template or "default" in self.template["templates"] # we need to have a default template
        if not "template" in self.template: # default template can be ["template"] or ["templates"]["default"]
            self.template["template"] = self.template["templates"]["default"][1]
            # NOTE(review): setTemplate() reads heights as templates[style][0],
            # so taking itemHeight from template[0] here looks inconsistent —
            # confirm against the skin template format before changing.
            self.template["itemHeight"] = self.template["template"][0]
    def changed(self, what):
        # Lazily create the multi-content backend and register the fonts the
        # skin declared; then refresh the list unless only the style changed.
        if not self.content:
            from enigma import eListboxPythonMultiContent
            self.content = eListboxPythonMultiContent()
            # also setup fonts (also given by source)
            index = 0
            for f in self.template["fonts"]:
                self.content.setFont(index, f)
                index += 1
        # if only template changed, don't reload list
        if what[0] == self.CHANGED_SPECIFIC and what[1] == "style":
            pass
        elif self.source:
            self.content.setList(self.source.list)
        self.setTemplate()
        self.downstream_elements.changed(what)
    def setTemplate(self):
        # Apply the template matching the source's current style, falling
        # back to the skin's single default template. No-op if unchanged.
        if self.source:
            style = self.source.style
            if style == self.active_style:
                return
            # if skin defined "templates", that means that it defines multiple styles in a dict. template should still be a default
            templates = self.template.get("templates")
            template = self.template.get("template")
            itemheight = self.template["itemHeight"]
            selectionEnabled = self.template.get("selectionEnabled", True)
            scrollbarMode = self.template.get("scrollbarMode", "showOnDemand")
            if templates and style and style in templates: # if we have a custom style defined in the source, and different templates in the skin, look it up
                template = templates[style][1]
                itemheight = templates[style][0]
                if len(templates[style]) > 2:
                    selectionEnabled = templates[style][2]
                if len(templates[style]) > 3:
                    scrollbarMode = templates[style][3]
            self.content.setTemplate(template)
            self.content.setItemHeight(itemheight)
            self.selectionEnabled = selectionEnabled
            self.scrollbarMode = scrollbarMode
            self.active_style = style
|
gpl-2.0
|
poppogbr/genropy
|
packages/hosting/webpages/client.py
|
1
|
8379
|
#!/usr/bin/env python
# encoding: utf-8
"""
Created by Softwell on 2008-07-10.
Copyright (c) 2008 Softwell. All rights reserved.
"""
# --------------------------- GnrWebPage Standard header ---------------------------
from gnr.core.gnrbag import Bag
class GnrCustomWebPage(object):
    # genropy web page for managing hosting clients and their instances.
    maintable = 'hosting.client'
    py_requires = """public:Public,standard_tables:TableHandler,
                     gnrcomponents/selectionhandler,
                     hosted:HostedClient,hosted:HostedInstance"""
    ######################## STANDARD TABLE OVERRIDDEN METHODS ###############
    def windowTitle(self):
        return '!!Client'
    def pageAuthTags(self, method=None, **kwargs):
        # Page access restricted to the 'owner' auth tag.
        return 'owner'
    def tableWriteTags(self):
        return 'owner'
    def tableDeleteTags(self):
        return 'owner'
    def barTitle(self):
        return '!!Client'
    def lstBase(self, struct):
        # Columns of the client list view.
        r = struct.view().rows()
        r.fieldcell('code', width='10em')
        r.fieldcell('@user_id.username', name='User', width='10em')
        self.hosted_card_columns(r)
        return struct
    def conditionBase(self):
        pass
    def queryBase(self):
        # Default query: match any code.
        return dict(column='code', op='contains', val='%')
    def orderBase(self):
        return 'code'
    ############################## FORM METHODS ##################################
    def formBase(self, parentBC, disabled=False, **kwargs):
        # Client form: header (code/user + hosted card) on top, tabbed
        # detail (Info plus one tab per hostedclient_* plugin) below.
        bc = parentBC.borderContainer(**kwargs)
        top = bc.borderContainer(region='top', height='120px')
        right = top.contentPane(region='right', width='350px')
        self.hosted_card_linker(right, disabled=disabled)
        center = top.contentPane(region='center')
        fb = center.formbuilder(cols=1, border_spacing='3px', fld_width='100%',
                                width='350px', disabled=disabled)
        fb.field('code')
        fb.field('user_id')
        tc = bc.tabContainer(region='center')
        self.main_clienttab(tc.borderContainer(title='Info'), disabled)
        # One extra tab per 'hostedclient_<pkgname>' handler defined on self.
        for pkgname, handler in [(c.split('_')[1], getattr(self, c)) for c in dir(self) if
                                 c.startswith('hostedclient_')]:
            handler(tc.contentPane(datapath='.hosted_data.%s' % pkgname,
                                   title=self.db.packages[pkgname].name_long,
                                   nodeId='hosted_client_data_%s' % pkgname,
                                   sqlContextName='sql_record_hosted_client_%s' % pkgname,
                                   sqlContextRoot='form.record.hosted_client_data'))
    def main_clienttab(self, bc, disabled):
        # Instances grid for the current client, edited in a dialog.
        self.selectionHandler(bc.borderContainer(region='center'), label='!!Instances',
                              datapath="instances", nodeId='instances', table='hosting.instance',
                              struct=self.struct_instances, reloader='^form.record.id',
                              hiddencolumns='$site_path', reload_onSaved=False,
                              selectionPars=dict(where='$client_id=:c_id', c_id='=form.record.id',
                                                 applymethod='apply_instances_selection', order_by='$code'),
                              dialogPars=dict(height='400px', width='600px', formCb=self.instance_form,
                                              onSaved='genro.fireAfter("#instances.reload",true,5000)',
                                              toolbarPars=dict(lock_action=True, add_action=True, del_action=True,
                                                               save_action=True),
                                              default_client_id='=form.record.id',
                                              saveKwargs=dict(_lockScreen=True, saveAlways=True)))
    def instance_form(self, parentBC, disabled=None, table=None, **kwargs):
        # Instance dialog: Info tab plus one tab per hostedinstance_* plugin.
        tc = parentBC.tabContainer(**kwargs)
        self.main_instancetab(tc.contentPane(title='Info', _class='pbl_roundedGroup', margin='5px'), table=table,
                              disabled=disabled)
        for pkgname, handler in [(c.split('_')[1], getattr(self, c)) for c in dir(self) if
                                 c.startswith('hostedinstance_')]:
            handler(tc.contentPane(datapath='.hosted_data.%s' % pkgname, title=self.db.packages[pkgname].name_long,
                                   nodeId='hosted_instance_data_%s' % pkgname,
                                   sqlContextName='sql_record_hosted_instance_%s' % pkgname,
                                   sqlContextRoot='instances.dlg.record.hosted_data.%s' % pkgname))
    def main_instancetab(self, parent, disabled=None, table=None):
        bc = parent.borderContainer()
        pane = bc.contentPane(region='top')
        pane.div('!!Manage instances', _class='pbl_roundedGroupLabel')
        fb = pane.formbuilder(cols=1, border_spacing='6px', dbtable=table, disabled=disabled)
        fb.field('code', width='15em', lbl='!!Instance Name')
        # Fire the server-side createInst RPC when .$create is triggered.
        pane.dataRpc('.$creation_result', 'createInst', instance_code='=.code', instance_exists='=.$instance_exists',
                     site_exists='=.$site_exists',
                     _fired='^.$create', _onResult='FIRE .$created', _userChanges=True)
        # Client-side controller: copy the created paths back into the record.
        pane.dataController("""
            if (site_path){
                SET .site_path=site_path;
                SET .$site_exists=true;
            }
            if (instance_path){
                SET .path=instance_path;
                SET .$instance_exists=true;
            }
            """, site_path='=.$creation_result.site_path',
                            instance_path='=.$creation_result.instance_path',
                            _fired='^.$created', _userChanges=True)
        def struct(struct):
            # Grid structure for the instance's slot configuration.
            r = struct.view().rows()
            r.cell('type', name='Slot type', width='15em')
            r.cell('qty', name='Qty', width='4em', dtype='I')
            return struct
        iv = self.includedViewBox(bc.borderContainer(region='center'), label='!!Slot configuration',
                                  storepath='.slot_configuration', struct=struct,
                                  datamode='bag', autoWidth=True,
                                  add_action=True, del_action=True)
        gridEditor = iv.gridEditor()
        gridEditor.dbSelect(gridcell='type', dbtable='hosting.slot_type',
                            columns='$code,$description', rowcaption='$code',
                            exclude=True, hasDownArrow=True)
        gridEditor.numberTextBox(gridcell='qty')
    def onLoading_hosting_instance(self, record, newrecord, loadingParameters, recInfo):
        # Annotate the loaded record with existence flags for UI state.
        tblinstance = self.db.table('hosting.instance')
        instance_exists = self.db.packages['hosting'].instance_exists(record['code'])
        site_exists = self.db.packages['hosting'].site_exists(record['code'])
        record.setItem('$instance_exists', instance_exists)
        record.setItem('$site_exists', site_exists)
    def rpc_apply_instances_selection(self, selection, **kwargs):
        # Decorate each selection row with a status light: green when both
        # the instance and its site exist on disk, yellow otherwise.
        tblinstance = self.db.table('hosting.instance')
        def apply_row(row):
            instance_exists = self.db.packages['hosting'].instance_exists(row['code'])
            site_exists = self.db.packages['hosting'].site_exists(row['code'])
            if site_exists and instance_exists:
                return dict(create='<div class="greenLight"></div>')
            else:
                return dict(create='<div class="yellowLight"></div>')
        selection.apply(apply_row)
    def rpc_createInst(self, instance_code=None, instance_exists=None, site_exists=None):
        # Create the instance and/or site directories if missing; return the
        # created paths so the client controller can store them.
        result = Bag()
        instancetbl = self.db.table('hosting.instance')
        if not instance_exists:
            result['instance_path'] = instancetbl.create_instance(instance_code, self.site.instance_path,
                                                                  self.site.gnrapp.config)
        if not site_exists:
            result['site_path'] = instancetbl.create_site(instance_code, self.site.site_path, self.site.config)
        return result
    def struct_instances(self, struct):
        # Columns of the instances grid (the 'create' cell holds the status light).
        r = struct.view().rows()
        r.fieldcell('code', width='10em')
        r.fieldcell('path', width='20em')
        r.cell('create', calculated=True, name='!!Status', width='10em')
        return struct
|
lgpl-2.1
|
gkadillak/rockstor-core
|
src/rockstor/cli/rock_cli.py
|
6
|
8120
|
"""
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import pwd
import cmd
import sys
import os
import readline
from base_console import BaseConsole
from setup_console import SetupConsole
from disks_console import DisksConsole
from pools_console import PoolsConsole
from shares_console import SharesConsole
from services_console import ServicesConsole
from sm_console import SMConsole
from support_console import SupportConsole
from network_console import NetworkConsole
from users_console import UsersConsole
from task_console import TaskConsole
from replication_console import ReplicationConsole
from backup_plugin_console import BackupPluginConsole
from rest_util import api_call
from nfs_export_console import NFSExportConsole
from api_keys import APIKeyConsole
ASCII_LOGO = """
__ __ __ __ ___ __ __
|__) / \ / ` |__/ /__` | / \ |__)
| \ \__/ \__, | \ .__/ | \__/ | \\
"""
class RockConsole(BaseConsole):
def __init__(self, greeting='Rockstor'):
self.user = pwd.getpwuid(os.getuid())[0]
self.greeting = self.user + '@' + greeting
self.prompt = self.greeting + '> '
self.intro = ('%s\nWelcome to Rockstor. The Smart Open Storage '
'Platform.' % ASCII_LOGO)
self.user_hist_file = os.path.expanduser('~') + '/.rcli.hist'
try:
readline.read_history_file(self.user_hist_file)
except:
pass
finally:
BaseConsole.__init__(self)
def postloop(self):
cmd.Cmd.postloop(self)
print "Thanks for Rocking on the Console"
try:
readline.write_history_file(self.user_hist_file)
except:
print('Command history could not be saved')
"""
Commands
"""
def do_utcnow(self, args):
"""
returns utc time on the server
"""
url = ('%scommands/utcnow' % BaseConsole.url)
print api_call(url, calltype='post')
def do_uptime(self, args):
"""
return uptime(in seconds) of the server
"""
url = ('%scommands/uptime' % BaseConsole.url)
print api_call(url, calltype='post')
def do_bootstrap(self, args):
"""
bootraps the storage state, mounts anything that needs to be mounted
etc..
"""
url = ('%scommands/bootstrap' % BaseConsole.url)
print api_call(url, calltype='post')
def do_shares(self, args):
"""
Subconsole for share related operations.
Go to shares subconsole: shares
Display list of shares: shares list <share_name>
Add a share: shares add pool_name share_name
Remove a share: shares delete share_name
share detail console: shares share_name
commands on a share: shares share_name <input>
"""
shares_console = SharesConsole(self.greeting)
if (len(args) == 0):
return shares_console.cmdloop()
return shares_console.onecmd(args)
def do_pools(self, args):
"""
Operations on pools can be done with this command.
Go to pools subconsole: pools
Display the list of pools: pools list
Add a pool: pools add
Remove a pool: pools delete
pool detail console: pools pool_name
commands on a pool: pools pool_name <input>
"""
pools_console = PoolsConsole(self.greeting)
if (len(args) == 0):
pools_console.cmdloop()
else:
pools_console.onecmd(args)
def do_disks(self, args):
disks_console = DisksConsole(self.greeting)
if (len(args) == 0):
disks_console.cmdloop()
else:
disks_console.onecmd(args)
def help_disks(self):
s = """
%(c)sPerform operations on disks.%(e)s
Available commands:
Go to disks subconsole: %(c)sdisks%(e)s
Display the list of disks: %(c)sdisks list%(e)s
Scan for new disks: %(c)sdisks scan%(e)s
""" % BaseConsole.c_params
print s
def do_services(self, args):
"""
Operations on all services can be done with this command.
Display the list of services: services list
service detail console: services service_name
nfs service console: services nfs <commands>
smb service console: services smb <command>
"""
services_console = ServicesConsole(self.greeting)
if (len(args) == 0):
services_console.cmdloop()
else:
services_console.onecmd(args)
def do_setup(self, args):
"""
Temporary method to setup for dev purposes. to be deprecated later,
perhaps.
"""
setup_console = SetupConsole(self.greeting)
if (len(args) == 0):
setup_console.cmdloop()
else:
setup_console.onecmd(args)
def do_smart(self, args):
"""
Smart manager console
"""
sm_console = SMConsole(self.greeting)
if (len(args) == 0):
sm_console.cmdloop()
else:
sm_console.onecmd(args)
def do_support(self, args):
"""
Support console
"""
support_console = SupportConsole(self.greeting)
if (len(args) == 0):
support_console.cmdloop()
else:
support_console.onecmd(args)
def do_network(self, args):
"""
Network console
"""
network_console = NetworkConsole(self.greeting)
if (len(args) == 0):
network_console.cmdloop()
else:
network_console.onecmd(args)
def do_users(self, args):
"""
Users console
"""
users_console = UsersConsole(self.greeting)
if (len(args) == 0):
users_console.cmdloop()
else:
users_console.onecmd(args)
def do_tasks(self, args):
"""
Task Scheduler Console
"""
task_console = TaskConsole(self.greeting)
if (len(args) == 0):
task_console.cmdloop()
else:
task_console.onecmd(args)
def do_replication(self, args):
"""
Replication console
"""
rc = ReplicationConsole(self.greeting)
if (len(args) == 0):
rc.cmdloop()
else:
rc.onecmd(args)
def do_backup(self, args):
"""
Backup plugin console
"""
bc = BackupPluginConsole(self.greeting)
if (len(args) == 0):
bc.cmdloop()
else:
bc.cmdloop(args)
def do_nfs_exports(self, args):
ne = NFSExportConsole(self.greeting)
if (len(args) == 0):
ne.cmdloop()
else:
ne.cmdloop(args)
def do_api_keys(self, args):
ak = APIKeyConsole(self.greeting)
if (len(args) == 0):
ak.cmdloop()
else:
ak.cmdloop(args)
from rest_util import set_token
def main():
    """Entry point: execute one command from argv (handling the '-c' form
    produced by 'ssh host command'), or start the interactive console."""
    set_token()
    console = RockConsole()
    if len(sys.argv) <= 1:
        return console.cmdloop()
    # When invoked remotely over ssh the shell inserts '-c' before the
    # actual command; strip it.
    if sys.argv[1] == '-c':
        words = sys.argv[2:]
    else:
        words = sys.argv[1:]
    line = ' '.join(words)
    return console.postcmd(console.onecmd(line), line)
|
gpl-3.0
|
nhomar/odoo-mirror
|
addons/l10n_pa/__openerp__.py
|
117
|
1817
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 Cubic ERP - Teradata SAC (<http://cubicerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest: Panamanian chart of accounts localization.
{
    "name": "Panama Localization Chart Account",
    "version": "1.0",
    "description": """
Panamenian accounting chart and tax localization.
Plan contable panameño e impuestos de acuerdo a disposiciones vigentes
Con la Colaboración de
- AHMNET CORP http://www.ahmnet.com
""",
    "author": "Cubic ERP",
    "website": "http://cubicERP.com",
    "category": "Localization/Account Charts",
    "depends": [
        "account_chart",
    ],
    # XML data files loaded at module installation.
    "data":[
        "account_tax_code.xml",
        "l10n_pa_chart.xml",
        "account_tax.xml",
        "l10n_pa_wizard.xml",
    ],
    "demo_xml": [
    ],
    "active": False,
    "installable": True,
    "certificate" : "",
    # NOTE(review): these image paths reference the Chilean (l10n_cl)
    # localization, not Panama -- likely copied from another manifest; verify.
    'images': ['images/config_chart_l10n_cl.jpeg','images/l10n_cl_chart.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ulope/django
|
django/contrib/admindocs/views.py
|
10
|
15517
|
from importlib import import_module
import inspect
import os
import re
from django import template
from django.apps import apps
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.views.decorators import staff_member_required
from django.db import models
from django.core.exceptions import ViewDoesNotExist
from django.http import Http404
from django.core import urlresolvers
from django.contrib.admindocs import utils
from django.utils.decorators import method_decorator
from django.utils._os import upath
from django.utils import six
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView
# Exclude methods starting with these strings from documentation
# (private helpers and the standard persistence/mutator method prefixes).
MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_')
class BaseAdminDocsView(TemplateView):
    """
    Shared base for admindocs views: requires a staff login and injects the
    admin context plus the admin index URL into every template context.
    """
    @method_decorator(staff_member_required)
    def dispatch(self, *args, **kwargs):
        if utils.docutils_is_available:
            return super(BaseAdminDocsView, self).dispatch(*args, **kwargs)
        # docutils is required to render the docs; show an explanatory page.
        self.template_name = 'admin_doc/missing_docutils.html'
        return self.render_to_response(admin.site.each_context())
    def get_context_data(self, **kwargs):
        kwargs.update({'root_path': urlresolvers.reverse('admin:index')})
        kwargs.update(admin.site.each_context())
        return super(BaseAdminDocsView, self).get_context_data(**kwargs)
class BookmarkletsView(BaseAdminDocsView):
    """Render the bookmarklets help page with the absolute admin URL."""
    template_name = 'admin_doc/bookmarklets.html'
    def get_context_data(self, **kwargs):
        context = super(BookmarkletsView, self).get_context_data(**kwargs)
        admin_url = "%s://%s%s" % (
            self.request.scheme, self.request.get_host(), context['root_path'])
        context.update({'admin_url': admin_url})
        return context
class TemplateTagIndexView(BaseAdminDocsView):
    """Index every registered template tag with its rendered docstring."""
    template_name = 'admin_doc/template_tag_index.html'
    def get_context_data(self, **kwargs):
        load_all_installed_template_libraries()
        tags = []
        builtin_libs = [(None, lib) for lib in template.builtins]
        app_libs = list(six.iteritems(template.libraries))
        for module_name, library in builtin_libs + app_libs:
            # Builtin tags get an empty library name; app tags are grouped
            # under the last component of their module path.
            if library in template.builtins:
                tag_library = ''
            else:
                tag_library = module_name.split('.')[-1]
            for tag_name, tag_func in library.tags.items():
                title, body, metadata = utils.parse_docstring(tag_func.__doc__)
                if title:
                    title = utils.parse_rst(title, 'tag', _('tag:') + tag_name)
                if body:
                    body = utils.parse_rst(body, 'tag', _('tag:') + tag_name)
                for key in metadata:
                    metadata[key] = utils.parse_rst(metadata[key], 'tag', _('tag:') + tag_name)
                tags.append({
                    'name': tag_name,
                    'title': title,
                    'body': body,
                    'meta': metadata,
                    'library': tag_library,
                })
        kwargs.update({'tags': tags})
        return super(TemplateTagIndexView, self).get_context_data(**kwargs)
class TemplateFilterIndexView(BaseAdminDocsView):
    """Index every registered template filter with its rendered docstring."""
    template_name = 'admin_doc/template_filter_index.html'
    def get_context_data(self, **kwargs):
        load_all_installed_template_libraries()
        filters = []
        builtin_libs = [(None, lib) for lib in template.builtins]
        app_libs = list(six.iteritems(template.libraries))
        for module_name, library in builtin_libs + app_libs:
            # Builtin filters get an empty library name; app filters are
            # grouped under the last component of their module path.
            if library in template.builtins:
                tag_library = ''
            else:
                tag_library = module_name.split('.')[-1]
            for filter_name, filter_func in library.filters.items():
                title, body, metadata = utils.parse_docstring(filter_func.__doc__)
                if title:
                    title = utils.parse_rst(title, 'filter', _('filter:') + filter_name)
                if body:
                    body = utils.parse_rst(body, 'filter', _('filter:') + filter_name)
                for key in metadata:
                    metadata[key] = utils.parse_rst(metadata[key], 'filter', _('filter:') + filter_name)
                filters.append({
                    'name': filter_name,
                    'title': title,
                    'body': body,
                    'meta': metadata,
                    'library': tag_library,
                })
        kwargs.update({'filters': filters})
        return super(TemplateFilterIndexView, self).get_context_data(**kwargs)
class ViewIndexView(BaseAdminDocsView):
    """List every view reachable from the root URLconf."""
    template_name = 'admin_doc/view_index.html'
    def get_context_data(self, **kwargs):
        urlconf = import_module(settings.ROOT_URLCONF)
        views = []
        for func, regex, namespace, name in extract_views_from_urlpatterns(urlconf.urlpatterns):
            func_name = getattr(func, '__name__', func.__class__.__name__)
            url_name_parts = (namespace or []) + (name and [name] or [])
            views.append({
                'full_name': '%s.%s' % (func.__module__, func_name),
                'url': simplify_regex(regex),
                'url_name': ':'.join(url_name_parts),
                'namespace': ':'.join(namespace or []),
                'name': name,
            })
        kwargs.update({'views': views})
        return super(ViewIndexView, self).get_context_data(**kwargs)
class ViewDetailView(BaseAdminDocsView):
    """Show the parsed docstring of a single view, looked up by dotted path."""
    template_name = 'admin_doc/view_detail.html'
    def get_context_data(self, **kwargs):
        view = self.kwargs['view']
        urlconf = urlresolvers.get_urlconf()
        # Only resolve dotted paths that are registered URL callbacks.
        if not urlresolvers.get_resolver(urlconf)._is_callback(view):
            raise Http404
        mod, func = urlresolvers.get_mod_func(view)
        view_func = getattr(import_module(mod), func)
        title, body, metadata = utils.parse_docstring(view_func.__doc__)
        if title:
            title = utils.parse_rst(title, 'view', _('view:') + view)
        if body:
            body = utils.parse_rst(body, 'view', _('view:') + view)
        for key in metadata:
            metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view)
        kwargs.update({
            'name': view,
            'summary': title,
            'body': body,
            'meta': metadata,
        })
        return super(ViewDetailView, self).get_context_data(**kwargs)
class ModelIndexView(BaseAdminDocsView):
    """List the _meta options of every installed model."""
    template_name = 'admin_doc/model_index.html'
    def get_context_data(self, **kwargs):
        kwargs.update({'models': [model._meta for model in apps.get_models()]})
        return super(ModelIndexView, self).get_context_data(**kwargs)
class ModelDetailView(BaseAdminDocsView):
    template_name = 'admin_doc/model_detail.html'
    def get_context_data(self, **kwargs):
        # Build the documentation context for one model: parsed docstring,
        # concrete fields, M2M accessors, no-argument methods, and reverse
        # related-object accessors, all rendered through docutils.
        model_name = self.kwargs['model_name']
        # Get the model class.
        try:
            app_config = apps.get_app_config(self.kwargs['app_label'])
        except LookupError:
            raise Http404(_("App %(app_label)r not found") % self.kwargs)
        try:
            model = app_config.get_model(model_name)
        except LookupError:
            raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % self.kwargs)
        opts = model._meta
        title, body, metadata = utils.parse_docstring(model.__doc__)
        if title:
            title = utils.parse_rst(title, 'model', _('model:') + model_name)
        if body:
            body = utils.parse_rst(body, 'model', _('model:') + model_name)
        # Gather fields/field descriptions.
        fields = []
        for field in opts.fields:
            # ForeignKey is a special case since the field will actually be a
            # descriptor that returns the other object
            if isinstance(field, models.ForeignKey):
                data_type = field.rel.to.__name__
                app_label = field.rel.to._meta.app_label
                verbose = utils.parse_rst(
                    (_("the related `%(app_label)s.%(data_type)s` object") % {
                        'app_label': app_label, 'data_type': data_type,
                    }),
                    'model',
                    _('model:') + data_type,
                )
            else:
                data_type = get_readable_field_data_type(field)
                verbose = field.verbose_name
            fields.append({
                'name': field.name,
                'data_type': data_type,
                'verbose': verbose,
                'help_text': field.help_text,
            })
        # Gather many-to-many fields.
        for field in opts.many_to_many:
            data_type = field.rel.to.__name__
            app_label = field.rel.to._meta.app_label
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
                'app_label': app_label,
                'object_name': data_type,
            }
            # M2M fields are documented via their `.all`/`.count` accessors.
            fields.append({
                'name': "%s.all" % field.name,
                "data_type": 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % field.name,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })
        # Gather model methods.
        for func_name, func in model.__dict__.items():
            # Only plain instance methods taking just `self` are documented.
            if (inspect.isfunction(func) and len(inspect.getargspec(func)[0]) == 1):
                try:
                    # StopIteration is (ab)used here as a multi-level
                    # `continue`: any excluded-prefix match skips this method.
                    for exclude in MODEL_METHODS_EXCLUDE:
                        if func_name.startswith(exclude):
                            raise StopIteration
                except StopIteration:
                    continue
                verbose = func.__doc__
                if verbose:
                    verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
                fields.append({
                    'name': func_name,
                    'data_type': get_return_data_type(func_name),
                    'verbose': verbose,
                })
        # Gather related objects
        for rel in opts.get_all_related_objects() + opts.get_all_related_many_to_many_objects():
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
                'app_label': rel.opts.app_label,
                'object_name': rel.opts.object_name,
            }
            accessor = rel.get_accessor_name()
            fields.append({
                'name': "%s.all" % accessor,
                'data_type': 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % accessor,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })
        kwargs.update({
            'name': '%s.%s' % (opts.app_label, opts.object_name),
            'summary': title,
            'description': body,
            'fields': fields,
        })
        return super(ModelDetailView, self).get_context_data(**kwargs)
class TemplateDetailView(BaseAdminDocsView):
    """Show where a template name resolves in each TEMPLATE_DIRS entry."""
    template_name = 'admin_doc/template_detail.html'
    def get_context_data(self, **kwargs):
        template = self.kwargs['template']
        templates = []
        for dir in settings.TEMPLATE_DIRS:
            template_file = os.path.join(dir, template)
            templates.append({
                'file': template_file,
                'exists': os.path.exists(template_file),
                # Bug fix: bind template_file as a default argument. The
                # original plain closure late-bound the loop variable, so
                # every entry's contents() read the file from the *last*
                # directory in TEMPLATE_DIRS.
                'contents': lambda template_file=template_file: (
                    open(template_file).read() if os.path.exists(template_file) else ''),
                'order': list(settings.TEMPLATE_DIRS).index(dir),
            })
        kwargs.update({
            'name': template,
            'templates': templates,
        })
        return super(TemplateDetailView, self).get_context_data(**kwargs)
####################
# Helper functions #
####################
def load_all_installed_template_libraries():
    """Import every template-tag library from the installed apps so that
    template.libraries is fully populated before building the index."""
    for module_name in template.get_templatetags_modules():
        mod = import_module(module_name)
        try:
            entries = os.listdir(os.path.dirname(upath(mod.__file__)))
        except OSError:
            entries = []
        libraries = [
            os.path.splitext(entry)[0]
            for entry in entries
            if entry.endswith('.py') and entry[0].isalpha()
        ]
        for library_name in libraries:
            try:
                template.get_library(library_name)
            except template.InvalidTemplateLibrary:
                # Broken libraries are simply left out of the index.
                pass
def get_return_data_type(func_name):
    """Return a somewhat-helpful data type guessed from a getter name."""
    if not func_name.startswith('get_'):
        return ''
    if func_name.endswith('_list'):
        return 'List'
    if func_name.endswith('_count'):
        return 'Integer'
    return ''
def get_readable_field_data_type(field):
    """Return the field's human-readable description, if it exists.

    Field descriptions may contain %-style placeholders, which are
    interpolated against the field instance's attribute dict.
    """
    return field.description % field.__dict__
def extract_views_from_urlpatterns(urlpatterns, base='', namespace=None):
    """
    Return a list of views from a list of urlpatterns.

    Each entry is a (view_func, regex, namespace, name) tuple; included
    URLconfs are flattened recursively, with their regex prefix and
    namespace accumulated along the way.
    """
    views = []
    for pattern in urlpatterns:
        if hasattr(pattern, 'url_patterns'):
            # An include(): recurse into the nested patterns.
            try:
                sub_patterns = pattern.url_patterns
            except ImportError:
                continue
            sub_namespace = (namespace or []) + (pattern.namespace and [pattern.namespace] or [])
            views.extend(extract_views_from_urlpatterns(
                sub_patterns,
                base + pattern.regex.pattern,
                sub_namespace
            ))
        elif hasattr(pattern, 'callback'):
            try:
                views.append((pattern.callback, base + pattern.regex.pattern,
                              namespace, pattern.name))
            except ViewDoesNotExist:
                continue
        else:
            raise TypeError(_("%s does not appear to be a urlpattern object") % pattern)
    return views
named_group_matcher = re.compile(r'\(\?P(<\w+>).+?\)')
non_named_group_matcher = re.compile(r'\(.*?\)')
def simplify_regex(pattern):
    """
    Clean up urlpattern regexes into something somewhat readable by Mere Humans:
    turns something like "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
    into "<sport_slug>/athletes/<athlete_slug>/"
    """
    # Replace named groups with just "<name>".
    pattern = named_group_matcher.sub(lambda m: m.group(1), pattern)
    # Replace any remaining (unnamed) groups with a generic placeholder.
    pattern = non_named_group_matcher.sub("<var>", pattern)
    # Strip leftover regex metacharacters and collapse doubled slashes
    # (the order of replacements matches the original chain).
    for old, new in (('^', ''), ('$', ''), ('?', ''), ('//', '/'), ('\\', '')):
        pattern = pattern.replace(old, new)
    if not pattern.startswith('/'):
        pattern = '/' + pattern
    return pattern
|
bsd-3-clause
|
konono/equlipse
|
openstack-install/charm/trusty/charm-keystone/charmhelpers/core/host_factory/centos.py
|
2
|
1924
|
import subprocess
import yum
import os
from charmhelpers.core.strutils import BasicStringComparator
class CompareHostReleases(BasicStringComparator):
    """Provide comparisons of Host releases.

    Use in the form of

    if CompareHostReleases(release) > 'trusty':
        # do something with mitaka
    """
    def __init__(self, item):
        # Release ordering has not been defined for CentOS hosts; any
        # attempt to construct this comparator fails explicitly.
        raise NotImplementedError(
            "CompareHostReleases() is not implemented for CentOS")
def service_available(service_name):
    """Determine whether a system service is available."""
    # Fix: the docstring above was previously commented out ('# """..."""'),
    # leaving the function undocumented; restored as a real docstring.
    # Prefer systemctl when the host was booted with systemd, otherwise fall
    # back to the SysV `service` wrapper.
    if os.path.isdir('/run/systemd/system'):
        cmd = ['systemctl', 'is-enabled', service_name]
    else:
        cmd = ['service', service_name, 'is-enabled']
    return subprocess.call(cmd) == 0
def add_new_group(group_name, system_group=False, gid=None):
    """Create a Unix group via groupadd.

    :param group_name: name of the group to create
    :param system_group: create a system group (-r) when True
    :param gid: explicit numeric gid to assign, when given
    """
    cmd = ['groupadd']
    if gid:
        cmd += ['--gid', str(gid)]
    if system_group:
        cmd += ['-r']
    cmd += [group_name]
    subprocess.check_call(cmd)
def lsb_release():
    """Return the key=value pairs of /etc/os-release as a dict."""
    release = {}
    with open('/etc/os-release', 'r') as lsb:
        for line in lsb:
            parts = line.split('=')
            # Skip blank lines and anything without exactly one '='.
            if len(parts) != 2:
                continue
            key, value = parts
            release[key.strip()] = value.strip()
    return release
def cmp_pkgrevno(package, revno, pkgcache=None):
    """Compare supplied revno with the revno of the installed package.

    *  1 => Installed revno is greater than supplied arg
    *  0 => Installed revno is the same as supplied arg
    * -1 => Installed revno is less than supplied arg

    When pkgcache is not supplied, the installed-package cache is built by
    querying YumBase; pass a {name: version} mapping to avoid that.
    """
    if not pkgcache:
        yb = yum.YumBase()
        package_lists = yb.doPackageLists()
        pkgcache = {pkg.Name: pkg.version for pkg in package_lists['installed']}
    installed = pkgcache[package]
    if installed > revno:
        return 1
    elif installed < revno:
        return -1
    return 0
|
mit
|
jmchen-g/models
|
autoencoder/MaskingNoiseAutoencoderRunner.py
|
10
|
1689
|
import numpy as np
import sklearn.preprocessing as prep
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
from autoencoder.autoencoder_models.DenoisingAutoencoder import MaskingNoiseAutoencoder
# Download (if not cached) and load MNIST; labels are one-hot encoded.
mnist = input_data.read_data_sets('MNIST_data', one_hot = True)
def standard_scale(X_train, X_test):
    """Fit a StandardScaler on the training set and apply it to both sets.

    Fitting only on X_train avoids leaking test-set statistics.
    """
    scaler = prep.StandardScaler().fit(X_train)
    return scaler.transform(X_train), scaler.transform(X_test)
def get_random_block_from_data(data, batch_size):
    """Return a randomly positioned contiguous batch of `batch_size` rows."""
    start = np.random.randint(0, len(data) - batch_size)
    return data[start:start + batch_size]
# Top-level training script (Python 2: note the print statements below).
X_train, X_test = standard_scale(mnist.train.images, mnist.test.images)
n_samples = int(mnist.train.num_examples)
# Training hyperparameters.
training_epochs = 100
batch_size = 128
display_step = 1
# Masking-noise denoising autoencoder: 784 inputs (28x28 MNIST pixels),
# 200 hidden units. dropout_probability is passed through to the model;
# presumably the keep/mask probability -- confirm in DenoisingAutoencoder.
autoencoder = MaskingNoiseAutoencoder(n_input = 784,
                                      n_hidden = 200,
                                      transfer_function = tf.nn.softplus,
                                      optimizer = tf.train.AdamOptimizer(learning_rate = 0.001),
                                      dropout_probability = 0.95)
for epoch in range(training_epochs):
    avg_cost = 0.
    total_batch = int(n_samples / batch_size)
    for i in range(total_batch):
        batch_xs = get_random_block_from_data(X_train, batch_size)
        cost = autoencoder.partial_fit(batch_xs)
        # Accumulate the per-sample average cost across all batches.
        avg_cost += cost / n_samples * batch_size
    if epoch % display_step == 0:
        print "Epoch:", '%04d' % (epoch + 1), \
            "cost=", "{:.9f}".format(avg_cost)
print "Total cost: " + str(autoencoder.calc_total_cost(X_test))
|
apache-2.0
|
rue89-tech/edx-platform
|
common/djangoapps/cors_csrf/tests/test_middleware.py
|
153
|
9892
|
"""
Tests for the CORS CSRF middleware
"""
from mock import patch, Mock
import ddt
from django.test import TestCase
from django.test.utils import override_settings
from django.core.exceptions import MiddlewareNotUsed, ImproperlyConfigured
from django.http import HttpResponse
from django.middleware.csrf import CsrfViewMiddleware
from cors_csrf.middleware import CorsCSRFMiddleware, CsrfCrossDomainCookieMiddleware
# Unique marker returned by the patched process_view so tests can verify
# the middleware forwards the wrapped middleware's result unchanged.
SENTINEL = object()
class TestCorsMiddlewareProcessRequest(TestCase):
    """
    Test processing a request through the middleware
    """
    def get_request(self, is_secure, http_referer):
        """
        Build a test request
        """
        request = Mock()
        request.META = {'HTTP_REFERER': http_referer}
        request.is_secure = lambda: is_secure
        return request
    @override_settings(FEATURES={'ENABLE_CORS_HEADERS': True})
    def setUp(self):
        # ENABLE_CORS_HEADERS must be active while the middleware is
        # constructed, otherwise __init__ raises MiddlewareNotUsed.
        super(TestCorsMiddlewareProcessRequest, self).setUp()
        self.middleware = CorsCSRFMiddleware()
    def check_not_enabled(self, request):
        """
        Check that the middleware does NOT process the provided request
        """
        with patch.object(CsrfViewMiddleware, 'process_view') as mock_method:
            res = self.middleware.process_view(request, None, None, None)
        self.assertIsNone(res)
        self.assertFalse(mock_method.called)
    def check_enabled(self, request):
        """
        Check that the middleware does process the provided request
        """
        def cb_check_req_is_secure_false(request, callback, args, kwargs):
            """
            Check that the request doesn't pass (yet) the `is_secure()` test
            """
            self.assertFalse(request.is_secure())
            return SENTINEL
        with patch.object(CsrfViewMiddleware, 'process_view') as mock_method:
            mock_method.side_effect = cb_check_req_is_secure_false
            res = self.middleware.process_view(request, None, None, None)
        # The wrapped CSRF middleware's return value must be forwarded and
        # is_secure() must be restored to True afterwards.
        self.assertIs(res, SENTINEL)
        self.assertTrue(request.is_secure())
    @override_settings(CORS_ORIGIN_WHITELIST=['foo.com'])
    def test_enabled(self):
        request = self.get_request(is_secure=True, http_referer='https://foo.com/bar')
        self.check_enabled(request)
    @override_settings(
        FEATURES={'ENABLE_CORS_HEADERS': False},
        CORS_ORIGIN_WHITELIST=['foo.com']
    )
    def test_disabled_no_cors_headers(self):
        with self.assertRaises(MiddlewareNotUsed):
            CorsCSRFMiddleware()
    @override_settings(CORS_ORIGIN_WHITELIST=['bar.com'])
    def test_disabled_wrong_cors_domain(self):
        request = self.get_request(is_secure=True, http_referer='https://foo.com/bar')
        self.check_not_enabled(request)
    @override_settings(CORS_ORIGIN_WHITELIST=['foo.com'])
    def test_disabled_wrong_cors_domain_reversed(self):
        request = self.get_request(is_secure=True, http_referer='https://bar.com/bar')
        self.check_not_enabled(request)
    @override_settings(CORS_ORIGIN_WHITELIST=['foo.com'])
    def test_disabled_http_request(self):
        # Non-HTTPS requests must never be treated as cross-domain CSRF.
        request = self.get_request(is_secure=False, http_referer='https://foo.com/bar')
        self.check_not_enabled(request)
    @override_settings(CORS_ORIGIN_WHITELIST=['foo.com'])
    def test_disabled_http_referer(self):
        # An http:// referer to an https endpoint is likewise rejected.
        request = self.get_request(is_secure=True, http_referer='http://foo.com/bar')
        self.check_not_enabled(request)
@ddt.ddt
class TestCsrfCrossDomainCookieMiddleware(TestCase):
    """Tests for `CsrfCrossDomainCookieMiddleware`. """
    # Fixed fixtures used across the test methods.
    REFERER = 'https://www.example.com'
    COOKIE_NAME = 'shared-csrftoken'
    COOKIE_VALUE = 'abcd123'
    COOKIE_DOMAIN = '.edx.org'
    @override_settings(
        FEATURES={'ENABLE_CROSS_DOMAIN_CSRF_COOKIE': True},
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN
    )
    def setUp(self):
        # The feature must be enabled and fully configured while the
        # middleware is constructed, or __init__ raises.
        super(TestCsrfCrossDomainCookieMiddleware, self).setUp()
        self.middleware = CsrfCrossDomainCookieMiddleware()
    @override_settings(FEATURES={'ENABLE_CROSS_DOMAIN_CSRF_COOKIE': False})
    def test_disabled_by_feature_flag(self):
        with self.assertRaises(MiddlewareNotUsed):
            CsrfCrossDomainCookieMiddleware()
    @ddt.data('CROSS_DOMAIN_CSRF_COOKIE_NAME', 'CROSS_DOMAIN_CSRF_COOKIE_DOMAIN')
    def test_improperly_configured(self, missing_setting):
        # Enabling the feature without either required setting must fail.
        settings = {
            'FEATURES': {'ENABLE_CROSS_DOMAIN_CSRF_COOKIE': True},
            'CROSS_DOMAIN_CSRF_COOKIE_NAME': self.COOKIE_NAME,
            'CROSS_DOMAIN_CSRF_COOKIE_DOMAIN': self.COOKIE_DOMAIN
        }
        del settings[missing_setting]
        with override_settings(**settings):
            with self.assertRaises(ImproperlyConfigured):
                CsrfCrossDomainCookieMiddleware()
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ORIGIN_ALLOW_ALL=True
    )
    def test_skip_if_not_secure(self):
        response = self._get_response(is_secure=False)
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ORIGIN_ALLOW_ALL=True
    )
    def test_skip_if_not_sending_csrf_token(self):
        response = self._get_response(csrf_cookie_used=False)
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ORIGIN_ALLOW_ALL=True
    )
    def test_skip_if_not_cross_domain_decorator(self):
        response = self._get_response(cross_domain_decorator=False)
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ORIGIN_WHITELIST=['other.example.com']
    )
    def test_skip_if_referer_not_whitelisted(self):
        response = self._get_response()
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN
    )
    def test_skip_if_not_cross_domain(self):
        response = self._get_response(
            referer="https://courses.edx.org/foo",
            host="courses.edx.org"
        )
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ORIGIN_ALLOW_ALL=True
    )
    def test_skip_if_no_referer(self):
        response = self._get_response(delete_referer=True)
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ORIGIN_ALLOW_ALL=True
    )
    def test_skip_if_referer_not_https(self):
        response = self._get_response(referer="http://www.example.com")
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ORIGIN_ALLOW_ALL=True
    )
    def test_skip_if_referer_no_protocol(self):
        response = self._get_response(referer="example.com")
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ALLOW_INSECURE=True
    )
    def test_skip_if_no_referer_insecure(self):
        response = self._get_response(delete_referer=True)
        self._assert_cookie_sent(response, False)
    @override_settings(
        CROSS_DOMAIN_CSRF_COOKIE_NAME=COOKIE_NAME,
        CROSS_DOMAIN_CSRF_COOKIE_DOMAIN=COOKIE_DOMAIN,
        CORS_ORIGIN_WHITELIST=['www.example.com']
    )
    def test_set_cross_domain_cookie(self):
        # The one positive case: everything configured and whitelisted.
        response = self._get_response()
        self._assert_cookie_sent(response, True)
    def _get_response(self,
                      is_secure=True,
                      csrf_cookie_used=True,
                      cross_domain_decorator=True,
                      referer=None,
                      host=None,
                      delete_referer=False):
        """Process a request using the middleware. """
        request = Mock()
        request.META = {
            'HTTP_REFERER': (
                referer if referer is not None
                else self.REFERER
            )
        }
        request.is_secure = lambda: is_secure
        if host is not None:
            request.get_host = lambda: host
        if delete_referer:
            del request.META['HTTP_REFERER']
        if csrf_cookie_used:
            request.META['CSRF_COOKIE_USED'] = True
            request.META['CSRF_COOKIE'] = self.COOKIE_VALUE
        if cross_domain_decorator:
            request.META['CROSS_DOMAIN_CSRF_COOKIE_USED'] = True
        return self.middleware.process_response(request, HttpResponse())
    def _assert_cookie_sent(self, response, is_set):
        """Check that the cross-domain CSRF cookie was sent. """
        if is_set:
            self.assertIn(self.COOKIE_NAME, response.cookies)
            cookie_header = str(response.cookies[self.COOKIE_NAME])
            expected = 'Set-Cookie: {name}={value}; Domain={domain};'.format(
                name=self.COOKIE_NAME,
                value=self.COOKIE_VALUE,
                domain=self.COOKIE_DOMAIN
            )
            self.assertIn(expected, cookie_header)
            self.assertIn('Max-Age=31449600; Path=/; secure', cookie_header)
        else:
            self.assertNotIn(self.COOKIE_NAME, response.cookies)
|
agpl-3.0
|
yjmade/odoo
|
addons/website_event/tests/__init__.py
|
413
|
1072
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test_ui
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
hugohmk/Epidemic-Emulator
|
main.py
|
1
|
7208
|
from epidemic_emulator import node
from datetime import datetime
import platform
import argparse
import time
import os
import matplotlib.pyplot as plt
import random
def parse_network(f, node_id, topology = "clique"):
    """Parse a network description file and return (neighbors, own_node).

    Each line of *f* is "node_id|ip|port|initial_state"; malformed lines are
    skipped.  Every node becomes a tuple
    (id, (ip, port), [(state, timedelta(0))]).  *neighbors* depends on the
    requested topology ("clique" = everyone else, "star" = hub-and-spoke
    with the first node as hub); *own_node* is the entry matching *node_id*
    (None when absent).  The file object is closed before returning.
    """
    # Zero timedelta, computed without importing timedelta directly.
    zero = datetime.now()
    zero = zero - zero
    net = []
    own = None
    own_index = -1
    for raw_line in f:
        fields = raw_line.rstrip("\n").split("|")
        if len(fields) < 4:
            continue
        entry = (fields[0], (fields[1], int(fields[2])), [(fields[3], zero)])
        if fields[0] == node_id:
            own = entry
            own_index = len(net)
        net.append(entry)
    f.close()
    neighbors = []
    if topology == "clique":
        # Fully connected: every node except ourselves.
        neighbors = [entry for entry in net if entry[0] != node_id]
    elif topology == "star":
        # Hub-and-spoke: the first listed node is the hub.
        neighbors = [net[0]] if own_index > 0 else net[1:]
    return neighbors, own
def simulation_controller(args,nd,network):
    # Drive one emulator node according to args.interaction:
    #   0  -> run until the node is stopped externally,
    #   1  -> interactive command prompt (Python 2: raw_input / print),
    #   >1 -> timed simulation for `interaction` seconds; returns the
    #         collected simulation data (other modes return None).
    #
    # Example nd value:
    #('9', ('127.0.0.1', 9179), [('S', datetime.timedelta(0))])
    #
    # network is a tuple containing every node identifier constructed from
    # args.network (default=network.txt) file
    r = args.recovery_rate
    e = args.endogenous_rate
    x = args.exogenous_rate
    if nd is not None:
        # node.Node is a context manager; the epidemic process runs inside it.
        with node.Node(r,e,x) as a:
            a.start(nd, network)
            if args.interaction == 1:
                try:
                    help_text = """>> Commands:
0 (help) -> print this
1 (print current) -> print current network state
2 (print history) -> print network history
3 (end) -> send shutdown message to all nodes
4 (display state) -> display current network state
5 (display history) -> display network history
"""
                    print help_text
                    while True:
                        opt = raw_input(">> Insert command: ")
                        if opt == "0":
                            print help_text
                        elif opt == "1":
                            #print a.network_state(),"\n"
                            a.print_state()
                        elif opt == "2":
                            #print a.network_history(),"\n"
                            a.print_history()
                        elif opt == "3":
                            # Show the final history, then shut the whole
                            # network down and leave the command loop.
                            a.display_history()
                            a.network_shutdown()
                            a.stop()
                            break
                        elif opt == "4":
                            a.display_state()
                        elif opt == "5":
                            a.display_history()
                        else:
                            print "Invalid input\n"
                except:
                    # Any error (including Ctrl-C) tears the network down.
                    a.network_shutdown()
                    a.stop()
                finally:
                    a.network_shutdown()
                    a.stop()
            elif args.interaction > 1:
                # Timed, non-interactive simulation run.
                print("Running simulation for %d seconds." % args.interaction)
                time.sleep(args.interaction)
                #a.display_history()
                simdata = a.save_simulation_data()
                a.network_shutdown()
                a.stop()
                return simdata
            else:
                # Passive mode: idle until the node is stopped remotely.
                try:
                    while not a.stopped():
                        time.sleep(2)
                except:
                    a.stop()
                finally:
                    a.stop()
def process_data(simdata,repetitions,simulation_time):
    # Resample each run's (timestamp, infected_count) event list onto a
    # 1-second grid, average across runs, and save a plot of the mean
    # infected count to average_simulation.pdf.
    #
    # simresults[k][t] = infected count of run k at integer second t.
    simresults = [[-1 for t in range(simulation_time+1)] for x in range(repetitions)]
    print_stuff = 1  # debug printing toggle
    for k in range(repetitions):
        if print_stuff:
            print("")
            print("Run #%d" % (k+1))
            print("time\tinfected count")
        t = 0
        for event in simdata[k]:
            if print_stuff: print("%.2f\t%d" % (event[0],event[1]))
            time = int(event[0])
            infected_count = event[1]
            # Events are step changes: carry the count forward up to the
            # event's (truncated) timestamp.
            if time < t:
                continue
            elif t < simulation_time+1:
                if print_stuff: print("* %.2f" % event[0])
                while t <= time:
                    simresults[k][t] = infected_count
                    t = t+1
        # Extend the final count to the end of the simulation window.
        while t < simulation_time+1:
            simresults[k][t] = infected_count
            t = t+1
        if print_stuff:
            print("")
            print("Processed output:")
            print("time\tinfected count")
            for t in range(simulation_time+1):
                print("%d\t%d" % (t,simresults[k][t]))
    # Mean infected count per second across all repetitions.
    average_results = [0.0 for t in range(simulation_time+1)]
    for t in range(simulation_time+1):
        for k in range(repetitions):
            average_results[t] = average_results[t] + simresults[k][t]
        average_results[t] = float(average_results[t]) / repetitions
    print(average_results)
    plt.plot(list(range(0,simulation_time+1)),average_results,'-o')
    axes = plt.gca()
    axes.set_xlim([0,simulation_time])
    #axes.set_ylim([0,10])
    plt.xlabel("Seconds")
    plt.ylabel("Infected nodes")
    plt.savefig("average_simulation.pdf")
if __name__ == "__main__":
    # Resolve the directory of this script (normalizing path separators on
    # non-Windows platforms) so the default network file can be found.
    dir_path = os.path.dirname(os.path.realpath(__file__))
    dir_path_unix = dir_path.replace("\\","/")
    if (platform.system()!="Windows"): dir_path = dir_path_unix
    parser = argparse.ArgumentParser()
    parser.add_argument("-id","--identifier",required=True,
                        help="Node identifier")
    parser.add_argument("-n","--network",type=argparse.FileType('r'), default = dir_path_unix+"/network.txt",
                        help="File that contains the network's description; each line presents node_id|node_ip|port_number|initial_state")
#    parser.add_argument("-i","--interactive",type=int,default=0,
#                        help="Interactive mode")
    parser.add_argument("-i","--interaction",type=int,default=0,
                        help="Interaction mode: default (0), interactive (1), simulation (2)")
    parser.add_argument("-r","--recovery_rate",type=float,#default=1.0,
                        help="Simulation parameter: recovery_rate")
    parser.add_argument("-e","--endogenous_rate",type=float,#default=1.0,
                        help="Simulation parameter: endogenous_infection_rate")
    parser.add_argument("-x","--exogenous_rate",type=float,#default=1e-6,
                        help="Simulation parameter: exogenous_infection_rate")
    parser.add_argument("-t","--topology",choices=["clique","star"],default="clique",
                        help="Network topology: clique or star")
    args = parser.parse_args()
    network = {}
    if args.network is not None:
        network,nd = parse_network(args.network, args.identifier, args.topology)
    # Run the (single) repetition(s) and collect the per-run event data.
    simulation_time = args.interaction
    repetitions = 1
    simdata = []
    for i in range(repetitions):
        simdata.append(simulation_controller(args,nd,network))
    # Only node '0' aggregates and plots the results.
    if args.identifier == '0':
        process_data(simdata,repetitions,simulation_time)
|
mit
|
BrandonHe/sdl_core
|
src/components/dbus/codegen/code_formatter.py
|
13
|
2284
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @file code_formatter.py
# @brief Utility that helps to manage indents in generated code
#
# This file is a part of HMI D-Bus layer.
#
# Copyright (c) 2013, Ford Motor Company
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the
# distribution.
#
# Neither the name of the Ford Motor Company nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 'A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
class CodeBlock:
    """Output wrapper that prefixes every written line with the current
    indentation.

    Use as a context manager: entering increases the indent by one level,
    exiting decreases it.  Nesting a CodeBlock inside another CodeBlock
    inherits the outer block's indent level.
    """
    # Indent level; the class-level default is no indentation.
    indent = 0
    def __init__(self, out):
        """Wrap *out*, either a writable file-like object or another
        CodeBlock (whose indent level is then inherited)."""
        self.out = out
        # BUG FIX: the original compared ``type(out) == file``, which raises
        # NameError on Python 3 (the ``file`` builtin was removed) and
        # rejects subclasses.  isinstance() handles both cases and keeps the
        # old behavior: inherit the indent from a CodeBlock, otherwise 0.
        self.indent = out.indent if isinstance(out, CodeBlock) else 0
    def __enter__(self):
        self.indent += 1
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        # Guard keeps the indent from going negative on unbalanced exits.
        if self.indent >= 1:
            self.indent -= 1
    def write(self, s):
        """Write string *s*, indenting each of its lines."""
        for string in s.splitlines(True):
            self.out.write(' ' * self.indent + string)
# vim: set ts=4 sw=4 et:
|
bsd-3-clause
|
all-of-us/raw-data-repository
|
rdr_service/lib_fhir/fhirclient_3_0_0/models/appointment_tests.py
|
1
|
9471
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.0.0.11832 on 2017-03-22.
# 2017, SMART Health IT.
import io
import json
import os
import unittest
from . import appointment
from .fhirdate import FHIRDate
class AppointmentTests(unittest.TestCase):
    """Round-trip tests for the FHIR ``Appointment`` resource model.

    Auto-generated from FHIR 3.0.0 example JSON fixtures: each test loads a
    fixture, checks its fields, serializes it back to JSON, re-parses it and
    checks the fields again (parse -> serialize -> parse round trip).
    """
    def instantiate_from(self, filename):
        # Fixtures live under FHIR_UNITTEST_DATADIR (or the cwd if unset).
        datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
        with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
            js = json.load(handle)
            self.assertEqual("Appointment", js["resourceType"])
        return appointment.Appointment(js)
    def testAppointment1(self):
        # Round-trip the "request" example fixture.
        inst = self.instantiate_from("appointment-example-request.json")
        self.assertIsNotNone(inst, "Must have instantiated a Appointment instance")
        self.implAppointment1(inst)
        js = inst.as_json()
        self.assertEqual("Appointment", js["resourceType"])
        inst2 = appointment.Appointment(js)
        self.implAppointment1(inst2)
    def implAppointment1(self, inst):
        # Field-by-field expectations for appointment-example-request.json.
        self.assertEqual(inst.appointmentType.coding[0].code, "wi")
        self.assertEqual(inst.appointmentType.coding[0].display, "Walk in")
        self.assertEqual(inst.appointmentType.coding[0].system, "http://example.org/appointment-type")
        self.assertEqual(inst.comment, "Further expand on the results of the MRI and determine the next actions that may be appropriate.")
        self.assertEqual(inst.created.date, FHIRDate("2015-12-02").date)
        self.assertEqual(inst.created.as_json(), "2015-12-02")
        self.assertEqual(inst.description, "Discussion on the results of your recent MRI")
        self.assertEqual(inst.id, "examplereq")
        self.assertEqual(inst.identifier[0].system, "http://example.org/sampleappointment-identifier")
        self.assertEqual(inst.identifier[0].value, "123")
        self.assertEqual(inst.minutesDuration, 15)
        self.assertEqual(inst.participant[0].required, "required")
        self.assertEqual(inst.participant[0].status, "needs-action")
        self.assertEqual(inst.participant[1].required, "required")
        self.assertEqual(inst.participant[1].status, "needs-action")
        self.assertEqual(inst.participant[1].type[0].coding[0].code, "ATND")
        self.assertEqual(inst.participant[1].type[0].coding[0].system, "http://hl7.org/fhir/v3/ParticipationType")
        self.assertEqual(inst.participant[2].required, "required")
        self.assertEqual(inst.participant[2].status, "accepted")
        self.assertEqual(inst.priority, 5)
        self.assertEqual(inst.reason[0].coding[0].code, "413095006")
        self.assertEqual(inst.reason[0].coding[0].system, "http://snomed.info/sct")
        self.assertEqual(inst.reason[0].text, "Clinical Review")
        self.assertEqual(inst.requestedPeriod[0].end.date, FHIRDate("2016-06-09").date)
        self.assertEqual(inst.requestedPeriod[0].end.as_json(), "2016-06-09")
        self.assertEqual(inst.requestedPeriod[0].start.date, FHIRDate("2016-06-02").date)
        self.assertEqual(inst.requestedPeriod[0].start.as_json(), "2016-06-02")
        self.assertEqual(inst.serviceCategory.coding[0].code, "gp")
        self.assertEqual(inst.serviceCategory.coding[0].display, "General Practice")
        self.assertEqual(inst.serviceCategory.coding[0].system, "http://example.org/service-category")
        self.assertEqual(inst.specialty[0].coding[0].code, "gp")
        self.assertEqual(inst.specialty[0].coding[0].display, "General Practice")
        self.assertEqual(inst.specialty[0].coding[0].system, "http://example.org/specialty")
        self.assertEqual(inst.status, "proposed")
        self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">Brian MRI results discussion</div>")
        self.assertEqual(inst.text.status, "generated")
    def testAppointment2(self):
        # Round-trip the booked example fixture.
        inst = self.instantiate_from("appointment-example.json")
        self.assertIsNotNone(inst, "Must have instantiated a Appointment instance")
        self.implAppointment2(inst)
        js = inst.as_json()
        self.assertEqual("Appointment", js["resourceType"])
        inst2 = appointment.Appointment(js)
        self.implAppointment2(inst2)
    def implAppointment2(self, inst):
        # Field-by-field expectations for appointment-example.json.
        self.assertEqual(inst.appointmentType.coding[0].code, "follow")
        self.assertEqual(inst.appointmentType.coding[0].display, "Followup")
        self.assertEqual(inst.appointmentType.coding[0].system, "http://example.org/appointment-type")
        self.assertEqual(inst.comment, "Further expand on the results of the MRI and determine the next actions that may be appropriate.")
        self.assertEqual(inst.created.date, FHIRDate("2013-10-10").date)
        self.assertEqual(inst.created.as_json(), "2013-10-10")
        self.assertEqual(inst.description, "Discussion on the results of your recent MRI")
        self.assertEqual(inst.end.date, FHIRDate("2013-12-10T11:00:00Z").date)
        self.assertEqual(inst.end.as_json(), "2013-12-10T11:00:00Z")
        self.assertEqual(inst.id, "example")
        self.assertEqual(inst.participant[0].required, "required")
        self.assertEqual(inst.participant[0].status, "accepted")
        self.assertEqual(inst.participant[1].required, "required")
        self.assertEqual(inst.participant[1].status, "accepted")
        self.assertEqual(inst.participant[1].type[0].coding[0].code, "ATND")
        self.assertEqual(inst.participant[1].type[0].coding[0].system, "http://hl7.org/fhir/v3/ParticipationType")
        self.assertEqual(inst.participant[2].required, "required")
        self.assertEqual(inst.participant[2].status, "accepted")
        self.assertEqual(inst.priority, 5)
        self.assertEqual(inst.serviceCategory.coding[0].code, "gp")
        self.assertEqual(inst.serviceCategory.coding[0].display, "General Practice")
        self.assertEqual(inst.serviceCategory.coding[0].system, "http://example.org/service-category")
        self.assertEqual(inst.serviceType[0].coding[0].code, "52")
        self.assertEqual(inst.serviceType[0].coding[0].display, "General Discussion")
        self.assertEqual(inst.specialty[0].coding[0].code, "gp")
        self.assertEqual(inst.specialty[0].coding[0].display, "General Practice")
        self.assertEqual(inst.specialty[0].coding[0].system, "http://example.org/specialty")
        self.assertEqual(inst.start.date, FHIRDate("2013-12-10T09:00:00Z").date)
        self.assertEqual(inst.start.as_json(), "2013-12-10T09:00:00Z")
        self.assertEqual(inst.status, "booked")
        self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">Brian MRI results discussion</div>")
        self.assertEqual(inst.text.status, "generated")
    def testAppointment3(self):
        # Round-trip the two-doctors example fixture.
        inst = self.instantiate_from("appointment-example2doctors.json")
        self.assertIsNotNone(inst, "Must have instantiated a Appointment instance")
        self.implAppointment3(inst)
        js = inst.as_json()
        self.assertEqual("Appointment", js["resourceType"])
        inst2 = appointment.Appointment(js)
        self.implAppointment3(inst2)
    def implAppointment3(self, inst):
        # Field-by-field expectations for appointment-example2doctors.json.
        self.assertEqual(inst.appointmentType.coding[0].code, "wi")
        self.assertEqual(inst.appointmentType.coding[0].display, "Walk in")
        self.assertEqual(inst.appointmentType.coding[0].system, "http://example.org/appointment-type")
        self.assertEqual(inst.comment, "Clarify the results of the MRI to ensure context of test was correct")
        self.assertEqual(inst.description, "Discussion about Peter Chalmers MRI results")
        self.assertEqual(inst.end.date, FHIRDate("2013-12-09T11:00:00Z").date)
        self.assertEqual(inst.end.as_json(), "2013-12-09T11:00:00Z")
        self.assertEqual(inst.id, "2docs")
        self.assertEqual(inst.participant[0].required, "information-only")
        self.assertEqual(inst.participant[0].status, "accepted")
        self.assertEqual(inst.participant[1].required, "required")
        self.assertEqual(inst.participant[1].status, "accepted")
        self.assertEqual(inst.participant[2].required, "required")
        self.assertEqual(inst.participant[2].status, "accepted")
        self.assertEqual(inst.participant[3].required, "information-only")
        self.assertEqual(inst.participant[3].status, "accepted")
        self.assertEqual(inst.priority, 5)
        self.assertEqual(inst.serviceCategory.coding[0].code, "gp")
        self.assertEqual(inst.serviceCategory.coding[0].display, "General Practice")
        self.assertEqual(inst.serviceCategory.coding[0].system, "http://example.org/service-category")
        self.assertEqual(inst.serviceType[0].coding[0].code, "52")
        self.assertEqual(inst.serviceType[0].coding[0].display, "General Discussion")
        self.assertEqual(inst.specialty[0].coding[0].code, "gp")
        self.assertEqual(inst.specialty[0].coding[0].display, "General Practice")
        self.assertEqual(inst.specialty[0].coding[0].system, "http://example.org/specialty")
        self.assertEqual(inst.start.date, FHIRDate("2013-12-09T09:00:00Z").date)
        self.assertEqual(inst.start.as_json(), "2013-12-09T09:00:00Z")
        self.assertEqual(inst.status, "booked")
        self.assertEqual(inst.text.div, "<div xmlns=\"http://www.w3.org/1999/xhtml\">Brian MRI results discussion</div>")
        self.assertEqual(inst.text.status, "generated")
|
bsd-3-clause
|
grlee77/numpy
|
numpy/distutils/cpuinfo.py
|
17
|
22657
|
#!/usr/bin/env python3
"""
cpuinfo
Copyright 2002 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@cens.ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy (BSD style) license. See LICENSE.txt that came with
this distribution for specifics.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
Pearu Peterson
"""
__all__ = ['cpu']
import os
import platform
import re
import sys
import types
import warnings
from subprocess import getstatusoutput
def getoutput(cmd, successful_status=(0,), stacklevel=1):
    """Run shell command *cmd* and return ``(ok, output)``.

    *ok* is True when the command's exit code is in *successful_status*;
    *output* is the command's combined stdout/stderr text.  A failure to
    launch the command is reported via a warning and yields ``(False, "")``.
    """
    try:
        status, output = getstatusoutput(cmd)
    except EnvironmentError as e:
        warnings.warn(str(e), UserWarning, stacklevel=stacklevel)
        return False, ""
    # BUG FIX: subprocess.getstatusoutput() (Python >= 3.3.4) returns the
    # process *return code* directly (negative when killed by a signal),
    # not a raw wait() status word.  Feeding it through os.WIFEXITED()
    # wrongly rejected nonzero codes listed in successful_status and
    # fails outright on Windows, where os.WIFEXITED does not exist.
    if status in successful_status:
        return True, output
    return False, output
def command_info(successful_status=(0,), stacklevel=1, **kw):
    """Run each named shell command in *kw* and collect the stripped
    output of the ones that succeed into a dict keyed by name."""
    collected = {}
    for name, cmd in kw.items():
        success, text = getoutput(cmd, successful_status=successful_status,
                                  stacklevel=stacklevel + 1)
        if success:
            collected[name] = text.strip()
    return collected
def command_by_line(cmd, successful_status=(0,), stacklevel=1):
    """Generate the stripped lines of *cmd*'s output; yields nothing
    when the command fails."""
    success, text = getoutput(cmd, successful_status=successful_status,
                              stacklevel=stacklevel + 1)
    if not success:
        return
    yield from map(str.strip, text.splitlines())
def key_value_from_command(cmd, sep, successful_status=(0,),
                           stacklevel=1):
    """Parse *cmd*'s output as ``key<sep>value`` lines into a dict.

    Each line is split on the first occurrence of *sep*; lines that do
    not contain the separator are ignored.
    """
    table = {}
    for raw in command_by_line(cmd, successful_status=successful_status,
                               stacklevel=stacklevel + 1):
        pieces = [part.strip() for part in raw.split(sep, 1)]
        if len(pieces) == 2:
            table[pieces[0]] = pieces[1]
    return table
class CPUInfoBase:
    """Holds CPU information and provides methods for requiring
    the availability of various CPU features.
    """
    def _try_call(self, func):
        # Call func(), turning any failure into a silent None result.
        try:
            return func()
        except Exception:
            pass
    def __getattr__(self, name):
        # Attribute-based dispatch: every ``_is_*``/``_has_*`` helper is
        # exposed under its public name (``is_*``/``has_*``), wrapped in
        # _try_call so exceptions become None.  Public names with no
        # matching private helper resolve to a callable returning None;
        # unknown private names raise AttributeError as usual.
        if not name.startswith('_'):
            if hasattr(self, '_'+name):
                attr = getattr(self, '_'+name)
                if isinstance(attr, types.MethodType):
                    return lambda func=self._try_call,attr=attr : func(attr)
            else:
                return lambda : None
        raise AttributeError(name)
    def _getNCPUs(self):
        # Conservative default; platform subclasses override this.
        return 1
    def __get_nbits(self):
        # Pointer width ('32'/'64') parsed from platform.architecture().
        abits = platform.architecture()[0]
        nbits = re.compile(r'(\d+)bit').search(abits).group(1)
        return nbits
    def _is_32bit(self):
        return self.__get_nbits() == '32'
    def _is_64bit(self):
        return self.__get_nbits() == '64'
class LinuxCPUInfo(CPUInfoBase):
    """CPU information for Linux, parsed from /proc/cpuinfo and ``uname -m``."""
    # Parsed data is cached here on the class: a list with one dict per
    # processor (entry 0 also carries 'uname_m').
    info = None
    def __init__(self):
        # Only the first instance pays the cost of parsing /proc/cpuinfo.
        if self.info is not None:
            return
        info = [ {} ]
        ok, output = getoutput('uname -m')
        if ok:
            info[0]['uname_m'] = output.strip()
        try:
            fo = open('/proc/cpuinfo')
        except EnvironmentError as e:
            warnings.warn(str(e), UserWarning, stacklevel=2)
        else:
            # One dict per processor; a repeated key marks the start of
            # the next processor's stanza.
            for line in fo:
                name_value = [s.strip() for s in line.split(':', 1)]
                if len(name_value) != 2:
                    continue
                name, value = name_value
                if not info or name in info[-1]: # next processor
                    info.append({})
                info[-1][name] = value
            fo.close()
        self.__class__.info = info
    def _not_impl(self): pass
    # Athlon
    def _is_AMD(self):
        return self.info[0]['vendor_id']=='AuthenticAMD'
    def _is_AthlonK6_2(self):
        return self._is_AMD() and self.info[0]['model'] == '2'
    def _is_AthlonK6_3(self):
        return self._is_AMD() and self.info[0]['model'] == '3'
    def _is_AthlonK6(self):
        return re.match(r'.*?AMD-K6', self.info[0]['model name']) is not None
    def _is_AthlonK7(self):
        return re.match(r'.*?AMD-K7', self.info[0]['model name']) is not None
    def _is_AthlonMP(self):
        return re.match(r'.*?Athlon\(tm\) MP\b',
                        self.info[0]['model name']) is not None
    def _is_AMD64(self):
        # is_AMD resolves to _is_AMD via CPUInfoBase.__getattr__.
        return self.is_AMD() and self.info[0]['family'] == '15'
    def _is_Athlon64(self):
        return re.match(r'.*?Athlon\(tm\) 64\b',
                        self.info[0]['model name']) is not None
    def _is_AthlonHX(self):
        return re.match(r'.*?Athlon HX\b',
                        self.info[0]['model name']) is not None
    def _is_Opteron(self):
        return re.match(r'.*?Opteron\b',
                        self.info[0]['model name']) is not None
    def _is_Hammer(self):
        return re.match(r'.*?Hammer\b',
                        self.info[0]['model name']) is not None
    # Alpha
    def _is_Alpha(self):
        return self.info[0]['cpu']=='Alpha'
    def _is_EV4(self):
        return self.is_Alpha() and self.info[0]['cpu model'] == 'EV4'
    def _is_EV5(self):
        return self.is_Alpha() and self.info[0]['cpu model'] == 'EV5'
    def _is_EV56(self):
        return self.is_Alpha() and self.info[0]['cpu model'] == 'EV56'
    def _is_PCA56(self):
        return self.is_Alpha() and self.info[0]['cpu model'] == 'PCA56'
    # Intel
    #XXX
    _is_i386 = _not_impl
    def _is_Intel(self):
        return self.info[0]['vendor_id']=='GenuineIntel'
    def _is_i486(self):
        return self.info[0]['cpu']=='i486'
    def _is_i586(self):
        return self.is_Intel() and self.info[0]['cpu family'] == '5'
    def _is_i686(self):
        return self.is_Intel() and self.info[0]['cpu family'] == '6'
    def _is_Celeron(self):
        return re.match(r'.*?Celeron',
                        self.info[0]['model name']) is not None
    def _is_Pentium(self):
        return re.match(r'.*?Pentium',
                        self.info[0]['model name']) is not None
    def _is_PentiumII(self):
        return re.match(r'.*?Pentium.*?II\b',
                        self.info[0]['model name']) is not None
    def _is_PentiumPro(self):
        return re.match(r'.*?PentiumPro\b',
                        self.info[0]['model name']) is not None
    def _is_PentiumMMX(self):
        return re.match(r'.*?Pentium.*?MMX\b',
                        self.info[0]['model name']) is not None
    def _is_PentiumIII(self):
        return re.match(r'.*?Pentium.*?III\b',
                        self.info[0]['model name']) is not None
    def _is_PentiumIV(self):
        return re.match(r'.*?Pentium.*?(IV|4)\b',
                        self.info[0]['model name']) is not None
    def _is_PentiumM(self):
        return re.match(r'.*?Pentium.*?M\b',
                        self.info[0]['model name']) is not None
    def _is_Prescott(self):
        return self.is_PentiumIV() and self.has_sse3()
    def _is_Nocona(self):
        # 'lm' (long mode) in the flags marks 64-bit capability.
        return (self.is_Intel()
                and (self.info[0]['cpu family'] == '6'
                     or self.info[0]['cpu family'] == '15')
                and (self.has_sse3() and not self.has_ssse3())
                and re.match(r'.*?\blm\b', self.info[0]['flags']) is not None)
    def _is_Core2(self):
        return (self.is_64bit() and self.is_Intel() and
                re.match(r'.*?Core\(TM\)2\b',
                         self.info[0]['model name']) is not None)
    def _is_Itanium(self):
        return re.match(r'.*?Itanium\b',
                        self.info[0]['family']) is not None
    def _is_XEON(self):
        return re.match(r'.*?XEON\b',
                        self.info[0]['model name'], re.IGNORECASE) is not None
    _is_Xeon = _is_XEON
    # Varia
    def _is_singleCPU(self):
        return len(self.info) == 1
    def _getNCPUs(self):
        return len(self.info)
    def _has_fdiv_bug(self):
        return self.info[0]['fdiv_bug']=='yes'
    def _has_f00f_bug(self):
        return self.info[0]['f00f_bug']=='yes'
    def _has_mmx(self):
        return re.match(r'.*?\bmmx\b', self.info[0]['flags']) is not None
    def _has_sse(self):
        return re.match(r'.*?\bsse\b', self.info[0]['flags']) is not None
    def _has_sse2(self):
        return re.match(r'.*?\bsse2\b', self.info[0]['flags']) is not None
    def _has_sse3(self):
        # The kernel reports SSE3 under the flag name 'pni'.
        return re.match(r'.*?\bpni\b', self.info[0]['flags']) is not None
    def _has_ssse3(self):
        return re.match(r'.*?\bssse3\b', self.info[0]['flags']) is not None
    def _has_3dnow(self):
        return re.match(r'.*?\b3dnow\b', self.info[0]['flags']) is not None
    def _has_3dnowext(self):
        return re.match(r'.*?\b3dnowext\b', self.info[0]['flags']) is not None
class IRIXCPUInfo(CPUInfoBase):
    """CPU information for IRIX, parsed from the ``sysconf`` command."""
    # Cached key/value dict of sysconf output, shared on the class.
    info = None
    def __init__(self):
        if self.info is not None:
            return
        # sysconf may exit with status 1 and still produce usable output.
        info = key_value_from_command('sysconf', sep=' ',
                                      successful_status=(0, 1))
        self.__class__.info = info
    def _not_impl(self): pass
    def _is_singleCPU(self):
        return self.info.get('NUM_PROCESSORS') == '1'
    def _getNCPUs(self):
        return int(self.info.get('NUM_PROCESSORS', 1))
    def __cputype(self, n):
        # True when the first word of PROCESSORS is 'r<n>' (e.g. 'r10000').
        return self.info.get('PROCESSORS').split()[0].lower() == 'r%s' % (n)
    def _is_r2000(self): return self.__cputype(2000)
    def _is_r3000(self): return self.__cputype(3000)
    def _is_r3900(self): return self.__cputype(3900)
    def _is_r4000(self): return self.__cputype(4000)
    def _is_r4100(self): return self.__cputype(4100)
    def _is_r4300(self): return self.__cputype(4300)
    def _is_r4400(self): return self.__cputype(4400)
    def _is_r4600(self): return self.__cputype(4600)
    def _is_r4650(self): return self.__cputype(4650)
    def _is_r5000(self): return self.__cputype(5000)
    def _is_r6000(self): return self.__cputype(6000)
    def _is_r8000(self): return self.__cputype(8000)
    def _is_r10000(self): return self.__cputype(10000)
    def _is_r12000(self): return self.__cputype(12000)
    def _is_rorion(self): return self.__cputype('orion')
    def get_ip(self):
        # MACHINE holds the SGI 'IP' board designation; ignore lookup errors.
        try: return self.info.get('MACHINE')
        except Exception: pass
    def __machine(self, n):
        # True when MACHINE equals 'IP<n>' (compared case-insensitively).
        return self.info.get('MACHINE').lower() == 'ip%s' % (n)
    def _is_IP19(self): return self.__machine(19)
    def _is_IP20(self): return self.__machine(20)
    def _is_IP21(self): return self.__machine(21)
    def _is_IP22(self): return self.__machine(22)
    def _is_IP22_4k(self): return self.__machine(22) and self._is_r4000()
    def _is_IP22_5k(self): return self.__machine(22)  and self._is_r5000()
    def _is_IP24(self): return self.__machine(24)
    def _is_IP25(self): return self.__machine(25)
    def _is_IP26(self): return self.__machine(26)
    def _is_IP27(self): return self.__machine(27)
    def _is_IP28(self): return self.__machine(28)
    def _is_IP30(self): return self.__machine(30)
    def _is_IP32(self): return self.__machine(32)
    def _is_IP32_5k(self): return self.__machine(32) and self._is_r5000()
    def _is_IP32_10k(self): return self.__machine(32) and self._is_r10000()
class DarwinCPUInfo(CPUInfoBase):
    """CPU information for Mac OS X, gathered from the ``arch`` and
    ``machine`` commands plus ``sysctl hw``."""
    # Class-level cache populated by the first instantiation.
    info = None

    def __init__(self):
        if self.info is not None:
            return
        gathered = command_info(arch='arch', machine='machine')
        gathered['sysctl_hw'] = key_value_from_command('sysctl hw', sep='=')
        self.__class__.info = gathered

    def _not_impl(self):
        pass

    def _getNCPUs(self):
        hw = self.info['sysctl_hw']
        return int(hw.get('hw.ncpu', 1))

    def _is_Power_Macintosh(self):
        return self.info['sysctl_hw']['hw.machine'] == 'Power Macintosh'

    def _is_i386(self):
        return self.info['arch'] == 'i386'

    def _is_ppc(self):
        return self.info['arch'] == 'ppc'

    def __machine(self, model):
        # PPC model test against the ``machine`` command's output.
        return self.info['machine'] == 'ppc%s' % model

    def _is_ppc601(self):
        return self.__machine(601)

    def _is_ppc602(self):
        return self.__machine(602)

    def _is_ppc603(self):
        return self.__machine(603)

    def _is_ppc603e(self):
        return self.__machine('603e')

    def _is_ppc604(self):
        return self.__machine(604)

    def _is_ppc604e(self):
        return self.__machine('604e')

    def _is_ppc620(self):
        return self.__machine(620)

    def _is_ppc630(self):
        return self.__machine(630)

    def _is_ppc740(self):
        return self.__machine(740)

    def _is_ppc7400(self):
        return self.__machine(7400)

    def _is_ppc7450(self):
        return self.__machine(7450)

    def _is_ppc750(self):
        return self.__machine(750)

    def _is_ppc403(self):
        return self.__machine(403)

    def _is_ppc505(self):
        return self.__machine(505)

    def _is_ppc801(self):
        return self.__machine(801)

    def _is_ppc821(self):
        return self.__machine(821)

    def _is_ppc823(self):
        return self.__machine(823)

    def _is_ppc860(self):
        return self.__machine(860)
class SunOSCPUInfo(CPUInfoBase):
    """CPU information for Solaris, gathered from arch/mach/uname/isainfo
    and ``psrinfo -v 0``."""
    # Class-level cache populated by the first instantiation.
    info = None
    def __init__(self):
        if self.info is not None:
            return
        info = command_info(arch='arch',
                            mach='mach',
                            uname_i='uname_i',
                            isainfo_b='isainfo -b',
                            isainfo_n='isainfo -n',
                            )
        info['uname_X'] = key_value_from_command('uname -X', sep='=')
        # Processor name (e.g. 'sparcv9') extracted from psrinfo output.
        for line in command_by_line('psrinfo -v 0'):
            m = re.match(r'\s*The (?P<p>[\w\d]+) processor operates at', line)
            if m:
                info['processor'] = m.group('p')
                break
        self.__class__.info = info
    def _not_impl(self): pass
    def _is_i386(self):
        return self.info['isainfo_n']=='i386'
    def _is_sparc(self):
        return self.info['isainfo_n']=='sparc'
    def _is_sparcv9(self):
        return self.info['isainfo_n']=='sparcv9'
    def _getNCPUs(self):
        return int(self.info['uname_X'].get('NumCPU', 1))
    def _is_sun4(self):
        return self.info['arch']=='sun4'
    def _is_SUNW(self):
        return re.match(r'SUNW', self.info['uname_i']) is not None
    def _is_sparcstation5(self):
        return re.match(r'.*SPARCstation-5', self.info['uname_i']) is not None
    def _is_ultra1(self):
        return re.match(r'.*Ultra-1', self.info['uname_i']) is not None
    def _is_ultra250(self):
        return re.match(r'.*Ultra-250', self.info['uname_i']) is not None
    def _is_ultra2(self):
        return re.match(r'.*Ultra-2', self.info['uname_i']) is not None
    def _is_ultra30(self):
        return re.match(r'.*Ultra-30', self.info['uname_i']) is not None
    def _is_ultra4(self):
        return re.match(r'.*Ultra-4', self.info['uname_i']) is not None
    def _is_ultra5_10(self):
        return re.match(r'.*Ultra-5_10', self.info['uname_i']) is not None
    def _is_ultra5(self):
        return re.match(r'.*Ultra-5', self.info['uname_i']) is not None
    def _is_ultra60(self):
        return re.match(r'.*Ultra-60', self.info['uname_i']) is not None
    def _is_ultra80(self):
        return re.match(r'.*Ultra-80', self.info['uname_i']) is not None
    def _is_ultraenterprice(self):
        return re.match(r'.*Ultra-Enterprise', self.info['uname_i']) is not None
    def _is_ultraenterprice10k(self):
        return re.match(r'.*Ultra-Enterprise-10000', self.info['uname_i']) is not None
    def _is_sunfire(self):
        return re.match(r'.*Sun-Fire', self.info['uname_i']) is not None
    def _is_ultra(self):
        return re.match(r'.*Ultra', self.info['uname_i']) is not None
    def _is_cpusparcv7(self):
        return self.info['processor']=='sparcv7'
    def _is_cpusparcv8(self):
        return self.info['processor']=='sparcv8'
    def _is_cpusparcv9(self):
        return self.info['processor']=='sparcv9'
class Win32CPUInfo(CPUInfoBase):
    """CPU information for Windows, read from the registry's
    CentralProcessor keys."""
    # Class-level cache: a list with one dict per logical processor.
    info = None
    pkey = r"HARDWARE\DESCRIPTION\System\CentralProcessor"
    # XXX: what does the value of
    #   HKEY_LOCAL_MACHINE\HARDWARE\DESCRIPTION\System\CentralProcessor\0
    # mean?
    def __init__(self):
        if self.info is not None:
            return
        info = []
        try:
            #XXX: Bad style to use so long `try:...except:...`. Fix it!
            import winreg
            # "Identifier" values look like "... Family F Model M Stepping S".
            prgx = re.compile(r"family\s+(?P<FML>\d+)\s+model\s+(?P<MDL>\d+)"
                              r"\s+stepping\s+(?P<STP>\d+)", re.IGNORECASE)
            chnd=winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, self.pkey)
            pnum=0
            while True:
                try:
                    proc=winreg.EnumKey(chnd, pnum)
                except winreg.error:
                    break
                else:
                    pnum+=1
                    info.append({"Processor":proc})
                    phnd=winreg.OpenKey(chnd, proc)
                    pidx=0
                    while True:
                        try:
                            name, value, vtpe=winreg.EnumValue(phnd, pidx)
                        except winreg.error:
                            break
                        else:
                            pidx=pidx+1
                            info[-1][name]=value
                            if name=="Identifier":
                                srch=prgx.search(value)
                                if srch:
                                    info[-1]["Family"]=int(srch.group("FML"))
                                    info[-1]["Model"]=int(srch.group("MDL"))
                                    info[-1]["Stepping"]=int(srch.group("STP"))
        except Exception as e:
            print(e, '(ignoring)')
        self.__class__.info = info
    def _not_impl(self): pass
    # Athlon
    def _is_AMD(self):
        return self.info[0]['VendorIdentifier']=='AuthenticAMD'
    def _is_Am486(self):
        return self.is_AMD() and self.info[0]['Family']==4
    def _is_Am5x86(self):
        return self.is_AMD() and self.info[0]['Family']==4
    def _is_AMDK5(self):
        return self.is_AMD() and self.info[0]['Family']==5 \
               and self.info[0]['Model'] in [0, 1, 2, 3]
    def _is_AMDK6(self):
        return self.is_AMD() and self.info[0]['Family']==5 \
               and self.info[0]['Model'] in [6, 7]
    def _is_AMDK6_2(self):
        return self.is_AMD() and self.info[0]['Family']==5 \
               and self.info[0]['Model']==8
    def _is_AMDK6_3(self):
        return self.is_AMD() and self.info[0]['Family']==5 \
               and self.info[0]['Model']==9
    def _is_AMDK7(self):
        return self.is_AMD() and self.info[0]['Family'] == 6
    # To reliably distinguish between the different types of AMD64 chips
    # (Athlon64, Operton, Athlon64 X2, Semperon, Turion 64, etc.) would
    # require looking at the 'brand' from cpuid
    def _is_AMD64(self):
        return self.is_AMD() and self.info[0]['Family'] == 15
    # Intel
    def _is_Intel(self):
        return self.info[0]['VendorIdentifier']=='GenuineIntel'
    def _is_i386(self):
        return self.info[0]['Family']==3
    def _is_i486(self):
        return self.info[0]['Family']==4
    def _is_i586(self):
        return self.is_Intel() and self.info[0]['Family']==5
    def _is_i686(self):
        return self.is_Intel() and self.info[0]['Family']==6
    def _is_Pentium(self):
        return self.is_Intel() and self.info[0]['Family']==5
    def _is_PentiumMMX(self):
        return self.is_Intel() and self.info[0]['Family']==5 \
               and self.info[0]['Model']==4
    def _is_PentiumPro(self):
        return self.is_Intel() and self.info[0]['Family']==6 \
               and self.info[0]['Model']==1
    def _is_PentiumII(self):
        return self.is_Intel() and self.info[0]['Family']==6 \
               and self.info[0]['Model'] in [3, 5, 6]
    def _is_PentiumIII(self):
        return self.is_Intel() and self.info[0]['Family']==6 \
               and self.info[0]['Model'] in [7, 8, 9, 10, 11]
    def _is_PentiumIV(self):
        return self.is_Intel() and self.info[0]['Family']==15
    def _is_PentiumM(self):
        return self.is_Intel() and self.info[0]['Family'] == 6 \
               and self.info[0]['Model'] in [9, 13, 14]
    def _is_Core2(self):
        return self.is_Intel() and self.info[0]['Family'] == 6 \
               and self.info[0]['Model'] in [15, 16, 17]
    # Varia
    def _is_singleCPU(self):
        return len(self.info) == 1
    def _getNCPUs(self):
        return len(self.info)
    def _has_mmx(self):
        if self.is_Intel():
            return (self.info[0]['Family']==5 and self.info[0]['Model']==4) \
                   or (self.info[0]['Family'] in [6, 15])
        elif self.is_AMD():
            return self.info[0]['Family'] in [5, 6, 15]
        else:
            return False
    def _has_sse(self):
        if self.is_Intel():
            return ((self.info[0]['Family']==6 and
                     self.info[0]['Model'] in [7, 8, 9, 10, 11])
                    or self.info[0]['Family']==15)
        elif self.is_AMD():
            return ((self.info[0]['Family']==6 and
                     self.info[0]['Model'] in [6, 7, 8, 10])
                    or self.info[0]['Family']==15)
        else:
            return False
    def _has_sse2(self):
        if self.is_Intel():
            # BUG FIX: this used to call self.is_Pentium4(), which does not
            # exist (the method is _is_PentiumIV); the AttributeError was
            # swallowed by CPUInfoBase._try_call, so has_sse2() silently
            # returned None for every Intel CPU.
            return self.is_PentiumIV() or self.is_PentiumM() \
                   or self.is_Core2()
        elif self.is_AMD():
            return self.is_AMD64()
        else:
            return False
    def _has_3dnow(self):
        return self.is_AMD() and self.info[0]['Family'] in [5, 6, 15]
    def _has_3dnowext(self):
        return self.is_AMD() and self.info[0]['Family'] in [6, 15]
# Pick the CPU-information implementation matching the running platform.
# Cygwin exposes /proc/cpuinfo, so it shares the Linux implementation;
# any platform without a dedicated class falls back to the generic base.
if sys.platform.startswith(('linux', 'cygwin')):
    cpuinfo = LinuxCPUInfo
elif sys.platform.startswith('irix'):
    cpuinfo = IRIXCPUInfo
elif sys.platform == 'darwin':
    cpuinfo = DarwinCPUInfo
elif sys.platform.startswith('sunos'):
    cpuinfo = SunOSCPUInfo
elif sys.platform.startswith('win32'):
    cpuinfo = Win32CPUInfo
else:
    cpuinfo = CPUInfoBase
cpu = cpuinfo()
#if __name__ == "__main__":
#
# cpu.is_blaa()
# cpu.is_Intel()
# cpu.is_Alpha()
#
# print('CPU information:'),
# for name in dir(cpuinfo):
# if name[0]=='_' and name[1]!='_':
# r = getattr(cpu,name[1:])()
# if r:
# if r!=1:
# print('%s=%s' %(name[1:],r))
# else:
# print(name[1:]),
# print()
|
bsd-3-clause
|
fabianvf/osf.io
|
website/addons/citations/provider.py
|
27
|
4980
|
import abc
import httplib as http
from framework.exceptions import HTTPError
from framework.exceptions import PermissionsError
from website.oauth.models import ExternalAccount
class CitationsProvider(object):
    """Abstract base for citation-manager addon providers.

    Concrete subclasses supply a ``serializer`` plus the folder helpers
    ``_folder_to_dict`` and ``_folder_id``.
    """
    __metaclass__ = abc.ABCMeta
    def __init__(self, provider_name):
        self.provider_name = provider_name
    @abc.abstractproperty
    def serializer(self):
        # Serializer class used to build JSON-able settings payloads.
        pass
    def user_accounts(self, user):
        """ Gets a list of the accounts authorized by 'user' """
        return {
            'accounts': [
                self.serializer(
                    user_settings=user.get_addon(self.provider_name) if user else None
                ).serialize_account(each)
                for each in user.external_accounts
                if each.provider == self.provider_name
            ]
        }
    def set_config(self, node_addon, user, external_list_id, external_list_name, auth):
        """Point the node addon at the given external citation list."""
        # Ensure request has all required information
        node_addon.set_target_folder(external_list_id, external_list_name, auth)
    def add_user_auth(self, node_addon, user, external_account_id):
        """Attach one of *user*'s external accounts to the node addon.

        Raises HTTPError(403) when the account does not belong to the user
        or the addon refuses the authorization.
        """
        external_account = ExternalAccount.load(external_account_id)
        if external_account not in user.external_accounts:
            raise HTTPError(http.FORBIDDEN)
        try:
            node_addon.set_auth(external_account, user)
        except PermissionsError:
            raise HTTPError(http.FORBIDDEN)
        result = self.serializer(
            node_settings=node_addon,
            user_settings=user.get_addon(self.provider_name),
        ).serialized_node_settings
        return {'result': result}
    def remove_user_auth(self, node_addon, user):
        """Detach authorization from the node addon and return the
        refreshed serialized settings."""
        node_addon.clear_auth()
        node_addon.reload()
        result = self.serializer(
            node_settings=node_addon,
            user_settings=user.get_addon(self.provider_name),
        ).serialized_node_settings
        return {'result': result}
    def widget(self, node_addon):
        """Return the addon's widget config, annotated with completeness."""
        ret = node_addon.config.to_json()
        ret.update({
            'complete': node_addon.complete,
        })
        return ret
    def _extract_folder(self, folder):
        # Normalize a provider folder object into the dict shape used by
        # citation_list(); 'parent_list_id' is present only for non-roots.
        folder = self._folder_to_dict(folder)
        ret = {
            'name': folder['name'],
            'provider_list_id': folder['list_id'],
            'id': folder['id'],
        }
        if folder['parent_id']:
            ret['parent_list_id'] = folder['parent_id']
        return ret
    @abc.abstractmethod
    def _folder_to_dict(self, data):
        # Convert a provider-specific folder object to a plain dict.
        pass
    @abc.abstractmethod
    def _folder_id(self):
        # Return the node addon's attached folder/list id.
        return None
    def citation_list(self, node_addon, user, list_id, show='all'):
        """Return the folders and/or citations under *list_id*.

        Non-owners may only browse the attached list and its descendants;
        anything else raises HTTPError(403).
        """
        attached_list_id = self._folder_id(node_addon)
        account_folders = node_addon.api.citation_lists(self._extract_folder)
        # Folders with 'parent_list_id'==None are children of 'All Documents'
        for folder in account_folders:
            if folder.get('parent_list_id') is None:
                folder['parent_list_id'] = 'ROOT'
        node_account = node_addon.external_account
        user_accounts = [
            account for account in user.external_accounts
            if account.provider == self.provider_name
        ] if user else []
        user_is_owner = node_account in user_accounts
        # verify this list is the attached list or its descendant
        if not user_is_owner and (list_id != attached_list_id and attached_list_id is not None):
            folders = {
                (each['provider_list_id'] or 'ROOT'): each
                for each in account_folders
            }
            if list_id is None:
                ancestor_id = 'ROOT'
            else:
                ancestor_id = folders[list_id].get('parent_list_id')
            while ancestor_id != attached_list_id:
                # BUG FIX: the sentinel check used ``ancestor_id is '__'``,
                # an identity comparison against a string literal (flake8
                # F632) that is unreliable for strings built at runtime;
                # equality is the intended test.
                if ancestor_id == '__':
                    raise HTTPError(http.FORBIDDEN)
                ancestor_id = folders[ancestor_id].get('parent_list_id')
        contents = []
        if list_id is None:
            contents = [node_addon.root_folder]
        else:
            user_settings = user.get_addon(self.provider_name) if user else None
            if show in ('all', 'folders'):
                contents += [
                    self.serializer(
                        node_settings=node_addon,
                        user_settings=user_settings,
                    ).serialize_folder(each)
                    for each in account_folders
                    if each.get('parent_list_id') == list_id
                ]
            if show in ('all', 'citations'):
                contents += [
                    self.serializer(
                        node_settings=node_addon,
                        user_settings=user_settings,
                    ).serialize_citation(each)
                    for each in node_addon.api.get_list(list_id)
                ]
        return {
            'contents': contents
        }
|
apache-2.0
|
TonyThompson/fail2ban-patch
|
fail2ban/version.py
|
4
|
1122
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
# vi: set ft=python sts=4 ts=4 sw=4 noet :
# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Author: Cyril Jaquier
#
# Package metadata; presumably surfaced by tooling and version reporting —
# confirm against the fail2ban build/packaging scripts.
__author__ = "Cyril Jaquier, Yaroslav Halchenko, Steven Hiscocks, Daniel Black"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2011-2014 Yaroslav Halchenko, 2013-2013 Steven Hiscocks, Daniel Black"
__license__ = "GPL-v2+"
# Current version string; the '.dev' suffix marks an unreleased snapshot.
version = "0.9.1.dev"
|
gpl-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.