repo_name (stringlengths 6-100) | path (stringlengths 4-294) | copies (stringlengths 1-5) | size (stringlengths 4-6) | content (stringlengths 606-896k) | license (stringclasses, 15 values)
---|---|---|---|---|---|
scotthartbti/android_external_chromium_org | tools/json_comment_eater/json_comment_eater.py | 86 | 2063 |
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Utility to remove comments from JSON files so that they can be parsed by
json.loads.
'''
import sys
def _Rcount(string, chars):
'''Returns the number of consecutive characters from |chars| that occur at the
end of |string|.
'''
return len(string) - len(string.rstrip(chars))
def _FindNextToken(string, tokens, start):
'''Finds the next token in |tokens| that occurs in |string| from |start|.
Returns a tuple (index, token key).
'''
min_index, min_key = (-1, None)
for k in tokens:
index = string.find(k, start)
if index != -1 and (min_index == -1 or index < min_index):
min_index, min_key = (index, k)
return (min_index, min_key)
def _ReadString(input, start, output):
output.append('"')
start_range, end_range = (start, input.find('"', start))
# \" escapes the ", \\" doesn't, \\\" does, etc.
while (end_range != -1 and
_Rcount(input[start_range:end_range], '\\') % 2 == 1):
start_range, end_range = (end_range, input.find('"', end_range + 1))
if end_range == -1:
return start_range + 1
output.append(input[start:end_range + 1])
return end_range + 1
def _ReadComment(input, start, output):
eol_tokens = ('\n', '\r')
eol_token_index, eol_token = _FindNextToken(input, eol_tokens, start)
if eol_token is None:
return len(input)
output.append(eol_token)
return eol_token_index + len(eol_token)
def Nom(input):
token_actions = {
'"': _ReadString,
'//': _ReadComment,
}
output = []
pos = 0
while pos < len(input):
token_index, token = _FindNextToken(input, token_actions.keys(), pos)
if token is None:
output.append(input[pos:])
break
output.append(input[pos:token_index])
pos = token_actions[token](input, token_index + len(token), output)
return ''.join(output)
if __name__ == '__main__':
sys.stdout.write(Nom(sys.stdin.read()))
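# A hedged usage sketch (added for illustration; not part of the original Chromium
# file). It shows Nom() stripping a //-style comment so json.loads accepts the
# result, while '//' inside a string literal is left untouched.
def _nom_example():  # hypothetical helper, never called by the tool itself
    import json
    text = '{"url": "http://example.com", // trailing comment\n "n": 1}'
    return json.loads(Nom(text))  # -> {'url': 'http://example.com', 'n': 1}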
| bsd-3-clause |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/PyKDE4/kdecore/KEncodingDetector.py | 1 | 2735 |
# encoding: utf-8
# module PyKDE4.kdecore
# from /usr/lib/python2.7/dist-packages/PyKDE4/kdecore.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtNetwork as __PyQt4_QtNetwork
class KEncodingDetector(): # skipped bases: <type 'sip.wrapper'>
# no doc
def analyze(self, *args, **kwargs): # real signature unknown
pass
def autoDetectLanguage(self, *args, **kwargs): # real signature unknown
pass
def decode(self, *args, **kwargs): # real signature unknown
pass
def decodedInvalidCharacters(self, *args, **kwargs): # real signature unknown
pass
def decoder(self, *args, **kwargs): # real signature unknown
pass
def decodeWithBuffering(self, *args, **kwargs): # real signature unknown
pass
def encoding(self, *args, **kwargs): # real signature unknown
pass
def encodingChoiceSource(self, *args, **kwargs): # real signature unknown
pass
def errorsIfUtf8(self, *args, **kwargs): # real signature unknown
pass
def flush(self, *args, **kwargs): # real signature unknown
pass
def hasAutoDetectionForScript(self, *args, **kwargs): # real signature unknown
pass
def nameForScript(self, *args, **kwargs): # real signature unknown
pass
def processNull(self, *args, **kwargs): # real signature unknown
pass
def resetDecoder(self, *args, **kwargs): # real signature unknown
pass
def scriptForName(self, *args, **kwargs): # real signature unknown
pass
def setAutoDetectLanguage(self, *args, **kwargs): # real signature unknown
pass
def setEncoding(self, *args, **kwargs): # real signature unknown
pass
def visuallyOrdered(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
Arabic = 2
AutoDetectedEncoding = 1
AutoDetectScript = None # (!) real value is ''
Baltic = 3
BOM = 2
CentralEuropean = 4
ChineseSimplified = 5
ChineseTraditional = 6
Cyrillic = 7
DefaultEncoding = 0
EncodingChoiceSource = None # (!) real value is ''
EncodingFromHTTPHeader = 5
EncodingFromMetaTag = 4
EncodingFromXMLHeader = 3
Greek = 8
Hebrew = 9
Japanese = 10
Korean = 11
# None = 0  # (!) real enum value is 0, but 'None' cannot be assigned in Python
NorthernSaami = 12
SemiautomaticDetection = 1
SouthEasternEurope = 13
Thai = 14
Turkish = 15
Unicode = 16
UserChosenEncoding = 6
WesternEuropean = 17
| gpl-2.0 |
muhkuh-sys/org.muhkuh.tests-ramtest | jonchki/dulwich/objects.py | 4 | 45599 |
# objects.py -- Access to base git objects
# Copyright (C) 2007 James Westby <jw+debian@jameswestby.net>
# Copyright (C) 2008-2013 Jelmer Vernooij <jelmer@jelmer.uk>
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as public by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
"""Access to base git objects."""
import binascii
from io import BytesIO
from collections import namedtuple
import os
import posixpath
import stat
import sys
import warnings
import zlib
from hashlib import sha1
from dulwich.errors import (
ChecksumMismatch,
NotBlobError,
NotCommitError,
NotTagError,
NotTreeError,
ObjectFormatException,
EmptyFileException,
)
from dulwich.file import GitFile
ZERO_SHA = b'0' * 40
# Header fields for commits
_TREE_HEADER = b'tree'
_PARENT_HEADER = b'parent'
_AUTHOR_HEADER = b'author'
_COMMITTER_HEADER = b'committer'
_ENCODING_HEADER = b'encoding'
_MERGETAG_HEADER = b'mergetag'
_GPGSIG_HEADER = b'gpgsig'
# Header fields for objects
_OBJECT_HEADER = b'object'
_TYPE_HEADER = b'type'
_TAG_HEADER = b'tag'
_TAGGER_HEADER = b'tagger'
S_IFGITLINK = 0o160000
MAX_TIME = 9223372036854775807 # (2**63) - 1 - signed long int max
def S_ISGITLINK(m):
"""Check if a mode indicates a submodule.
:param m: Mode to check
:return: a ``boolean``
"""
return (stat.S_IFMT(m) == S_IFGITLINK)
def _decompress(string):
dcomp = zlib.decompressobj()
dcomped = dcomp.decompress(string)
dcomped += dcomp.flush()
return dcomped
def sha_to_hex(sha):
"""Takes a string and returns the hex of the sha within"""
hexsha = binascii.hexlify(sha)
assert len(hexsha) == 40, "Incorrect length of sha1 string: %d" % len(hexsha)
return hexsha
def hex_to_sha(hex):
"""Takes a hex sha and returns a binary sha"""
assert len(hex) == 40, "Incorrect length of hexsha: %s" % hex
try:
return binascii.unhexlify(hex)
except TypeError as exc:
if not isinstance(hex, bytes):
raise
raise ValueError(exc.args[0])
def valid_hexsha(hex):
if len(hex) != 40:
return False
try:
binascii.unhexlify(hex)
except (TypeError, binascii.Error):
return False
else:
return True
def hex_to_filename(path, hex):
"""Takes a hex sha and returns its filename relative to the given path."""
# os.path.join accepts bytes or unicode, but all args must be of the same
# type. Make sure that hex which is expected to be bytes, is the same type
# as path.
if getattr(path, 'encode', None) is not None:
hex = hex.decode('ascii')
dir = hex[:2]
file = hex[2:]
# Check from object dir
return os.path.join(path, dir, file)
def filename_to_hex(filename):
"""Takes an object filename and returns its corresponding hex sha."""
# grab the last (up to) two path components
names = filename.rsplit(os.path.sep, 2)[-2:]
errmsg = "Invalid object filename: %s" % filename
assert len(names) == 2, errmsg
base, rest = names
assert len(base) == 2 and len(rest) == 38, errmsg
hex = (base + rest).encode('ascii')
hex_to_sha(hex)
return hex
def object_header(num_type, length):
"""Return an object header for the given numeric type and text length."""
return (object_class(num_type).type_name +
b' ' + str(length).encode('ascii') + b'\0')
def serializable_property(name, docstring=None):
"""A property that helps tracking whether serialization is necessary.
"""
def set(obj, value):
setattr(obj, "_"+name, value)
obj._needs_serialization = True
def get(obj):
return getattr(obj, "_"+name)
return property(get, set, doc=docstring)
def object_class(type):
"""Get the object class corresponding to the given type.
:param type: Either a type name string or a numeric type.
:return: The ShaFile subclass corresponding to the given type, or None if
type is not a valid type name/number.
"""
return _TYPE_MAP.get(type, None)
def check_hexsha(hex, error_msg):
"""Check if a string is a valid hex sha string.
:param hex: Hex string to check
:param error_msg: Error message to use in exception
:raise ObjectFormatException: Raised when the string is not valid
"""
if not valid_hexsha(hex):
raise ObjectFormatException("%s %s" % (error_msg, hex))
def check_identity(identity, error_msg):
"""Check if the specified identity is valid.
This will raise an exception if the identity is not valid.
:param identity: Identity string
:param error_msg: Error message to use in exception
"""
email_start = identity.find(b'<')
email_end = identity.find(b'>')
if (email_start < 0 or email_end < 0 or email_end <= email_start
or identity.find(b'<', email_start + 1) >= 0
or identity.find(b'>', email_end + 1) >= 0
or not identity.endswith(b'>')):
raise ObjectFormatException(error_msg)
def check_time(time_seconds):
"""Check if the specified time is not prone to overflow error.
This will raise an exception if the time is not valid.
:param time_info: author/committer/tagger info
"""
# Prevent overflow error
if time_seconds > MAX_TIME:
raise ObjectFormatException(
'Date field should not exceed %s' % MAX_TIME)
def git_line(*items):
"""Formats items into a space separated line."""
return b' '.join(items) + b'\n'
class FixedSha(object):
"""SHA object that behaves like hashlib's but is given a fixed value."""
__slots__ = ('_hexsha', '_sha')
def __init__(self, hexsha):
if getattr(hexsha, 'encode', None) is not None:
hexsha = hexsha.encode('ascii')
if not isinstance(hexsha, bytes):
raise TypeError('Expected bytes for hexsha, got %r' % hexsha)
self._hexsha = hexsha
self._sha = hex_to_sha(hexsha)
def digest(self):
"""Return the raw SHA digest."""
return self._sha
def hexdigest(self):
"""Return the hex SHA digest."""
return self._hexsha.decode('ascii')
class ShaFile(object):
"""A git SHA file."""
__slots__ = ('_chunked_text', '_sha', '_needs_serialization')
@staticmethod
def _parse_legacy_object_header(magic, f):
"""Parse a legacy object, creating it but not reading the file."""
bufsize = 1024
decomp = zlib.decompressobj()
header = decomp.decompress(magic)
start = 0
end = -1
while end < 0:
extra = f.read(bufsize)
header += decomp.decompress(extra)
magic += extra
end = header.find(b'\0', start)
start = len(header)
header = header[:end]
type_name, size = header.split(b' ', 1)
size = int(size) # sanity check
obj_class = object_class(type_name)
if not obj_class:
raise ObjectFormatException("Not a known type: %s" % type_name)
return obj_class()
def _parse_legacy_object(self, map):
"""Parse a legacy object, setting the raw string."""
text = _decompress(map)
header_end = text.find(b'\0')
if header_end < 0:
raise ObjectFormatException("Invalid object header, no \\0")
self.set_raw_string(text[header_end+1:])
def as_legacy_object_chunks(self):
"""Return chunks representing the object in the experimental format.
:return: List of strings
"""
compobj = zlib.compressobj()
yield compobj.compress(self._header())
for chunk in self.as_raw_chunks():
yield compobj.compress(chunk)
yield compobj.flush()
def as_legacy_object(self):
"""Return string representing the object in the experimental format.
"""
return b''.join(self.as_legacy_object_chunks())
def as_raw_chunks(self):
"""Return chunks with serialization of the object.
:return: List of strings, not necessarily one per line
"""
if self._needs_serialization:
self._sha = None
self._chunked_text = self._serialize()
self._needs_serialization = False
return self._chunked_text
def as_raw_string(self):
"""Return raw string with serialization of the object.
:return: String object
"""
return b''.join(self.as_raw_chunks())
if sys.version_info[0] >= 3:
def __bytes__(self):
"""Return raw string serialization of this object."""
return self.as_raw_string()
else:
def __str__(self):
"""Return raw string serialization of this object."""
return self.as_raw_string()
def __hash__(self):
"""Return unique hash for this object."""
return hash(self.id)
def as_pretty_string(self):
"""Return a string representing this object, fit for display."""
return self.as_raw_string()
def set_raw_string(self, text, sha=None):
"""Set the contents of this object from a serialized string."""
if not isinstance(text, bytes):
raise TypeError('Expected bytes for text, got %r' % text)
self.set_raw_chunks([text], sha)
def set_raw_chunks(self, chunks, sha=None):
"""Set the contents of this object from a list of chunks."""
self._chunked_text = chunks
self._deserialize(chunks)
if sha is None:
self._sha = None
else:
self._sha = FixedSha(sha)
self._needs_serialization = False
@staticmethod
def _parse_object_header(magic, f):
"""Parse a new style object, creating it but not reading the file."""
num_type = (ord(magic[0:1]) >> 4) & 7
obj_class = object_class(num_type)
if not obj_class:
raise ObjectFormatException("Not a known type %d" % num_type)
return obj_class()
def _parse_object(self, map):
"""Parse a new style object, setting self._text."""
# skip type and size; type must have already been determined, and
# we trust zlib to fail if it's otherwise corrupted
byte = ord(map[0:1])
used = 1
while (byte & 0x80) != 0:
byte = ord(map[used:used+1])
used += 1
raw = map[used:]
self.set_raw_string(_decompress(raw))
@classmethod
def _is_legacy_object(cls, magic):
b0 = ord(magic[0:1])
b1 = ord(magic[1:2])
word = (b0 << 8) + b1
return (b0 & 0x8F) == 0x08 and (word % 31) == 0
@classmethod
def _parse_file(cls, f):
map = f.read()
if not map:
raise EmptyFileException('Corrupted empty file detected')
if cls._is_legacy_object(map):
obj = cls._parse_legacy_object_header(map, f)
obj._parse_legacy_object(map)
else:
obj = cls._parse_object_header(map, f)
obj._parse_object(map)
return obj
def __init__(self):
"""Don't call this directly"""
self._sha = None
self._chunked_text = []
self._needs_serialization = True
def _deserialize(self, chunks):
raise NotImplementedError(self._deserialize)
def _serialize(self):
raise NotImplementedError(self._serialize)
@classmethod
def from_path(cls, path):
"""Open a SHA file from disk."""
with GitFile(path, 'rb') as f:
return cls.from_file(f)
@classmethod
def from_file(cls, f):
"""Get the contents of a SHA file on disk."""
try:
obj = cls._parse_file(f)
obj._sha = None
return obj
except (IndexError, ValueError):
raise ObjectFormatException("invalid object header")
@staticmethod
def from_raw_string(type_num, string, sha=None):
"""Creates an object of the indicated type from the raw string given.
:param type_num: The numeric type of the object.
:param string: The raw uncompressed contents.
:param sha: Optional known sha for the object
"""
obj = object_class(type_num)()
obj.set_raw_string(string, sha)
return obj
@staticmethod
def from_raw_chunks(type_num, chunks, sha=None):
"""Creates an object of the indicated type from the raw chunks given.
:param type_num: The numeric type of the object.
:param chunks: An iterable of the raw uncompressed contents.
:param sha: Optional known sha for the object
"""
obj = object_class(type_num)()
obj.set_raw_chunks(chunks, sha)
return obj
@classmethod
def from_string(cls, string):
"""Create a ShaFile from a string."""
obj = cls()
obj.set_raw_string(string)
return obj
def _check_has_member(self, member, error_msg):
"""Check that the object has a given member variable.
:param member: the member variable to check for
:param error_msg: the message for an error if the member is missing
:raise ObjectFormatException: with the given error_msg if member is
missing or is None
"""
if getattr(self, member, None) is None:
raise ObjectFormatException(error_msg)
def check(self):
"""Check this object for internal consistency.
:raise ObjectFormatException: if the object is malformed in some way
:raise ChecksumMismatch: if the object was created with a SHA that does
not match its contents
"""
# TODO: if we find that error-checking during object parsing is a
# performance bottleneck, those checks should be moved to the class's
# check() method during optimization so we can still check the object
# when necessary.
old_sha = self.id
try:
self._deserialize(self.as_raw_chunks())
self._sha = None
new_sha = self.id
except Exception as e:
raise ObjectFormatException(e)
if old_sha != new_sha:
raise ChecksumMismatch(new_sha, old_sha)
def _header(self):
return object_header(self.type, self.raw_length())
def raw_length(self):
"""Returns the length of the raw string of this object."""
ret = 0
for chunk in self.as_raw_chunks():
ret += len(chunk)
return ret
def sha(self):
"""The SHA1 object that is the name of this object."""
if self._sha is None or self._needs_serialization:
# this is a local because as_raw_chunks() overwrites self._sha
new_sha = sha1()
new_sha.update(self._header())
for chunk in self.as_raw_chunks():
new_sha.update(chunk)
self._sha = new_sha
return self._sha
def copy(self):
"""Create a new copy of this SHA1 object from its raw string"""
obj_class = object_class(self.get_type())
return obj_class.from_raw_string(
self.get_type(),
self.as_raw_string(),
self.id)
@property
def id(self):
"""The hex SHA of this object."""
return self.sha().hexdigest().encode('ascii')
def get_type(self):
"""Return the type number for this object class."""
return self.type_num
def set_type(self, type):
"""Set the type number for this object class."""
self.type_num = type
# DEPRECATED: use type_num or type_name as needed.
type = property(get_type, set_type)
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.id)
def __ne__(self, other):
return not isinstance(other, ShaFile) or self.id != other.id
def __eq__(self, other):
"""Return True if the SHAs of the two objects match.
It doesn't make sense to talk about an order on ShaFiles, so we don't
override the rich comparison methods (__le__, etc.).
"""
return isinstance(other, ShaFile) and self.id == other.id
def __lt__(self, other):
if not isinstance(other, ShaFile):
raise TypeError
return self.id < other.id
def __le__(self, other):
if not isinstance(other, ShaFile):
raise TypeError
return self.id <= other.id
def __cmp__(self, other):
if not isinstance(other, ShaFile):
raise TypeError
return cmp(self.id, other.id) # noqa: F821
class Blob(ShaFile):
"""A Git Blob object."""
__slots__ = ()
type_name = b'blob'
type_num = 3
def __init__(self):
super(Blob, self).__init__()
self._chunked_text = []
self._needs_serialization = False
def _get_data(self):
return self.as_raw_string()
def _set_data(self, data):
self.set_raw_string(data)
data = property(_get_data, _set_data,
"The text contained within the blob object.")
def _get_chunked(self):
return self._chunked_text
def _set_chunked(self, chunks):
self._chunked_text = chunks
def _serialize(self):
return self._chunked_text
def _deserialize(self, chunks):
self._chunked_text = chunks
chunked = property(
_get_chunked, _set_chunked,
"The text within the blob object, as chunks (not necessarily lines).")
@classmethod
def from_path(cls, path):
blob = ShaFile.from_path(path)
if not isinstance(blob, cls):
raise NotBlobError(path)
return blob
def check(self):
"""Check this object for internal consistency.
:raise ObjectFormatException: if the object is malformed in some way
"""
super(Blob, self).check()
def splitlines(self):
"""Return list of lines in this blob.
This preserves the original line endings.
"""
chunks = self.chunked
if not chunks:
return []
if len(chunks) == 1:
return chunks[0].splitlines(True)
remaining = None
ret = []
for chunk in chunks:
lines = chunk.splitlines(True)
if len(lines) > 1:
ret.append((remaining or b"") + lines[0])
ret.extend(lines[1:-1])
remaining = lines[-1]
elif len(lines) == 1:
if remaining is None:
remaining = lines.pop()
else:
remaining += lines.pop()
if remaining is not None:
ret.append(remaining)
return ret
def _parse_message(chunks):
"""Parse a message with a list of fields and a body.
:param chunks: the raw chunks of the tag or commit object.
:return: iterator of tuples of (field, value), one per header line, in the
order read from the text, possibly including duplicates. Includes a
field named None for the freeform tag/commit text.
"""
f = BytesIO(b''.join(chunks))
k = None
v = ""
eof = False
def _strip_last_newline(value):
"""Strip the last newline from value"""
if value and value.endswith(b'\n'):
return value[:-1]
return value
# Parse the headers
#
# Headers can contain newlines. The next line is indented with a space.
# We store the latest key as 'k', and the accumulated value as 'v'.
for line in f:
if line.startswith(b' '):
# Indented continuation of the previous line
v += line[1:]
else:
if k is not None:
# We parsed a new header, return its value
yield (k, _strip_last_newline(v))
if line == b'\n':
# Empty line indicates end of headers
break
(k, v) = line.split(b' ', 1)
else:
# We reached end of file before the headers ended. We still need to
# return the previous header, then we need to return a None field for
# the text.
eof = True
if k is not None:
yield (k, _strip_last_newline(v))
yield (None, None)
if not eof:
# We didn't reach the end of file while parsing headers. We can return
# the rest of the file as a message.
yield (None, f.read())
f.close()
class Tag(ShaFile):
"""A Git Tag object."""
type_name = b'tag'
type_num = 4
__slots__ = ('_tag_timezone_neg_utc', '_name', '_object_sha',
'_object_class', '_tag_time', '_tag_timezone',
'_tagger', '_message')
def __init__(self):
super(Tag, self).__init__()
self._tagger = None
self._tag_time = None
self._tag_timezone = None
self._tag_timezone_neg_utc = False
@classmethod
def from_path(cls, filename):
tag = ShaFile.from_path(filename)
if not isinstance(tag, cls):
raise NotTagError(filename)
return tag
def check(self):
"""Check this object for internal consistency.
:raise ObjectFormatException: if the object is malformed in some way
"""
super(Tag, self).check()
self._check_has_member("_object_sha", "missing object sha")
self._check_has_member("_object_class", "missing object type")
self._check_has_member("_name", "missing tag name")
if not self._name:
raise ObjectFormatException("empty tag name")
check_hexsha(self._object_sha, "invalid object sha")
if getattr(self, "_tagger", None):
check_identity(self._tagger, "invalid tagger")
self._check_has_member("_tag_time", "missing tag time")
check_time(self._tag_time)
last = None
for field, _ in _parse_message(self._chunked_text):
if field == _OBJECT_HEADER and last is not None:
raise ObjectFormatException("unexpected object")
elif field == _TYPE_HEADER and last != _OBJECT_HEADER:
raise ObjectFormatException("unexpected type")
elif field == _TAG_HEADER and last != _TYPE_HEADER:
raise ObjectFormatException("unexpected tag name")
elif field == _TAGGER_HEADER and last != _TAG_HEADER:
raise ObjectFormatException("unexpected tagger")
last = field
def _serialize(self):
chunks = []
chunks.append(git_line(_OBJECT_HEADER, self._object_sha))
chunks.append(git_line(_TYPE_HEADER, self._object_class.type_name))
chunks.append(git_line(_TAG_HEADER, self._name))
if self._tagger:
if self._tag_time is None:
chunks.append(git_line(_TAGGER_HEADER, self._tagger))
else:
chunks.append(git_line(
_TAGGER_HEADER, self._tagger,
str(self._tag_time).encode('ascii'),
format_timezone(
self._tag_timezone, self._tag_timezone_neg_utc)))
if self._message is not None:
chunks.append(b'\n') # To close headers
chunks.append(self._message)
return chunks
def _deserialize(self, chunks):
"""Grab the metadata attached to the tag"""
self._tagger = None
self._tag_time = None
self._tag_timezone = None
self._tag_timezone_neg_utc = False
for field, value in _parse_message(chunks):
if field == _OBJECT_HEADER:
self._object_sha = value
elif field == _TYPE_HEADER:
obj_class = object_class(value)
if not obj_class:
raise ObjectFormatException("Not a known type: %s" % value)
self._object_class = obj_class
elif field == _TAG_HEADER:
self._name = value
elif field == _TAGGER_HEADER:
(self._tagger,
self._tag_time,
(self._tag_timezone,
self._tag_timezone_neg_utc)) = parse_time_entry(value)
elif field is None:
self._message = value
else:
raise ObjectFormatException("Unknown field %s" % field)
def _get_object(self):
"""Get the object pointed to by this tag.
:return: tuple of (object class, sha).
"""
return (self._object_class, self._object_sha)
def _set_object(self, value):
(self._object_class, self._object_sha) = value
self._needs_serialization = True
object = property(_get_object, _set_object)
name = serializable_property("name", "The name of this tag")
tagger = serializable_property(
"tagger",
"Returns the name of the person who created this tag")
tag_time = serializable_property(
"tag_time",
"The creation timestamp of the tag. As the number of seconds "
"since the epoch")
tag_timezone = serializable_property(
"tag_timezone",
"The timezone that tag_time is in.")
message = serializable_property(
"message", "The message attached to this tag")
class TreeEntry(namedtuple('TreeEntry', ['path', 'mode', 'sha'])):
"""Named tuple encapsulating a single tree entry."""
def in_path(self, path):
"""Return a copy of this entry with the given path prepended."""
if not isinstance(self.path, bytes):
raise TypeError('Expected bytes for path, got %r' % self.path)
return TreeEntry(posixpath.join(path, self.path), self.mode, self.sha)
def parse_tree(text, strict=False):
"""Parse a tree text.
:param text: Serialized text to parse
:return: iterator of tuples of (name, mode, sha)
:raise ObjectFormatException: if the object was malformed in some way
"""
count = 0
length = len(text)
while count < length:
mode_end = text.index(b' ', count)
mode_text = text[count:mode_end]
if strict and mode_text.startswith(b'0'):
raise ObjectFormatException("Invalid mode '%s'" % mode_text)
try:
mode = int(mode_text, 8)
except ValueError:
raise ObjectFormatException("Invalid mode '%s'" % mode_text)
name_end = text.index(b'\0', mode_end)
name = text[mode_end+1:name_end]
count = name_end+21
sha = text[name_end+1:count]
if len(sha) != 20:
raise ObjectFormatException("Sha has invalid length")
hexsha = sha_to_hex(sha)
yield (name, mode, hexsha)
def serialize_tree(items):
"""Serialize the items in a tree to a text.
:param items: Sorted iterable over (name, mode, sha) tuples
:return: Serialized tree text as chunks
"""
for name, mode, hexsha in items:
yield (("%04o" % mode).encode('ascii') + b' ' + name +
b'\0' + hex_to_sha(hexsha))
def sorted_tree_items(entries, name_order):
"""Iterate over a tree entries dictionary.
:param name_order: If True, iterate entries in order of their name. If
False, iterate entries in tree order, that is, treat subtree entries as
having '/' appended.
:param entries: Dictionary mapping names to (mode, sha) tuples
:return: Iterator over (name, mode, hexsha)
"""
key_func = name_order and key_entry_name_order or key_entry
for name, entry in sorted(entries.items(), key=key_func):
mode, hexsha = entry
# Stricter type checks than normal to mirror checks in the C version.
mode = int(mode)
if not isinstance(hexsha, bytes):
raise TypeError('Expected bytes for SHA, got %r' % hexsha)
yield TreeEntry(name, mode, hexsha)
def key_entry(entry):
"""Sort key for tree entry.
:param entry: (name, value) tuple
"""
(name, value) = entry
if stat.S_ISDIR(value[0]):
name += b'/'
return name
def key_entry_name_order(entry):
"""Sort key for tree entry in name order."""
return entry[0]
def pretty_format_tree_entry(name, mode, hexsha, encoding="utf-8"):
"""Pretty format tree entry.
:param name: Name of the directory entry
:param mode: Mode of entry
:param hexsha: Hexsha of the referenced object
:return: string describing the tree entry
"""
if mode & stat.S_IFDIR:
kind = "tree"
else:
kind = "blob"
return "%04o %s %s\t%s\n" % (
mode, kind, hexsha.decode('ascii'),
name.decode(encoding, 'replace'))
class Tree(ShaFile):
"""A Git tree object"""
type_name = b'tree'
type_num = 2
__slots__ = ('_entries')
def __init__(self):
super(Tree, self).__init__()
self._entries = {}
@classmethod
def from_path(cls, filename):
tree = ShaFile.from_path(filename)
if not isinstance(tree, cls):
raise NotTreeError(filename)
return tree
def __contains__(self, name):
return name in self._entries
def __getitem__(self, name):
return self._entries[name]
def __setitem__(self, name, value):
"""Set a tree entry by name.
:param name: The name of the entry, as a string.
:param value: A tuple of (mode, hexsha), where mode is the mode of the
entry as an integral type and hexsha is the hex SHA of the entry as
a string.
"""
mode, hexsha = value
self._entries[name] = (mode, hexsha)
self._needs_serialization = True
def __delitem__(self, name):
del self._entries[name]
self._needs_serialization = True
def __len__(self):
return len(self._entries)
def __iter__(self):
return iter(self._entries)
def add(self, name, mode, hexsha):
"""Add an entry to the tree.
:param mode: The mode of the entry as an integral type. Not all
possible modes are supported by git; see check() for details.
:param name: The name of the entry, as a string.
:param hexsha: The hex SHA of the entry as a string.
"""
if isinstance(name, int) and isinstance(mode, bytes):
(name, mode) = (mode, name)
warnings.warn(
"Please use Tree.add(name, mode, hexsha)",
category=DeprecationWarning, stacklevel=2)
self._entries[name] = mode, hexsha
self._needs_serialization = True
def iteritems(self, name_order=False):
"""Iterate over entries.
:param name_order: If True, iterate in name order instead of tree
order.
:return: Iterator over (name, mode, sha) tuples
"""
return sorted_tree_items(self._entries, name_order)
def items(self):
"""Return the sorted entries in this tree.
:return: List with (name, mode, sha) tuples
"""
return list(self.iteritems())
def _deserialize(self, chunks):
"""Grab the entries in the tree"""
try:
parsed_entries = parse_tree(b''.join(chunks))
except ValueError as e:
raise ObjectFormatException(e)
# TODO: list comprehension is for efficiency in the common (small)
# case; if memory efficiency in the large case is a concern, use a
# genexp.
self._entries = dict([(n, (m, s)) for n, m, s in parsed_entries])
def check(self):
"""Check this object for internal consistency.
:raise ObjectFormatException: if the object is malformed in some way
"""
super(Tree, self).check()
last = None
allowed_modes = (stat.S_IFREG | 0o755, stat.S_IFREG | 0o644,
stat.S_IFLNK, stat.S_IFDIR, S_IFGITLINK,
# TODO: optionally exclude as in git fsck --strict
stat.S_IFREG | 0o664)
for name, mode, sha in parse_tree(b''.join(self._chunked_text),
True):
check_hexsha(sha, 'invalid sha %s' % sha)
if b'/' in name or name in (b'', b'.', b'..', b'.git'):
raise ObjectFormatException(
'invalid name %s' %
name.decode('utf-8', 'replace'))
if mode not in allowed_modes:
raise ObjectFormatException('invalid mode %06o' % mode)
entry = (name, (mode, sha))
if last:
if key_entry(last) > key_entry(entry):
raise ObjectFormatException('entries not sorted')
if name == last[0]:
raise ObjectFormatException('duplicate entry %s' % name)
last = entry
def _serialize(self):
return list(serialize_tree(self.iteritems()))
def as_pretty_string(self):
text = []
for name, mode, hexsha in self.iteritems():
text.append(pretty_format_tree_entry(name, mode, hexsha))
return "".join(text)
def lookup_path(self, lookup_obj, path):
"""Look up an object in a Git tree.
:param lookup_obj: Callback for retrieving object by SHA1
:param path: Path to lookup
:return: A tuple of (mode, SHA) of the resulting path.
"""
parts = path.split(b'/')
sha = self.id
mode = None
for p in parts:
if not p:
continue
obj = lookup_obj(sha)
if not isinstance(obj, Tree):
raise NotTreeError(sha)
mode, sha = obj[p]
return mode, sha
def parse_timezone(text):
"""Parse a timezone text fragment (e.g. '+0100').
:param text: Text to parse.
:return: Tuple with timezone as seconds difference to UTC
and a boolean indicating whether this was a UTC timezone
prefixed with a negative sign (-0000).
"""
# cgit parses the first character as the sign, and the rest
# as an integer (using strtol), which could also be negative.
# We do the same for compatibility. See #697828.
if not text[0] in b'+-':
raise ValueError("Timezone must start with + or - (%(text)s)" % vars())
sign = text[:1]
offset = int(text[1:])
if sign == b'-':
offset = -offset
unnecessary_negative_timezone = (offset >= 0 and sign == b'-')
signum = (offset < 0) and -1 or 1
offset = abs(offset)
hours = int(offset / 100)
minutes = (offset % 100)
return (signum * (hours * 3600 + minutes * 60),
unnecessary_negative_timezone)
def format_timezone(offset, unnecessary_negative_timezone=False):
"""Format a timezone for Git serialization.
:param offset: Timezone offset as seconds difference to UTC
:param unnecessary_negative_timezone: Whether to use a minus sign for
UTC or positive timezones (-0000 and -0700 rather than +0000 / +0700).
"""
if offset % 60 != 0:
raise ValueError("Unable to handle non-minute offset.")
if offset < 0 or unnecessary_negative_timezone:
sign = '-'
offset = -offset
else:
sign = '+'
return ('%c%02d%02d' %
(sign, offset / 3600, (offset / 60) % 60)).encode('ascii')
def parse_time_entry(value):
"""Parse time entry behavior
:param value: Bytes representing a git commit/tag line
:raise: ObjectFormatException in case of parsing error (malformed
field date)
:return: Tuple of (author, time, (timezone, timezone_neg_utc))
"""
try:
sep = value.index(b'> ')
except ValueError:
return (value, None, (None, False))
try:
person = value[0:sep+1]
rest = value[sep+2:]
timetext, timezonetext = rest.rsplit(b' ', 1)
time = int(timetext)
timezone, timezone_neg_utc = parse_timezone(timezonetext)
except ValueError as e:
raise ObjectFormatException(e)
return person, time, (timezone, timezone_neg_utc)
def parse_commit(chunks):
"""Parse a commit object from chunks.
:param chunks: Chunks to parse
:return: Tuple of (tree, parents, author_info, commit_info,
encoding, mergetag, gpgsig, message, extra)
"""
parents = []
extra = []
tree = None
author_info = (None, None, (None, None))
commit_info = (None, None, (None, None))
encoding = None
mergetag = []
message = None
gpgsig = None
for field, value in _parse_message(chunks):
# TODO(jelmer): Enforce ordering
if field == _TREE_HEADER:
tree = value
elif field == _PARENT_HEADER:
parents.append(value)
elif field == _AUTHOR_HEADER:
author_info = parse_time_entry(value)
elif field == _COMMITTER_HEADER:
commit_info = parse_time_entry(value)
elif field == _ENCODING_HEADER:
encoding = value
elif field == _MERGETAG_HEADER:
mergetag.append(Tag.from_string(value + b'\n'))
elif field == _GPGSIG_HEADER:
gpgsig = value
elif field is None:
message = value
else:
extra.append((field, value))
return (tree, parents, author_info, commit_info, encoding, mergetag,
gpgsig, message, extra)
class Commit(ShaFile):
"""A git commit object"""
type_name = b'commit'
type_num = 1
__slots__ = ('_parents', '_encoding', '_extra', '_author_timezone_neg_utc',
'_commit_timezone_neg_utc', '_commit_time',
'_author_time', '_author_timezone', '_commit_timezone',
'_author', '_committer', '_tree', '_message',
'_mergetag', '_gpgsig')
def __init__(self):
super(Commit, self).__init__()
self._parents = []
self._encoding = None
self._mergetag = []
self._gpgsig = None
self._extra = []
self._author_timezone_neg_utc = False
self._commit_timezone_neg_utc = False
@classmethod
def from_path(cls, path):
commit = ShaFile.from_path(path)
if not isinstance(commit, cls):
raise NotCommitError(path)
return commit
def _deserialize(self, chunks):
(self._tree, self._parents, author_info, commit_info, self._encoding,
self._mergetag, self._gpgsig, self._message, self._extra) = (
parse_commit(chunks))
(self._author, self._author_time,
(self._author_timezone, self._author_timezone_neg_utc)) = author_info
(self._committer, self._commit_time,
(self._commit_timezone, self._commit_timezone_neg_utc)) = commit_info
def check(self):
"""Check this object for internal consistency.
:raise ObjectFormatException: if the object is malformed in some way
"""
super(Commit, self).check()
self._check_has_member("_tree", "missing tree")
self._check_has_member("_author", "missing author")
self._check_has_member("_committer", "missing committer")
self._check_has_member("_author_time", "missing author time")
self._check_has_member("_commit_time", "missing commit time")
for parent in self._parents:
check_hexsha(parent, "invalid parent sha")
check_hexsha(self._tree, "invalid tree sha")
check_identity(self._author, "invalid author")
check_identity(self._committer, "invalid committer")
check_time(self._author_time)
check_time(self._commit_time)
last = None
for field, _ in _parse_message(self._chunked_text):
if field == _TREE_HEADER and last is not None:
raise ObjectFormatException("unexpected tree")
elif field == _PARENT_HEADER and last not in (_PARENT_HEADER,
_TREE_HEADER):
raise ObjectFormatException("unexpected parent")
elif field == _AUTHOR_HEADER and last not in (_TREE_HEADER,
_PARENT_HEADER):
raise ObjectFormatException("unexpected author")
elif field == _COMMITTER_HEADER and last != _AUTHOR_HEADER:
raise ObjectFormatException("unexpected committer")
elif field == _ENCODING_HEADER and last != _COMMITTER_HEADER:
raise ObjectFormatException("unexpected encoding")
last = field
# TODO: optionally check for duplicate parents
def _serialize(self):
chunks = []
tree_bytes = (
self._tree.id if isinstance(self._tree, Tree) else self._tree)
chunks.append(git_line(_TREE_HEADER, tree_bytes))
for p in self._parents:
chunks.append(git_line(_PARENT_HEADER, p))
chunks.append(git_line(
_AUTHOR_HEADER, self._author,
str(self._author_time).encode('ascii'),
format_timezone(
self._author_timezone, self._author_timezone_neg_utc)))
chunks.append(git_line(
_COMMITTER_HEADER, self._committer,
str(self._commit_time).encode('ascii'),
format_timezone(self._commit_timezone,
self._commit_timezone_neg_utc)))
if self.encoding:
chunks.append(git_line(_ENCODING_HEADER, self.encoding))
for mergetag in self.mergetag:
mergetag_chunks = mergetag.as_raw_string().split(b'\n')
chunks.append(git_line(_MERGETAG_HEADER, mergetag_chunks[0]))
# Embedded extra header needs leading space
for chunk in mergetag_chunks[1:]:
chunks.append(b' ' + chunk + b'\n')
# No trailing empty line
if chunks[-1].endswith(b' \n'):
chunks[-1] = chunks[-1][:-2]
for k, v in self.extra:
if b'\n' in k or b'\n' in v:
raise AssertionError(
"newline in extra data: %r -> %r" % (k, v))
chunks.append(git_line(k, v))
if self.gpgsig:
sig_chunks = self.gpgsig.split(b'\n')
chunks.append(git_line(_GPGSIG_HEADER, sig_chunks[0]))
for chunk in sig_chunks[1:]:
chunks.append(git_line(b'', chunk))
chunks.append(b'\n') # There must be a new line after the headers
chunks.append(self._message)
return chunks
tree = serializable_property(
"tree", "Tree that is the state of this commit")
def _get_parents(self):
"""Return a list of parents of this commit."""
return self._parents
def _set_parents(self, value):
"""Set a list of parents of this commit."""
self._needs_serialization = True
self._parents = value
parents = property(_get_parents, _set_parents,
doc="Parents of this commit, by their SHA1.")
def _get_extra(self):
"""Return extra settings of this commit."""
return self._extra
extra = property(
_get_extra,
doc="Extra header fields not understood (presumably added in a "
"newer version of git). Kept verbatim so the object can "
"be correctly reserialized. For private commit metadata, use "
"pseudo-headers in Commit.message, rather than this field.")
author = serializable_property(
"author",
"The name of the author of the commit")
committer = serializable_property(
"committer",
"The name of the committer of the commit")
message = serializable_property(
"message", "The commit message")
commit_time = serializable_property(
"commit_time",
"The timestamp of the commit. As the number of seconds since the "
"epoch.")
commit_timezone = serializable_property(
"commit_timezone",
"The zone the commit time is in")
author_time = serializable_property(
"author_time",
"The timestamp the commit was written. As the number of "
"seconds since the epoch.")
author_timezone = serializable_property(
"author_timezone", "Returns the zone the author time is in.")
encoding = serializable_property(
"encoding", "Encoding of the commit message.")
mergetag = serializable_property(
"mergetag", "Associated signed tag.")
gpgsig = serializable_property(
"gpgsig", "GPG Signature.")
OBJECT_CLASSES = (
Commit,
Tree,
Blob,
Tag,
)
_TYPE_MAP = {}
for cls in OBJECT_CLASSES:
_TYPE_MAP[cls.type_name] = cls
_TYPE_MAP[cls.type_num] = cls
# Hold on to the pure-python implementations for testing
_parse_tree_py = parse_tree
_sorted_tree_items_py = sorted_tree_items
try:
# Try to import C versions
from dulwich._objects import parse_tree, sorted_tree_items
except ImportError:
pass
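# A hedged usage sketch (illustration only, not part of upstream dulwich): build a
# blob and a tree with the classes defined above and inspect their hex SHAs.
def _objects_example():  # hypothetical helper, never called at import time
    blob = Blob.from_string(b'my file content\n')
    tree = Tree()
    tree.add(b'spam', 0o100644, blob.id)  # (name, mode, hexsha)
    tree.check()  # raises ObjectFormatException if the serialized tree is malformed
    return blob.id, tree.id  # 40-character hex SHAs, as bytes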
| gpl-2.0 |
Amechi101/concepteur-market-app | venv/lib/python2.7/site-packages/django/contrib/gis/maps/google/__init__.py | 603 | 2648 |
"""
This module houses the GoogleMap object, used for generating
the needed javascript to embed Google Maps in a Web page.
Google(R) is a registered trademark of Google, Inc. of Mountain View, California.
Example:
* In the view:
return render_to_response('template.html', {'google' : GoogleMap(key="abcdefg")})
* In the template:
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
{{ google.xhtml }}
<head>
<title>Google Maps via GeoDjango</title>
{{ google.style }}
{{ google.scripts }}
</head>
{{ google.body }}
<div id="{{ google.dom_id }}" style="width:600px;height:400px;"></div>
</body>
</html>
Note: If you want to be more explicit in your templates, the following are
equivalent:
{{ google.body }} => "<body {{ google.onload }} {{ google.onunload }}>"
{{ google.xhtml }} => "<html xmlns="http://www.w3.org/1999/xhtml" {{ google.xmlns }}>"
{{ google.style }} => "<style>{{ google.vml_css }}</style>"
Explanation:
- The `xhtml` property provides the correct XML namespace needed for
Google Maps to operate in IE using XHTML. Google Maps on IE uses
VML to draw polylines. Returns, by default:
<html xmlns="http://www.w3.org/1999/xhtml" xmlns:v="urn:schemas-microsoft-com:vml">
- The `style` property provides the correct style tag for the CSS
properties required by Google Maps on IE:
<style type="text/css">v\:* {behavior:url(#default#VML);}</style>
- The `scripts` property provides the necessary <script> tags for
including the Google Maps javascript, as well as including the
generated javascript.
- The `body` property provides the correct attributes for the
body tag to load the generated javascript. By default, returns:
<body onload="gmap_load()" onunload="GUnload()">
- The `dom_id` property returns the DOM id for the map. Defaults to "map".
The following attributes may be set or customized in your local settings:
* GOOGLE_MAPS_API_KEY: String of your Google Maps API key. These are tied to
a domain. May be obtained from http://www.google.com/apis/maps/
* GOOGLE_MAPS_API_VERSION (optional): Defaults to using "2.x"
* GOOGLE_MAPS_URL (optional): Must have a substitution ('%s') for the API
version.
"""
from django.contrib.gis.maps.google.gmap import GoogleMap, GoogleMapSet
from django.contrib.gis.maps.google.overlays import GEvent, GIcon, GMarker, GPolygon, GPolyline
from django.contrib.gis.maps.google.zoom import GoogleZoom
| mit |
tamland/wimpy | docs/conf.py | 1 | 7997 |
# -*- coding: utf-8 -*-
#
# complexity documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'tidalapi'
copyright = u'2014, Thomas Amland'
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = '0.5.0'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'pyramid'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'tidalapidoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
#latex_documents = []
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
#man_pages = []
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
#texinfo_documents = []
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
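# A hedged sketch (not part of the original conf.py): with Sphinx >= 1.7 installed
# and the working directory set to the docs/ folder containing this file, the HTML
# docs described by this configuration can be built programmatically, e.g.:
#
#   from sphinx.cmd.build import build_main
#   build_main(['-b', 'html', '.', '_build/html'])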
| lgpl-3.0 |
mph-/lcapy | lcapy/nexpr.py | 1 | 7914 |
"""This module provides the DiscreteTimeDomainExpression class to
represent discrete-time expressions.
Copyright 2020--2021 Michael Hayes, UCECE
"""
from __future__ import division
from .domains import DiscreteTimeDomain
from .sequence import Sequence
from .functions import exp
from .sym import j, oo, pi, fsym
from .dsym import nsym, ksym, zsym, dt
from .ztransform import ztransform
from .dft import DFT
from .seqexpr import SequenceExpression
from .nseq import DiscreteTimeDomainSequence, nseq
from sympy import Sum, summation, limit, DiracDelta
__all__ = ('nexpr', )
class DiscreteTimeDomainExpression(DiscreteTimeDomain, SequenceExpression):
"""Discrete-time expression or symbol."""
var = nsym
seqcls = DiscreteTimeDomainSequence
def __init__(self, val, **assumptions):
check = assumptions.pop('check', True)
if 'integer' not in assumptions:
assumptions['real'] = True
super(DiscreteTimeDomainExpression, self).__init__(val, **assumptions)
expr = self.expr
if check and expr.has(zsym) and not expr.has(Sum):
raise ValueError(
'n-domain expression %s cannot depend on z' % expr)
if check and expr.has(ksym) and not expr.has(Sum):
raise ValueError(
'n-domain expression %s cannot depend on k' % expr)
def _mul_compatible_domains(self, x):
if self.domain == x.domain:
return True
return x.is_constant_domain
def _div_compatible_domains(self, x):
if self.domain == x.domain:
return True
return x.is_constant_domain
def as_expr(self):
return DiscreteTimeDomainExpression(self)
def differentiate(self):
"""First order difference."""
result = (self.expr - self.subs(n - 1).expr) / dt
return self.__class__(result, **self.assumptions)
def integrate(self):
"""First order integration."""
from .sym import symsymbol
from .utils import factor_const
from .extrafunctions import UnitImpulse
from .functions import u
# TODO, get SymPy to optimize this case.
expr = self.expr
const, expr = factor_const(expr, nsym)
if expr.is_Function and expr.func == UnitImpulse:
return dt * u(expr.args[0]) * const
msym = symsymbol('m', integer=True)
result = dt * summation(self.subs(msym).expr, (msym, -oo, nsym))
return self.__class__(result, **self.assumptions)
def ztransform(self, evaluate=True, **assumptions):
"""Determine one-sided z-transform."""
assumptions = self.assumptions.merge_and_infer(self, **assumptions)
result = ztransform(self.expr, self.var, zsym, evaluate)
return self.change(result, domain='Z', **assumptions)
def ZT(self, **assumptions):
return self.ztransform(**assumptions)
def plot(self, ni=None, **kwargs):
"""Plot the sequence. If `ni` is not specified, it defaults to the
range (-20, 20). `ni` can be a vector of specified sequence
indices, a tuple specifying the range, or a constant specifying
the maximum value with the minimum value set to 0.
kwargs include:
axes - the plot axes to use otherwise a new figure is created
xlabel - the x-axis label
ylabel - the y-axis label
xscale - the x-axis scaling, say for plotting as ms
yscale - the y-axis scaling, say for plotting mV
in addition to those supported by the matplotlib plot command.
The plot axes are returned.
"""
if ni is None:
ni = (-20, 20)
from .plot import plot_sequence
return plot_sequence(self, ni, **kwargs)
def initial_value(self):
"""Determine value at n = 0."""
return self.subs(0)
def final_value(self):
"""Determine value at n = oo."""
return self.__class__(limit(self.expr, self.var, oo))
def DFT(self, N=None, evaluate=True):
if N is None:
from .sym import symsymbol
N = symsymbol('N', integer=True, positive=True)
result = DFT(self.expr, nsym, ksym, N, evaluate=evaluate)
return self.change(result, domain='discrete fourier')
def delay(self, m):
"""Delay signal by m samples."""
return self.subs(n - m)
def extent(self, n1=-100, n2=100):
"""Determine extent of the signal.
For example, nexpr([1, 1]).extent() = 2
nexpr([1, 0, 1]).extent() = 3
nexpr([0, 1, 0, 1]).extent() = 3
This performs a search between n=n1 and n=n2."""
return self.seq((n1, n2)).extent()
def discrete_time_fourier_transform(self, var=None,
images=oo, **assumptions):
"""Convert to Fourier domain using discrete time Fourier transform.
Use `images = 0` to avoid the infinite number of spectral images.
"""
return self.DTFT(var, images, **assumptions)
def DTFT(self, var=None, images=oo, **assumptions):
"""Convert to Fourier domain using discrete time Fourier transform.
By default this returns the DTFT in terms of `f`. Use
`.DTFT(w)` to get the angular frequency form, `.DTFT(F)` to
get the normalised frequency form, or `.DTFT(W)` to get the
normalised angular frequency form.
Use `images = 0` to avoid the infinite number of spectral images.
"""
from .extrafunctions import UnitStep
from .symbols import f, omega, Omega, F
from .fexpr import fexpr
from .dtft import DTFT
if var is None:
var = f
if id(var) not in (id(f), id(F), id(omega), id(Omega)):
raise ValueError('DTFT requires var to be f, F, omega, or Omega, not %s' % var)
dtft = DTFT(self.expr, self.var, fsym, images=images)
result = fexpr(dtft)(var)
result = result.simplify_dirac_delta()
result = result.simplify_heaviside()
result = result.simplify_rect()
# There is a bug in SymPy when simplifying Sum('X(n - m)', (m, -oo, oo))
# result = result.simplify()
result = result.cancel_terms()
return result
def norm_angular_fourier(self, **assumptions):
from .normomegaexpr import Omega
return self.DTFT()(Omega)
def difference_equation(self, inputsym='x', outputsym='y', form='iir'):
"""Create difference equation from impulse response.
`form` can be 'fir' or 'iir' ('direct form I').
"""
H = self.ZT()
return H.difference_equation(inputsym, outputsym, form)
def remove_condition(self):
"""Remove the piecewise condition from the expression."""
if not self.is_conditional:
return self
expr = self.expr
expr = expr.args[0].args[0]
return self.__class__(expr)
def nexpr(arg, **assumptions):
"""Create nExpr object. If `arg` is nsym return n"""
from .expr import Expr
from .seq import seq
if arg is nsym:
return n
if isinstance(arg, Expr):
if assumptions == {}:
return arg
return arg.__class__(arg, **assumptions)
if isinstance(arg, str) and arg.startswith('{'):
return nseq(arg)
from numpy import ndarray
if isinstance(arg, (list, ndarray)):
return DiscreteTimeDomainSequence(arg, var=n).as_impulses()
return DiscreteTimeDomainExpression(arg, **assumptions)
from .expressionclasses import expressionclasses
expressionclasses.register('discrete time', DiscreteTimeDomainExpression)
n = DiscreteTimeDomainExpression('n', integer=True)
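# Illustrative usage sketch (not part of the original module; only names
# defined above are used):
#
#     x = nexpr([1, 0, 1])     # sequence converted to a sum of unit impulses
#     x.extent()               # -> 3, as documented in extent()
#     y = x.delay(2)           # shift the signal by two samples
#     X = x.ZT()               # one-sided z-transform via ztransform()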
|
lgpl-2.1
|
gurneyalex/server-tools
|
__unported__/cron_run_manually/model/ir_cron.py
|
12
|
3161
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013 Therp BV (<http://therp.nl>)
# Code snippets from openobject-server copyright (C) 2004-2013 OpenERP S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import psycopg2
import logging
from openerp.osv import orm
from openerp.tools import SUPERUSER_ID
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval
class irCron(orm.Model):
_inherit = 'ir.cron'
def run_manually(self, cr, uid, ids, context=None):
"""
Run a job from the cron form view.
Cut and paste of code snippets from addons/base/ir/ir_cron.py
"""
logger = logging.getLogger('cron_run_manually')
cr.execute(
"""
SELECT * from ir_cron
WHERE id in %s
""", (tuple(ids),))
jobs = cr.dictfetchall()
for job in jobs:
if uid != SUPERUSER_ID and (
not job['active'] or not job['numbercall']):
raise orm.except_orm(
_('Error'),
_('Only the admin user is allowed to '
'execute inactive cron jobs manually'))
try:
# Try to grab an exclusive lock on the job row
# until the end of the transaction
cr.execute(
"""SELECT *
FROM ir_cron
WHERE id=%s
FOR UPDATE NOWAIT""",
(job['id'],), log_exceptions=False)
# Got the lock on the job row, run its code
logger.debug('Job `%s` triggered from form', job['name'])
model = self.pool.get(job['model'])
method = getattr(model, job['function'])
args = safe_eval('tuple(%s)' % (job['args'] or ''))
method(cr, job['user_id'], *args)
except psycopg2.OperationalError as e:
# User friendly error if the lock could not be claimed
if e.pgcode == '55P03':
raise orm.except_orm(
_('Error'),
_('Another process/thread is already busy '
'executing this job'))
raise
return True
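# Hook-up sketch (assumption, not shown in this file): run_manually is meant
# to be wired to a button on the ir.cron form view, for example:
#
#     <button name="run_manually" string="Run now" type="object"/>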
|
agpl-3.0
|
gibil5/openhealth
|
travis/run_pylint.py
|
4
|
4202
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys
import click
import pylint.lint
import getaddons
CLICK_DIR = click.Path(exists=True, dir_okay=True, resolve_path=True)
def get_count_fails(linter_stats, msgs_no_count=None):
"""Verify the dictionary statistics to get number of errors.
:param linter_stats: Dict of type pylint.lint.Run().linter.stats
:param msgs_no_count: List of messages that will not add to the failure count.
:return: Integer with quantity of fails found.
"""
return sum([
linter_stats['by_msg'][msg]
for msg in linter_stats['by_msg']
if msg not in msgs_no_count])
def get_subpaths(paths):
"""Get list of subdirectories
if `__init__.py` file not exists in root path then
get subdirectories.
Why? More info here:
https://www.mail-archive.com/code-quality@python.org/msg00294.html
:param paths: List of paths
:return: Return list of paths with subdirectories.
"""
subpaths = []
for path in paths:
if not os.path.isfile(os.path.join(path, '__init__.py')):
subpaths.extend(
[os.path.join(path, item)
for item in os.listdir(path)
if os.path.isfile(os.path.join(path, item, '__init__.py')) and
(not getaddons.is_module(os.path.join(path, item)) or
getaddons.is_installable_module(os.path.join(path, item)))])
else:
if not getaddons.is_module(path) or \
getaddons.is_installable_module(path):
subpaths.append(path)
return subpaths
def run_pylint(paths, cfg, beta_msgs=None, sys_paths=None, extra_params=None):
"""Execute pylint command from original python library
:param paths: List of paths of python modules to check pylint
:param cfg: String name of pylint configuration file
:param sys_paths: List of paths to append to sys path
:param extra_params: List of parameters extra to append
in pylint command
:return: Dict with python linter stats
"""
if sys_paths is None:
sys_paths = []
if extra_params is None:
extra_params = []
sys.path.extend(sys_paths)
cmd = ['--rcfile=' + cfg]
cmd.extend(extra_params)
subpaths = get_subpaths(paths)
if not subpaths:
raise UserWarning("Python modules not found in paths"
" {paths}".format(paths=paths))
cmd.extend(subpaths)
pylint_res = pylint.lint.Run(cmd, exit=False)
return pylint_res.linter.stats
@click.command()
@click.option('paths', '--path', envvar='TRAVIS_BUILD_DIR',
multiple=True, type=CLICK_DIR, required=True,
default=[os.getcwd()],
help="Addons paths to check pylint")
@click.option('--config-file', '-c',
type=click.File('r', lazy=True), required=True,
help="Pylint config file")
@click.option('--sys-paths', '-sys-path', envvar='PYTHONPATH',
multiple=True, type=CLICK_DIR,
help="Additional paths to append in sys path.")
@click.option('--extra-params', '-extra-param', multiple=True,
help="Extra pylint params to append "
"in pylint command")
@click.option('--msgs-no-count', '-msgs-no-count', multiple=True,
help="List of messages that will not add to the failure count.")
def main(paths, config_file, msgs_no_count=None,
sys_paths=None, extra_params=None):
"""Script to run pylint command with additional params
to check fails of odoo modules.
If expected errors is equal to count fails found then
this program exit with zero otherwise exit with counted fails"""
try:
stats = run_pylint(
list(paths), config_file.name,
sys_paths=sys_paths,
extra_params=extra_params)
count_fails = get_count_fails(stats, list(msgs_no_count))
except UserWarning:
count_fails = -1
return count_fails
if __name__ == '__main__':
try:
exit(main(standalone_mode=False))
except click.ClickException as e:
e.show()
exit(e.exit_code)
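# Example invocation (illustrative; the paths, rcfile and message id below are
# placeholders, not values taken from this repository):
#
#     python run_pylint.py --path ./addons --config-file .pylintrc \
#         --msgs-no-count some-message-id --extra-param "--disable=R0913"
#
# The script exits with the number of counted pylint failures, or -1 if no
# Python modules were found under the given paths.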
|
agpl-3.0
|
fast90/youtube-dl
|
youtube_dl/extractor/macgamestore.py
|
142
|
1275
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import ExtractorError
class MacGameStoreIE(InfoExtractor):
IE_NAME = 'macgamestore'
IE_DESC = 'MacGameStore trailers'
_VALID_URL = r'https?://www\.macgamestore\.com/mediaviewer\.php\?trailer=(?P<id>\d+)'
_TEST = {
'url': 'http://www.macgamestore.com/mediaviewer.php?trailer=2450',
'md5': '8649b8ea684b6666b4c5be736ecddc61',
'info_dict': {
'id': '2450',
'ext': 'm4v',
'title': 'Crow',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(
url, video_id, 'Downloading trailer page')
if '>Missing Media<' in webpage:
raise ExtractorError(
'Trailer %s does not exist' % video_id, expected=True)
video_title = self._html_search_regex(
r'<title>MacGameStore: (.*?) Trailer</title>', webpage, 'title')
video_url = self._html_search_regex(
r'(?s)<div\s+id="video-player".*?href="([^"]+)"\s*>',
webpage, 'video URL')
return {
'id': video_id,
'url': video_url,
'title': video_title
}
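# Illustrative invocation through the youtube-dl CLI, using the URL from _TEST:
#
#     youtube-dl "http://www.macgamestore.com/mediaviewer.php?trailer=2450"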
|
unlicense
|
FrodeSolheim/fs-uae-launcher
|
fsgamesys/GameDatabase.py
|
3
|
7945
|
import json
import os
import sqlite3
import zlib
from binascii import hexlify, unhexlify
from .BaseDatabase import BaseDatabase
VERSION = 19
RESET_VERSION = 19
DUMMY_UUID = b"'\\x8b\\xbb\\x00Y\\x8bqM\\x15\\x972\\xa8-t_\\xb2\\xfd'"
class IncompleteGameException(Exception):
pass
class GameDatabase(BaseDatabase):
def __init__(self, path):
BaseDatabase.__init__(self, BaseDatabase.SENTINEL)
self._path = path
self._name = os.path.splitext(os.path.basename(path))[0]
def name(self):
return self._name
def path(self):
return self._path
def get_path(self):
return self._path
def version(self):
return VERSION
def get_version(self):
return VERSION
def get_reset_version(self):
return RESET_VERSION
@staticmethod
def query(q):
return q.replace("%s", "?")
def get_last_game_id(self):
cursor = self.internal_cursor()
cursor.execute("SELECT max(id) FROM game")
row = cursor.fetchone()
return row[0] or 0
def get_ratings_for_game(self, game_uuid):
cursor = self.internal_cursor()
cursor.execute(
"SELECT like_rating, work_rating FROM rating WHERE "
"game_uuid = ?",
(game_uuid,),
)
row = cursor.fetchone()
if row is not None:
return row[0], row[1]
else:
return 0, 0
# noinspection PyMethodMayBeStatic,PyUnusedLocal
def get_license_code_for_url(self, url):
# cursor = self.internal_cursor()
# cursor.execute(
# "SELECT license_code FROM file "
# "WHERE external = ? LIMIT 1", (url,))
# row = cursor.fetchone()
# if not row:
# return None
# return row[0]
return None
def add_game(self, game_id, game_uuid, game_data):
cursor = self.internal_cursor()
cursor.execute(
"DELETE FROM game WHERE uuid = ?", (sqlite3.Binary(game_uuid),)
)
cursor.execute(
"INSERT INTO game (id, uuid, data) VALUES (?, ?, ?)",
(game_id, sqlite3.Binary(game_uuid), sqlite3.Binary(game_data)),
)
def delete_game(self, game_id, game_uuid):
cursor = self.internal_cursor()
cursor.execute(
"DELETE FROM game WHERE uuid = ?", (sqlite3.Binary(game_uuid),)
)
cursor.execute(
"DELETE FROM game WHERE uuid = ?", (sqlite3.Binary(DUMMY_UUID),)
)
cursor.execute(
"INSERT INTO game (id, uuid, data) VALUES (?, ?, ?)",
(game_id, sqlite3.Binary(DUMMY_UUID), ""),
)
@staticmethod
def binary_uuid_to_str(data):
s = hexlify(data).decode("ASCII")
return "{}-{}-{}-{}-{}".format(
s[0:8], s[8:12], s[12:16], s[16:20], s[20:32]
)
def get_all_uuids(self):
cursor = self.internal_cursor()
cursor.execute("SELECT uuid FROM game WHERE data IS NOT NULL")
result = set()
for row in cursor:
data = row[0]
if len(data) != 16:
print(
"WARNING: Invalid UUID ({} bytes) found: {}".format(
len(data), repr(data)
)
)
continue
result.add(self.binary_uuid_to_str(data))
return result
def get_game_values_for_uuid(self, game_uuid, recursive=True):
# print("get_game_values_for_uuid", game_uuid)
assert game_uuid
assert isinstance(game_uuid, str)
return self.get_game_values(game_uuid=game_uuid, recursive=recursive)
def get_game_values(self, game_id=None, *, game_uuid=None, recursive=True):
cursor = self.internal_cursor()
if game_uuid is not None:
cursor.execute(
"SELECT uuid, data FROM game WHERE uuid = ?",
(sqlite3.Binary(unhexlify(game_uuid.replace("-", ""))),),
)
row = cursor.fetchone()
if not row:
raise LookupError("Cannot find game uuid {}".format(game_uuid))
else:
cursor.execute(
"SELECT uuid, data FROM game WHERE id = ?", (game_id,)
)
row = cursor.fetchone()
if not row:
raise LookupError("Cannot find game id {}".format(game_id))
if game_uuid is None:
game_uuid = self.binary_uuid_to_str(row[0])
else:
assert self.binary_uuid_to_str(row[0]) == game_uuid
data = zlib.decompress(row[1])
data = data.decode("UTF-8")
doc = json.loads(data)
doc["variant_uuid"] = game_uuid
# This is a hack so we can retrieve the published-status of the
# child entry. The original key should have been named _published,
# probably, to avoid it being inherited.
variant_published = doc.get("publish", "")
next_parent_uuid = doc.get("parent_uuid", "")
next_parent_database = doc.get("parent_database", "")
while next_parent_uuid and recursive:
# Treat game_uuid special, it will be the first parent_uuid
# in the chain.
doc["game_uuid"] = next_parent_uuid
if next_parent_database:
break
else:
cursor.execute(
"SELECT data FROM game WHERE uuid = ?",
(
sqlite3.Binary(
unhexlify(next_parent_uuid.replace("-", ""))
),
),
)
row = cursor.fetchone()
if not row:
raise IncompleteGameException(
"Could not find parent {0} of game {1}".format(
next_parent_uuid, game_uuid
)
)
data = zlib.decompress(row[0])
data = data.decode("UTF-8")
next_doc = json.loads(data)
next_parent_uuid = next_doc.get("parent_uuid", "")
next_parent_database = next_doc.get("parent_database", "")
# Let child doc overwrite and append values to parent doc.
next_doc.update(doc)
doc = next_doc
doc["__publish_hack__"] = variant_published
return doc
def get_game_database_version(self):
cursor = self.internal_cursor()
cursor.execute("SELECT database_version FROM metadata")
return cursor.fetchone()[0]
def set_game_database_version(self, version):
cursor = self.internal_cursor()
cursor.execute("UPDATE metadata SET database_version = ?", (version,))
def clear(self):
cursor = self.internal_cursor()
cursor.execute("DELETE FROM rating")
cursor.execute("DELETE FROM game")
def update_database_to_version_19(self):
cursor = self.internal_cursor()
cursor.execute(
"""
ALTER TABLE metadata ADD COLUMN games_version
INTEGER NOT NULL DEFAULT 0;
"""
)
cursor.execute(
"""
CREATE TABLE game (
id INTEGER PRIMARY KEY,
uuid BLOB,
data BLOB
);
"""
)
cursor.execute(
"""
CREATE INDEX game_uuid ON game (uuid);
"""
)
cursor.execute(
"""
CREATE TABLE rating (
game_uuid VARCHAR(36) PRIMARY KEY NOT NULL,
work_rating INT NOT NULL,
like_rating INT NOT NULL,
updated TIMESTAMP NULL
);
"""
)
cursor.execute(
"""
ALTER TABLE metadata ADD COLUMN database_version
INTEGER NOT NULL DEFAULT 0;
"""
)
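# Worked example for binary_uuid_to_str (illustrative, not part of the module):
#
#     GameDatabase.binary_uuid_to_str(b"\x00" * 16)
#     # -> '00000000-0000-0000-0000-000000000000'
#
# i.e. the 16 raw bytes are hex-encoded and grouped 8-4-4-4-12.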
|
gpl-2.0
|
bottompawn/kbengine
|
kbe/src/lib/python/Doc/tools/sphinxext/suspicious.py
|
37
|
9428
|
"""
Try to detect suspicious constructs, resembling markup
that has leaked into the final output.
Suspicious lines are reported in a comma-separated-file,
``suspicious.csv``, located in the output directory.
The file is utf-8 encoded, and each line contains four fields:
* document name (normalized)
* line number in the source document
* problematic text
* complete line showing the problematic text in context
It is common to find many false positives. To avoid reporting them
again and again, they may be added to the ``ignored.csv`` file
(located in the configuration directory). The file has the same
format as ``suspicious.csv`` with a few differences:
- each line defines a rule; if the rule matches, the issue
is ignored.
- line number may be empty (that is, nothing between the
commas: ",,"). In this case, line numbers are ignored (the
rule matches anywhere in the file).
- the last field does not have to be a complete line; some
surrounding text (never more than a line) is enough for
context.
Rules are processed sequentially. A rule matches when:
* document names are the same
* problematic texts are the same
* line numbers are close to each other (5 lines up or down)
* the rule text is completely contained in the source line
The simplest way to create the ignored.csv file is by copying
undesired entries from suspicious.csv (possibly trimming the last
field.)
Copyright 2009 Gabriel A. Genellina
"""
import os
import re
import csv
import sys
from docutils import nodes
from sphinx.builders import Builder
detect_all = re.compile(r'''
::(?=[^=])| # two :: (but NOT ::=)
:[a-zA-Z][a-zA-Z0-9]+| # :foo
`| # ` (seldom used by itself)
(?<!\.)\.\.[ \t]*\w+: # .. foo: (but NOT ... else:)
''', re.UNICODE | re.VERBOSE).finditer
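# Illustrative behaviour (not part of the original file): in the text
# "call dict::update here", detect_all() yields a single match for the "::"
# fragment, i.e. the kind of leaked reST markup this builder reports.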
py3 = sys.version_info >= (3, 0)
class Rule:
def __init__(self, docname, lineno, issue, line):
"""A rule for ignoring issues"""
self.docname = docname # document to which this rule applies
self.lineno = lineno # line number in the original source;
# this rule matches only near that.
# None -> don't care
self.issue = issue # the markup fragment that triggered this rule
self.line = line # text of the container element (single line only)
self.used = False
def __repr__(self):
return '{0.docname},,{0.issue},{0.line}'.format(self)
class dialect(csv.excel):
"""Our dialect: uses only linefeed as newline."""
lineterminator = '\n'
class CheckSuspiciousMarkupBuilder(Builder):
"""
Checks for possibly invalid markup that may leak into the output.
"""
name = 'suspicious'
def init(self):
# create output file
self.log_file_name = os.path.join(self.outdir, 'suspicious.csv')
open(self.log_file_name, 'w').close()
# load database of previously ignored issues
self.load_rules(os.path.join(os.path.dirname(__file__),
'susp-ignored.csv'))
def get_outdated_docs(self):
return self.env.found_docs
def get_target_uri(self, docname, typ=None):
return ''
def prepare_writing(self, docnames):
pass
def write_doc(self, docname, doctree):
# set when any issue is encountered in this document
self.any_issue = False
self.docname = docname
visitor = SuspiciousVisitor(doctree, self)
doctree.walk(visitor)
def finish(self):
unused_rules = [rule for rule in self.rules if not rule.used]
if unused_rules:
self.warn('Found %s/%s unused rules:' %
(len(unused_rules), len(self.rules)))
for rule in unused_rules:
self.info(repr(rule))
return
def check_issue(self, line, lineno, issue):
if not self.is_ignored(line, lineno, issue):
self.report_issue(line, lineno, issue)
def is_ignored(self, line, lineno, issue):
"""Determine whether this issue should be ignored."""
docname = self.docname
for rule in self.rules:
if rule.docname != docname: continue
if rule.issue != issue: continue
# Both lines must match *exactly*. This is rather strict,
# and probably should be improved.
# Doing fuzzy matches with levenshtein distance could work,
# but that means bringing other libraries...
# Ok, relax that requirement: just check if the rule fragment
# is contained in the document line
if rule.line not in line: continue
# Check both line numbers. If they're "near"
# this rule matches. (lineno=None means "don't care")
if (rule.lineno is not None) and \
abs(rule.lineno - lineno) > 5: continue
# if it came this far, the rule matched
rule.used = True
return True
return False
def report_issue(self, text, lineno, issue):
if not self.any_issue: self.info()
self.any_issue = True
self.write_log_entry(lineno, issue, text)
if py3:
self.warn('[%s:%d] "%s" found in "%-.120s"' %
(self.docname, lineno, issue, text))
else:
self.warn('[%s:%d] "%s" found in "%-.120s"' % (
self.docname.encode(sys.getdefaultencoding(),'replace'),
lineno,
issue.encode(sys.getdefaultencoding(),'replace'),
text.strip().encode(sys.getdefaultencoding(),'replace')))
self.app.statuscode = 1
def write_log_entry(self, lineno, issue, text):
if py3:
f = open(self.log_file_name, 'a')
writer = csv.writer(f, dialect)
writer.writerow([self.docname, lineno, issue, text.strip()])
f.close()
else:
f = open(self.log_file_name, 'ab')
writer = csv.writer(f, dialect)
writer.writerow([self.docname.encode('utf-8'),
lineno,
issue.encode('utf-8'),
text.strip().encode('utf-8')])
f.close()
def load_rules(self, filename):
"""Load database of previously ignored issues.
A csv file, with exactly the same format as suspicious.csv
Fields: document name (normalized), line number, issue, surrounding text
"""
self.info("loading ignore rules... ", nonl=1)
self.rules = rules = []
try:
if py3:
f = open(filename, 'r')
else:
f = open(filename, 'rb')
except IOError:
return
for i, row in enumerate(csv.reader(f)):
if len(row) != 4:
raise ValueError(
"wrong format in %s, line %d: %s" % (filename, i+1, row))
docname, lineno, issue, text = row
if lineno:
lineno = int(lineno)
else:
lineno = None
if not py3:
docname = docname.decode('utf-8')
issue = issue.decode('utf-8')
text = text.decode('utf-8')
rule = Rule(docname, lineno, issue, text)
rules.append(rule)
f.close()
self.info('done, %d rules loaded' % len(self.rules))
def get_lineno(node):
"""Obtain line number information for a node."""
lineno = None
while lineno is None and node:
node = node.parent
lineno = node.line
return lineno
def extract_line(text, index):
"""text may be a multiline string; extract
only the line containing the given character index.
>>> extract_line("abc\ndefgh\ni", 6)
'defgh'
>>> for i in (0, 2, 3, 4, 10):
...     print extract_line("abc\ndefgh\ni", i)
abc
abc
abc
defgh
i
"""
p = text.rfind('\n', 0, index) + 1
q = text.find('\n', index)
if q < 0:
q = len(text)
return text[p:q]
class SuspiciousVisitor(nodes.GenericNodeVisitor):
lastlineno = 0
def __init__(self, document, builder):
nodes.GenericNodeVisitor.__init__(self, document)
self.builder = builder
def default_visit(self, node):
if isinstance(node, (nodes.Text, nodes.image)): # direct text containers
text = node.astext()
# lineno seems to go backwards sometimes (?)
self.lastlineno = lineno = max(get_lineno(node) or 0, self.lastlineno)
seen = set() # don't report the same issue more than once per line
for match in detect_all(text):
issue = match.group()
line = extract_line(text, match.start())
if (issue, line) not in seen:
self.builder.check_issue(line, lineno, issue)
seen.add((issue, line))
unknown_visit = default_visit
def visit_document(self, node):
self.lastlineno = 0
def visit_comment(self, node):
# ignore comments -- too much false positives.
# (although doing this could miss some errors;
# there were two sections "commented-out" by mistake
# in the Python docs that would not be caught)
raise nodes.SkipNode
|
lgpl-3.0
|
vmax-feihu/hue
|
desktop/core/ext-py/Django-1.6.10/tests/indexes/tests.py
|
40
|
1585
|
from django.core.management.color import no_style
from django.db import connection, connections, DEFAULT_DB_ALIAS
from django.test import TestCase
from django.utils.unittest import skipUnless
from .models import Article
class IndexesTests(TestCase):
def test_index_together(self):
connection = connections[DEFAULT_DB_ALIAS]
index_sql = connection.creation.sql_indexes_for_model(Article, no_style())
self.assertEqual(len(index_sql), 1)
@skipUnless(connections[DEFAULT_DB_ALIAS].vendor == 'postgresql',
"This is a postgresql-specific issue")
def test_postgresql_text_indexes(self):
"""Test creation of PostgreSQL-specific text indexes (#12234)"""
from .models import IndexedArticle
connection = connections[DEFAULT_DB_ALIAS]
index_sql = connection.creation.sql_indexes_for_model(IndexedArticle, no_style())
self.assertEqual(len(index_sql), 5)
self.assertIn('("headline" varchar_pattern_ops)', index_sql[1])
self.assertIn('("body" text_pattern_ops)', index_sql[3])
# unique=True and db_index=True should only create the varchar-specific
# index (#19441).
self.assertIn('("slug" varchar_pattern_ops)', index_sql[4])
@skipUnless(connection.vendor == 'postgresql',
"This is a postgresql-specific issue")
def test_postgresql_virtual_relation_indexes(self):
"""Test indexes are not created for related objects"""
index_sql = connection.creation.sql_indexes_for_model(Article, no_style())
self.assertEqual(len(index_sql), 1)
|
apache-2.0
|
onestarshang/flask_super_config
|
venv/lib/python2.7/site-packages/jinja2/ext.py
|
603
|
25078
|
# -*- coding: utf-8 -*-
"""
jinja2.ext
~~~~~~~~~~
Jinja extensions allow adding custom tags similar to the way Django custom
tags work. By default two example extensions exist: an i18n and a cache
extension.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from jinja2 import nodes
from jinja2.defaults import BLOCK_START_STRING, \
BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \
COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \
LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \
KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS
from jinja2.environment import Environment
from jinja2.runtime import concat
from jinja2.exceptions import TemplateAssertionError, TemplateSyntaxError
from jinja2.utils import contextfunction, import_string, Markup
from jinja2._compat import next, with_metaclass, string_types, iteritems
# the only real useful gettext functions for a Jinja template. Note
# that ugettext must be assigned to gettext as Jinja doesn't support
# non unicode strings.
GETTEXT_FUNCTIONS = ('_', 'gettext', 'ngettext')
class ExtensionRegistry(type):
"""Gives the extension an unique identifier."""
def __new__(cls, name, bases, d):
rv = type.__new__(cls, name, bases, d)
rv.identifier = rv.__module__ + '.' + rv.__name__
return rv
class Extension(with_metaclass(ExtensionRegistry, object)):
"""Extensions can be used to add extra functionality to the Jinja template
system at the parser level. Custom extensions are bound to an environment
but may not store environment specific data on `self`. The reason for
this is that an extension can be bound to another environment (for
overlays) by creating a copy and reassigning the `environment` attribute.
As extensions are created by the environment they cannot accept any
arguments for configuration. One may want to work around that by using
a factory function, but that is not possible as extensions are identified
by their import name. The correct way to configure the extension is
storing the configuration values on the environment. Because this way the
environment ends up acting as central configuration storage the
attributes may clash which is why extensions have to ensure that the names
they choose for configuration are not too generic. ``prefix`` for example
is a terrible name, ``fragment_cache_prefix`` on the other hand is a good
name as it includes the name of the extension (fragment cache).
"""
#: if this extension parses this is the list of tags it's listening to.
tags = set()
#: the priority of that extension. This is especially useful for
#: extensions that preprocess values. A lower value means higher
#: priority.
#:
#: .. versionadded:: 2.4
priority = 100
def __init__(self, environment):
self.environment = environment
def bind(self, environment):
"""Create a copy of this extension bound to another environment."""
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.environment = environment
return rv
def preprocess(self, source, name, filename=None):
"""This method is called before the actual lexing and can be used to
preprocess the source. The `filename` is optional. The return value
must be the preprocessed source.
"""
return source
def filter_stream(self, stream):
"""It's passed a :class:`~jinja2.lexer.TokenStream` that can be used
to filter tokens returned. This method has to return an iterable of
:class:`~jinja2.lexer.Token`\s, but it doesn't have to return a
:class:`~jinja2.lexer.TokenStream`.
In the `ext` folder of the Jinja2 source distribution there is a file
called `inlinegettext.py` which implements a filter that utilizes this
method.
"""
return stream
def parse(self, parser):
"""If any of the :attr:`tags` matched this method is called with the
parser as first argument. The token the parser stream is pointing at
is the name token that matched. This method has to return one or a
list of multiple nodes.
"""
raise NotImplementedError()
def attr(self, name, lineno=None):
"""Return an attribute node for the current extension. This is useful
to pass constants on extensions to generated template code.
::
self.attr('_my_attribute', lineno=lineno)
"""
return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
def call_method(self, name, args=None, kwargs=None, dyn_args=None,
dyn_kwargs=None, lineno=None):
"""Call a method of the extension. This is a shortcut for
:meth:`attr` + :class:`jinja2.nodes.Call`.
"""
if args is None:
args = []
if kwargs is None:
kwargs = []
return nodes.Call(self.attr(name, lineno=lineno), args, kwargs,
dyn_args, dyn_kwargs, lineno=lineno)
@contextfunction
def _gettext_alias(__context, *args, **kwargs):
return __context.call(__context.resolve('gettext'), *args, **kwargs)
def _make_new_gettext(func):
@contextfunction
def gettext(__context, __string, **variables):
rv = __context.call(func, __string)
if __context.eval_ctx.autoescape:
rv = Markup(rv)
return rv % variables
return gettext
def _make_new_ngettext(func):
@contextfunction
def ngettext(__context, __singular, __plural, __num, **variables):
variables.setdefault('num', __num)
rv = __context.call(func, __singular, __plural, __num)
if __context.eval_ctx.autoescape:
rv = Markup(rv)
return rv % variables
return ngettext
class InternationalizationExtension(Extension):
"""This extension adds gettext support to Jinja2."""
tags = set(['trans'])
# TODO: the i18n extension is currently reevaluating values in a few
# situations. Take this example:
# {% trans count=something() %}{{ count }} foo{% pluralize
# %}{{ count }} fooss{% endtrans %}
# something is called twice here. One time for the gettext value and
# the other time for the n-parameter of the ngettext function.
def __init__(self, environment):
Extension.__init__(self, environment)
environment.globals['_'] = _gettext_alias
environment.extend(
install_gettext_translations=self._install,
install_null_translations=self._install_null,
install_gettext_callables=self._install_callables,
uninstall_gettext_translations=self._uninstall,
extract_translations=self._extract,
newstyle_gettext=False
)
def _install(self, translations, newstyle=None):
gettext = getattr(translations, 'ugettext', None)
if gettext is None:
gettext = translations.gettext
ngettext = getattr(translations, 'ungettext', None)
if ngettext is None:
ngettext = translations.ngettext
self._install_callables(gettext, ngettext, newstyle)
def _install_null(self, newstyle=None):
self._install_callables(
lambda x: x,
lambda s, p, n: (n != 1 and (p,) or (s,))[0],
newstyle
)
def _install_callables(self, gettext, ngettext, newstyle=None):
if newstyle is not None:
self.environment.newstyle_gettext = newstyle
if self.environment.newstyle_gettext:
gettext = _make_new_gettext(gettext)
ngettext = _make_new_ngettext(ngettext)
self.environment.globals.update(
gettext=gettext,
ngettext=ngettext
)
def _uninstall(self, translations):
for key in 'gettext', 'ngettext':
self.environment.globals.pop(key, None)
def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
if isinstance(source, string_types):
source = self.environment.parse(source)
return extract_from_ast(source, gettext_functions)
def parse(self, parser):
"""Parse a translatable tag."""
lineno = next(parser.stream).lineno
num_called_num = False
# find all the variables referenced. Additionally a variable can be
# defined in the body of the trans block too, but this is checked at
# a later state.
plural_expr = None
plural_expr_assignment = None
variables = {}
while parser.stream.current.type != 'block_end':
if variables:
parser.stream.expect('comma')
# skip colon for python compatibility
if parser.stream.skip_if('colon'):
break
name = parser.stream.expect('name')
if name.value in variables:
parser.fail('translatable variable %r defined twice.' %
name.value, name.lineno,
exc=TemplateAssertionError)
# expressions
if parser.stream.current.type == 'assign':
next(parser.stream)
variables[name.value] = var = parser.parse_expression()
else:
variables[name.value] = var = nodes.Name(name.value, 'load')
if plural_expr is None:
if isinstance(var, nodes.Call):
plural_expr = nodes.Name('_trans', 'load')
variables[name.value] = plural_expr
plural_expr_assignment = nodes.Assign(
nodes.Name('_trans', 'store'), var)
else:
plural_expr = var
num_called_num = name.value == 'num'
parser.stream.expect('block_end')
plural = plural_names = None
have_plural = False
referenced = set()
# now parse until endtrans or pluralize
singular_names, singular = self._parse_block(parser, True)
if singular_names:
referenced.update(singular_names)
if plural_expr is None:
plural_expr = nodes.Name(singular_names[0], 'load')
num_called_num = singular_names[0] == 'num'
# if we have a pluralize block, we parse that too
if parser.stream.current.test('name:pluralize'):
have_plural = True
next(parser.stream)
if parser.stream.current.type != 'block_end':
name = parser.stream.expect('name')
if name.value not in variables:
parser.fail('unknown variable %r for pluralization' %
name.value, name.lineno,
exc=TemplateAssertionError)
plural_expr = variables[name.value]
num_called_num = name.value == 'num'
parser.stream.expect('block_end')
plural_names, plural = self._parse_block(parser, False)
next(parser.stream)
referenced.update(plural_names)
else:
next(parser.stream)
# register free names as simple name expressions
for var in referenced:
if var not in variables:
variables[var] = nodes.Name(var, 'load')
if not have_plural:
plural_expr = None
elif plural_expr is None:
parser.fail('pluralize without variables', lineno)
node = self._make_node(singular, plural, variables, plural_expr,
bool(referenced),
num_called_num and have_plural)
node.set_lineno(lineno)
if plural_expr_assignment is not None:
return [plural_expr_assignment, node]
else:
return node
def _parse_block(self, parser, allow_pluralize):
"""Parse until the next block tag with a given name."""
referenced = []
buf = []
while 1:
if parser.stream.current.type == 'data':
buf.append(parser.stream.current.value.replace('%', '%%'))
next(parser.stream)
elif parser.stream.current.type == 'variable_begin':
next(parser.stream)
name = parser.stream.expect('name').value
referenced.append(name)
buf.append('%%(%s)s' % name)
parser.stream.expect('variable_end')
elif parser.stream.current.type == 'block_begin':
next(parser.stream)
if parser.stream.current.test('name:endtrans'):
break
elif parser.stream.current.test('name:pluralize'):
if allow_pluralize:
break
parser.fail('a translatable section can have only one '
'pluralize section')
parser.fail('control structures in translatable sections are '
'not allowed')
elif parser.stream.eos:
parser.fail('unclosed translation block')
else:
assert False, 'internal parser error'
return referenced, concat(buf)
def _make_node(self, singular, plural, variables, plural_expr,
vars_referenced, num_called_num):
"""Generates a useful node from the data provided."""
# no variables referenced? no need to escape for old style
# gettext invocations only if there are vars.
if not vars_referenced and not self.environment.newstyle_gettext:
singular = singular.replace('%%', '%')
if plural:
plural = plural.replace('%%', '%')
# singular only:
if plural_expr is None:
gettext = nodes.Name('gettext', 'load')
node = nodes.Call(gettext, [nodes.Const(singular)],
[], None, None)
# singular and plural
else:
ngettext = nodes.Name('ngettext', 'load')
node = nodes.Call(ngettext, [
nodes.Const(singular),
nodes.Const(plural),
plural_expr
], [], None, None)
# in case newstyle gettext is used, the method is powerful
# enough to handle the variable expansion and autoescape
# handling itself
if self.environment.newstyle_gettext:
for key, value in iteritems(variables):
# the function adds that later anyways in case num was
# called num, so just skip it.
if num_called_num and key == 'num':
continue
node.kwargs.append(nodes.Keyword(key, value))
# otherwise do that here
else:
# mark the return value as safe if we are in an
# environment with autoescaping turned on
node = nodes.MarkSafeIfAutoescape(node)
if variables:
node = nodes.Mod(node, nodes.Dict([
nodes.Pair(nodes.Const(key), value)
for key, value in variables.items()
]))
return nodes.Output([node])
class ExprStmtExtension(Extension):
"""Adds a `do` tag to Jinja2 that works like the print statement just
that it doesn't print the return value.
"""
tags = set(['do'])
def parse(self, parser):
node = nodes.ExprStmt(lineno=next(parser.stream).lineno)
node.node = parser.parse_tuple()
return node
class LoopControlExtension(Extension):
"""Adds break and continue to the template engine."""
tags = set(['break', 'continue'])
def parse(self, parser):
token = next(parser.stream)
if token.value == 'break':
return nodes.Break(lineno=token.lineno)
return nodes.Continue(lineno=token.lineno)
class WithExtension(Extension):
"""Adds support for a django-like with block."""
tags = set(['with'])
def parse(self, parser):
node = nodes.Scope(lineno=next(parser.stream).lineno)
assignments = []
while parser.stream.current.type != 'block_end':
lineno = parser.stream.current.lineno
if assignments:
parser.stream.expect('comma')
target = parser.parse_assign_target()
parser.stream.expect('assign')
expr = parser.parse_expression()
assignments.append(nodes.Assign(target, expr, lineno=lineno))
node.body = assignments + \
list(parser.parse_statements(('name:endwith',),
drop_needle=True))
return node
class AutoEscapeExtension(Extension):
"""Changes auto escape rules for a scope."""
tags = set(['autoescape'])
def parse(self, parser):
node = nodes.ScopedEvalContextModifier(lineno=next(parser.stream).lineno)
node.options = [
nodes.Keyword('autoescape', parser.parse_expression())
]
node.body = parser.parse_statements(('name:endautoescape',),
drop_needle=True)
return nodes.Scope([node])
def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS,
babel_style=True):
"""Extract localizable strings from the given template node. Per
default this function returns matches in babel style that means non string
parameters as well as keyword arguments are returned as `None`. This
allows Babel to figure out what you really meant if you are using
gettext functions that allow keyword arguments for placeholder expansion.
If you don't want that behavior set the `babel_style` parameter to `False`
which causes only strings to be returned and parameters are always stored
in tuples. As a consequence invalid gettext calls (calls without a single
string parameter or string parameters after non-string parameters) are
skipped.
This example explains the behavior:
>>> from jinja2 import Environment
>>> env = Environment()
>>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
>>> list(extract_from_ast(node))
[(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
>>> list(extract_from_ast(node, babel_style=False))
[(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]
For every string found this function yields a ``(lineno, function,
message)`` tuple, where:
* ``lineno`` is the number of the line on which the string was found,
* ``function`` is the name of the ``gettext`` function used (if the
string was extracted from embedded Python code), and
* ``message`` is the string itself (a ``unicode`` object, or a tuple
of ``unicode`` objects for functions with multiple string arguments).
This extraction function operates on the AST and is therefore unable
to extract any comments. For comment support you have to use the Babel
extraction interface or extract the comments yourself.
"""
for node in node.find_all(nodes.Call):
if not isinstance(node.node, nodes.Name) or \
node.node.name not in gettext_functions:
continue
strings = []
for arg in node.args:
if isinstance(arg, nodes.Const) and \
isinstance(arg.value, string_types):
strings.append(arg.value)
else:
strings.append(None)
for arg in node.kwargs:
strings.append(None)
if node.dyn_args is not None:
strings.append(None)
if node.dyn_kwargs is not None:
strings.append(None)
if not babel_style:
strings = tuple(x for x in strings if x is not None)
if not strings:
continue
else:
if len(strings) == 1:
strings = strings[0]
else:
strings = tuple(strings)
yield node.lineno, node.node.name, strings
class _CommentFinder(object):
"""Helper class to find comments in a token stream. Can only
find comments for gettext calls forwards. Once the comment
from line 4 is found, a comment for line 1 will not return a
usable value.
"""
def __init__(self, tokens, comment_tags):
self.tokens = tokens
self.comment_tags = comment_tags
self.offset = 0
self.last_lineno = 0
def find_backwards(self, offset):
try:
for _, token_type, token_value in \
reversed(self.tokens[self.offset:offset]):
if token_type in ('comment', 'linecomment'):
try:
prefix, comment = token_value.split(None, 1)
except ValueError:
continue
if prefix in self.comment_tags:
return [comment.rstrip()]
return []
finally:
self.offset = offset
def find_comments(self, lineno):
if not self.comment_tags or self.last_lineno > lineno:
return []
for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset:]):
if token_lineno > lineno:
return self.find_backwards(self.offset + idx)
return self.find_backwards(len(self.tokens))
def babel_extract(fileobj, keywords, comment_tags, options):
"""Babel extraction method for Jinja templates.
.. versionchanged:: 2.3
Basic support for translation comments was added. If `comment_tags`
is now set to a list of keywords for extraction, the extractor will
try to find the best preceding comment that begins with one of the
keywords. For best results, make sure to not have more than one
gettext call in one line of code and the matching comment in the
same line or the line before.
.. versionchanged:: 2.5.1
The `newstyle_gettext` flag can be set to `True` to enable newstyle
gettext calls.
.. versionchanged:: 2.7
A `silent` option can now be provided. If set to `False` template
syntax errors are propagated instead of being ignored.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results.
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
(comments will be empty currently)
"""
extensions = set()
for extension in options.get('extensions', '').split(','):
extension = extension.strip()
if not extension:
continue
extensions.add(import_string(extension))
if InternationalizationExtension not in extensions:
extensions.add(InternationalizationExtension)
def getbool(options, key, default=False):
return options.get(key, str(default)).lower() in \
('1', 'on', 'yes', 'true')
silent = getbool(options, 'silent', True)
environment = Environment(
options.get('block_start_string', BLOCK_START_STRING),
options.get('block_end_string', BLOCK_END_STRING),
options.get('variable_start_string', VARIABLE_START_STRING),
options.get('variable_end_string', VARIABLE_END_STRING),
options.get('comment_start_string', COMMENT_START_STRING),
options.get('comment_end_string', COMMENT_END_STRING),
options.get('line_statement_prefix') or LINE_STATEMENT_PREFIX,
options.get('line_comment_prefix') or LINE_COMMENT_PREFIX,
getbool(options, 'trim_blocks', TRIM_BLOCKS),
getbool(options, 'lstrip_blocks', LSTRIP_BLOCKS),
NEWLINE_SEQUENCE,
getbool(options, 'keep_trailing_newline', KEEP_TRAILING_NEWLINE),
frozenset(extensions),
cache_size=0,
auto_reload=False
)
if getbool(options, 'newstyle_gettext'):
environment.newstyle_gettext = True
source = fileobj.read().decode(options.get('encoding', 'utf-8'))
try:
node = environment.parse(source)
tokens = list(environment.lex(environment.preprocess(source)))
except TemplateSyntaxError as e:
if not silent:
raise
# skip templates with syntax errors
return
finder = _CommentFinder(tokens, comment_tags)
for lineno, func, message in extract_from_ast(node, keywords):
yield lineno, func, message, finder.find_comments(lineno)
#: nicer import names
i18n = InternationalizationExtension
do = ExprStmtExtension
loopcontrols = LoopControlExtension
with_ = WithExtension
autoescape = AutoEscapeExtension
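# Configuration sketch (illustrative, not part of the original module): the
# aliases above are meant to be passed to an Environment, e.g.
#
#     from jinja2 import Environment
#     env = Environment(extensions=['jinja2.ext.i18n', 'jinja2.ext.do'])
#     env.install_null_translations()  # method added by InternationalizationExtension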
|
gpl-2.0
|
TathagataChakraborti/resource-conflicts
|
PLANROB-2015/seq-sat-lama/Python-2.5.2/Lib/test/test_httplib.py
|
22
|
5580
|
import httplib
import StringIO
import sys
from unittest import TestCase
from test import test_support
class FakeSocket:
def __init__(self, text, fileclass=StringIO.StringIO):
self.text = text
self.fileclass = fileclass
def sendall(self, data):
self.data = data
def makefile(self, mode, bufsize=None):
if mode != 'r' and mode != 'rb':
raise httplib.UnimplementedFileMode()
return self.fileclass(self.text)
class NoEOFStringIO(StringIO.StringIO):
"""Like StringIO, but raises AssertionError on EOF.
This is used below to test that httplib doesn't try to read
more from the underlying file than it should.
"""
def read(self, n=-1):
data = StringIO.StringIO.read(self, n)
if data == '':
raise AssertionError('caller tried to read past EOF')
return data
def readline(self, length=None):
data = StringIO.StringIO.readline(self, length)
if data == '':
raise AssertionError('caller tried to read past EOF')
return data
class HeaderTests(TestCase):
def test_auto_headers(self):
# Some headers are added automatically, but should not be added by
# .request() if they are explicitly set.
import httplib
class HeaderCountingBuffer(list):
def __init__(self):
self.count = {}
def append(self, item):
kv = item.split(':')
if len(kv) > 1:
# item is a 'Key: Value' header string
lcKey = kv[0].lower()
self.count.setdefault(lcKey, 0)
self.count[lcKey] += 1
list.append(self, item)
for explicit_header in True, False:
for header in 'Content-length', 'Host', 'Accept-encoding':
conn = httplib.HTTPConnection('example.com')
conn.sock = FakeSocket('blahblahblah')
conn._buffer = HeaderCountingBuffer()
body = 'spamspamspam'
headers = {}
if explicit_header:
headers[header] = str(len(body))
conn.request('POST', '/', body, headers)
self.assertEqual(conn._buffer.count[header.lower()], 1)
# Collect output to a buffer so that we don't have to cope with line-ending
# issues across platforms. Specifically, the headers will have \r\n pairs
# and some platforms will strip them from the output file.
def test():
buf = StringIO.StringIO()
_stdout = sys.stdout
try:
sys.stdout = buf
_test()
finally:
sys.stdout = _stdout
# print individual lines with endings stripped
s = buf.getvalue()
for line in s.split("\n"):
print line.strip()
def _test():
# Test HTTP status lines
body = "HTTP/1.1 200 Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock, 1)
resp.begin()
print resp.read()
resp.close()
body = "HTTP/1.1 400.100 Not Ok\r\n\r\nText"
sock = FakeSocket(body)
resp = httplib.HTTPResponse(sock, 1)
try:
resp.begin()
except httplib.BadStatusLine:
print "BadStatusLine raised as expected"
else:
print "Expect BadStatusLine"
# Check invalid host_port
for hp in ("www.python.org:abc", "www.python.org:"):
try:
h = httplib.HTTP(hp)
except httplib.InvalidURL:
print "InvalidURL raised as expected"
else:
print "Expect InvalidURL"
for hp,h,p in (("[fe80::207:e9ff:fe9b]:8000", "fe80::207:e9ff:fe9b", 8000),
("www.python.org:80", "www.python.org", 80),
("www.python.org", "www.python.org", 80),
("[fe80::207:e9ff:fe9b]", "fe80::207:e9ff:fe9b", 80)):
try:
http = httplib.HTTP(hp)
except httplib.InvalidURL:
print "InvalidURL raised erroneously"
c = http._conn
if h != c.host: raise AssertionError, ("Host incorrectly parsed", h, c.host)
if p != c.port: raise AssertionError, ("Port incorrectly parsed", p, c.port)
# test response with multiple message headers with the same field name.
text = ('HTTP/1.1 200 OK\r\n'
'Set-Cookie: Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"\r\n'
'Set-Cookie: Part_Number="Rocket_Launcher_0001"; Version="1";'
' Path="/acme"\r\n'
'\r\n'
'No body\r\n')
hdr = ('Customer="WILE_E_COYOTE"; Version="1"; Path="/acme"'
', '
'Part_Number="Rocket_Launcher_0001"; Version="1"; Path="/acme"')
s = FakeSocket(text)
r = httplib.HTTPResponse(s, 1)
r.begin()
cookies = r.getheader("Set-Cookie")
if cookies != hdr:
raise AssertionError, "multiple headers not combined properly"
# Test that the library doesn't attempt to read any data
# from a HEAD request. (Tickles SF bug #622042.)
sock = FakeSocket(
'HTTP/1.1 200 OK\r\n'
'Content-Length: 14432\r\n'
'\r\n',
NoEOFStringIO)
resp = httplib.HTTPResponse(sock, 1, method="HEAD")
resp.begin()
if resp.read() != "":
raise AssertionError, "Did not expect response from HEAD request"
resp.close()
class OfflineTest(TestCase):
def test_responses(self):
self.assertEquals(httplib.responses[httplib.NOT_FOUND], "Not Found")
def test_main(verbose=None):
tests = [HeaderTests,OfflineTest]
test_support.run_unittest(*tests)
test()
|
mit
|
ProfessionalIT/maxigenios-website
|
sdk/google_appengine/lib/django-1.4/django/core/management/commands/compilemessages.py
|
373
|
2824
|
import codecs
import os
import sys
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
def has_bom(fn):
f = open(fn, 'r')
sample = f.read(4)
return sample[:3] == '\xef\xbb\xbf' or \
sample.startswith(codecs.BOM_UTF16_LE) or \
sample.startswith(codecs.BOM_UTF16_BE)
def compile_messages(stderr, locale=None):
basedirs = [os.path.join('conf', 'locale'), 'locale']
if os.environ.get('DJANGO_SETTINGS_MODULE'):
from django.conf import settings
basedirs.extend(settings.LOCALE_PATHS)
# Gather existing directories.
basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))
if not basedirs:
raise CommandError("This script should be run from the Django SVN tree or your project or app tree, or with the settings module specified.")
for basedir in basedirs:
if locale:
basedir = os.path.join(basedir, locale, 'LC_MESSAGES')
for dirpath, dirnames, filenames in os.walk(basedir):
for f in filenames:
if f.endswith('.po'):
stderr.write('processing file %s in %s\n' % (f, dirpath))
fn = os.path.join(dirpath, f)
if has_bom(fn):
raise CommandError("The %s file has a BOM (Byte Order Mark). Django only supports .po files encoded in UTF-8 and without any BOM." % fn)
pf = os.path.splitext(fn)[0]
# Store the names of the .mo and .po files in an environment
# variable, rather than doing a string replacement into the
# command, so that we can take advantage of shell quoting, to
# quote any malicious characters/escaping.
# See http://cyberelk.net/tim/articles/cmdline/ar01s02.html
os.environ['djangocompilemo'] = pf + '.mo'
os.environ['djangocompilepo'] = pf + '.po'
if sys.platform == 'win32': # Different shell-variable syntax
cmd = 'msgfmt --check-format -o "%djangocompilemo%" "%djangocompilepo%"'
else:
cmd = 'msgfmt --check-format -o "$djangocompilemo" "$djangocompilepo"'
os.system(cmd)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--locale', '-l', dest='locale',
help='The locale to process. Default is to process all.'),
)
help = 'Compiles .po files to .mo files for use with builtin gettext support.'
requires_model_validation = False
can_import_settings = False
def handle(self, **options):
locale = options.get('locale')
compile_messages(self.stderr, locale=locale)
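# Illustrative usage as a standard Django management command:
#
#     python manage.py compilemessages          # compile .po files for all locales
#     python manage.py compilemessages -l de    # restrict to the 'de' locale
#
# Each .po file found is handed to msgfmt with --check-format, as above.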
|
mit
|
ubirch/aws-tools
|
virtual-env/lib/python2.7/site-packages/pip/_vendor/packaging/version.py
|
439
|
11949
|
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import collections
import itertools
import re
from ._structures import Infinity
__all__ = [
"parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
]
_Version = collections.namedtuple(
"_Version",
["epoch", "release", "dev", "pre", "post", "local"],
)
def parse(version):
"""
Parse the given version string and return either a :class:`Version` object
or a :class:`LegacyVersion` object depending on if the given version is
a valid PEP 440 version or a legacy version.
"""
try:
return Version(version)
except InvalidVersion:
return LegacyVersion(version)
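# Illustrative behaviour of parse() (not part of the original module):
#
#     parse("1.0.post2")   # valid PEP 440 -> Version instance
#     parse("1.0-foo")     # not PEP 440   -> falls back to LegacyVersion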
class InvalidVersion(ValueError):
"""
An invalid version was found, users should refer to PEP 440.
"""
class _BaseVersion(object):
def __hash__(self):
return hash(self._key)
def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
def __le__(self, other):
return self._compare(other, lambda s, o: s <= o)
def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)
def __ge__(self, other):
return self._compare(other, lambda s, o: s >= o)
def __gt__(self, other):
return self._compare(other, lambda s, o: s > o)
def __ne__(self, other):
return self._compare(other, lambda s, o: s != o)
def _compare(self, other, method):
if not isinstance(other, _BaseVersion):
return NotImplemented
return method(self._key, other._key)
class LegacyVersion(_BaseVersion):
def __init__(self, version):
self._version = str(version)
self._key = _legacy_cmpkey(self._version)
def __str__(self):
return self._version
def __repr__(self):
return "<LegacyVersion({0})>".format(repr(str(self)))
@property
def public(self):
return self._version
@property
def base_version(self):
return self._version
@property
def local(self):
return None
@property
def is_prerelease(self):
return False
@property
def is_postrelease(self):
return False
_legacy_version_component_re = re.compile(
r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
)
_legacy_version_replacement_map = {
"pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
}
def _parse_version_parts(s):
for part in _legacy_version_component_re.split(s):
part = _legacy_version_replacement_map.get(part, part)
if not part or part == ".":
continue
if part[:1] in "0123456789":
# pad for numeric comparison
yield part.zfill(8)
else:
yield "*" + part
# ensure that alpha/beta/candidate are before final
yield "*final"
def _legacy_cmpkey(version):
# We hardcode an epoch of -1 here. A PEP 440 version can only have an epoch
# greater than or equal to 0. This will effectively sort the LegacyVersion,
# which uses the de facto standard originally implemented by setuptools,
# before all PEP 440 versions.
epoch = -1
# This scheme is taken from pkg_resources.parse_version in setuptools prior to
# its adoption of the packaging library.
parts = []
for part in _parse_version_parts(version.lower()):
if part.startswith("*"):
# remove "-" before a prerelease tag
if part < "*final":
while parts and parts[-1] == "*final-":
parts.pop()
# remove trailing zeros from each series of numeric parts
while parts and parts[-1] == "00000000":
parts.pop()
parts.append(part)
parts = tuple(parts)
return epoch, parts
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
v?
(?:
(?:(?P<epoch>[0-9]+)!)? # epoch
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
(?P<pre> # pre-release
[-_\.]?
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
[-_\.]?
(?P<pre_n>[0-9]+)?
)?
(?P<post> # post release
(?:-(?P<post_n1>[0-9]+))
|
(?:
[-_\.]?
(?P<post_l>post|rev|r)
[-_\.]?
(?P<post_n2>[0-9]+)?
)
)?
(?P<dev> # dev release
[-_\.]?
(?P<dev_l>dev)
[-_\.]?
(?P<dev_n>[0-9]+)?
)?
)
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
class Version(_BaseVersion):
_regex = re.compile(
r"^\s*" + VERSION_PATTERN + r"\s*$",
re.VERBOSE | re.IGNORECASE,
)
def __init__(self, version):
# Validate the version and parse it into pieces
match = self._regex.search(version)
if not match:
raise InvalidVersion("Invalid version: '{0}'".format(version))
# Store the parsed out pieces of the version
self._version = _Version(
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
release=tuple(int(i) for i in match.group("release").split(".")),
pre=_parse_letter_version(
match.group("pre_l"),
match.group("pre_n"),
),
post=_parse_letter_version(
match.group("post_l"),
match.group("post_n1") or match.group("post_n2"),
),
dev=_parse_letter_version(
match.group("dev_l"),
match.group("dev_n"),
),
local=_parse_local_version(match.group("local")),
)
# Generate a key which will be used for sorting
self._key = _cmpkey(
self._version.epoch,
self._version.release,
self._version.pre,
self._version.post,
self._version.dev,
self._version.local,
)
def __repr__(self):
return "<Version({0})>".format(repr(str(self)))
def __str__(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
# Pre-release
if self._version.pre is not None:
parts.append("".join(str(x) for x in self._version.pre))
# Post-release
if self._version.post is not None:
parts.append(".post{0}".format(self._version.post[1]))
# Development release
if self._version.dev is not None:
parts.append(".dev{0}".format(self._version.dev[1]))
# Local version segment
if self._version.local is not None:
parts.append(
"+{0}".format(".".join(str(x) for x in self._version.local))
)
return "".join(parts)
@property
def public(self):
return str(self).split("+", 1)[0]
@property
def base_version(self):
parts = []
# Epoch
if self._version.epoch != 0:
parts.append("{0}!".format(self._version.epoch))
# Release segment
parts.append(".".join(str(x) for x in self._version.release))
return "".join(parts)
@property
def local(self):
version_string = str(self)
if "+" in version_string:
return version_string.split("+", 1)[1]
@property
def is_prerelease(self):
return bool(self._version.dev or self._version.pre)
@property
def is_postrelease(self):
return bool(self._version.post)
def _parse_letter_version(letter, number):
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
if number is None:
number = 0
# We normalize any letters to their lower case form
letter = letter.lower()
# We consider some words to be alternate spellings of other words and
# in those cases we want to normalize the spellings to our preferred
# spelling.
if letter == "alpha":
letter = "a"
elif letter == "beta":
letter = "b"
elif letter in ["c", "pre", "preview"]:
letter = "rc"
elif letter in ["rev", "r"]:
letter = "post"
return letter, int(number)
if not letter and number:
        # We assume that if we are given a number but not a letter, then this is
        # using the implicit post release syntax (e.g. 1.0-1)
letter = "post"
return letter, int(number)
_local_version_seperators = re.compile(r"[\._-]")
def _parse_local_version(local):
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_seperators.split(local)
)
def _cmpkey(epoch, release, pre, post, dev, local):
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all of the now
    # leading zeros until we come to something non-zero, then re-reverse the
    # remainder back into the correct order and make it a tuple to use as our
    # sorting key.
release = tuple(
reversed(list(
itertools.dropwhile(
lambda x: x == 0,
reversed(release),
)
))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
# We'll do this by abusing the pre segment, but we _only_ want to do this
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
pre = -Infinity
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
pre = Infinity
# Versions without a post segment should sort before those with one.
if post is None:
post = -Infinity
# Versions without a development segment should sort after those with one.
if dev is None:
dev = Infinity
if local is None:
# Versions without a local segment should sort before those with one.
local = -Infinity
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
# - Alpha numeric segments sort before numeric segments
# - Alpha numeric segments sort lexicographically
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
local = tuple(
(i, "") if isinstance(i, int) else (-Infinity, i)
for i in local
)
return epoch, release, pre, post, dev, local
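# A small, hypothetical sanity check (not part of the upstream module) showing
# how the comparison keys order things: development releases sort before
# pre-releases, pre-releases before the final release, post releases and local
# versions after it, and every LegacyVersion sorts before any PEP 440 version
# because of its hardcoded epoch of -1.
def _ordering_demo():
    versions = [
        Version("1.0"),
        Version("1.0.dev0"),
        Version("1.0a1"),
        Version("1.0.post1"),
        Version("1.0+local.1"),
        LegacyVersion("1.0-whatever"),
    ]
    expected = ["1.0-whatever", "1.0.dev0", "1.0a1", "1.0", "1.0+local.1", "1.0.post1"]
    assert [str(v) for v in sorted(versions)] == expected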
|
apache-2.0
|
AOSPU/external_chromium_org_tools_gyp
|
test/win/gyptest-link-defrelink.py
|
210
|
1683
|
#!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure a relink is performed when a .def file is touched.
"""
import TestGyp
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
target = 'test_deffile_dll_ok'
def_contents = test.read('linker-flags/deffile.def')
# This first build makes sure everything is up to date.
test.run_gyp('deffile.gyp', chdir=CHDIR)
test.build('deffile.gyp', target, chdir=CHDIR)
test.up_to_date('deffile.gyp', target, chdir=CHDIR)
def HasExport(binary, export):
full_path = test.built_file_path(binary, chdir=CHDIR)
output = test.run_dumpbin('/exports', full_path)
return export in output
# Verify that only one function is exported.
if not HasExport('test_deffile_dll_ok.dll', 'AnExportedFunction'):
test.fail_test()
if HasExport('test_deffile_dll_ok.dll', 'AnotherExportedFunction'):
test.fail_test()
# Add AnotherExportedFunction to the def file, then rebuild. If it doesn't
# relink the DLL, then the subsequent check for AnotherExportedFunction will
# fail.
new_def_contents = def_contents + "\n AnotherExportedFunction"
test.write('linker-flags/deffile.def', new_def_contents)
test.build('deffile.gyp', target, chdir=CHDIR)
test.up_to_date('deffile.gyp', target, chdir=CHDIR)
if not HasExport('test_deffile_dll_ok.dll', 'AnExportedFunction'):
test.fail_test()
if not HasExport('test_deffile_dll_ok.dll', 'AnotherExportedFunction'):
test.fail_test()
test.pass_test()
|
bsd-3-clause
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_09_01/aio/operations/_default_security_rules_operations.py
|
1
|
8844
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class DefaultSecurityRulesOperations:
"""DefaultSecurityRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
network_security_group_name: str,
**kwargs
) -> AsyncIterable["_models.SecurityRuleListResult"]:
"""Gets all default security rules in a network security group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SecurityRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_09_01.models.SecurityRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SecurityRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules'} # type: ignore
async def get(
self,
resource_group_name: str,
network_security_group_name: str,
default_security_rule_name: str,
**kwargs
) -> "_models.SecurityRule":
"""Get the specified default network security rule.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_security_group_name: The name of the network security group.
:type network_security_group_name: str
:param default_security_rule_name: The name of the default security rule.
:type default_security_rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: SecurityRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_09_01.models.SecurityRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SecurityRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-09-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'defaultSecurityRuleName': self._serialize.url("default_security_rule_name", default_security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules/{defaultSecurityRuleName}'} # type: ignore
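# Hypothetical usage sketch (not part of the generated client). It assumes the
# versioned aio NetworkManagementClient and azure-identity's DefaultAzureCredential
# are available; the resource group and NSG names are placeholders.
async def _example_default_security_rules(subscription_id: str) -> None:
    from azure.identity.aio import DefaultAzureCredential
    from azure.mgmt.network.v2019_09_01.aio import NetworkManagementClient
    async with DefaultAzureCredential() as credential:
        async with NetworkManagementClient(credential, subscription_id) as client:
            # The pager transparently follows next_link via prepare_request/get_next.
            async for rule in client.default_security_rules.list("my-rg", "my-nsg"):
                print(rule.name)
            rule = await client.default_security_rules.get("my-rg", "my-nsg", "AllowVnetInBound")
            print(rule.access, rule.direction)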
|
mit
|
jralls/gramps
|
gramps/gen/plug/menu/_family.py
|
11
|
1787
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2007-2008 Brian G. Matherly
# Copyright (C) 2008 Gary Burton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Option class representing a family.
"""
#-------------------------------------------------------------------------
#
# gramps modules
#
#-------------------------------------------------------------------------
from . import StringOption
#-------------------------------------------------------------------------
#
# FamilyOption class
#
#-------------------------------------------------------------------------
class FamilyOption(StringOption):
"""
This class describes an option that allows a family from the
database to be selected.
"""
def __init__(self, label):
"""
:param label: A friendly label to be applied to this option.
Example: "Center Family"
:type label: string
:param value: A Gramps ID of a family for this option.
Example: "f11"
:type value: string
:return: nothing
"""
StringOption.__init__(self, label, "")
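def _example_usage():
    """Hypothetical usage sketch (not part of the original module), assuming the
    get_value()/set_value() accessors inherited from the menu option base class.
    """
    option = FamilyOption("Center Family")
    option.set_value("F0001")  # a family Gramps ID
    return option.get_value()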
|
gpl-2.0
|
idiap/rgbd
|
Processing/Processor.py
|
1
|
3672
|
"""
Copyright (c) 2014 Idiap Research Institute, http://www.idiap.ch/
Written by Kenneth Funes <kenneth.funes@idiap.ch>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
from rgbd.Streaming.RGBDStreamer import RGBDStreamer
from rgbd.Rendering.RGBDViewer import RGBDViewer
from PySide import QtCore, QtGui
import sys
class Processor(QtCore.QObject):
"""
Class to handle a stream of RGB-D data and to display it
"""
newFrameSignal = QtCore.Signal()
def __init__(self, rendering=True):
super(Processor, self).__init__()
self.connected=False
self.viewer=None
self.streamer=None
self.frame_callback=None
self.rendering = rendering
self.count = 0
self.app = None
def __del__(self):
self.stop()
def createGUI(self):
        # Creates the widget used to visualize the RGB-D data
self.viewer = RGBDViewer(self.app, render3D=self.rendering, close_callback=self.stop)
self.viewer.show()
self.connect(self.viewer.pauseButton, QtCore.SIGNAL("clicked()"), self.pause)
self.newFrameSignal.connect(self.processFrame)
def pause(self):
"""
Toggle the pause status
"""
self.streamer.pause(not self.streamer.paused)
def run(self, source=0, calibrationFile=None, frame_callback =None):
        # Sets the function to be called each time new frame data is available
self.frame_callback=frame_callback
# Creates the RGB-D data streaming class
self.streamer=RGBDStreamer(frame_callback=self.newFrameSignal.emit, connection_callback=self.connectionUpdate, calibrate = True) # frame_callback=self.newFrame
self.streamer.connect(source, calibrationFile)
# Now create the Qt Application (basically for the Qt events loop)
self.app = QtGui.QApplication(sys.argv)
# Creates the necessary GUI
self.createGUI()
# Puts the streamer to run freely
self.streamer.pause(False)
# Runs the Qt Loop
sys.exit(self.app.exec_())
def stop(self):
"""
Stops the process of data generation
"""
if self.streamer is not None:
self.streamer.pause(True)
self.streamer.disconnect()
self.streamer.kill()
self.streamer=None
def connectionUpdate(self, state=False):
self.connected = state
def processFrame(self):
"""
        This function is called within the Qt event loop, in response to the newFrameSignal activation
"""
if self.streamer is None:
return
data = self.streamer.popFrame()
if data is not None:
self.frame, self.frameIndex = data
self.newFrameAvailable = False
if self.frame is not None:
if self.frame_callback is not None:
self.frame_callback(self.frame, self.frameIndex)
self.viewer.setNewData(self.frame, self.frameIndex)
else:
self.frameMesh, self.state = None, None
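# Hypothetical usage sketch (not part of the original module): stream from the
# first available RGB-D source and print the index of every received frame.
def _print_frame_index(frame, frame_index):
    print('got frame %d' % frame_index)
if __name__ == '__main__':
    processor = Processor(rendering=True)
    processor.run(source=0, calibrationFile=None, frame_callback=_print_frame_index)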
|
lgpl-3.0
|
tcr3dr/ardupilot-releases
|
Tools/LogAnalyzer/tests/TestDupeLogData.py
|
273
|
2651
|
from LogAnalyzer import Test,TestResult
import DataflashLog
class TestDupeLogData(Test):
'''test for duplicated data in log, which has been happening on PX4/Pixhawk'''
def __init__(self):
Test.__init__(self)
self.name = "Dupe Log Data"
def __matchSample(self, sample, sampleStartIndex, logdata):
'''return the line number where a match is found, otherwise return False'''
# ignore if all data in sample is the same value
nSame = 0
for s in sample:
if s[1] == sample[0][1]:
nSame += 1
if nSame == 20:
return False
        # slide a window of the same length over the full ATT Pitch data and look for a repeat of the sample
data = logdata.channels["ATT"]["Pitch"].listData
for i in range(sampleStartIndex, len(data)):
#print "Checking against index %d" % i
if i == sampleStartIndex:
continue # skip matching against ourselves
j = 0
while j<20 and (i+j)<len(data) and data[i+j][1] == sample[j][1]:
#print "### Match found, j=%d, data=%f, sample=%f, log data matched to sample at line %d" % (j,data[i+j][1],sample[j][1],data[i+j][0])
j += 1
if j == 20: # all samples match
return data[i][0]
return False
def run(self, logdata, verbose):
self.result = TestResult()
self.result.status = TestResult.StatusType.GOOD
# this could be made more flexible by not hard-coding to use ATT data, could make it dynamic based on whatever is available as long as it is highly variable
if "ATT" not in logdata.channels:
self.result.status = TestResult.StatusType.UNKNOWN
self.result.statusMessage = "No ATT log data"
return
# pick 10 sample points within the range of ATT data we have
sampleStartIndices = []
attStartIndex = 0
attEndIndex = len(logdata.channels["ATT"]["Pitch"].listData)-1
step = attEndIndex / 11
for i in range(step,attEndIndex-step,step):
sampleStartIndices.append(i)
#print "Dupe data sample point index %d at line %d" % (i, logdata.channels["ATT"]["Pitch"].listData[i][0])
# get 20 datapoints of pitch from each sample location and check for a match elsewhere
sampleIndex = 0
for i in range(sampleStartIndices[0], len(logdata.channels["ATT"]["Pitch"].listData)):
if i == sampleStartIndices[sampleIndex]:
#print "Checking sample %d" % i
sample = logdata.channels["ATT"]["Pitch"].listData[i:i+20]
matchedLine = self.__matchSample(sample, i, logdata)
if matchedLine:
#print "Data from line %d found duplicated at line %d" % (sample[0][0],matchedLine)
self.result.status = TestResult.StatusType.FAIL
self.result.statusMessage = "Duplicate data chunks found in log (%d and %d)" % (sample[0][0],matchedLine)
return
sampleIndex += 1
if sampleIndex >= len(sampleStartIndices):
break
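# A standalone, hypothetical illustration (not part of the original test) of the
# window-matching idea used by __matchSample: slide a fixed-size window over a
# plain list of (lineNumber, value) pairs and report the first line where the
# whole window repeats elsewhere.
def _find_duplicate_window(data, start_index, window=20):
    sample = [v for (_, v) in data[start_index:start_index + window]]
    for i in range(len(data) - window + 1):
        if i == start_index:
            continue
        if [v for (_, v) in data[i:i + window]] == sample:
            return data[i][0]
    return False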
|
gpl-3.0
|
urumican/linux-3.8.8
|
tools/perf/scripts/python/net_dropmonitor.py
|
4235
|
1554
|
# Monitor the system for dropped packets and produce a report of drop locations and counts
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
drop_log = {}
kallsyms = []
def get_kallsyms_table():
global kallsyms
try:
f = open("/proc/kallsyms", "r")
linecount = 0
for line in f:
linecount = linecount+1
f.seek(0)
except:
return
j = 0
for line in f:
loc = int(line.split()[0], 16)
name = line.split()[2]
j = j +1
if ((j % 100) == 0):
print "\r" + str(j) + "/" + str(linecount),
kallsyms.append({ 'loc': loc, 'name' : name})
print "\r" + str(j) + "/" + str(linecount)
kallsyms.sort()
return
def get_sym(sloc):
loc = int(sloc)
for i in kallsyms:
if (i['loc'] >= loc):
return (i['name'], i['loc']-loc)
return (None, 0)
def print_drop_table():
print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT")
for i in drop_log.keys():
(sym, off) = get_sym(i)
if sym == None:
sym = i
print "%25s %25s %25s" % (sym, off, drop_log[i])
def trace_begin():
print "Starting trace (Ctrl-C to dump results)"
def trace_end():
print "Gathering kallsyms data"
get_kallsyms_table()
print_drop_table()
# called from perf, when it finds a corresponding event
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
slocation = str(location)
try:
drop_log[slocation] = drop_log[slocation] + 1
except:
drop_log[slocation] = 1
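# A typical invocation is through perf's script front end (a sketch; event and
# option availability varies with your kernel and perf build):
#   perf record -a -g -e skb:kfree_skb sleep 30
#   perf script -s net_dropmonitor.py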
|
gpl-2.0
|
jhartnett/ipCounter
|
src/ipCounter.py
|
1
|
1779
|
#!/bin/python
#/**************************************************************************
#* File: ipCounter.py
#*
#* This is a basic program to count the total number of IPs
#* in a given range. Input is a txt formatted file similar
#* to the sample provided
#*
#* This updated version uses Python to make it more platform-agnostic.
#* Author: Hon1nbo
#***************************************************************************/
import math
import sys
fileName = sys.argv[1]
ipCount = 0
ipList = ""
# Open the file & read contents in
with open(fileName) as ipListFile:
ipList = ipListFile.read()
tmpTuple = ('not','\n','null')
tmpTupleSmall = ('not',' ','null')
tmpCIDR = 0
tmpTuple = ipList.split("\n")
entriesCount = len(tmpTuple)
x = 0
while (entriesCount - x) != 0:
tmpTupleSmall = tmpTuple[x].partition("/")
if tmpTupleSmall[2] != "":
tmpCount = math.pow(2, (32-int(tmpTupleSmall[2])))
print(tmpTuple[x],": ",int(tmpCount))
ipCount += tmpCount
else:
tmpTupleSmall = tmpTuple[x].partition("-");
if tmpTupleSmall[1] == "-":
startIP = tmpTupleSmall[0].split(".") # start IP
endIP = tmpTupleSmall[2].split(".") # end IP
tmpCount = 0
for octet in range (0,4):
# Compare each octet one by one based on iteration
difference = int(endIP[octet]) - int(startIP[octet])
# account for the inclusion of smaller number
# ex) 192.168.1.0-192.168.1.255
if difference != 0:
difference += 1
# 256 addresses in each octet, raise to power to do maths
tmpCount += (difference * pow(256, (3 - octet)))
print(tmpTuple[x],": ",int(tmpCount))
ipCount += tmpCount
else:
print(tmpTuple[x],": 1")
ipCount += 1
x += 1
print('iterated through ',int(x),' entries')
print('Total IPs Counted: ',int(ipCount))
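# Optional cross-check helper (hypothetical, not part of the original script):
# the standard library's ipaddress module should agree with the CIDR arithmetic
# above, e.g. crosscheck_cidr('192.168.1.0/24') == 256.
def crosscheck_cidr(cidr):
    import ipaddress
    return ipaddress.ip_network(cidr, strict=False).num_addresses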
|
apache-2.0
|
aregee/moksaya
|
userena/migrations/0001_initial.py
|
7
|
5639
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from userena.utils import user_model_label
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'UserenaSignup'
db.create_table('userena_userenasignup', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name=u'userena_signup', unique=True, to=orm[user_model_label])),
('last_active', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('activation_key', self.gf('django.db.models.fields.CharField')(max_length=40, blank=True)),
('activation_notification_send', self.gf('django.db.models.fields.BooleanField')(default=False)),
('email_unconfirmed', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('email_confirmation_key', self.gf('django.db.models.fields.CharField')(max_length=40, blank=True)),
('email_confirmation_key_created', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
))
db.send_create_signal('userena', ['UserenaSignup'])
def backwards(self, orm):
# Deleting model 'UserenaSignup'
db.delete_table('userena_userenasignup')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': user_model_label.split('.')[-1]},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'userena.userenasignup': {
'Meta': {'object_name': 'UserenaSignup'},
'activation_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'activation_notification_send': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_confirmation_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'email_confirmation_key_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'email_unconfirmed': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'userena_signup'", 'unique': 'True', 'to': "orm['%s']" % user_model_label})
}
}
complete_apps = ['userena']
|
mit
|
JSkernel/G_pro2_msm8974_JSKernel
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py
|
11088
|
3246
|
# Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
return defaultdict(autodict)
flag_fields = autodict()
symbolic_fields = autodict()
def define_flag_field(event_name, field_name, delim):
flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
# nothing to do, really
pass
def define_symbolic_value(event_name, field_name, value, field_str):
symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
string = ""
if flag_fields[event_name][field_name]:
print_delim = 0
keys = flag_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string += flag_fields[event_name][field_name]['values'][idx]
break
if idx and (value & idx) == idx:
if print_delim and flag_fields[event_name][field_name]['delim']:
string += " " + flag_fields[event_name][field_name]['delim'] + " "
string += flag_fields[event_name][field_name]['values'][idx]
print_delim = 1
value &= ~idx
return string
def symbol_str(event_name, field_name, value):
string = ""
if symbolic_fields[event_name][field_name]:
keys = symbolic_fields[event_name][field_name]['values'].keys()
keys.sort()
for idx in keys:
if not value and not idx:
string = symbolic_fields[event_name][field_name]['values'][idx]
break
if (value == idx):
string = symbolic_fields[event_name][field_name]['values'][idx]
break
return string
trace_flags = { 0x00: "NONE", \
0x01: "IRQS_OFF", \
0x02: "IRQS_NOSUPPORT", \
0x04: "NEED_RESCHED", \
0x08: "HARDIRQ", \
0x10: "SOFTIRQ" }
def trace_flag_str(value):
string = ""
print_delim = 0
keys = trace_flags.keys()
for idx in keys:
if not value and not idx:
string += "NONE"
break
if idx and (value & idx) == idx:
if print_delim:
string += " | ";
string += trace_flags[idx]
print_delim = 1
value &= ~idx
return string
def taskState(state):
states = {
0 : "R",
1 : "S",
2 : "D",
64: "DEAD"
}
if state not in states:
return "Unknown"
return states[state]
class EventHeaders:
def __init__(self, common_cpu, common_secs, common_nsecs,
common_pid, common_comm):
self.cpu = common_cpu
self.secs = common_secs
self.nsecs = common_nsecs
self.pid = common_pid
self.comm = common_comm
def ts(self):
return (self.secs * (10 ** 9)) + self.nsecs
def ts_format(self):
return "%d.%d" % (self.secs, int(self.nsecs / 1000))
|
gpl-2.0
|
dunkhong/grr
|
grr/test_lib/db_test_lib.py
|
1
|
2046
|
#!/usr/bin/env python
"""Test utilities for RELDB-related testing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import functools
import sys
import mock
from grr_response_core.lib.util import compatibility
from grr_response_server import data_store
from grr_response_server.databases import db as abstract_db
from grr_response_server.databases import db_test_mixin
from grr_response_server.databases import mem
from grr_response_server.databases import mysql_test
def TestDatabases(mysql=True):
"""Decorator that creates additional RELDB-enabled test classes."""
def _TestDatabasesDecorator(cls):
"""Decorator that creates additional RELDB-enabled test classes."""
module = sys.modules[cls.__module__]
cls_name = compatibility.GetName(cls)
# Prevent MRO issues caused by inheriting the same Mixin multiple times.
base_classes = ()
if not issubclass(cls, db_test_mixin.GlobalDatabaseTestMixin):
base_classes += (db_test_mixin.GlobalDatabaseTestMixin,)
if mysql:
db_test_cls_name = "{}_MySQLEnabled".format(cls_name)
db_test_cls = compatibility.MakeType(
name=db_test_cls_name,
base_classes=base_classes +
(mysql_test.MySQLDatabaseProviderMixin, cls),
namespace={})
setattr(module, db_test_cls_name, db_test_cls)
return cls
return _TestDatabasesDecorator
def WithDatabase(func):
"""A decorator for database-dependent test methods.
  This decorator is intended for tests that need to access the database in their
code. It will also augment the test function signature so that the database
object is provided and can be manipulated.
Args:
func: A test method to be decorated.
Returns:
A database-aware function.
"""
@functools.wraps(func)
def Wrapper(*args, **kwargs):
db = abstract_db.DatabaseValidationWrapper(mem.InMemoryDB())
with mock.patch.object(data_store, "REL_DB", db):
func(*(args + (db,)), **kwargs)
return Wrapper
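# Hypothetical usage sketch (not part of the original helpers): the decorated
# test method receives a fresh in-memory database as an extra positional
# argument while data_store.REL_DB is patched for the duration of the call.
# The user-related database methods are assumptions about the abstract Database API.
class _ExampleDatabaseTest(object):
  @WithDatabase
  def testWritesUser(self, db):
    db.WriteGRRUser("testuser")
    assert db.ReadGRRUser("testuser") is not None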
|
apache-2.0
|
npdoty/pywikibot
|
scripts/editarticle.py
|
3
|
4128
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Edit a Wikipedia article with your favourite editor.
TODO: - non existing pages
- edit conflicts
- minor edits
- watch/unwatch
- ...
The following parameters are supported:
-r Edit redirect pages without following them
--edit_redirect automatically.
--edit-redirect
-p P Choose which page to edit.
--page P This argument can be passed positionally.
-w Add the page to the user's watchlist after editing.
--watch
"""
#
# (C) Gerrit Holl, 2004
# (C) Pywikibot team, 2004-2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
import argparse
import os
import sys
import tempfile
import pywikibot
from pywikibot import i18n
from pywikibot.editor import TextEditor
class ArticleEditor(object):
"""Edit a wiki page."""
    # join lines if a line starts with one of these characters
# TODO: No apparent usage
# joinchars = string.letters + '[]' + string.digits
def __init__(self, *args):
"""Constructor."""
self.set_options(*args)
self.setpage()
self.site = pywikibot.Site()
def set_options(self, *args):
"""Parse commandline and set options attribute."""
my_args = pywikibot.handle_args(args)
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument("-r", "--edit_redirect", "--edit-redirect",
action="store_true", help="Ignore/edit redirects")
parser.add_argument("-p", "--page", help="Page to edit")
parser.add_argument("-w", "--watch", action="store_true",
help="Watch article after edit")
# convenience positional argument so we can act like a normal editor
parser.add_argument("wikipage", nargs="?", help="Page to edit")
self.options = parser.parse_args(my_args)
if self.options.page and self.options.wikipage:
pywikibot.error("Multiple pages passed. Please specify a single "
"page to edit.")
sys.exit(1)
self.options.page = self.options.page or self.options.wikipage
def setpage(self):
"""Set page and page title."""
site = pywikibot.Site()
pageTitle = self.options.page or pywikibot.input("Page to edit:")
self.page = pywikibot.Page(pywikibot.Link(pageTitle, site))
if not self.options.edit_redirect and self.page.isRedirectPage():
self.page = self.page.getRedirectTarget()
def handle_edit_conflict(self, new):
"""When an edit conflict occures save the new text to a file."""
fn = os.path.join(tempfile.gettempdir(), self.page.title())
fp = open(fn, 'w')
fp.write(new)
fp.close()
pywikibot.output(
"An edit conflict has arisen. Your edit has been saved to %s. "
"Please try again." % fn)
def run(self):
"""Run the bot."""
self.site.login()
try:
old = self.page.get(get_redirect=self.options.edit_redirect)
except pywikibot.NoPage:
old = ""
textEditor = TextEditor()
new = textEditor.edit(old)
if new and old != new:
pywikibot.showDiff(old, new)
changes = pywikibot.input("What did you change?")
comment = i18n.twtranslate(pywikibot.Site(), 'editarticle-edit',
{'description': changes})
try:
self.page.put(new, summary=comment, minorEdit=False,
watchArticle=self.options.watch)
except pywikibot.EditConflict:
self.handle_edit_conflict(new)
else:
pywikibot.output("Nothing changed")
def main(*args):
"""
Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
@param args: command line arguments
@type args: list of unicode
"""
app = ArticleEditor(*args)
app.run()
if __name__ == "__main__":
main()
|
mit
|
fja05680/pinkfish
|
examples/310.cryptocurrencies/strategy.py
|
1
|
6833
|
"""
The SMA-ROC-portfolio strategy.
This is the SMA-ROC strategy applied to a portfolio.
SMA-ROC is a rate of change calculation smoothed by
a moving average.
This module allows us to examine this strategy and try different
periods, stop loss percents, margin, and whether to use a regime filter
or not. We split up the total capital between the symbols in the
portfolio and allocate based on either equal weight or volatility
parity weight (inverse volatility).
"""
import datetime
import matplotlib.pyplot as plt
import pandas as pd
from talib.abstract import *
import pinkfish as pf
# A custom indicator to use in this strategy.
def SMA_ROC(ts, mom_lookback=1, sma_timeperiod=20, price='close'):
""" Returns a series which is an SMA with of a daily MOM. """
mom = pf.MOMENTUM(ts, lookback=mom_lookback, time_frame='daily', price=price)
sma_mom = SMA(mom, timeperiod=sma_timeperiod)
return sma_mom
default_options = {
'use_adj' : False,
'use_cache' : True,
'stock_market_calendar' : False,
'stop_loss_pct' : 1.0,
'margin' : 1,
'lookback' : 1,
'sma_timeperiod': 20,
'sma_pct_band': 0,
'use_regime_filter' : True,
'use_vola_weight' : False
}
class Strategy:
def __init__(self, symbols, capital, start, end, options=default_options):
self.symbols = symbols
self.capital = capital
self.start = start
self.end = end
self.options = options.copy()
self.ts = None
self.rlog = None
self.tlog = None
self.dbal = None
self.stats = None
def _algo(self):
pf.TradeLog.cash = self.capital
pf.TradeLog.margin = self.options['margin']
# Create a stop_loss dict for each symbol.
stop_loss = {symbol:0 for symbol in self.portfolio.symbols}
        # stop loss pct should range between 0 and 1; the user may have
        # expressed this as a percentage 0-100
if self.options['stop_loss_pct'] > 1:
self.options['stop_loss_pct'] /= 100
upper_band = self.options['sma_pct_band']/1000
lower_band = -self.options['sma_pct_band']/1000
# Loop though timeseries.
for i, row in enumerate(self.ts.itertuples()):
date = row.Index.to_pydatetime()
end_flag = pf.is_last_row(self.ts, i)
# Get the prices for this row, put in dict p.
p = self.portfolio.get_prices(row,
fields=['close', 'regime', 'sma_roc', 'vola'])
# Sum the inverse volatility for each row.
inverse_vola_sum = 0
for symbol in self.portfolio.symbols:
inverse_vola_sum += 1 / p[symbol]['vola']
# Loop though each symbol in portfolio.
for symbol in self.portfolio.symbols:
# Use variables to make code cleaner.
close = p[symbol]['close']
regime = p[symbol]['regime']
sma_roc = p[symbol]['sma_roc']
inverse_vola = 1 / p[symbol]['vola']
# Sell Logic
# First we check if an existing position in symbol should be sold
                # - sell if sma_roc < 0
                # - sell if price closes below stop loss
                # - sell at end of data by adjusting the percent to zero
if symbol in self.portfolio.positions:
if sma_roc < lower_band or close < stop_loss[symbol] or end_flag:
if close < stop_loss[symbol]: print('STOP LOSS!!!')
self.portfolio.adjust_percent(date, close, 0, symbol, row)
# Buy Logic
                # First we check to see if there is an existing position; if so, do nothing
# - Buy if (regime > 0 or not use_regime_filter) and sma_roc > 0
else:
if (regime > 0 or not self.options['use_regime_filter']) and sma_roc > upper_band:
# Use volatility weight.
if self.options['use_vola_weight']:
weight = inverse_vola / inverse_vola_sum
# Use equal weight.
else:
weight = 1 / len(self.portfolio.symbols)
self.portfolio.adjust_percent(date, close, weight, symbol, row)
# Set stop loss
stop_loss[symbol] = (1-self.options['stop_loss_pct'])*close
# record daily balance
self.portfolio.record_daily_balance(date, row)
def run(self):
self.portfolio = pf.Portfolio()
self.ts = self.portfolio.fetch_timeseries(self.symbols, self.start, self.end,
fields=['close'], use_cache=self.options['use_cache'],
use_adj=self.options['use_adj'],
dir_name='cryptocurrencies',
stock_market_calendar=self.options['stock_market_calendar'])
# Add technical indicator: 200 sma regime filter for each symbol.
def _crossover(ts, ta_param, input_column):
return pf.CROSSOVER(ts, timeperiod_fast=1, timeperiod_slow=200,
price=input_column, prevday=False)
self.ts = self.portfolio.add_technical_indicator(
self.ts, ta_func=_crossover, ta_param=None,
output_column_suffix='regime', input_column_suffix='close')
# Add technical indicator: volatility.
def _volatility(ts, ta_param, input_column):
return pf.VOLATILITY(ts, price=input_column)
self.ts = self.portfolio.add_technical_indicator(
self.ts, ta_func=_volatility, ta_param=None,
output_column_suffix='vola', input_column_suffix='close')
        # Add technical indicator: X day SMA_ROC.
def _sma_roc(ts, ta_param, input_column):
return SMA_ROC(ts, mom_lookback=self.options['lookback'],
sma_timeperiod=self.options['sma_timeperiod'],
price=input_column)
self.ts = self.portfolio.add_technical_indicator(
self.ts, ta_func=_sma_roc, ta_param=None,
output_column_suffix='sma_roc', input_column_suffix='close')
# Finalize timeseries.
self.ts, self.start = self.portfolio.finalize_timeseries(self.ts, self.start)
# Init trade log objects.
self.portfolio.init_trade_logs(self.ts)
self._algo()
self._get_logs()
self._get_stats()
def _get_logs(self):
self.rlog, self.tlog, self.dbal = self.portfolio.get_logs()
def _get_stats(self):
self.stats = pf.stats(self.ts, self.tlog, self.dbal, self.capital)
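# Hypothetical usage sketch (not part of the original module); the symbols,
# capital, and date range below are placeholders.
if __name__ == '__main__':
    s = Strategy(symbols=['BTC-USD', 'ETH-USD'],
                 capital=10000,
                 start=datetime.datetime(2018, 1, 1),
                 end=datetime.datetime(2020, 1, 1),
                 options=default_options)
    s.run()
    print(s.stats)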
|
mit
|
codemac/servo
|
python/mozlog/mozlog/structured/formatters/xunit.py
|
46
|
3804
|
import types
from xml.etree import ElementTree
import base
def format_test_id(test_id):
"""Take a test id and return something that looks a bit like
a class path"""
if type(test_id) not in types.StringTypes:
#Not sure how to deal with reftests yet
raise NotImplementedError
    #Turn a path into something like a class hierarchy
return test_id.replace('.', '_').replace('/', ".")
class XUnitFormatter(base.BaseFormatter):
"""Formatter that produces XUnit-style XML output.
The tree is created in-memory so this formatter may be problematic
with very large log files.
Note that the data model isn't a perfect match. In
particular XUnit assumes that each test has a unittest-style
class name and function name, which isn't the case for us. The
implementation currently replaces path names with something that
looks like class names, but this doesn't work for test types that
actually produce class names, or for test types that have multiple
components in their test id (e.g. reftests)."""
def __init__(self):
self.tree = ElementTree.ElementTree()
self.root = None
self.suite_start_time = None
self.test_start_time = None
self.tests_run = 0
self.errors = 0
self.failures = 0
self.skips = 0
def suite_start(self, data):
self.root = ElementTree.Element("testsuite")
self.tree.root = self.root
self.suite_start_time = data["time"]
def test_start(self, data):
self.tests_run += 1
self.test_start_time = data["time"]
def _create_result(self, data):
test = ElementTree.SubElement(self.root, "testcase")
name = format_test_id(data["test"])
extra = data.get('extra') or {}
test.attrib["classname"] = extra.get('class_name') or name
if "subtest" in data:
test.attrib["name"] = data["subtest"]
# We generally don't know how long subtests take
test.attrib["time"] = "0"
else:
if "." in name:
test_name = name.rsplit(".", 1)[1]
else:
test_name = name
test.attrib["name"] = extra.get('method_name') or test_name
test.attrib["time"] = "%.2f" % ((data["time"] - self.test_start_time) / 1000.0)
if ("expected" in data and data["expected"] != data["status"]):
if data["status"] in ("NOTRUN", "ASSERT", "ERROR"):
result = ElementTree.SubElement(test, "error")
self.errors += 1
else:
result = ElementTree.SubElement(test, "failure")
self.failures += 1
result.attrib["message"] = "Expected %s, got %s" % (data["expected"], data["status"])
result.text = '%s\n%s' % (data.get('stack', ''), data.get('message', ''))
elif data["status"] == "SKIP":
result = ElementTree.SubElement(test, "skipped")
self.skips += 1
def test_status(self, data):
self._create_result(data)
def test_end(self, data):
self._create_result(data)
def suite_end(self, data):
self.root.attrib.update({"tests": str(self.tests_run),
"errors": str(self.errors),
"failures": str(self.failures),
"skips": str(self.skips),
"time": "%.2f" % (
(data["time"] - self.suite_start_time) / 1000.0)})
xml_string = ElementTree.tostring(self.root, encoding="utf8")
# pretty printing can not be done from xml.etree
from xml.dom import minidom
return minidom.parseString(xml_string).toprettyxml(encoding="utf8")
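# Hypothetical usage sketch (not part of the original formatter): feed it the
# minimal structured-log dicts it reads above (times are in milliseconds) and
# return the pretty-printed XML. The test id and timestamps are placeholders.
def _xunit_demo():
    formatter = XUnitFormatter()
    formatter.suite_start({"time": 0})
    formatter.test_start({"time": 1})
    formatter.test_end({"time": 5, "test": "dom/events/click.html",
                        "status": "FAIL", "expected": "PASS",
                        "message": "unexpected failure"})
    return formatter.suite_end({"time": 10})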
|
mpl-2.0
|
daenamkim/ansible
|
lib/ansible/modules/cloud/amazon/ec2_group_facts.py
|
48
|
5042
|
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ec2_group_facts
short_description: Gather facts about ec2 security groups in AWS.
description:
- Gather facts about ec2 security groups in AWS.
version_added: "2.3"
requirements: [ boto3 ]
author: "Henrique Rodrigues (github.com/Sodki)"
options:
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value. See \
U(https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeSecurityGroups.html) for \
possible filters. Filter names and values are case sensitive. You can also use underscores (_) \
instead of dashes (-) in the filter keys, which will take precedence in case of conflict.
required: false
default: {}
notes:
- By default, the module will return all security groups. To limit results use the appropriate filters.
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Gather facts about all security groups
- ec2_group_facts:
# Gather facts about all security groups in a specific VPC
- ec2_group_facts:
filters:
vpc-id: vpc-12345678
# Gather facts about all security groups in a specific VPC
- ec2_group_facts:
filters:
vpc-id: vpc-12345678
# Gather facts about a security group
- ec2_group_facts:
filters:
group-name: example-1
# Gather facts about a security group by id
- ec2_group_facts:
filters:
group-id: sg-12345678
# Gather facts about a security group with multiple filters, also mixing the use of underscores as filter keys
- ec2_group_facts:
filters:
group_id: sg-12345678
vpc-id: vpc-12345678
# Gather facts about various security groups
- ec2_group_facts:
filters:
group-name:
- example-1
- example-2
- example-3
# Gather facts about any security group with a tag key Name and value Example. The quotes around 'tag:Name' are important because of the colon in the key
- ec2_group_facts:
filters:
"tag:Name": Example
'''
RETURN = '''
security_groups:
description: Security groups that match the provided filters. Each element consists of a dict with all the information related to that security group.
type: list
returned: always
sample:
'''
import traceback
try:
from botocore.exceptions import ClientError
except ImportError:
pass # caught by imported HAS_BOTO3
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import (ec2_argument_spec, boto3_conn, HAS_BOTO3, get_aws_connection_info,
boto3_tag_list_to_ansible_dict, ansible_dict_to_boto3_filter_list,
camel_dict_to_snake_dict)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
filters=dict(default={}, type='dict')
)
)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
if region:
connection = boto3_conn(
module,
conn_type='client',
resource='ec2',
region=region,
endpoint=ec2_url,
**aws_connect_params
)
else:
module.fail_json(msg="region must be specified")
# Replace filter key underscores with dashes, for compatibility, except if we're dealing with tags
sanitized_filters = module.params.get("filters")
for key in sanitized_filters:
if not key.startswith("tag:"):
sanitized_filters[key.replace("_", "-")] = sanitized_filters.pop(key)
try:
security_groups = connection.describe_security_groups(
Filters=ansible_dict_to_boto3_filter_list(sanitized_filters)
)
except ClientError as e:
module.fail_json(msg=e.message, exception=traceback.format_exc())
snaked_security_groups = []
for security_group in security_groups['SecurityGroups']:
# Modify boto3 tags list to be ansible friendly dict
# but don't camel case tags
security_group = camel_dict_to_snake_dict(security_group)
security_group['tags'] = boto3_tag_list_to_ansible_dict(security_group.get('tags', {}), tag_name_key_name='key', tag_value_key_name='value')
snaked_security_groups.append(security_group)
module.exit_json(security_groups=snaked_security_groups)
if __name__ == '__main__':
main()
|
gpl-3.0
|
Fusion-Rom/android_external_chromium_org_third_party_WebKit
|
Tools/Scripts/webkitpy/style/checkers/test_expectations_unittest.py
|
45
|
4347
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import unittest
from test_expectations import TestExpectationsChecker
from webkitpy.common.host_mock import MockHost
class ErrorCollector(object):
"""An error handler class for unit tests."""
def __init__(self):
self._errors = []
self.turned_off_filtering = False
def turn_off_line_filtering(self):
self.turned_off_filtering = True
def __call__(self, lineno, category, confidence, message):
self._errors.append('%s [%s] [%d]' % (message, category, confidence))
return True
def get_errors(self):
return ''.join(self._errors)
def reset_errors(self):
self._errors = []
self.turned_off_filtering = False
class TestExpectationsTestCase(unittest.TestCase):
"""TestCase for test_expectations.py"""
def setUp(self):
self._error_collector = ErrorCollector()
self._test_file = 'passes/text.html'
def assert_lines_lint(self, lines, should_pass, expected_output=None):
self._error_collector.reset_errors()
host = MockHost()
checker = TestExpectationsChecker('test/TestExpectations',
self._error_collector, host=host)
# We should have a valid port, but override it with a test port so we
# can check the lines.
self.assertIsNotNone(checker._port_obj)
checker._port_obj = host.port_factory.get('test-mac-leopard')
checker.check_test_expectations(expectations_str='\n'.join(lines),
tests=[self._test_file])
checker.check_tabs(lines)
if should_pass:
self.assertEqual('', self._error_collector.get_errors())
elif expected_output:
self.assertEqual(expected_output, self._error_collector.get_errors())
else:
self.assertNotEquals('', self._error_collector.get_errors())
# Note that a patch might change a line that introduces errors elsewhere, but we
# don't want to lint the whole file (it can unfairly punish patches for pre-existing errors).
# We rely on a separate lint-webkitpy step on the bots to keep the whole file okay.
# FIXME: See https://bugs.webkit.org/show_bug.cgi?id=104712 .
self.assertFalse(self._error_collector.turned_off_filtering)
def test_valid_expectations(self):
self.assert_lines_lint(["crbug.com/1234 [ Mac ] passes/text.html [ Pass Failure ]"], should_pass=True)
def test_invalid_expectations(self):
self.assert_lines_lint(["Bug(me) passes/text.html [ Give Up]"], should_pass=False)
def test_tab(self):
self.assert_lines_lint(["\twebkit.org/b/1 passes/text.html [ Pass ]"], should_pass=False, expected_output="Line contains tab character. [whitespace/tab] [5]")
|
bsd-3-clause
|
incaser/odoo-odoo
|
addons/stock/procurement.py
|
227
|
22183
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT, float_compare, float_round
from openerp import SUPERUSER_ID
from dateutil.relativedelta import relativedelta
from datetime import datetime
from psycopg2 import OperationalError
import openerp
class procurement_group(osv.osv):
_inherit = 'procurement.group'
_columns = {
'partner_id': fields.many2one('res.partner', 'Partner')
}
class procurement_rule(osv.osv):
_inherit = 'procurement.rule'
def _get_action(self, cr, uid, context=None):
result = super(procurement_rule, self)._get_action(cr, uid, context=context)
return result + [('move', _('Move From Another Location'))]
def _get_rules(self, cr, uid, ids, context=None):
res = []
for route in self.browse(cr, uid, ids):
res += [x.id for x in route.pull_ids]
return res
_columns = {
'location_id': fields.many2one('stock.location', 'Procurement Location'),
'location_src_id': fields.many2one('stock.location', 'Source Location',
help="Source location is action=move"),
'route_id': fields.many2one('stock.location.route', 'Route',
help="If route_id is False, the rule is global"),
'procure_method': fields.selection([('make_to_stock', 'Take From Stock'), ('make_to_order', 'Create Procurement')], 'Move Supply Method', required=True,
help="""Determines the procurement method of the stock move that will be generated: whether it will need to 'take from the available stock' in its source location or needs to ignore its stock and create a procurement over there."""),
'route_sequence': fields.related('route_id', 'sequence', string='Route Sequence',
store={
'stock.location.route': (_get_rules, ['sequence'], 10),
'procurement.rule': (lambda self, cr, uid, ids, c={}: ids, ['route_id'], 10),
}),
'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type',
help="Picking Type determines the way the picking should be shown in the view, reports, ..."),
'delay': fields.integer('Number of Days'),
'partner_address_id': fields.many2one('res.partner', 'Partner Address'),
        'propagate': fields.boolean('Propagate cancel and split', help='If checked, when the previous move (which was generated by a next procurement) is cancelled or split, the move generated by this rule will be cancelled or split too'),
'warehouse_id': fields.many2one('stock.warehouse', 'Served Warehouse', help='The warehouse this rule is for'),
'propagate_warehouse_id': fields.many2one('stock.warehouse', 'Warehouse to Propagate', help="The warehouse to propagate on the created move/procurement, which can be different of the warehouse this rule is for (e.g for resupplying rules from another warehouse)"),
}
_defaults = {
'procure_method': 'make_to_stock',
'propagate': True,
'delay': 0,
}
class procurement_order(osv.osv):
_inherit = "procurement.order"
_columns = {
'location_id': fields.many2one('stock.location', 'Procurement Location'), # not required because task may create procurements that aren't linked to a location with sale_service
'partner_dest_id': fields.many2one('res.partner', 'Customer Address', help="In case of dropshipping, we need to know the destination address more precisely"),
'move_ids': fields.one2many('stock.move', 'procurement_id', 'Moves', help="Moves created by the procurement"),
'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Move which caused (created) the procurement"),
'route_ids': fields.many2many('stock.location.route', 'stock_location_route_procurement', 'procurement_id', 'route_id', 'Preferred Routes', help="Preferred route to be followed by the procurement order. Usually copied from the generating document (SO) but could be set up manually."),
'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', help="Warehouse to consider for the route selection"),
'orderpoint_id': fields.many2one('stock.warehouse.orderpoint', 'Minimum Stock Rule'),
}
def propagate_cancel(self, cr, uid, procurement, context=None):
if procurement.rule_id.action == 'move' and procurement.move_ids:
self.pool.get('stock.move').action_cancel(cr, uid, [m.id for m in procurement.move_ids], context=context)
def cancel(self, cr, uid, ids, context=None):
if context is None:
context = {}
to_cancel_ids = self.get_cancel_ids(cr, uid, ids, context=context)
ctx = context.copy()
        #set the context for the propagation of the procurement cancellation
ctx['cancel_procurement'] = True
for procurement in self.browse(cr, uid, to_cancel_ids, context=ctx):
self.propagate_cancel(cr, uid, procurement, context=ctx)
return super(procurement_order, self).cancel(cr, uid, to_cancel_ids, context=ctx)
def _find_parent_locations(self, cr, uid, procurement, context=None):
location = procurement.location_id
res = [location.id]
while location.location_id:
location = location.location_id
res.append(location.id)
return res
def change_warehouse_id(self, cr, uid, ids, warehouse_id, context=None):
if warehouse_id:
warehouse = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context)
return {'value': {'location_id': warehouse.lot_stock_id.id}}
return {}
def _search_suitable_rule(self, cr, uid, procurement, domain, context=None):
        '''We first try to find a rule among the ones defined on the procurement order group; if none is found, we try the routes defined for the product, and finally we fall back on the default behavior.'''
pull_obj = self.pool.get('procurement.rule')
warehouse_route_ids = []
if procurement.warehouse_id:
domain += ['|', ('warehouse_id', '=', procurement.warehouse_id.id), ('warehouse_id', '=', False)]
warehouse_route_ids = [x.id for x in procurement.warehouse_id.route_ids]
product_route_ids = [x.id for x in procurement.product_id.route_ids + procurement.product_id.categ_id.total_route_ids]
procurement_route_ids = [x.id for x in procurement.route_ids]
res = pull_obj.search(cr, uid, domain + [('route_id', 'in', procurement_route_ids)], order='route_sequence, sequence', context=context)
if not res:
res = pull_obj.search(cr, uid, domain + [('route_id', 'in', product_route_ids)], order='route_sequence, sequence', context=context)
if not res:
res = warehouse_route_ids and pull_obj.search(cr, uid, domain + [('route_id', 'in', warehouse_route_ids)], order='route_sequence, sequence', context=context) or []
if not res:
res = pull_obj.search(cr, uid, domain + [('route_id', '=', False)], order='sequence', context=context)
return res
def _find_suitable_rule(self, cr, uid, procurement, context=None):
rule_id = super(procurement_order, self)._find_suitable_rule(cr, uid, procurement, context=context)
if not rule_id:
#a rule defined on 'Stock' is suitable for a procurement in 'Stock\Bin A'
all_parent_location_ids = self._find_parent_locations(cr, uid, procurement, context=context)
rule_id = self._search_suitable_rule(cr, uid, procurement, [('location_id', 'in', all_parent_location_ids)], context=context)
rule_id = rule_id and rule_id[0] or False
return rule_id
def _run_move_create(self, cr, uid, procurement, context=None):
''' Returns a dictionary of values that will be used to create a stock move from a procurement.
This function assumes that the given procurement has a rule (action == 'move') set on it.
:param procurement: browse record
:rtype: dictionary
'''
newdate = (datetime.strptime(procurement.date_planned, '%Y-%m-%d %H:%M:%S') - relativedelta(days=procurement.rule_id.delay or 0)).strftime('%Y-%m-%d %H:%M:%S')
group_id = False
if procurement.rule_id.group_propagation_option == 'propagate':
group_id = procurement.group_id and procurement.group_id.id or False
elif procurement.rule_id.group_propagation_option == 'fixed':
group_id = procurement.rule_id.group_id and procurement.rule_id.group_id.id or False
#it is possible that we've already got some move done, so check for the done qty and create
#a new move with the correct qty
already_done_qty = 0
already_done_qty_uos = 0
for move in procurement.move_ids:
already_done_qty += move.product_uom_qty if move.state == 'done' else 0
already_done_qty_uos += move.product_uos_qty if move.state == 'done' else 0
qty_left = max(procurement.product_qty - already_done_qty, 0)
qty_uos_left = max(procurement.product_uos_qty - already_done_qty_uos, 0)
vals = {
'name': procurement.name,
'company_id': procurement.rule_id.company_id.id or procurement.rule_id.location_src_id.company_id.id or procurement.rule_id.location_id.company_id.id or procurement.company_id.id,
'product_id': procurement.product_id.id,
'product_uom': procurement.product_uom.id,
'product_uom_qty': qty_left,
'product_uos_qty': (procurement.product_uos and qty_uos_left) or qty_left,
'product_uos': (procurement.product_uos and procurement.product_uos.id) or procurement.product_uom.id,
'partner_id': procurement.rule_id.partner_address_id.id or (procurement.group_id and procurement.group_id.partner_id.id) or False,
'location_id': procurement.rule_id.location_src_id.id,
'location_dest_id': procurement.location_id.id,
'move_dest_id': procurement.move_dest_id and procurement.move_dest_id.id or False,
'procurement_id': procurement.id,
'rule_id': procurement.rule_id.id,
'procure_method': procurement.rule_id.procure_method,
'origin': procurement.origin,
'picking_type_id': procurement.rule_id.picking_type_id.id,
'group_id': group_id,
'route_ids': [(4, x.id) for x in procurement.route_ids],
'warehouse_id': procurement.rule_id.propagate_warehouse_id.id or procurement.rule_id.warehouse_id.id,
'date': newdate,
'date_expected': newdate,
'propagate': procurement.rule_id.propagate,
'priority': procurement.priority,
}
return vals
def _run(self, cr, uid, procurement, context=None):
if procurement.rule_id and procurement.rule_id.action == 'move':
if not procurement.rule_id.location_src_id:
self.message_post(cr, uid, [procurement.id], body=_('No source location defined!'), context=context)
return False
move_obj = self.pool.get('stock.move')
move_dict = self._run_move_create(cr, uid, procurement, context=context)
#create the move as SUPERUSER because the current user may not have the rights to do it (mto product launched by a sale for example)
move_obj.create(cr, SUPERUSER_ID, move_dict, context=context)
return True
return super(procurement_order, self)._run(cr, uid, procurement, context=context)
def run(self, cr, uid, ids, autocommit=False, context=None):
new_ids = [x.id for x in self.browse(cr, uid, ids, context=context) if x.state not in ('running', 'done', 'cancel')]
res = super(procurement_order, self).run(cr, uid, new_ids, autocommit=autocommit, context=context)
#after all the procurements are run, check if some created a draft stock move that needs to be confirmed
        #(we do that in batch because it speeds up the picking assignment and the picking state computation)
move_to_confirm_ids = []
for procurement in self.browse(cr, uid, new_ids, context=context):
if procurement.state == "running" and procurement.rule_id and procurement.rule_id.action == "move":
move_to_confirm_ids += [m.id for m in procurement.move_ids if m.state == 'draft']
if move_to_confirm_ids:
self.pool.get('stock.move').action_confirm(cr, uid, move_to_confirm_ids, context=context)
return res
def _check(self, cr, uid, procurement, context=None):
''' Implement the procurement checking for rules of type 'move'. The procurement will be satisfied only if all related
moves are done/cancel and if the requested quantity is moved.
'''
if procurement.rule_id and procurement.rule_id.action == 'move':
uom_obj = self.pool.get('product.uom')
# In case Phantom BoM splits only into procurements
if not procurement.move_ids:
return True
cancel_test_list = [x.state == 'cancel' for x in procurement.move_ids]
done_cancel_test_list = [x.state in ('done', 'cancel') for x in procurement.move_ids]
at_least_one_cancel = any(cancel_test_list)
all_done_or_cancel = all(done_cancel_test_list)
all_cancel = all(cancel_test_list)
if not all_done_or_cancel:
return False
elif all_done_or_cancel and not all_cancel:
return True
elif all_cancel:
self.message_post(cr, uid, [procurement.id], body=_('All stock moves have been cancelled for this procurement.'), context=context)
self.write(cr, uid, [procurement.id], {'state': 'cancel'}, context=context)
return False
return super(procurement_order, self)._check(cr, uid, procurement, context)
def do_view_pickings(self, cr, uid, ids, context=None):
'''
        This function returns an action that displays the pickings of the procurements belonging
        to the same procurement group as the given ids.
'''
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
result = mod_obj.get_object_reference(cr, uid, 'stock', 'do_view_pickings')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
group_ids = set([proc.group_id.id for proc in self.browse(cr, uid, ids, context=context) if proc.group_id])
result['domain'] = "[('group_id','in',[" + ','.join(map(str, list(group_ids))) + "])]"
return result
def run_scheduler(self, cr, uid, use_new_cursor=False, company_id=False, context=None):
'''
Call the scheduler in order to check the running procurements (super method), to check the minimum stock rules
and the availability of moves. This function is intended to be run for all the companies at the same time, so
        we run functions as SUPERUSER to avoid intercompany and access rights issues.
@param self: The object pointer
@param cr: The current row, from the database cursor,
@param uid: The current user ID for security checks
@param use_new_cursor: if set, use a dedicated cursor and auto-commit after processing each procurement.
This is appropriate for batch jobs only.
@param context: A standard dictionary for contextual values
@return: Dictionary of values
'''
super(procurement_order, self).run_scheduler(cr, uid, use_new_cursor=use_new_cursor, company_id=company_id, context=context)
if context is None:
context = {}
try:
if use_new_cursor:
cr = openerp.registry(cr.dbname).cursor()
move_obj = self.pool.get('stock.move')
#Minimum stock rules
self._procure_orderpoint_confirm(cr, SUPERUSER_ID, use_new_cursor=use_new_cursor, company_id=company_id, context=context)
#Search all confirmed stock_moves and try to assign them
confirmed_ids = move_obj.search(cr, uid, [('state', '=', 'confirmed')], limit=None, order='priority desc, date_expected asc', context=context)
for x in xrange(0, len(confirmed_ids), 100):
move_obj.action_assign(cr, uid, confirmed_ids[x:x + 100], context=context)
if use_new_cursor:
cr.commit()
if use_new_cursor:
cr.commit()
finally:
if use_new_cursor:
try:
cr.close()
except Exception:
pass
return {}
def _get_orderpoint_date_planned(self, cr, uid, orderpoint, start_date, context=None):
date_planned = start_date + relativedelta(days=orderpoint.product_id.seller_delay or 0.0)
return date_planned.strftime(DEFAULT_SERVER_DATE_FORMAT)
def _prepare_orderpoint_procurement(self, cr, uid, orderpoint, product_qty, context=None):
return {
'name': orderpoint.name,
'date_planned': self._get_orderpoint_date_planned(cr, uid, orderpoint, datetime.today(), context=context),
'product_id': orderpoint.product_id.id,
'product_qty': product_qty,
'company_id': orderpoint.company_id.id,
'product_uom': orderpoint.product_uom.id,
'location_id': orderpoint.location_id.id,
'origin': orderpoint.name,
'warehouse_id': orderpoint.warehouse_id.id,
'orderpoint_id': orderpoint.id,
'group_id': orderpoint.group_id.id,
}
def _product_virtual_get(self, cr, uid, order_point):
product_obj = self.pool.get('product.product')
return product_obj._product_available(cr, uid,
[order_point.product_id.id],
context={'location': order_point.location_id.id})[order_point.product_id.id]['virtual_available']
def _procure_orderpoint_confirm(self, cr, uid, use_new_cursor=False, company_id = False, context=None):
'''
Create procurement based on Orderpoint
:param bool use_new_cursor: if set, use a dedicated cursor and auto-commit after processing each procurement.
This is appropriate for batch jobs only.
'''
if context is None:
context = {}
if use_new_cursor:
cr = openerp.registry(cr.dbname).cursor()
orderpoint_obj = self.pool.get('stock.warehouse.orderpoint')
procurement_obj = self.pool.get('procurement.order')
dom = company_id and [('company_id', '=', company_id)] or []
orderpoint_ids = orderpoint_obj.search(cr, uid, dom)
prev_ids = []
while orderpoint_ids:
ids = orderpoint_ids[:100]
del orderpoint_ids[:100]
for op in orderpoint_obj.browse(cr, uid, ids, context=context):
try:
prods = self._product_virtual_get(cr, uid, op)
if prods is None:
continue
if float_compare(prods, op.product_min_qty, precision_rounding=op.product_uom.rounding) < 0:
qty = max(op.product_min_qty, op.product_max_qty) - prods
reste = op.qty_multiple > 0 and qty % op.qty_multiple or 0.0
if float_compare(reste, 0.0, precision_rounding=op.product_uom.rounding) > 0:
qty += op.qty_multiple - reste
if float_compare(qty, 0.0, precision_rounding=op.product_uom.rounding) <= 0:
continue
qty -= orderpoint_obj.subtract_procurements(cr, uid, op, context=context)
qty_rounded = float_round(qty, precision_rounding=op.product_uom.rounding)
if qty_rounded > 0:
proc_id = procurement_obj.create(cr, uid,
self._prepare_orderpoint_procurement(cr, uid, op, qty_rounded, context=context),
context=context)
self.check(cr, uid, [proc_id])
self.run(cr, uid, [proc_id])
if use_new_cursor:
cr.commit()
except OperationalError:
if use_new_cursor:
orderpoint_ids.append(op.id)
cr.rollback()
continue
else:
raise
if use_new_cursor:
cr.commit()
if prev_ids == ids:
break
else:
prev_ids = ids
if use_new_cursor:
cr.commit()
cr.close()
return {}
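# Editor's note: the block below is an illustrative sketch added in review and
# is not part of the original module. It mirrors the arithmetic of the
# orderpoint loop in _procure_orderpoint_confirm() with plain floats so the
# replenishment rule is easy to read in isolation: procure only when virtual
# stock falls strictly below the minimum, refill up to max(min, max), round the
# quantity up to the next multiple of qty_multiple, then subtract what open
# procurements already cover.
def _example_orderpoint_qty(virtual_stock, min_qty, max_qty, qty_multiple, already_procured=0.0):
    """Illustration only; e.g. (2, 5, 20, 7) -> 21.0 because 18 is rounded up to a multiple of 7."""
    if virtual_stock >= min_qty:
        return 0.0
    qty = max(min_qty, max_qty) - virtual_stock
    remainder = qty % qty_multiple if qty_multiple > 0 else 0.0
    if remainder > 0:
        qty += qty_multiple - remainder
    return max(qty - already_procured, 0.0)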
|
agpl-3.0
|
rajalokan/nova
|
nova/objects/volume_usage.py
|
41
|
2570
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova.objects import base
from nova.objects import fields
@base.NovaObjectRegistry.register
class VolumeUsage(base.NovaPersistentObject, base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'id': fields.IntegerField(read_only=True),
'volume_id': fields.UUIDField(),
'instance_uuid': fields.UUIDField(nullable=True),
'project_id': fields.StringField(nullable=True),
'user_id': fields.StringField(nullable=True),
'availability_zone': fields.StringField(nullable=True),
'tot_last_refreshed': fields.DateTimeField(nullable=True,
read_only=True),
'tot_reads': fields.IntegerField(read_only=True),
'tot_read_bytes': fields.IntegerField(read_only=True),
'tot_writes': fields.IntegerField(read_only=True),
'tot_write_bytes': fields.IntegerField(read_only=True),
'curr_last_refreshed': fields.DateTimeField(nullable=True,
read_only=True),
'curr_reads': fields.IntegerField(),
'curr_read_bytes': fields.IntegerField(),
'curr_writes': fields.IntegerField(),
'curr_write_bytes': fields.IntegerField()
}
@staticmethod
def _from_db_object(context, vol_usage, db_vol_usage):
for field in vol_usage.fields:
setattr(vol_usage, field, db_vol_usage[field])
vol_usage._context = context
vol_usage.obj_reset_changes()
return vol_usage
@base.remotable
def save(self, update_totals=False):
db_vol_usage = db.vol_usage_update(
self._context, self.volume_id, self.curr_reads,
self.curr_read_bytes, self.curr_writes, self.curr_write_bytes,
self.instance_uuid, self.project_id, self.user_id,
self.availability_zone, update_totals=update_totals)
self._from_db_object(self._context, self, db_vol_usage)
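# Editor's note: hedged usage sketch added in review; it is not part of the
# original module, and the calling pattern is an assumption about how a
# compute-side poller might use this object. Wrapping it in a function keeps
# importing the module free of side effects.
def _example_record_volume_usage(context, volume_id, instance_uuid,
                                 reads, read_bytes, writes, write_bytes):
    """Illustration only: persist one polling sample for a volume."""
    usage = VolumeUsage(context)
    usage.volume_id = volume_id
    usage.instance_uuid = instance_uuid
    usage.project_id = context.project_id
    usage.user_id = context.user_id
    usage.availability_zone = None
    usage.curr_reads = reads
    usage.curr_read_bytes = read_bytes
    usage.curr_writes = writes
    usage.curr_write_bytes = write_bytes
    # update_totals=True asks the db layer to roll the curr_* counters into
    # the tot_* columns as well.
    usage.save(update_totals=True)
    return usage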
|
apache-2.0
|
kingbryan/fractional
|
node_modules/gulp-sass/node_modules/node-sass/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
|
388
|
91069
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Notes:
#
# This is all roughly based on the Makefile system used by the Linux
# kernel, but is a non-recursive make -- we put the entire dependency
# graph in front of make and let it figure it out.
#
# The code below generates a separate .mk file for each target, but
# all are sourced by the top-level Makefile. This means that all
# variables in .mk-files clobber one another. Be careful to use :=
# where appropriate for immediate evaluation, and similarly to watch
# that you're not relying on a variable value to last between different
# .mk files.
#
# TODOs:
#
# Global settings and utility functions are currently stuffed in the
# toplevel Makefile. It may make sense to generate some .mk files on
# the side to keep the files readable.
import os
import re
import sys
import subprocess
import gyp
import gyp.common
import gyp.xcode_emulation
from gyp.common import GetEnvironFallback
from gyp.common import GypError
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
'PRODUCT_DIR': '$(builddir)',
'RULE_INPUT_ROOT': '%(INPUT_ROOT)s', # This gets expanded by Python.
'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s', # This gets expanded by Python.
'RULE_INPUT_PATH': '$(abspath $<)',
'RULE_INPUT_EXT': '$(suffix $<)',
'RULE_INPUT_NAME': '$(notdir $<)',
'CONFIGURATION_NAME': '$(BUILDTYPE)',
}
# Make supports multiple toolsets
generator_supports_multiple_toolsets = True
# Request sorted dependencies in the order from dependents to dependencies.
generator_wants_sorted_dependencies = False
# Placates pylint.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp)."""
flavor = gyp.common.GetFlavor(params)
if flavor == 'mac':
default_variables.setdefault('OS', 'mac')
default_variables.setdefault('SHARED_LIB_SUFFIX', '.dylib')
default_variables.setdefault('SHARED_LIB_DIR',
generator_default_variables['PRODUCT_DIR'])
default_variables.setdefault('LIB_DIR',
generator_default_variables['PRODUCT_DIR'])
# Copy additional generator configuration data from Xcode, which is shared
# by the Mac Make generator.
import gyp.generator.xcode as xcode_generator
global generator_additional_non_configuration_keys
generator_additional_non_configuration_keys = getattr(xcode_generator,
'generator_additional_non_configuration_keys', [])
global generator_additional_path_sections
generator_additional_path_sections = getattr(xcode_generator,
'generator_additional_path_sections', [])
global generator_extra_sources_for_rules
generator_extra_sources_for_rules = getattr(xcode_generator,
'generator_extra_sources_for_rules', [])
COMPILABLE_EXTENSIONS.update({'.m': 'objc', '.mm' : 'objcxx'})
else:
operating_system = flavor
if flavor == 'android':
operating_system = 'linux' # Keep this legacy behavior for now.
default_variables.setdefault('OS', operating_system)
default_variables.setdefault('SHARED_LIB_SUFFIX', '.so')
    default_variables.setdefault('SHARED_LIB_DIR', '$(builddir)/lib.$(TOOLSET)')
default_variables.setdefault('LIB_DIR', '$(obj).$(TOOLSET)')
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
generator_flags = params.get('generator_flags', {})
android_ndk_version = generator_flags.get('android_ndk_version', None)
# Android NDK requires a strict link order.
if android_ndk_version:
global generator_wants_sorted_dependencies
generator_wants_sorted_dependencies = True
output_dir = params['options'].generator_output or \
params['options'].toplevel_dir
builddir_name = generator_flags.get('output_dir', 'out')
qualified_out_dir = os.path.normpath(os.path.join(
output_dir, builddir_name, 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': params['options'].toplevel_dir,
'qualified_out_dir': qualified_out_dir,
}
# The .d checking code below uses these functions:
# wildcard, sort, foreach, shell, wordlist
# wildcard can handle spaces, the rest can't.
# Since I could find no way to make foreach work with spaces in filenames
# correctly, the .d files have spaces replaced with another character. The .d
# file for
# Chromium\ Framework.framework/foo
# is for example
# out/Release/.deps/out/Release/Chromium?Framework.framework/foo
# This is the replacement character.
SPACE_REPLACEMENT = '?'
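# Editor's note: the helper below is an illustrative sketch added in review and
# is not used by the generator. It round-trips a path through the same
# substitution that the make-level replace_spaces / unreplace_spaces functions
# (emitted in SHARED_HEADER further down) perform, e.g.
# 'out/Release/Chromium Framework.framework/foo' becomes
# 'out/Release/Chromium?Framework.framework/foo' in the .d bookkeeping.
def _example_replace_spaces(path, placeholder=SPACE_REPLACEMENT):
  """Illustration only: swap spaces for the placeholder and check the round trip."""
  replaced = path.replace(' ', placeholder)
  assert replaced.replace(placeholder, ' ') == path
  return replaced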
LINK_COMMANDS_LINUX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
# We support two kinds of shared objects (.so):
# 1) shared_library, which is just bundling together many dependent libraries
# into a link line.
# 2) loadable_module, which is generating a module intended for dlopen().
#
# They differ only slightly:
# In the former case, we want to package all dependent code into the .so.
# In the latter case, we want to package just the API exposed by the
# outermost module.
# This means shared_library uses --whole-archive, while loadable_module doesn't.
# (Note that --whole-archive is incompatible with the --start-group used in
# normal linking.)
# Other shared-object link notes:
# - Set SONAME to the library filename so our binaries don't reference
# the local, absolute paths used on the link command-line.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
"""
LINK_COMMANDS_MAC = """\
quiet_cmd_alink = LIBTOOL-STATIC $@
cmd_alink = rm -f $@ && ./gyp-mac-tool filter-libtool libtool $(GYP_LIBTOOLFLAGS) -static -o $@ $(filter %.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o "$@" $(LD_INPUTS) $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
LINK_COMMANDS_ANDROID = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) crsT $@ $(filter %.o,$^)
# Due to circular dependencies between libraries :(, we wrap the
# special "figure out circular dependencies" flags around the entire
# input list during linking.
quiet_cmd_link = LINK($(TOOLSET)) $@
quiet_cmd_link_host = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ -Wl,--start-group $(LD_INPUTS) -Wl,--end-group $(LIBS)
cmd_link_host = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
# Other shared-object link notes:
# - Set SONAME to the library filename so our binaries don't reference
# the local, absolute paths used on the link command-line.
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--whole-archive $(LD_INPUTS) -Wl,--no-whole-archive $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ -Wl,--start-group $(filter-out FORCE_DO_CMD, $^) -Wl,--end-group $(LIBS)
quiet_cmd_solink_module_host = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module_host = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -Wl,-soname=$(@F) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
LINK_COMMANDS_AIX = """\
quiet_cmd_alink = AR($(TOOLSET)) $@
cmd_alink = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
quiet_cmd_alink_thin = AR($(TOOLSET)) $@
cmd_alink_thin = rm -f $@ && $(AR.$(TOOLSET)) -X32_64 crs $@ $(filter %.o,$^)
quiet_cmd_link = LINK($(TOOLSET)) $@
cmd_link = $(LINK.$(TOOLSET)) $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
quiet_cmd_solink = SOLINK($(TOOLSET)) $@
cmd_solink = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(LD_INPUTS) $(LIBS)
quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
cmd_solink_module = $(LINK.$(TOOLSET)) -shared $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
"""
# Header of toplevel Makefile.
# This should go into the build tree, but it's easier to keep it here for now.
SHARED_HEADER = ("""\
# We borrow heavily from the kernel build setup, though we are simpler since
# we don't have Kconfig tweaking settings on us.
# The implicit make rules have it looking for RCS files, among other things.
# We instead explicitly write all the rules we care about.
# It's even quicker (saves ~200ms) to pass -r on the command line.
MAKEFLAGS=-r
# The source directory tree.
srcdir := %(srcdir)s
abs_srcdir := $(abspath $(srcdir))
# The name of the builddir.
builddir_name ?= %(builddir)s
# The V=1 flag on command line makes us verbosely print command lines.
ifdef V
quiet=
else
quiet=quiet_
endif
# Specify BUILDTYPE=Release on the command line for a release build.
BUILDTYPE ?= %(default_configuration)s
# Directory all our build output goes into.
# Note that this must be two directories beneath src/ for unit tests to pass,
# as they reach into the src/ directory for data with relative paths.
builddir ?= $(builddir_name)/$(BUILDTYPE)
abs_builddir := $(abspath $(builddir))
depsdir := $(builddir)/.deps
# Object output directory.
obj := $(builddir)/obj
abs_obj := $(abspath $(obj))
# We build up a list of every single one of the targets so we can slurp in the
# generated dependency rule Makefiles in one pass.
all_deps :=
%(make_global_settings)s
CC.target ?= %(CC.target)s
CFLAGS.target ?= $(CPPFLAGS) $(CFLAGS)
CXX.target ?= %(CXX.target)s
CXXFLAGS.target ?= $(CPPFLAGS) $(CXXFLAGS)
LINK.target ?= %(LINK.target)s
LDFLAGS.target ?= $(LDFLAGS)
AR.target ?= $(AR)
# C++ apps need to be linked with g++.
LINK ?= $(CXX.target)
# TODO(evan): move all cross-compilation logic to gyp-time so we don't need
# to replicate this environment fallback in make as well.
CC.host ?= %(CC.host)s
CFLAGS.host ?= $(CPPFLAGS_host) $(CFLAGS_host)
CXX.host ?= %(CXX.host)s
CXXFLAGS.host ?= $(CPPFLAGS_host) $(CXXFLAGS_host)
LINK.host ?= %(LINK.host)s
LDFLAGS.host ?=
AR.host ?= %(AR.host)s
# Define a dir function that can handle spaces.
# http://www.gnu.org/software/make/manual/make.html#Syntax-of-Functions
# "leading spaces cannot appear in the text of the first argument as written.
# These characters can be put into the argument value by variable substitution."
empty :=
space := $(empty) $(empty)
# http://stackoverflow.com/questions/1189781/using-make-dir-or-notdir-on-a-path-with-spaces
replace_spaces = $(subst $(space),""" + SPACE_REPLACEMENT + """,$1)
unreplace_spaces = $(subst """ + SPACE_REPLACEMENT + """,$(space),$1)
dirx = $(call unreplace_spaces,$(dir $(call replace_spaces,$1)))
# Flags to make gcc output dependency info. Note that you need to be
# careful here to use the flags that ccache and distcc can understand.
# We write to a dep file on the side first and then rename at the end
# so we can't end up with a broken dep file.
depfile = $(depsdir)/$(call replace_spaces,$@).d
DEPFLAGS = -MMD -MF $(depfile).raw
# We have to fixup the deps output in a few ways.
# (1) the file output should mention the proper .o file.
# ccache or distcc lose the path to the target, so we convert a rule of
# the form:
# foobar.o: DEP1 DEP2
# into
# path/to/foobar.o: DEP1 DEP2
# (2) we want missing files not to cause us to fail to build.
# We want to rewrite
# foobar.o: DEP1 DEP2 \\
# DEP3
# to
# DEP1:
# DEP2:
# DEP3:
# so if the files are missing, they're just considered phony rules.
# We have to do some pretty insane escaping to get those backslashes
# and dollar signs past make, the shell, and sed at the same time.
# Doesn't work with spaces, but that's fine: .d files have spaces in
# their names replaced with other characters."""
r"""
define fixup_dep
# The depfile may not exist if the input file didn't have any #includes.
touch $(depfile).raw
# Fixup path as in (1).
sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)
# Add extra rules as in (2).
# We remove slashes and replace spaces with new lines;
# remove blank lines;
# delete the first line and append a colon to the remaining lines.
sed -e 's|\\||' -e 'y| |\n|' $(depfile).raw |\
grep -v '^$$' |\
sed -e 1d -e 's|$$|:|' \
>> $(depfile)
rm $(depfile).raw
endef
"""
"""
# Command definitions:
# - cmd_foo is the actual command to run;
# - quiet_cmd_foo is the brief-output summary of the command.
quiet_cmd_cc = CC($(TOOLSET)) $@
cmd_cc = $(CC.$(TOOLSET)) $(GYP_CFLAGS) $(DEPFLAGS) $(CFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_cxx = CXX($(TOOLSET)) $@
cmd_cxx = $(CXX.$(TOOLSET)) $(GYP_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
%(extra_commands)s
quiet_cmd_touch = TOUCH $@
cmd_touch = touch $@
quiet_cmd_copy = COPY $@
# send stderr to /dev/null to ignore messages when linking directories.
cmd_copy = rm -rf "$@" && cp %(copy_archive_args)s "$<" "$@"
%(link_commands)s
"""
r"""
# Define an escape_quotes function to escape single quotes.
# This allows us to handle quotes properly as long as we always use
# single quotes and escape_quotes.
escape_quotes = $(subst ','\'',$(1))
# This comment is here just to include a ' to unconfuse syntax highlighting.
# Define an escape_vars function to escape '$' variable syntax.
# This allows us to read/write command lines with shell variables (e.g.
# $LD_LIBRARY_PATH), without triggering make substitution.
escape_vars = $(subst $$,$$$$,$(1))
# Helper that expands to a shell command to echo a string exactly as it is in
# make. This uses printf instead of echo because printf's behaviour with respect
# to escape sequences is more portable than echo's across different shells
# (e.g., dash, bash).
exact_echo = printf '%%s\n' '$(call escape_quotes,$(1))'
"""
"""
# Helper to compare the command we're about to run against the command
# we logged the last time we ran the command. Produces an empty
# string (false) when the commands match.
# Tricky point: Make has no string-equality test function.
# The kernel uses the following, but it seems like it would have false
# positives, where one string reordered its arguments.
# arg_check = $(strip $(filter-out $(cmd_$(1)), $(cmd_$@)) \\
# $(filter-out $(cmd_$@), $(cmd_$(1))))
# We instead substitute each for the empty string into the other, and
# say they're equal if both substitutions produce the empty string.
# .d files contain """ + SPACE_REPLACEMENT + \
""" instead of spaces, take that into account.
command_changed = $(or $(subst $(cmd_$(1)),,$(cmd_$(call replace_spaces,$@))),\\
$(subst $(cmd_$(call replace_spaces,$@)),,$(cmd_$(1))))
# Helper that is non-empty when a prerequisite changes.
# Normally make does this implicitly, but we force rules to always run
# so we can check their command lines.
# $? -- new prerequisites
# $| -- order-only dependencies
prereq_changed = $(filter-out FORCE_DO_CMD,$(filter-out $|,$?))
# Helper that executes all postbuilds until one fails.
define do_postbuilds
@E=0;\\
for p in $(POSTBUILDS); do\\
eval $$p;\\
E=$$?;\\
if [ $$E -ne 0 ]; then\\
break;\\
fi;\\
done;\\
if [ $$E -ne 0 ]; then\\
rm -rf "$@";\\
exit $$E;\\
fi
endef
# do_cmd: run a command via the above cmd_foo names, if necessary.
# Should always run for a given target to handle command-line changes.
# Second argument, if non-zero, makes it do asm/C/C++ dependency munging.
# Third argument, if non-zero, makes it do POSTBUILDS processing.
# Note: We intentionally do NOT call dirx for depfile, since it contains """ + \
SPACE_REPLACEMENT + """ for
# spaces already and dirx strips the """ + SPACE_REPLACEMENT + \
""" characters.
define do_cmd
$(if $(or $(command_changed),$(prereq_changed)),
@$(call exact_echo, $($(quiet)cmd_$(1)))
@mkdir -p "$(call dirx,$@)" "$(dir $(depfile))"
$(if $(findstring flock,$(word %(flock_index)d,$(cmd_$1))),
@$(cmd_$(1))
@echo " $(quiet_cmd_$(1)): Finished",
@$(cmd_$(1))
)
@$(call exact_echo,$(call escape_vars,cmd_$(call replace_spaces,$@) := $(cmd_$(1)))) > $(depfile)
@$(if $(2),$(fixup_dep))
$(if $(and $(3), $(POSTBUILDS)),
$(call do_postbuilds)
)
)
endef
# Declare the "%(default_target)s" target first so it is the default,
# even though we don't have the deps yet.
.PHONY: %(default_target)s
%(default_target)s:
# make looks for ways to re-generate included makefiles, but in our case, we
# don't have a direct way. Explicitly telling make that it has nothing to do
# for them makes it go faster.
%%.d: ;
# Use FORCE_DO_CMD to force a target to run. Should be coupled with
# do_cmd.
.PHONY: FORCE_DO_CMD
FORCE_DO_CMD:
""")
SHARED_HEADER_MAC_COMMANDS = """
quiet_cmd_objc = CXX($(TOOLSET)) $@
cmd_objc = $(CC.$(TOOLSET)) $(GYP_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
quiet_cmd_objcxx = CXX($(TOOLSET)) $@
cmd_objcxx = $(CXX.$(TOOLSET)) $(GYP_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
# Commands for precompiled header files.
quiet_cmd_pch_c = CXX($(TOOLSET)) $@
cmd_pch_c = $(CC.$(TOOLSET)) $(GYP_PCH_CFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_pch_cc = CXX($(TOOLSET)) $@
cmd_pch_cc = $(CC.$(TOOLSET)) $(GYP_PCH_CXXFLAGS) $(DEPFLAGS) $(CXXFLAGS.$(TOOLSET)) -c -o $@ $<
quiet_cmd_pch_m = CXX($(TOOLSET)) $@
cmd_pch_m = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCFLAGS) $(DEPFLAGS) -c -o $@ $<
quiet_cmd_pch_mm = CXX($(TOOLSET)) $@
cmd_pch_mm = $(CC.$(TOOLSET)) $(GYP_PCH_OBJCXXFLAGS) $(DEPFLAGS) -c -o $@ $<
# gyp-mac-tool is written next to the root Makefile by gyp.
# Use $(4) for the command, since $(2) and $(3) are used as flags by do_cmd
# already.
quiet_cmd_mac_tool = MACTOOL $(4) $<
cmd_mac_tool = ./gyp-mac-tool $(4) $< "$@"
quiet_cmd_mac_package_framework = PACKAGE FRAMEWORK $@
cmd_mac_package_framework = ./gyp-mac-tool package-framework "$@" $(4)
quiet_cmd_infoplist = INFOPLIST $@
cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
"""
def WriteRootHeaderSuffixRules(writer):
extensions = sorted(COMPILABLE_EXTENSIONS.keys(), key=str.lower)
writer.write('# Suffix rules, putting all outputs into $(obj).\n')
for ext in extensions:
writer.write('$(obj).$(TOOLSET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD\n' % ext)
writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
writer.write('\n# Try building from generated source, too.\n')
for ext in extensions:
writer.write(
'$(obj).$(TOOLSET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD\n' % ext)
writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
writer.write('\n')
for ext in extensions:
writer.write('$(obj).$(TOOLSET)/%%.o: $(obj)/%%%s FORCE_DO_CMD\n' % ext)
writer.write('\t@$(call do_cmd,%s,1)\n' % COMPILABLE_EXTENSIONS[ext])
writer.write('\n')
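# Editor's note (illustration added in review): with ext = '.cc' the first loop
# above emits the following make text, where the doubled %% collapses to a
# literal % and the recipe line begins with a tab:
#
#   $(obj).$(TOOLSET)/%.o: $(srcdir)/%.cc FORCE_DO_CMD
#           @$(call do_cmd,cxx,1)
#
# The second and third loops emit the analogous rules for sources generated
# into $(obj).$(TOOLSET) and $(obj).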
SHARED_HEADER_SUFFIX_RULES_COMMENT1 = ("""\
# Suffix rules, putting all outputs into $(obj).
""")
SHARED_HEADER_SUFFIX_RULES_COMMENT2 = ("""\
# Try building from generated source, too.
""")
SHARED_FOOTER = """\
# "all" is a concatenation of the "all" targets from all the included
# sub-makefiles. This is just here to clarify.
all:
# Add in dependency-tracking rules. $(all_deps) is the list of every single
# target in our tree. Only consider the ones with .d (dependency) info:
d_files := $(wildcard $(foreach f,$(all_deps),$(depsdir)/$(f).d))
ifneq ($(d_files),)
include $(d_files)
endif
"""
header = """\
# This file is generated by gyp; do not edit.
"""
# Maps every compilable file extension to the do_cmd that compiles it.
COMPILABLE_EXTENSIONS = {
'.c': 'cc',
'.cc': 'cxx',
'.cpp': 'cxx',
'.cxx': 'cxx',
'.s': 'cc',
'.S': 'cc',
}
def Compilable(filename):
"""Return true if the file is compilable (should be in OBJS)."""
for res in (filename.endswith(e) for e in COMPILABLE_EXTENSIONS):
if res:
return True
return False
def Linkable(filename):
"""Return true if the file is linkable (should be on the link line)."""
return filename.endswith('.o')
def Target(filename):
"""Translate a compilable filename to its .o target."""
return os.path.splitext(filename)[0] + '.o'
def EscapeShellArgument(s):
"""Quotes an argument so that it will be interpreted literally by a POSIX
shell. Taken from
http://stackoverflow.com/questions/35817/whats-the-best-way-to-escape-ossystem-calls-in-python
"""
return "'" + s.replace("'", "'\\''") + "'"
def EscapeMakeVariableExpansion(s):
"""Make has its own variable expansion syntax using $. We must escape it for
string to be interpreted literally."""
return s.replace('$', '$$')
def EscapeCppDefine(s):
"""Escapes a CPP define so that it will reach the compiler unaltered."""
s = EscapeShellArgument(s)
s = EscapeMakeVariableExpansion(s)
# '#' characters must be escaped even embedded in a string, else Make will
# treat it as the start of a comment.
return s.replace('#', r'\#')
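# Editor's note (illustration added in review): the chain above shell-quotes
# the define, doubles each '$' for make, and backslash-escapes '#'. Worked
# examples, each result wrapped in single quotes by the shell-quoting step:
#   EscapeCppDefine('FOO="a b"')     ->  'FOO="a b"'
#   EscapeCppDefine('SEP=$(SLASH)')  ->  'SEP=$$(SLASH)'
#   EscapeCppDefine('MARK=#')        ->  'MARK=\#'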
def QuoteIfNecessary(string):
"""TODO: Should this ideally be replaced with one or more of the above
functions?"""
if '"' in string:
string = '"' + string.replace('"', '\\"') + '"'
return string
def StringToMakefileVariable(string):
"""Convert a string to a value that is acceptable as a make variable name."""
return re.sub('[^a-zA-Z0-9_]', '_', string)
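# Editor's note (illustration): StringToMakefileVariable('my-target#host')
# returns 'my_target_host'; every character outside [a-zA-Z0-9_] collapses to
# an underscore.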
srcdir_prefix = ''
def Sourceify(path):
"""Convert a path to its source directory form."""
if '$(' in path:
return path
if os.path.isabs(path):
return path
return srcdir_prefix + path
def QuoteSpaces(s, quote=r'\ '):
return s.replace(' ', quote)
# TODO: Avoid code duplication with _ValidateSourcesForMSVSProject in msvs.py.
def _ValidateSourcesForOSX(spec, all_sources):
"""Makes sure if duplicate basenames are not specified in the source list.
Arguments:
spec: The target dictionary containing the properties of the target.
"""
if spec.get('type', None) != 'static_library':
return
basenames = {}
for source in all_sources:
name, ext = os.path.splitext(source)
is_compiled_file = ext in [
'.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
if not is_compiled_file:
continue
basename = os.path.basename(name) # Don't include extension.
basenames.setdefault(basename, []).append(source)
error = ''
for basename, files in basenames.iteritems():
if len(files) > 1:
error += ' %s: %s\n' % (basename, ' '.join(files))
if error:
print('static library %s has several files with the same basename:\n' %
spec['target_name'] + error + 'libtool on OS X will generate' +
' warnings for them.')
raise GypError('Duplicate basenames in sources section, see list above')
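# Editor's note (illustration added in review): a static_library whose sources
# include both 'foo/init.cc' and 'bar/init.cc' trips the check above; the
# printed message lists 'init: foo/init.cc bar/init.cc' and a GypError is
# raised, because libtool on OS X would otherwise warn about the duplicate
# archive members.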
# Map from qualified target to path to output.
target_outputs = {}
# Map from qualified target to any linkable output. A subset
# of target_outputs. E.g. when mybinary depends on liba, we want to
# include liba in the linker line; when otherbinary depends on
# mybinary, we just want to build mybinary first.
target_link_deps = {}
class MakefileWriter(object):
"""MakefileWriter packages up the writing of one target-specific foobar.mk.
  Its only real entry point is Write(); the class is mostly used for namespacing.
"""
def __init__(self, generator_flags, flavor):
self.generator_flags = generator_flags
self.flavor = flavor
self.suffix_rules_srcdir = {}
self.suffix_rules_objdir1 = {}
self.suffix_rules_objdir2 = {}
# Generate suffix rules for all compilable extensions.
for ext in COMPILABLE_EXTENSIONS.keys():
# Suffix rules for source folder.
self.suffix_rules_srcdir.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(srcdir)/%%%s FORCE_DO_CMD
@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
# Suffix rules for generated source files.
self.suffix_rules_objdir1.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj).$(TOOLSET)/%%%s FORCE_DO_CMD
@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
self.suffix_rules_objdir2.update({ext: ("""\
$(obj).$(TOOLSET)/$(TARGET)/%%.o: $(obj)/%%%s FORCE_DO_CMD
@$(call do_cmd,%s,1)
""" % (ext, COMPILABLE_EXTENSIONS[ext]))})
def Write(self, qualified_target, base_path, output_filename, spec, configs,
part_of_all):
"""The main entry point: writes a .mk file for a single target.
Arguments:
qualified_target: target we're generating
base_path: path relative to source root we're building in, used to resolve
target-relative paths
output_filename: output .mk file name to write
spec, configs: gyp info
part_of_all: flag indicating this target is part of 'all'
"""
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
self.qualified_target = qualified_target
self.path = base_path
self.target = spec['target_name']
self.type = spec['type']
self.toolset = spec['toolset']
self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
if self.flavor == 'mac':
self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
else:
self.xcode_settings = None
deps, link_deps = self.ComputeDeps(spec)
# Some of the generation below can add extra output, sources, or
# link dependencies. All of the out params of the functions that
# follow use names like extra_foo.
extra_outputs = []
extra_sources = []
extra_link_deps = []
extra_mac_bundle_resources = []
mac_bundle_deps = []
if self.is_mac_bundle:
self.output = self.ComputeMacBundleOutput(spec)
self.output_binary = self.ComputeMacBundleBinaryOutput(spec)
else:
self.output = self.output_binary = self.ComputeOutput(spec)
self.is_standalone_static_library = bool(
spec.get('standalone_static_library', 0))
self._INSTALLABLE_TARGETS = ('executable', 'loadable_module',
'shared_library')
if (self.is_standalone_static_library or
self.type in self._INSTALLABLE_TARGETS):
self.alias = os.path.basename(self.output)
install_path = self._InstallableTargetInstallPath()
else:
self.alias = self.output
install_path = self.output
self.WriteLn("TOOLSET := " + self.toolset)
self.WriteLn("TARGET := " + self.target)
# Actions must come first, since they can generate more OBJs for use below.
if 'actions' in spec:
self.WriteActions(spec['actions'], extra_sources, extra_outputs,
extra_mac_bundle_resources, part_of_all)
# Rules must be early like actions.
if 'rules' in spec:
self.WriteRules(spec['rules'], extra_sources, extra_outputs,
extra_mac_bundle_resources, part_of_all)
if 'copies' in spec:
self.WriteCopies(spec['copies'], extra_outputs, part_of_all)
# Bundle resources.
if self.is_mac_bundle:
all_mac_bundle_resources = (
spec.get('mac_bundle_resources', []) + extra_mac_bundle_resources)
self.WriteMacBundleResources(all_mac_bundle_resources, mac_bundle_deps)
self.WriteMacInfoPlist(mac_bundle_deps)
# Sources.
all_sources = spec.get('sources', []) + extra_sources
if all_sources:
if self.flavor == 'mac':
# libtool on OS X generates warnings for duplicate basenames in the same
# target.
_ValidateSourcesForOSX(spec, all_sources)
self.WriteSources(
configs, deps, all_sources, extra_outputs,
extra_link_deps, part_of_all,
gyp.xcode_emulation.MacPrefixHeader(
self.xcode_settings, lambda p: Sourceify(self.Absolutify(p)),
self.Pchify))
sources = filter(Compilable, all_sources)
if sources:
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT1)
extensions = set([os.path.splitext(s)[1] for s in sources])
for ext in extensions:
if ext in self.suffix_rules_srcdir:
self.WriteLn(self.suffix_rules_srcdir[ext])
self.WriteLn(SHARED_HEADER_SUFFIX_RULES_COMMENT2)
for ext in extensions:
if ext in self.suffix_rules_objdir1:
self.WriteLn(self.suffix_rules_objdir1[ext])
for ext in extensions:
if ext in self.suffix_rules_objdir2:
self.WriteLn(self.suffix_rules_objdir2[ext])
self.WriteLn('# End of this set of suffix rules')
# Add dependency from bundle to bundle binary.
if self.is_mac_bundle:
mac_bundle_deps.append(self.output_binary)
self.WriteTarget(spec, configs, deps, extra_link_deps + link_deps,
mac_bundle_deps, extra_outputs, part_of_all)
# Update global list of target outputs, used in dependency tracking.
target_outputs[qualified_target] = install_path
# Update global list of link dependencies.
if self.type in ('static_library', 'shared_library'):
target_link_deps[qualified_target] = self.output_binary
    # Currently all versions have the same effect, but in the future the
    # behavior could differ.
if self.generator_flags.get('android_ndk_version', None):
self.WriteAndroidNdkModuleRule(self.target, all_sources, link_deps)
self.fp.close()
def WriteSubMake(self, output_filename, makefile_path, targets, build_dir):
"""Write a "sub-project" Makefile.
    This is a small wrapper Makefile that calls the top-level Makefile to build
the targets from a single gyp file (i.e. a sub-project).
Arguments:
output_filename: sub-project Makefile name to write
makefile_path: path to the top-level Makefile
targets: list of "all" targets for this sub-project
build_dir: build output directory, relative to the sub-project
"""
gyp.common.EnsureDirExists(output_filename)
self.fp = open(output_filename, 'w')
self.fp.write(header)
# For consistency with other builders, put sub-project build output in the
# sub-project dir (see test/subdirectory/gyptest-subdir-all.py).
self.WriteLn('export builddir_name ?= %s' %
os.path.join(os.path.dirname(output_filename), build_dir))
self.WriteLn('.PHONY: all')
self.WriteLn('all:')
if makefile_path:
makefile_path = ' -C ' + makefile_path
self.WriteLn('\t$(MAKE)%s %s' % (makefile_path, ' '.join(targets)))
self.fp.close()
def WriteActions(self, actions, extra_sources, extra_outputs,
extra_mac_bundle_resources, part_of_all):
"""Write Makefile code for any 'actions' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
actions (used to make other pieces dependent on these
actions)
part_of_all: flag indicating this target is part of 'all'
"""
env = self.GetSortedXcodeEnv()
for action in actions:
name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
action['action_name']))
self.WriteLn('### Rules for action "%s":' % action['action_name'])
inputs = action['inputs']
outputs = action['outputs']
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set()
for out in outputs:
dir = os.path.split(out)[0]
if dir:
dirs.add(dir)
if int(action.get('process_outputs_as_sources', False)):
extra_sources += outputs
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += outputs
# Write the actual command.
action_commands = action['action']
if self.flavor == 'mac':
action_commands = [gyp.xcode_emulation.ExpandEnvVars(command, env)
for command in action_commands]
command = gyp.common.EncodePOSIXShellList(action_commands)
if 'message' in action:
self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, action['message']))
else:
self.WriteLn('quiet_cmd_%s = ACTION %s $@' % (name, name))
if len(dirs) > 0:
command = 'mkdir -p %s' % ' '.join(dirs) + '; ' + command
cd_action = 'cd %s; ' % Sourceify(self.path or '.')
# command and cd_action get written to a toplevel variable called
# cmd_foo. Toplevel variables can't handle things that change per
# makefile like $(TARGET), so hardcode the target.
command = command.replace('$(TARGET)', self.target)
cd_action = cd_action.replace('$(TARGET)', self.target)
# Set LD_LIBRARY_PATH in case the action runs an executable from this
# build which links to shared libs from this build.
# actions run on the host, so they should in theory only use host
# libraries, but until everything is made cross-compile safe, also use
# target libraries.
# TODO(piman): when everything is cross-compile safe, remove lib.target
self.WriteLn('cmd_%s = LD_LIBRARY_PATH=$(builddir)/lib.host:'
'$(builddir)/lib.target:$$LD_LIBRARY_PATH; '
'export LD_LIBRARY_PATH; '
'%s%s'
% (name, cd_action, command))
self.WriteLn()
outputs = map(self.Absolutify, outputs)
# The makefile rules are all relative to the top dir, but the gyp actions
# are defined relative to their containing dir. This replaces the obj
# variable for the action rule with an absolute version so that the output
# goes in the right place.
# Only write the 'obj' and 'builddir' rules for the "primary" output (:1);
# it's superfluous for the "extra outputs", and this avoids accidentally
# writing duplicate dummy rules for those outputs.
# Same for environment.
self.WriteLn("%s: obj := $(abs_obj)" % QuoteSpaces(outputs[0]))
self.WriteLn("%s: builddir := $(abs_builddir)" % QuoteSpaces(outputs[0]))
self.WriteSortedXcodeEnv(outputs[0], self.GetSortedXcodeEnv())
for input in inputs:
assert ' ' not in input, (
"Spaces in action input filenames not supported (%s)" % input)
for output in outputs:
assert ' ' not in output, (
"Spaces in action output filenames not supported (%s)" % output)
# See the comment in WriteCopies about expanding env vars.
outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
self.WriteDoCmd(outputs, map(Sourceify, map(self.Absolutify, inputs)),
part_of_all=part_of_all, command=name)
# Stuff the outputs in a variable so we can refer to them later.
outputs_variable = 'action_%s_outputs' % name
self.WriteLn('%s := %s' % (outputs_variable, ' '.join(outputs)))
extra_outputs.append('$(%s)' % outputs_variable)
self.WriteLn()
self.WriteLn()
def WriteRules(self, rules, extra_sources, extra_outputs,
extra_mac_bundle_resources, part_of_all):
"""Write Makefile code for any 'rules' from the gyp input.
extra_sources: a list that will be filled in with newly generated source
files, if any
extra_outputs: a list that will be filled in with any outputs of these
rules (used to make other pieces dependent on these rules)
part_of_all: flag indicating this target is part of 'all'
"""
env = self.GetSortedXcodeEnv()
for rule in rules:
name = StringToMakefileVariable('%s_%s' % (self.qualified_target,
rule['rule_name']))
count = 0
self.WriteLn('### Generated for rule %s:' % name)
all_outputs = []
for rule_source in rule.get('rule_sources', []):
dirs = set()
(rule_source_dirname, rule_source_basename) = os.path.split(rule_source)
(rule_source_root, rule_source_ext) = \
os.path.splitext(rule_source_basename)
outputs = [self.ExpandInputRoot(out, rule_source_root,
rule_source_dirname)
for out in rule['outputs']]
for out in outputs:
dir = os.path.dirname(out)
if dir:
dirs.add(dir)
if int(rule.get('process_outputs_as_sources', False)):
extra_sources += outputs
if int(rule.get('process_outputs_as_mac_bundle_resources', False)):
extra_mac_bundle_resources += outputs
inputs = map(Sourceify, map(self.Absolutify, [rule_source] +
rule.get('inputs', [])))
actions = ['$(call do_cmd,%s_%d)' % (name, count)]
if name == 'resources_grit':
# HACK: This is ugly. Grit intentionally doesn't touch the
# timestamp of its output file when the file doesn't change,
# which is fine in hash-based dependency systems like scons
# and forge, but not kosher in the make world. After some
# discussion, hacking around it here seems like the least
# amount of pain.
actions += ['@touch --no-create $@']
# See the comment in WriteCopies about expanding env vars.
outputs = [gyp.xcode_emulation.ExpandEnvVars(o, env) for o in outputs]
inputs = [gyp.xcode_emulation.ExpandEnvVars(i, env) for i in inputs]
outputs = map(self.Absolutify, outputs)
all_outputs += outputs
# Only write the 'obj' and 'builddir' rules for the "primary" output
# (:1); it's superfluous for the "extra outputs", and this avoids
# accidentally writing duplicate dummy rules for those outputs.
self.WriteLn('%s: obj := $(abs_obj)' % outputs[0])
self.WriteLn('%s: builddir := $(abs_builddir)' % outputs[0])
self.WriteMakeRule(outputs, inputs, actions,
command="%s_%d" % (name, count))
# Spaces in rule filenames are not supported, but rule variables have
# spaces in them (e.g. RULE_INPUT_PATH expands to '$(abspath $<)').
# The spaces within the variables are valid, so remove the variables
# before checking.
variables_with_spaces = re.compile(r'\$\([^ ]* \$<\)')
for output in outputs:
output = re.sub(variables_with_spaces, '', output)
assert ' ' not in output, (
"Spaces in rule filenames not yet supported (%s)" % output)
self.WriteLn('all_deps += %s' % ' '.join(outputs))
action = [self.ExpandInputRoot(ac, rule_source_root,
rule_source_dirname)
for ac in rule['action']]
mkdirs = ''
if len(dirs) > 0:
mkdirs = 'mkdir -p %s; ' % ' '.join(dirs)
cd_action = 'cd %s; ' % Sourceify(self.path or '.')
# action, cd_action, and mkdirs get written to a toplevel variable
# called cmd_foo. Toplevel variables can't handle things that change
# per makefile like $(TARGET), so hardcode the target.
if self.flavor == 'mac':
action = [gyp.xcode_emulation.ExpandEnvVars(command, env)
for command in action]
action = gyp.common.EncodePOSIXShellList(action)
action = action.replace('$(TARGET)', self.target)
cd_action = cd_action.replace('$(TARGET)', self.target)
mkdirs = mkdirs.replace('$(TARGET)', self.target)
# Set LD_LIBRARY_PATH in case the rule runs an executable from this
# build which links to shared libs from this build.
# rules run on the host, so they should in theory only use host
# libraries, but until everything is made cross-compile safe, also use
# target libraries.
# TODO(piman): when everything is cross-compile safe, remove lib.target
self.WriteLn(
"cmd_%(name)s_%(count)d = LD_LIBRARY_PATH="
"$(builddir)/lib.host:$(builddir)/lib.target:$$LD_LIBRARY_PATH; "
"export LD_LIBRARY_PATH; "
"%(cd_action)s%(mkdirs)s%(action)s" % {
'action': action,
'cd_action': cd_action,
'count': count,
'mkdirs': mkdirs,
'name': name,
})
self.WriteLn(
'quiet_cmd_%(name)s_%(count)d = RULE %(name)s_%(count)d $@' % {
'count': count,
'name': name,
})
self.WriteLn()
count += 1
outputs_variable = 'rule_%s_outputs' % name
self.WriteList(all_outputs, outputs_variable)
extra_outputs.append('$(%s)' % outputs_variable)
self.WriteLn('### Finished generating for rule: %s' % name)
self.WriteLn()
self.WriteLn('### Finished generating for all rules')
self.WriteLn('')
def WriteCopies(self, copies, extra_outputs, part_of_all):
"""Write Makefile code for any 'copies' from the gyp input.
extra_outputs: a list that will be filled in with any outputs of this action
(used to make other pieces dependent on this action)
part_of_all: flag indicating this target is part of 'all'
"""
self.WriteLn('### Generated for copy rule.')
variable = StringToMakefileVariable(self.qualified_target + '_copies')
outputs = []
for copy in copies:
for path in copy['files']:
# Absolutify() may call normpath, and will strip trailing slashes.
path = Sourceify(self.Absolutify(path))
filename = os.path.split(path)[1]
output = Sourceify(self.Absolutify(os.path.join(copy['destination'],
filename)))
# If the output path has variables in it, which happens in practice for
# 'copies', writing the environment as target-local doesn't work,
# because the variables are already needed for the target name.
# Copying the environment variables into global make variables doesn't
# work either, because then the .d files will potentially contain spaces
# after variable expansion, and .d file handling cannot handle spaces.
# As a workaround, manually expand variables at gyp time. Since 'copies'
# can't run scripts, there's no need to write the env then.
# WriteDoCmd() will escape spaces for .d files.
env = self.GetSortedXcodeEnv()
output = gyp.xcode_emulation.ExpandEnvVars(output, env)
path = gyp.xcode_emulation.ExpandEnvVars(path, env)
self.WriteDoCmd([output], [path], 'copy', part_of_all)
outputs.append(output)
self.WriteLn('%s = %s' % (variable, ' '.join(map(QuoteSpaces, outputs))))
extra_outputs.append('$(%s)' % variable)
self.WriteLn()
def WriteMacBundleResources(self, resources, bundle_deps):
"""Writes Makefile code for 'mac_bundle_resources'."""
self.WriteLn('### Generated for mac_bundle_resources')
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
map(Sourceify, map(self.Absolutify, resources))):
_, ext = os.path.splitext(output)
if ext != '.xcassets':
        # Make does not support '.xcassets' emulation.
self.WriteDoCmd([output], [res], 'mac_tool,,,copy-bundle-resource',
part_of_all=True)
bundle_deps.append(output)
def WriteMacInfoPlist(self, bundle_deps):
"""Write Makefile code for bundle Info.plist files."""
info_plist, out, defines, extra_env = gyp.xcode_emulation.GetMacInfoPlist(
generator_default_variables['PRODUCT_DIR'], self.xcode_settings,
lambda p: Sourceify(self.Absolutify(p)))
if not info_plist:
return
if defines:
# Create an intermediate file to store preprocessed results.
intermediate_plist = ('$(obj).$(TOOLSET)/$(TARGET)/' +
os.path.basename(info_plist))
self.WriteList(defines, intermediate_plist + ': INFOPLIST_DEFINES', '-D',
quoter=EscapeCppDefine)
self.WriteMakeRule([intermediate_plist], [info_plist],
['$(call do_cmd,infoplist)',
# "Convert" the plist so that any weird whitespace changes from the
# preprocessor do not affect the XML parser in mac_tool.
'@plutil -convert xml1 $@ $@'])
info_plist = intermediate_plist
    # plists can contain envvars; write the Xcode env so they can be
    # substituted into the file.
self.WriteSortedXcodeEnv(
out, self.GetSortedXcodeEnv(additional_settings=extra_env))
self.WriteDoCmd([out], [info_plist], 'mac_tool,,,copy-info-plist',
part_of_all=True)
bundle_deps.append(out)
def WriteSources(self, configs, deps, sources,
extra_outputs, extra_link_deps,
part_of_all, precompiled_header):
"""Write Makefile code for any 'sources' from the gyp input.
These are source files necessary to build the current target.
configs, deps, sources: input from gyp.
extra_outputs: a list of extra outputs this action should be dependent on;
used to serialize action/rules before compilation
extra_link_deps: a list that will be filled in with any outputs of
compilation (to be used in link lines)
part_of_all: flag indicating this target is part of 'all'
"""
# Write configuration-specific variables for CFLAGS, etc.
for configname in sorted(configs.keys()):
config = configs[configname]
self.WriteList(config.get('defines'), 'DEFS_%s' % configname, prefix='-D',
quoter=EscapeCppDefine)
if self.flavor == 'mac':
cflags = self.xcode_settings.GetCflags(configname)
cflags_c = self.xcode_settings.GetCflagsC(configname)
cflags_cc = self.xcode_settings.GetCflagsCC(configname)
cflags_objc = self.xcode_settings.GetCflagsObjC(configname)
cflags_objcc = self.xcode_settings.GetCflagsObjCC(configname)
else:
cflags = config.get('cflags')
cflags_c = config.get('cflags_c')
cflags_cc = config.get('cflags_cc')
self.WriteLn("# Flags passed to all source files.");
self.WriteList(cflags, 'CFLAGS_%s' % configname)
self.WriteLn("# Flags passed to only C files.");
self.WriteList(cflags_c, 'CFLAGS_C_%s' % configname)
self.WriteLn("# Flags passed to only C++ files.");
self.WriteList(cflags_cc, 'CFLAGS_CC_%s' % configname)
if self.flavor == 'mac':
self.WriteLn("# Flags passed to only ObjC files.");
self.WriteList(cflags_objc, 'CFLAGS_OBJC_%s' % configname)
self.WriteLn("# Flags passed to only ObjC++ files.");
self.WriteList(cflags_objcc, 'CFLAGS_OBJCC_%s' % configname)
includes = config.get('include_dirs')
if includes:
includes = map(Sourceify, map(self.Absolutify, includes))
self.WriteList(includes, 'INCS_%s' % configname, prefix='-I')
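    # Illustrative output for a configuration named 'Debug' (flag values here
    # are assumed, not taken from any real gyp file; WriteList actually
    # pretty-prints each value on its own continuation line):
    #   DEFS_Debug := -DDEBUG
    #   CFLAGS_Debug := -g -O0
    #   CFLAGS_C_Debug :=
    #   CFLAGS_CC_Debug := -fno-rtti
    #   INCS_Debug := -Iinclude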
compilable = filter(Compilable, sources)
objs = map(self.Objectify, map(self.Absolutify, map(Target, compilable)))
self.WriteList(objs, 'OBJS')
for obj in objs:
assert ' ' not in obj, (
"Spaces in object filenames not supported (%s)" % obj)
self.WriteLn('# Add to the list of files we specially track '
'dependencies for.')
self.WriteLn('all_deps += $(OBJS)')
self.WriteLn()
# Make sure our dependencies are built first.
if deps:
self.WriteMakeRule(['$(OBJS)'], deps,
comment = 'Make sure our dependencies are built '
'before any of us.',
order_only = True)
# Make sure the actions and rules run first.
# If they generate any extra headers etc., the per-.o file dep tracking
# will catch the proper rebuilds, so order only is still ok here.
if extra_outputs:
self.WriteMakeRule(['$(OBJS)'], extra_outputs,
comment = 'Make sure our actions/rules run '
'before any of us.',
order_only = True)
    pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
if pchdeps:
self.WriteLn('# Dependencies from obj files to their precompiled headers')
for source, obj, gch in pchdeps:
self.WriteLn('%s: %s' % (obj, gch))
self.WriteLn('# End precompiled header dependencies')
if objs:
extra_link_deps.append('$(OBJS)')
self.WriteLn("""\
# CFLAGS et al overrides must be target-local.
# See "Target-specific Variable Values" in the GNU Make manual.""")
self.WriteLn("$(OBJS): TOOLSET := $(TOOLSET)")
self.WriteLn("$(OBJS): GYP_CFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"%s " % precompiled_header.GetInclude('c') +
"$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_C_$(BUILDTYPE))")
self.WriteLn("$(OBJS): GYP_CXXFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"%s " % precompiled_header.GetInclude('cc') +
"$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_CC_$(BUILDTYPE))")
if self.flavor == 'mac':
self.WriteLn("$(OBJS): GYP_OBJCFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"%s " % precompiled_header.GetInclude('m') +
"$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_C_$(BUILDTYPE)) "
"$(CFLAGS_OBJC_$(BUILDTYPE))")
self.WriteLn("$(OBJS): GYP_OBJCXXFLAGS := "
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"%s " % precompiled_header.GetInclude('mm') +
"$(CFLAGS_$(BUILDTYPE)) "
"$(CFLAGS_CC_$(BUILDTYPE)) "
"$(CFLAGS_OBJCC_$(BUILDTYPE))")
self.WritePchTargets(precompiled_header.GetPchBuildCommands())
# If there are any object files in our input file list, link them into our
# output.
extra_link_deps += filter(Linkable, sources)
self.WriteLn()
def WritePchTargets(self, pch_commands):
"""Writes make rules to compile prefix headers."""
if not pch_commands:
return
for gch, lang_flag, lang, input in pch_commands:
extra_flags = {
'c': '$(CFLAGS_C_$(BUILDTYPE))',
'cc': '$(CFLAGS_CC_$(BUILDTYPE))',
'm': '$(CFLAGS_C_$(BUILDTYPE)) $(CFLAGS_OBJC_$(BUILDTYPE))',
'mm': '$(CFLAGS_CC_$(BUILDTYPE)) $(CFLAGS_OBJCC_$(BUILDTYPE))',
}[lang]
var_name = {
'c': 'GYP_PCH_CFLAGS',
'cc': 'GYP_PCH_CXXFLAGS',
'm': 'GYP_PCH_OBJCFLAGS',
'mm': 'GYP_PCH_OBJCXXFLAGS',
}[lang]
self.WriteLn("%s: %s := %s " % (gch, var_name, lang_flag) +
"$(DEFS_$(BUILDTYPE)) "
"$(INCS_$(BUILDTYPE)) "
"$(CFLAGS_$(BUILDTYPE)) " +
extra_flags)
self.WriteLn('%s: %s FORCE_DO_CMD' % (gch, input))
self.WriteLn('\t@$(call do_cmd,pch_%s,1)' % lang)
self.WriteLn('')
assert ' ' not in gch, (
"Spaces in gch filenames not supported (%s)" % gch)
self.WriteLn('all_deps += %s' % gch)
self.WriteLn('')
def ComputeOutputBasename(self, spec):
"""Return the 'output basename' of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'libfoobar.so'
"""
assert not self.is_mac_bundle
if self.flavor == 'mac' and self.type in (
'static_library', 'executable', 'shared_library', 'loadable_module'):
return self.xcode_settings.GetExecutablePath()
target = spec['target_name']
target_prefix = ''
target_ext = ''
if self.type == 'static_library':
if target[:3] == 'lib':
target = target[3:]
target_prefix = 'lib'
target_ext = '.a'
elif self.type in ('loadable_module', 'shared_library'):
if target[:3] == 'lib':
target = target[3:]
target_prefix = 'lib'
target_ext = '.so'
elif self.type == 'none':
target = '%s.stamp' % target
elif self.type != 'executable':
print ("ERROR: What output file should be generated?",
"type", self.type, "target", target)
target_prefix = spec.get('product_prefix', target_prefix)
target = spec.get('product_name', target)
product_ext = spec.get('product_extension')
if product_ext:
target_ext = '.' + product_ext
return target_prefix + target + target_ext
def _InstallImmediately(self):
return self.toolset == 'target' and self.flavor == 'mac' and self.type in (
'static_library', 'executable', 'shared_library', 'loadable_module')
def ComputeOutput(self, spec):
"""Return the 'output' (full output path) of a gyp spec.
E.g., the loadable module 'foobar' in directory 'baz' will produce
'$(obj)/baz/libfoobar.so'
"""
assert not self.is_mac_bundle
path = os.path.join('$(obj).' + self.toolset, self.path)
if self.type == 'executable' or self._InstallImmediately():
path = '$(builddir)'
path = spec.get('product_dir', path)
return os.path.join(path, self.ComputeOutputBasename(spec))
def ComputeMacBundleOutput(self, spec):
"""Return the 'output' (full output path) to a bundle output directory."""
assert self.is_mac_bundle
path = generator_default_variables['PRODUCT_DIR']
return os.path.join(path, self.xcode_settings.GetWrapperName())
def ComputeMacBundleBinaryOutput(self, spec):
"""Return the 'output' (full output path) to the binary in a bundle."""
path = generator_default_variables['PRODUCT_DIR']
return os.path.join(path, self.xcode_settings.GetExecutablePath())
def ComputeDeps(self, spec):
"""Compute the dependencies of a gyp spec.
Returns a tuple (deps, link_deps), where each is a list of
filenames that will need to be put in front of make for either
building (deps) or linking (link_deps).
"""
deps = []
link_deps = []
if 'dependencies' in spec:
deps.extend([target_outputs[dep] for dep in spec['dependencies']
if target_outputs[dep]])
for dep in spec['dependencies']:
if dep in target_link_deps:
link_deps.append(target_link_deps[dep])
deps.extend(link_deps)
# TODO: It seems we need to transitively link in libraries (e.g. -lfoo)?
# This hack makes it work:
# link_deps.extend(spec.get('libraries', []))
return (gyp.common.uniquer(deps), gyp.common.uniquer(link_deps))
def WriteDependencyOnExtraOutputs(self, target, extra_outputs):
self.WriteMakeRule([self.output_binary], extra_outputs,
comment = 'Build our special outputs first.',
order_only = True)
def WriteTarget(self, spec, configs, deps, link_deps, bundle_deps,
extra_outputs, part_of_all):
"""Write Makefile code to produce the final target of the gyp spec.
spec, configs: input from gyp.
deps, link_deps: dependency lists; see ComputeDeps()
extra_outputs: any extra outputs that our target should depend on
part_of_all: flag indicating this target is part of 'all'
"""
self.WriteLn('### Rules for final target.')
if extra_outputs:
self.WriteDependencyOnExtraOutputs(self.output_binary, extra_outputs)
self.WriteMakeRule(extra_outputs, deps,
comment=('Preserve order dependency of '
'special output on deps.'),
order_only = True)
target_postbuilds = {}
if self.type != 'none':
for configname in sorted(configs.keys()):
config = configs[configname]
if self.flavor == 'mac':
ldflags = self.xcode_settings.GetLdflags(configname,
generator_default_variables['PRODUCT_DIR'],
lambda p: Sourceify(self.Absolutify(p)))
# TARGET_POSTBUILDS_$(BUILDTYPE) is added to postbuilds later on.
gyp_to_build = gyp.common.InvertRelativePath(self.path)
target_postbuild = self.xcode_settings.AddImplicitPostbuilds(
configname,
QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
self.output))),
QuoteSpaces(os.path.normpath(os.path.join(gyp_to_build,
self.output_binary))))
if target_postbuild:
target_postbuilds[configname] = target_postbuild
else:
ldflags = config.get('ldflags', [])
# Compute an rpath for this output if needed.
if any(dep.endswith('.so') or '.so.' in dep for dep in deps):
# We want to get the literal string "$ORIGIN" into the link command,
# so we need lots of escaping.
ldflags.append(r'-Wl,-rpath=\$$ORIGIN/lib.%s/' % self.toolset)
ldflags.append(r'-Wl,-rpath-link=\$(builddir)/lib.%s/' %
self.toolset)
library_dirs = config.get('library_dirs', [])
ldflags += [('-L%s' % library_dir) for library_dir in library_dirs]
self.WriteList(ldflags, 'LDFLAGS_%s' % configname)
if self.flavor == 'mac':
self.WriteList(self.xcode_settings.GetLibtoolflags(configname),
'LIBTOOLFLAGS_%s' % configname)
libraries = spec.get('libraries')
if libraries:
# Remove duplicate entries
libraries = gyp.common.uniquer(libraries)
if self.flavor == 'mac':
libraries = self.xcode_settings.AdjustLibraries(libraries)
self.WriteList(libraries, 'LIBS')
self.WriteLn('%s: GYP_LDFLAGS := $(LDFLAGS_$(BUILDTYPE))' %
QuoteSpaces(self.output_binary))
self.WriteLn('%s: LIBS := $(LIBS)' % QuoteSpaces(self.output_binary))
if self.flavor == 'mac':
self.WriteLn('%s: GYP_LIBTOOLFLAGS := $(LIBTOOLFLAGS_$(BUILDTYPE))' %
QuoteSpaces(self.output_binary))
# Postbuild actions. Like actions, but implicitly depend on the target's
# output.
postbuilds = []
if self.flavor == 'mac':
if target_postbuilds:
postbuilds.append('$(TARGET_POSTBUILDS_$(BUILDTYPE))')
postbuilds.extend(
gyp.xcode_emulation.GetSpecPostbuildCommands(spec))
if postbuilds:
# Envvars may be referenced by TARGET_POSTBUILDS_$(BUILDTYPE),
# so we must output its definition first, since we declare variables
# using ":=".
self.WriteSortedXcodeEnv(self.output, self.GetSortedXcodePostbuildEnv())
for configname in target_postbuilds:
self.WriteLn('%s: TARGET_POSTBUILDS_%s := %s' %
(QuoteSpaces(self.output),
configname,
gyp.common.EncodePOSIXShellList(target_postbuilds[configname])))
# Postbuilds expect to be run in the gyp file's directory, so insert an
# implicit postbuild to cd to there.
postbuilds.insert(0, gyp.common.EncodePOSIXShellList(['cd', self.path]))
for i in xrange(len(postbuilds)):
if not postbuilds[i].startswith('$'):
postbuilds[i] = EscapeShellArgument(postbuilds[i])
self.WriteLn('%s: builddir := $(abs_builddir)' % QuoteSpaces(self.output))
self.WriteLn('%s: POSTBUILDS := %s' % (
QuoteSpaces(self.output), ' '.join(postbuilds)))
# A bundle directory depends on its dependencies such as bundle resources
# and bundle binary. When all dependencies have been built, the bundle
# needs to be packaged.
if self.is_mac_bundle:
# If the framework doesn't contain a binary, then nothing depends
# on the actions -- make the framework depend on them directly too.
self.WriteDependencyOnExtraOutputs(self.output, extra_outputs)
# Bundle dependencies. Note that the code below adds actions to this
# target, so if you move these two lines, move the lines below as well.
self.WriteList(map(QuoteSpaces, bundle_deps), 'BUNDLE_DEPS')
self.WriteLn('%s: $(BUNDLE_DEPS)' % QuoteSpaces(self.output))
# After the framework is built, package it. Needs to happen before
# postbuilds, since postbuilds depend on this.
if self.type in ('shared_library', 'loadable_module'):
self.WriteLn('\t@$(call do_cmd,mac_package_framework,,,%s)' %
self.xcode_settings.GetFrameworkVersion())
# Bundle postbuilds can depend on the whole bundle, so run them after
      # the bundle is packaged, not merely after the bundle binary is done.
if postbuilds:
self.WriteLn('\t@$(call do_postbuilds)')
postbuilds = [] # Don't write postbuilds for target's output.
# Needed by test/mac/gyptest-rebuild.py.
self.WriteLn('\t@true # No-op, used by tests')
# Since this target depends on binary and resources which are in
# nested subfolders, the framework directory will be older than
# its dependencies usually. To prevent this rule from executing
      # on every build (expensive, especially with postbuilds), explicitly
# update the time on the framework directory.
self.WriteLn('\t@touch -c %s' % QuoteSpaces(self.output))
if postbuilds:
assert not self.is_mac_bundle, ('Postbuilds for bundles should be done '
'on the bundle, not the binary (target \'%s\')' % self.target)
assert 'product_dir' not in spec, ('Postbuilds do not work with '
'custom product_dir')
if self.type == 'executable':
self.WriteLn('%s: LD_INPUTS := %s' % (
QuoteSpaces(self.output_binary),
' '.join(map(QuoteSpaces, link_deps))))
if self.toolset == 'host' and self.flavor == 'android':
self.WriteDoCmd([self.output_binary], link_deps, 'link_host',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd([self.output_binary], link_deps, 'link', part_of_all,
postbuilds=postbuilds)
elif self.type == 'static_library':
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in alink input filenames not supported (%s)" % link_dep)
if (self.flavor not in ('mac', 'openbsd', 'netbsd', 'win') and not
self.is_standalone_static_library):
self.WriteDoCmd([self.output_binary], link_deps, 'alink_thin',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd([self.output_binary], link_deps, 'alink', part_of_all,
postbuilds=postbuilds)
elif self.type == 'shared_library':
self.WriteLn('%s: LD_INPUTS := %s' % (
QuoteSpaces(self.output_binary),
' '.join(map(QuoteSpaces, link_deps))))
self.WriteDoCmd([self.output_binary], link_deps, 'solink', part_of_all,
postbuilds=postbuilds)
elif self.type == 'loadable_module':
for link_dep in link_deps:
assert ' ' not in link_dep, (
"Spaces in module input filenames not supported (%s)" % link_dep)
if self.toolset == 'host' and self.flavor == 'android':
self.WriteDoCmd([self.output_binary], link_deps, 'solink_module_host',
part_of_all, postbuilds=postbuilds)
else:
self.WriteDoCmd(
[self.output_binary], link_deps, 'solink_module', part_of_all,
postbuilds=postbuilds)
elif self.type == 'none':
# Write a stamp line.
self.WriteDoCmd([self.output_binary], deps, 'touch', part_of_all,
postbuilds=postbuilds)
else:
print "WARNING: no output for", self.type, target
# Add an alias for each target (if there are any outputs).
# Installable target aliases are created below.
if ((self.output and self.output != self.target) and
(self.type not in self._INSTALLABLE_TARGETS)):
self.WriteMakeRule([self.target], [self.output],
comment='Add target alias', phony = True)
if part_of_all:
self.WriteMakeRule(['all'], [self.target],
comment = 'Add target alias to "all" target.',
phony = True)
# Add special-case rules for our installable targets.
# 1) They need to install to the build dir or "product" dir.
# 2) They get shortcuts for building (e.g. "make chrome").
# 3) They are part of "make all".
if (self.type in self._INSTALLABLE_TARGETS or
self.is_standalone_static_library):
if self.type == 'shared_library':
file_desc = 'shared library'
elif self.type == 'static_library':
file_desc = 'static library'
else:
file_desc = 'executable'
install_path = self._InstallableTargetInstallPath()
installable_deps = [self.output]
if (self.flavor == 'mac' and not 'product_dir' in spec and
self.toolset == 'target'):
# On mac, products are created in install_path immediately.
assert install_path == self.output, '%s != %s' % (
install_path, self.output)
# Point the target alias to the final binary output.
self.WriteMakeRule([self.target], [install_path],
comment='Add target alias', phony = True)
if install_path != self.output:
assert not self.is_mac_bundle # See comment a few lines above.
self.WriteDoCmd([install_path], [self.output], 'copy',
comment = 'Copy this to the %s output path.' %
file_desc, part_of_all=part_of_all)
installable_deps.append(install_path)
if self.output != self.alias and self.alias != self.target:
self.WriteMakeRule([self.alias], installable_deps,
comment = 'Short alias for building this %s.' %
file_desc, phony = True)
if part_of_all:
self.WriteMakeRule(['all'], [install_path],
comment = 'Add %s to "all" target.' % file_desc,
phony = True)
def WriteList(self, value_list, variable=None, prefix='',
quoter=QuoteIfNecessary):
"""Write a variable definition that is a list of values.
E.g. WriteList(['a','b'], 'foo', prefix='blah') writes out
foo = blaha blahb
but in a pretty-printed style.
"""
values = ''
if value_list:
value_list = [quoter(prefix + l) for l in value_list]
values = ' \\\n\t' + ' \\\n\t'.join(value_list)
self.fp.write('%s :=%s\n\n' % (variable, values))
def WriteDoCmd(self, outputs, inputs, command, part_of_all, comment=None,
postbuilds=False):
"""Write a Makefile rule that uses do_cmd.
This makes the outputs dependent on the command line that was run,
as well as support the V= make command line flag.
"""
suffix = ''
if postbuilds:
assert ',' not in command
suffix = ',,1' # Tell do_cmd to honor $POSTBUILDS
self.WriteMakeRule(outputs, inputs,
actions = ['$(call do_cmd,%s%s)' % (command, suffix)],
comment = comment,
command = command,
force = True)
# Add our outputs to the list of targets we read depfiles from.
# all_deps is only used for deps file reading, and for deps files we replace
# spaces with ? because escaping doesn't work with make's $(sort) and
# other functions.
outputs = [QuoteSpaces(o, SPACE_REPLACEMENT) for o in outputs]
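    # Illustrative: an output named 'out/My App' would be recorded in all_deps
    # as 'out/My?App' (assuming SPACE_REPLACEMENT is '?', per the comment
    # above).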
self.WriteLn('all_deps += %s' % ' '.join(outputs))
def WriteMakeRule(self, outputs, inputs, actions=None, comment=None,
order_only=False, force=False, phony=False, command=None):
"""Write a Makefile rule, with some extra tricks.
outputs: a list of outputs for the rule (note: this is not directly
supported by make; see comments below)
inputs: a list of inputs for the rule
actions: a list of shell commands to run for the rule
comment: a comment to put in the Makefile above the rule (also useful
for making this Python script's code self-documenting)
order_only: if true, makes the dependency order-only
force: if true, include FORCE_DO_CMD as an order-only dep
phony: if true, the rule does not actually generate the named output, the
output is just a name to run the rule
command: (optional) command name to generate unambiguous labels
"""
outputs = map(QuoteSpaces, outputs)
inputs = map(QuoteSpaces, inputs)
if comment:
self.WriteLn('# ' + comment)
if phony:
self.WriteLn('.PHONY: ' + ' '.join(outputs))
if actions:
self.WriteLn("%s: TOOLSET := $(TOOLSET)" % outputs[0])
force_append = ' FORCE_DO_CMD' if force else ''
if order_only:
# Order only rule: Just write a simple rule.
# TODO(evanm): just make order_only a list of deps instead of this hack.
self.WriteLn('%s: | %s%s' %
(' '.join(outputs), ' '.join(inputs), force_append))
elif len(outputs) == 1:
# Regular rule, one output: Just write a simple rule.
self.WriteLn('%s: %s%s' % (outputs[0], ' '.join(inputs), force_append))
else:
# Regular rule, more than one output: Multiple outputs are tricky in
# make. We will write three rules:
# - All outputs depend on an intermediate file.
# - Make .INTERMEDIATE depend on the intermediate.
# - The intermediate file depends on the inputs and executes the
# actual command.
# - The intermediate recipe will 'touch' the intermediate file.
      # - The multi-output rule will have a do-nothing recipe.
intermediate = "%s.intermediate" % (command if command else self.target)
self.WriteLn('%s: %s' % (' '.join(outputs), intermediate))
      self.WriteLn('\t%s' % '@:')
self.WriteLn('%s: %s' % ('.INTERMEDIATE', intermediate))
self.WriteLn('%s: %s%s' %
(intermediate, ' '.join(inputs), force_append))
actions.insert(0, '$(call do_cmd,touch)')
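      # Illustrative generated rules for outputs ['a.h', 'a.cc'] built from
      # input 'a.idl' by a command named 'mygen' (all names assumed;
      # FORCE_DO_CMD appears only when force is true):
      #   a.h a.cc: mygen.intermediate
      #   	@:
      #   .INTERMEDIATE: mygen.intermediate
      #   mygen.intermediate: a.idl FORCE_DO_CMD
      #   	$(call do_cmd,touch)
      #   	... original actions ...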
if actions:
for action in actions:
self.WriteLn('\t%s' % action)
self.WriteLn()
def WriteAndroidNdkModuleRule(self, module_name, all_sources, link_deps):
"""Write a set of LOCAL_XXX definitions for Android NDK.
These variable definitions will be used by Android NDK but do nothing for
non-Android applications.
Arguments:
module_name: Android NDK module name, which must be unique among all
module names.
all_sources: A list of source files (will be filtered by Compilable).
link_deps: A list of link dependencies, which must be sorted in
the order from dependencies to dependents.
"""
if self.type not in ('executable', 'shared_library', 'static_library'):
return
self.WriteLn('# Variable definitions for Android applications')
self.WriteLn('include $(CLEAR_VARS)')
self.WriteLn('LOCAL_MODULE := ' + module_name)
self.WriteLn('LOCAL_CFLAGS := $(CFLAGS_$(BUILDTYPE)) '
'$(DEFS_$(BUILDTYPE)) '
# LOCAL_CFLAGS is applied to both of C and C++. There is
# no way to specify $(CFLAGS_C_$(BUILDTYPE)) only for C
# sources.
'$(CFLAGS_C_$(BUILDTYPE)) '
# $(INCS_$(BUILDTYPE)) includes the prefix '-I' while
# LOCAL_C_INCLUDES does not expect it. So put it in
# LOCAL_CFLAGS.
'$(INCS_$(BUILDTYPE))')
# LOCAL_CXXFLAGS is obsolete and LOCAL_CPPFLAGS is preferred.
self.WriteLn('LOCAL_CPPFLAGS := $(CFLAGS_CC_$(BUILDTYPE))')
self.WriteLn('LOCAL_C_INCLUDES :=')
self.WriteLn('LOCAL_LDLIBS := $(LDFLAGS_$(BUILDTYPE)) $(LIBS)')
# Detect the C++ extension.
cpp_ext = {'.cc': 0, '.cpp': 0, '.cxx': 0}
default_cpp_ext = '.cpp'
for filename in all_sources:
ext = os.path.splitext(filename)[1]
if ext in cpp_ext:
cpp_ext[ext] += 1
if cpp_ext[ext] > cpp_ext[default_cpp_ext]:
default_cpp_ext = ext
self.WriteLn('LOCAL_CPP_EXTENSION := ' + default_cpp_ext)
self.WriteList(map(self.Absolutify, filter(Compilable, all_sources)),
'LOCAL_SRC_FILES')
# Filter out those which do not match prefix and suffix and produce
# the resulting list without prefix and suffix.
def DepsToModules(deps, prefix, suffix):
modules = []
for filepath in deps:
filename = os.path.basename(filepath)
if filename.startswith(prefix) and filename.endswith(suffix):
modules.append(filename[len(prefix):-len(suffix)])
return modules
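    # Illustrative: DepsToModules(['out/libfoo.so', 'out/bar.a'], 'lib', '.so')
    # returns ['foo']; entries that do not match both prefix and suffix are
    # dropped.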
# Retrieve the default value of 'SHARED_LIB_SUFFIX'
params = {'flavor': 'linux'}
default_variables = {}
CalculateVariables(default_variables, params)
self.WriteList(
DepsToModules(link_deps,
generator_default_variables['SHARED_LIB_PREFIX'],
default_variables['SHARED_LIB_SUFFIX']),
'LOCAL_SHARED_LIBRARIES')
self.WriteList(
DepsToModules(link_deps,
generator_default_variables['STATIC_LIB_PREFIX'],
generator_default_variables['STATIC_LIB_SUFFIX']),
'LOCAL_STATIC_LIBRARIES')
if self.type == 'executable':
self.WriteLn('include $(BUILD_EXECUTABLE)')
elif self.type == 'shared_library':
self.WriteLn('include $(BUILD_SHARED_LIBRARY)')
elif self.type == 'static_library':
self.WriteLn('include $(BUILD_STATIC_LIBRARY)')
self.WriteLn()
def WriteLn(self, text=''):
self.fp.write(text + '\n')
def GetSortedXcodeEnv(self, additional_settings=None):
return gyp.xcode_emulation.GetSortedXcodeEnv(
self.xcode_settings, "$(abs_builddir)",
os.path.join("$(abs_srcdir)", self.path), "$(BUILDTYPE)",
additional_settings)
def GetSortedXcodePostbuildEnv(self):
# CHROMIUM_STRIP_SAVE_FILE is a chromium-specific hack.
# TODO(thakis): It would be nice to have some general mechanism instead.
strip_save_file = self.xcode_settings.GetPerTargetSetting(
'CHROMIUM_STRIP_SAVE_FILE', '')
# Even if strip_save_file is empty, explicitly write it. Else a postbuild
# might pick up an export from an earlier target.
return self.GetSortedXcodeEnv(
additional_settings={'CHROMIUM_STRIP_SAVE_FILE': strip_save_file})
def WriteSortedXcodeEnv(self, target, env):
for k, v in env:
# For
# foo := a\ b
# the escaped space does the right thing. For
# export foo := a\ b
      # it does not -- the backslash is written to the env as a literal
      # character.
# So don't escape spaces in |env[k]|.
self.WriteLn('%s: export %s := %s' % (QuoteSpaces(target), k, v))
def Objectify(self, path):
"""Convert a path to its output directory form."""
if '$(' in path:
path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/' % self.toolset)
    if '$(obj)' not in path:
path = '$(obj).%s/$(TARGET)/%s' % (self.toolset, path)
return path
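  # Illustrative: with toolset 'target', Objectify('foo/bar.o') returns
  # '$(obj).target/$(TARGET)/foo/bar.o', while a path already containing
  # '$(obj)/' only has that prefix rewritten.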
def Pchify(self, path, lang):
"""Convert a prefix header path to its output directory form."""
path = self.Absolutify(path)
if '$(' in path:
path = path.replace('$(obj)/', '$(obj).%s/$(TARGET)/pch-%s' %
(self.toolset, lang))
return path
return '$(obj).%s/$(TARGET)/pch-%s/%s' % (self.toolset, lang, path)
def Absolutify(self, path):
"""Convert a subdirectory-relative path into a base-relative path.
Skips over paths that contain variables."""
if '$(' in path:
# Don't call normpath in this case, as it might collapse the
# path too aggressively if it features '..'. However it's still
# important to strip trailing slashes.
return path.rstrip('/')
return os.path.normpath(os.path.join(self.path, path))
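  # Illustrative: with self.path == 'foo/bar', Absolutify('baz.cc') returns
  # 'foo/bar/baz.cc'; Absolutify('$(builddir)/x/') is left as '$(builddir)/x'
  # apart from the stripped trailing slash.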
def ExpandInputRoot(self, template, expansion, dirname):
if '%(INPUT_ROOT)s' not in template and '%(INPUT_DIRNAME)s' not in template:
return template
path = template % {
'INPUT_ROOT': expansion,
'INPUT_DIRNAME': dirname,
}
return path
def _InstallableTargetInstallPath(self):
"""Returns the location of the final output for an installable target."""
# Xcode puts shared_library results into PRODUCT_DIR, and some gyp files
# rely on this. Emulate this behavior for mac.
# XXX(TooTallNate): disabling this code since we don't want this behavior...
#if (self.type == 'shared_library' and
# (self.flavor != 'mac' or self.toolset != 'target')):
# # Install all shared libs into a common directory (per toolset) for
# # convenient access with LD_LIBRARY_PATH.
# return '$(builddir)/lib.%s/%s' % (self.toolset, self.alias)
return '$(builddir)/' + self.alias
def WriteAutoRegenerationRule(params, root_makefile, makefile_name,
build_files):
"""Write the target to regenerate the Makefile."""
options = params['options']
build_files_args = [gyp.common.RelativePath(filename, options.toplevel_dir)
for filename in params['build_files_arg']]
gyp_binary = gyp.common.FixIfRelativePath(params['gyp_binary'],
options.toplevel_dir)
if not gyp_binary.startswith(os.sep):
gyp_binary = os.path.join('.', gyp_binary)
root_makefile.write(
"quiet_cmd_regen_makefile = ACTION Regenerating $@\n"
"cmd_regen_makefile = cd $(srcdir); %(cmd)s\n"
"%(makefile_name)s: %(deps)s\n"
"\t$(call do_cmd,regen_makefile)\n\n" % {
'makefile_name': makefile_name,
'deps': ' '.join(map(Sourceify, build_files)),
'cmd': gyp.common.EncodePOSIXShellList(
[gyp_binary, '-fmake'] +
gyp.RegenerateFlags(options) +
build_files_args)})
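# Illustrative output of WriteAutoRegenerationRule, assuming gyp_binary is
# './gyp' and the only build file is 'all.gyp' (regeneration flags elided):
#   quiet_cmd_regen_makefile = ACTION Regenerating $@
#   cmd_regen_makefile = cd $(srcdir); ./gyp -fmake ... all.gyp
#   Makefile: all.gyp
#   	$(call do_cmd,regen_makefile)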
def PerformBuild(data, configurations, params):
options = params['options']
for config in configurations:
arguments = ['make']
if options.toplevel_dir and options.toplevel_dir != '.':
arguments += '-C', options.toplevel_dir
arguments.append('BUILDTYPE=' + config)
print 'Building [%s]: %s' % (config, arguments)
subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params):
options = params['options']
flavor = gyp.common.GetFlavor(params)
generator_flags = params.get('generator_flags', {})
builddir_name = generator_flags.get('output_dir', 'out')
android_ndk_version = generator_flags.get('android_ndk_version', None)
default_target = generator_flags.get('default_target', 'all')
def CalculateMakefilePath(build_file, base_name):
"""Determine where to write a Makefile for a given gyp file."""
# Paths in gyp files are relative to the .gyp file, but we want
# paths relative to the source root for the master makefile. Grab
# the path of the .gyp file as the base to relativize against.
# E.g. "foo/bar" when we're constructing targets for "foo/bar/baz.gyp".
base_path = gyp.common.RelativePath(os.path.dirname(build_file),
options.depth)
# We write the file in the base_path directory.
output_file = os.path.join(options.depth, base_path, base_name)
if options.generator_output:
output_file = os.path.join(
options.depth, options.generator_output, base_path, base_name)
base_path = gyp.common.RelativePath(os.path.dirname(build_file),
options.toplevel_dir)
return base_path, output_file
# TODO: search for the first non-'Default' target. This can go
# away when we add verification that all targets have the
# necessary configurations.
default_configuration = None
toolsets = set([target_dicts[target]['toolset'] for target in target_list])
for target in target_list:
spec = target_dicts[target]
if spec['default_configuration'] != 'Default':
default_configuration = spec['default_configuration']
break
if not default_configuration:
default_configuration = 'Default'
srcdir = '.'
makefile_name = 'Makefile' + options.suffix
makefile_path = os.path.join(options.toplevel_dir, makefile_name)
if options.generator_output:
global srcdir_prefix
makefile_path = os.path.join(
options.toplevel_dir, options.generator_output, makefile_name)
srcdir = gyp.common.RelativePath(srcdir, options.generator_output)
srcdir_prefix = '$(srcdir)/'
  flock_command = 'flock'
copy_archive_arguments = '-af'
header_params = {
'default_target': default_target,
'builddir': builddir_name,
'default_configuration': default_configuration,
'flock': flock_command,
'flock_index': 1,
'link_commands': LINK_COMMANDS_LINUX,
'extra_commands': '',
'srcdir': srcdir,
'copy_archive_args': copy_archive_arguments,
}
if flavor == 'mac':
flock_command = './gyp-mac-tool flock'
header_params.update({
'flock': flock_command,
'flock_index': 2,
'link_commands': LINK_COMMANDS_MAC,
'extra_commands': SHARED_HEADER_MAC_COMMANDS,
})
elif flavor == 'android':
header_params.update({
'link_commands': LINK_COMMANDS_ANDROID,
})
elif flavor == 'solaris':
header_params.update({
'flock': './gyp-flock-tool flock',
'flock_index': 2,
})
elif flavor == 'freebsd':
# Note: OpenBSD has sysutils/flock. lockf seems to be FreeBSD specific.
header_params.update({
'flock': 'lockf',
})
elif flavor == 'openbsd':
copy_archive_arguments = '-pPRf'
header_params.update({
'copy_archive_args': copy_archive_arguments,
})
elif flavor == 'aix':
copy_archive_arguments = '-pPRf'
header_params.update({
'copy_archive_args': copy_archive_arguments,
'link_commands': LINK_COMMANDS_AIX,
'flock': './gyp-flock-tool flock',
'flock_index': 2,
})
header_params.update({
'CC.target': GetEnvironFallback(('CC_target', 'CC'), '$(CC)'),
'AR.target': GetEnvironFallback(('AR_target', 'AR'), '$(AR)'),
'CXX.target': GetEnvironFallback(('CXX_target', 'CXX'), '$(CXX)'),
'LINK.target': GetEnvironFallback(('LINK_target', 'LINK'), '$(LINK)'),
'CC.host': GetEnvironFallback(('CC_host',), 'gcc'),
'AR.host': GetEnvironFallback(('AR_host',), 'ar'),
'CXX.host': GetEnvironFallback(('CXX_host',), 'g++'),
'LINK.host': GetEnvironFallback(('LINK_host',), '$(CXX.host)'),
})
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings_array = data[build_file].get('make_global_settings', [])
wrappers = {}
for key, value in make_global_settings_array:
if key.endswith('_wrapper'):
wrappers[key[:-len('_wrapper')]] = '$(abspath %s)' % value
make_global_settings = ''
for key, value in make_global_settings_array:
if re.match('.*_wrapper', key):
continue
if value[0] != '$':
value = '$(abspath %s)' % value
wrapper = wrappers.get(key)
if wrapper:
value = '%s %s' % (wrapper, value)
del wrappers[key]
if key in ('CC', 'CC.host', 'CXX', 'CXX.host'):
make_global_settings += (
'ifneq (,$(filter $(origin %s), undefined default))\n' % key)
# Let gyp-time envvars win over global settings.
env_key = key.replace('.', '_') # CC.host -> CC_host
if env_key in os.environ:
value = os.environ[env_key]
make_global_settings += ' %s = %s\n' % (key, value)
make_global_settings += 'endif\n'
else:
make_global_settings += '%s ?= %s\n' % (key, value)
# TODO(ukai): define cmd when only wrapper is specified in
# make_global_settings.
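  # Illustrative: an entry ('CC', '/usr/bin/clang') (path assumed) produces
  #   ifneq (,$(filter $(origin CC), undefined default))
  #     CC = $(abspath /usr/bin/clang)
  #   endif
  # while a non-compiler key such as ('LD', 'ld.gold') produces
  #   LD ?= $(abspath ld.gold)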
header_params['make_global_settings'] = make_global_settings
gyp.common.EnsureDirExists(makefile_path)
root_makefile = open(makefile_path, 'w')
root_makefile.write(SHARED_HEADER % header_params)
  # Currently any version has the same effect, but in the future the
  # behavior could differ.
if android_ndk_version:
root_makefile.write(
'# Define LOCAL_PATH for build of Android applications.\n'
'LOCAL_PATH := $(call my-dir)\n'
'\n')
for toolset in toolsets:
root_makefile.write('TOOLSET := %s\n' % toolset)
WriteRootHeaderSuffixRules(root_makefile)
# Put build-time support tools next to the root Makefile.
dest_path = os.path.dirname(makefile_path)
gyp.common.CopyTool(flavor, dest_path)
# Find the list of targets that derive from the gyp file(s) being built.
needed_targets = set()
for build_file in params['build_files']:
for target in gyp.common.AllTargets(target_list, target_dicts, build_file):
needed_targets.add(target)
build_files = set()
include_list = set()
for qualified_target in target_list:
build_file, target, toolset = gyp.common.ParseQualifiedTarget(
qualified_target)
this_make_global_settings = data[build_file].get('make_global_settings', [])
assert make_global_settings_array == this_make_global_settings, (
"make_global_settings needs to be the same for all targets. %s vs. %s" %
(this_make_global_settings, make_global_settings))
build_files.add(gyp.common.RelativePath(build_file, options.toplevel_dir))
included_files = data[build_file]['included_files']
for included_file in included_files:
# The included_files entries are relative to the dir of the build file
# that included them, so we have to undo that and then make them relative
# to the root dir.
relative_include_file = gyp.common.RelativePath(
gyp.common.UnrelativePath(included_file, build_file),
options.toplevel_dir)
abs_include_file = os.path.abspath(relative_include_file)
# If the include file is from the ~/.gyp dir, we should use absolute path
# so that relocating the src dir doesn't break the path.
if (params['home_dot_gyp'] and
abs_include_file.startswith(params['home_dot_gyp'])):
build_files.add(abs_include_file)
else:
build_files.add(relative_include_file)
base_path, output_file = CalculateMakefilePath(build_file,
target + '.' + toolset + options.suffix + '.mk')
spec = target_dicts[qualified_target]
configs = spec['configurations']
if flavor == 'mac':
gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[build_file], spec)
writer = MakefileWriter(generator_flags, flavor)
writer.Write(qualified_target, base_path, output_file, spec, configs,
part_of_all=qualified_target in needed_targets)
# Our root_makefile lives at the source root. Compute the relative path
# from there to the output_file for including.
mkfile_rel_path = gyp.common.RelativePath(output_file,
os.path.dirname(makefile_path))
include_list.add(mkfile_rel_path)
# Write out per-gyp (sub-project) Makefiles.
depth_rel_path = gyp.common.RelativePath(options.depth, os.getcwd())
for build_file in build_files:
# The paths in build_files were relativized above, so undo that before
# testing against the non-relativized items in target_list and before
# calculating the Makefile path.
build_file = os.path.join(depth_rel_path, build_file)
gyp_targets = [target_dicts[target]['target_name'] for target in target_list
if target.startswith(build_file) and
target in needed_targets]
# Only generate Makefiles for gyp files with targets.
if not gyp_targets:
continue
base_path, output_file = CalculateMakefilePath(build_file,
os.path.splitext(os.path.basename(build_file))[0] + '.Makefile')
makefile_rel_path = gyp.common.RelativePath(os.path.dirname(makefile_path),
os.path.dirname(output_file))
writer.WriteSubMake(output_file, makefile_rel_path, gyp_targets,
builddir_name)
# Write out the sorted list of includes.
root_makefile.write('\n')
for include_file in sorted(include_list):
# We wrap each .mk include in an if statement so users can tell make to
# not load a file by setting NO_LOAD. The below make code says, only
# load the .mk file if the .mk filename doesn't start with a token in
# NO_LOAD.
root_makefile.write(
"ifeq ($(strip $(foreach prefix,$(NO_LOAD),\\\n"
" $(findstring $(join ^,$(prefix)),\\\n"
" $(join ^," + include_file + ")))),)\n")
root_makefile.write(" include " + include_file + "\n")
root_makefile.write("endif\n")
root_makefile.write('\n')
if (not generator_flags.get('standalone')
and generator_flags.get('auto_regeneration', True)):
WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
root_makefile.write(SHARED_FOOTER)
root_makefile.close()
|
mit
|
tasoc/photometry
|
notes/halo_shift.py
|
1
|
2629
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
.. codeauthor:: Rasmus Handberg <rasmush@phys.au.dk>
"""
import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
import sqlite3
import os.path
#------------------------------------------------------------------------------
def mag2flux(mag):
"""
Convert from magnitude to flux using scaling relation from
aperture photometry. This is an estimate.
Parameters:
mag (float): Magnitude in TESS band.
Returns:
float: Corresponding flux value
"""
return 10**(-0.4*(mag - 20.54))
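# Quick sanity check (illustrative): mag2flux(20.54) == 1.0 by construction,
# and mag2flux(10.0) is roughly 1.6e4.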
if __name__ == '__main__':
pass
folder = r'C:\Users\au195407\Documents\tess_data_local\S01_DR01-2114872'
conn = sqlite3.connect(os.path.join(folder, 'todo.sqlite'))
conn.row_factory = sqlite3.Row
cursor = conn.cursor()
cursor.execute("SELECT todolist.starid,tmag,onedge,edgeflux FROM todolist INNER JOIN diagnostics ON todolist.priority=diagnostics.priority;")
results = cursor.fetchall()
starid = np.array([row['starid'] for row in results], dtype='int64')
tmag = np.array([row['tmag'] for row in results])
OnEdge = np.array([np.NaN if row['onedge'] is None else row['onedge'] for row in results])
EdgeFlux = np.array([np.NaN if row['edgeflux'] is None else row['edgeflux'] for row in results])
cursor.close()
conn.close()
print(tmag)
print(OnEdge)
print(EdgeFlux)
tmag_limit = 3.0
flux_limit = 1e-3
indx = (OnEdge > 0)
indx_halo = (tmag <= tmag_limit) & (OnEdge > 0) & (EdgeFlux/mag2flux(tmag) > flux_limit)
indx_spec = (starid == 382420379)
print(starid[indx_halo])
fig = plt.figure()
ax = fig.add_subplot(111)
plt.scatter(tmag[indx], OnEdge[indx], alpha=0.5)
plt.scatter(tmag[indx_halo], OnEdge[indx_halo], marker='x', c='r')
plt.xlim(xmax=tmag_limit)
plt.ylim(ymin=0)
ax.set_xlabel('Tmag')
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(tmag[indx], EdgeFlux[indx], alpha=0.5)
ax.set_xlim(xmax=5.0)
#ax.set_ylim(ymin=0.0)
ax.set_yscale('log')
ax.set_xlabel('Tmag')
fig = plt.figure()
ax = fig.add_subplot(111)
plt.scatter(tmag[indx], EdgeFlux[indx]/mag2flux(tmag[indx]), alpha=0.5)
plt.scatter(tmag[indx_halo], EdgeFlux[indx_halo]/mag2flux(tmag[indx_halo]), alpha=0.3, marker='x', c='r')
plt.scatter(tmag[indx_spec], EdgeFlux[indx_spec]/mag2flux(tmag[indx_spec]), alpha=0.3, marker='o', c='g', lw=2)
plt.plot([2.0, 6.0], [1e-3, 2e-2], 'r--')
plt.axhline(flux_limit, c='r', ls='--')
plt.axvline(tmag_limit, c='r', ls='--')
#plt.xlim(xmax=tmag_limit)
ax.set_ylim(ymin=1e-5, ymax=1)
ax.set_yscale('log')
ax.set_ylabel('Edge Flux / Expected Total Flux')
ax.set_xlabel('Tmag')
plt.show()
|
gpl-3.0
|
roadmapper/ansible
|
test/units/modules/network/vyos/test_vyos_ping.py
|
52
|
4225
|
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.vyos import vyos_ping
from units.modules.utils import set_module_args
from .vyos_module import TestVyosModule, load_fixture
class TestVyosPingModule(TestVyosModule):
module = vyos_ping
def setUp(self):
super(TestVyosPingModule, self).setUp()
self.mock_run_commands = patch('ansible.modules.network.vyos.vyos_ping.run_commands')
self.run_commands = self.mock_run_commands.start()
def tearDown(self):
super(TestVyosPingModule, self).tearDown()
self.mock_run_commands.stop()
def load_fixtures(self, commands=None):
def load_from_file(*args, **kwargs):
commands = kwargs['commands']
output = list()
for command in commands:
filename = str(command).split(' | ')[0].replace(' ', '_')
output.append(load_fixture('vyos_ping_%s' % filename))
return output
self.run_commands.side_effect = load_from_file
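        # Illustrative: a command string such as 'ping 10.10.10.10 count 2'
        # (format assumed) maps to the fixture file
        # 'vyos_ping_ping_10.10.10.10_count_2'.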
def test_vyos_ping_expected_success(self):
''' Test for successful pings when destination should be reachable '''
set_module_args(dict(count=2, dest="10.10.10.10"))
self.execute_module()
def test_vyos_ping_expected_failure(self):
''' Test for unsuccessful pings when destination should not be reachable '''
set_module_args(dict(count=4, dest="10.10.10.20", state="absent"))
self.execute_module()
def test_vyos_ping_unexpected_success(self):
''' Test for successful pings when destination should not be reachable - FAIL. '''
set_module_args(dict(count=2, dest="10.10.10.10", state="absent"))
self.execute_module(failed=True)
def test_vyos_ping_unexpected_failure(self):
''' Test for unsuccessful pings when destination should be reachable - FAIL. '''
set_module_args(dict(count=4, dest="10.10.10.20"))
self.execute_module(failed=True)
def test_vyos_ping_failure_stats(self):
'''Test for asserting stats when ping fails'''
set_module_args(dict(count=4, dest="10.10.10.20"))
result = self.execute_module(failed=True)
self.assertEqual(result['packet_loss'], '100%')
self.assertEqual(result['packets_rx'], 0)
self.assertEqual(result['packets_tx'], 4)
def test_vyos_ping_success_stats(self):
'''Test for asserting stats when ping passes'''
set_module_args(dict(count=2, dest="10.10.10.10"))
result = self.execute_module()
self.assertEqual(result['packet_loss'], '0%')
self.assertEqual(result['packets_rx'], 2)
self.assertEqual(result['packets_tx'], 2)
self.assertEqual(result['rtt']['min'], 12)
self.assertEqual(result['rtt']['avg'], 17)
self.assertEqual(result['rtt']['max'], 22)
self.assertEqual(result['rtt']['mdev'], 10)
def test_vyos_ping_success_stats_with_options(self):
set_module_args(dict(count=10, ttl=128, size=512, dest="10.10.10.11"))
result = self.execute_module()
self.assertEqual(result['packet_loss'], '0%')
self.assertEqual(result['packets_rx'], 10)
self.assertEqual(result['packets_tx'], 10)
self.assertEqual(result['rtt']['min'], 1)
self.assertEqual(result['rtt']['avg'], 3)
self.assertEqual(result['rtt']['max'], 21)
self.assertEqual(result['rtt']['mdev'], 5)
|
gpl-3.0
|
mrrrgn/olympia
|
lib/constants.py
|
37
|
6488
|
# -*- coding: utf8 -*-
from tower import ugettext_lazy as _lazy
ALL_CURRENCIES = {
'AED': _lazy(u'United Arab Emirates Dirham'),
'AFN': _lazy(u'Afghanistan Afghani'),
'ALL': _lazy(u'Albania Lek'),
'AMD': _lazy(u'Armenia Dram'),
'ANG': _lazy(u'Netherlands Antilles Guilder'),
'AOA': _lazy(u'Angola Kwanza'),
'ARS': _lazy(u'Argentina Peso'),
'AUD': _lazy(u'Australia Dollar'),
'AWG': _lazy(u'Aruba Guilder'),
'AZN': _lazy(u'Azerbaijan New Manat'),
'BAM': _lazy(u'Bosnia and Herzegovina Convertible Marka'),
'BBD': _lazy(u'Barbados Dollar'),
'BDT': _lazy(u'Bangladesh Taka'),
'BGN': _lazy(u'Bulgaria Lev'),
'BHD': _lazy(u'Bahrain Dinar'),
'BIF': _lazy(u'Burundi Franc'),
'BMD': _lazy(u'Bermuda Dollar'),
'BND': _lazy(u'Brunei Darussalam Dollar'),
'BOB': _lazy(u'Bolivia Boliviano'),
'BRL': _lazy(u'Brazil Real'),
'BSD': _lazy(u'Bahamas Dollar'),
'BTN': _lazy(u'Bhutan Ngultrum'),
'BWP': _lazy(u'Botswana Pula'),
'BYR': _lazy(u'Belarus Ruble'),
'BZD': _lazy(u'Belize Dollar'),
'CAD': _lazy(u'Canada Dollar'),
'CDF': _lazy(u'Congo/Kinshasa Franc'),
'CHF': _lazy(u'Switzerland Franc'),
'CLP': _lazy(u'Chile Peso'),
'CNY': _lazy(u'China Yuan Renminbi'),
'COP': _lazy(u'Colombia Peso'),
'CRC': _lazy(u'Costa Rica Colon'),
'CUC': _lazy(u'Cuba Convertible Peso'),
'CUP': _lazy(u'Cuba Peso'),
'CVE': _lazy(u'Cape Verde Escudo'),
'CZK': _lazy(u'Czech Republic Koruna'),
'DJF': _lazy(u'Djibouti Franc'),
'DKK': _lazy(u'Denmark Krone'),
'DOP': _lazy(u'Dominican Republic Peso'),
'DZD': _lazy(u'Algeria Dinar'),
'EGP': _lazy(u'Egypt Pound'),
'ERN': _lazy(u'Eritrea Nakfa'),
'ETB': _lazy(u'Ethiopia Birr'),
'EUR': _lazy(u'Euro Member Countries'),
'FJD': _lazy(u'Fiji Dollar'),
'FKP': _lazy(u'Falkland Islands (Malvinas) Pound'),
'GBP': _lazy(u'United Kingdom Pound'),
'GEL': _lazy(u'Georgia Lari'),
'GGP': _lazy(u'Guernsey Pound'),
'GHS': _lazy(u'Ghana Cedi'),
'GIP': _lazy(u'Gibraltar Pound'),
'GMD': _lazy(u'Gambia Dalasi'),
'GNF': _lazy(u'Guinea Franc'),
'GTQ': _lazy(u'Guatemala Quetzal'),
'GYD': _lazy(u'Guyana Dollar'),
'HKD': _lazy(u'Hong Kong Dollar'),
'HNL': _lazy(u'Honduras Lempira'),
'HRK': _lazy(u'Croatia Kuna'),
'HTG': _lazy(u'Haiti Gourde'),
'HUF': _lazy(u'Hungary Forint'),
'IDR': _lazy(u'Indonesia Rupiah'),
'ILS': _lazy(u'Israel Shekel'),
'IMP': _lazy(u'Isle of Man Pound'),
'INR': _lazy(u'India Rupee'),
'IQD': _lazy(u'Iraq Dinar'),
'IRR': _lazy(u'Iran Rial'),
'ISK': _lazy(u'Iceland Krona'),
'JEP': _lazy(u'Jersey Pound'),
'JMD': _lazy(u'Jamaica Dollar'),
'JOD': _lazy(u'Jordan Dinar'),
'JPY': _lazy(u'Japan Yen'),
'KES': _lazy(u'Kenya Shilling'),
'KGS': _lazy(u'Kyrgyzstan Som'),
'KHR': _lazy(u'Cambodia Riel'),
'KMF': _lazy(u'Comoros Franc'),
'KPW': _lazy(u'Korea (North) Won'),
'KRW': _lazy(u'Korea (South) Won'),
'KWD': _lazy(u'Kuwait Dinar'),
'KYD': _lazy(u'Cayman Islands Dollar'),
'KZT': _lazy(u'Kazakhstan Tenge'),
'LAK': _lazy(u'Laos Kip'),
'LBP': _lazy(u'Lebanon Pound'),
'LKR': _lazy(u'Sri Lanka Rupee'),
'LRD': _lazy(u'Liberia Dollar'),
'LSL': _lazy(u'Lesotho Loti'),
'LTL': _lazy(u'Lithuania Litas'),
'LVL': _lazy(u'Latvia Lat'),
'LYD': _lazy(u'Libya Dinar'),
'MAD': _lazy(u'Morocco Dirham'),
'MDL': _lazy(u'Moldova Leu'),
'MGA': _lazy(u'Madagascar Ariary'),
'MKD': _lazy(u'Macedonia Denar'),
'MMK': _lazy(u'Myanmar (Burma) Kyat'),
'MNT': _lazy(u'Mongolia Tughrik'),
'MOP': _lazy(u'Macau Pataca'),
'MRO': _lazy(u'Mauritania Ouguiya'),
'MUR': _lazy(u'Mauritius Rupee'),
'MVR': _lazy(u'Maldives (Maldive Islands) Rufiyaa'),
'MWK': _lazy(u'Malawi Kwacha'),
'MXN': _lazy(u'Mexico Peso'),
'MYR': _lazy(u'Malaysia Ringgit'),
'MZN': _lazy(u'Mozambique Metical'),
'NAD': _lazy(u'Namibia Dollar'),
'NGN': _lazy(u'Nigeria Naira'),
'NIO': _lazy(u'Nicaragua Cordoba'),
'NOK': _lazy(u'Norway Krone'),
'NPR': _lazy(u'Nepal Rupee'),
'NZD': _lazy(u'New Zealand Dollar'),
'OMR': _lazy(u'Oman Rial'),
'PAB': _lazy(u'Panama Balboa'),
'PEN': _lazy(u'Peru Nuevo Sol'),
'PGK': _lazy(u'Papua New Guinea Kina'),
'PHP': _lazy(u'Philippines Peso'),
'PKR': _lazy(u'Pakistan Rupee'),
'PLN': _lazy(u'Poland Zloty'),
'PYG': _lazy(u'Paraguay Guarani'),
'QAR': _lazy(u'Qatar Riyal'),
'RON': _lazy(u'Romania New Leu'),
'RSD': _lazy(u'Serbia Dinar'),
'RUB': _lazy(u'Russia Ruble'),
'RWF': _lazy(u'Rwanda Franc'),
'SAR': _lazy(u'Saudi Arabia Riyal'),
'SBD': _lazy(u'Solomon Islands Dollar'),
'SCR': _lazy(u'Seychelles Rupee'),
'SDG': _lazy(u'Sudan Pound'),
'SEK': _lazy(u'Sweden Krona'),
'SGD': _lazy(u'Singapore Dollar'),
'SHP': _lazy(u'Saint Helena Pound'),
'SLL': _lazy(u'Sierra Leone Leone'),
'SOS': _lazy(u'Somalia Shilling'),
'SPL': _lazy(u'Seborga Luigino'),
'SRD': _lazy(u'Suriname Dollar'),
'STD': _lazy(u'São Tomé and Príncipe Dobra'),
'SVC': _lazy(u'El Salvador Colon'),
'SYP': _lazy(u'Syria Pound'),
'SZL': _lazy(u'Swaziland Lilangeni'),
'THB': _lazy(u'Thailand Baht'),
'TJS': _lazy(u'Tajikistan Somoni'),
'TMT': _lazy(u'Turkmenistan Manat'),
'TND': _lazy(u'Tunisia Dinar'),
'TOP': _lazy("Tonga Pa'anga"),
'TRY': _lazy(u'Turkey Lira'),
'TTD': _lazy(u'Trinidad and Tobago Dollar'),
'TVD': _lazy(u'Tuvalu Dollar'),
'TWD': _lazy(u'Taiwan New Dollar'),
'TZS': _lazy(u'Tanzania Shilling'),
'UAH': _lazy(u'Ukraine Hryvna'),
'UGX': _lazy(u'Uganda Shilling'),
'USD': _lazy(u'United States Dollar'),
'UYU': _lazy(u'Uruguay Peso'),
'UZS': _lazy(u'Uzbekistan Som'),
'VEF': _lazy(u'Venezuela Bolivar'),
'VND': _lazy(u'Viet Nam Dong'),
'VUV': _lazy(u'Vanuatu Vatu'),
'WST': _lazy(u'Samoa Tala'),
'XAF': _lazy(u'Communauté Financière Africaine (BEAC) CFA Franc BEAC'),
'XCD': _lazy(u'East Caribbean Dollar'),
'XDR': _lazy(u'International Monetary Fund (IMF) Special Drawing Rights'),
'XOF': _lazy(u'Communauté Financière Africaine (BCEAO) Franc'),
'XPF': _lazy(u'Comptoirs Français du Pacifique (CFP) Franc'),
'YER': _lazy(u'Yemen Rial'),
'ZAR': _lazy(u'South Africa Rand'),
'ZMW': _lazy(u'Zambia Kwacha'),
'ZWD': _lazy(u'Zimbabwe Dollar')
}
|
bsd-3-clause
|
CacaoMovil/guia-de-cacao-django
|
cacao_app/config/common.py
|
1
|
11423
|
# -*- coding: utf-8 -*-
"""
Django settings for cacao_app project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from os.path import join, dirname, abspath
from configurations import Configuration, values
BASE_DIR = dirname(dirname(abspath(__file__)))
class Common(Configuration):
# APP CONFIGURATION
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# redirects app
'django.contrib.redirects',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'suitlocale',
'suit',
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'crispy_forms',
'allauth',
# 'allauth.account',
'sorl.thumbnail',
'envelope',
'solo',
'django_perseus',
'rest_framework',
'ckeditor',
'widget_tweaks',
'wkhtmltopdf',
'taggit',
'google_cse',
)
# Apps specific for this project go here.
LOCAL_APPS = (
'pdf_kit',
'cacao',
'configuracion',
'event',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# END APP CONFIGURATION
# MIDDLEWARE CONFIGURATION
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
# 'djangosecure.middleware.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
# redirect middleware
'django.contrib.redirects.middleware.RedirectFallbackMiddleware',
)
# END MIDDLEWARE CONFIGURATION
# MIGRATIONS CONFIGURATION
MIGRATION_MODULES = {
'sites': 'contrib.sites.migrations'
}
# END MIGRATIONS CONFIGURATION
# DEBUG
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = values.BooleanValue(False)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
# END DEBUG
# SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
# In production, this is changed to a values.SecretValue() setting
SECRET_KEY = "CHANGEME!!!"
# END SECRET CONFIGURATION
# FIXTURE CONFIGURATION
# See:
# https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
join(BASE_DIR, 'fixtures'),
)
# END FIXTURE CONFIGURATION
# EMAIL CONFIGURATION
EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend')
# https://docs.djangoproject.com/en/1.10/ref/settings/#default-from-email
DEFAULT_FROM_EMAIL = values.SecretValue()
# END EMAIL CONFIGURATION
# MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = values.SingleNestedTupleValue((
('Alice', 'alice@localhost'),
('Bob', 'bob@localhost'),
))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# END MANAGER CONFIGURATION
# DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = values.DatabaseURLValue('postgres://localhost/cacao_app')
# END DATABASE CONFIGURATION
# CACHING
    # Do this here because, thanks to django-pylibmc-sasl and pylibmc,
    # memcacheify (used on Heroku) is painful to install on Windows.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
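    # Hedged sketch only (assumption, not part of this settings module): a production
    # deployment would typically point the same setting at memcached, e.g.
    #   CACHES = {
    #       'default': {
    #           'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
    #           'LOCATION': '127.0.0.1:11211',
    #       }
    #   }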
# END CACHING
# GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
TIME_ZONE = 'America/Los_Angeles'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'es-NI'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# END GENERAL CONFIGURATION
# TEMPLATE CONFIGURATION
# See:
# https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
"allauth.account.context_processors.account",
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
        # Your stuff: custom template context processors go here
'context.guia_items',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
join(BASE_DIR, 'templates'),
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See:
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
join(BASE_DIR, 'static'),
)
# See:
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# END STATIC FILE CONFIGURATION
# MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = join(BASE_DIR, 'media')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# END MEDIA CONFIGURATION
# URL Configuration
ROOT_URLCONF = 'urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'wsgi.application'
# End URL Configuration
# AUTHENTICATION CONFIGURATION
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = "username"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
# END AUTHENTICATION CONFIGURATION
# Custom user app defaults
# Select the correct user model
LOGIN_REDIRECT_URL = "/"
LOGIN_URL = "account_login"
# END Custom user app defaults
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = "slugify.slugify"
# END SLUGLIFIER
# LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
    # Django REST Framework: serve JSON only, which hides the browsable API docs
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
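    # For reference (an assumption about intent, not code from this repo): adding
    # 'rest_framework.renderers.BrowsableAPIRenderer' back to the tuple above would
    # re-enable DRF's self-documenting browsable API.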
# END LOGGING CONFIGURATION
# Your common stuff: Below this line define 3rd party library settings
SUIT_CONFIG = {
'ADMIN_NAME': 'Cacao',
'SHOW_REQUIRED_ASTERISK': True,
'CONFIRM_UNSAVED_CHANGES': True,
'MENU': (
{'app': 'cacao', 'label': 'Guias de Cacao', 'icon': 'icon-leaf'},
{'app': 'configuracion', 'icon': 'icon-cog'},
{'app': 'event', 'icon': 'icon-chevron-right'},
{'label': 'Archivos estaticos', 'icon': 'icon-globe', 'models': (
{'label': 'Generar archivos estaticos',
'url': '/admin/static-generator/'},
)},
{'app': 'auth', 'label': 'Usuarios y Grupos', 'icon': 'icon-lock'},
{'app': 'sites', 'icon': 'icon-chevron-right'},
{'app': 'redirects', 'icon': 'icon-repeat'},
),
# misc
'LIST_PER_PAGE': 15,
'HEADER_DATE_FORMAT': 'l, j, F Y',
}
# CKEditor
CKEDITOR_UPLOAD_PATH = "uploads/"
CKEDITOR_IMAGE_BACKEND = "pillow"
CKEDITOR_JQUERY_URL = '//ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js'
CKEDITOR_CONFIGS = {
'default': {
'toolbar': [
['Undo', 'Redo',
'-', 'Format', 'Bold', 'Italic', 'Underline', 'NumberedList', 'BulletedList', 'Blockquote',
'-', 'JustifyLeft', 'JustifyCenter', 'JustifyRight', 'JustifyBlock',
'-', 'Link', 'Unlink', 'Scayt',
'-', 'Cut', 'Copy', 'PasteText',
'-', 'Source', 'Image', 'Iframe',
],
],
'width': 'auto',
'allowedContent': True,
'removePlugins': 'stylesheetparser',
'extraAllowedContent': 'iframe[*]',
},
}
# FB App ID
FB_APP_ID = values.SecretValue()
# GA APP ID
GA_APP_ID = values.SecretValue()
CX_CODE = values.SecretValue()
    # used by the views that delete folders and open the guide folder
PROJECT_DIR = dirname(dirname(abspath(__file__)))
PERSEUS_BUILD_DIR = '/tmp/perseus/build'
PERSEUS_SOURCE_DIR = '/tmp/perseus/guia'
    # config for creating PDFs
PDF_KIT_MODEL = 'cacao.Content'
|
bsd-3-clause
|
rizvve/pmip6ns3.13new
|
src/mesh/bindings/modulegen__gcc_ILP32.py
|
14
|
686373
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    def handle_error(self, wrapper, exception, traceback_):
        warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
        return True
pybindgen.settings.error_handler = ErrorHandler()
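# Descriptive note added for clarity (not generated output): returning True from
# handle_error() tells pybindgen to skip the offending wrapper and continue, so a
# single broken binding only emits a warning instead of aborting module generation.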
import sys
def module_init():
    root_module = Module('ns.mesh', cpp_namespace='::ns3')
    return root_module
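# Illustrative driver sketch (assumption -- ns-3 modulegen scripts are normally run
# this way by a main() defined later in the file, which may differ in detail):
#   root_module = module_init()
#   register_types(root_module)
#   register_methods(root_module)
#   root_module.generate(FileCodeSink(sys.stdout))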
def register_types(module):
root_module = module.get_root()
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacType [enumeration]
module.add_enum('WifiMacType', ['WIFI_MAC_CTL_RTS', 'WIFI_MAC_CTL_CTS', 'WIFI_MAC_CTL_ACK', 'WIFI_MAC_CTL_BACKREQ', 'WIFI_MAC_CTL_BACKRESP', 'WIFI_MAC_MGT_BEACON', 'WIFI_MAC_MGT_ASSOCIATION_REQUEST', 'WIFI_MAC_MGT_ASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_DISASSOCIATION', 'WIFI_MAC_MGT_REASSOCIATION_REQUEST', 'WIFI_MAC_MGT_REASSOCIATION_RESPONSE', 'WIFI_MAC_MGT_PROBE_REQUEST', 'WIFI_MAC_MGT_PROBE_RESPONSE', 'WIFI_MAC_MGT_AUTHENTICATION', 'WIFI_MAC_MGT_DEAUTHENTICATION', 'WIFI_MAC_MGT_ACTION', 'WIFI_MAC_MGT_ACTION_NO_ACK', 'WIFI_MAC_MGT_MULTIHOP_ACTION', 'WIFI_MAC_DATA', 'WIFI_MAC_DATA_CFACK', 'WIFI_MAC_DATA_CFPOLL', 'WIFI_MAC_DATA_CFACK_CFPOLL', 'WIFI_MAC_DATA_NULL', 'WIFI_MAC_DATA_NULL_CFACK', 'WIFI_MAC_DATA_NULL_CFPOLL', 'WIFI_MAC_DATA_NULL_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA', 'WIFI_MAC_QOSDATA_CFACK', 'WIFI_MAC_QOSDATA_CFPOLL', 'WIFI_MAC_QOSDATA_CFACK_CFPOLL', 'WIFI_MAC_QOSDATA_NULL', 'WIFI_MAC_QOSDATA_NULL_CFPOLL', 'WIFI_MAC_QOSDATA_NULL_CFACK_CFPOLL'], import_from_module='ns.wifi')
## wifi-preamble.h (module 'wifi'): ns3::WifiPreamble [enumeration]
module.add_enum('WifiPreamble', ['WIFI_PREAMBLE_LONG', 'WIFI_PREAMBLE_SHORT'], import_from_module='ns.wifi')
## wifi-mode.h (module 'wifi'): ns3::WifiModulationClass [enumeration]
module.add_enum('WifiModulationClass', ['WIFI_MOD_CLASS_UNKNOWN', 'WIFI_MOD_CLASS_IR', 'WIFI_MOD_CLASS_FHSS', 'WIFI_MOD_CLASS_DSSS', 'WIFI_MOD_CLASS_ERP_PBCC', 'WIFI_MOD_CLASS_DSSS_OFDM', 'WIFI_MOD_CLASS_ERP_OFDM', 'WIFI_MOD_CLASS_OFDM', 'WIFI_MOD_CLASS_HT'], import_from_module='ns.wifi')
## wifi-phy-standard.h (module 'wifi'): ns3::WifiPhyStandard [enumeration]
module.add_enum('WifiPhyStandard', ['WIFI_PHY_STANDARD_80211a', 'WIFI_PHY_STANDARD_80211b', 'WIFI_PHY_STANDARD_80211g', 'WIFI_PHY_STANDARD_80211_10MHZ', 'WIFI_PHY_STANDARD_80211_5MHZ', 'WIFI_PHY_STANDARD_holland', 'WIFI_PHY_STANDARD_80211p_CCH', 'WIFI_PHY_STANDARD_80211p_SCH'], import_from_module='ns.wifi')
## wifi-mode.h (module 'wifi'): ns3::WifiCodeRate [enumeration]
module.add_enum('WifiCodeRate', ['WIFI_CODE_RATE_UNDEFINED', 'WIFI_CODE_RATE_3_4', 'WIFI_CODE_RATE_2_3', 'WIFI_CODE_RATE_1_2'], import_from_module='ns.wifi')
## qos-utils.h (module 'wifi'): ns3::AcIndex [enumeration]
module.add_enum('AcIndex', ['AC_BE', 'AC_BK', 'AC_VI', 'AC_VO', 'AC_BE_NQOS', 'AC_UNDEF'], import_from_module='ns.wifi')
## edca-txop-n.h (module 'wifi'): ns3::TypeOfStation [enumeration]
module.add_enum('TypeOfStation', ['STA', 'AP', 'ADHOC_STA', 'MESH'], import_from_module='ns.wifi')
## ctrl-headers.h (module 'wifi'): ns3::BlockAckType [enumeration]
module.add_enum('BlockAckType', ['BASIC_BLOCK_ACK', 'COMPRESSED_BLOCK_ACK', 'MULTI_TID_BLOCK_ACK'], import_from_module='ns.wifi')
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper [class]
module.add_class('AsciiTraceHelper', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice [class]
module.add_class('AsciiTraceHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## block-ack-manager.h (module 'wifi'): ns3::Bar [struct]
module.add_class('Bar', import_from_module='ns.wifi')
## block-ack-agreement.h (module 'wifi'): ns3::BlockAckAgreement [class]
module.add_class('BlockAckAgreement', import_from_module='ns.wifi')
## block-ack-manager.h (module 'wifi'): ns3::BlockAckManager [class]
module.add_class('BlockAckManager', import_from_module='ns.wifi')
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## capability-information.h (module 'wifi'): ns3::CapabilityInformation [class]
module.add_class('CapabilityInformation', import_from_module='ns.wifi')
## event-id.h (module 'core'): ns3::EventId [class]
module.add_class('EventId', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## mesh-helper.h (module 'mesh'): ns3::MeshHelper [class]
module.add_class('MeshHelper')
## mesh-helper.h (module 'mesh'): ns3::MeshHelper::ChannelPolicy [enumeration]
module.add_enum('ChannelPolicy', ['SPREAD_CHANNELS', 'ZERO_CHANNEL'], outer_class=root_module['ns3::MeshHelper'])
## mesh-wifi-beacon.h (module 'mesh'): ns3::MeshWifiBeacon [class]
module.add_class('MeshWifiBeacon')
## net-device-container.h (module 'network'): ns3::NetDeviceContainer [class]
module.add_class('NetDeviceContainer', import_from_module='ns.network')
## node-container.h (module 'network'): ns3::NodeContainer [class]
module.add_class('NodeContainer', import_from_module='ns.network')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## object-factory.h (module 'core'): ns3::ObjectFactory [class]
module.add_class('ObjectFactory', import_from_module='ns.core')
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement [class]
module.add_class('OriginatorBlockAckAgreement', import_from_module='ns.wifi', parent=root_module['ns3::BlockAckAgreement'])
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement::State [enumeration]
module.add_enum('State', ['PENDING', 'ESTABLISHED', 'INACTIVE', 'UNSUCCESSFUL'], outer_class=root_module['ns3::OriginatorBlockAckAgreement'], import_from_module='ns.wifi')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## pcap-file.h (module 'network'): ns3::PcapFile [class]
module.add_class('PcapFile', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelper [class]
module.add_class('PcapHelper', import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelper [enumeration]
module.add_enum('', ['DLT_NULL', 'DLT_EN10MB', 'DLT_PPP', 'DLT_RAW', 'DLT_IEEE802_11', 'DLT_PRISM_HEADER', 'DLT_IEEE802_11_RADIO'], outer_class=root_module['ns3::PcapHelper'], import_from_module='ns.network')
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice [class]
module.add_class('PcapHelperForDevice', allow_subclassing=True, import_from_module='ns.network')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simulator.h (module 'core'): ns3::Simulator [class]
module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
## status-code.h (module 'wifi'): ns3::StatusCode [class]
module.add_class('StatusCode', import_from_module='ns.wifi')
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## wifi-helper.h (module 'wifi'): ns3::WifiHelper [class]
module.add_class('WifiHelper', import_from_module='ns.wifi')
## wifi-helper.h (module 'wifi'): ns3::WifiMacHelper [class]
module.add_class('WifiMacHelper', allow_subclassing=True, import_from_module='ns.wifi')
## wifi-mode.h (module 'wifi'): ns3::WifiMode [class]
module.add_class('WifiMode', import_from_module='ns.wifi')
## wifi-mode.h (module 'wifi'): ns3::WifiModeFactory [class]
module.add_class('WifiModeFactory', import_from_module='ns.wifi')
## wifi-helper.h (module 'wifi'): ns3::WifiPhyHelper [class]
module.add_class('WifiPhyHelper', allow_subclassing=True, import_from_module='ns.wifi')
## wifi-phy.h (module 'wifi'): ns3::WifiPhyListener [class]
module.add_class('WifiPhyListener', allow_subclassing=True, import_from_module='ns.wifi')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation [struct]
module.add_class('WifiRemoteStation', import_from_module='ns.wifi')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationInfo [class]
module.add_class('WifiRemoteStationInfo', import_from_module='ns.wifi')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState [struct]
module.add_class('WifiRemoteStationState', import_from_module='ns.wifi')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState [enumeration]
module.add_enum('', ['BRAND_NEW', 'DISASSOC', 'WAIT_ASSOC_TX_OK', 'GOT_ASSOC_TX_OK'], outer_class=root_module['ns3::WifiRemoteStationState'], import_from_module='ns.wifi')
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
module.add_class('int64x64_t', import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaRequestHeader [class]
module.add_class('MgtAddBaRequestHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaResponseHeader [class]
module.add_class('MgtAddBaResponseHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtAssocRequestHeader [class]
module.add_class('MgtAssocRequestHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtAssocResponseHeader [class]
module.add_class('MgtAssocResponseHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtDelBaHeader [class]
module.add_class('MgtDelBaHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtProbeRequestHeader [class]
module.add_class('MgtProbeRequestHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::MgtProbeResponseHeader [class]
module.add_class('MgtProbeResponseHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## pcap-file-wrapper.h (module 'network'): ns3::PcapFileWrapper [class]
module.add_class('PcapFileWrapper', import_from_module='ns.network', parent=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::MeshWifiInterfaceMacPlugin', 'ns3::empty', 'ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::WifiInformationElement', 'ns3::empty', 'ns3::DefaultDeleter<ns3::WifiInformationElement>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::dot11s::DestinationAddressUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::dot11s::DestinationAddressUnit', 'ns3::empty', 'ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::dot11s::IeBeaconTimingUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, template_parameters=['ns3::dot11s::IeBeaconTimingUnit', 'ns3::empty', 'ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## nstime.h (module 'core'): ns3::Time [class]
module.add_class('Time', import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
## nstime.h (module 'core'): ns3::Time [class]
root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader [class]
module.add_class('WifiActionHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::CategoryValue [enumeration]
module.add_enum('CategoryValue', ['BLOCK_ACK', 'MESH_PEERING_MGT', 'MESH_LINK_METRIC', 'MESH_PATH_SELECTION', 'MESH_INTERWORKING', 'MESH_RESOURCE_COORDINATION', 'MESH_PROXY_FORWARDING'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi')
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::PeerLinkMgtActionValue [enumeration]
module.add_enum('PeerLinkMgtActionValue', ['PEER_LINK_OPEN', 'PEER_LINK_CONFIRM', 'PEER_LINK_CLOSE'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi')
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::LinkMetricActionValue [enumeration]
module.add_enum('LinkMetricActionValue', ['LINK_METRIC_REQUEST', 'LINK_METRIC_REPORT'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi')
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::PathSelectionActionValue [enumeration]
module.add_enum('PathSelectionActionValue', ['PATH_SELECTION'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi')
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::InterworkActionValue [enumeration]
module.add_enum('InterworkActionValue', ['PORTAL_ANNOUNCEMENT'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi')
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ResourceCoordinationActionValue [enumeration]
module.add_enum('ResourceCoordinationActionValue', ['CONGESTION_CONTROL_NOTIFICATION', 'MDA_SETUP_REQUEST', 'MDA_SETUP_REPLY', 'MDAOP_ADVERTISMENT_REQUEST', 'MDAOP_ADVERTISMENTS', 'MDAOP_SET_TEARDOWN', 'BEACON_TIMING_REQUEST', 'BEACON_TIMING_RESPONSE', 'TBTT_ADJUSTMENT_REQUEST', 'MESH_CHANNEL_SWITCH_ANNOUNCEMENT'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi')
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::BlockAckActionValue [enumeration]
module.add_enum('BlockAckActionValue', ['BLOCK_ACK_ADDBA_REQUEST', 'BLOCK_ACK_ADDBA_RESPONSE', 'BLOCK_ACK_DELBA'], outer_class=root_module['ns3::WifiActionHeader'], import_from_module='ns.wifi')
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue [union]
module.add_class('ActionValue', import_from_module='ns.wifi', outer_class=root_module['ns3::WifiActionHeader'])
## wifi-information-element.h (module 'wifi'): ns3::WifiInformationElement [class]
module.add_class('WifiInformationElement', import_from_module='ns.wifi', parent=root_module['ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >'])
## wifi-information-element-vector.h (module 'wifi'): ns3::WifiInformationElementVector [class]
module.add_class('WifiInformationElementVector', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## wifi-mac.h (module 'wifi'): ns3::WifiMac [class]
module.add_class('WifiMac', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader [class]
module.add_class('WifiMacHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::QosAckPolicy [enumeration]
module.add_enum('QosAckPolicy', ['NORMAL_ACK', 'NO_ACK', 'NO_EXPLICIT_ACK', 'BLOCK_ACK'], outer_class=root_module['ns3::WifiMacHeader'], import_from_module='ns.wifi')
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::AddressType [enumeration]
module.add_enum('AddressType', ['ADDR1', 'ADDR2', 'ADDR3', 'ADDR4'], outer_class=root_module['ns3::WifiMacHeader'], import_from_module='ns.wifi')
## wifi-phy.h (module 'wifi'): ns3::WifiPhy [class]
module.add_class('WifiPhy', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
## wifi-phy.h (module 'wifi'): ns3::WifiPhy::State [enumeration]
module.add_enum('State', ['IDLE', 'CCA_BUSY', 'TX', 'RX', 'SWITCHING'], outer_class=root_module['ns3::WifiPhy'], import_from_module='ns.wifi')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationManager [class]
module.add_class('WifiRemoteStationManager', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## boolean.h (module 'core'): ns3::BooleanChecker [class]
module.add_class('BooleanChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## boolean.h (module 'core'): ns3::BooleanValue [class]
module.add_class('BooleanValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## channel.h (module 'network'): ns3::Channel [class]
module.add_class('Channel', import_from_module='ns.network', parent=root_module['ns3::Object'])
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckRequestHeader [class]
module.add_class('CtrlBAckRequestHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckResponseHeader [class]
module.add_class('CtrlBAckResponseHeader', import_from_module='ns.wifi', parent=root_module['ns3::Header'])
## dcf.h (module 'wifi'): ns3::Dcf [class]
module.add_class('Dcf', import_from_module='ns.wifi', parent=root_module['ns3::Object'])
## double.h (module 'core'): ns3::DoubleValue [class]
module.add_class('DoubleValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## edca-txop-n.h (module 'wifi'): ns3::EdcaTxopN [class]
module.add_class('EdcaTxopN', import_from_module='ns.wifi', parent=root_module['ns3::Dcf'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## enum.h (module 'core'): ns3::EnumChecker [class]
module.add_class('EnumChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## enum.h (module 'core'): ns3::EnumValue [class]
module.add_class('EnumValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## event-impl.h (module 'core'): ns3::EventImpl [class]
module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
## supported-rates.h (module 'wifi'): ns3::ExtendedSupportedRatesIE [class]
module.add_class('ExtendedSupportedRatesIE', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement'])
## integer.h (module 'core'): ns3::IntegerValue [class]
module.add_class('IntegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mesh-information-element-vector.h (module 'mesh'): ns3::MeshInformationElementVector [class]
module.add_class('MeshInformationElementVector', parent=root_module['ns3::WifiInformationElementVector'])
## mesh-l2-routing-protocol.h (module 'mesh'): ns3::MeshL2RoutingProtocol [class]
module.add_class('MeshL2RoutingProtocol', parent=root_module['ns3::Object'])
## mesh-stack-installer.h (module 'mesh'): ns3::MeshStack [class]
module.add_class('MeshStack', parent=root_module['ns3::Object'])
## mesh-wifi-interface-mac-plugin.h (module 'mesh'): ns3::MeshWifiInterfaceMacPlugin [class]
module.add_class('MeshWifiInterfaceMacPlugin', parent=root_module['ns3::SimpleRefCount< ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin> >'])
## mgt-headers.h (module 'wifi'): ns3::MgtBeaconHeader [class]
module.add_class('MgtBeaconHeader', import_from_module='ns.wifi', parent=root_module['ns3::MgtProbeResponseHeader'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
## regular-wifi-mac.h (module 'wifi'): ns3::RegularWifiMac [class]
module.add_class('RegularWifiMac', import_from_module='ns.wifi', parent=root_module['ns3::WifiMac'])
## ssid.h (module 'wifi'): ns3::Ssid [class]
module.add_class('Ssid', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement'])
## ssid.h (module 'wifi'): ns3::SsidChecker [class]
module.add_class('SsidChecker', import_from_module='ns.wifi', parent=root_module['ns3::AttributeChecker'])
## ssid.h (module 'wifi'): ns3::SsidValue [class]
module.add_class('SsidValue', import_from_module='ns.wifi', parent=root_module['ns3::AttributeValue'])
## supported-rates.h (module 'wifi'): ns3::SupportedRates [class]
module.add_class('SupportedRates', import_from_module='ns.wifi', parent=root_module['ns3::WifiInformationElement'])
## nstime.h (module 'core'): ns3::TimeChecker [class]
module.add_class('TimeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## nstime.h (module 'core'): ns3::TimeValue [class]
module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## uinteger.h (module 'core'): ns3::UintegerValue [class]
module.add_class('UintegerValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## wifi-mode.h (module 'wifi'): ns3::WifiModeChecker [class]
module.add_class('WifiModeChecker', import_from_module='ns.wifi', parent=root_module['ns3::AttributeChecker'])
## wifi-mode.h (module 'wifi'): ns3::WifiModeValue [class]
module.add_class('WifiModeValue', import_from_module='ns.wifi', parent=root_module['ns3::AttributeValue'])
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## bridge-channel.h (module 'bridge'): ns3::BridgeChannel [class]
module.add_class('BridgeChannel', import_from_module='ns.bridge', parent=root_module['ns3::Channel'])
## dca-txop.h (module 'wifi'): ns3::DcaTxop [class]
module.add_class('DcaTxop', import_from_module='ns.wifi', parent=root_module['ns3::Dcf'])
## dot11s-installer.h (module 'mesh'): ns3::Dot11sStack [class]
module.add_class('Dot11sStack', parent=root_module['ns3::MeshStack'])
## flame-installer.h (module 'mesh'): ns3::FlameStack [class]
module.add_class('FlameStack', parent=root_module['ns3::MeshStack'])
## mesh-point-device.h (module 'mesh'): ns3::MeshPointDevice [class]
module.add_class('MeshPointDevice', parent=root_module['ns3::NetDevice'])
## mesh-wifi-interface-mac.h (module 'mesh'): ns3::MeshWifiInterfaceMac [class]
module.add_class('MeshWifiInterfaceMac', parent=root_module['ns3::RegularWifiMac'])
module.add_container('ns3::WifiModeList', 'ns3::WifiMode', container_type='vector')
module.add_container('std::vector< ns3::Ptr< ns3::NetDevice > >', 'ns3::Ptr< ns3::NetDevice >', container_type='vector')
typehandlers.add_type_alias('uint8_t', 'ns3::WifiInformationElementId')
typehandlers.add_type_alias('uint8_t*', 'ns3::WifiInformationElementId*')
typehandlers.add_type_alias('uint8_t&', 'ns3::WifiInformationElementId&')
typehandlers.add_type_alias('__gnu_cxx::__normal_iterator< ns3::WifiMode const *, std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > > >', 'ns3::WifiModeListIterator')
typehandlers.add_type_alias('__gnu_cxx::__normal_iterator< ns3::WifiMode const *, std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > > >*', 'ns3::WifiModeListIterator*')
typehandlers.add_type_alias('__gnu_cxx::__normal_iterator< ns3::WifiMode const *, std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > > >&', 'ns3::WifiModeListIterator&')
typehandlers.add_type_alias('std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > >', 'ns3::WifiModeList')
typehandlers.add_type_alias('std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > >*', 'ns3::WifiModeList*')
typehandlers.add_type_alias('std::vector< ns3::WifiMode, std::allocator< ns3::WifiMode > >&', 'ns3::WifiModeList&')
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace dot11s
nested_module = module.add_cpp_namespace('dot11s')
register_types_ns3_dot11s(nested_module)
## Register a nested module for the namespace flame
nested_module = module.add_cpp_namespace('flame')
register_types_ns3_flame(nested_module)
## Register a nested module for the namespace internal
nested_module = module.add_cpp_namespace('internal')
register_types_ns3_internal(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_dot11s(module):
root_module = module.get_root()
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::dot11sPathSelectionProtocol [enumeration]
module.add_enum('dot11sPathSelectionProtocol', ['PROTOCOL_HWMP'])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::dot11sSynchronizationProtocolIdentifier [enumeration]
module.add_enum('dot11sSynchronizationProtocolIdentifier', ['SYNC_NEIGHBOUR_OFFSET', 'SYNC_NULL'])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::dot11sCongestionControlMode [enumeration]
module.add_enum('dot11sCongestionControlMode', ['CONGESTION_SIGNALING', 'CONGESTION_NULL'])
## ie-dot11s-peer-management.h (module 'mesh'): ns3::dot11s::PmpReasonCode [enumeration]
module.add_enum('PmpReasonCode', ['REASON11S_PEERING_CANCELLED', 'REASON11S_MESH_MAX_PEERS', 'REASON11S_MESH_CAPABILITY_POLICY_VIOLATION', 'REASON11S_MESH_CLOSE_RCVD', 'REASON11S_MESH_MAX_RETRIES', 'REASON11S_MESH_CONFIRM_TIMEOUT', 'REASON11S_MESH_INVALID_GTK', 'REASON11S_MESH_INCONSISTENT_PARAMETERS', 'REASON11S_MESH_INVALID_SECURITY_CAPABILITY', 'REASON11S_RESERVED'])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::dot11sPathSelectionMetric [enumeration]
module.add_enum('dot11sPathSelectionMetric', ['METRIC_AIRTIME'])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::dot11sAuthenticationProtocol [enumeration]
module.add_enum('dot11sAuthenticationProtocol', ['AUTH_NULL', 'AUTH_SAE'])
## ie-dot11s-preq.h (module 'mesh'): ns3::dot11s::DestinationAddressUnit [class]
module.add_class('DestinationAddressUnit', parent=root_module['ns3::SimpleRefCount< ns3::dot11s::DestinationAddressUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit> >'])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability [class]
module.add_class('Dot11sMeshCapability')
## hwmp-protocol.h (module 'mesh'): ns3::dot11s::HwmpProtocol [class]
module.add_class('HwmpProtocol', parent=root_module['ns3::MeshL2RoutingProtocol'])
## hwmp-protocol.h (module 'mesh'): ns3::dot11s::HwmpProtocol::FailedDestination [struct]
module.add_class('FailedDestination', outer_class=root_module['ns3::dot11s::HwmpProtocol'])
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable [class]
module.add_class('HwmpRtable', parent=root_module['ns3::Object'])
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult [struct]
module.add_class('LookupResult', outer_class=root_module['ns3::dot11s::HwmpRtable'])
## ie-dot11s-beacon-timing.h (module 'mesh'): ns3::dot11s::IeBeaconTiming [class]
module.add_class('IeBeaconTiming', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-beacon-timing.h (module 'mesh'): ns3::dot11s::IeBeaconTimingUnit [class]
module.add_class('IeBeaconTimingUnit', parent=root_module['ns3::SimpleRefCount< ns3::dot11s::IeBeaconTimingUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit> >'])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::IeConfiguration [class]
module.add_class('IeConfiguration', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-metric-report.h (module 'mesh'): ns3::dot11s::IeLinkMetricReport [class]
module.add_class('IeLinkMetricReport', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshId [class]
module.add_class('IeMeshId', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshIdChecker [class]
module.add_class('IeMeshIdChecker', parent=root_module['ns3::AttributeChecker'])
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshIdValue [class]
module.add_class('IeMeshIdValue', parent=root_module['ns3::AttributeValue'])
## ie-dot11s-peer-management.h (module 'mesh'): ns3::dot11s::IePeerManagement [class]
module.add_class('IePeerManagement', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-peer-management.h (module 'mesh'): ns3::dot11s::IePeerManagement::Subtype [enumeration]
module.add_enum('Subtype', ['PEER_OPEN', 'PEER_CONFIRM', 'PEER_CLOSE'], outer_class=root_module['ns3::dot11s::IePeerManagement'])
## ie-dot11s-peering-protocol.h (module 'mesh'): ns3::dot11s::IePeeringProtocol [class]
module.add_class('IePeeringProtocol', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-perr.h (module 'mesh'): ns3::dot11s::IePerr [class]
module.add_class('IePerr', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-prep.h (module 'mesh'): ns3::dot11s::IePrep [class]
module.add_class('IePrep', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-preq.h (module 'mesh'): ns3::dot11s::IePreq [class]
module.add_class('IePreq', parent=root_module['ns3::WifiInformationElement'])
## ie-dot11s-rann.h (module 'mesh'): ns3::dot11s::IeRann [class]
module.add_class('IeRann', parent=root_module['ns3::WifiInformationElement'])
## dot11s-mac-header.h (module 'mesh'): ns3::dot11s::MeshHeader [class]
module.add_class('MeshHeader', parent=root_module['ns3::Header'])
## peer-link.h (module 'mesh'): ns3::dot11s::PeerLink [class]
module.add_class('PeerLink', parent=root_module['ns3::Object'])
## peer-link.h (module 'mesh'): ns3::dot11s::PeerLink::PeerState [enumeration]
module.add_enum('PeerState', ['IDLE', 'OPN_SNT', 'CNF_RCVD', 'OPN_RCVD', 'ESTAB', 'HOLDING'], outer_class=root_module['ns3::dot11s::PeerLink'])
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart [class]
module.add_class('PeerLinkFrameStart', parent=root_module['ns3::Header'])
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields [struct]
module.add_class('PlinkFrameStartFields', outer_class=root_module['ns3::dot11s::PeerLinkFrameStart'])
## peer-management-protocol.h (module 'mesh'): ns3::dot11s::PeerManagementProtocol [class]
module.add_class('PeerManagementProtocol', parent=root_module['ns3::Object'])
module.add_container('std::vector< std::pair< unsigned int, ns3::Mac48Address > >', 'std::pair< unsigned int, ns3::Mac48Address >', container_type='vector')
module.add_container('std::vector< ns3::dot11s::HwmpProtocol::FailedDestination >', 'ns3::dot11s::HwmpProtocol::FailedDestination', container_type='vector')
module.add_container('std::vector< ns3::Ptr< ns3::dot11s::IeBeaconTimingUnit > >', 'ns3::Ptr< ns3::dot11s::IeBeaconTimingUnit >', container_type='vector')
module.add_container('std::vector< ns3::Ptr< ns3::dot11s::DestinationAddressUnit > >', 'ns3::Ptr< ns3::dot11s::DestinationAddressUnit >', container_type='vector')
module.add_container('std::vector< ns3::Ptr< ns3::dot11s::PeerLink > >', 'ns3::Ptr< ns3::dot11s::PeerLink >', container_type='vector')
module.add_container('std::vector< ns3::Mac48Address >', 'ns3::Mac48Address', container_type='vector')
def register_types_ns3_flame(module):
root_module = module.get_root()
## flame-header.h (module 'mesh'): ns3::flame::FlameHeader [class]
module.add_class('FlameHeader', parent=root_module['ns3::Header'])
## flame-protocol.h (module 'mesh'): ns3::flame::FlameProtocol [class]
module.add_class('FlameProtocol', parent=root_module['ns3::MeshL2RoutingProtocol'])
## flame-protocol-mac.h (module 'mesh'): ns3::flame::FlameProtocolMac [class]
module.add_class('FlameProtocolMac', parent=root_module['ns3::MeshWifiInterfaceMacPlugin'])
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable [class]
module.add_class('FlameRtable', parent=root_module['ns3::Object'])
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::LookupResult [struct]
module.add_class('LookupResult', outer_class=root_module['ns3::flame::FlameRtable'])
## flame-protocol.h (module 'mesh'): ns3::flame::FlameTag [class]
module.add_class('FlameTag', parent=root_module['ns3::Tag'])
def register_types_ns3_internal(module):
root_module = module.get_root()
def register_methods(root_module):
register_Ns3Address_methods(root_module, root_module['ns3::Address'])
register_Ns3AsciiTraceHelper_methods(root_module, root_module['ns3::AsciiTraceHelper'])
register_Ns3AsciiTraceHelperForDevice_methods(root_module, root_module['ns3::AsciiTraceHelperForDevice'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3Bar_methods(root_module, root_module['ns3::Bar'])
register_Ns3BlockAckAgreement_methods(root_module, root_module['ns3::BlockAckAgreement'])
register_Ns3BlockAckManager_methods(root_module, root_module['ns3::BlockAckManager'])
register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3CapabilityInformation_methods(root_module, root_module['ns3::CapabilityInformation'])
register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
register_Ns3MeshHelper_methods(root_module, root_module['ns3::MeshHelper'])
register_Ns3MeshWifiBeacon_methods(root_module, root_module['ns3::MeshWifiBeacon'])
register_Ns3NetDeviceContainer_methods(root_module, root_module['ns3::NetDeviceContainer'])
register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
register_Ns3OriginatorBlockAckAgreement_methods(root_module, root_module['ns3::OriginatorBlockAckAgreement'])
register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
register_Ns3PcapFile_methods(root_module, root_module['ns3::PcapFile'])
register_Ns3PcapHelper_methods(root_module, root_module['ns3::PcapHelper'])
register_Ns3PcapHelperForDevice_methods(root_module, root_module['ns3::PcapHelperForDevice'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
register_Ns3StatusCode_methods(root_module, root_module['ns3::StatusCode'])
register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3WifiHelper_methods(root_module, root_module['ns3::WifiHelper'])
register_Ns3WifiMacHelper_methods(root_module, root_module['ns3::WifiMacHelper'])
register_Ns3WifiMode_methods(root_module, root_module['ns3::WifiMode'])
register_Ns3WifiModeFactory_methods(root_module, root_module['ns3::WifiModeFactory'])
register_Ns3WifiPhyHelper_methods(root_module, root_module['ns3::WifiPhyHelper'])
register_Ns3WifiPhyListener_methods(root_module, root_module['ns3::WifiPhyListener'])
register_Ns3WifiRemoteStation_methods(root_module, root_module['ns3::WifiRemoteStation'])
register_Ns3WifiRemoteStationInfo_methods(root_module, root_module['ns3::WifiRemoteStationInfo'])
register_Ns3WifiRemoteStationState_methods(root_module, root_module['ns3::WifiRemoteStationState'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
register_Ns3Header_methods(root_module, root_module['ns3::Header'])
register_Ns3MgtAddBaRequestHeader_methods(root_module, root_module['ns3::MgtAddBaRequestHeader'])
register_Ns3MgtAddBaResponseHeader_methods(root_module, root_module['ns3::MgtAddBaResponseHeader'])
register_Ns3MgtAssocRequestHeader_methods(root_module, root_module['ns3::MgtAssocRequestHeader'])
register_Ns3MgtAssocResponseHeader_methods(root_module, root_module['ns3::MgtAssocResponseHeader'])
register_Ns3MgtDelBaHeader_methods(root_module, root_module['ns3::MgtDelBaHeader'])
register_Ns3MgtProbeRequestHeader_methods(root_module, root_module['ns3::MgtProbeRequestHeader'])
register_Ns3MgtProbeResponseHeader_methods(root_module, root_module['ns3::MgtProbeResponseHeader'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3PcapFileWrapper_methods(root_module, root_module['ns3::PcapFileWrapper'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
register_Ns3SimpleRefCount__Ns3MeshWifiInterfaceMacPlugin_Ns3Empty_Ns3DefaultDeleter__lt__ns3MeshWifiInterfaceMacPlugin__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin> >'])
register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3SimpleRefCount__Ns3WifiInformationElement_Ns3Empty_Ns3DefaultDeleter__lt__ns3WifiInformationElement__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >'])
register_Ns3SimpleRefCount__Ns3Dot11sDestinationAddressUnit_Ns3Empty_Ns3DefaultDeleter__lt__ns3Dot11sDestinationAddressUnit__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::dot11s::DestinationAddressUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit> >'])
register_Ns3SimpleRefCount__Ns3Dot11sIeBeaconTimingUnit_Ns3Empty_Ns3DefaultDeleter__lt__ns3Dot11sIeBeaconTimingUnit__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::dot11s::IeBeaconTimingUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit> >'])
register_Ns3Time_methods(root_module, root_module['ns3::Time'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
register_Ns3WifiActionHeader_methods(root_module, root_module['ns3::WifiActionHeader'])
register_Ns3WifiActionHeaderActionValue_methods(root_module, root_module['ns3::WifiActionHeader::ActionValue'])
register_Ns3WifiInformationElement_methods(root_module, root_module['ns3::WifiInformationElement'])
register_Ns3WifiInformationElementVector_methods(root_module, root_module['ns3::WifiInformationElementVector'])
register_Ns3WifiMac_methods(root_module, root_module['ns3::WifiMac'])
register_Ns3WifiMacHeader_methods(root_module, root_module['ns3::WifiMacHeader'])
register_Ns3WifiPhy_methods(root_module, root_module['ns3::WifiPhy'])
register_Ns3WifiRemoteStationManager_methods(root_module, root_module['ns3::WifiRemoteStationManager'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3BooleanChecker_methods(root_module, root_module['ns3::BooleanChecker'])
register_Ns3BooleanValue_methods(root_module, root_module['ns3::BooleanValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3Channel_methods(root_module, root_module['ns3::Channel'])
register_Ns3CtrlBAckRequestHeader_methods(root_module, root_module['ns3::CtrlBAckRequestHeader'])
register_Ns3CtrlBAckResponseHeader_methods(root_module, root_module['ns3::CtrlBAckResponseHeader'])
register_Ns3Dcf_methods(root_module, root_module['ns3::Dcf'])
register_Ns3DoubleValue_methods(root_module, root_module['ns3::DoubleValue'])
register_Ns3EdcaTxopN_methods(root_module, root_module['ns3::EdcaTxopN'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3EnumChecker_methods(root_module, root_module['ns3::EnumChecker'])
register_Ns3EnumValue_methods(root_module, root_module['ns3::EnumValue'])
register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
register_Ns3ExtendedSupportedRatesIE_methods(root_module, root_module['ns3::ExtendedSupportedRatesIE'])
register_Ns3IntegerValue_methods(root_module, root_module['ns3::IntegerValue'])
register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
register_Ns3MeshInformationElementVector_methods(root_module, root_module['ns3::MeshInformationElementVector'])
register_Ns3MeshL2RoutingProtocol_methods(root_module, root_module['ns3::MeshL2RoutingProtocol'])
register_Ns3MeshStack_methods(root_module, root_module['ns3::MeshStack'])
register_Ns3MeshWifiInterfaceMacPlugin_methods(root_module, root_module['ns3::MeshWifiInterfaceMacPlugin'])
register_Ns3MgtBeaconHeader_methods(root_module, root_module['ns3::MgtBeaconHeader'])
register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
register_Ns3Node_methods(root_module, root_module['ns3::Node'])
register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
register_Ns3RegularWifiMac_methods(root_module, root_module['ns3::RegularWifiMac'])
register_Ns3Ssid_methods(root_module, root_module['ns3::Ssid'])
register_Ns3SsidChecker_methods(root_module, root_module['ns3::SsidChecker'])
register_Ns3SsidValue_methods(root_module, root_module['ns3::SsidValue'])
register_Ns3SupportedRates_methods(root_module, root_module['ns3::SupportedRates'])
register_Ns3TimeChecker_methods(root_module, root_module['ns3::TimeChecker'])
register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3UintegerValue_methods(root_module, root_module['ns3::UintegerValue'])
register_Ns3WifiModeChecker_methods(root_module, root_module['ns3::WifiModeChecker'])
register_Ns3WifiModeValue_methods(root_module, root_module['ns3::WifiModeValue'])
register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
register_Ns3BridgeChannel_methods(root_module, root_module['ns3::BridgeChannel'])
register_Ns3DcaTxop_methods(root_module, root_module['ns3::DcaTxop'])
register_Ns3Dot11sStack_methods(root_module, root_module['ns3::Dot11sStack'])
register_Ns3FlameStack_methods(root_module, root_module['ns3::FlameStack'])
register_Ns3MeshPointDevice_methods(root_module, root_module['ns3::MeshPointDevice'])
register_Ns3MeshWifiInterfaceMac_methods(root_module, root_module['ns3::MeshWifiInterfaceMac'])
register_Ns3Dot11sDestinationAddressUnit_methods(root_module, root_module['ns3::dot11s::DestinationAddressUnit'])
register_Ns3Dot11sDot11sMeshCapability_methods(root_module, root_module['ns3::dot11s::Dot11sMeshCapability'])
register_Ns3Dot11sHwmpProtocol_methods(root_module, root_module['ns3::dot11s::HwmpProtocol'])
register_Ns3Dot11sHwmpProtocolFailedDestination_methods(root_module, root_module['ns3::dot11s::HwmpProtocol::FailedDestination'])
register_Ns3Dot11sHwmpRtable_methods(root_module, root_module['ns3::dot11s::HwmpRtable'])
register_Ns3Dot11sHwmpRtableLookupResult_methods(root_module, root_module['ns3::dot11s::HwmpRtable::LookupResult'])
register_Ns3Dot11sIeBeaconTiming_methods(root_module, root_module['ns3::dot11s::IeBeaconTiming'])
register_Ns3Dot11sIeBeaconTimingUnit_methods(root_module, root_module['ns3::dot11s::IeBeaconTimingUnit'])
register_Ns3Dot11sIeConfiguration_methods(root_module, root_module['ns3::dot11s::IeConfiguration'])
register_Ns3Dot11sIeLinkMetricReport_methods(root_module, root_module['ns3::dot11s::IeLinkMetricReport'])
register_Ns3Dot11sIeMeshId_methods(root_module, root_module['ns3::dot11s::IeMeshId'])
register_Ns3Dot11sIeMeshIdChecker_methods(root_module, root_module['ns3::dot11s::IeMeshIdChecker'])
register_Ns3Dot11sIeMeshIdValue_methods(root_module, root_module['ns3::dot11s::IeMeshIdValue'])
register_Ns3Dot11sIePeerManagement_methods(root_module, root_module['ns3::dot11s::IePeerManagement'])
register_Ns3Dot11sIePeeringProtocol_methods(root_module, root_module['ns3::dot11s::IePeeringProtocol'])
register_Ns3Dot11sIePerr_methods(root_module, root_module['ns3::dot11s::IePerr'])
register_Ns3Dot11sIePrep_methods(root_module, root_module['ns3::dot11s::IePrep'])
register_Ns3Dot11sIePreq_methods(root_module, root_module['ns3::dot11s::IePreq'])
register_Ns3Dot11sIeRann_methods(root_module, root_module['ns3::dot11s::IeRann'])
register_Ns3Dot11sMeshHeader_methods(root_module, root_module['ns3::dot11s::MeshHeader'])
register_Ns3Dot11sPeerLink_methods(root_module, root_module['ns3::dot11s::PeerLink'])
register_Ns3Dot11sPeerLinkFrameStart_methods(root_module, root_module['ns3::dot11s::PeerLinkFrameStart'])
register_Ns3Dot11sPeerLinkFrameStartPlinkFrameStartFields_methods(root_module, root_module['ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields'])
register_Ns3Dot11sPeerManagementProtocol_methods(root_module, root_module['ns3::dot11s::PeerManagementProtocol'])
register_Ns3FlameFlameHeader_methods(root_module, root_module['ns3::flame::FlameHeader'])
register_Ns3FlameFlameProtocol_methods(root_module, root_module['ns3::flame::FlameProtocol'])
register_Ns3FlameFlameProtocolMac_methods(root_module, root_module['ns3::flame::FlameProtocolMac'])
register_Ns3FlameFlameRtable_methods(root_module, root_module['ns3::flame::FlameRtable'])
register_Ns3FlameFlameRtableLookupResult_methods(root_module, root_module['ns3::flame::FlameRtable::LookupResult'])
register_Ns3FlameFlameTag_methods(root_module, root_module['ns3::flame::FlameTag'])
return
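
# Each helper below mirrors the public C++ API of one wrapped class using the
# standard PyBindGen calls (add_constructor, add_method, add_instance_attribute,
# add_binary_comparison_operator, ...). As a rough, hypothetical sketch of the
# same idiom outside this generated module (names are illustrative only):
#
#     from pybindgen import Module, FileCodeSink, param
#     mod = Module('example')
#     cls = mod.add_class('Address')
#     cls.add_constructor([])
#     cls.add_method('GetLength', 'uint8_t', [], is_const=True)
#     cls.add_method('CopyTo', 'uint32_t', [param('uint8_t *', 'buffer')], is_const=True)
#     mod.generate(FileCodeSink(open('example-bindings.cc', 'w')))
#
# The '## header.h (module ...): <C++ declaration> [kind]' comments preceding
# each call record the scanned C++ declaration the call was generated from.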
def register_Ns3Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## address.h (module 'network'): ns3::Address::Address() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
cls.add_constructor([param('ns3::Address const &', 'address')])
## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
cls.add_method('CheckCompatible',
'bool',
[param('uint8_t', 'type'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyAllFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
cls.add_method('CopyAllTo',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
cls.add_method('CopyFrom',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'uint32_t',
[param('uint8_t *', 'buffer')],
is_const=True)
## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'buffer')])
## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
cls.add_method('GetLength',
'uint8_t',
[],
is_const=True)
## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
cls.add_method('IsInvalid',
'bool',
[],
is_const=True)
## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
cls.add_method('IsMatchingType',
'bool',
[param('uint8_t', 'type')],
is_const=True)
## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
cls.add_method('Register',
'uint8_t',
[],
is_static=True)
## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'buffer')],
is_const=True)
return
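
# Parameter and return types throughout these helpers are plain C++ type
# strings (e.g. 'uint8_t const *', 'ns3::Ptr< ns3::Packet const >'); the
# is_const and is_static flags correspond to the const and static qualifiers
# of the scanned member functions.
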
def register_Ns3AsciiTraceHelper_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper::AsciiTraceHelper(ns3::AsciiTraceHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AsciiTraceHelper const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::AsciiTraceHelper::AsciiTraceHelper() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): ns3::Ptr<ns3::OutputStreamWrapper> ns3::AsciiTraceHelper::CreateFileStream(std::string filename, std::_Ios_Openmode filemode=std::ios_base::out) [member function]
cls.add_method('CreateFileStream',
'ns3::Ptr< ns3::OutputStreamWrapper >',
[param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode', default_value='std::ios_base::out')])
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDequeueSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultDequeueSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDequeueSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultDequeueSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDropSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultDropSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultDropSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultDropSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultEnqueueSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultEnqueueSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultEnqueueSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultEnqueueSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultReceiveSinkWithContext(ns3::Ptr<ns3::OutputStreamWrapper> file, std::string context, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultReceiveSinkWithContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('std::string', 'context'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): static void ns3::AsciiTraceHelper::DefaultReceiveSinkWithoutContext(ns3::Ptr<ns3::OutputStreamWrapper> file, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('DefaultReceiveSinkWithoutContext',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'file'), param('ns3::Ptr< ns3::Packet const >', 'p')],
is_static=True)
## trace-helper.h (module 'network'): std::string ns3::AsciiTraceHelper::GetFilenameFromDevice(std::string prefix, ns3::Ptr<ns3::NetDevice> device, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromDevice',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'useObjectNames', default_value='true')])
## trace-helper.h (module 'network'): std::string ns3::AsciiTraceHelper::GetFilenameFromInterfacePair(std::string prefix, ns3::Ptr<ns3::Object> object, uint32_t interface, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromInterfacePair',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::Object >', 'object'), param('uint32_t', 'interface'), param('bool', 'useObjectNames', default_value='true')])
return
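
# default_value strings such as 'std::ios_base::out' and 'true' are C++
# expressions carried over verbatim from the scanned headers; they are
# intended for the generated C++ wrapper code rather than being evaluated
# in Python.
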
def register_Ns3AsciiTraceHelperForDevice_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice::AsciiTraceHelperForDevice(ns3::AsciiTraceHelperForDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AsciiTraceHelperForDevice const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::AsciiTraceHelperForDevice::AsciiTraceHelperForDevice() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename=false) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::Ptr<ns3::NetDevice> nd) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::Ptr< ns3::NetDevice >', 'nd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, std::string ndName, bool explicitFilename=false) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('std::string', 'ndName'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string ndName) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'ndName')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::NetDeviceContainer d) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::NetDeviceContainer', 'd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::NetDeviceContainer d) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::NetDeviceContainer', 'd')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, ns3::NodeContainer n) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('ns3::NodeContainer', 'n')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, ns3::NodeContainer n) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('ns3::NodeContainer', 'n')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(std::string prefix, uint32_t nodeid, uint32_t deviceid, bool explicitFilename) [member function]
cls.add_method('EnableAscii',
'void',
[param('std::string', 'prefix'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid'), param('bool', 'explicitFilename')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAscii(ns3::Ptr<ns3::OutputStreamWrapper> stream, uint32_t nodeid, uint32_t deviceid) [member function]
cls.add_method('EnableAscii',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiAll(std::string prefix) [member function]
cls.add_method('EnableAsciiAll',
'void',
[param('std::string', 'prefix')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiAll(ns3::Ptr<ns3::OutputStreamWrapper> stream) [member function]
cls.add_method('EnableAsciiAll',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')])
## trace-helper.h (module 'network'): void ns3::AsciiTraceHelperForDevice::EnableAsciiInternal(ns3::Ptr<ns3::OutputStreamWrapper> stream, std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool explicitFilename) [member function]
cls.add_method('EnableAsciiInternal',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream'), param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'explicitFilename')],
is_pure_virtual=True, is_virtual=True)
return
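
# EnableAsciiInternal is registered with is_pure_virtual=True, is_virtual=True,
# matching its declaration as a pure virtual member. The EnableAscii overloads
# are each registered with their own add_method call under the same name;
# PyBindGen selects among them at call time based on the arguments supplied.
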
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3Bar_methods(root_module, cls):
## block-ack-manager.h (module 'wifi'): ns3::Bar::Bar(ns3::Bar const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Bar const &', 'arg0')])
## block-ack-manager.h (module 'wifi'): ns3::Bar::Bar() [constructor]
cls.add_constructor([])
## block-ack-manager.h (module 'wifi'): ns3::Bar::Bar(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address recipient, uint8_t tid, bool immediate) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('bool', 'immediate')])
## block-ack-manager.h (module 'wifi'): ns3::Bar::bar [variable]
cls.add_instance_attribute('bar', 'ns3::Ptr< ns3::Packet const >', is_const=False)
## block-ack-manager.h (module 'wifi'): ns3::Bar::immediate [variable]
cls.add_instance_attribute('immediate', 'bool', is_const=False)
## block-ack-manager.h (module 'wifi'): ns3::Bar::recipient [variable]
cls.add_instance_attribute('recipient', 'ns3::Mac48Address', is_const=False)
## block-ack-manager.h (module 'wifi'): ns3::Bar::tid [variable]
cls.add_instance_attribute('tid', 'uint8_t', is_const=False)
return
def register_Ns3BlockAckAgreement_methods(root_module, cls):
## block-ack-agreement.h (module 'wifi'): ns3::BlockAckAgreement::BlockAckAgreement(ns3::BlockAckAgreement const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BlockAckAgreement const &', 'arg0')])
## block-ack-agreement.h (module 'wifi'): ns3::BlockAckAgreement::BlockAckAgreement() [constructor]
cls.add_constructor([])
## block-ack-agreement.h (module 'wifi'): ns3::BlockAckAgreement::BlockAckAgreement(ns3::Mac48Address peer, uint8_t tid) [constructor]
cls.add_constructor([param('ns3::Mac48Address', 'peer'), param('uint8_t', 'tid')])
## block-ack-agreement.h (module 'wifi'): uint16_t ns3::BlockAckAgreement::GetBufferSize() const [member function]
cls.add_method('GetBufferSize',
'uint16_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): ns3::Mac48Address ns3::BlockAckAgreement::GetPeer() const [member function]
cls.add_method('GetPeer',
'ns3::Mac48Address',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): uint16_t ns3::BlockAckAgreement::GetStartingSequence() const [member function]
cls.add_method('GetStartingSequence',
'uint16_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): uint16_t ns3::BlockAckAgreement::GetStartingSequenceControl() const [member function]
cls.add_method('GetStartingSequenceControl',
'uint16_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): uint8_t ns3::BlockAckAgreement::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): uint16_t ns3::BlockAckAgreement::GetTimeout() const [member function]
cls.add_method('GetTimeout',
'uint16_t',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): bool ns3::BlockAckAgreement::IsAmsduSupported() const [member function]
cls.add_method('IsAmsduSupported',
'bool',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): bool ns3::BlockAckAgreement::IsImmediateBlockAck() const [member function]
cls.add_method('IsImmediateBlockAck',
'bool',
[],
is_const=True)
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetAmsduSupport(bool supported) [member function]
cls.add_method('SetAmsduSupport',
'void',
[param('bool', 'supported')])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetBufferSize(uint16_t bufferSize) [member function]
cls.add_method('SetBufferSize',
'void',
[param('uint16_t', 'bufferSize')])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetDelayedBlockAck() [member function]
cls.add_method('SetDelayedBlockAck',
'void',
[])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetImmediateBlockAck() [member function]
cls.add_method('SetImmediateBlockAck',
'void',
[])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetStartingSequence(uint16_t seq) [member function]
cls.add_method('SetStartingSequence',
'void',
[param('uint16_t', 'seq')])
## block-ack-agreement.h (module 'wifi'): void ns3::BlockAckAgreement::SetTimeout(uint16_t timeout) [member function]
cls.add_method('SetTimeout',
'void',
[param('uint16_t', 'timeout')])
return
def register_Ns3BlockAckManager_methods(root_module, cls):
## block-ack-manager.h (module 'wifi'): ns3::BlockAckManager::BlockAckManager() [constructor]
cls.add_constructor([])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::CreateAgreement(ns3::MgtAddBaRequestHeader const * reqHdr, ns3::Mac48Address recipient) [member function]
cls.add_method('CreateAgreement',
'void',
[param('ns3::MgtAddBaRequestHeader const *', 'reqHdr'), param('ns3::Mac48Address', 'recipient')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::DestroyAgreement(ns3::Mac48Address recipient, uint8_t tid) [member function]
cls.add_method('DestroyAgreement',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')])
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::ExistsAgreement(ns3::Mac48Address recipient, uint8_t tid) const [member function]
cls.add_method('ExistsAgreement',
'bool',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')],
is_const=True)
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::ExistsAgreementInState(ns3::Mac48Address recipient, uint8_t tid, ns3::OriginatorBlockAckAgreement::State state) const [member function]
cls.add_method('ExistsAgreementInState',
'bool',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('ns3::OriginatorBlockAckAgreement::State', 'state')],
is_const=True)
## block-ack-manager.h (module 'wifi'): uint32_t ns3::BlockAckManager::GetNBufferedPackets(ns3::Mac48Address recipient, uint8_t tid) const [member function]
cls.add_method('GetNBufferedPackets',
'uint32_t',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')],
is_const=True)
## block-ack-manager.h (module 'wifi'): uint32_t ns3::BlockAckManager::GetNRetryNeededPackets(ns3::Mac48Address recipient, uint8_t tid) const [member function]
cls.add_method('GetNRetryNeededPackets',
'uint32_t',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')],
is_const=True)
## block-ack-manager.h (module 'wifi'): ns3::Ptr<ns3::Packet const> ns3::BlockAckManager::GetNextPacket(ns3::WifiMacHeader & hdr) [member function]
cls.add_method('GetNextPacket',
'ns3::Ptr< ns3::Packet const >',
[param('ns3::WifiMacHeader &', 'hdr')])
## block-ack-manager.h (module 'wifi'): uint32_t ns3::BlockAckManager::GetNextPacketSize() const [member function]
cls.add_method('GetNextPacketSize',
'uint32_t',
[],
is_const=True)
## block-ack-manager.h (module 'wifi'): uint16_t ns3::BlockAckManager::GetSeqNumOfNextRetryPacket(ns3::Mac48Address recipient, uint8_t tid) const [member function]
cls.add_method('GetSeqNumOfNextRetryPacket',
'uint16_t',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')],
is_const=True)
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::HasBar(ns3::Bar & bar) [member function]
cls.add_method('HasBar',
'bool',
[param('ns3::Bar &', 'bar')])
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::HasOtherFragments(uint16_t sequenceNumber) const [member function]
cls.add_method('HasOtherFragments',
'bool',
[param('uint16_t', 'sequenceNumber')],
is_const=True)
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::HasPackets() const [member function]
cls.add_method('HasPackets',
'bool',
[],
is_const=True)
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::NotifyAgreementEstablished(ns3::Mac48Address recipient, uint8_t tid, uint16_t startingSeq) [member function]
cls.add_method('NotifyAgreementEstablished',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('uint16_t', 'startingSeq')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::NotifyAgreementUnsuccessful(ns3::Mac48Address recipient, uint8_t tid) [member function]
cls.add_method('NotifyAgreementUnsuccessful',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::NotifyGotBlockAck(ns3::CtrlBAckResponseHeader const * blockAck, ns3::Mac48Address recipient) [member function]
cls.add_method('NotifyGotBlockAck',
'void',
[param('ns3::CtrlBAckResponseHeader const *', 'blockAck'), param('ns3::Mac48Address', 'recipient')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::NotifyMpduTransmission(ns3::Mac48Address recipient, uint8_t tid, uint16_t nextSeqNumber) [member function]
cls.add_method('NotifyMpduTransmission',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('uint16_t', 'nextSeqNumber')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetBlockAckInactivityCallback(ns3::Callback<void, ns3::Mac48Address, unsigned char, bool, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetBlockAckInactivityCallback',
'void',
[param('ns3::Callback< void, ns3::Mac48Address, unsigned char, bool, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetBlockAckThreshold(uint8_t nPackets) [member function]
cls.add_method('SetBlockAckThreshold',
'void',
[param('uint8_t', 'nPackets')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetBlockAckType(ns3::BlockAckType bAckType) [member function]
cls.add_method('SetBlockAckType',
'void',
[param('ns3::BlockAckType', 'bAckType')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetBlockDestinationCallback(ns3::Callback<void, ns3::Mac48Address, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetBlockDestinationCallback',
'void',
[param('ns3::Callback< void, ns3::Mac48Address, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetMaxPacketDelay(ns3::Time maxDelay) [member function]
cls.add_method('SetMaxPacketDelay',
'void',
[param('ns3::Time', 'maxDelay')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetQueue(ns3::Ptr<ns3::WifiMacQueue> queue) [member function]
cls.add_method('SetQueue',
'void',
[param('ns3::Ptr< ns3::WifiMacQueue >', 'queue')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetTxMiddle(ns3::MacTxMiddle * txMiddle) [member function]
cls.add_method('SetTxMiddle',
'void',
[param('ns3::MacTxMiddle *', 'txMiddle')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::SetUnblockDestinationCallback(ns3::Callback<void, ns3::Mac48Address, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetUnblockDestinationCallback',
'void',
[param('ns3::Callback< void, ns3::Mac48Address, unsigned char, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::StorePacket(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr, ns3::Time tStamp) [member function]
cls.add_method('StorePacket',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr'), param('ns3::Time', 'tStamp')])
## block-ack-manager.h (module 'wifi'): bool ns3::BlockAckManager::SwitchToBlockAckIfNeeded(ns3::Mac48Address recipient, uint8_t tid, uint16_t startingSeq) [member function]
cls.add_method('SwitchToBlockAckIfNeeded',
'bool',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid'), param('uint16_t', 'startingSeq')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::TearDownBlockAck(ns3::Mac48Address recipient, uint8_t tid) [member function]
cls.add_method('TearDownBlockAck',
'void',
[param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')])
## block-ack-manager.h (module 'wifi'): void ns3::BlockAckManager::UpdateAgreement(ns3::MgtAddBaResponseHeader const * respHdr, ns3::Mac48Address recipient) [member function]
cls.add_method('UpdateAgreement',
'void',
[param('ns3::MgtAddBaResponseHeader const *', 'respHdr'), param('ns3::Mac48Address', 'recipient')])
return
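
# The ns3::Callback<...> parameter types above are spelled out with ns3::empty
# placeholders because ns-3's Callback template has a fixed arity; unused
# template slots are padded with ns3::empty in the scanned type names.
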
def register_Ns3Buffer_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
cls.add_constructor([param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function]
cls.add_method('AddAtEnd',
'bool',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Buffer const &', 'o')])
## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function]
cls.add_method('AddAtStart',
'bool',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
cls.add_method('Begin',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Buffer',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function]
cls.add_method('CreateFullCopy',
'ns3::Buffer',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
cls.add_method('End',
'ns3::Buffer::Iterator',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function]
cls.add_method('GetCurrentEndOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function]
cls.add_method('GetCurrentStartOffset',
'int32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
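
# Nested types such as ns3::Buffer::Iterator are wrapped as classes in their
# own right (declared with outer_class=... in the type-registration section)
# and get their own register_* helper, which is why the Iterator's Read*/Write*
# accessors appear separately below.
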
def register_Ns3BufferIterator_methods(root_module, cls):
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
cls.add_constructor([])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
cls.add_method('CalculateIpChecksum',
'uint16_t',
[param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
cls.add_method('GetDistanceFrom',
'uint32_t',
[param('ns3::Buffer::Iterator const &', 'o')],
is_const=True)
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
cls.add_method('IsEnd',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
cls.add_method('IsStart',
'bool',
[],
is_const=True)
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
cls.add_method('Next',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
cls.add_method('Next',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
cls.add_method('Prev',
'void',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
cls.add_method('Prev',
'void',
[param('uint32_t', 'delta')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
cls.add_method('ReadLsbtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
cls.add_method('ReadLsbtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
cls.add_method('ReadLsbtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
cls.add_method('ReadNtohU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
cls.add_method('ReadNtohU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
cls.add_method('ReadNtohU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
cls.add_method('Write',
'void',
[param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
cls.add_method('WriteHtolsbU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
cls.add_method('WriteHtolsbU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
cls.add_method('WriteHtolsbU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
cls.add_method('WriteHtonU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
cls.add_method('WriteHtonU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
cls.add_method('WriteHtonU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data')])
## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'data'), param('uint32_t', 'len')])
return
def register_Ns3ByteTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagIterator::Item',
[])
return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
cls.add_method('GetEnd',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
cls.add_method('GetStart',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3ByteTagList_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
cls.add_constructor([])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
cls.add_method('Add',
'ns3::TagBuffer',
[param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
cls.add_method('Add',
'void',
[param('ns3::ByteTagList const &', 'o')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t adjustment, int32_t appendOffset) [member function]
cls.add_method('AddAtEnd',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')])
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t adjustment, int32_t prependOffset) [member function]
cls.add_method('AddAtStart',
'void',
[param('int32_t', 'adjustment'), param('int32_t', 'prependOffset')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
cls.add_method('Begin',
'ns3::ByteTagList::Iterator',
[param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
is_const=True)
## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
cls.add_method('GetOffsetStart',
'uint32_t',
[],
is_const=True)
## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
cls.add_method('Next',
'ns3::ByteTagList::Iterator::Item',
[])
return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
cls.add_constructor([param('ns3::TagBuffer', 'buf')])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
cls.add_instance_attribute('end', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
cls.add_instance_attribute('size', 'uint32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
cls.add_instance_attribute('start', 'int32_t', is_const=False)
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3CapabilityInformation_methods(root_module, cls):
## capability-information.h (module 'wifi'): ns3::CapabilityInformation::CapabilityInformation(ns3::CapabilityInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CapabilityInformation const &', 'arg0')])
## capability-information.h (module 'wifi'): ns3::CapabilityInformation::CapabilityInformation() [constructor]
cls.add_constructor([])
## capability-information.h (module 'wifi'): ns3::Buffer::Iterator ns3::CapabilityInformation::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')])
## capability-information.h (module 'wifi'): uint32_t ns3::CapabilityInformation::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## capability-information.h (module 'wifi'): bool ns3::CapabilityInformation::IsEss() const [member function]
cls.add_method('IsEss',
'bool',
[],
is_const=True)
## capability-information.h (module 'wifi'): bool ns3::CapabilityInformation::IsIbss() const [member function]
cls.add_method('IsIbss',
'bool',
[],
is_const=True)
## capability-information.h (module 'wifi'): ns3::Buffer::Iterator ns3::CapabilityInformation::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True)
## capability-information.h (module 'wifi'): void ns3::CapabilityInformation::SetEss() [member function]
cls.add_method('SetEss',
'void',
[])
## capability-information.h (module 'wifi'): void ns3::CapabilityInformation::SetIbss() [member function]
cls.add_method('SetIbss',
'void',
[])
return
def register_Ns3EventId_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('==')
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventId const &', 'arg0')])
## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
cls.add_constructor([])
## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
cls.add_method('GetTs',
'uint64_t',
[],
is_const=True)
## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
cls.add_method('GetUid',
'uint32_t',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
cls.add_method('IsExpired',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
cls.add_method('IsRunning',
'bool',
[],
is_const=True)
## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
cls.add_method('PeekEventImpl',
'ns3::EventImpl *',
[],
is_const=True)
return
def register_Ns3Ipv4Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
cls.add_constructor([param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('CombineMask',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv4Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv4Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('GetSubnetDirectedBroadcast',
'ns3::Ipv4Address',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Address',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Address const &', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
cls.add_method('IsLocalMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
cls.add_method('IsSubnetDirectedBroadcast',
'bool',
[param('ns3::Ipv4Mask const &', 'mask')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'address')])
## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
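    # Illustrative usage sketch (assumes the standard ns.network Python module
    # path for the built bindings; values shown are examples only):
    #   addr = ns.network.Ipv4Address("10.1.1.1")
    #   net = addr.CombineMask(ns.network.Ipv4Mask("255.255.255.0"))  # -> 10.1.1.0
    #   addr.IsMulticast()  # -> False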
return
def register_Ns3Ipv4Mask_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
cls.add_constructor([param('uint32_t', 'mask')])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
cls.add_constructor([param('char const *', 'mask')])
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
cls.add_method('Get',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
cls.add_method('GetInverse',
'uint32_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint16_t',
[],
is_const=True)
## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv4Mask',
[],
is_static=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv4Mask', 'other')],
is_const=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
cls.add_method('Set',
'void',
[param('uint32_t', 'mask')])
return
def register_Ns3Ipv6Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
cls.add_constructor([param('char const *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
cls.add_constructor([param('uint8_t *', 'address')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
cls.add_method('CombinePrefix',
'ns3::Ipv6Address',
[param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Ipv6Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
cls.add_method('Deserialize',
'ns3::Ipv6Address',
[param('uint8_t const *', 'buf')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
cls.add_method('GetAllHostsMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
cls.add_method('GetAllNodesMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
cls.add_method('GetAllRoutersMulticast',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
cls.add_method('GetAny',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Address',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
cls.add_method('IsAllHostsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
cls.add_method('IsAllNodesMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
cls.add_method('IsAllRoutersMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
cls.add_method('IsAny',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Address const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
cls.add_method('IsLinkLocal',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
cls.add_method('IsLocalhost',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
cls.add_method('IsSolicitedMulticast',
'bool',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
cls.add_method('MakeAutoconfiguredAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
cls.add_method('MakeAutoconfiguredLinkLocalAddress',
'ns3::Ipv6Address',
[param('ns3::Mac48Address', 'mac')],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
cls.add_method('MakeSolicitedAddress',
'ns3::Ipv6Address',
[param('ns3::Ipv6Address', 'addr')],
is_static=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
cls.add_method('Serialize',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
cls.add_method('Set',
'void',
[param('char const *', 'address')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
cls.add_method('Set',
'void',
[param('uint8_t *', 'address')])
return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
cls.add_constructor([param('uint8_t *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
cls.add_constructor([param('char const *', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
cls.add_constructor([param('uint8_t', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
cls.add_method('GetBytes',
'void',
[param('uint8_t *', 'buf')],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
cls.add_method('GetLoopback',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
cls.add_method('GetOnes',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
cls.add_method('GetPrefixLength',
'uint8_t',
[],
is_const=True)
## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
cls.add_method('GetZero',
'ns3::Ipv6Prefix',
[],
is_static=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ipv6Prefix const &', 'other')],
is_const=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
cls.add_method('IsMatch',
'bool',
[param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
is_const=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3Mac48Address_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(ns3::Mac48Address const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48Address::Mac48Address(char const * str) [constructor]
cls.add_constructor([param('char const *', 'str')])
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::Allocate() [member function]
cls.add_method('Allocate',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::ConvertFrom(ns3::Address const & address) [member function]
cls.add_method('ConvertFrom',
'ns3::Mac48Address',
[param('ns3::Address const &', 'address')],
is_static=True)
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyFrom(uint8_t const * buffer) [member function]
cls.add_method('CopyFrom',
'void',
[param('uint8_t const *', 'buffer')])
## mac48-address.h (module 'network'): void ns3::Mac48Address::CopyTo(uint8_t * buffer) const [member function]
cls.add_method('CopyTo',
'void',
[param('uint8_t *', 'buffer')],
is_const=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetBroadcast() [member function]
cls.add_method('GetBroadcast',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv4Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv4Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast(ns3::Ipv6Address address) [member function]
cls.add_method('GetMulticast',
'ns3::Mac48Address',
[param('ns3::Ipv6Address', 'address')],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticast6Prefix() [member function]
cls.add_method('GetMulticast6Prefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): static ns3::Mac48Address ns3::Mac48Address::GetMulticastPrefix() [member function]
cls.add_method('GetMulticastPrefix',
'ns3::Mac48Address',
[],
is_static=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): bool ns3::Mac48Address::IsGroup() const [member function]
cls.add_method('IsGroup',
'bool',
[],
is_const=True)
## mac48-address.h (module 'network'): static bool ns3::Mac48Address::IsMatchingType(ns3::Address const & address) [member function]
cls.add_method('IsMatchingType',
'bool',
[param('ns3::Address const &', 'address')],
is_static=True)
return
def register_Ns3MeshHelper_methods(root_module, cls):
## mesh-helper.h (module 'mesh'): ns3::MeshHelper::MeshHelper(ns3::MeshHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MeshHelper const &', 'arg0')])
## mesh-helper.h (module 'mesh'): ns3::MeshHelper::MeshHelper() [constructor]
cls.add_constructor([])
## mesh-helper.h (module 'mesh'): static ns3::MeshHelper ns3::MeshHelper::Default() [member function]
cls.add_method('Default',
'ns3::MeshHelper',
[],
is_static=True)
## mesh-helper.h (module 'mesh'): ns3::NetDeviceContainer ns3::MeshHelper::Install(ns3::WifiPhyHelper const & phyHelper, ns3::NodeContainer c) const [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::WifiPhyHelper const &', 'phyHelper'), param('ns3::NodeContainer', 'c')],
is_const=True)
## mesh-helper.h (module 'mesh'): void ns3::MeshHelper::Report(ns3::Ptr<ns3::NetDevice> const & arg0, std::ostream & arg1) [member function]
cls.add_method('Report',
'void',
[param('ns3::Ptr< ns3::NetDevice > const &', 'arg0'), param('std::ostream &', 'arg1')])
## mesh-helper.h (module 'mesh'): void ns3::MeshHelper::ResetStats(ns3::Ptr<ns3::NetDevice> const & arg0) [member function]
cls.add_method('ResetStats',
'void',
[param('ns3::Ptr< ns3::NetDevice > const &', 'arg0')])
## mesh-helper.h (module 'mesh'): void ns3::MeshHelper::SetMacType(std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetMacType',
'void',
[param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## mesh-helper.h (module 'mesh'): void ns3::MeshHelper::SetNumberOfInterfaces(uint32_t nInterfaces) [member function]
cls.add_method('SetNumberOfInterfaces',
'void',
[param('uint32_t', 'nInterfaces')])
## mesh-helper.h (module 'mesh'): void ns3::MeshHelper::SetRemoteStationManager(std::string type, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetRemoteStationManager',
'void',
[param('std::string', 'type'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## mesh-helper.h (module 'mesh'): void ns3::MeshHelper::SetSpreadInterfaceChannels(ns3::MeshHelper::ChannelPolicy arg0) [member function]
cls.add_method('SetSpreadInterfaceChannels',
'void',
[param('ns3::MeshHelper::ChannelPolicy', 'arg0')])
## mesh-helper.h (module 'mesh'): void ns3::MeshHelper::SetStackInstaller(std::string type, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetStackInstaller',
'void',
[param('std::string', 'type'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## mesh-helper.h (module 'mesh'): void ns3::MeshHelper::SetStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('SetStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')])
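    # Illustrative usage sketch (assumes the standard ns.mesh / ns.wifi Python
    # module paths; 'phy' stands for an already-configured wifi PHY helper and
    # 'nodes' for an existing NodeContainer):
    #   mesh = ns.mesh.MeshHelper.Default()
    #   mesh.SetStackInstaller("ns3::Dot11sStack")
    #   mesh.SetNumberOfInterfaces(1)
    #   devices = mesh.Install(phy, nodes)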
return
def register_Ns3MeshWifiBeacon_methods(root_module, cls):
## mesh-wifi-beacon.h (module 'mesh'): ns3::MeshWifiBeacon::MeshWifiBeacon(ns3::MeshWifiBeacon const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MeshWifiBeacon const &', 'arg0')])
## mesh-wifi-beacon.h (module 'mesh'): ns3::MeshWifiBeacon::MeshWifiBeacon(ns3::Ssid ssid, ns3::SupportedRates rates, uint64_t us) [constructor]
cls.add_constructor([param('ns3::Ssid', 'ssid'), param('ns3::SupportedRates', 'rates'), param('uint64_t', 'us')])
## mesh-wifi-beacon.h (module 'mesh'): void ns3::MeshWifiBeacon::AddInformationElement(ns3::Ptr<ns3::WifiInformationElement> ie) [member function]
cls.add_method('AddInformationElement',
'void',
[param('ns3::Ptr< ns3::WifiInformationElement >', 'ie')])
## mesh-wifi-beacon.h (module 'mesh'): ns3::MgtBeaconHeader ns3::MeshWifiBeacon::BeaconHeader() const [member function]
cls.add_method('BeaconHeader',
'ns3::MgtBeaconHeader',
[],
is_const=True)
## mesh-wifi-beacon.h (module 'mesh'): ns3::WifiMacHeader ns3::MeshWifiBeacon::CreateHeader(ns3::Mac48Address address, ns3::Mac48Address mpAddress) [member function]
cls.add_method('CreateHeader',
'ns3::WifiMacHeader',
[param('ns3::Mac48Address', 'address'), param('ns3::Mac48Address', 'mpAddress')])
## mesh-wifi-beacon.h (module 'mesh'): ns3::Ptr<ns3::Packet> ns3::MeshWifiBeacon::CreatePacket() [member function]
cls.add_method('CreatePacket',
'ns3::Ptr< ns3::Packet >',
[])
## mesh-wifi-beacon.h (module 'mesh'): ns3::Time ns3::MeshWifiBeacon::GetBeaconInterval() const [member function]
cls.add_method('GetBeaconInterval',
'ns3::Time',
[],
is_const=True)
return
def register_Ns3NetDeviceContainer_methods(root_module, cls):
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'arg0')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer() [constructor]
cls.add_constructor([])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::Ptr<ns3::NetDevice> dev) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(std::string devName) [constructor]
cls.add_constructor([param('std::string', 'devName')])
## net-device-container.h (module 'network'): ns3::NetDeviceContainer::NetDeviceContainer(ns3::NetDeviceContainer const & a, ns3::NetDeviceContainer const & b) [constructor]
cls.add_constructor([param('ns3::NetDeviceContainer const &', 'a'), param('ns3::NetDeviceContainer const &', 'b')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::NetDeviceContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NetDeviceContainer', 'other')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## net-device-container.h (module 'network'): void ns3::NetDeviceContainer::Add(std::string deviceName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'deviceName')])
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::NetDevice>*,std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > > ns3::NetDeviceContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::NetDevice > const, std::vector< ns3::Ptr< ns3::NetDevice > > >',
[],
is_const=True)
## net-device-container.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::NetDeviceContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True)
## net-device-container.h (module 'network'): uint32_t ns3::NetDeviceContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
return
def register_Ns3NodeContainer_methods(root_module, cls):
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer() [constructor]
cls.add_constructor([])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::Ptr<ns3::Node> node) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(std::string nodeName) [constructor]
cls.add_constructor([param('std::string', 'nodeName')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')])
## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d, ns3::NodeContainer const & e) [constructor]
cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::NodeContainer other) [member function]
cls.add_method('Add',
'void',
[param('ns3::NodeContainer', 'other')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('Add',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## node-container.h (module 'network'): void ns3::NodeContainer::Add(std::string nodeName) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'nodeName')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::Begin() const [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n')])
## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n, uint32_t systemId) [member function]
cls.add_method('Create',
'void',
[param('uint32_t', 'n'), param('uint32_t', 'systemId')])
## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::End() const [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
[],
is_const=True)
## node-container.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NodeContainer::Get(uint32_t i) const [member function]
cls.add_method('Get',
'ns3::Ptr< ns3::Node >',
[param('uint32_t', 'i')],
is_const=True)
## node-container.h (module 'network'): static ns3::NodeContainer ns3::NodeContainer::GetGlobal() [member function]
cls.add_method('GetGlobal',
'ns3::NodeContainer',
[],
is_static=True)
## node-container.h (module 'network'): uint32_t ns3::NodeContainer::GetN() const [member function]
cls.add_method('GetN',
'uint32_t',
[],
is_const=True)
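    # Illustrative usage sketch (assumes the standard ns.network Python module
    # path for the built bindings):
    #   nodes = ns.network.NodeContainer()
    #   nodes.Create(4)                  # four new ns3::Node instances
    #   first = nodes.Get(0)
    #   count = ns.network.NodeContainer.GetGlobal().GetN()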
return
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3ObjectFactory_methods(root_module, cls):
cls.add_output_stream_operator()
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(ns3::ObjectFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactory::ObjectFactory(std::string typeId) [constructor]
cls.add_constructor([param('std::string', 'typeId')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::Object> ns3::ObjectFactory::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::Object >',
[],
is_const=True)
## object-factory.h (module 'core'): ns3::TypeId ns3::ObjectFactory::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
## object-factory.h (module 'core'): void ns3::ObjectFactory::Set(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('Set',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(ns3::TypeId tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('ns3::TypeId', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(char const * tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('char const *', 'tid')])
## object-factory.h (module 'core'): void ns3::ObjectFactory::SetTypeId(std::string tid) [member function]
cls.add_method('SetTypeId',
'void',
[param('std::string', 'tid')])
return
def register_Ns3OriginatorBlockAckAgreement_methods(root_module, cls):
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement::OriginatorBlockAckAgreement(ns3::OriginatorBlockAckAgreement const & arg0) [copy constructor]
cls.add_constructor([param('ns3::OriginatorBlockAckAgreement const &', 'arg0')])
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement::OriginatorBlockAckAgreement() [constructor]
cls.add_constructor([])
## originator-block-ack-agreement.h (module 'wifi'): ns3::OriginatorBlockAckAgreement::OriginatorBlockAckAgreement(ns3::Mac48Address recipient, uint8_t tid) [constructor]
cls.add_constructor([param('ns3::Mac48Address', 'recipient'), param('uint8_t', 'tid')])
## originator-block-ack-agreement.h (module 'wifi'): void ns3::OriginatorBlockAckAgreement::CompleteExchange() [member function]
cls.add_method('CompleteExchange',
'void',
[])
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsBlockAckRequestNeeded() const [member function]
cls.add_method('IsBlockAckRequestNeeded',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsEstablished() const [member function]
cls.add_method('IsEstablished',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsInactive() const [member function]
cls.add_method('IsInactive',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsPending() const [member function]
cls.add_method('IsPending',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): bool ns3::OriginatorBlockAckAgreement::IsUnsuccessful() const [member function]
cls.add_method('IsUnsuccessful',
'bool',
[],
is_const=True)
## originator-block-ack-agreement.h (module 'wifi'): void ns3::OriginatorBlockAckAgreement::NotifyMpduTransmission(uint16_t nextSeqNumber) [member function]
cls.add_method('NotifyMpduTransmission',
'void',
[param('uint16_t', 'nextSeqNumber')])
## originator-block-ack-agreement.h (module 'wifi'): void ns3::OriginatorBlockAckAgreement::SetState(ns3::OriginatorBlockAckAgreement::State state) [member function]
cls.add_method('SetState',
'void',
[param('ns3::OriginatorBlockAckAgreement::State', 'state')])
return
def register_Ns3PacketMetadata_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor]
cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::PacketMetadata const &', 'o')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[param('ns3::Buffer', 'buffer')],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function]
cls.add_method('CreateFragment',
'ns3::PacketMetadata',
[param('uint32_t', 'start'), param('uint32_t', 'end')],
is_const=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function]
cls.add_method('Enable',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'end')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'start')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function]
cls.add_method('RemoveHeader',
'void',
[param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
cls.add_method('RemoveTrailer',
'void',
[param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item() [constructor]
cls.add_constructor([])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::Item(ns3::PacketMetadata::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::current [variable]
cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentSize [variable]
cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromEnd [variable]
cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::currentTrimedFromStart [variable]
cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::isFragment [variable]
cls.add_instance_attribute('isFragment', 'bool', is_const=False)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata::ItemIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator::ItemIterator(ns3::PacketMetadata const * metadata, ns3::Buffer buffer) [constructor]
cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
## packet-metadata.h (module 'network'): bool ns3::PacketMetadata::ItemIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item ns3::PacketMetadata::ItemIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketMetadata::Item',
[])
return
def register_Ns3PacketTagIterator_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::PacketTagIterator(ns3::PacketTagIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
## packet.h (module 'network'): bool ns3::PacketTagIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator::Item ns3::PacketTagIterator::Next() [member function]
cls.add_method('Next',
'ns3::PacketTagIterator::Item',
[])
return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
## packet.h (module 'network'): ns3::PacketTagIterator::Item::Item(ns3::PacketTagIterator::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
## packet.h (module 'network'): void ns3::PacketTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
cls.add_method('GetTag',
'void',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::TypeId ns3::PacketTagIterator::Item::GetTypeId() const [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_const=True)
return
def register_Ns3PacketTagList_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::PacketTagList(ns3::PacketTagList const & o) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::Add(ns3::Tag const & tag) const [member function]
cls.add_method('Add',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData const * ns3::PacketTagList::Head() const [member function]
cls.add_method('Head',
'ns3::PacketTagList::TagData const *',
[],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Peek(ns3::Tag & tag) const [member function]
cls.add_method('Peek',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet-tag-list.h (module 'network'): bool ns3::PacketTagList::Remove(ns3::Tag & tag) [member function]
cls.add_method('Remove',
'bool',
[param('ns3::Tag &', 'tag')])
## packet-tag-list.h (module 'network'): void ns3::PacketTagList::RemoveAll() [member function]
cls.add_method('RemoveAll',
'void',
[])
return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData() [constructor]
cls.add_constructor([])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData(ns3::PacketTagList::TagData const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::count [variable]
cls.add_instance_attribute('count', 'uint32_t', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::data [variable]
cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::next [variable]
cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::tid [variable]
cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
return
def register_Ns3PcapFile_methods(root_module, cls):
## pcap-file.h (module 'network'): ns3::PcapFile::PcapFile() [constructor]
cls.add_constructor([])
## pcap-file.h (module 'network'): void ns3::PcapFile::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## pcap-file.h (module 'network'): void ns3::PcapFile::Close() [member function]
cls.add_method('Close',
'void',
[])
## pcap-file.h (module 'network'): static bool ns3::PcapFile::Diff(std::string const & f1, std::string const & f2, uint32_t & sec, uint32_t & usec, uint32_t snapLen=ns3::PcapFile::SNAPLEN_DEFAULT) [member function]
cls.add_method('Diff',
'bool',
[param('std::string const &', 'f1'), param('std::string const &', 'f2'), param('uint32_t &', 'sec'), param('uint32_t &', 'usec'), param('uint32_t', 'snapLen', default_value='ns3::PcapFile::SNAPLEN_DEFAULT')],
is_static=True)
## pcap-file.h (module 'network'): bool ns3::PcapFile::Eof() const [member function]
cls.add_method('Eof',
'bool',
[],
is_const=True)
## pcap-file.h (module 'network'): bool ns3::PcapFile::Fail() const [member function]
cls.add_method('Fail',
'bool',
[],
is_const=True)
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetDataLinkType() [member function]
cls.add_method('GetDataLinkType',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetMagic() [member function]
cls.add_method('GetMagic',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetSigFigs() [member function]
cls.add_method('GetSigFigs',
'uint32_t',
[])
## pcap-file.h (module 'network'): uint32_t ns3::PcapFile::GetSnapLen() [member function]
cls.add_method('GetSnapLen',
'uint32_t',
[])
## pcap-file.h (module 'network'): bool ns3::PcapFile::GetSwapMode() [member function]
cls.add_method('GetSwapMode',
'bool',
[])
## pcap-file.h (module 'network'): int32_t ns3::PcapFile::GetTimeZoneOffset() [member function]
cls.add_method('GetTimeZoneOffset',
'int32_t',
[])
## pcap-file.h (module 'network'): uint16_t ns3::PcapFile::GetVersionMajor() [member function]
cls.add_method('GetVersionMajor',
'uint16_t',
[])
## pcap-file.h (module 'network'): uint16_t ns3::PcapFile::GetVersionMinor() [member function]
cls.add_method('GetVersionMinor',
'uint16_t',
[])
## pcap-file.h (module 'network'): void ns3::PcapFile::Init(uint32_t dataLinkType, uint32_t snapLen=ns3::PcapFile::SNAPLEN_DEFAULT, int32_t timeZoneCorrection=ns3::PcapFile::ZONE_DEFAULT, bool swapMode=false) [member function]
cls.add_method('Init',
'void',
[param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='ns3::PcapFile::SNAPLEN_DEFAULT'), param('int32_t', 'timeZoneCorrection', default_value='ns3::PcapFile::ZONE_DEFAULT'), param('bool', 'swapMode', default_value='false')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Open(std::string const & filename, std::_Ios_Openmode mode) [member function]
cls.add_method('Open',
'void',
[param('std::string const &', 'filename'), param('std::_Ios_Openmode', 'mode')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Read(uint8_t * const data, uint32_t maxBytes, uint32_t & tsSec, uint32_t & tsUsec, uint32_t & inclLen, uint32_t & origLen, uint32_t & readLen) [member function]
cls.add_method('Read',
'void',
[param('uint8_t * const', 'data'), param('uint32_t', 'maxBytes'), param('uint32_t &', 'tsSec'), param('uint32_t &', 'tsUsec'), param('uint32_t &', 'inclLen'), param('uint32_t &', 'origLen'), param('uint32_t &', 'readLen')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, uint8_t const * const data, uint32_t totalLen) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('uint8_t const * const', 'data'), param('uint32_t', 'totalLen')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file.h (module 'network'): void ns3::PcapFile::Write(uint32_t tsSec, uint32_t tsUsec, ns3::Header & header, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('Write',
'void',
[param('uint32_t', 'tsSec'), param('uint32_t', 'tsUsec'), param('ns3::Header &', 'header'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file.h (module 'network'): ns3::PcapFile::SNAPLEN_DEFAULT [variable]
cls.add_static_attribute('SNAPLEN_DEFAULT', 'uint32_t const', is_const=True)
## pcap-file.h (module 'network'): ns3::PcapFile::ZONE_DEFAULT [variable]
cls.add_static_attribute('ZONE_DEFAULT', 'int32_t const', is_const=True)
return
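# (editorial note, hedged) ns3::PcapFile::Write is registered three times just
# above; pybindgen collects such repeated registrations into a single
# overloaded Python method, so at run time PcapFile.Write(...) dispatches on
# its arguments (raw buffer, Packet, or Header plus Packet).  This comment is
# an illustrative reading of the generated bindings, not generated output.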
def register_Ns3PcapHelper_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::PcapHelper::PcapHelper(ns3::PcapHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PcapHelper const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::PcapHelper::PcapHelper() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): ns3::Ptr<ns3::PcapFileWrapper> ns3::PcapHelper::CreateFile(std::string filename, std::_Ios_Openmode filemode, uint32_t dataLinkType, uint32_t snapLen=65535, int32_t tzCorrection=0) [member function]
cls.add_method('CreateFile',
'ns3::Ptr< ns3::PcapFileWrapper >',
[param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode'), param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='65535'), param('int32_t', 'tzCorrection', default_value='0')])
## trace-helper.h (module 'network'): std::string ns3::PcapHelper::GetFilenameFromDevice(std::string prefix, ns3::Ptr<ns3::NetDevice> device, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromDevice',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'useObjectNames', default_value='true')])
## trace-helper.h (module 'network'): std::string ns3::PcapHelper::GetFilenameFromInterfacePair(std::string prefix, ns3::Ptr<ns3::Object> object, uint32_t interface, bool useObjectNames=true) [member function]
cls.add_method('GetFilenameFromInterfacePair',
'std::string',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::Object >', 'object'), param('uint32_t', 'interface'), param('bool', 'useObjectNames', default_value='true')])
return
def register_Ns3PcapHelperForDevice_methods(root_module, cls):
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice::PcapHelperForDevice(ns3::PcapHelperForDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::PcapHelperForDevice const &', 'arg0')])
## trace-helper.h (module 'network'): ns3::PcapHelperForDevice::PcapHelperForDevice() [constructor]
cls.add_constructor([])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous=false, bool explicitFilename=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous', default_value='false'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, std::string ndName, bool promiscuous=false, bool explicitFilename=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('std::string', 'ndName'), param('bool', 'promiscuous', default_value='false'), param('bool', 'explicitFilename', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::NetDeviceContainer d, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::NetDeviceContainer', 'd'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, ns3::NodeContainer n, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('ns3::NodeContainer', 'n'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcap(std::string prefix, uint32_t nodeid, uint32_t deviceid, bool promiscuous=false) [member function]
cls.add_method('EnablePcap',
'void',
[param('std::string', 'prefix'), param('uint32_t', 'nodeid'), param('uint32_t', 'deviceid'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcapAll(std::string prefix, bool promiscuous=false) [member function]
cls.add_method('EnablePcapAll',
'void',
[param('std::string', 'prefix'), param('bool', 'promiscuous', default_value='false')])
## trace-helper.h (module 'network'): void ns3::PcapHelperForDevice::EnablePcapInternal(std::string prefix, ns3::Ptr<ns3::NetDevice> nd, bool promiscuous, bool explicitFilename) [member function]
cls.add_method('EnablePcapInternal',
'void',
[param('std::string', 'prefix'), param('ns3::Ptr< ns3::NetDevice >', 'nd'), param('bool', 'promiscuous'), param('bool', 'explicitFilename')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3Simulator_methods(root_module, cls):
## simulator.h (module 'core'): ns3::Simulator::Simulator(ns3::Simulator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
## simulator.h (module 'core'): static void ns3::Simulator::Cancel(ns3::EventId const & id) [member function]
cls.add_method('Cancel',
'void',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Destroy() [member function]
cls.add_method('Destroy',
'void',
[],
is_static=True)
## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetContext() [member function]
cls.add_method('GetContext',
'uint32_t',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetDelayLeft(ns3::EventId const & id) [member function]
cls.add_method('GetDelayLeft',
'ns3::Time',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static ns3::Ptr<ns3::SimulatorImpl> ns3::Simulator::GetImplementation() [member function]
cls.add_method('GetImplementation',
'ns3::Ptr< ns3::SimulatorImpl >',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::GetMaximumSimulationTime() [member function]
cls.add_method('GetMaximumSimulationTime',
'ns3::Time',
[],
is_static=True)
## simulator.h (module 'core'): static uint32_t ns3::Simulator::GetSystemId() [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_static=True)
## simulator.h (module 'core'): static bool ns3::Simulator::IsExpired(ns3::EventId const & id) [member function]
cls.add_method('IsExpired',
'bool',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static bool ns3::Simulator::IsFinished() [member function]
cls.add_method('IsFinished',
'bool',
[],
is_static=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Next() [member function]
cls.add_method('Next',
'ns3::Time',
[],
is_static=True, deprecated=True)
## simulator.h (module 'core'): static ns3::Time ns3::Simulator::Now() [member function]
cls.add_method('Now',
'ns3::Time',
[],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Remove(ns3::EventId const & id) [member function]
cls.add_method('Remove',
'void',
[param('ns3::EventId const &', 'id')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::RunOneEvent() [member function]
cls.add_method('RunOneEvent',
'void',
[],
is_static=True, deprecated=True)
## simulator.h (module 'core'): static void ns3::Simulator::SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl) [member function]
cls.add_method('SetImplementation',
'void',
[param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::SetScheduler(ns3::ObjectFactory schedulerFactory) [member function]
cls.add_method('SetScheduler',
'void',
[param('ns3::ObjectFactory', 'schedulerFactory')],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Stop() [member function]
cls.add_method('Stop',
'void',
[],
is_static=True)
## simulator.h (module 'core'): static void ns3::Simulator::Stop(ns3::Time const & time) [member function]
cls.add_method('Stop',
'void',
[param('ns3::Time const &', 'time')],
is_static=True)
return
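# (editorial note, hedged) The registrations above mirror ns3::Simulator's
# static API, which is what ultimately exposes calls such as Simulator.Now(),
# Simulator.Stop() and Simulator.Destroy() to Python scripts once the module
# is built.  Next() and RunOneEvent() are registered with deprecated=True,
# which pybindgen uses to flag those wrappers as deprecated.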
def register_Ns3StatusCode_methods(root_module, cls):
cls.add_output_stream_operator()
## status-code.h (module 'wifi'): ns3::StatusCode::StatusCode(ns3::StatusCode const & arg0) [copy constructor]
cls.add_constructor([param('ns3::StatusCode const &', 'arg0')])
## status-code.h (module 'wifi'): ns3::StatusCode::StatusCode() [constructor]
cls.add_constructor([])
## status-code.h (module 'wifi'): ns3::Buffer::Iterator ns3::StatusCode::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')])
## status-code.h (module 'wifi'): uint32_t ns3::StatusCode::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## status-code.h (module 'wifi'): bool ns3::StatusCode::IsSuccess() const [member function]
cls.add_method('IsSuccess',
'bool',
[],
is_const=True)
## status-code.h (module 'wifi'): ns3::Buffer::Iterator ns3::StatusCode::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True)
## status-code.h (module 'wifi'): void ns3::StatusCode::SetFailure() [member function]
cls.add_method('SetFailure',
'void',
[])
## status-code.h (module 'wifi'): void ns3::StatusCode::SetSuccess() [member function]
cls.add_method('SetSuccess',
'void',
[])
return
def register_Ns3Tag_methods(root_module, cls):
## tag.h (module 'network'): ns3::Tag::Tag() [constructor]
cls.add_constructor([])
## tag.h (module 'network'): ns3::Tag::Tag(ns3::Tag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Tag const &', 'arg0')])
## tag.h (module 'network'): void ns3::Tag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_virtual=True)
## tag.h (module 'network'): uint32_t ns3::Tag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): static ns3::TypeId ns3::Tag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## tag.h (module 'network'): void ns3::Tag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## tag.h (module 'network'): void ns3::Tag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
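# (editorial note) ns3::Tag above is the abstract base class for packet tags:
# GetSerializedSize/Serialize/Deserialize/Print are registered as pure virtual,
# and Serialize/Deserialize operate on the TagBuffer type registered next, so a
# concrete tag implemented in C++ provides those four methods (sketch of the
# relationship only).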
def register_Ns3TagBuffer_methods(root_module, cls):
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(ns3::TagBuffer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
## tag-buffer.h (module 'network'): ns3::TagBuffer::TagBuffer(uint8_t * start, uint8_t * end) [constructor]
cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::CopyFrom(ns3::TagBuffer o) [member function]
cls.add_method('CopyFrom',
'void',
[param('ns3::TagBuffer', 'o')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Read(uint8_t * buffer, uint32_t size) [member function]
cls.add_method('Read',
'void',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): double ns3::TagBuffer::ReadDouble() [member function]
cls.add_method('ReadDouble',
'double',
[])
## tag-buffer.h (module 'network'): uint16_t ns3::TagBuffer::ReadU16() [member function]
cls.add_method('ReadU16',
'uint16_t',
[])
## tag-buffer.h (module 'network'): uint32_t ns3::TagBuffer::ReadU32() [member function]
cls.add_method('ReadU32',
'uint32_t',
[])
## tag-buffer.h (module 'network'): uint64_t ns3::TagBuffer::ReadU64() [member function]
cls.add_method('ReadU64',
'uint64_t',
[])
## tag-buffer.h (module 'network'): uint8_t ns3::TagBuffer::ReadU8() [member function]
cls.add_method('ReadU8',
'uint8_t',
[])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::TrimAtEnd(uint32_t trim) [member function]
cls.add_method('TrimAtEnd',
'void',
[param('uint32_t', 'trim')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::Write(uint8_t const * buffer, uint32_t size) [member function]
cls.add_method('Write',
'void',
[param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteDouble(double v) [member function]
cls.add_method('WriteDouble',
'void',
[param('double', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU16(uint16_t data) [member function]
cls.add_method('WriteU16',
'void',
[param('uint16_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU32(uint32_t data) [member function]
cls.add_method('WriteU32',
'void',
[param('uint32_t', 'data')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU64(uint64_t v) [member function]
cls.add_method('WriteU64',
'void',
[param('uint64_t', 'v')])
## tag-buffer.h (module 'network'): void ns3::TagBuffer::WriteU8(uint8_t v) [member function]
cls.add_method('WriteU8',
'void',
[param('uint8_t', 'v')])
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_binary_comparison_operator('<')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info')],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
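# (editorial note, hedged) ns3::TypeId is ns-3's run-time type and attribute
# registry; the comparison operators together with LookupByName(),
# GetAttribute() and GetTraceSource() registered above are what let Python
# code introspect a type, e.g. TypeId.LookupByName("ns3::WifiNetDevice") in a
# script (the string is only a hypothetical example).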
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3WifiHelper_methods(root_module, cls):
## wifi-helper.h (module 'wifi'): ns3::WifiHelper::WifiHelper(ns3::WifiHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiHelper const &', 'arg0')])
## wifi-helper.h (module 'wifi'): ns3::WifiHelper::WifiHelper() [constructor]
cls.add_constructor([])
## wifi-helper.h (module 'wifi'): static ns3::WifiHelper ns3::WifiHelper::Default() [member function]
cls.add_method('Default',
'ns3::WifiHelper',
[],
is_static=True)
## wifi-helper.h (module 'wifi'): static void ns3::WifiHelper::EnableLogComponents() [member function]
cls.add_method('EnableLogComponents',
'void',
[],
is_static=True)
## wifi-helper.h (module 'wifi'): ns3::NetDeviceContainer ns3::WifiHelper::Install(ns3::WifiPhyHelper const & phy, ns3::WifiMacHelper const & mac, ns3::NodeContainer c) const [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::WifiPhyHelper const &', 'phy'), param('ns3::WifiMacHelper const &', 'mac'), param('ns3::NodeContainer', 'c')],
is_const=True)
## wifi-helper.h (module 'wifi'): ns3::NetDeviceContainer ns3::WifiHelper::Install(ns3::WifiPhyHelper const & phy, ns3::WifiMacHelper const & mac, ns3::Ptr<ns3::Node> node) const [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::WifiPhyHelper const &', 'phy'), param('ns3::WifiMacHelper const &', 'mac'), param('ns3::Ptr< ns3::Node >', 'node')],
is_const=True)
## wifi-helper.h (module 'wifi'): ns3::NetDeviceContainer ns3::WifiHelper::Install(ns3::WifiPhyHelper const & phy, ns3::WifiMacHelper const & mac, std::string nodeName) const [member function]
cls.add_method('Install',
'ns3::NetDeviceContainer',
[param('ns3::WifiPhyHelper const &', 'phy'), param('ns3::WifiMacHelper const &', 'mac'), param('std::string', 'nodeName')],
is_const=True)
## wifi-helper.h (module 'wifi'): void ns3::WifiHelper::SetRemoteStationManager(std::string type, std::string n0="", ns3::AttributeValue const & v0=ns3::EmptyAttributeValue(), std::string n1="", ns3::AttributeValue const & v1=ns3::EmptyAttributeValue(), std::string n2="", ns3::AttributeValue const & v2=ns3::EmptyAttributeValue(), std::string n3="", ns3::AttributeValue const & v3=ns3::EmptyAttributeValue(), std::string n4="", ns3::AttributeValue const & v4=ns3::EmptyAttributeValue(), std::string n5="", ns3::AttributeValue const & v5=ns3::EmptyAttributeValue(), std::string n6="", ns3::AttributeValue const & v6=ns3::EmptyAttributeValue(), std::string n7="", ns3::AttributeValue const & v7=ns3::EmptyAttributeValue()) [member function]
cls.add_method('SetRemoteStationManager',
'void',
[param('std::string', 'type'), param('std::string', 'n0', default_value='""'), param('ns3::AttributeValue const &', 'v0', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n1', default_value='""'), param('ns3::AttributeValue const &', 'v1', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n2', default_value='""'), param('ns3::AttributeValue const &', 'v2', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n3', default_value='""'), param('ns3::AttributeValue const &', 'v3', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n4', default_value='""'), param('ns3::AttributeValue const &', 'v4', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n5', default_value='""'), param('ns3::AttributeValue const &', 'v5', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n6', default_value='""'), param('ns3::AttributeValue const &', 'v6', default_value='ns3::EmptyAttributeValue()'), param('std::string', 'n7', default_value='""'), param('ns3::AttributeValue const &', 'v7', default_value='ns3::EmptyAttributeValue()')])
## wifi-helper.h (module 'wifi'): void ns3::WifiHelper::SetStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('SetStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')])
return
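# (editorial note, hedged) WifiHelper follows the usual ns-3 helper pattern:
# Default(), SetStandard() and SetRemoteStationManager() configure the helper,
# and the three Install() overloads accept a NodeContainer, a node Ptr or a
# node name.  A typical script pairs it with the WifiPhyHelper and
# WifiMacHelper registered below and calls Install() once per set of devices;
# the exact Python module path depends on how the bindings are packaged.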
def register_Ns3WifiMacHelper_methods(root_module, cls):
## wifi-helper.h (module 'wifi'): ns3::WifiMacHelper::WifiMacHelper() [constructor]
cls.add_constructor([])
## wifi-helper.h (module 'wifi'): ns3::WifiMacHelper::WifiMacHelper(ns3::WifiMacHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMacHelper const &', 'arg0')])
## wifi-helper.h (module 'wifi'): ns3::Ptr<ns3::WifiMac> ns3::WifiMacHelper::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::WifiMac >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3WifiMode_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## wifi-mode.h (module 'wifi'): ns3::WifiMode::WifiMode(ns3::WifiMode const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMode const &', 'arg0')])
## wifi-mode.h (module 'wifi'): ns3::WifiMode::WifiMode() [constructor]
cls.add_constructor([])
## wifi-mode.h (module 'wifi'): ns3::WifiMode::WifiMode(std::string name) [constructor]
cls.add_constructor([param('std::string', 'name')])
## wifi-mode.h (module 'wifi'): uint32_t ns3::WifiMode::GetBandwidth() const [member function]
cls.add_method('GetBandwidth',
'uint32_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): ns3::WifiCodeRate ns3::WifiMode::GetCodeRate() const [member function]
cls.add_method('GetCodeRate',
'ns3::WifiCodeRate',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): uint8_t ns3::WifiMode::GetConstellationSize() const [member function]
cls.add_method('GetConstellationSize',
'uint8_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): uint64_t ns3::WifiMode::GetDataRate() const [member function]
cls.add_method('GetDataRate',
'uint64_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): ns3::WifiModulationClass ns3::WifiMode::GetModulationClass() const [member function]
cls.add_method('GetModulationClass',
'ns3::WifiModulationClass',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): uint64_t ns3::WifiMode::GetPhyRate() const [member function]
cls.add_method('GetPhyRate',
'uint64_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): uint32_t ns3::WifiMode::GetUid() const [member function]
cls.add_method('GetUid',
'uint32_t',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): std::string ns3::WifiMode::GetUniqueName() const [member function]
cls.add_method('GetUniqueName',
'std::string',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): bool ns3::WifiMode::IsMandatory() const [member function]
cls.add_method('IsMandatory',
'bool',
[],
is_const=True)
return
def register_Ns3WifiModeFactory_methods(root_module, cls):
## wifi-mode.h (module 'wifi'): ns3::WifiModeFactory::WifiModeFactory(ns3::WifiModeFactory const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiModeFactory const &', 'arg0')])
## wifi-mode.h (module 'wifi'): static ns3::WifiMode ns3::WifiModeFactory::CreateWifiMode(std::string uniqueName, ns3::WifiModulationClass modClass, bool isMandatory, uint32_t bandwidth, uint32_t dataRate, ns3::WifiCodeRate codingRate, uint8_t constellationSize) [member function]
cls.add_method('CreateWifiMode',
'ns3::WifiMode',
[param('std::string', 'uniqueName'), param('ns3::WifiModulationClass', 'modClass'), param('bool', 'isMandatory'), param('uint32_t', 'bandwidth'), param('uint32_t', 'dataRate'), param('ns3::WifiCodeRate', 'codingRate'), param('uint8_t', 'constellationSize')],
is_static=True)
return
def register_Ns3WifiPhyHelper_methods(root_module, cls):
## wifi-helper.h (module 'wifi'): ns3::WifiPhyHelper::WifiPhyHelper() [constructor]
cls.add_constructor([])
## wifi-helper.h (module 'wifi'): ns3::WifiPhyHelper::WifiPhyHelper(ns3::WifiPhyHelper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiPhyHelper const &', 'arg0')])
## wifi-helper.h (module 'wifi'): ns3::Ptr<ns3::WifiPhy> ns3::WifiPhyHelper::Create(ns3::Ptr<ns3::Node> node, ns3::Ptr<ns3::WifiNetDevice> device) const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::WifiPhy >',
[param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::WifiNetDevice >', 'device')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3WifiPhyListener_methods(root_module, cls):
## wifi-phy.h (module 'wifi'): ns3::WifiPhyListener::WifiPhyListener() [constructor]
cls.add_constructor([])
## wifi-phy.h (module 'wifi'): ns3::WifiPhyListener::WifiPhyListener(ns3::WifiPhyListener const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiPhyListener const &', 'arg0')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyMaybeCcaBusyStart(ns3::Time duration) [member function]
cls.add_method('NotifyMaybeCcaBusyStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyRxEndError() [member function]
cls.add_method('NotifyRxEndError',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyRxEndOk() [member function]
cls.add_method('NotifyRxEndOk',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyRxStart(ns3::Time duration) [member function]
cls.add_method('NotifyRxStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifySwitchingStart(ns3::Time duration) [member function]
cls.add_method('NotifySwitchingStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhyListener::NotifyTxStart(ns3::Time duration) [member function]
cls.add_method('NotifyTxStart',
'void',
[param('ns3::Time', 'duration')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3WifiRemoteStation_methods(root_module, cls):
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::WifiRemoteStation() [constructor]
cls.add_constructor([])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::WifiRemoteStation(ns3::WifiRemoteStation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiRemoteStation const &', 'arg0')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::m_slrc [variable]
cls.add_instance_attribute('m_slrc', 'uint32_t', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::m_ssrc [variable]
cls.add_instance_attribute('m_ssrc', 'uint32_t', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::m_state [variable]
cls.add_instance_attribute('m_state', 'ns3::WifiRemoteStationState *', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation::m_tid [variable]
cls.add_instance_attribute('m_tid', 'uint8_t', is_const=False)
return
def register_Ns3WifiRemoteStationInfo_methods(root_module, cls):
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationInfo::WifiRemoteStationInfo(ns3::WifiRemoteStationInfo const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiRemoteStationInfo const &', 'arg0')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationInfo::WifiRemoteStationInfo() [constructor]
cls.add_constructor([])
## wifi-remote-station-manager.h (module 'wifi'): double ns3::WifiRemoteStationInfo::GetFrameErrorRate() const [member function]
cls.add_method('GetFrameErrorRate',
'double',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationInfo::NotifyTxFailed() [member function]
cls.add_method('NotifyTxFailed',
'void',
[])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationInfo::NotifyTxSuccess(uint32_t retryCounter) [member function]
cls.add_method('NotifyTxSuccess',
'void',
[param('uint32_t', 'retryCounter')])
return
def register_Ns3WifiRemoteStationState_methods(root_module, cls):
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::WifiRemoteStationState() [constructor]
cls.add_constructor([])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::WifiRemoteStationState(ns3::WifiRemoteStationState const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiRemoteStationState const &', 'arg0')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::m_address [variable]
cls.add_instance_attribute('m_address', 'ns3::Mac48Address', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::m_info [variable]
cls.add_instance_attribute('m_info', 'ns3::WifiRemoteStationInfo', is_const=False)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationState::m_operationalRateSet [variable]
cls.add_instance_attribute('m_operationalRateSet', 'ns3::WifiModeList', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Int64x64_t_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('+=', param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('*', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_unary_numeric_operator('-')
cls.add_binary_numeric_operator('-', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short unsigned int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('unsigned char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('long int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('short int const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('signed char const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('double const', 'right'))
cls.add_binary_numeric_operator('/', root_module['ns3::int64x64_t'], root_module['ns3::int64x64_t'], param('ns3::int64x64_t const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_inplace_numeric_operator('*=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('-=', param('ns3::int64x64_t const &', 'right'))
cls.add_inplace_numeric_operator('/=', param('ns3::int64x64_t const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t() [constructor]
cls.add_constructor([])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(int64_t hi, uint64_t lo) [constructor]
cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
## int64x64-double.h (module 'core'): ns3::int64x64_t::int64x64_t(ns3::int64x64_t const & o) [copy constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
## int64x64-double.h (module 'core'): double ns3::int64x64_t::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## int64x64-double.h (module 'core'): int64_t ns3::int64x64_t::GetHigh() const [member function]
cls.add_method('GetHigh',
'int64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): uint64_t ns3::int64x64_t::GetLow() const [member function]
cls.add_method('GetLow',
'uint64_t',
[],
is_const=True)
## int64x64-double.h (module 'core'): static ns3::int64x64_t ns3::int64x64_t::Invert(uint64_t v) [member function]
cls.add_method('Invert',
'ns3::int64x64_t',
[param('uint64_t', 'v')],
is_static=True)
## int64x64-double.h (module 'core'): void ns3::int64x64_t::MulByInvert(ns3::int64x64_t const & o) [member function]
cls.add_method('MulByInvert',
'void',
[param('ns3::int64x64_t const &', 'o')])
return
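# (editorial note) ns3::int64x64_t is the fixed-point value type behind
# ns3::Time (here backed by the double implementation in int64x64-double.h);
# the long run of add_binary_numeric_operator calls above simply registers one
# operator overload per C++ arithmetic type so Python numbers interoperate
# with it directly.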
def register_Ns3Chunk_methods(root_module, cls):
## chunk.h (module 'network'): ns3::Chunk::Chunk() [constructor]
cls.add_constructor([])
## chunk.h (module 'network'): ns3::Chunk::Chunk(ns3::Chunk const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
## chunk.h (module 'network'): uint32_t ns3::Chunk::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## chunk.h (module 'network'): static ns3::TypeId ns3::Chunk::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## chunk.h (module 'network'): void ns3::Chunk::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Header_methods(root_module, cls):
cls.add_output_stream_operator()
## header.h (module 'network'): ns3::Header::Header() [constructor]
cls.add_constructor([])
## header.h (module 'network'): ns3::Header::Header(ns3::Header const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Header const &', 'arg0')])
## header.h (module 'network'): uint32_t ns3::Header::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_virtual=True)
## header.h (module 'network'): uint32_t ns3::Header::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): static ns3::TypeId ns3::Header::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## header.h (module 'network'): void ns3::Header::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## header.h (module 'network'): void ns3::Header::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3MgtAddBaRequestHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaRequestHeader::MgtAddBaRequestHeader(ns3::MgtAddBaRequestHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtAddBaRequestHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaRequestHeader::MgtAddBaRequestHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAddBaRequestHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaRequestHeader::GetBufferSize() const [member function]
cls.add_method('GetBufferSize',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtAddBaRequestHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAddBaRequestHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaRequestHeader::GetStartingSequence() const [member function]
cls.add_method('GetStartingSequence',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint8_t ns3::MgtAddBaRequestHeader::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaRequestHeader::GetTimeout() const [member function]
cls.add_method('GetTimeout',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtAddBaRequestHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtAddBaRequestHeader::IsAmsduSupported() const [member function]
cls.add_method('IsAmsduSupported',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtAddBaRequestHeader::IsImmediateBlockAck() const [member function]
cls.add_method('IsImmediateBlockAck',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetAmsduSupport(bool supported) [member function]
cls.add_method('SetAmsduSupport',
'void',
[param('bool', 'supported')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetBufferSize(uint16_t size) [member function]
cls.add_method('SetBufferSize',
'void',
[param('uint16_t', 'size')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetDelayedBlockAck() [member function]
cls.add_method('SetDelayedBlockAck',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetImmediateBlockAck() [member function]
cls.add_method('SetImmediateBlockAck',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetStartingSequence(uint16_t seq) [member function]
cls.add_method('SetStartingSequence',
'void',
[param('uint16_t', 'seq')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetTid(uint8_t tid) [member function]
cls.add_method('SetTid',
'void',
[param('uint8_t', 'tid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaRequestHeader::SetTimeout(uint16_t timeout) [member function]
cls.add_method('SetTimeout',
'void',
[param('uint16_t', 'timeout')])
return
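## Illustrative sketch (not part of the generated registrations): typical use of the ADDBA
## request header via the setters registered above. 'ns.wifi' is an assumed package name,
## so the snippet is kept in comment form:
##
##     import ns.wifi                      # assumed import path for the built bindings
##     req = ns.wifi.MgtAddBaRequestHeader()
##     req.SetTid(0)                       # traffic identifier of the agreement
##     req.SetBufferSize(64)               # reorder buffer size in MPDUs
##     req.SetTimeout(0)                   # 0 disables the inactivity timeout
##     req.SetImmediateBlockAck()          # immediate rather than delayed block ack
##     req.SetStartingSequence(0)          # first sequence number covered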
def register_Ns3MgtAddBaResponseHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaResponseHeader::MgtAddBaResponseHeader(ns3::MgtAddBaResponseHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtAddBaResponseHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtAddBaResponseHeader::MgtAddBaResponseHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAddBaResponseHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaResponseHeader::GetBufferSize() const [member function]
cls.add_method('GetBufferSize',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtAddBaResponseHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAddBaResponseHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::StatusCode ns3::MgtAddBaResponseHeader::GetStatusCode() const [member function]
cls.add_method('GetStatusCode',
'ns3::StatusCode',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint8_t ns3::MgtAddBaResponseHeader::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAddBaResponseHeader::GetTimeout() const [member function]
cls.add_method('GetTimeout',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtAddBaResponseHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtAddBaResponseHeader::IsAmsduSupported() const [member function]
cls.add_method('IsAmsduSupported',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtAddBaResponseHeader::IsImmediateBlockAck() const [member function]
cls.add_method('IsImmediateBlockAck',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetAmsduSupport(bool supported) [member function]
cls.add_method('SetAmsduSupport',
'void',
[param('bool', 'supported')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetBufferSize(uint16_t size) [member function]
cls.add_method('SetBufferSize',
'void',
[param('uint16_t', 'size')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetDelayedBlockAck() [member function]
cls.add_method('SetDelayedBlockAck',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetImmediateBlockAck() [member function]
cls.add_method('SetImmediateBlockAck',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetStatusCode(ns3::StatusCode code) [member function]
cls.add_method('SetStatusCode',
'void',
[param('ns3::StatusCode', 'code')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetTid(uint8_t tid) [member function]
cls.add_method('SetTid',
'void',
[param('uint8_t', 'tid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAddBaResponseHeader::SetTimeout(uint16_t timeout) [member function]
cls.add_method('SetTimeout',
'void',
[param('uint16_t', 'timeout')])
return
def register_Ns3MgtAssocRequestHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtAssocRequestHeader::MgtAssocRequestHeader(ns3::MgtAssocRequestHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtAssocRequestHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtAssocRequestHeader::MgtAssocRequestHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAssocRequestHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtAssocRequestHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint16_t ns3::MgtAssocRequestHeader::GetListenInterval() const [member function]
cls.add_method('GetListenInterval',
'uint16_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAssocRequestHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::Ssid ns3::MgtAssocRequestHeader::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::SupportedRates ns3::MgtAssocRequestHeader::GetSupportedRates() const [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtAssocRequestHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::SetListenInterval(uint16_t interval) [member function]
cls.add_method('SetListenInterval',
'void',
[param('uint16_t', 'interval')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocRequestHeader::SetSupportedRates(ns3::SupportedRates rates) [member function]
cls.add_method('SetSupportedRates',
'void',
[param('ns3::SupportedRates', 'rates')])
return
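## Illustrative sketch (not part of the generated registrations): filling an association
## request with the setters registered above. 'ns.wifi' and the Ssid string constructor
## are assumptions about the built module, so the snippet is kept in comment form:
##
##     assoc = ns.wifi.MgtAssocRequestHeader()
##     assoc.SetSsid(ns.wifi.Ssid("my-network"))   # assumed Ssid(string) constructor
##     assoc.SetListenInterval(1)                  # listen interval in beacon intervals
##     rates = assoc.GetSupportedRates()           # ns3::SupportedRates value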
def register_Ns3MgtAssocResponseHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtAssocResponseHeader::MgtAssocResponseHeader(ns3::MgtAssocResponseHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtAssocResponseHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtAssocResponseHeader::MgtAssocResponseHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAssocResponseHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtAssocResponseHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtAssocResponseHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::StatusCode ns3::MgtAssocResponseHeader::GetStatusCode() [member function]
cls.add_method('GetStatusCode',
'ns3::StatusCode',
[])
## mgt-headers.h (module 'wifi'): ns3::SupportedRates ns3::MgtAssocResponseHeader::GetSupportedRates() [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[])
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtAssocResponseHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocResponseHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocResponseHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocResponseHeader::SetStatusCode(ns3::StatusCode code) [member function]
cls.add_method('SetStatusCode',
'void',
[param('ns3::StatusCode', 'code')])
## mgt-headers.h (module 'wifi'): void ns3::MgtAssocResponseHeader::SetSupportedRates(ns3::SupportedRates rates) [member function]
cls.add_method('SetSupportedRates',
'void',
[param('ns3::SupportedRates', 'rates')])
return
def register_Ns3MgtDelBaHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtDelBaHeader::MgtDelBaHeader(ns3::MgtDelBaHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtDelBaHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtDelBaHeader::MgtDelBaHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtDelBaHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtDelBaHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtDelBaHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint8_t ns3::MgtDelBaHeader::GetTid() const [member function]
cls.add_method('GetTid',
'uint8_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtDelBaHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): bool ns3::MgtDelBaHeader::IsByOriginator() const [member function]
cls.add_method('IsByOriginator',
'bool',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::SetByOriginator() [member function]
cls.add_method('SetByOriginator',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::SetByRecipient() [member function]
cls.add_method('SetByRecipient',
'void',
[])
## mgt-headers.h (module 'wifi'): void ns3::MgtDelBaHeader::SetTid(uint8_t arg0) [member function]
cls.add_method('SetTid',
'void',
[param('uint8_t', 'arg0')])
return
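## Illustrative sketch (not part of the generated registrations): tearing down a block ack
## agreement with the DELBA header, kept in comment form because 'ns.wifi' is an assumed
## package name:
##
##     delba = ns.wifi.MgtDelBaHeader()
##     delba.SetTid(0)                     # traffic identifier of the agreement to tear down
##     delba.SetByOriginator()             # or SetByRecipient(), depending on which side acts
##     assert delba.IsByOriginator()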
def register_Ns3MgtProbeRequestHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtProbeRequestHeader::MgtProbeRequestHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): ns3::MgtProbeRequestHeader::MgtProbeRequestHeader(ns3::MgtProbeRequestHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtProbeRequestHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtProbeRequestHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtProbeRequestHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtProbeRequestHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::Ssid ns3::MgtProbeRequestHeader::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::SupportedRates ns3::MgtProbeRequestHeader::GetSupportedRates() const [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtProbeRequestHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeRequestHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeRequestHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeRequestHeader::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeRequestHeader::SetSupportedRates(ns3::SupportedRates rates) [member function]
cls.add_method('SetSupportedRates',
'void',
[param('ns3::SupportedRates', 'rates')])
return
def register_Ns3MgtProbeResponseHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtProbeResponseHeader::MgtProbeResponseHeader(ns3::MgtProbeResponseHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtProbeResponseHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::MgtProbeResponseHeader::MgtProbeResponseHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtProbeResponseHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): uint64_t ns3::MgtProbeResponseHeader::GetBeaconIntervalUs() const [member function]
cls.add_method('GetBeaconIntervalUs',
'uint64_t',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::MgtProbeResponseHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::MgtProbeResponseHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::Ssid ns3::MgtProbeResponseHeader::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): ns3::SupportedRates ns3::MgtProbeResponseHeader::GetSupportedRates() const [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[],
is_const=True)
## mgt-headers.h (module 'wifi'): uint64_t ns3::MgtProbeResponseHeader::GetTimestamp() [member function]
cls.add_method('GetTimestamp',
'uint64_t',
[])
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::MgtProbeResponseHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::SetBeaconIntervalUs(uint64_t us) [member function]
cls.add_method('SetBeaconIntervalUs',
'void',
[param('uint64_t', 'us')])
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')])
## mgt-headers.h (module 'wifi'): void ns3::MgtProbeResponseHeader::SetSupportedRates(ns3::SupportedRates rates) [member function]
cls.add_method('SetSupportedRates',
'void',
[param('ns3::SupportedRates', 'rates')])
return
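## Illustrative sketch (not part of the generated registrations): a probe response carries
## the SSID, the supported rates and the beacon interval in microseconds. 'ns.wifi' and the
## Ssid constructor are assumptions, so the snippet is kept in comment form:
##
##     resp = ns.wifi.MgtProbeResponseHeader()
##     resp.SetSsid(ns.wifi.Ssid("my-network"))    # assumed Ssid(string) constructor
##     resp.SetBeaconIntervalUs(102400)            # 102.4 ms, i.e. 100 time units
##     print(resp.GetBeaconIntervalUs())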
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Start() [member function]
cls.add_method('Start',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
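## Illustrative sketch (not part of the generated registrations): ns3::Object supports
## aggregation, i.e. attaching objects to one another so they can be found again via the
## aggregate iterator. 'ns.core' is an assumed package name, so the snippet is kept in
## comment form:
##
##     import ns.core                      # assumed import path for the built bindings
##     a = ns.core.Object()
##     b = ns.core.Object()
##     a.AggregateObject(b)                # b becomes part of a's aggregate
##     it = a.GetAggregateIterator()       # walked in the AggregateIterator sketch below
##     a.Dispose()                         # breaks the reference cycle created by aggregation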
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
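## Illustrative sketch (not part of the generated registrations): walking an aggregate with
## the iterator registered above, continuing the Object sketch; kept in comment form:
##
##     it = a.GetAggregateIterator()
##     while it.HasNext():
##         other = it.Next()               # ns3::Ptr<const ns3::Object>
##         print(other.GetInstanceTypeId().GetName())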
def register_Ns3PcapFileWrapper_methods(root_module, cls):
## pcap-file-wrapper.h (module 'network'): static ns3::TypeId ns3::PcapFileWrapper::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## pcap-file-wrapper.h (module 'network'): ns3::PcapFileWrapper::PcapFileWrapper() [constructor]
cls.add_constructor([])
## pcap-file-wrapper.h (module 'network'): bool ns3::PcapFileWrapper::Fail() const [member function]
cls.add_method('Fail',
'bool',
[],
is_const=True)
## pcap-file-wrapper.h (module 'network'): bool ns3::PcapFileWrapper::Eof() const [member function]
cls.add_method('Eof',
'bool',
[],
is_const=True)
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Clear() [member function]
cls.add_method('Clear',
'void',
[])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Open(std::string const & filename, std::_Ios_Openmode mode) [member function]
cls.add_method('Open',
'void',
[param('std::string const &', 'filename'), param('std::_Ios_Openmode', 'mode')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Close() [member function]
cls.add_method('Close',
'void',
[])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Init(uint32_t dataLinkType, uint32_t snapLen=std::numeric_limits<unsigned int>::max(), int32_t tzCorrection=ns3::PcapFile::ZONE_DEFAULT) [member function]
cls.add_method('Init',
'void',
[param('uint32_t', 'dataLinkType'), param('uint32_t', 'snapLen', default_value='std::numeric_limits<unsigned int>::max()'), param('int32_t', 'tzCorrection', default_value='ns3::PcapFile::ZONE_DEFAULT')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, ns3::Header & header, ns3::Ptr<ns3::Packet const> p) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('ns3::Header &', 'header'), param('ns3::Ptr< ns3::Packet const >', 'p')])
## pcap-file-wrapper.h (module 'network'): void ns3::PcapFileWrapper::Write(ns3::Time t, uint8_t const * buffer, uint32_t length) [member function]
cls.add_method('Write',
'void',
[param('ns3::Time', 't'), param('uint8_t const *', 'buffer'), param('uint32_t', 'length')])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetMagic() [member function]
cls.add_method('GetMagic',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint16_t ns3::PcapFileWrapper::GetVersionMajor() [member function]
cls.add_method('GetVersionMajor',
'uint16_t',
[])
## pcap-file-wrapper.h (module 'network'): uint16_t ns3::PcapFileWrapper::GetVersionMinor() [member function]
cls.add_method('GetVersionMinor',
'uint16_t',
[])
## pcap-file-wrapper.h (module 'network'): int32_t ns3::PcapFileWrapper::GetTimeZoneOffset() [member function]
cls.add_method('GetTimeZoneOffset',
'int32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetSigFigs() [member function]
cls.add_method('GetSigFigs',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetSnapLen() [member function]
cls.add_method('GetSnapLen',
'uint32_t',
[])
## pcap-file-wrapper.h (module 'network'): uint32_t ns3::PcapFileWrapper::GetDataLinkType() [member function]
cls.add_method('GetDataLinkType',
'uint32_t',
[])
return
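## Illustrative sketch (not part of the generated registrations): writing packets to a pcap
## file through the wrapper registered above. The open-mode value, the data link type and
## the 'ns.network' package name are assumptions, so the snippet is kept in comment form:
##
##     import ns.network                   # assumed import path for the built bindings
##     pcap = ns.network.PcapFileWrapper()
##     pcap.Open("trace.pcap", out_mode)   # out_mode: a std::ios_base::openmode value (assumed)
##     pcap.Init(1)                        # 1 = DLT_EN10MB (Ethernet) in libpcap numbering
##     pcap.Write(t, pkt)                  # t: an ns3::Time stamp, pkt: Ptr<const Packet>
##     pcap.Close()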
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::SimpleRefCount(ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3MeshWifiInterfaceMacPlugin_Ns3Empty_Ns3DefaultDeleter__lt__ns3MeshWifiInterfaceMacPlugin__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin> >::SimpleRefCount(ns3::SimpleRefCount<ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter< ns3::MeshWifiInterfaceMacPlugin > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::MeshWifiInterfaceMacPlugin, ns3::empty, ns3::DefaultDeleter<ns3::MeshWifiInterfaceMacPlugin> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::SimpleRefCount(ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::SimpleRefCount(ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::SimpleRefCount(ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3WifiInformationElement_Ns3Empty_Ns3DefaultDeleter__lt__ns3WifiInformationElement__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >::SimpleRefCount(ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter< ns3::WifiInformationElement > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::WifiInformationElement, ns3::empty, ns3::DefaultDeleter<ns3::WifiInformationElement> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Dot11sDestinationAddressUnit_Ns3Empty_Ns3DefaultDeleter__lt__ns3Dot11sDestinationAddressUnit__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::dot11s::DestinationAddressUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::dot11s::DestinationAddressUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit> >::SimpleRefCount(ns3::SimpleRefCount<ns3::dot11s::DestinationAddressUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::dot11s::DestinationAddressUnit, ns3::empty, ns3::DefaultDeleter< ns3::dot11s::DestinationAddressUnit > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::dot11s::DestinationAddressUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::DestinationAddressUnit> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Dot11sIeBeaconTimingUnit_Ns3Empty_Ns3DefaultDeleter__lt__ns3Dot11sIeBeaconTimingUnit__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::dot11s::IeBeaconTimingUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::dot11s::IeBeaconTimingUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit> >::SimpleRefCount(ns3::SimpleRefCount<ns3::dot11s::IeBeaconTimingUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::dot11s::IeBeaconTimingUnit, ns3::empty, ns3::DefaultDeleter< ns3::dot11s::IeBeaconTimingUnit > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::dot11s::IeBeaconTimingUnit, ns3::empty, ns3::DefaultDeleter<ns3::dot11s::IeBeaconTimingUnit> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3Time_methods(root_module, cls):
cls.add_binary_comparison_operator('!=')
cls.add_inplace_numeric_operator('+=', param('ns3::Time const &', 'right'))
cls.add_binary_numeric_operator('+', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_numeric_operator('-', root_module['ns3::Time'], root_module['ns3::Time'], param('ns3::Time const &', 'right'))
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_inplace_numeric_operator('-=', param('ns3::Time const &', 'right'))
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('<=')
cls.add_binary_comparison_operator('==')
cls.add_binary_comparison_operator('>=')
## nstime.h (module 'core'): ns3::Time::Time() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::Time::Time(ns3::Time const & o) [copy constructor]
cls.add_constructor([param('ns3::Time const &', 'o')])
## nstime.h (module 'core'): ns3::Time::Time(double v) [constructor]
cls.add_constructor([param('double', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long int v) [constructor]
cls.add_constructor([param('long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long int v) [constructor]
cls.add_constructor([param('long long int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(unsigned int v) [constructor]
cls.add_constructor([param('unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long unsigned int v) [constructor]
cls.add_constructor([param('long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(long long unsigned int v) [constructor]
cls.add_constructor([param('long long unsigned int', 'v')])
## nstime.h (module 'core'): ns3::Time::Time(std::string const & s) [constructor]
cls.add_constructor([param('std::string const &', 's')])
## nstime.h (module 'core'): ns3::Time::Time(ns3::int64x64_t const & value) [constructor]
cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
## nstime.h (module 'core'): int ns3::Time::Compare(ns3::Time const & o) const [member function]
cls.add_method('Compare',
'int',
[param('ns3::Time const &', 'o')],
is_const=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & from, ns3::Time::Unit timeUnit) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::From(ns3::int64x64_t const & value) [member function]
cls.add_method('From',
'ns3::Time',
[param('ns3::int64x64_t const &', 'value')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromDouble(double value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromDouble',
'ns3::Time',
[param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): static ns3::Time ns3::Time::FromInteger(uint64_t value, ns3::Time::Unit timeUnit) [member function]
cls.add_method('FromInteger',
'ns3::Time',
[param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetDouble() const [member function]
cls.add_method('GetDouble',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetFemtoSeconds() const [member function]
cls.add_method('GetFemtoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetInteger() const [member function]
cls.add_method('GetInteger',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMicroSeconds() const [member function]
cls.add_method('GetMicroSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetMilliSeconds() const [member function]
cls.add_method('GetMilliSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetNanoSeconds() const [member function]
cls.add_method('GetNanoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetPicoSeconds() const [member function]
cls.add_method('GetPicoSeconds',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): static ns3::Time::Unit ns3::Time::GetResolution() [member function]
cls.add_method('GetResolution',
'ns3::Time::Unit',
[],
is_static=True)
## nstime.h (module 'core'): double ns3::Time::GetSeconds() const [member function]
cls.add_method('GetSeconds',
'double',
[],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::GetTimeStep() const [member function]
cls.add_method('GetTimeStep',
'int64_t',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsNegative() const [member function]
cls.add_method('IsNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsPositive() const [member function]
cls.add_method('IsPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyNegative() const [member function]
cls.add_method('IsStrictlyNegative',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsStrictlyPositive() const [member function]
cls.add_method('IsStrictlyPositive',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): bool ns3::Time::IsZero() const [member function]
cls.add_method('IsZero',
'bool',
[],
is_const=True)
## nstime.h (module 'core'): static void ns3::Time::SetResolution(ns3::Time::Unit resolution) [member function]
cls.add_method('SetResolution',
'void',
[param('ns3::Time::Unit', 'resolution')],
is_static=True)
## nstime.h (module 'core'): ns3::int64x64_t ns3::Time::To(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('To',
'ns3::int64x64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): double ns3::Time::ToDouble(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToDouble',
'double',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
## nstime.h (module 'core'): int64_t ns3::Time::ToInteger(ns3::Time::Unit timeUnit) const [member function]
cls.add_method('ToInteger',
'int64_t',
[param('ns3::Time::Unit', 'timeUnit')],
is_const=True)
return
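## Illustrative sketch (not part of the generated registrations): the wrapped ns3::Time can
## be built from a string with a unit suffix and supports the arithmetic and comparison
## operators registered above. 'ns.core' is an assumed package name, so the snippet is kept
## in comment form:
##
##     t1 = ns.core.Time("1ms")
##     t2 = ns.core.Time("250us")
##     total = t1 + t2
##     print(total.GetMicroSeconds())      # 1250
##     print(t1 > t2, total.IsPositive())  # True True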
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3Trailer_methods(root_module, cls):
cls.add_output_stream_operator()
## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
cls.add_constructor([])
## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'end')],
is_pure_virtual=True, is_virtual=True)
## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3WifiActionHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::WifiActionHeader(ns3::WifiActionHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiActionHeader const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::WifiActionHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): uint32_t ns3::WifiActionHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue ns3::WifiActionHeader::GetAction() [member function]
cls.add_method('GetAction',
'ns3::WifiActionHeader::ActionValue',
[])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::CategoryValue ns3::WifiActionHeader::GetCategory() [member function]
cls.add_method('GetCategory',
'ns3::WifiActionHeader::CategoryValue',
[])
## mgt-headers.h (module 'wifi'): ns3::TypeId ns3::WifiActionHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): uint32_t ns3::WifiActionHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): static ns3::TypeId ns3::WifiActionHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mgt-headers.h (module 'wifi'): void ns3::WifiActionHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::WifiActionHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## mgt-headers.h (module 'wifi'): void ns3::WifiActionHeader::SetAction(ns3::WifiActionHeader::CategoryValue type, ns3::WifiActionHeader::ActionValue action) [member function]
cls.add_method('SetAction',
'void',
[param('ns3::WifiActionHeader::CategoryValue', 'type'), param('ns3::WifiActionHeader::ActionValue', 'action')])
return
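## Illustrative sketch (not part of the generated registrations): the action header pairs a
## category with an action value taken from the union registered below. The enum constant
## names are assumptions about the built module, so the snippet is kept in comment form:
##
##     hdr = ns.wifi.WifiActionHeader()
##     val = ns.wifi.WifiActionHeader.ActionValue()
##     val.blockAck = ns.wifi.WifiActionHeader.BLOCK_ACK_ADDBA_REQUEST   # assumed constant
##     hdr.SetAction(ns.wifi.WifiActionHeader.BLOCK_ACK, val)            # assumed constant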
def register_Ns3WifiActionHeaderActionValue_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::ActionValue() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::ActionValue(ns3::WifiActionHeader::ActionValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiActionHeader::ActionValue const &', 'arg0')])
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::blockAck [variable]
cls.add_instance_attribute('blockAck', 'ns3::WifiActionHeader::BlockAckActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::interwork [variable]
cls.add_instance_attribute('interwork', 'ns3::WifiActionHeader::InterworkActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::linkMetrtic [variable]
cls.add_instance_attribute('linkMetrtic', 'ns3::WifiActionHeader::LinkMetricActionValue', is_const=False)
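## Note: the spelling 'linkMetrtic' above is kept verbatim from the upstream ns-3
## mgt-headers.h union member so that the binding matches the C++ declaration; it is not a
## transcription error introduced here.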
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::pathSelection [variable]
cls.add_instance_attribute('pathSelection', 'ns3::WifiActionHeader::PathSelectionActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::peerLink [variable]
cls.add_instance_attribute('peerLink', 'ns3::WifiActionHeader::PeerLinkMgtActionValue', is_const=False)
## mgt-headers.h (module 'wifi'): ns3::WifiActionHeader::ActionValue::resourceCoordination [variable]
cls.add_instance_attribute('resourceCoordination', 'ns3::WifiActionHeader::ResourceCoordinationActionValue', is_const=False)
return
def register_Ns3WifiInformationElement_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('==')
## wifi-information-element.h (module 'wifi'): ns3::WifiInformationElement::WifiInformationElement() [constructor]
cls.add_constructor([])
## wifi-information-element.h (module 'wifi'): ns3::WifiInformationElement::WifiInformationElement(ns3::WifiInformationElement const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiInformationElement const &', 'arg0')])
## wifi-information-element.h (module 'wifi'): ns3::Buffer::Iterator ns3::WifiInformationElement::Deserialize(ns3::Buffer::Iterator i) [member function]
cls.add_method('Deserialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'i')])
## wifi-information-element.h (module 'wifi'): ns3::Buffer::Iterator ns3::WifiInformationElement::DeserializeIfPresent(ns3::Buffer::Iterator i) [member function]
cls.add_method('DeserializeIfPresent',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'i')])
## wifi-information-element.h (module 'wifi'): uint8_t ns3::WifiInformationElement::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_pure_virtual=True, is_virtual=True)
## wifi-information-element.h (module 'wifi'): ns3::WifiInformationElementId ns3::WifiInformationElement::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-information-element.h (module 'wifi'): uint8_t ns3::WifiInformationElement::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-information-element.h (module 'wifi'): uint16_t ns3::WifiInformationElement::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint16_t',
[],
is_const=True)
## wifi-information-element.h (module 'wifi'): void ns3::WifiInformationElement::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## wifi-information-element.h (module 'wifi'): ns3::Buffer::Iterator ns3::WifiInformationElement::Serialize(ns3::Buffer::Iterator i) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True)
## wifi-information-element.h (module 'wifi'): void ns3::WifiInformationElement::SerializeInformationField(ns3::Buffer::Iterator start) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3WifiInformationElementVector_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## wifi-information-element-vector.h (module 'wifi'): ns3::WifiInformationElementVector::WifiInformationElementVector(ns3::WifiInformationElementVector const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiInformationElementVector const &', 'arg0')])
## wifi-information-element-vector.h (module 'wifi'): ns3::WifiInformationElementVector::WifiInformationElementVector() [constructor]
cls.add_constructor([])
## wifi-information-element-vector.h (module 'wifi'): bool ns3::WifiInformationElementVector::AddInformationElement(ns3::Ptr<ns3::WifiInformationElement> element) [member function]
cls.add_method('AddInformationElement',
'bool',
[param('ns3::Ptr< ns3::WifiInformationElement >', 'element')])
## wifi-information-element-vector.h (module 'wifi'): __gnu_cxx::__normal_iterator<ns3::Ptr<ns3::WifiInformationElement>*,std::vector<ns3::Ptr<ns3::WifiInformationElement>, std::allocator<ns3::Ptr<ns3::WifiInformationElement> > > > ns3::WifiInformationElementVector::Begin() [member function]
cls.add_method('Begin',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::WifiInformationElement >, std::vector< ns3::Ptr< ns3::WifiInformationElement > > >',
[])
## wifi-information-element-vector.h (module 'wifi'): uint32_t ns3::WifiInformationElementVector::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): uint32_t ns3::WifiInformationElementVector::DeserializeSingleIe(ns3::Buffer::Iterator start) [member function]
cls.add_method('DeserializeSingleIe',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): __gnu_cxx::__normal_iterator<ns3::Ptr<ns3::WifiInformationElement>*,std::vector<ns3::Ptr<ns3::WifiInformationElement>, std::allocator<ns3::Ptr<ns3::WifiInformationElement> > > > ns3::WifiInformationElementVector::End() [member function]
cls.add_method('End',
'__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::WifiInformationElement >, std::vector< ns3::Ptr< ns3::WifiInformationElement > > >',
[])
## wifi-information-element-vector.h (module 'wifi'): ns3::Ptr<ns3::WifiInformationElement> ns3::WifiInformationElementVector::FindFirst(ns3::WifiInformationElementId id) const [member function]
cls.add_method('FindFirst',
'ns3::Ptr< ns3::WifiInformationElement >',
[param('ns3::WifiInformationElementId', 'id')],
is_const=True)
## wifi-information-element-vector.h (module 'wifi'): ns3::TypeId ns3::WifiInformationElementVector::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): uint32_t ns3::WifiInformationElementVector::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): static ns3::TypeId ns3::WifiInformationElementVector::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-information-element-vector.h (module 'wifi'): void ns3::WifiInformationElementVector::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): void ns3::WifiInformationElementVector::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## wifi-information-element-vector.h (module 'wifi'): void ns3::WifiInformationElementVector::SetMaxSize(uint16_t size) [member function]
cls.add_method('SetMaxSize',
'void',
[param('uint16_t', 'size')])
## wifi-information-element-vector.h (module 'wifi'): uint32_t ns3::WifiInformationElementVector::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True, visibility='protected')
return
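## Illustrative sketch only (not emitted by the bindings generator): once the module is
## built, the WifiInformationElementVector methods registered above can be exercised from
## Python roughly as below. The 'ns.wifi' import path and the helper name are assumptions
## for the example; this function is never called from this file.
def _example_wifi_information_element_vector_usage():
    import ns.wifi
    vec = ns.wifi.WifiInformationElementVector()   # empty vector of information elements
    vec.SetMaxSize(255)                            # cap the serialized size, as registered above
    return vec.GetSerializedSize()                 # serialized size of the (currently empty) vector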
def register_Ns3WifiMac_methods(root_module, cls):
## wifi-mac.h (module 'wifi'): ns3::WifiMac::WifiMac() [constructor]
cls.add_constructor([])
## wifi-mac.h (module 'wifi'): ns3::WifiMac::WifiMac(ns3::WifiMac const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMac const &', 'arg0')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::ConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('ConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to, ns3::Mac48Address from) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to'), param('ns3::Mac48Address', 'from')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetAckTimeout() const [member function]
cls.add_method('GetAckTimeout',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Mac48Address ns3::WifiMac::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Mac48Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetBasicBlockAckTimeout() const [member function]
cls.add_method('GetBasicBlockAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Mac48Address ns3::WifiMac::GetBssid() const [member function]
cls.add_method('GetBssid',
'ns3::Mac48Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetCompressedBlockAckTimeout() const [member function]
cls.add_method('GetCompressedBlockAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetCtsTimeout() const [member function]
cls.add_method('GetCtsTimeout',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetEifsNoDifs() const [member function]
cls.add_method('GetEifsNoDifs',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetMaxPropagationDelay() const [member function]
cls.add_method('GetMaxPropagationDelay',
'ns3::Time',
[],
is_const=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetMsduLifetime() const [member function]
cls.add_method('GetMsduLifetime',
'ns3::Time',
[],
is_const=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetPifs() const [member function]
cls.add_method('GetPifs',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetSifs() const [member function]
cls.add_method('GetSifs',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Time ns3::WifiMac::GetSlot() const [member function]
cls.add_method('GetSlot',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): ns3::Ssid ns3::WifiMac::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): static ns3::TypeId ns3::WifiMac::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyPromiscRx(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyPromiscRx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyRx(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyRxDrop(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRxDrop',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyTx(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::NotifyTxDrop(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTxDrop',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetAckTimeout(ns3::Time ackTimeout) [member function]
cls.add_method('SetAckTimeout',
'void',
[param('ns3::Time', 'ackTimeout')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetAddress(ns3::Mac48Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Mac48Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetBasicBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetBasicBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')],
is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetCompressedBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetCompressedBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')],
is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetCtsTimeout(ns3::Time ctsTimeout) [member function]
cls.add_method('SetCtsTimeout',
'void',
[param('ns3::Time', 'ctsTimeout')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetEifsNoDifs(ns3::Time eifsNoDifs) [member function]
cls.add_method('SetEifsNoDifs',
'void',
[param('ns3::Time', 'eifsNoDifs')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetForwardUpCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet>, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> upCallback) [member function]
cls.add_method('SetForwardUpCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'upCallback')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetLinkDownCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkDown) [member function]
cls.add_method('SetLinkDownCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkDown')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetLinkUpCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkUp) [member function]
cls.add_method('SetLinkUpCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkUp')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetMaxPropagationDelay(ns3::Time delay) [member function]
cls.add_method('SetMaxPropagationDelay',
'void',
[param('ns3::Time', 'delay')])
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetPifs(ns3::Time pifs) [member function]
cls.add_method('SetPifs',
'void',
[param('ns3::Time', 'pifs')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetPromisc() [member function]
cls.add_method('SetPromisc',
'void',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetSifs(ns3::Time sifs) [member function]
cls.add_method('SetSifs',
'void',
[param('ns3::Time', 'sifs')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetSlot(ns3::Time slotTime) [member function]
cls.add_method('SetSlot',
'void',
[param('ns3::Time', 'slotTime')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetWifiPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetWifiPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> stationManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'stationManager')],
is_pure_virtual=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): bool ns3::WifiMac::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::ConfigureCCHDcf(ns3::Ptr<ns3::Dcf> dcf, uint32_t cwmin, uint32_t cwmax, ns3::AcIndex ac) [member function]
cls.add_method('ConfigureCCHDcf',
'void',
[param('ns3::Ptr< ns3::Dcf >', 'dcf'), param('uint32_t', 'cwmin'), param('uint32_t', 'cwmax'), param('ns3::AcIndex', 'ac')],
visibility='protected')
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::ConfigureDcf(ns3::Ptr<ns3::Dcf> dcf, uint32_t cwmin, uint32_t cwmax, ns3::AcIndex ac) [member function]
cls.add_method('ConfigureDcf',
'void',
[param('ns3::Ptr< ns3::Dcf >', 'dcf'), param('uint32_t', 'cwmin'), param('uint32_t', 'cwmax'), param('ns3::AcIndex', 'ac')],
visibility='protected')
## wifi-mac.h (module 'wifi'): void ns3::WifiMac::FinishConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('FinishConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')],
is_pure_virtual=True, visibility='private', is_virtual=True)
return
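## Illustrative sketch only: ns3::WifiMac is abstract (note the pure-virtual methods
## registered above), so it cannot be instantiated directly, but its static GetTypeId()
## is still callable. The 'ns.wifi' import path and the helper name are assumptions;
## TypeId::GetName() comes from the core TypeId bindings. Nothing here is executed by
## this file.
def _example_wifi_mac_type_id():
    import ns.wifi
    tid = ns.wifi.WifiMac.GetTypeId()   # static method registered above
    return tid.GetName()                # the registered TypeId name for WifiMac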
def register_Ns3WifiMacHeader_methods(root_module, cls):
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::WifiMacHeader(ns3::WifiMacHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiMacHeader const &', 'arg0')])
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::WifiMacHeader() [constructor]
cls.add_constructor([])
## wifi-mac-header.h (module 'wifi'): uint32_t ns3::WifiMacHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## wifi-mac-header.h (module 'wifi'): ns3::Mac48Address ns3::WifiMacHeader::GetAddr1() const [member function]
cls.add_method('GetAddr1',
'ns3::Mac48Address',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::Mac48Address ns3::WifiMacHeader::GetAddr2() const [member function]
cls.add_method('GetAddr2',
'ns3::Mac48Address',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::Mac48Address ns3::WifiMacHeader::GetAddr3() const [member function]
cls.add_method('GetAddr3',
'ns3::Mac48Address',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::Mac48Address ns3::WifiMacHeader::GetAddr4() const [member function]
cls.add_method('GetAddr4',
'ns3::Mac48Address',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::Time ns3::WifiMacHeader::GetDuration() const [member function]
cls.add_method('GetDuration',
'ns3::Time',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint16_t ns3::WifiMacHeader::GetFragmentNumber() const [member function]
cls.add_method('GetFragmentNumber',
'uint16_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::TypeId ns3::WifiMacHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacHeader::QosAckPolicy ns3::WifiMacHeader::GetQosAckPolicy() const [member function]
cls.add_method('GetQosAckPolicy',
'ns3::WifiMacHeader::QosAckPolicy',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint8_t ns3::WifiMacHeader::GetQosTid() const [member function]
cls.add_method('GetQosTid',
'uint8_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint8_t ns3::WifiMacHeader::GetQosTxopLimit() const [member function]
cls.add_method('GetQosTxopLimit',
'uint8_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint16_t ns3::WifiMacHeader::GetRawDuration() const [member function]
cls.add_method('GetRawDuration',
'uint16_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint16_t ns3::WifiMacHeader::GetSequenceControl() const [member function]
cls.add_method('GetSequenceControl',
'uint16_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint16_t ns3::WifiMacHeader::GetSequenceNumber() const [member function]
cls.add_method('GetSequenceNumber',
'uint16_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): uint32_t ns3::WifiMacHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## wifi-mac-header.h (module 'wifi'): uint32_t ns3::WifiMacHeader::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): ns3::WifiMacType ns3::WifiMacHeader::GetType() const [member function]
cls.add_method('GetType',
'ns3::WifiMacType',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): static ns3::TypeId ns3::WifiMacHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-mac-header.h (module 'wifi'): char const * ns3::WifiMacHeader::GetTypeString() const [member function]
cls.add_method('GetTypeString',
'char const *',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAck() const [member function]
cls.add_method('IsAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAction() const [member function]
cls.add_method('IsAction',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAssocReq() const [member function]
cls.add_method('IsAssocReq',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAssocResp() const [member function]
cls.add_method('IsAssocResp',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsAuthentication() const [member function]
cls.add_method('IsAuthentication',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsBeacon() const [member function]
cls.add_method('IsBeacon',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsBlockAck() const [member function]
cls.add_method('IsBlockAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsBlockAckReq() const [member function]
cls.add_method('IsBlockAckReq',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsCfpoll() const [member function]
cls.add_method('IsCfpoll',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsCtl() const [member function]
cls.add_method('IsCtl',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsCts() const [member function]
cls.add_method('IsCts',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsData() const [member function]
cls.add_method('IsData',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsDeauthentication() const [member function]
cls.add_method('IsDeauthentication',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsDisassociation() const [member function]
cls.add_method('IsDisassociation',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsFromDs() const [member function]
cls.add_method('IsFromDs',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsMgt() const [member function]
cls.add_method('IsMgt',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsMoreFragments() const [member function]
cls.add_method('IsMoreFragments',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsMultihopAction() const [member function]
cls.add_method('IsMultihopAction',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsProbeReq() const [member function]
cls.add_method('IsProbeReq',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsProbeResp() const [member function]
cls.add_method('IsProbeResp',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosAck() const [member function]
cls.add_method('IsQosAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosAmsdu() const [member function]
cls.add_method('IsQosAmsdu',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosBlockAck() const [member function]
cls.add_method('IsQosBlockAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosData() const [member function]
cls.add_method('IsQosData',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosEosp() const [member function]
cls.add_method('IsQosEosp',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsQosNoAck() const [member function]
cls.add_method('IsQosNoAck',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsReassocReq() const [member function]
cls.add_method('IsReassocReq',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsReassocResp() const [member function]
cls.add_method('IsReassocResp',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsRetry() const [member function]
cls.add_method('IsRetry',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsRts() const [member function]
cls.add_method('IsRts',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): bool ns3::WifiMacHeader::IsToDs() const [member function]
cls.add_method('IsToDs',
'bool',
[],
is_const=True)
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAction() [member function]
cls.add_method('SetAction',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAddr1(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr1',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAddr2(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr2',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAddr3(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr3',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAddr4(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr4',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAssocReq() [member function]
cls.add_method('SetAssocReq',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetAssocResp() [member function]
cls.add_method('SetAssocResp',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetBeacon() [member function]
cls.add_method('SetBeacon',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetBlockAck() [member function]
cls.add_method('SetBlockAck',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetBlockAckReq() [member function]
cls.add_method('SetBlockAckReq',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDsFrom() [member function]
cls.add_method('SetDsFrom',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDsNotFrom() [member function]
cls.add_method('SetDsNotFrom',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDsNotTo() [member function]
cls.add_method('SetDsNotTo',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDsTo() [member function]
cls.add_method('SetDsTo',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetDuration(ns3::Time duration) [member function]
cls.add_method('SetDuration',
'void',
[param('ns3::Time', 'duration')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetFragmentNumber(uint8_t frag) [member function]
cls.add_method('SetFragmentNumber',
'void',
[param('uint8_t', 'frag')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetId(uint16_t id) [member function]
cls.add_method('SetId',
'void',
[param('uint16_t', 'id')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetMoreFragments() [member function]
cls.add_method('SetMoreFragments',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetMultihopAction() [member function]
cls.add_method('SetMultihopAction',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetNoMoreFragments() [member function]
cls.add_method('SetNoMoreFragments',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetNoRetry() [member function]
cls.add_method('SetNoRetry',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetProbeReq() [member function]
cls.add_method('SetProbeReq',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetProbeResp() [member function]
cls.add_method('SetProbeResp',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosAckPolicy(ns3::WifiMacHeader::QosAckPolicy arg0) [member function]
cls.add_method('SetQosAckPolicy',
'void',
[param('ns3::WifiMacHeader::QosAckPolicy', 'arg0')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosAmsdu() [member function]
cls.add_method('SetQosAmsdu',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosBlockAck() [member function]
cls.add_method('SetQosBlockAck',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosEosp() [member function]
cls.add_method('SetQosEosp',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosNoAck() [member function]
cls.add_method('SetQosNoAck',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosNoAmsdu() [member function]
cls.add_method('SetQosNoAmsdu',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosNoEosp() [member function]
cls.add_method('SetQosNoEosp',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosNormalAck() [member function]
cls.add_method('SetQosNormalAck',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosTid(uint8_t tid) [member function]
cls.add_method('SetQosTid',
'void',
[param('uint8_t', 'tid')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetQosTxopLimit(uint8_t txop) [member function]
cls.add_method('SetQosTxopLimit',
'void',
[param('uint8_t', 'txop')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetRawDuration(uint16_t duration) [member function]
cls.add_method('SetRawDuration',
'void',
[param('uint16_t', 'duration')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetRetry() [member function]
cls.add_method('SetRetry',
'void',
[])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetSequenceNumber(uint16_t seq) [member function]
cls.add_method('SetSequenceNumber',
'void',
[param('uint16_t', 'seq')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetType(ns3::WifiMacType type) [member function]
cls.add_method('SetType',
'void',
[param('ns3::WifiMacType', 'type')])
## wifi-mac-header.h (module 'wifi'): void ns3::WifiMacHeader::SetTypeData() [member function]
cls.add_method('SetTypeData',
'void',
[])
return
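## Illustrative sketch only: a minimal round trip through the WifiMacHeader setters and
## predicates registered above. The 'ns.wifi'/'ns.network' import paths and the helper
## name are assumptions for the example; the function is never invoked by this file.
def _example_wifi_mac_header_usage():
    import ns.wifi
    import ns.network
    hdr = ns.wifi.WifiMacHeader()
    hdr.SetTypeData()                                            # mark the frame as plain data
    hdr.SetDsNotFrom()                                           # clear the FromDS flag
    hdr.SetDsNotTo()                                             # clear the ToDS flag
    hdr.SetAddr1(ns.network.Mac48Address('00:00:00:00:00:01'))   # receiver address
    hdr.SetSequenceNumber(42)                                    # 12-bit sequence number
    return hdr.IsData() and not hdr.IsMgt()                      # predicates registered above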
def register_Ns3WifiPhy_methods(root_module, cls):
## wifi-phy.h (module 'wifi'): ns3::WifiPhy::WifiPhy(ns3::WifiPhy const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiPhy const &', 'arg0')])
## wifi-phy.h (module 'wifi'): ns3::WifiPhy::WifiPhy() [constructor]
cls.add_constructor([])
## wifi-phy.h (module 'wifi'): double ns3::WifiPhy::CalculateSnr(ns3::WifiMode txMode, double ber) const [member function]
cls.add_method('CalculateSnr',
'double',
[param('ns3::WifiMode', 'txMode'), param('double', 'ber')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): static ns3::Time ns3::WifiPhy::CalculateTxDuration(uint32_t size, ns3::WifiMode payloadMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('CalculateTxDuration',
'ns3::Time',
[param('uint32_t', 'size'), param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble')],
is_static=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::ConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('ConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): ns3::Ptr<ns3::WifiChannel> ns3::WifiPhy::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::WifiChannel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): uint16_t ns3::WifiPhy::GetChannelNumber() const [member function]
cls.add_method('GetChannelNumber',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): ns3::Time ns3::WifiPhy::GetDelayUntilIdle() [member function]
cls.add_method('GetDelayUntilIdle',
'ns3::Time',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetDsssRate11Mbps() [member function]
cls.add_method('GetDsssRate11Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetDsssRate1Mbps() [member function]
cls.add_method('GetDsssRate1Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetDsssRate2Mbps() [member function]
cls.add_method('GetDsssRate2Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetDsssRate5_5Mbps() [member function]
cls.add_method('GetDsssRate5_5Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate12Mbps() [member function]
cls.add_method('GetErpOfdmRate12Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate18Mbps() [member function]
cls.add_method('GetErpOfdmRate18Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate24Mbps() [member function]
cls.add_method('GetErpOfdmRate24Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate36Mbps() [member function]
cls.add_method('GetErpOfdmRate36Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate48Mbps() [member function]
cls.add_method('GetErpOfdmRate48Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate54Mbps() [member function]
cls.add_method('GetErpOfdmRate54Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate6Mbps() [member function]
cls.add_method('GetErpOfdmRate6Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetErpOfdmRate9Mbps() [member function]
cls.add_method('GetErpOfdmRate9Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): ns3::Time ns3::WifiPhy::GetLastRxStartTime() const [member function]
cls.add_method('GetLastRxStartTime',
'ns3::Time',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): ns3::WifiMode ns3::WifiPhy::GetMode(uint32_t mode) const [member function]
cls.add_method('GetMode',
'ns3::WifiMode',
[param('uint32_t', 'mode')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): uint32_t ns3::WifiPhy::GetNModes() const [member function]
cls.add_method('GetNModes',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): uint32_t ns3::WifiPhy::GetNTxPower() const [member function]
cls.add_method('GetNTxPower',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate12Mbps() [member function]
cls.add_method('GetOfdmRate12Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate12MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate12MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate12MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate12MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate13_5MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate13_5MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate18Mbps() [member function]
cls.add_method('GetOfdmRate18Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate18MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate18MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate1_5MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate1_5MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate24Mbps() [member function]
cls.add_method('GetOfdmRate24Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate24MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate24MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate27MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate27MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate2_25MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate2_25MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate36Mbps() [member function]
cls.add_method('GetOfdmRate36Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate3MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate3MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate3MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate3MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate48Mbps() [member function]
cls.add_method('GetOfdmRate48Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate4_5MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate4_5MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate4_5MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate4_5MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate54Mbps() [member function]
cls.add_method('GetOfdmRate54Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate6Mbps() [member function]
cls.add_method('GetOfdmRate6Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate6MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate6MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate6MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate6MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate9Mbps() [member function]
cls.add_method('GetOfdmRate9Mbps',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate9MbpsBW10MHz() [member function]
cls.add_method('GetOfdmRate9MbpsBW10MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetOfdmRate9MbpsBW5MHz() [member function]
cls.add_method('GetOfdmRate9MbpsBW5MHz',
'ns3::WifiMode',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): static uint32_t ns3::WifiPhy::GetPayloadDurationMicroSeconds(uint32_t size, ns3::WifiMode payloadMode) [member function]
cls.add_method('GetPayloadDurationMicroSeconds',
'uint32_t',
[param('uint32_t', 'size'), param('ns3::WifiMode', 'payloadMode')],
is_static=True)
## wifi-phy.h (module 'wifi'): static uint32_t ns3::WifiPhy::GetPlcpHeaderDurationMicroSeconds(ns3::WifiMode payloadMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('GetPlcpHeaderDurationMicroSeconds',
'uint32_t',
[param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble')],
is_static=True)
## wifi-phy.h (module 'wifi'): static ns3::WifiMode ns3::WifiPhy::GetPlcpHeaderMode(ns3::WifiMode payloadMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('GetPlcpHeaderMode',
'ns3::WifiMode',
[param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble')],
is_static=True)
## wifi-phy.h (module 'wifi'): static uint32_t ns3::WifiPhy::GetPlcpPreambleDurationMicroSeconds(ns3::WifiMode payloadMode, ns3::WifiPreamble preamble) [member function]
cls.add_method('GetPlcpPreambleDurationMicroSeconds',
'uint32_t',
[param('ns3::WifiMode', 'payloadMode'), param('ns3::WifiPreamble', 'preamble')],
is_static=True)
## wifi-phy.h (module 'wifi'): ns3::Time ns3::WifiPhy::GetStateDuration() [member function]
cls.add_method('GetStateDuration',
'ns3::Time',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): double ns3::WifiPhy::GetTxPowerEnd() const [member function]
cls.add_method('GetTxPowerEnd',
'double',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): double ns3::WifiPhy::GetTxPowerStart() const [member function]
cls.add_method('GetTxPowerStart',
'double',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): static ns3::TypeId ns3::WifiPhy::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateBusy() [member function]
cls.add_method('IsStateBusy',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateCcaBusy() [member function]
cls.add_method('IsStateCcaBusy',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateIdle() [member function]
cls.add_method('IsStateIdle',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateRx() [member function]
cls.add_method('IsStateRx',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateSwitching() [member function]
cls.add_method('IsStateSwitching',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): bool ns3::WifiPhy::IsStateTx() [member function]
cls.add_method('IsStateTx',
'bool',
[],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyMonitorSniffRx(ns3::Ptr<ns3::Packet const> packet, uint16_t channelFreqMhz, uint16_t channelNumber, uint32_t rate, bool isShortPreamble, double signalDbm, double noiseDbm) [member function]
cls.add_method('NotifyMonitorSniffRx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'channelFreqMhz'), param('uint16_t', 'channelNumber'), param('uint32_t', 'rate'), param('bool', 'isShortPreamble'), param('double', 'signalDbm'), param('double', 'noiseDbm')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyMonitorSniffTx(ns3::Ptr<ns3::Packet const> packet, uint16_t channelFreqMhz, uint16_t channelNumber, uint32_t rate, bool isShortPreamble) [member function]
cls.add_method('NotifyMonitorSniffTx',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'channelFreqMhz'), param('uint16_t', 'channelNumber'), param('uint32_t', 'rate'), param('bool', 'isShortPreamble')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyRxBegin(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRxBegin',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyRxDrop(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRxDrop',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyRxEnd(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyRxEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyTxBegin(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTxBegin',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyTxDrop(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTxDrop',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::NotifyTxEnd(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NotifyTxEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::RegisterListener(ns3::WifiPhyListener * listener) [member function]
cls.add_method('RegisterListener',
'void',
[param('ns3::WifiPhyListener *', 'listener')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::SendPacket(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMode mode, ns3::WifiPreamble preamble, uint8_t txPowerLevel) [member function]
cls.add_method('SendPacket',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMode', 'mode'), param('ns3::WifiPreamble', 'preamble'), param('uint8_t', 'txPowerLevel')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::SetChannelNumber(uint16_t id) [member function]
cls.add_method('SetChannelNumber',
'void',
[param('uint16_t', 'id')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::SetReceiveErrorCallback(ns3::Callback<void,ns3::Ptr<const ns3::Packet>,double,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('SetReceiveErrorCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, double, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## wifi-phy.h (module 'wifi'): void ns3::WifiPhy::SetReceiveOkCallback(ns3::Callback<void,ns3::Ptr<ns3::Packet>,double,ns3::WifiMode,ns3::WifiPreamble,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> callback) [member function]
cls.add_method('SetReceiveOkCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, double, ns3::WifiMode, ns3::WifiPreamble, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
return
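## Illustrative sketch only: ns3::WifiPhy is abstract, but the static rate accessors and
## CalculateTxDuration() registered above are usable without an instance. The 'ns.wifi'
## import path, the helper name, and the WIFI_PREAMBLE_LONG enum constant are assumptions
## for the example; this function is not called from this file.
def _example_wifi_phy_static_helpers():
    import ns.wifi
    mode = ns.wifi.WifiPhy.GetOfdmRate6Mbps()           # static WifiMode accessor registered above
    duration = ns.wifi.WifiPhy.CalculateTxDuration(
        1000, mode, ns.wifi.WIFI_PREAMBLE_LONG)         # 1000-byte payload, long preamble (enum assumed)
    return duration                                     # ns3::Time for the whole transmission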
def register_Ns3WifiRemoteStationManager_methods(root_module, cls):
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationManager::WifiRemoteStationManager(ns3::WifiRemoteStationManager const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiRemoteStationManager const &', 'arg0')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationManager::WifiRemoteStationManager() [constructor]
cls.add_constructor([])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::AddBasicMode(ns3::WifiMode mode) [member function]
cls.add_method('AddBasicMode',
'void',
[param('ns3::WifiMode', 'mode')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::AddSupportedMode(ns3::Mac48Address address, ns3::WifiMode mode) [member function]
cls.add_method('AddSupportedMode',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMode', 'mode')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetAckMode(ns3::Mac48Address address, ns3::WifiMode dataMode) [member function]
cls.add_method('GetAckMode',
'ns3::WifiMode',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMode', 'dataMode')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetBasicMode(uint32_t i) const [member function]
cls.add_method('GetBasicMode',
'ns3::WifiMode',
[param('uint32_t', 'i')],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetCtsMode(ns3::Mac48Address address, ns3::WifiMode rtsMode) [member function]
cls.add_method('GetCtsMode',
'ns3::WifiMode',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMode', 'rtsMode')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetDataMode(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fullPacketSize) [member function]
cls.add_method('GetDataMode',
'ns3::WifiMode',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fullPacketSize')])
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetDefaultMode() const [member function]
cls.add_method('GetDefaultMode',
'ns3::WifiMode',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetFragmentOffset(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fragmentNumber) [member function]
cls.add_method('GetFragmentOffset',
'uint32_t',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fragmentNumber')])
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetFragmentSize(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fragmentNumber) [member function]
cls.add_method('GetFragmentSize',
'uint32_t',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fragmentNumber')])
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetFragmentationThreshold() const [member function]
cls.add_method('GetFragmentationThreshold',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStationInfo ns3::WifiRemoteStationManager::GetInfo(ns3::Mac48Address address) [member function]
cls.add_method('GetInfo',
'ns3::WifiRemoteStationInfo',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetMaxSlrc() const [member function]
cls.add_method('GetMaxSlrc',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetMaxSsrc() const [member function]
cls.add_method('GetMaxSsrc',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetNBasicModes() const [member function]
cls.add_method('GetNBasicModes',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetNonUnicastMode() const [member function]
cls.add_method('GetNonUnicastMode',
'ns3::WifiMode',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetRtsCtsThreshold() const [member function]
cls.add_method('GetRtsCtsThreshold',
'uint32_t',
[],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetRtsMode(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('GetRtsMode',
'ns3::WifiMode',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): static ns3::TypeId ns3::WifiRemoteStationManager::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsAssociated(ns3::Mac48Address address) const [member function]
cls.add_method('IsAssociated',
'bool',
[param('ns3::Mac48Address', 'address')],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsBrandNew(ns3::Mac48Address address) const [member function]
cls.add_method('IsBrandNew',
'bool',
[param('ns3::Mac48Address', 'address')],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsLastFragment(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fragmentNumber) [member function]
cls.add_method('IsLastFragment',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fragmentNumber')])
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsWaitAssocTxOk(ns3::Mac48Address address) const [member function]
cls.add_method('IsWaitAssocTxOk',
'bool',
[param('ns3::Mac48Address', 'address')],
is_const=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::NeedDataRetransmission(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NeedDataRetransmission',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::NeedFragmentation(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NeedFragmentation',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::NeedRts(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NeedRts',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::NeedRtsRetransmission(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('NeedRtsRetransmission',
'bool',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::PrepareForQueue(ns3::Mac48Address address, ns3::WifiMacHeader const * header, ns3::Ptr<ns3::Packet const> packet, uint32_t fullPacketSize) [member function]
cls.add_method('PrepareForQueue',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint32_t', 'fullPacketSize')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::RecordDisassociated(ns3::Mac48Address address) [member function]
cls.add_method('RecordDisassociated',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::RecordGotAssocTxFailed(ns3::Mac48Address address) [member function]
cls.add_method('RecordGotAssocTxFailed',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::RecordGotAssocTxOk(ns3::Mac48Address address) [member function]
cls.add_method('RecordGotAssocTxOk',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::RecordWaitAssocTxOk(ns3::Mac48Address address) [member function]
cls.add_method('RecordWaitAssocTxOk',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportDataFailed(ns3::Mac48Address address, ns3::WifiMacHeader const * header) [member function]
cls.add_method('ReportDataFailed',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportDataOk(ns3::Mac48Address address, ns3::WifiMacHeader const * header, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('ReportDataOk',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportFinalDataFailed(ns3::Mac48Address address, ns3::WifiMacHeader const * header) [member function]
cls.add_method('ReportFinalDataFailed',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportFinalRtsFailed(ns3::Mac48Address address, ns3::WifiMacHeader const * header) [member function]
cls.add_method('ReportFinalRtsFailed',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportRtsFailed(ns3::Mac48Address address, ns3::WifiMacHeader const * header) [member function]
cls.add_method('ReportRtsFailed',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportRtsOk(ns3::Mac48Address address, ns3::WifiMacHeader const * header, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('ReportRtsOk',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::ReportRxOk(ns3::Mac48Address address, ns3::WifiMacHeader const * header, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('ReportRxOk',
'void',
[param('ns3::Mac48Address', 'address'), param('ns3::WifiMacHeader const *', 'header'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::Reset() [member function]
cls.add_method('Reset',
'void',
[])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::Reset(ns3::Mac48Address address) [member function]
cls.add_method('Reset',
'void',
[param('ns3::Mac48Address', 'address')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetFragmentationThreshold(uint32_t threshold) [member function]
cls.add_method('SetFragmentationThreshold',
'void',
[param('uint32_t', 'threshold')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetMaxSlrc(uint32_t maxSlrc) [member function]
cls.add_method('SetMaxSlrc',
'void',
[param('uint32_t', 'maxSlrc')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetMaxSsrc(uint32_t maxSsrc) [member function]
cls.add_method('SetMaxSsrc',
'void',
[param('uint32_t', 'maxSsrc')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetRtsCtsThreshold(uint32_t threshold) [member function]
cls.add_method('SetRtsCtsThreshold',
'void',
[param('uint32_t', 'threshold')])
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::SetupPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetupPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')],
is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): uint32_t ns3::WifiRemoteStationManager::GetNSupported(ns3::WifiRemoteStation const * station) const [member function]
cls.add_method('GetNSupported',
'uint32_t',
[param('ns3::WifiRemoteStation const *', 'station')],
is_const=True, visibility='protected')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::GetSupported(ns3::WifiRemoteStation const * station, uint32_t i) const [member function]
cls.add_method('GetSupported',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation const *', 'station'), param('uint32_t', 'i')],
is_const=True, visibility='protected')
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiRemoteStation * ns3::WifiRemoteStationManager::DoCreateStation() const [member function]
cls.add_method('DoCreateStation',
'ns3::WifiRemoteStation *',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::DoGetDataMode(ns3::WifiRemoteStation * station, uint32_t size) [member function]
cls.add_method('DoGetDataMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station'), param('uint32_t', 'size')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): ns3::WifiMode ns3::WifiRemoteStationManager::DoGetRtsMode(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoGetRtsMode',
'ns3::WifiMode',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::DoNeedDataRetransmission(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedDataRetransmission',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::DoNeedFragmentation(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedFragmentation',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::DoNeedRts(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedRts',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::DoNeedRtsRetransmission(ns3::WifiRemoteStation * station, ns3::Ptr<ns3::Packet const> packet, bool normally) [member function]
cls.add_method('DoNeedRtsRetransmission',
'bool',
[param('ns3::WifiRemoteStation *', 'station'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('bool', 'normally')],
visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportDataOk(ns3::WifiRemoteStation * station, double ackSnr, ns3::WifiMode ackMode, double dataSnr) [member function]
cls.add_method('DoReportDataOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ackSnr'), param('ns3::WifiMode', 'ackMode'), param('double', 'dataSnr')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportFinalDataFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalDataFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportFinalRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportFinalRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportRtsFailed(ns3::WifiRemoteStation * station) [member function]
cls.add_method('DoReportRtsFailed',
'void',
[param('ns3::WifiRemoteStation *', 'station')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportRtsOk(ns3::WifiRemoteStation * station, double ctsSnr, ns3::WifiMode ctsMode, double rtsSnr) [member function]
cls.add_method('DoReportRtsOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'ctsSnr'), param('ns3::WifiMode', 'ctsMode'), param('double', 'rtsSnr')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): void ns3::WifiRemoteStationManager::DoReportRxOk(ns3::WifiRemoteStation * station, double rxSnr, ns3::WifiMode txMode) [member function]
cls.add_method('DoReportRxOk',
'void',
[param('ns3::WifiRemoteStation *', 'station'), param('double', 'rxSnr'), param('ns3::WifiMode', 'txMode')],
is_pure_virtual=True, visibility='private', is_virtual=True)
## wifi-remote-station-manager.h (module 'wifi'): bool ns3::WifiRemoteStationManager::IsLowLatency() const [member function]
cls.add_method('IsLowLatency',
'bool',
[],
is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
return
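# Usage sketch (illustrative, not part of the generated registrations): once these
# bindings are compiled, the station manager attached to a wifi device can be tuned
# from Python through the methods bound above. `GetRemoteStationManager()` on
# ns3::WifiNetDevice is assumed to be bound elsewhere in this module.
#
#   manager = device.GetRemoteStationManager()
#   manager.SetRtsCtsThreshold(2200)         # bound via add_method('SetRtsCtsThreshold', ...)
#   manager.SetFragmentationThreshold(1500)
#   manager.SetMaxSsrc(7)                    # station short retry count limit
#   manager.SetMaxSlrc(4)                    # station long retry count limit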
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3BooleanChecker_methods(root_module, cls):
## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker() [constructor]
cls.add_constructor([])
## boolean.h (module 'core'): ns3::BooleanChecker::BooleanChecker(ns3::BooleanChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BooleanChecker const &', 'arg0')])
return
def register_Ns3BooleanValue_methods(root_module, cls):
cls.add_output_stream_operator()
## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(ns3::BooleanValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::BooleanValue const &', 'arg0')])
## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue() [constructor]
cls.add_constructor([])
## boolean.h (module 'core'): ns3::BooleanValue::BooleanValue(bool value) [constructor]
cls.add_constructor([param('bool', 'value')])
## boolean.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::BooleanValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## boolean.h (module 'core'): bool ns3::BooleanValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## boolean.h (module 'core'): bool ns3::BooleanValue::Get() const [member function]
cls.add_method('Get',
'bool',
[],
is_const=True)
## boolean.h (module 'core'): std::string ns3::BooleanValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## boolean.h (module 'core'): void ns3::BooleanValue::Set(bool value) [member function]
cls.add_method('Set',
'void',
[param('bool', 'value')])
return
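# Usage sketch (illustrative, not part of the generated registrations): the value
# classes registered here back ns-3's attribute system from Python. The module name
# below is an assumption about a typical monolithic ns-3 build.
#
#   import ns3
#   v = ns3.BooleanValue(True)      # BooleanValue(bool value) constructor bound above
#   print(v.Get())                  # -> True
#   v.Set(False)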
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3Channel_methods(root_module, cls):
## channel.h (module 'network'): ns3::Channel::Channel(ns3::Channel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Channel const &', 'arg0')])
## channel.h (module 'network'): ns3::Channel::Channel() [constructor]
cls.add_constructor([])
## channel.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Channel::GetDevice(uint32_t i) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## channel.h (module 'network'): uint32_t ns3::Channel::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## channel.h (module 'network'): uint32_t ns3::Channel::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## channel.h (module 'network'): static ns3::TypeId ns3::Channel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
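# Usage sketch (illustrative): iterating the devices attached to a channel through
# the accessors bound above, assuming `channel` is any concrete ns3::Channel
# subclass obtained in a Python script:
#
#   for i in range(channel.GetNDevices()):
#       device = channel.GetDevice(i)
#       print(channel.GetId(), device)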
def register_Ns3CtrlBAckRequestHeader_methods(root_module, cls):
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckRequestHeader::CtrlBAckRequestHeader(ns3::CtrlBAckRequestHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CtrlBAckRequestHeader const &', 'arg0')])
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckRequestHeader::CtrlBAckRequestHeader() [constructor]
cls.add_constructor([])
## ctrl-headers.h (module 'wifi'): uint32_t ns3::CtrlBAckRequestHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ctrl-headers.h (module 'wifi'): ns3::TypeId ns3::CtrlBAckRequestHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint32_t ns3::CtrlBAckRequestHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint16_t ns3::CtrlBAckRequestHeader::GetStartingSequence() const [member function]
cls.add_method('GetStartingSequence',
'uint16_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint16_t ns3::CtrlBAckRequestHeader::GetStartingSequenceControl() const [member function]
cls.add_method('GetStartingSequenceControl',
'uint16_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint8_t ns3::CtrlBAckRequestHeader::GetTidInfo() const [member function]
cls.add_method('GetTidInfo',
'uint8_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): static ns3::TypeId ns3::CtrlBAckRequestHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckRequestHeader::IsBasic() const [member function]
cls.add_method('IsBasic',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckRequestHeader::IsCompressed() const [member function]
cls.add_method('IsCompressed',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckRequestHeader::IsMultiTid() const [member function]
cls.add_method('IsMultiTid',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckRequestHeader::MustSendHtImmediateAck() const [member function]
cls.add_method('MustSendHtImmediateAck',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::SetHtImmediateAck(bool immediateAck) [member function]
cls.add_method('SetHtImmediateAck',
'void',
[param('bool', 'immediateAck')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::SetStartingSequence(uint16_t seq) [member function]
cls.add_method('SetStartingSequence',
'void',
[param('uint16_t', 'seq')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::SetTidInfo(uint8_t tid) [member function]
cls.add_method('SetTidInfo',
'void',
[param('uint8_t', 'tid')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckRequestHeader::SetType(ns3::BlockAckType type) [member function]
cls.add_method('SetType',
'void',
[param('ns3::BlockAckType', 'type')])
return
def register_Ns3CtrlBAckResponseHeader_methods(root_module, cls):
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckResponseHeader::CtrlBAckResponseHeader(ns3::CtrlBAckResponseHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CtrlBAckResponseHeader const &', 'arg0')])
## ctrl-headers.h (module 'wifi'): ns3::CtrlBAckResponseHeader::CtrlBAckResponseHeader() [constructor]
cls.add_constructor([])
## ctrl-headers.h (module 'wifi'): uint32_t ns3::CtrlBAckResponseHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint16_t const * ns3::CtrlBAckResponseHeader::GetBitmap() const [member function]
cls.add_method('GetBitmap',
'uint16_t const *',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint64_t ns3::CtrlBAckResponseHeader::GetCompressedBitmap() const [member function]
cls.add_method('GetCompressedBitmap',
'uint64_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): ns3::TypeId ns3::CtrlBAckResponseHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint32_t ns3::CtrlBAckResponseHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): uint16_t ns3::CtrlBAckResponseHeader::GetStartingSequence() const [member function]
cls.add_method('GetStartingSequence',
'uint16_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint16_t ns3::CtrlBAckResponseHeader::GetStartingSequenceControl() const [member function]
cls.add_method('GetStartingSequenceControl',
'uint16_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): uint8_t ns3::CtrlBAckResponseHeader::GetTidInfo() const [member function]
cls.add_method('GetTidInfo',
'uint8_t',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): static ns3::TypeId ns3::CtrlBAckResponseHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsBasic() const [member function]
cls.add_method('IsBasic',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsCompressed() const [member function]
cls.add_method('IsCompressed',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsFragmentReceived(uint16_t seq, uint8_t frag) const [member function]
cls.add_method('IsFragmentReceived',
'bool',
[param('uint16_t', 'seq'), param('uint8_t', 'frag')],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsMultiTid() const [member function]
cls.add_method('IsMultiTid',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::IsPacketReceived(uint16_t seq) const [member function]
cls.add_method('IsPacketReceived',
'bool',
[param('uint16_t', 'seq')],
is_const=True)
## ctrl-headers.h (module 'wifi'): bool ns3::CtrlBAckResponseHeader::MustSendHtImmediateAck() const [member function]
cls.add_method('MustSendHtImmediateAck',
'bool',
[],
is_const=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::ResetBitmap() [member function]
cls.add_method('ResetBitmap',
'void',
[])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetHtImmediateAck(bool immeadiateAck) [member function]
cls.add_method('SetHtImmediateAck',
'void',
[param('bool', 'immeadiateAck')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetReceivedFragment(uint16_t seq, uint8_t frag) [member function]
cls.add_method('SetReceivedFragment',
'void',
[param('uint16_t', 'seq'), param('uint8_t', 'frag')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetReceivedPacket(uint16_t seq) [member function]
cls.add_method('SetReceivedPacket',
'void',
[param('uint16_t', 'seq')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetStartingSequence(uint16_t seq) [member function]
cls.add_method('SetStartingSequence',
'void',
[param('uint16_t', 'seq')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetStartingSequenceControl(uint16_t seqControl) [member function]
cls.add_method('SetStartingSequenceControl',
'void',
[param('uint16_t', 'seqControl')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetTidInfo(uint8_t tid) [member function]
cls.add_method('SetTidInfo',
'void',
[param('uint8_t', 'tid')])
## ctrl-headers.h (module 'wifi'): void ns3::CtrlBAckResponseHeader::SetType(ns3::BlockAckType type) [member function]
cls.add_method('SetType',
'void',
[param('ns3::BlockAckType', 'type')])
return
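# Usage sketch (illustrative): building and inspecting a block-ack response header
# with the setters/getters bound above. The COMPRESSED_BLOCK_ACK enumerator is
# assumed to be exposed by this module's ns3::BlockAckType registration.
#
#   resp = ns3.CtrlBAckResponseHeader()
#   resp.SetType(ns3.COMPRESSED_BLOCK_ACK)
#   resp.SetStartingSequence(0)
#   resp.SetReceivedPacket(3)
#   assert resp.IsPacketReceived(3)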
def register_Ns3Dcf_methods(root_module, cls):
## dcf.h (module 'wifi'): ns3::Dcf::Dcf() [constructor]
cls.add_constructor([])
## dcf.h (module 'wifi'): ns3::Dcf::Dcf(ns3::Dcf const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Dcf const &', 'arg0')])
## dcf.h (module 'wifi'): uint32_t ns3::Dcf::GetAifsn() const [member function]
cls.add_method('GetAifsn',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## dcf.h (module 'wifi'): uint32_t ns3::Dcf::GetMaxCw() const [member function]
cls.add_method('GetMaxCw',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## dcf.h (module 'wifi'): uint32_t ns3::Dcf::GetMinCw() const [member function]
cls.add_method('GetMinCw',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## dcf.h (module 'wifi'): static ns3::TypeId ns3::Dcf::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## dcf.h (module 'wifi'): void ns3::Dcf::SetAifsn(uint32_t aifsn) [member function]
cls.add_method('SetAifsn',
'void',
[param('uint32_t', 'aifsn')],
is_pure_virtual=True, is_virtual=True)
## dcf.h (module 'wifi'): void ns3::Dcf::SetMaxCw(uint32_t maxCw) [member function]
cls.add_method('SetMaxCw',
'void',
[param('uint32_t', 'maxCw')],
is_pure_virtual=True, is_virtual=True)
## dcf.h (module 'wifi'): void ns3::Dcf::SetMinCw(uint32_t minCw) [member function]
cls.add_method('SetMinCw',
'void',
[param('uint32_t', 'minCw')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3DoubleValue_methods(root_module, cls):
## double.h (module 'core'): ns3::DoubleValue::DoubleValue() [constructor]
cls.add_constructor([])
## double.h (module 'core'): ns3::DoubleValue::DoubleValue(ns3::DoubleValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::DoubleValue const &', 'arg0')])
## double.h (module 'core'): ns3::DoubleValue::DoubleValue(double const & value) [constructor]
cls.add_constructor([param('double const &', 'value')])
## double.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::DoubleValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## double.h (module 'core'): bool ns3::DoubleValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## double.h (module 'core'): double ns3::DoubleValue::Get() const [member function]
cls.add_method('Get',
'double',
[],
is_const=True)
## double.h (module 'core'): std::string ns3::DoubleValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## double.h (module 'core'): void ns3::DoubleValue::Set(double const & value) [member function]
cls.add_method('Set',
'void',
[param('double const &', 'value')])
return
def register_Ns3EdcaTxopN_methods(root_module, cls):
## edca-txop-n.h (module 'wifi'): static ns3::TypeId ns3::EdcaTxopN::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## edca-txop-n.h (module 'wifi'): ns3::EdcaTxopN::EdcaTxopN() [constructor]
cls.add_constructor([])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetLow(ns3::Ptr<ns3::MacLow> low) [member function]
cls.add_method('SetLow',
'void',
[param('ns3::Ptr< ns3::MacLow >', 'low')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetTxMiddle(ns3::MacTxMiddle * txMiddle) [member function]
cls.add_method('SetTxMiddle',
'void',
[param('ns3::MacTxMiddle *', 'txMiddle')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetManager(ns3::DcfManager * manager) [member function]
cls.add_method('SetManager',
'void',
[param('ns3::DcfManager *', 'manager')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetTxOkCallback(ns3::Callback<void, ns3::WifiMacHeader const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetTxOkCallback',
'void',
[param('ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetTxFailedCallback(ns3::Callback<void, ns3::WifiMacHeader const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetTxFailedCallback',
'void',
[param('ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> remoteManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'remoteManager')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetTypeOfStation(ns3::TypeOfStation type) [member function]
cls.add_method('SetTypeOfStation',
'void',
[param('ns3::TypeOfStation', 'type')])
## edca-txop-n.h (module 'wifi'): ns3::TypeOfStation ns3::EdcaTxopN::GetTypeOfStation() const [member function]
cls.add_method('GetTypeOfStation',
'ns3::TypeOfStation',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): ns3::Ptr<ns3::WifiMacQueue> ns3::EdcaTxopN::GetQueue() const [member function]
cls.add_method('GetQueue',
'ns3::Ptr< ns3::WifiMacQueue >',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetMinCw(uint32_t minCw) [member function]
cls.add_method('SetMinCw',
'void',
[param('uint32_t', 'minCw')],
is_virtual=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetMaxCw(uint32_t maxCw) [member function]
cls.add_method('SetMaxCw',
'void',
[param('uint32_t', 'maxCw')],
is_virtual=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetAifsn(uint32_t aifsn) [member function]
cls.add_method('SetAifsn',
'void',
[param('uint32_t', 'aifsn')],
is_virtual=True)
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetMinCw() const [member function]
cls.add_method('GetMinCw',
'uint32_t',
[],
is_const=True, is_virtual=True)
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetMaxCw() const [member function]
cls.add_method('GetMaxCw',
'uint32_t',
[],
is_const=True, is_virtual=True)
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetAifsn() const [member function]
cls.add_method('GetAifsn',
'uint32_t',
[],
is_const=True, is_virtual=True)
## edca-txop-n.h (module 'wifi'): ns3::Ptr<ns3::MacLow> ns3::EdcaTxopN::Low() [member function]
cls.add_method('Low',
'ns3::Ptr< ns3::MacLow >',
[])
## edca-txop-n.h (module 'wifi'): ns3::Ptr<ns3::MsduAggregator> ns3::EdcaTxopN::GetMsduAggregator() const [member function]
cls.add_method('GetMsduAggregator',
'ns3::Ptr< ns3::MsduAggregator >',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedsAccess() const [member function]
cls.add_method('NeedsAccess',
'bool',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NotifyAccessGranted() [member function]
cls.add_method('NotifyAccessGranted',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NotifyInternalCollision() [member function]
cls.add_method('NotifyInternalCollision',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NotifyCollision() [member function]
cls.add_method('NotifyCollision',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NotifyChannelSwitching() [member function]
cls.add_method('NotifyChannelSwitching',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotCts(double snr, ns3::WifiMode txMode) [member function]
cls.add_method('GotCts',
'void',
[param('double', 'snr'), param('ns3::WifiMode', 'txMode')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::MissedCts() [member function]
cls.add_method('MissedCts',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotAck(double snr, ns3::WifiMode txMode) [member function]
cls.add_method('GotAck',
'void',
[param('double', 'snr'), param('ns3::WifiMode', 'txMode')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotBlockAck(ns3::CtrlBAckResponseHeader const * blockAck, ns3::Mac48Address recipient) [member function]
cls.add_method('GotBlockAck',
'void',
[param('ns3::CtrlBAckResponseHeader const *', 'blockAck'), param('ns3::Mac48Address', 'recipient')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::MissedBlockAck() [member function]
cls.add_method('MissedBlockAck',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotAddBaResponse(ns3::MgtAddBaResponseHeader const * respHdr, ns3::Mac48Address recipient) [member function]
cls.add_method('GotAddBaResponse',
'void',
[param('ns3::MgtAddBaResponseHeader const *', 'respHdr'), param('ns3::Mac48Address', 'recipient')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::GotDelBaFrame(ns3::MgtDelBaHeader const * delBaHdr, ns3::Mac48Address recipient) [member function]
cls.add_method('GotDelBaFrame',
'void',
[param('ns3::MgtDelBaHeader const *', 'delBaHdr'), param('ns3::Mac48Address', 'recipient')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::MissedAck() [member function]
cls.add_method('MissedAck',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::StartNext() [member function]
cls.add_method('StartNext',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::RestartAccessIfNeeded() [member function]
cls.add_method('RestartAccessIfNeeded',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::StartAccessIfNeeded() [member function]
cls.add_method('StartAccessIfNeeded',
'void',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedRts() [member function]
cls.add_method('NeedRts',
'bool',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedRtsRetransmission() [member function]
cls.add_method('NeedRtsRetransmission',
'bool',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedDataRetransmission() [member function]
cls.add_method('NeedDataRetransmission',
'bool',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::NeedFragmentation() const [member function]
cls.add_method('NeedFragmentation',
'bool',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetNextFragmentSize() [member function]
cls.add_method('GetNextFragmentSize',
'uint32_t',
[])
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetFragmentSize() [member function]
cls.add_method('GetFragmentSize',
'uint32_t',
[])
## edca-txop-n.h (module 'wifi'): uint32_t ns3::EdcaTxopN::GetFragmentOffset() [member function]
cls.add_method('GetFragmentOffset',
'uint32_t',
[])
## edca-txop-n.h (module 'wifi'): bool ns3::EdcaTxopN::IsLastFragment() const [member function]
cls.add_method('IsLastFragment',
'bool',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::NextFragment() [member function]
cls.add_method('NextFragment',
'void',
[])
## edca-txop-n.h (module 'wifi'): ns3::Ptr<ns3::Packet> ns3::EdcaTxopN::GetFragmentPacket(ns3::WifiMacHeader * hdr) [member function]
cls.add_method('GetFragmentPacket',
'ns3::Ptr< ns3::Packet >',
[param('ns3::WifiMacHeader *', 'hdr')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetAccessCategory(ns3::AcIndex ac) [member function]
cls.add_method('SetAccessCategory',
'void',
[param('ns3::AcIndex', 'ac')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::Queue(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('Queue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetMsduAggregator(ns3::Ptr<ns3::MsduAggregator> aggr) [member function]
cls.add_method('SetMsduAggregator',
'void',
[param('ns3::Ptr< ns3::MsduAggregator >', 'aggr')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::PushFront(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('PushFront',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::CompleteConfig() [member function]
cls.add_method('CompleteConfig',
'void',
[])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetBlockAckThreshold(uint8_t threshold) [member function]
cls.add_method('SetBlockAckThreshold',
'void',
[param('uint8_t', 'threshold')])
## edca-txop-n.h (module 'wifi'): uint8_t ns3::EdcaTxopN::GetBlockAckThreshold() const [member function]
cls.add_method('GetBlockAckThreshold',
'uint8_t',
[],
is_const=True)
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SetBlockAckInactivityTimeout(uint16_t timeout) [member function]
cls.add_method('SetBlockAckInactivityTimeout',
'void',
[param('uint16_t', 'timeout')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::SendDelbaFrame(ns3::Mac48Address addr, uint8_t tid, bool byOriginator) [member function]
cls.add_method('SendDelbaFrame',
'void',
[param('ns3::Mac48Address', 'addr'), param('uint8_t', 'tid'), param('bool', 'byOriginator')])
## edca-txop-n.h (module 'wifi'): void ns3::EdcaTxopN::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='private', is_virtual=True)
return
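# Usage sketch (illustrative): EdcaTxopN instances are normally created and wired up
# by the MAC high classes, but the Dcf-style knobs bound above could, in principle,
# be adjusted from Python on an existing `edca` object:
#
#   edca.SetMinCw(15)
#   edca.SetMaxCw(1023)
#   edca.SetAifsn(2)
#   edca.SetBlockAckThreshold(2)    # minimum queued packets before a block ack agreement is attempted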
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3EnumChecker_methods(root_module, cls):
## enum.h (module 'core'): ns3::EnumChecker::EnumChecker(ns3::EnumChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EnumChecker const &', 'arg0')])
## enum.h (module 'core'): ns3::EnumChecker::EnumChecker() [constructor]
cls.add_constructor([])
## enum.h (module 'core'): void ns3::EnumChecker::Add(int v, std::string name) [member function]
cls.add_method('Add',
'void',
[param('int', 'v'), param('std::string', 'name')])
## enum.h (module 'core'): void ns3::EnumChecker::AddDefault(int v, std::string name) [member function]
cls.add_method('AddDefault',
'void',
[param('int', 'v'), param('std::string', 'name')])
## enum.h (module 'core'): bool ns3::EnumChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_const=True, is_virtual=True)
## enum.h (module 'core'): bool ns3::EnumChecker::Copy(ns3::AttributeValue const & src, ns3::AttributeValue & dst) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'src'), param('ns3::AttributeValue &', 'dst')],
is_const=True, is_virtual=True)
## enum.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EnumChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## enum.h (module 'core'): std::string ns3::EnumChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_const=True, is_virtual=True)
## enum.h (module 'core'): std::string ns3::EnumChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_const=True, is_virtual=True)
## enum.h (module 'core'): bool ns3::EnumChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_const=True, is_virtual=True)
return
def register_Ns3EnumValue_methods(root_module, cls):
## enum.h (module 'core'): ns3::EnumValue::EnumValue(ns3::EnumValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EnumValue const &', 'arg0')])
## enum.h (module 'core'): ns3::EnumValue::EnumValue() [constructor]
cls.add_constructor([])
## enum.h (module 'core'): ns3::EnumValue::EnumValue(int v) [constructor]
cls.add_constructor([param('int', 'v')])
## enum.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EnumValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## enum.h (module 'core'): bool ns3::EnumValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## enum.h (module 'core'): int ns3::EnumValue::Get() const [member function]
cls.add_method('Get',
'int',
[],
is_const=True)
## enum.h (module 'core'): std::string ns3::EnumValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## enum.h (module 'core'): void ns3::EnumValue::Set(int v) [member function]
cls.add_method('Set',
'void',
[param('int', 'v')])
return
def register_Ns3EventImpl_methods(root_module, cls):
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
cls.add_constructor([])
## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
cls.add_method('Cancel',
'void',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
cls.add_method('Invoke',
'void',
[])
## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
cls.add_method('IsCancelled',
'bool',
[])
## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
cls.add_method('Notify',
'void',
[],
is_pure_virtual=True, visibility='protected', is_virtual=True)
return
def register_Ns3ExtendedSupportedRatesIE_methods(root_module, cls):
## supported-rates.h (module 'wifi'): ns3::ExtendedSupportedRatesIE::ExtendedSupportedRatesIE(ns3::ExtendedSupportedRatesIE const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ExtendedSupportedRatesIE const &', 'arg0')])
## supported-rates.h (module 'wifi'): ns3::ExtendedSupportedRatesIE::ExtendedSupportedRatesIE() [constructor]
cls.add_constructor([])
## supported-rates.h (module 'wifi'): ns3::ExtendedSupportedRatesIE::ExtendedSupportedRatesIE(ns3::SupportedRates * rates) [constructor]
cls.add_constructor([param('ns3::SupportedRates *', 'rates')])
## supported-rates.h (module 'wifi'): uint8_t ns3::ExtendedSupportedRatesIE::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## supported-rates.h (module 'wifi'): ns3::WifiInformationElementId ns3::ExtendedSupportedRatesIE::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): uint8_t ns3::ExtendedSupportedRatesIE::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): uint16_t ns3::ExtendedSupportedRatesIE::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint16_t',
[],
is_const=True)
## supported-rates.h (module 'wifi'): ns3::Buffer::Iterator ns3::ExtendedSupportedRatesIE::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True)
## supported-rates.h (module 'wifi'): void ns3::ExtendedSupportedRatesIE::SerializeInformationField(ns3::Buffer::Iterator start) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
return
def register_Ns3IntegerValue_methods(root_module, cls):
## integer.h (module 'core'): ns3::IntegerValue::IntegerValue() [constructor]
cls.add_constructor([])
## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(ns3::IntegerValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IntegerValue const &', 'arg0')])
## integer.h (module 'core'): ns3::IntegerValue::IntegerValue(int64_t const & value) [constructor]
cls.add_constructor([param('int64_t const &', 'value')])
## integer.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::IntegerValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## integer.h (module 'core'): bool ns3::IntegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## integer.h (module 'core'): int64_t ns3::IntegerValue::Get() const [member function]
cls.add_method('Get',
'int64_t',
[],
is_const=True)
## integer.h (module 'core'): std::string ns3::IntegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## integer.h (module 'core'): void ns3::IntegerValue::Set(int64_t const & value) [member function]
cls.add_method('Set',
'void',
[param('int64_t const &', 'value')])
return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Address',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Address const &', 'value')])
return
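# Usage sketch (illustrative): the address value wrappers registered here are what let
# Python code pass addresses through the attribute system. The Ipv4Address string
# constructor is assumed to be bound elsewhere in this module.
#
#   addr = ns3.Ipv4AddressValue(ns3.Ipv4Address("10.1.1.1"))
#   print(addr.Get())               # -> 10.1.1.1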
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker::Ipv4MaskChecker(ns3::Ipv4MaskChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskChecker const &', 'arg0')])
return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
cls.add_constructor([])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv4Mask',
[],
is_const=True)
## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv4Mask const &', 'value')])
return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker::Ipv6AddressChecker(ns3::Ipv6AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressChecker const &', 'arg0')])
return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Address',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Address const &', 'value')])
return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker::Ipv6PrefixChecker(ns3::Ipv6PrefixChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixChecker const &', 'arg0')])
return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
cls.add_constructor([])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ipv6Prefix',
[],
is_const=True)
## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ipv6Prefix const &', 'value')])
return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker::Mac48AddressChecker(ns3::Mac48AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48AddressChecker const &', 'arg0')])
return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor]
cls.add_constructor([])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor]
cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Mac48Address',
[],
is_const=True)
## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Mac48Address const &', 'value')])
return
def register_Ns3MeshInformationElementVector_methods(root_module, cls):
## mesh-information-element-vector.h (module 'mesh'): ns3::MeshInformationElementVector::MeshInformationElementVector() [constructor]
cls.add_constructor([])
## mesh-information-element-vector.h (module 'mesh'): ns3::MeshInformationElementVector::MeshInformationElementVector(ns3::MeshInformationElementVector const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MeshInformationElementVector const &', 'arg0')])
## mesh-information-element-vector.h (module 'mesh'): uint32_t ns3::MeshInformationElementVector::DeserializeSingleIe(ns3::Buffer::Iterator start) [member function]
cls.add_method('DeserializeSingleIe',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
return
def register_Ns3MeshL2RoutingProtocol_methods(root_module, cls):
## mesh-l2-routing-protocol.h (module 'mesh'): ns3::MeshL2RoutingProtocol::MeshL2RoutingProtocol() [constructor]
cls.add_constructor([])
## mesh-l2-routing-protocol.h (module 'mesh'): ns3::MeshL2RoutingProtocol::MeshL2RoutingProtocol(ns3::MeshL2RoutingProtocol const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MeshL2RoutingProtocol const &', 'arg0')])
## mesh-l2-routing-protocol.h (module 'mesh'): ns3::Ptr<ns3::MeshPointDevice> ns3::MeshL2RoutingProtocol::GetMeshPoint() const [member function]
cls.add_method('GetMeshPoint',
'ns3::Ptr< ns3::MeshPointDevice >',
[],
is_const=True)
## mesh-l2-routing-protocol.h (module 'mesh'): static ns3::TypeId ns3::MeshL2RoutingProtocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mesh-l2-routing-protocol.h (module 'mesh'): bool ns3::MeshL2RoutingProtocol::RemoveRoutingStuff(uint32_t fromIface, ns3::Mac48Address const source, ns3::Mac48Address const destination, ns3::Ptr<ns3::Packet> packet, uint16_t & protocolType) [member function]
cls.add_method('RemoveRoutingStuff',
'bool',
[param('uint32_t', 'fromIface'), param('ns3::Mac48Address const', 'source'), param('ns3::Mac48Address const', 'destination'), param('ns3::Ptr< ns3::Packet >', 'packet'), param('uint16_t &', 'protocolType')],
is_pure_virtual=True, is_virtual=True)
## mesh-l2-routing-protocol.h (module 'mesh'): bool ns3::MeshL2RoutingProtocol::RequestRoute(uint32_t sourceIface, ns3::Mac48Address const source, ns3::Mac48Address const destination, ns3::Ptr<ns3::Packet const> packet, uint16_t protocolType, ns3::Callback<void, bool, ns3::Ptr<ns3::Packet>, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty> routeReply) [member function]
cls.add_method('RequestRoute',
'bool',
[param('uint32_t', 'sourceIface'), param('ns3::Mac48Address const', 'source'), param('ns3::Mac48Address const', 'destination'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocolType'), param('ns3::Callback< void, bool, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty >', 'routeReply')],
is_pure_virtual=True, is_virtual=True)
## mesh-l2-routing-protocol.h (module 'mesh'): void ns3::MeshL2RoutingProtocol::SetMeshPoint(ns3::Ptr<ns3::MeshPointDevice> mp) [member function]
cls.add_method('SetMeshPoint',
'void',
[param('ns3::Ptr< ns3::MeshPointDevice >', 'mp')])
return
def register_Ns3MeshStack_methods(root_module, cls):
## mesh-stack-installer.h (module 'mesh'): ns3::MeshStack::MeshStack() [constructor]
cls.add_constructor([])
## mesh-stack-installer.h (module 'mesh'): ns3::MeshStack::MeshStack(ns3::MeshStack const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MeshStack const &', 'arg0')])
## mesh-stack-installer.h (module 'mesh'): bool ns3::MeshStack::InstallStack(ns3::Ptr<ns3::MeshPointDevice> mp) [member function]
cls.add_method('InstallStack',
'bool',
[param('ns3::Ptr< ns3::MeshPointDevice >', 'mp')],
is_pure_virtual=True, is_virtual=True)
## mesh-stack-installer.h (module 'mesh'): void ns3::MeshStack::Report(ns3::Ptr<ns3::MeshPointDevice> const mp, std::ostream & arg1) [member function]
cls.add_method('Report',
'void',
[param('ns3::Ptr< ns3::MeshPointDevice > const', 'mp'), param('std::ostream &', 'arg1')],
is_pure_virtual=True, is_virtual=True)
## mesh-stack-installer.h (module 'mesh'): void ns3::MeshStack::ResetStats(ns3::Ptr<ns3::MeshPointDevice> const mp) [member function]
cls.add_method('ResetStats',
'void',
[param('ns3::Ptr< ns3::MeshPointDevice > const', 'mp')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3MeshWifiInterfaceMacPlugin_methods(root_module, cls):
## mesh-wifi-interface-mac-plugin.h (module 'mesh'): ns3::MeshWifiInterfaceMacPlugin::MeshWifiInterfaceMacPlugin() [constructor]
cls.add_constructor([])
## mesh-wifi-interface-mac-plugin.h (module 'mesh'): ns3::MeshWifiInterfaceMacPlugin::MeshWifiInterfaceMacPlugin(ns3::MeshWifiInterfaceMacPlugin const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MeshWifiInterfaceMacPlugin const &', 'arg0')])
## mesh-wifi-interface-mac-plugin.h (module 'mesh'): bool ns3::MeshWifiInterfaceMacPlugin::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const & header) [member function]
cls.add_method('Receive',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const &', 'header')],
is_pure_virtual=True, is_virtual=True)
## mesh-wifi-interface-mac-plugin.h (module 'mesh'): void ns3::MeshWifiInterfaceMacPlugin::SetParent(ns3::Ptr<ns3::MeshWifiInterfaceMac> parent) [member function]
cls.add_method('SetParent',
'void',
[param('ns3::Ptr< ns3::MeshWifiInterfaceMac >', 'parent')],
is_pure_virtual=True, is_virtual=True)
## mesh-wifi-interface-mac-plugin.h (module 'mesh'): void ns3::MeshWifiInterfaceMacPlugin::UpdateBeacon(ns3::MeshWifiBeacon & beacon) const [member function]
cls.add_method('UpdateBeacon',
'void',
[param('ns3::MeshWifiBeacon &', 'beacon')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## mesh-wifi-interface-mac-plugin.h (module 'mesh'): bool ns3::MeshWifiInterfaceMacPlugin::UpdateOutcomingFrame(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader & header, ns3::Mac48Address from, ns3::Mac48Address to) [member function]
cls.add_method('UpdateOutcomingFrame',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader &', 'header'), param('ns3::Mac48Address', 'from'), param('ns3::Mac48Address', 'to')],
is_pure_virtual=True, is_virtual=True)
return
def register_Ns3MgtBeaconHeader_methods(root_module, cls):
## mgt-headers.h (module 'wifi'): ns3::MgtBeaconHeader::MgtBeaconHeader() [constructor]
cls.add_constructor([])
## mgt-headers.h (module 'wifi'): ns3::MgtBeaconHeader::MgtBeaconHeader(ns3::MgtBeaconHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MgtBeaconHeader const &', 'arg0')])
return
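## Note: register_Ns3NetDevice_methods below wraps the abstract ns3::NetDevice
## interface; every member is registered as pure virtual, so Python code never
## instantiates it directly and instead works with concrete devices (for this
## module, typically ns3::MeshPointDevice and the underlying Wi-Fi devices).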
def register_Ns3NetDevice_methods(root_module, cls):
## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
cls.add_constructor([])
## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'address')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_pure_virtual=True, is_virtual=True)
## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3NixVector_methods(root_module, cls):
cls.add_output_stream_operator()
## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
cls.add_constructor([])
## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [copy constructor]
cls.add_constructor([param('ns3::NixVector const &', 'o')])
## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
cls.add_method('AddNeighborIndex',
'void',
[param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
cls.add_method('BitCount',
'uint32_t',
[param('uint32_t', 'numberOfNeighbors')],
is_const=True)
## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
cls.add_method('ExtractNeighborIndex',
'uint32_t',
[param('uint32_t', 'numberOfBits')])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
cls.add_method('GetRemainingBits',
'uint32_t',
[])
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
return
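## Note: register_Ns3Node_methods below wraps ns3::Node, the container that
## aggregates devices and applications. A minimal, illustrative sketch of the
## resulting binding (the 'ns.network' module name is an assumption):
##
##   import ns.network
##   node = ns.network.Node()
##   print(node.GetId(), node.GetNDevices())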
def register_Ns3Node_methods(root_module, cls):
## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Node const &', 'arg0')])
## node.h (module 'network'): ns3::Node::Node() [constructor]
cls.add_constructor([])
## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
cls.add_constructor([param('uint32_t', 'systemId')])
## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
cls.add_method('AddApplication',
'uint32_t',
[param('ns3::Ptr< ns3::Application >', 'application')])
## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
cls.add_method('AddDevice',
'uint32_t',
[param('ns3::Ptr< ns3::NetDevice >', 'device')])
## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
cls.add_method('ChecksumEnabled',
'bool',
[],
is_static=True)
## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
cls.add_method('GetApplication',
'ns3::Ptr< ns3::Application >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'index')],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
cls.add_method('GetId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
cls.add_method('GetNApplications',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
cls.add_method('GetSystemId',
'uint32_t',
[],
is_const=True)
## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('RegisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
cls.add_method('RegisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Callback<void,ns3::Ptr<ns3::NetDevice>,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> listener) [member function]
cls.add_method('UnregisterDeviceAdditionListener',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Callback<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> handler) [member function]
cls.add_method('UnregisterProtocolHandler',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## node.h (module 'network'): void ns3::Node::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryChecker::ObjectFactoryChecker(ns3::ObjectFactoryChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryChecker const &', 'arg0')])
return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue() [constructor]
cls.add_constructor([])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactoryValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectFactoryValue const &', 'arg0')])
## object-factory.h (module 'core'): ns3::ObjectFactoryValue::ObjectFactoryValue(ns3::ObjectFactory const & value) [constructor]
cls.add_constructor([param('ns3::ObjectFactory const &', 'value')])
## object-factory.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::ObjectFactoryValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): bool ns3::ObjectFactoryValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## object-factory.h (module 'core'): ns3::ObjectFactory ns3::ObjectFactoryValue::Get() const [member function]
cls.add_method('Get',
'ns3::ObjectFactory',
[],
is_const=True)
## object-factory.h (module 'core'): std::string ns3::ObjectFactoryValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## object-factory.h (module 'core'): void ns3::ObjectFactoryValue::Set(ns3::ObjectFactory const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::ObjectFactory const &', 'value')])
return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(ns3::OutputStreamWrapper const & arg0) [copy constructor]
cls.add_constructor([param('ns3::OutputStreamWrapper const &', 'arg0')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::string filename, std::_Ios_Openmode filemode) [constructor]
cls.add_constructor([param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode')])
## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper::OutputStreamWrapper(std::ostream * os) [constructor]
cls.add_constructor([param('std::ostream *', 'os')])
## output-stream-wrapper.h (module 'network'): std::ostream * ns3::OutputStreamWrapper::GetStream() [member function]
cls.add_method('GetStream',
'std::ostream *',
[])
return
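## Note: register_Ns3Packet_methods below wraps ns3::Packet. The constructors
## and accessors registered there support a usage pattern like the following
## illustrative sketch (the import name is an assumption of the build):
##
##   import ns.network
##   pkt = ns.network.Packet(64)   # 64-byte dummy payload
##   print(pkt.GetSize(), pkt.GetUid())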
def register_Ns3Packet_methods(root_module, cls):
cls.add_output_stream_operator()
## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
cls.add_constructor([])
## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [copy constructor]
cls.add_constructor([param('ns3::Packet const &', 'o')])
## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
cls.add_constructor([param('uint32_t', 'size')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<ns3::Packet const> packet) [member function]
cls.add_method('AddAtEnd',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet')])
## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddByteTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
cls.add_method('AddHeader',
'void',
[param('ns3::Header const &', 'header')])
## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
cls.add_method('AddPacketTag',
'void',
[param('ns3::Tag const &', 'tag')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
cls.add_method('AddPaddingAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
cls.add_method('AddTrailer',
'void',
[param('ns3::Trailer const &', 'trailer')])
## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
cls.add_method('BeginItem',
'ns3::PacketMetadata::ItemIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::Packet >',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
cls.add_method('CopyData',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
cls.add_method('CopyData',
'void',
[param('std::ostream *', 'os'), param('uint32_t', 'size')],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
cls.add_method('CreateFragment',
'ns3::Ptr< ns3::Packet >',
[param('uint32_t', 'start'), param('uint32_t', 'length')],
is_const=True)
## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
cls.add_method('EnableChecking',
'void',
[],
is_static=True)
## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
cls.add_method('EnablePrinting',
'void',
[],
is_static=True)
## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
cls.add_method('FindFirstMatchingByteTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
cls.add_method('GetByteTagIterator',
'ns3::ByteTagIterator',
[],
is_const=True)
## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
cls.add_method('GetNixVector',
'ns3::Ptr< ns3::NixVector >',
[],
is_const=True)
## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
cls.add_method('GetPacketTagIterator',
'ns3::PacketTagIterator',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
cls.add_method('GetSize',
'uint32_t',
[],
is_const=True)
## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
cls.add_method('GetUid',
'uint64_t',
[],
is_const=True)
## packet.h (module 'network'): uint8_t const * ns3::Packet::PeekData() const [member function]
cls.add_method('PeekData',
'uint8_t const *',
[],
deprecated=True, is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
cls.add_method('PeekHeader',
'uint32_t',
[param('ns3::Header &', 'header')],
is_const=True)
## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
cls.add_method('PeekPacketTag',
'bool',
[param('ns3::Tag &', 'tag')],
is_const=True)
## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('PeekTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
cls.add_method('PrintByteTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
cls.add_method('PrintPacketTags',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
cls.add_method('RemoveAllByteTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
cls.add_method('RemoveAllPacketTags',
'void',
[])
## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
cls.add_method('RemoveAtEnd',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
cls.add_method('RemoveAtStart',
'void',
[param('uint32_t', 'size')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
cls.add_method('RemoveHeader',
'uint32_t',
[param('ns3::Header &', 'header')])
## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
cls.add_method('RemovePacketTag',
'bool',
[param('ns3::Tag &', 'tag')])
## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
cls.add_method('RemoveTrailer',
'uint32_t',
[param('ns3::Trailer &', 'trailer')])
## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
cls.add_method('Serialize',
'uint32_t',
[param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
is_const=True)
## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> arg0) [member function]
cls.add_method('SetNixVector',
'void',
[param('ns3::Ptr< ns3::NixVector >', 'arg0')])
return
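## Note: register_Ns3RegularWifiMac_methods below wraps ns3::RegularWifiMac,
## the common timing/addressing base that concrete MAC implementations (such
## as the mesh interface MAC used by this module) build on; Enqueue(packet, to)
## remains pure virtual here and is supplied by those subclasses.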
def register_Ns3RegularWifiMac_methods(root_module, cls):
## regular-wifi-mac.h (module 'wifi'): static ns3::TypeId ns3::RegularWifiMac::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## regular-wifi-mac.h (module 'wifi'): ns3::RegularWifiMac::RegularWifiMac() [constructor]
cls.add_constructor([])
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetSlot(ns3::Time slotTime) [member function]
cls.add_method('SetSlot',
'void',
[param('ns3::Time', 'slotTime')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetSifs(ns3::Time sifs) [member function]
cls.add_method('SetSifs',
'void',
[param('ns3::Time', 'sifs')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetEifsNoDifs(ns3::Time eifsNoDifs) [member function]
cls.add_method('SetEifsNoDifs',
'void',
[param('ns3::Time', 'eifsNoDifs')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetPifs(ns3::Time pifs) [member function]
cls.add_method('SetPifs',
'void',
[param('ns3::Time', 'pifs')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetCtsTimeout(ns3::Time ctsTimeout) [member function]
cls.add_method('SetCtsTimeout',
'void',
[param('ns3::Time', 'ctsTimeout')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetAckTimeout(ns3::Time ackTimeout) [member function]
cls.add_method('SetAckTimeout',
'void',
[param('ns3::Time', 'ackTimeout')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetPifs() const [member function]
cls.add_method('GetPifs',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetSifs() const [member function]
cls.add_method('GetSifs',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetSlot() const [member function]
cls.add_method('GetSlot',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetEifsNoDifs() const [member function]
cls.add_method('GetEifsNoDifs',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetCtsTimeout() const [member function]
cls.add_method('GetCtsTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetAckTimeout() const [member function]
cls.add_method('GetAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Mac48Address ns3::RegularWifiMac::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Mac48Address',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Ssid ns3::RegularWifiMac::GetSsid() const [member function]
cls.add_method('GetSsid',
'ns3::Ssid',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetAddress(ns3::Mac48Address address) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Mac48Address', 'address')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetSsid(ns3::Ssid ssid) [member function]
cls.add_method('SetSsid',
'void',
[param('ns3::Ssid', 'ssid')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetBssid(ns3::Mac48Address bssid) [member function]
cls.add_method('SetBssid',
'void',
[param('ns3::Mac48Address', 'bssid')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Mac48Address ns3::RegularWifiMac::GetBssid() const [member function]
cls.add_method('GetBssid',
'ns3::Mac48Address',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetPromisc() [member function]
cls.add_method('SetPromisc',
'void',
[],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to, ns3::Mac48Address from) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to'), param('ns3::Mac48Address', 'from')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): bool ns3::RegularWifiMac::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to')],
is_pure_virtual=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetWifiPhy(ns3::Ptr<ns3::WifiPhy> phy) [member function]
cls.add_method('SetWifiPhy',
'void',
[param('ns3::Ptr< ns3::WifiPhy >', 'phy')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Ptr<ns3::WifiPhy> ns3::RegularWifiMac::GetWifiPhy() const [member function]
cls.add_method('GetWifiPhy',
'ns3::Ptr< ns3::WifiPhy >',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> stationManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'stationManager')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Ptr<ns3::WifiRemoteStationManager> ns3::RegularWifiMac::GetWifiRemoteStationManager() const [member function]
cls.add_method('GetWifiRemoteStationManager',
'ns3::Ptr< ns3::WifiRemoteStationManager >',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetForwardUpCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet>, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> upCallback) [member function]
cls.add_method('SetForwardUpCallback',
'void',
[param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'upCallback')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetLinkUpCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkUp) [member function]
cls.add_method('SetLinkUpCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkUp')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetLinkDownCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkDown) [member function]
cls.add_method('SetLinkDownCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkDown')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetBasicBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetBasicBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetBasicBlockAckTimeout() const [member function]
cls.add_method('GetBasicBlockAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetCompressedBlockAckTimeout(ns3::Time blockAckTimeout) [member function]
cls.add_method('SetCompressedBlockAckTimeout',
'void',
[param('ns3::Time', 'blockAckTimeout')],
is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): ns3::Time ns3::RegularWifiMac::GetCompressedBlockAckTimeout() const [member function]
cls.add_method('GetCompressedBlockAckTimeout',
'ns3::Time',
[],
is_const=True, is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::FinishConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('FinishConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetTypeOfStation(ns3::TypeOfStation type) [member function]
cls.add_method('SetTypeOfStation',
'void',
[param('ns3::TypeOfStation', 'type')],
visibility='protected')
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::TxOk(ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('TxOk',
'void',
[param('ns3::WifiMacHeader const &', 'hdr')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::TxFailed(ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('TxFailed',
'void',
[param('ns3::WifiMacHeader const &', 'hdr')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::ForwardUp(ns3::Ptr<ns3::Packet> packet, ns3::Mac48Address from, ns3::Mac48Address to) [member function]
cls.add_method('ForwardUp',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Mac48Address', 'from'), param('ns3::Mac48Address', 'to')],
visibility='protected')
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::DeaggregateAmsduAndForward(ns3::Ptr<ns3::Packet> aggregatedPacket, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('DeaggregateAmsduAndForward',
'void',
[param('ns3::Ptr< ns3::Packet >', 'aggregatedPacket'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SendAddBaResponse(ns3::MgtAddBaRequestHeader const * reqHdr, ns3::Mac48Address originator) [member function]
cls.add_method('SendAddBaResponse',
'void',
[param('ns3::MgtAddBaRequestHeader const *', 'reqHdr'), param('ns3::Mac48Address', 'originator')],
visibility='protected', is_virtual=True)
## regular-wifi-mac.h (module 'wifi'): void ns3::RegularWifiMac::SetQosSupported(bool enable) [member function]
cls.add_method('SetQosSupported',
'void',
[param('bool', 'enable')],
visibility='protected')
## regular-wifi-mac.h (module 'wifi'): bool ns3::RegularWifiMac::GetQosSupported() const [member function]
cls.add_method('GetQosSupported',
'bool',
[],
is_const=True, visibility='protected')
return
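## Note: register_Ns3Ssid_methods below wraps ns3::Ssid, the information
## element carrying the network name. Illustrative sketch only (the 'ns.wifi'
## import name is an assumption):
##
##   import ns.wifi
##   ssid = ns.wifi.Ssid('mesh-ssid')
##   print(ssid.IsBroadcast())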
def register_Ns3Ssid_methods(root_module, cls):
cls.add_output_stream_operator()
## ssid.h (module 'wifi'): ns3::Ssid::Ssid(ns3::Ssid const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ssid const &', 'arg0')])
## ssid.h (module 'wifi'): ns3::Ssid::Ssid() [constructor]
cls.add_constructor([])
## ssid.h (module 'wifi'): ns3::Ssid::Ssid(std::string s) [constructor]
cls.add_constructor([param('std::string', 's')])
## ssid.h (module 'wifi'): ns3::Ssid::Ssid(char const * ssid, uint8_t length) [constructor]
cls.add_constructor([param('char const *', 'ssid'), param('uint8_t', 'length')])
## ssid.h (module 'wifi'): uint8_t ns3::Ssid::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## ssid.h (module 'wifi'): ns3::WifiInformationElementId ns3::Ssid::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ssid.h (module 'wifi'): uint8_t ns3::Ssid::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ssid.h (module 'wifi'): bool ns3::Ssid::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ssid.h (module 'wifi'): bool ns3::Ssid::IsEqual(ns3::Ssid const & o) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ssid const &', 'o')],
is_const=True)
## ssid.h (module 'wifi'): char * ns3::Ssid::PeekString() const [member function]
cls.add_method('PeekString',
'char *',
[],
is_const=True)
## ssid.h (module 'wifi'): void ns3::Ssid::SerializeInformationField(ns3::Buffer::Iterator start) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
return
def register_Ns3SsidChecker_methods(root_module, cls):
## ssid.h (module 'wifi'): ns3::SsidChecker::SsidChecker() [constructor]
cls.add_constructor([])
## ssid.h (module 'wifi'): ns3::SsidChecker::SsidChecker(ns3::SsidChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SsidChecker const &', 'arg0')])
return
def register_Ns3SsidValue_methods(root_module, cls):
## ssid.h (module 'wifi'): ns3::SsidValue::SsidValue() [constructor]
cls.add_constructor([])
## ssid.h (module 'wifi'): ns3::SsidValue::SsidValue(ns3::SsidValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SsidValue const &', 'arg0')])
## ssid.h (module 'wifi'): ns3::SsidValue::SsidValue(ns3::Ssid const & value) [constructor]
cls.add_constructor([param('ns3::Ssid const &', 'value')])
## ssid.h (module 'wifi'): ns3::Ptr<ns3::AttributeValue> ns3::SsidValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ssid.h (module 'wifi'): bool ns3::SsidValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ssid.h (module 'wifi'): ns3::Ssid ns3::SsidValue::Get() const [member function]
cls.add_method('Get',
'ns3::Ssid',
[],
is_const=True)
## ssid.h (module 'wifi'): std::string ns3::SsidValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ssid.h (module 'wifi'): void ns3::SsidValue::Set(ns3::Ssid const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Ssid const &', 'value')])
return
def register_Ns3SupportedRates_methods(root_module, cls):
cls.add_output_stream_operator()
## supported-rates.h (module 'wifi'): ns3::SupportedRates::SupportedRates(ns3::SupportedRates const & arg0) [copy constructor]
cls.add_constructor([param('ns3::SupportedRates const &', 'arg0')])
## supported-rates.h (module 'wifi'): ns3::SupportedRates::SupportedRates() [constructor]
cls.add_constructor([])
## supported-rates.h (module 'wifi'): void ns3::SupportedRates::AddSupportedRate(uint32_t bs) [member function]
cls.add_method('AddSupportedRate',
'void',
[param('uint32_t', 'bs')])
## supported-rates.h (module 'wifi'): uint8_t ns3::SupportedRates::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## supported-rates.h (module 'wifi'): ns3::WifiInformationElementId ns3::SupportedRates::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): uint8_t ns3::SupportedRates::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): uint8_t ns3::SupportedRates::GetNRates() const [member function]
cls.add_method('GetNRates',
'uint8_t',
[],
is_const=True)
## supported-rates.h (module 'wifi'): uint32_t ns3::SupportedRates::GetRate(uint8_t i) const [member function]
cls.add_method('GetRate',
'uint32_t',
[param('uint8_t', 'i')],
is_const=True)
## supported-rates.h (module 'wifi'): bool ns3::SupportedRates::IsBasicRate(uint32_t bs) const [member function]
cls.add_method('IsBasicRate',
'bool',
[param('uint32_t', 'bs')],
is_const=True)
## supported-rates.h (module 'wifi'): bool ns3::SupportedRates::IsSupportedRate(uint32_t bs) const [member function]
cls.add_method('IsSupportedRate',
'bool',
[param('uint32_t', 'bs')],
is_const=True)
## supported-rates.h (module 'wifi'): void ns3::SupportedRates::SerializeInformationField(ns3::Buffer::Iterator start) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## supported-rates.h (module 'wifi'): void ns3::SupportedRates::SetBasicRate(uint32_t bs) [member function]
cls.add_method('SetBasicRate',
'void',
[param('uint32_t', 'bs')])
## supported-rates.h (module 'wifi'): ns3::SupportedRates::extended [variable]
cls.add_instance_attribute('extended', 'ns3::ExtendedSupportedRatesIE', is_const=False)
return
def register_Ns3TimeChecker_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeChecker::TimeChecker(ns3::TimeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeChecker const &', 'arg0')])
return
def register_Ns3TimeValue_methods(root_module, cls):
## nstime.h (module 'core'): ns3::TimeValue::TimeValue() [constructor]
cls.add_constructor([])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::TimeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TimeValue const &', 'arg0')])
## nstime.h (module 'core'): ns3::TimeValue::TimeValue(ns3::Time const & value) [constructor]
cls.add_constructor([param('ns3::Time const &', 'value')])
## nstime.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TimeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): bool ns3::TimeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## nstime.h (module 'core'): ns3::Time ns3::TimeValue::Get() const [member function]
cls.add_method('Get',
'ns3::Time',
[],
is_const=True)
## nstime.h (module 'core'): std::string ns3::TimeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## nstime.h (module 'core'): void ns3::TimeValue::Set(ns3::Time const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Time const &', 'value')])
return
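# Illustrative sketch only: the attribute-value wrapper pattern for TimeValue
# as registered above, assuming the generated 'ns.core' module is importable.
def _example_time_value_usage():
    import ns.core
    v = ns.core.TimeValue(ns.core.Seconds(1.0))  # wrap an ns3::Time
    v.Set(ns.core.Seconds(2.5))                  # replace the stored value
    return v.Get()                               # unwrap it again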
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3UintegerValue_methods(root_module, cls):
## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue() [constructor]
cls.add_constructor([])
## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(ns3::UintegerValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::UintegerValue const &', 'arg0')])
## uinteger.h (module 'core'): ns3::UintegerValue::UintegerValue(uint64_t const & value) [constructor]
cls.add_constructor([param('uint64_t const &', 'value')])
## uinteger.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::UintegerValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## uinteger.h (module 'core'): bool ns3::UintegerValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## uinteger.h (module 'core'): uint64_t ns3::UintegerValue::Get() const [member function]
cls.add_method('Get',
'uint64_t',
[],
is_const=True)
## uinteger.h (module 'core'): std::string ns3::UintegerValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## uinteger.h (module 'core'): void ns3::UintegerValue::Set(uint64_t const & value) [member function]
cls.add_method('Set',
'void',
[param('uint64_t const &', 'value')])
return
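# Illustrative sketch only: UintegerValue follows the same value-wrapper
# pattern, assuming 'ns.core' is importable.
def _example_uinteger_value_usage():
    import ns.core
    v = ns.core.UintegerValue(42)
    v.Set(100)
    return v.Get()   # -> 100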
def register_Ns3WifiModeChecker_methods(root_module, cls):
## wifi-mode.h (module 'wifi'): ns3::WifiModeChecker::WifiModeChecker() [constructor]
cls.add_constructor([])
## wifi-mode.h (module 'wifi'): ns3::WifiModeChecker::WifiModeChecker(ns3::WifiModeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiModeChecker const &', 'arg0')])
return
def register_Ns3WifiModeValue_methods(root_module, cls):
## wifi-mode.h (module 'wifi'): ns3::WifiModeValue::WifiModeValue() [constructor]
cls.add_constructor([])
## wifi-mode.h (module 'wifi'): ns3::WifiModeValue::WifiModeValue(ns3::WifiModeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::WifiModeValue const &', 'arg0')])
## wifi-mode.h (module 'wifi'): ns3::WifiModeValue::WifiModeValue(ns3::WifiMode const & value) [constructor]
cls.add_constructor([param('ns3::WifiMode const &', 'value')])
## wifi-mode.h (module 'wifi'): ns3::Ptr<ns3::AttributeValue> ns3::WifiModeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## wifi-mode.h (module 'wifi'): bool ns3::WifiModeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## wifi-mode.h (module 'wifi'): ns3::WifiMode ns3::WifiModeValue::Get() const [member function]
cls.add_method('Get',
'ns3::WifiMode',
[],
is_const=True)
## wifi-mode.h (module 'wifi'): std::string ns3::WifiModeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## wifi-mode.h (module 'wifi'): void ns3::WifiModeValue::Set(ns3::WifiMode const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::WifiMode const &', 'value')])
return
def register_Ns3AddressChecker_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressChecker::AddressChecker() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressChecker::AddressChecker(ns3::AddressChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressChecker const &', 'arg0')])
return
def register_Ns3AddressValue_methods(root_module, cls):
## address.h (module 'network'): ns3::AddressValue::AddressValue() [constructor]
cls.add_constructor([])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::AddressValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AddressValue const &', 'arg0')])
## address.h (module 'network'): ns3::AddressValue::AddressValue(ns3::Address const & value) [constructor]
cls.add_constructor([param('ns3::Address const &', 'value')])
## address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::AddressValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## address.h (module 'network'): bool ns3::AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## address.h (module 'network'): ns3::Address ns3::AddressValue::Get() const [member function]
cls.add_method('Get',
'ns3::Address',
[],
is_const=True)
## address.h (module 'network'): std::string ns3::AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## address.h (module 'network'): void ns3::AddressValue::Set(ns3::Address const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Address const &', 'value')])
return
def register_Ns3BridgeChannel_methods(root_module, cls):
## bridge-channel.h (module 'bridge'): static ns3::TypeId ns3::BridgeChannel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## bridge-channel.h (module 'bridge'): ns3::BridgeChannel::BridgeChannel() [constructor]
cls.add_constructor([])
## bridge-channel.h (module 'bridge'): void ns3::BridgeChannel::AddChannel(ns3::Ptr<ns3::Channel> bridgedChannel) [member function]
cls.add_method('AddChannel',
'void',
[param('ns3::Ptr< ns3::Channel >', 'bridgedChannel')])
## bridge-channel.h (module 'bridge'): uint32_t ns3::BridgeChannel::GetNDevices() const [member function]
cls.add_method('GetNDevices',
'uint32_t',
[],
is_const=True, is_virtual=True)
## bridge-channel.h (module 'bridge'): ns3::Ptr<ns3::NetDevice> ns3::BridgeChannel::GetDevice(uint32_t i) const [member function]
cls.add_method('GetDevice',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'i')],
is_const=True, is_virtual=True)
return
def register_Ns3DcaTxop_methods(root_module, cls):
## dca-txop.h (module 'wifi'): static ns3::TypeId ns3::DcaTxop::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## dca-txop.h (module 'wifi'): ns3::DcaTxop::DcaTxop() [constructor]
cls.add_constructor([])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetLow(ns3::Ptr<ns3::MacLow> low) [member function]
cls.add_method('SetLow',
'void',
[param('ns3::Ptr< ns3::MacLow >', 'low')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetManager(ns3::DcfManager * manager) [member function]
cls.add_method('SetManager',
'void',
[param('ns3::DcfManager *', 'manager')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetWifiRemoteStationManager(ns3::Ptr<ns3::WifiRemoteStationManager> remoteManager) [member function]
cls.add_method('SetWifiRemoteStationManager',
'void',
[param('ns3::Ptr< ns3::WifiRemoteStationManager >', 'remoteManager')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetTxOkCallback(ns3::Callback<void, ns3::WifiMacHeader const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetTxOkCallback',
'void',
[param('ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetTxFailedCallback(ns3::Callback<void, ns3::WifiMacHeader const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('SetTxFailedCallback',
'void',
[param('ns3::Callback< void, ns3::WifiMacHeader const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')])
## dca-txop.h (module 'wifi'): ns3::Ptr<ns3::WifiMacQueue> ns3::DcaTxop::GetQueue() const [member function]
cls.add_method('GetQueue',
'ns3::Ptr< ns3::WifiMacQueue >',
[],
is_const=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetMinCw(uint32_t minCw) [member function]
cls.add_method('SetMinCw',
'void',
[param('uint32_t', 'minCw')],
is_virtual=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetMaxCw(uint32_t maxCw) [member function]
cls.add_method('SetMaxCw',
'void',
[param('uint32_t', 'maxCw')],
is_virtual=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::SetAifsn(uint32_t aifsn) [member function]
cls.add_method('SetAifsn',
'void',
[param('uint32_t', 'aifsn')],
is_virtual=True)
## dca-txop.h (module 'wifi'): uint32_t ns3::DcaTxop::GetMinCw() const [member function]
cls.add_method('GetMinCw',
'uint32_t',
[],
is_const=True, is_virtual=True)
## dca-txop.h (module 'wifi'): uint32_t ns3::DcaTxop::GetMaxCw() const [member function]
cls.add_method('GetMaxCw',
'uint32_t',
[],
is_const=True, is_virtual=True)
## dca-txop.h (module 'wifi'): uint32_t ns3::DcaTxop::GetAifsn() const [member function]
cls.add_method('GetAifsn',
'uint32_t',
[],
is_const=True, is_virtual=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::Queue(ns3::Ptr<ns3::Packet const> packet, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('Queue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::WifiMacHeader const &', 'hdr')])
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='private', is_virtual=True)
## dca-txop.h (module 'wifi'): void ns3::DcaTxop::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
return
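# Illustrative sketch only: configuring the DCF contention parameters exposed
# by the DcaTxop bindings above, assuming 'ns.wifi' is importable. In a real
# simulation these objects are created and wired up by the WifiMac internals.
def _example_dca_txop_usage():
    import ns.wifi
    dca = ns.wifi.DcaTxop()
    dca.SetMinCw(15)
    dca.SetMaxCw(1023)
    dca.SetAifsn(2)
    return dca.GetMinCw(), dca.GetMaxCw(), dca.GetAifsn()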
def register_Ns3Dot11sStack_methods(root_module, cls):
## dot11s-installer.h (module 'mesh'): ns3::Dot11sStack::Dot11sStack(ns3::Dot11sStack const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Dot11sStack const &', 'arg0')])
## dot11s-installer.h (module 'mesh'): ns3::Dot11sStack::Dot11sStack() [constructor]
cls.add_constructor([])
## dot11s-installer.h (module 'mesh'): void ns3::Dot11sStack::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## dot11s-installer.h (module 'mesh'): static ns3::TypeId ns3::Dot11sStack::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## dot11s-installer.h (module 'mesh'): bool ns3::Dot11sStack::InstallStack(ns3::Ptr<ns3::MeshPointDevice> mp) [member function]
cls.add_method('InstallStack',
'bool',
[param('ns3::Ptr< ns3::MeshPointDevice >', 'mp')],
is_virtual=True)
## dot11s-installer.h (module 'mesh'): void ns3::Dot11sStack::Report(ns3::Ptr<ns3::MeshPointDevice> const mp, std::ostream & arg1) [member function]
cls.add_method('Report',
'void',
[param('ns3::Ptr< ns3::MeshPointDevice > const', 'mp'), param('std::ostream &', 'arg1')],
is_virtual=True)
## dot11s-installer.h (module 'mesh'): void ns3::Dot11sStack::ResetStats(ns3::Ptr<ns3::MeshPointDevice> const mp) [member function]
cls.add_method('ResetStats',
'void',
[param('ns3::Ptr< ns3::MeshPointDevice > const', 'mp')],
is_virtual=True)
return
def register_Ns3FlameStack_methods(root_module, cls):
## flame-installer.h (module 'mesh'): ns3::FlameStack::FlameStack(ns3::FlameStack const & arg0) [copy constructor]
cls.add_constructor([param('ns3::FlameStack const &', 'arg0')])
## flame-installer.h (module 'mesh'): ns3::FlameStack::FlameStack() [constructor]
cls.add_constructor([])
## flame-installer.h (module 'mesh'): void ns3::FlameStack::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## flame-installer.h (module 'mesh'): static ns3::TypeId ns3::FlameStack::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## flame-installer.h (module 'mesh'): bool ns3::FlameStack::InstallStack(ns3::Ptr<ns3::MeshPointDevice> mp) [member function]
cls.add_method('InstallStack',
'bool',
[param('ns3::Ptr< ns3::MeshPointDevice >', 'mp')],
is_virtual=True)
## flame-installer.h (module 'mesh'): void ns3::FlameStack::Report(ns3::Ptr<ns3::MeshPointDevice> const mp, std::ostream & arg1) [member function]
cls.add_method('Report',
'void',
[param('ns3::Ptr< ns3::MeshPointDevice > const', 'mp'), param('std::ostream &', 'arg1')],
is_virtual=True)
## flame-installer.h (module 'mesh'): void ns3::FlameStack::ResetStats(ns3::Ptr<ns3::MeshPointDevice> const mp) [member function]
cls.add_method('ResetStats',
'void',
[param('ns3::Ptr< ns3::MeshPointDevice > const', 'mp')],
is_virtual=True)
return
def register_Ns3MeshPointDevice_methods(root_module, cls):
## mesh-point-device.h (module 'mesh'): ns3::MeshPointDevice::MeshPointDevice(ns3::MeshPointDevice const & arg0) [copy constructor]
cls.add_constructor([param('ns3::MeshPointDevice const &', 'arg0')])
## mesh-point-device.h (module 'mesh'): ns3::MeshPointDevice::MeshPointDevice() [constructor]
cls.add_constructor([])
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::AddInterface(ns3::Ptr<ns3::NetDevice> port) [member function]
cls.add_method('AddInterface',
'void',
[param('ns3::Ptr< ns3::NetDevice >', 'port')])
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
cls.add_method('AddLinkChangeCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): ns3::Address ns3::MeshPointDevice::GetAddress() const [member function]
cls.add_method('GetAddress',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): ns3::Address ns3::MeshPointDevice::GetBroadcast() const [member function]
cls.add_method('GetBroadcast',
'ns3::Address',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): ns3::Ptr<ns3::Channel> ns3::MeshPointDevice::GetChannel() const [member function]
cls.add_method('GetChannel',
'ns3::Ptr< ns3::Channel >',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): uint32_t ns3::MeshPointDevice::GetIfIndex() const [member function]
cls.add_method('GetIfIndex',
'uint32_t',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): ns3::Ptr<ns3::NetDevice> ns3::MeshPointDevice::GetInterface(uint32_t id) const [member function]
cls.add_method('GetInterface',
'ns3::Ptr< ns3::NetDevice >',
[param('uint32_t', 'id')],
is_const=True)
## mesh-point-device.h (module 'mesh'): std::vector<ns3::Ptr<ns3::NetDevice>, std::allocator<ns3::Ptr<ns3::NetDevice> > > ns3::MeshPointDevice::GetInterfaces() const [member function]
cls.add_method('GetInterfaces',
'std::vector< ns3::Ptr< ns3::NetDevice > >',
[],
is_const=True)
## mesh-point-device.h (module 'mesh'): uint16_t ns3::MeshPointDevice::GetMtu() const [member function]
cls.add_method('GetMtu',
'uint16_t',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): ns3::Address ns3::MeshPointDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv4Address', 'multicastGroup')],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): ns3::Address ns3::MeshPointDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
cls.add_method('GetMulticast',
'ns3::Address',
[param('ns3::Ipv6Address', 'addr')],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): uint32_t ns3::MeshPointDevice::GetNInterfaces() const [member function]
cls.add_method('GetNInterfaces',
'uint32_t',
[],
is_const=True)
## mesh-point-device.h (module 'mesh'): ns3::Ptr<ns3::Node> ns3::MeshPointDevice::GetNode() const [member function]
cls.add_method('GetNode',
'ns3::Ptr< ns3::Node >',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): ns3::Ptr<ns3::MeshL2RoutingProtocol> ns3::MeshPointDevice::GetRoutingProtocol() const [member function]
cls.add_method('GetRoutingProtocol',
'ns3::Ptr< ns3::MeshL2RoutingProtocol >',
[],
is_const=True)
## mesh-point-device.h (module 'mesh'): static ns3::TypeId ns3::MeshPointDevice::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::IsBridge() const [member function]
cls.add_method('IsBridge',
'bool',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::IsLinkUp() const [member function]
cls.add_method('IsLinkUp',
'bool',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::IsMulticast() const [member function]
cls.add_method('IsMulticast',
'bool',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::IsPointToPoint() const [member function]
cls.add_method('IsPointToPoint',
'bool',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::NeedsArp() const [member function]
cls.add_method('NeedsArp',
'bool',
[],
is_const=True, is_virtual=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::Report(std::ostream & os) const [member function]
cls.add_method('Report',
'void',
[param('std::ostream &', 'os')],
is_const=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::ResetStats() [member function]
cls.add_method('ResetStats',
'void',
[])
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('Send',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
cls.add_method('SendFrom',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::SetAddress(ns3::Address a) [member function]
cls.add_method('SetAddress',
'void',
[param('ns3::Address', 'a')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::SetIfIndex(uint32_t const index) [member function]
cls.add_method('SetIfIndex',
'void',
[param('uint32_t const', 'index')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::SetMtu(uint16_t const mtu) [member function]
cls.add_method('SetMtu',
'bool',
[param('uint16_t const', 'mtu')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::SetPromiscReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::Address const&, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPromiscReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::SetReceiveCallback(ns3::Callback<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<ns3::Packet const>, unsigned short, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetReceiveCallback',
'void',
[param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
is_virtual=True)
## mesh-point-device.h (module 'mesh'): void ns3::MeshPointDevice::SetRoutingProtocol(ns3::Ptr<ns3::MeshL2RoutingProtocol> protocol) [member function]
cls.add_method('SetRoutingProtocol',
'void',
[param('ns3::Ptr< ns3::MeshL2RoutingProtocol >', 'protocol')])
## mesh-point-device.h (module 'mesh'): bool ns3::MeshPointDevice::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
return
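# Illustrative sketch only: attaching a bare MeshPointDevice to a node,
# assuming the generated 'ns.network' and 'ns.mesh' modules are importable.
# Interfaces and an L2 routing protocol are normally installed by MeshHelper
# rather than by hand.
def _example_mesh_point_device_usage():
    import ns.network
    import ns.mesh
    node = ns.network.Node()
    mp = ns.mesh.MeshPointDevice()
    mp.SetNode(node)
    return mp.GetNInterfaces()   # no interfaces added yet -> 0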
def register_Ns3MeshWifiInterfaceMac_methods(root_module, cls):
## mesh-wifi-interface-mac.h (module 'mesh'): ns3::MeshWifiInterfaceMac::MeshWifiInterfaceMac() [constructor]
cls.add_constructor([])
## mesh-wifi-interface-mac.h (module 'mesh'): bool ns3::MeshWifiInterfaceMac::CheckSupportedRates(ns3::SupportedRates rates) const [member function]
cls.add_method('CheckSupportedRates',
'bool',
[param('ns3::SupportedRates', 'rates')],
is_const=True)
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to, ns3::Mac48Address from) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to'), param('ns3::Mac48Address', 'from')],
is_virtual=True)
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::Enqueue(ns3::Ptr<ns3::Packet const> packet, ns3::Mac48Address to) [member function]
cls.add_method('Enqueue',
'void',
[param('ns3::Ptr< ns3::Packet const >', 'packet'), param('ns3::Mac48Address', 'to')],
is_virtual=True)
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::FinishConfigureStandard(ns3::WifiPhyStandard standard) [member function]
cls.add_method('FinishConfigureStandard',
'void',
[param('ns3::WifiPhyStandard', 'standard')],
is_virtual=True)
## mesh-wifi-interface-mac.h (module 'mesh'): ns3::Time ns3::MeshWifiInterfaceMac::GetBeaconInterval() const [member function]
cls.add_method('GetBeaconInterval',
'ns3::Time',
[],
is_const=True)
## mesh-wifi-interface-mac.h (module 'mesh'): uint16_t ns3::MeshWifiInterfaceMac::GetFrequencyChannel() const [member function]
cls.add_method('GetFrequencyChannel',
'uint16_t',
[],
is_const=True)
## mesh-wifi-interface-mac.h (module 'mesh'): uint32_t ns3::MeshWifiInterfaceMac::GetLinkMetric(ns3::Mac48Address peerAddress) [member function]
cls.add_method('GetLinkMetric',
'uint32_t',
[param('ns3::Mac48Address', 'peerAddress')])
## mesh-wifi-interface-mac.h (module 'mesh'): ns3::Mac48Address ns3::MeshWifiInterfaceMac::GetMeshPointAddress() const [member function]
cls.add_method('GetMeshPointAddress',
'ns3::Mac48Address',
[],
is_const=True)
## mesh-wifi-interface-mac.h (module 'mesh'): ns3::WifiPhyStandard ns3::MeshWifiInterfaceMac::GetPhyStandard() const [member function]
cls.add_method('GetPhyStandard',
'ns3::WifiPhyStandard',
[],
is_const=True)
## mesh-wifi-interface-mac.h (module 'mesh'): ns3::SupportedRates ns3::MeshWifiInterfaceMac::GetSupportedRates() const [member function]
cls.add_method('GetSupportedRates',
'ns3::SupportedRates',
[],
is_const=True)
## mesh-wifi-interface-mac.h (module 'mesh'): ns3::Time ns3::MeshWifiInterfaceMac::GetTbtt() const [member function]
cls.add_method('GetTbtt',
'ns3::Time',
[],
is_const=True)
## mesh-wifi-interface-mac.h (module 'mesh'): static ns3::TypeId ns3::MeshWifiInterfaceMac::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::InstallPlugin(ns3::Ptr<ns3::MeshWifiInterfaceMacPlugin> plugin) [member function]
cls.add_method('InstallPlugin',
'void',
[param('ns3::Ptr< ns3::MeshWifiInterfaceMacPlugin >', 'plugin')])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::Report(std::ostream & arg0) const [member function]
cls.add_method('Report',
'void',
[param('std::ostream &', 'arg0')],
is_const=True)
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::ResetStats() [member function]
cls.add_method('ResetStats',
'void',
[])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::SendManagementFrame(ns3::Ptr<ns3::Packet> frame, ns3::WifiMacHeader const & hdr) [member function]
cls.add_method('SendManagementFrame',
'void',
[param('ns3::Ptr< ns3::Packet >', 'frame'), param('ns3::WifiMacHeader const &', 'hdr')])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::SetBeaconGeneration(bool enable) [member function]
cls.add_method('SetBeaconGeneration',
'void',
[param('bool', 'enable')])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::SetBeaconInterval(ns3::Time interval) [member function]
cls.add_method('SetBeaconInterval',
'void',
[param('ns3::Time', 'interval')])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::SetLinkMetricCallback(ns3::Callback<unsigned int, ns3::Mac48Address, ns3::Ptr<ns3::MeshWifiInterfaceMac>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetLinkMetricCallback',
'void',
[param('ns3::Callback< unsigned int, ns3::Mac48Address, ns3::Ptr< ns3::MeshWifiInterfaceMac >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::SetLinkUpCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> linkUp) [member function]
cls.add_method('SetLinkUpCallback',
'void',
[param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'linkUp')],
is_virtual=True)
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::SetMeshPointAddress(ns3::Mac48Address arg0) [member function]
cls.add_method('SetMeshPointAddress',
'void',
[param('ns3::Mac48Address', 'arg0')])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::SetRandomStartDelay(ns3::Time interval) [member function]
cls.add_method('SetRandomStartDelay',
'void',
[param('ns3::Time', 'interval')])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::ShiftTbtt(ns3::Time shift) [member function]
cls.add_method('ShiftTbtt',
'void',
[param('ns3::Time', 'shift')])
## mesh-wifi-interface-mac.h (module 'mesh'): bool ns3::MeshWifiInterfaceMac::SupportsSendFrom() const [member function]
cls.add_method('SupportsSendFrom',
'bool',
[],
is_const=True, is_virtual=True)
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::SwitchFrequencyChannel(uint16_t new_id) [member function]
cls.add_method('SwitchFrequencyChannel',
'void',
[param('uint16_t', 'new_id')])
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## mesh-wifi-interface-mac.h (module 'mesh'): void ns3::MeshWifiInterfaceMac::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const * hdr) [member function]
cls.add_method('Receive',
'void',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const *', 'hdr')],
visibility='private', is_virtual=True)
return
def register_Ns3Dot11sDestinationAddressUnit_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## ie-dot11s-preq.h (module 'mesh'): ns3::dot11s::DestinationAddressUnit::DestinationAddressUnit(ns3::dot11s::DestinationAddressUnit const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::DestinationAddressUnit const &', 'arg0')])
## ie-dot11s-preq.h (module 'mesh'): ns3::dot11s::DestinationAddressUnit::DestinationAddressUnit() [constructor]
cls.add_constructor([])
## ie-dot11s-preq.h (module 'mesh'): uint32_t ns3::dot11s::DestinationAddressUnit::GetDestSeqNumber() const [member function]
cls.add_method('GetDestSeqNumber',
'uint32_t',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::DestinationAddressUnit::GetDestinationAddress() const [member function]
cls.add_method('GetDestinationAddress',
'ns3::Mac48Address',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): bool ns3::dot11s::DestinationAddressUnit::IsDo() [member function]
cls.add_method('IsDo',
'bool',
[])
## ie-dot11s-preq.h (module 'mesh'): bool ns3::dot11s::DestinationAddressUnit::IsRf() [member function]
cls.add_method('IsRf',
'bool',
[])
## ie-dot11s-preq.h (module 'mesh'): bool ns3::dot11s::DestinationAddressUnit::IsUsn() [member function]
cls.add_method('IsUsn',
'bool',
[])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::DestinationAddressUnit::SetDestSeqNumber(uint32_t dest_seq_number) [member function]
cls.add_method('SetDestSeqNumber',
'void',
[param('uint32_t', 'dest_seq_number')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::DestinationAddressUnit::SetDestinationAddress(ns3::Mac48Address dest_address) [member function]
cls.add_method('SetDestinationAddress',
'void',
[param('ns3::Mac48Address', 'dest_address')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::DestinationAddressUnit::SetFlags(bool doFlag, bool rfFlag, bool usnFlag) [member function]
cls.add_method('SetFlags',
'void',
[param('bool', 'doFlag'), param('bool', 'rfFlag'), param('bool', 'usnFlag')])
return
def register_Ns3Dot11sDot11sMeshCapability_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::Dot11sMeshCapability(ns3::dot11s::Dot11sMeshCapability const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::Dot11sMeshCapability const &', 'arg0')])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::Dot11sMeshCapability() [constructor]
cls.add_constructor([])
## ie-dot11s-configuration.h (module 'mesh'): ns3::Buffer::Iterator ns3::dot11s::Dot11sMeshCapability::Deserialize(ns3::Buffer::Iterator i) [member function]
cls.add_method('Deserialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'i')])
## ie-dot11s-configuration.h (module 'mesh'): uint8_t ns3::dot11s::Dot11sMeshCapability::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint8_t',
[],
is_const=True)
## ie-dot11s-configuration.h (module 'mesh'): uint16_t ns3::dot11s::Dot11sMeshCapability::GetUint16() const [member function]
cls.add_method('GetUint16',
'uint16_t',
[],
is_const=True)
## ie-dot11s-configuration.h (module 'mesh'): bool ns3::dot11s::Dot11sMeshCapability::Is(uint16_t cap, uint8_t n) const [member function]
cls.add_method('Is',
'bool',
[param('uint16_t', 'cap'), param('uint8_t', 'n')],
is_const=True)
## ie-dot11s-configuration.h (module 'mesh'): ns3::Buffer::Iterator ns3::dot11s::Dot11sMeshCapability::Serialize(ns3::Buffer::Iterator i) const [member function]
cls.add_method('Serialize',
'ns3::Buffer::Iterator',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True)
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::MCCAEnabled [variable]
cls.add_instance_attribute('MCCAEnabled', 'bool', is_const=False)
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::MCCASupported [variable]
cls.add_instance_attribute('MCCASupported', 'bool', is_const=False)
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::TBTTAdjustment [variable]
cls.add_instance_attribute('TBTTAdjustment', 'bool', is_const=False)
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::acceptPeerLinks [variable]
cls.add_instance_attribute('acceptPeerLinks', 'bool', is_const=False)
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::beaconTimingReport [variable]
cls.add_instance_attribute('beaconTimingReport', 'bool', is_const=False)
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::forwarding [variable]
cls.add_instance_attribute('forwarding', 'bool', is_const=False)
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability::powerSaveLevel [variable]
cls.add_instance_attribute('powerSaveLevel', 'bool', is_const=False)
return
def register_Ns3Dot11sHwmpProtocol_methods(root_module, cls):
## hwmp-protocol.h (module 'mesh'): ns3::dot11s::HwmpProtocol::HwmpProtocol() [constructor]
cls.add_constructor([])
## hwmp-protocol.h (module 'mesh'): void ns3::dot11s::HwmpProtocol::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## hwmp-protocol.h (module 'mesh'): static ns3::TypeId ns3::dot11s::HwmpProtocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## hwmp-protocol.h (module 'mesh'): bool ns3::dot11s::HwmpProtocol::Install(ns3::Ptr<ns3::MeshPointDevice> arg0) [member function]
cls.add_method('Install',
'bool',
[param('ns3::Ptr< ns3::MeshPointDevice >', 'arg0')])
## hwmp-protocol.h (module 'mesh'): void ns3::dot11s::HwmpProtocol::PeerLinkStatus(ns3::Mac48Address meshPontAddress, ns3::Mac48Address peerAddress, uint32_t interface, bool status) [member function]
cls.add_method('PeerLinkStatus',
'void',
[param('ns3::Mac48Address', 'meshPontAddress'), param('ns3::Mac48Address', 'peerAddress'), param('uint32_t', 'interface'), param('bool', 'status')])
## hwmp-protocol.h (module 'mesh'): bool ns3::dot11s::HwmpProtocol::RemoveRoutingStuff(uint32_t fromIface, ns3::Mac48Address const source, ns3::Mac48Address const destination, ns3::Ptr<ns3::Packet> packet, uint16_t & protocolType) [member function]
cls.add_method('RemoveRoutingStuff',
'bool',
[param('uint32_t', 'fromIface'), param('ns3::Mac48Address const', 'source'), param('ns3::Mac48Address const', 'destination'), param('ns3::Ptr< ns3::Packet >', 'packet'), param('uint16_t &', 'protocolType')],
is_virtual=True)
## hwmp-protocol.h (module 'mesh'): void ns3::dot11s::HwmpProtocol::Report(std::ostream & arg0) const [member function]
cls.add_method('Report',
'void',
[param('std::ostream &', 'arg0')],
is_const=True)
## hwmp-protocol.h (module 'mesh'): bool ns3::dot11s::HwmpProtocol::RequestRoute(uint32_t sourceIface, ns3::Mac48Address const source, ns3::Mac48Address const destination, ns3::Ptr<ns3::Packet const> packet, uint16_t protocolType, ns3::Callback<void, bool, ns3::Ptr<ns3::Packet>, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty> routeReply) [member function]
cls.add_method('RequestRoute',
'bool',
[param('uint32_t', 'sourceIface'), param('ns3::Mac48Address const', 'source'), param('ns3::Mac48Address const', 'destination'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocolType'), param('ns3::Callback< void, bool, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty >', 'routeReply')],
is_virtual=True)
## hwmp-protocol.h (module 'mesh'): void ns3::dot11s::HwmpProtocol::ResetStats() [member function]
cls.add_method('ResetStats',
'void',
[])
## hwmp-protocol.h (module 'mesh'): void ns3::dot11s::HwmpProtocol::SetNeighboursCallback(ns3::Callback<std::vector<ns3::Mac48Address, std::allocator<ns3::Mac48Address> >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetNeighboursCallback',
'void',
[param('ns3::Callback< std::vector< ns3::Mac48Address >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')])
## hwmp-protocol.h (module 'mesh'): void ns3::dot11s::HwmpProtocol::SetRoot() [member function]
cls.add_method('SetRoot',
'void',
[])
## hwmp-protocol.h (module 'mesh'): void ns3::dot11s::HwmpProtocol::UnsetRoot() [member function]
cls.add_method('UnsetRoot',
'void',
[])
return
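# Illustrative sketch only: the HWMP protocol object registered above can be
# created directly and marked as a root mesh STA; in practice it is installed
# on a MeshPointDevice by Dot11sStack::InstallStack. Assumes the dot11s
# namespace is exposed as the 'ns.mesh.dot11s' submodule.
def _example_hwmp_protocol_usage():
    import ns.mesh
    hwmp = ns.mesh.dot11s.HwmpProtocol()
    hwmp.SetRoot()     # enable proactive (root-announcement) PREQ generation
    hwmp.UnsetRoot()
    return hwmp.GetTypeId().GetName()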
def register_Ns3Dot11sHwmpProtocolFailedDestination_methods(root_module, cls):
## hwmp-protocol.h (module 'mesh'): ns3::dot11s::HwmpProtocol::FailedDestination::FailedDestination() [constructor]
cls.add_constructor([])
## hwmp-protocol.h (module 'mesh'): ns3::dot11s::HwmpProtocol::FailedDestination::FailedDestination(ns3::dot11s::HwmpProtocol::FailedDestination const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::HwmpProtocol::FailedDestination const &', 'arg0')])
## hwmp-protocol.h (module 'mesh'): ns3::dot11s::HwmpProtocol::FailedDestination::destination [variable]
cls.add_instance_attribute('destination', 'ns3::Mac48Address', is_const=False)
## hwmp-protocol.h (module 'mesh'): ns3::dot11s::HwmpProtocol::FailedDestination::seqnum [variable]
cls.add_instance_attribute('seqnum', 'uint32_t', is_const=False)
return
def register_Ns3Dot11sHwmpRtable_methods(root_module, cls):
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::HwmpRtable(ns3::dot11s::HwmpRtable const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::HwmpRtable const &', 'arg0')])
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::HwmpRtable() [constructor]
cls.add_constructor([])
## hwmp-rtable.h (module 'mesh'): void ns3::dot11s::HwmpRtable::AddPrecursor(ns3::Mac48Address destination, uint32_t precursorInterface, ns3::Mac48Address precursorAddress, ns3::Time lifetime) [member function]
cls.add_method('AddPrecursor',
'void',
[param('ns3::Mac48Address', 'destination'), param('uint32_t', 'precursorInterface'), param('ns3::Mac48Address', 'precursorAddress'), param('ns3::Time', 'lifetime')])
## hwmp-rtable.h (module 'mesh'): void ns3::dot11s::HwmpRtable::AddProactivePath(uint32_t metric, ns3::Mac48Address root, ns3::Mac48Address retransmitter, uint32_t interface, ns3::Time lifetime, uint32_t seqnum) [member function]
cls.add_method('AddProactivePath',
'void',
[param('uint32_t', 'metric'), param('ns3::Mac48Address', 'root'), param('ns3::Mac48Address', 'retransmitter'), param('uint32_t', 'interface'), param('ns3::Time', 'lifetime'), param('uint32_t', 'seqnum')])
## hwmp-rtable.h (module 'mesh'): void ns3::dot11s::HwmpRtable::AddReactivePath(ns3::Mac48Address destination, ns3::Mac48Address retransmitter, uint32_t interface, uint32_t metric, ns3::Time lifetime, uint32_t seqnum) [member function]
cls.add_method('AddReactivePath',
'void',
[param('ns3::Mac48Address', 'destination'), param('ns3::Mac48Address', 'retransmitter'), param('uint32_t', 'interface'), param('uint32_t', 'metric'), param('ns3::Time', 'lifetime'), param('uint32_t', 'seqnum')])
## hwmp-rtable.h (module 'mesh'): void ns3::dot11s::HwmpRtable::DeleteProactivePath() [member function]
cls.add_method('DeleteProactivePath',
'void',
[])
## hwmp-rtable.h (module 'mesh'): void ns3::dot11s::HwmpRtable::DeleteProactivePath(ns3::Mac48Address root) [member function]
cls.add_method('DeleteProactivePath',
'void',
[param('ns3::Mac48Address', 'root')])
## hwmp-rtable.h (module 'mesh'): void ns3::dot11s::HwmpRtable::DeleteReactivePath(ns3::Mac48Address destination) [member function]
cls.add_method('DeleteReactivePath',
'void',
[param('ns3::Mac48Address', 'destination')])
## hwmp-rtable.h (module 'mesh'): void ns3::dot11s::HwmpRtable::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## hwmp-rtable.h (module 'mesh'): std::vector<std::pair<unsigned int, ns3::Mac48Address>, std::allocator<std::pair<unsigned int, ns3::Mac48Address> > > ns3::dot11s::HwmpRtable::GetPrecursors(ns3::Mac48Address destination) [member function]
cls.add_method('GetPrecursors',
'std::vector< std::pair< unsigned int, ns3::Mac48Address > >',
[param('ns3::Mac48Address', 'destination')])
## hwmp-rtable.h (module 'mesh'): static ns3::TypeId ns3::dot11s::HwmpRtable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## hwmp-rtable.h (module 'mesh'): std::vector<ns3::dot11s::HwmpProtocol::FailedDestination, std::allocator<ns3::dot11s::HwmpProtocol::FailedDestination> > ns3::dot11s::HwmpRtable::GetUnreachableDestinations(ns3::Mac48Address peerAddress) [member function]
cls.add_method('GetUnreachableDestinations',
'std::vector< ns3::dot11s::HwmpProtocol::FailedDestination >',
[param('ns3::Mac48Address', 'peerAddress')])
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult ns3::dot11s::HwmpRtable::LookupProactive() [member function]
cls.add_method('LookupProactive',
'ns3::dot11s::HwmpRtable::LookupResult',
[])
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult ns3::dot11s::HwmpRtable::LookupProactiveExpired() [member function]
cls.add_method('LookupProactiveExpired',
'ns3::dot11s::HwmpRtable::LookupResult',
[])
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult ns3::dot11s::HwmpRtable::LookupReactive(ns3::Mac48Address destination) [member function]
cls.add_method('LookupReactive',
'ns3::dot11s::HwmpRtable::LookupResult',
[param('ns3::Mac48Address', 'destination')])
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult ns3::dot11s::HwmpRtable::LookupReactiveExpired(ns3::Mac48Address destination) [member function]
cls.add_method('LookupReactiveExpired',
'ns3::dot11s::HwmpRtable::LookupResult',
[param('ns3::Mac48Address', 'destination')])
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::INTERFACE_ANY [variable]
cls.add_static_attribute('INTERFACE_ANY', 'uint32_t const', is_const=True)
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::MAX_METRIC [variable]
cls.add_static_attribute('MAX_METRIC', 'uint32_t const', is_const=True)
return
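# Illustrative sketch only: exercising the HWMP routing table registered
# above, assuming 'ns.core', 'ns.network' and 'ns.mesh' are importable.
def _example_hwmp_rtable_usage():
    import ns.core
    import ns.network
    import ns.mesh
    rtable = ns.mesh.dot11s.HwmpRtable()
    dst = ns.network.Mac48Address("00:00:00:00:00:01")
    hop = ns.network.Mac48Address("00:00:00:00:00:02")
    # arguments: destination, retransmitter, interface, metric, lifetime, seqnum
    rtable.AddReactivePath(dst, hop, 1, 10, ns.core.Seconds(5.0), 1)
    result = rtable.LookupReactive(dst)
    return result.IsValid(), result.retransmitter, result.metric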
def register_Ns3Dot11sHwmpRtableLookupResult_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult::LookupResult(ns3::dot11s::HwmpRtable::LookupResult const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::HwmpRtable::LookupResult const &', 'arg0')])
    ## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult::LookupResult(ns3::Mac48Address r=ns3::Mac48Address::GetBroadcast(), uint32_t i=ns3::dot11s::HwmpRtable::INTERFACE_ANY, uint32_t m=ns3::dot11s::HwmpRtable::MAX_METRIC, uint32_t s=0, ns3::Time l=ns3::Seconds(0)) [constructor]
cls.add_constructor([param('ns3::Mac48Address', 'r', default_value='ns3::Mac48Address::GetBroadcast()'), param('uint32_t', 'i', default_value='ns3::dot11s::HwmpRtable::INTERFACE_ANY'), param('uint32_t', 'm', default_value='ns3::dot11s::HwmpRtable::MAX_METRIC'), param('uint32_t', 's', default_value='0'), param('ns3::Time', 'l', default_value='ns3::Seconds(0)')])
## hwmp-rtable.h (module 'mesh'): bool ns3::dot11s::HwmpRtable::LookupResult::IsValid() const [member function]
cls.add_method('IsValid',
'bool',
[],
is_const=True)
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult::ifIndex [variable]
cls.add_instance_attribute('ifIndex', 'uint32_t', is_const=False)
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult::lifetime [variable]
cls.add_instance_attribute('lifetime', 'ns3::Time', is_const=False)
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult::metric [variable]
cls.add_instance_attribute('metric', 'uint32_t', is_const=False)
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult::retransmitter [variable]
cls.add_instance_attribute('retransmitter', 'ns3::Mac48Address', is_const=False)
## hwmp-rtable.h (module 'mesh'): ns3::dot11s::HwmpRtable::LookupResult::seqnum [variable]
cls.add_instance_attribute('seqnum', 'uint32_t', is_const=False)
return
def register_Ns3Dot11sIeBeaconTiming_methods(root_module, cls):
cls.add_output_stream_operator()
## ie-dot11s-beacon-timing.h (module 'mesh'): ns3::dot11s::IeBeaconTiming::IeBeaconTiming(ns3::dot11s::IeBeaconTiming const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IeBeaconTiming const &', 'arg0')])
## ie-dot11s-beacon-timing.h (module 'mesh'): ns3::dot11s::IeBeaconTiming::IeBeaconTiming() [constructor]
cls.add_constructor([])
## ie-dot11s-beacon-timing.h (module 'mesh'): void ns3::dot11s::IeBeaconTiming::AddNeighboursTimingElementUnit(uint16_t aid, ns3::Time last_beacon, ns3::Time beacon_interval) [member function]
cls.add_method('AddNeighboursTimingElementUnit',
'void',
[param('uint16_t', 'aid'), param('ns3::Time', 'last_beacon'), param('ns3::Time', 'beacon_interval')])
## ie-dot11s-beacon-timing.h (module 'mesh'): void ns3::dot11s::IeBeaconTiming::ClearTimingElement() [member function]
cls.add_method('ClearTimingElement',
'void',
[])
## ie-dot11s-beacon-timing.h (module 'mesh'): void ns3::dot11s::IeBeaconTiming::DelNeighboursTimingElementUnit(uint16_t aid, ns3::Time last_beacon, ns3::Time beacon_interval) [member function]
cls.add_method('DelNeighboursTimingElementUnit',
'void',
[param('uint16_t', 'aid'), param('ns3::Time', 'last_beacon'), param('ns3::Time', 'beacon_interval')])
## ie-dot11s-beacon-timing.h (module 'mesh'): uint8_t ns3::dot11s::IeBeaconTiming::DeserializeInformationField(ns3::Buffer::Iterator i, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'i'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-beacon-timing.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IeBeaconTiming::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-beacon-timing.h (module 'mesh'): uint8_t ns3::dot11s::IeBeaconTiming::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-beacon-timing.h (module 'mesh'): std::vector<ns3::Ptr<ns3::dot11s::IeBeaconTimingUnit>, std::allocator<ns3::Ptr<ns3::dot11s::IeBeaconTimingUnit> > > ns3::dot11s::IeBeaconTiming::GetNeighboursTimingElementsList() [member function]
cls.add_method('GetNeighboursTimingElementsList',
'std::vector< ns3::Ptr< ns3::dot11s::IeBeaconTimingUnit > >',
[])
## ie-dot11s-beacon-timing.h (module 'mesh'): void ns3::dot11s::IeBeaconTiming::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-beacon-timing.h (module 'mesh'): void ns3::dot11s::IeBeaconTiming::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
return
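# Illustrative sketch only: building a beacon timing element with the API
# registered above, assuming 'ns.core' and 'ns.mesh' are importable.
def _example_ie_beacon_timing_usage():
    import ns.core
    import ns.mesh
    ie = ns.mesh.dot11s.IeBeaconTiming()
    # arguments: aid, time of last beacon, beacon interval
    ie.AddNeighboursTimingElementUnit(1, ns.core.MicroSeconds(1024),
                                      ns.core.MicroSeconds(102400))
    return len(ie.GetNeighboursTimingElementsList())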
def register_Ns3Dot11sIeBeaconTimingUnit_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## ie-dot11s-beacon-timing.h (module 'mesh'): ns3::dot11s::IeBeaconTimingUnit::IeBeaconTimingUnit(ns3::dot11s::IeBeaconTimingUnit const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IeBeaconTimingUnit const &', 'arg0')])
## ie-dot11s-beacon-timing.h (module 'mesh'): ns3::dot11s::IeBeaconTimingUnit::IeBeaconTimingUnit() [constructor]
cls.add_constructor([])
## ie-dot11s-beacon-timing.h (module 'mesh'): uint8_t ns3::dot11s::IeBeaconTimingUnit::GetAid() const [member function]
cls.add_method('GetAid',
'uint8_t',
[],
is_const=True)
## ie-dot11s-beacon-timing.h (module 'mesh'): uint16_t ns3::dot11s::IeBeaconTimingUnit::GetBeaconInterval() const [member function]
cls.add_method('GetBeaconInterval',
'uint16_t',
[],
is_const=True)
## ie-dot11s-beacon-timing.h (module 'mesh'): uint16_t ns3::dot11s::IeBeaconTimingUnit::GetLastBeacon() const [member function]
cls.add_method('GetLastBeacon',
'uint16_t',
[],
is_const=True)
## ie-dot11s-beacon-timing.h (module 'mesh'): void ns3::dot11s::IeBeaconTimingUnit::SetAid(uint8_t aid) [member function]
cls.add_method('SetAid',
'void',
[param('uint8_t', 'aid')])
## ie-dot11s-beacon-timing.h (module 'mesh'): void ns3::dot11s::IeBeaconTimingUnit::SetBeaconInterval(uint16_t beaconInterval) [member function]
cls.add_method('SetBeaconInterval',
'void',
[param('uint16_t', 'beaconInterval')])
## ie-dot11s-beacon-timing.h (module 'mesh'): void ns3::dot11s::IeBeaconTimingUnit::SetLastBeacon(uint16_t lastBeacon) [member function]
cls.add_method('SetLastBeacon',
'void',
[param('uint16_t', 'lastBeacon')])
return
def register_Ns3Dot11sIeConfiguration_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::IeConfiguration::IeConfiguration(ns3::dot11s::IeConfiguration const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IeConfiguration const &', 'arg0')])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::IeConfiguration::IeConfiguration() [constructor]
cls.add_constructor([])
## ie-dot11s-configuration.h (module 'mesh'): uint8_t ns3::dot11s::IeConfiguration::DeserializeInformationField(ns3::Buffer::Iterator i, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'i'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-configuration.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IeConfiguration::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-configuration.h (module 'mesh'): uint8_t ns3::dot11s::IeConfiguration::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-configuration.h (module 'mesh'): uint8_t ns3::dot11s::IeConfiguration::GetNeighborCount() [member function]
cls.add_method('GetNeighborCount',
'uint8_t',
[])
## ie-dot11s-configuration.h (module 'mesh'): bool ns3::dot11s::IeConfiguration::IsAirtime() [member function]
cls.add_method('IsAirtime',
'bool',
[])
## ie-dot11s-configuration.h (module 'mesh'): bool ns3::dot11s::IeConfiguration::IsHWMP() [member function]
cls.add_method('IsHWMP',
'bool',
[])
## ie-dot11s-configuration.h (module 'mesh'): ns3::dot11s::Dot11sMeshCapability const & ns3::dot11s::IeConfiguration::MeshCapability() [member function]
cls.add_method('MeshCapability',
'ns3::dot11s::Dot11sMeshCapability const &',
[])
## ie-dot11s-configuration.h (module 'mesh'): void ns3::dot11s::IeConfiguration::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-configuration.h (module 'mesh'): void ns3::dot11s::IeConfiguration::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
## ie-dot11s-configuration.h (module 'mesh'): void ns3::dot11s::IeConfiguration::SetMetric(ns3::dot11s::dot11sPathSelectionMetric metricId) [member function]
cls.add_method('SetMetric',
'void',
[param('ns3::dot11s::dot11sPathSelectionMetric', 'metricId')])
## ie-dot11s-configuration.h (module 'mesh'): void ns3::dot11s::IeConfiguration::SetNeighborCount(uint8_t neighbors) [member function]
cls.add_method('SetNeighborCount',
'void',
[param('uint8_t', 'neighbors')])
## ie-dot11s-configuration.h (module 'mesh'): void ns3::dot11s::IeConfiguration::SetRouting(ns3::dot11s::dot11sPathSelectionProtocol routingId) [member function]
cls.add_method('SetRouting',
'void',
[param('ns3::dot11s::dot11sPathSelectionProtocol', 'routingId')])
return
def register_Ns3Dot11sIeLinkMetricReport_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('>')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ie-dot11s-metric-report.h (module 'mesh'): ns3::dot11s::IeLinkMetricReport::IeLinkMetricReport(ns3::dot11s::IeLinkMetricReport const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IeLinkMetricReport const &', 'arg0')])
## ie-dot11s-metric-report.h (module 'mesh'): ns3::dot11s::IeLinkMetricReport::IeLinkMetricReport() [constructor]
cls.add_constructor([])
## ie-dot11s-metric-report.h (module 'mesh'): ns3::dot11s::IeLinkMetricReport::IeLinkMetricReport(uint32_t metric) [constructor]
cls.add_constructor([param('uint32_t', 'metric')])
## ie-dot11s-metric-report.h (module 'mesh'): uint8_t ns3::dot11s::IeLinkMetricReport::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-metric-report.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IeLinkMetricReport::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-metric-report.h (module 'mesh'): uint8_t ns3::dot11s::IeLinkMetricReport::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-metric-report.h (module 'mesh'): uint32_t ns3::dot11s::IeLinkMetricReport::GetMetric() [member function]
cls.add_method('GetMetric',
'uint32_t',
[])
## ie-dot11s-metric-report.h (module 'mesh'): void ns3::dot11s::IeLinkMetricReport::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-metric-report.h (module 'mesh'): void ns3::dot11s::IeLinkMetricReport::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
## ie-dot11s-metric-report.h (module 'mesh'): void ns3::dot11s::IeLinkMetricReport::SetMetric(uint32_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint32_t', 'metric')])
return
def register_Ns3Dot11sIeMeshId_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshId::IeMeshId(ns3::dot11s::IeMeshId const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IeMeshId const &', 'arg0')])
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshId::IeMeshId() [constructor]
cls.add_constructor([])
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshId::IeMeshId(std::string s) [constructor]
cls.add_constructor([param('std::string', 's')])
## ie-dot11s-id.h (module 'mesh'): uint8_t ns3::dot11s::IeMeshId::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-id.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IeMeshId::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-id.h (module 'mesh'): uint8_t ns3::dot11s::IeMeshId::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-id.h (module 'mesh'): bool ns3::dot11s::IeMeshId::IsBroadcast() const [member function]
cls.add_method('IsBroadcast',
'bool',
[],
is_const=True)
## ie-dot11s-id.h (module 'mesh'): bool ns3::dot11s::IeMeshId::IsEqual(ns3::dot11s::IeMeshId const & o) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::dot11s::IeMeshId const &', 'o')],
is_const=True)
## ie-dot11s-id.h (module 'mesh'): char * ns3::dot11s::IeMeshId::PeekString() const [member function]
cls.add_method('PeekString',
'char *',
[],
is_const=True)
## ie-dot11s-id.h (module 'mesh'): void ns3::dot11s::IeMeshId::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-id.h (module 'mesh'): void ns3::dot11s::IeMeshId::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
return
def register_Ns3Dot11sIeMeshIdChecker_methods(root_module, cls):
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshIdChecker::IeMeshIdChecker() [constructor]
cls.add_constructor([])
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshIdChecker::IeMeshIdChecker(ns3::dot11s::IeMeshIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IeMeshIdChecker const &', 'arg0')])
return
def register_Ns3Dot11sIeMeshIdValue_methods(root_module, cls):
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshIdValue::IeMeshIdValue() [constructor]
cls.add_constructor([])
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshIdValue::IeMeshIdValue(ns3::dot11s::IeMeshIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IeMeshIdValue const &', 'arg0')])
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshIdValue::IeMeshIdValue(ns3::dot11s::IeMeshId const & value) [constructor]
cls.add_constructor([param('ns3::dot11s::IeMeshId const &', 'value')])
## ie-dot11s-id.h (module 'mesh'): ns3::Ptr<ns3::AttributeValue> ns3::dot11s::IeMeshIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## ie-dot11s-id.h (module 'mesh'): bool ns3::dot11s::IeMeshIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## ie-dot11s-id.h (module 'mesh'): ns3::dot11s::IeMeshId ns3::dot11s::IeMeshIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::dot11s::IeMeshId',
[],
is_const=True)
## ie-dot11s-id.h (module 'mesh'): std::string ns3::dot11s::IeMeshIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## ie-dot11s-id.h (module 'mesh'): void ns3::dot11s::IeMeshIdValue::Set(ns3::dot11s::IeMeshId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::dot11s::IeMeshId const &', 'value')])
return
def register_Ns3Dot11sIePeerManagement_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ie-dot11s-peer-management.h (module 'mesh'): ns3::dot11s::IePeerManagement::IePeerManagement(ns3::dot11s::IePeerManagement const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IePeerManagement const &', 'arg0')])
## ie-dot11s-peer-management.h (module 'mesh'): ns3::dot11s::IePeerManagement::IePeerManagement() [constructor]
cls.add_constructor([])
## ie-dot11s-peer-management.h (module 'mesh'): uint8_t ns3::dot11s::IePeerManagement::DeserializeInformationField(ns3::Buffer::Iterator i, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'i'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-peer-management.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IePeerManagement::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-peer-management.h (module 'mesh'): uint8_t ns3::dot11s::IePeerManagement::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-peer-management.h (module 'mesh'): uint16_t ns3::dot11s::IePeerManagement::GetLocalLinkId() const [member function]
cls.add_method('GetLocalLinkId',
'uint16_t',
[],
is_const=True)
## ie-dot11s-peer-management.h (module 'mesh'): uint16_t ns3::dot11s::IePeerManagement::GetPeerLinkId() const [member function]
cls.add_method('GetPeerLinkId',
'uint16_t',
[],
is_const=True)
## ie-dot11s-peer-management.h (module 'mesh'): ns3::dot11s::PmpReasonCode ns3::dot11s::IePeerManagement::GetReasonCode() const [member function]
cls.add_method('GetReasonCode',
'ns3::dot11s::PmpReasonCode',
[],
is_const=True)
## ie-dot11s-peer-management.h (module 'mesh'): uint8_t ns3::dot11s::IePeerManagement::GetSubtype() const [member function]
cls.add_method('GetSubtype',
'uint8_t',
[],
is_const=True)
## ie-dot11s-peer-management.h (module 'mesh'): void ns3::dot11s::IePeerManagement::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-peer-management.h (module 'mesh'): void ns3::dot11s::IePeerManagement::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
## ie-dot11s-peer-management.h (module 'mesh'): void ns3::dot11s::IePeerManagement::SetPeerClose(uint16_t localLinkID, uint16_t peerLinkId, ns3::dot11s::PmpReasonCode reasonCode) [member function]
cls.add_method('SetPeerClose',
'void',
[param('uint16_t', 'localLinkID'), param('uint16_t', 'peerLinkId'), param('ns3::dot11s::PmpReasonCode', 'reasonCode')])
## ie-dot11s-peer-management.h (module 'mesh'): void ns3::dot11s::IePeerManagement::SetPeerConfirm(uint16_t localLinkID, uint16_t peerLinkId) [member function]
cls.add_method('SetPeerConfirm',
'void',
[param('uint16_t', 'localLinkID'), param('uint16_t', 'peerLinkId')])
## ie-dot11s-peer-management.h (module 'mesh'): void ns3::dot11s::IePeerManagement::SetPeerOpen(uint16_t localLinkId) [member function]
cls.add_method('SetPeerOpen',
'void',
[param('uint16_t', 'localLinkId')])
## ie-dot11s-peer-management.h (module 'mesh'): bool ns3::dot11s::IePeerManagement::SubtypeIsClose() const [member function]
cls.add_method('SubtypeIsClose',
'bool',
[],
is_const=True)
## ie-dot11s-peer-management.h (module 'mesh'): bool ns3::dot11s::IePeerManagement::SubtypeIsConfirm() const [member function]
cls.add_method('SubtypeIsConfirm',
'bool',
[],
is_const=True)
## ie-dot11s-peer-management.h (module 'mesh'): bool ns3::dot11s::IePeerManagement::SubtypeIsOpen() const [member function]
cls.add_method('SubtypeIsOpen',
'bool',
[],
is_const=True)
return
def register_Ns3Dot11sIePeeringProtocol_methods(root_module, cls):
## ie-dot11s-peering-protocol.h (module 'mesh'): ns3::dot11s::IePeeringProtocol::IePeeringProtocol(ns3::dot11s::IePeeringProtocol const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IePeeringProtocol const &', 'arg0')])
## ie-dot11s-peering-protocol.h (module 'mesh'): ns3::dot11s::IePeeringProtocol::IePeeringProtocol() [constructor]
cls.add_constructor([])
## ie-dot11s-peering-protocol.h (module 'mesh'): uint8_t ns3::dot11s::IePeeringProtocol::DeserializeInformationField(ns3::Buffer::Iterator i, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'i'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-peering-protocol.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IePeeringProtocol::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-peering-protocol.h (module 'mesh'): uint8_t ns3::dot11s::IePeeringProtocol::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-peering-protocol.h (module 'mesh'): void ns3::dot11s::IePeeringProtocol::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-peering-protocol.h (module 'mesh'): void ns3::dot11s::IePeeringProtocol::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
return
def register_Ns3Dot11sIePerr_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ie-dot11s-perr.h (module 'mesh'): ns3::dot11s::IePerr::IePerr(ns3::dot11s::IePerr const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IePerr const &', 'arg0')])
## ie-dot11s-perr.h (module 'mesh'): ns3::dot11s::IePerr::IePerr() [constructor]
cls.add_constructor([])
## ie-dot11s-perr.h (module 'mesh'): void ns3::dot11s::IePerr::AddAddressUnit(ns3::dot11s::HwmpProtocol::FailedDestination unit) [member function]
cls.add_method('AddAddressUnit',
'void',
[param('ns3::dot11s::HwmpProtocol::FailedDestination', 'unit')])
## ie-dot11s-perr.h (module 'mesh'): void ns3::dot11s::IePerr::DeleteAddressUnit(ns3::Mac48Address address) [member function]
cls.add_method('DeleteAddressUnit',
'void',
[param('ns3::Mac48Address', 'address')])
## ie-dot11s-perr.h (module 'mesh'): uint8_t ns3::dot11s::IePerr::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-perr.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IePerr::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-perr.h (module 'mesh'): std::vector<ns3::dot11s::HwmpProtocol::FailedDestination, std::allocator<ns3::dot11s::HwmpProtocol::FailedDestination> > ns3::dot11s::IePerr::GetAddressUnitVector() const [member function]
cls.add_method('GetAddressUnitVector',
'std::vector< ns3::dot11s::HwmpProtocol::FailedDestination >',
[],
is_const=True)
## ie-dot11s-perr.h (module 'mesh'): uint8_t ns3::dot11s::IePerr::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-perr.h (module 'mesh'): uint8_t ns3::dot11s::IePerr::GetNumOfDest() const [member function]
cls.add_method('GetNumOfDest',
'uint8_t',
[],
is_const=True)
## ie-dot11s-perr.h (module 'mesh'): bool ns3::dot11s::IePerr::IsFull() const [member function]
cls.add_method('IsFull',
'bool',
[],
is_const=True)
## ie-dot11s-perr.h (module 'mesh'): void ns3::dot11s::IePerr::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-perr.h (module 'mesh'): void ns3::dot11s::IePerr::ResetPerr() [member function]
cls.add_method('ResetPerr',
'void',
[])
## ie-dot11s-perr.h (module 'mesh'): void ns3::dot11s::IePerr::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
return
def register_Ns3Dot11sIePrep_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ie-dot11s-prep.h (module 'mesh'): ns3::dot11s::IePrep::IePrep(ns3::dot11s::IePrep const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IePrep const &', 'arg0')])
## ie-dot11s-prep.h (module 'mesh'): ns3::dot11s::IePrep::IePrep() [constructor]
cls.add_constructor([])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::DecrementTtl() [member function]
cls.add_method('DecrementTtl',
'void',
[])
## ie-dot11s-prep.h (module 'mesh'): uint8_t ns3::dot11s::IePrep::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-prep.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IePrep::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-prep.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::IePrep::GetDestinationAddress() const [member function]
cls.add_method('GetDestinationAddress',
'ns3::Mac48Address',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): uint32_t ns3::dot11s::IePrep::GetDestinationSeqNumber() const [member function]
cls.add_method('GetDestinationSeqNumber',
'uint32_t',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): uint8_t ns3::dot11s::IePrep::GetFlags() const [member function]
cls.add_method('GetFlags',
'uint8_t',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): uint8_t ns3::dot11s::IePrep::GetHopcount() const [member function]
cls.add_method('GetHopcount',
'uint8_t',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): uint8_t ns3::dot11s::IePrep::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-prep.h (module 'mesh'): uint32_t ns3::dot11s::IePrep::GetLifetime() const [member function]
cls.add_method('GetLifetime',
'uint32_t',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): uint32_t ns3::dot11s::IePrep::GetMetric() const [member function]
cls.add_method('GetMetric',
'uint32_t',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::IePrep::GetOriginatorAddress() const [member function]
cls.add_method('GetOriginatorAddress',
'ns3::Mac48Address',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): uint32_t ns3::dot11s::IePrep::GetOriginatorSeqNumber() const [member function]
cls.add_method('GetOriginatorSeqNumber',
'uint32_t',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): uint32_t ns3::dot11s::IePrep::GetTtl() const [member function]
cls.add_method('GetTtl',
'uint32_t',
[],
is_const=True)
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::IncrementMetric(uint32_t metric) [member function]
cls.add_method('IncrementMetric',
'void',
[param('uint32_t', 'metric')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetDestinationAddress(ns3::Mac48Address dest_address) [member function]
cls.add_method('SetDestinationAddress',
'void',
[param('ns3::Mac48Address', 'dest_address')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetDestinationSeqNumber(uint32_t dest_seq_number) [member function]
cls.add_method('SetDestinationSeqNumber',
'void',
[param('uint32_t', 'dest_seq_number')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetFlags(uint8_t flags) [member function]
cls.add_method('SetFlags',
'void',
[param('uint8_t', 'flags')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetHopcount(uint8_t hopcount) [member function]
cls.add_method('SetHopcount',
'void',
[param('uint8_t', 'hopcount')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetLifetime(uint32_t lifetime) [member function]
cls.add_method('SetLifetime',
'void',
[param('uint32_t', 'lifetime')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetMetric(uint32_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint32_t', 'metric')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetOriginatorAddress(ns3::Mac48Address originator_address) [member function]
cls.add_method('SetOriginatorAddress',
'void',
[param('ns3::Mac48Address', 'originator_address')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetOriginatorSeqNumber(uint32_t originator_seq_number) [member function]
cls.add_method('SetOriginatorSeqNumber',
'void',
[param('uint32_t', 'originator_seq_number')])
## ie-dot11s-prep.h (module 'mesh'): void ns3::dot11s::IePrep::SetTtl(uint8_t ttl) [member function]
cls.add_method('SetTtl',
'void',
[param('uint8_t', 'ttl')])
return
def register_Ns3Dot11sIePreq_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ie-dot11s-preq.h (module 'mesh'): ns3::dot11s::IePreq::IePreq(ns3::dot11s::IePreq const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IePreq const &', 'arg0')])
## ie-dot11s-preq.h (module 'mesh'): ns3::dot11s::IePreq::IePreq() [constructor]
cls.add_constructor([])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::AddDestinationAddressElement(bool doFlag, bool rfFlag, ns3::Mac48Address dest_address, uint32_t dest_seq_number) [member function]
cls.add_method('AddDestinationAddressElement',
'void',
[param('bool', 'doFlag'), param('bool', 'rfFlag'), param('ns3::Mac48Address', 'dest_address'), param('uint32_t', 'dest_seq_number')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::ClearDestinationAddressElements() [member function]
cls.add_method('ClearDestinationAddressElements',
'void',
[])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::DecrementTtl() [member function]
cls.add_method('DecrementTtl',
'void',
[])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::DelDestinationAddressElement(ns3::Mac48Address dest_address) [member function]
cls.add_method('DelDestinationAddressElement',
'void',
[param('ns3::Mac48Address', 'dest_address')])
## ie-dot11s-preq.h (module 'mesh'): uint8_t ns3::dot11s::IePreq::DeserializeInformationField(ns3::Buffer::Iterator i, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'i'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-preq.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IePreq::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-preq.h (module 'mesh'): uint8_t ns3::dot11s::IePreq::GetDestCount() const [member function]
cls.add_method('GetDestCount',
'uint8_t',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): std::vector<ns3::Ptr<ns3::dot11s::DestinationAddressUnit>, std::allocator<ns3::Ptr<ns3::dot11s::DestinationAddressUnit> > > ns3::dot11s::IePreq::GetDestinationList() [member function]
cls.add_method('GetDestinationList',
'std::vector< ns3::Ptr< ns3::dot11s::DestinationAddressUnit > >',
[])
## ie-dot11s-preq.h (module 'mesh'): uint8_t ns3::dot11s::IePreq::GetHopCount() const [member function]
cls.add_method('GetHopCount',
'uint8_t',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): uint8_t ns3::dot11s::IePreq::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-preq.h (module 'mesh'): uint32_t ns3::dot11s::IePreq::GetLifetime() const [member function]
cls.add_method('GetLifetime',
'uint32_t',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): uint32_t ns3::dot11s::IePreq::GetMetric() const [member function]
cls.add_method('GetMetric',
'uint32_t',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::IePreq::GetOriginatorAddress() const [member function]
cls.add_method('GetOriginatorAddress',
'ns3::Mac48Address',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): uint32_t ns3::dot11s::IePreq::GetOriginatorSeqNumber() const [member function]
cls.add_method('GetOriginatorSeqNumber',
'uint32_t',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): uint32_t ns3::dot11s::IePreq::GetPreqID() const [member function]
cls.add_method('GetPreqID',
'uint32_t',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): uint8_t ns3::dot11s::IePreq::GetTtl() const [member function]
cls.add_method('GetTtl',
'uint8_t',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::IncrementMetric(uint32_t metric) [member function]
cls.add_method('IncrementMetric',
'void',
[param('uint32_t', 'metric')])
## ie-dot11s-preq.h (module 'mesh'): bool ns3::dot11s::IePreq::IsFull() const [member function]
cls.add_method('IsFull',
'bool',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): bool ns3::dot11s::IePreq::IsNeedNotPrep() const [member function]
cls.add_method('IsNeedNotPrep',
'bool',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): bool ns3::dot11s::IePreq::IsUnicastPreq() const [member function]
cls.add_method('IsUnicastPreq',
'bool',
[],
is_const=True)
## ie-dot11s-preq.h (module 'mesh'): bool ns3::dot11s::IePreq::MayAddAddress(ns3::Mac48Address originator) [member function]
cls.add_method('MayAddAddress',
'bool',
[param('ns3::Mac48Address', 'originator')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetDestCount(uint8_t dest_count) [member function]
cls.add_method('SetDestCount',
'void',
[param('uint8_t', 'dest_count')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetHopcount(uint8_t hopcount) [member function]
cls.add_method('SetHopcount',
'void',
[param('uint8_t', 'hopcount')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetLifetime(uint32_t lifetime) [member function]
cls.add_method('SetLifetime',
'void',
[param('uint32_t', 'lifetime')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetMetric(uint32_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint32_t', 'metric')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetNeedNotPrep() [member function]
cls.add_method('SetNeedNotPrep',
'void',
[])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetOriginatorAddress(ns3::Mac48Address originator_address) [member function]
cls.add_method('SetOriginatorAddress',
'void',
[param('ns3::Mac48Address', 'originator_address')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetOriginatorSeqNumber(uint32_t originator_seq_number) [member function]
cls.add_method('SetOriginatorSeqNumber',
'void',
[param('uint32_t', 'originator_seq_number')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetPreqID(uint32_t id) [member function]
cls.add_method('SetPreqID',
'void',
[param('uint32_t', 'id')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetTTL(uint8_t ttl) [member function]
cls.add_method('SetTTL',
'void',
[param('uint8_t', 'ttl')])
## ie-dot11s-preq.h (module 'mesh'): void ns3::dot11s::IePreq::SetUnicastPreq() [member function]
cls.add_method('SetUnicastPreq',
'void',
[])
return
def register_Ns3Dot11sIeRann_methods(root_module, cls):
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## ie-dot11s-rann.h (module 'mesh'): ns3::dot11s::IeRann::IeRann(ns3::dot11s::IeRann const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::IeRann const &', 'arg0')])
## ie-dot11s-rann.h (module 'mesh'): ns3::dot11s::IeRann::IeRann() [constructor]
cls.add_constructor([])
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::DecrementTtl() [member function]
cls.add_method('DecrementTtl',
'void',
[])
## ie-dot11s-rann.h (module 'mesh'): uint8_t ns3::dot11s::IeRann::DeserializeInformationField(ns3::Buffer::Iterator start, uint8_t length) [member function]
cls.add_method('DeserializeInformationField',
'uint8_t',
[param('ns3::Buffer::Iterator', 'start'), param('uint8_t', 'length')],
is_virtual=True)
## ie-dot11s-rann.h (module 'mesh'): ns3::WifiInformationElementId ns3::dot11s::IeRann::ElementId() const [member function]
cls.add_method('ElementId',
'ns3::WifiInformationElementId',
[],
is_const=True, is_virtual=True)
## ie-dot11s-rann.h (module 'mesh'): uint32_t ns3::dot11s::IeRann::GetDestSeqNumber() [member function]
cls.add_method('GetDestSeqNumber',
'uint32_t',
[])
## ie-dot11s-rann.h (module 'mesh'): uint8_t ns3::dot11s::IeRann::GetFlags() [member function]
cls.add_method('GetFlags',
'uint8_t',
[])
## ie-dot11s-rann.h (module 'mesh'): uint8_t ns3::dot11s::IeRann::GetHopcount() [member function]
cls.add_method('GetHopcount',
'uint8_t',
[])
## ie-dot11s-rann.h (module 'mesh'): uint8_t ns3::dot11s::IeRann::GetInformationFieldSize() const [member function]
cls.add_method('GetInformationFieldSize',
'uint8_t',
[],
is_const=True, is_virtual=True)
## ie-dot11s-rann.h (module 'mesh'): uint32_t ns3::dot11s::IeRann::GetMetric() [member function]
cls.add_method('GetMetric',
'uint32_t',
[])
## ie-dot11s-rann.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::IeRann::GetOriginatorAddress() [member function]
cls.add_method('GetOriginatorAddress',
'ns3::Mac48Address',
[])
## ie-dot11s-rann.h (module 'mesh'): uint8_t ns3::dot11s::IeRann::GetTtl() [member function]
cls.add_method('GetTtl',
'uint8_t',
[])
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::IncrementMetric(uint32_t metric) [member function]
cls.add_method('IncrementMetric',
'void',
[param('uint32_t', 'metric')])
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::SerializeInformationField(ns3::Buffer::Iterator i) const [member function]
cls.add_method('SerializeInformationField',
'void',
[param('ns3::Buffer::Iterator', 'i')],
is_const=True, is_virtual=True)
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::SetDestSeqNumber(uint32_t dest_seq_number) [member function]
cls.add_method('SetDestSeqNumber',
'void',
[param('uint32_t', 'dest_seq_number')])
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::SetFlags(uint8_t flags) [member function]
cls.add_method('SetFlags',
'void',
[param('uint8_t', 'flags')])
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::SetHopcount(uint8_t hopcount) [member function]
cls.add_method('SetHopcount',
'void',
[param('uint8_t', 'hopcount')])
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::SetMetric(uint32_t metric) [member function]
cls.add_method('SetMetric',
'void',
[param('uint32_t', 'metric')])
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::SetOriginatorAddress(ns3::Mac48Address originator_address) [member function]
cls.add_method('SetOriginatorAddress',
'void',
[param('ns3::Mac48Address', 'originator_address')])
## ie-dot11s-rann.h (module 'mesh'): void ns3::dot11s::IeRann::SetTTL(uint8_t ttl) [member function]
cls.add_method('SetTTL',
'void',
[param('uint8_t', 'ttl')])
return
def register_Ns3Dot11sMeshHeader_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## dot11s-mac-header.h (module 'mesh'): ns3::dot11s::MeshHeader::MeshHeader(ns3::dot11s::MeshHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::MeshHeader const &', 'arg0')])
## dot11s-mac-header.h (module 'mesh'): ns3::dot11s::MeshHeader::MeshHeader() [constructor]
cls.add_constructor([])
## dot11s-mac-header.h (module 'mesh'): uint32_t ns3::dot11s::MeshHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## dot11s-mac-header.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::MeshHeader::GetAddr4() const [member function]
cls.add_method('GetAddr4',
'ns3::Mac48Address',
[],
is_const=True)
## dot11s-mac-header.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::MeshHeader::GetAddr5() const [member function]
cls.add_method('GetAddr5',
'ns3::Mac48Address',
[],
is_const=True)
## dot11s-mac-header.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::MeshHeader::GetAddr6() const [member function]
cls.add_method('GetAddr6',
'ns3::Mac48Address',
[],
is_const=True)
## dot11s-mac-header.h (module 'mesh'): uint8_t ns3::dot11s::MeshHeader::GetAddressExt() const [member function]
cls.add_method('GetAddressExt',
'uint8_t',
[],
is_const=True)
## dot11s-mac-header.h (module 'mesh'): ns3::TypeId ns3::dot11s::MeshHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## dot11s-mac-header.h (module 'mesh'): uint32_t ns3::dot11s::MeshHeader::GetMeshSeqno() const [member function]
cls.add_method('GetMeshSeqno',
'uint32_t',
[],
is_const=True)
## dot11s-mac-header.h (module 'mesh'): uint8_t ns3::dot11s::MeshHeader::GetMeshTtl() const [member function]
cls.add_method('GetMeshTtl',
'uint8_t',
[],
is_const=True)
## dot11s-mac-header.h (module 'mesh'): uint32_t ns3::dot11s::MeshHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## dot11s-mac-header.h (module 'mesh'): static ns3::TypeId ns3::dot11s::MeshHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## dot11s-mac-header.h (module 'mesh'): void ns3::dot11s::MeshHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## dot11s-mac-header.h (module 'mesh'): void ns3::dot11s::MeshHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## dot11s-mac-header.h (module 'mesh'): void ns3::dot11s::MeshHeader::SetAddr4(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr4',
'void',
[param('ns3::Mac48Address', 'address')])
## dot11s-mac-header.h (module 'mesh'): void ns3::dot11s::MeshHeader::SetAddr5(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr5',
'void',
[param('ns3::Mac48Address', 'address')])
## dot11s-mac-header.h (module 'mesh'): void ns3::dot11s::MeshHeader::SetAddr6(ns3::Mac48Address address) [member function]
cls.add_method('SetAddr6',
'void',
[param('ns3::Mac48Address', 'address')])
## dot11s-mac-header.h (module 'mesh'): void ns3::dot11s::MeshHeader::SetAddressExt(uint8_t num_of_addresses) [member function]
cls.add_method('SetAddressExt',
'void',
[param('uint8_t', 'num_of_addresses')])
## dot11s-mac-header.h (module 'mesh'): void ns3::dot11s::MeshHeader::SetMeshSeqno(uint32_t seqno) [member function]
cls.add_method('SetMeshSeqno',
'void',
[param('uint32_t', 'seqno')])
## dot11s-mac-header.h (module 'mesh'): void ns3::dot11s::MeshHeader::SetMeshTtl(uint8_t TTL) [member function]
cls.add_method('SetMeshTtl',
'void',
[param('uint8_t', 'TTL')])
return
def register_Ns3Dot11sPeerLink_methods(root_module, cls):
## peer-link.h (module 'mesh'): static ns3::TypeId ns3::dot11s::PeerLink::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## peer-link.h (module 'mesh'): ns3::dot11s::PeerLink::PeerLink() [constructor]
cls.add_constructor([])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::SetBeaconInformation(ns3::Time lastBeacon, ns3::Time BeaconInterval) [member function]
cls.add_method('SetBeaconInformation',
'void',
[param('ns3::Time', 'lastBeacon'), param('ns3::Time', 'BeaconInterval')])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::SetLinkStatusCallback(ns3::Callback<void,unsigned int,ns3::Mac48Address,bool,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
cls.add_method('SetLinkStatusCallback',
'void',
[param('ns3::Callback< void, unsigned int, ns3::Mac48Address, bool, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::SetPeerAddress(ns3::Mac48Address macaddr) [member function]
cls.add_method('SetPeerAddress',
'void',
[param('ns3::Mac48Address', 'macaddr')])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::SetPeerMeshPointAddress(ns3::Mac48Address macaddr) [member function]
cls.add_method('SetPeerMeshPointAddress',
'void',
[param('ns3::Mac48Address', 'macaddr')])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::SetInterface(uint32_t interface) [member function]
cls.add_method('SetInterface',
'void',
[param('uint32_t', 'interface')])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::SetLocalLinkId(uint16_t id) [member function]
cls.add_method('SetLocalLinkId',
'void',
[param('uint16_t', 'id')])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::SetLocalAid(uint16_t aid) [member function]
cls.add_method('SetLocalAid',
'void',
[param('uint16_t', 'aid')])
## peer-link.h (module 'mesh'): uint16_t ns3::dot11s::PeerLink::GetPeerAid() const [member function]
cls.add_method('GetPeerAid',
'uint16_t',
[],
is_const=True)
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::SetBeaconTimingElement(ns3::dot11s::IeBeaconTiming beaconTiming) [member function]
cls.add_method('SetBeaconTimingElement',
'void',
[param('ns3::dot11s::IeBeaconTiming', 'beaconTiming')])
## peer-link.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::PeerLink::GetPeerAddress() const [member function]
cls.add_method('GetPeerAddress',
'ns3::Mac48Address',
[],
is_const=True)
## peer-link.h (module 'mesh'): uint16_t ns3::dot11s::PeerLink::GetLocalAid() const [member function]
cls.add_method('GetLocalAid',
'uint16_t',
[],
is_const=True)
## peer-link.h (module 'mesh'): ns3::Time ns3::dot11s::PeerLink::GetLastBeacon() const [member function]
cls.add_method('GetLastBeacon',
'ns3::Time',
[],
is_const=True)
## peer-link.h (module 'mesh'): ns3::Time ns3::dot11s::PeerLink::GetBeaconInterval() const [member function]
cls.add_method('GetBeaconInterval',
'ns3::Time',
[],
is_const=True)
## peer-link.h (module 'mesh'): ns3::dot11s::IeBeaconTiming ns3::dot11s::PeerLink::GetBeaconTimingElement() const [member function]
cls.add_method('GetBeaconTimingElement',
'ns3::dot11s::IeBeaconTiming',
[],
is_const=True)
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::MLMECancelPeerLink(ns3::dot11s::PmpReasonCode reason) [member function]
cls.add_method('MLMECancelPeerLink',
'void',
[param('ns3::dot11s::PmpReasonCode', 'reason')])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::MLMEActivePeerLinkOpen() [member function]
cls.add_method('MLMEActivePeerLinkOpen',
'void',
[])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::MLMEPeeringRequestReject() [member function]
cls.add_method('MLMEPeeringRequestReject',
'void',
[])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::MLMESetSignalStatusCallback(ns3::Callback<void, unsigned int, ns3::Mac48Address, ns3::Mac48Address, ns3::dot11s::PeerLink::PeerState, ns3::dot11s::PeerLink::PeerState, ns3::empty, ns3::empty, ns3::empty, ns3::empty> arg0) [member function]
cls.add_method('MLMESetSignalStatusCallback',
'void',
[param('ns3::Callback< void, unsigned int, ns3::Mac48Address, ns3::Mac48Address, ns3::dot11s::PeerLink::PeerState, ns3::dot11s::PeerLink::PeerState, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'arg0')])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::TransmissionSuccess() [member function]
cls.add_method('TransmissionSuccess',
'void',
[])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::TransmissionFailure() [member function]
cls.add_method('TransmissionFailure',
'void',
[])
## peer-link.h (module 'mesh'): void ns3::dot11s::PeerLink::Report(std::ostream & os) const [member function]
cls.add_method('Report',
'void',
[param('std::ostream &', 'os')],
is_const=True)
return
def register_Ns3Dot11sPeerLinkFrameStart_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PeerLinkFrameStart() [constructor]
cls.add_constructor([])
## peer-link-frame.h (module 'mesh'): uint32_t ns3::dot11s::PeerLinkFrameStart::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields ns3::dot11s::PeerLinkFrameStart::GetFields() const [member function]
cls.add_method('GetFields',
'ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields',
[],
is_const=True)
## peer-link-frame.h (module 'mesh'): ns3::TypeId ns3::dot11s::PeerLinkFrameStart::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## peer-link-frame.h (module 'mesh'): uint32_t ns3::dot11s::PeerLinkFrameStart::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## peer-link-frame.h (module 'mesh'): static ns3::TypeId ns3::dot11s::PeerLinkFrameStart::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## peer-link-frame.h (module 'mesh'): void ns3::dot11s::PeerLinkFrameStart::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## peer-link-frame.h (module 'mesh'): void ns3::dot11s::PeerLinkFrameStart::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## peer-link-frame.h (module 'mesh'): void ns3::dot11s::PeerLinkFrameStart::SetPlinkFrameStart(ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields arg0) [member function]
cls.add_method('SetPlinkFrameStart',
'void',
[param('ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields', 'arg0')])
## peer-link-frame.h (module 'mesh'): void ns3::dot11s::PeerLinkFrameStart::SetPlinkFrameSubtype(uint8_t subtype) [member function]
cls.add_method('SetPlinkFrameSubtype',
'void',
[param('uint8_t', 'subtype')])
return
def register_Ns3Dot11sPeerLinkFrameStartPlinkFrameStartFields_methods(root_module, cls):
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::PlinkFrameStartFields() [constructor]
cls.add_constructor([])
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::PlinkFrameStartFields(ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields const & arg0) [copy constructor]
cls.add_constructor([param('ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields const &', 'arg0')])
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::aid [variable]
cls.add_instance_attribute('aid', 'uint16_t', is_const=False)
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::capability [variable]
cls.add_instance_attribute('capability', 'uint16_t', is_const=False)
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::config [variable]
cls.add_instance_attribute('config', 'ns3::dot11s::IeConfiguration', is_const=False)
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::meshId [variable]
cls.add_instance_attribute('meshId', 'ns3::dot11s::IeMeshId', is_const=False)
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::protocol [variable]
cls.add_instance_attribute('protocol', 'ns3::dot11s::IePeeringProtocol', is_const=False)
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::rates [variable]
cls.add_instance_attribute('rates', 'ns3::SupportedRates', is_const=False)
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::reasonCode [variable]
cls.add_instance_attribute('reasonCode', 'uint16_t', is_const=False)
## peer-link-frame.h (module 'mesh'): ns3::dot11s::PeerLinkFrameStart::PlinkFrameStartFields::subtype [variable]
cls.add_instance_attribute('subtype', 'uint8_t', is_const=False)
return
def register_Ns3Dot11sPeerManagementProtocol_methods(root_module, cls):
## peer-management-protocol.h (module 'mesh'): ns3::dot11s::PeerManagementProtocol::PeerManagementProtocol() [constructor]
cls.add_constructor([])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::ConfigurationMismatch(uint32_t interface, ns3::Mac48Address peerAddress) [member function]
cls.add_method('ConfigurationMismatch',
'void',
[param('uint32_t', 'interface'), param('ns3::Mac48Address', 'peerAddress')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## peer-management-protocol.h (module 'mesh'): ns3::Ptr<ns3::dot11s::PeerLink> ns3::dot11s::PeerManagementProtocol::FindPeerLink(uint32_t interface, ns3::Mac48Address peerAddress) [member function]
cls.add_method('FindPeerLink',
'ns3::Ptr< ns3::dot11s::PeerLink >',
[param('uint32_t', 'interface'), param('ns3::Mac48Address', 'peerAddress')])
## peer-management-protocol.h (module 'mesh'): ns3::Mac48Address ns3::dot11s::PeerManagementProtocol::GetAddress() [member function]
cls.add_method('GetAddress',
'ns3::Mac48Address',
[])
## peer-management-protocol.h (module 'mesh'): bool ns3::dot11s::PeerManagementProtocol::GetBeaconCollisionAvoidance() const [member function]
cls.add_method('GetBeaconCollisionAvoidance',
'bool',
[],
is_const=True)
## peer-management-protocol.h (module 'mesh'): ns3::Ptr<ns3::dot11s::IeBeaconTiming> ns3::dot11s::PeerManagementProtocol::GetBeaconTimingElement(uint32_t interface) [member function]
cls.add_method('GetBeaconTimingElement',
'ns3::Ptr< ns3::dot11s::IeBeaconTiming >',
[param('uint32_t', 'interface')])
## peer-management-protocol.h (module 'mesh'): ns3::Ptr<ns3::dot11s::IeMeshId> ns3::dot11s::PeerManagementProtocol::GetMeshId() const [member function]
cls.add_method('GetMeshId',
'ns3::Ptr< ns3::dot11s::IeMeshId >',
[],
is_const=True)
## peer-management-protocol.h (module 'mesh'): uint8_t ns3::dot11s::PeerManagementProtocol::GetNumberOfLinks() [member function]
cls.add_method('GetNumberOfLinks',
'uint8_t',
[])
## peer-management-protocol.h (module 'mesh'): std::vector<ns3::Ptr<ns3::dot11s::PeerLink>,std::allocator<ns3::Ptr<ns3::dot11s::PeerLink> > > ns3::dot11s::PeerManagementProtocol::GetPeerLinks() const [member function]
cls.add_method('GetPeerLinks',
'std::vector< ns3::Ptr< ns3::dot11s::PeerLink > >',
[],
is_const=True)
## peer-management-protocol.h (module 'mesh'): std::vector<ns3::Mac48Address,std::allocator<ns3::Mac48Address> > ns3::dot11s::PeerManagementProtocol::GetPeers(uint32_t interface) const [member function]
cls.add_method('GetPeers',
'std::vector< ns3::Mac48Address >',
[param('uint32_t', 'interface')],
is_const=True)
## peer-management-protocol.h (module 'mesh'): static ns3::TypeId ns3::dot11s::PeerManagementProtocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## peer-management-protocol.h (module 'mesh'): bool ns3::dot11s::PeerManagementProtocol::Install(ns3::Ptr<ns3::MeshPointDevice> arg0) [member function]
cls.add_method('Install',
'bool',
[param('ns3::Ptr< ns3::MeshPointDevice >', 'arg0')])
## peer-management-protocol.h (module 'mesh'): bool ns3::dot11s::PeerManagementProtocol::IsActiveLink(uint32_t interface, ns3::Mac48Address peerAddress) [member function]
cls.add_method('IsActiveLink',
'bool',
[param('uint32_t', 'interface'), param('ns3::Mac48Address', 'peerAddress')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::NotifyBeaconSent(uint32_t interface, ns3::Time beaconInterval) [member function]
cls.add_method('NotifyBeaconSent',
'void',
[param('uint32_t', 'interface'), param('ns3::Time', 'beaconInterval')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::ReceiveBeacon(uint32_t interface, ns3::Mac48Address peerAddress, ns3::Time beaconInterval, ns3::Ptr<ns3::dot11s::IeBeaconTiming> beaconTiming) [member function]
cls.add_method('ReceiveBeacon',
'void',
[param('uint32_t', 'interface'), param('ns3::Mac48Address', 'peerAddress'), param('ns3::Time', 'beaconInterval'), param('ns3::Ptr< ns3::dot11s::IeBeaconTiming >', 'beaconTiming')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::ReceivePeerLinkFrame(uint32_t interface, ns3::Mac48Address peerAddress, ns3::Mac48Address peerMeshPointAddress, uint16_t aid, ns3::dot11s::IePeerManagement peerManagementElement, ns3::dot11s::IeConfiguration meshConfig) [member function]
cls.add_method('ReceivePeerLinkFrame',
'void',
[param('uint32_t', 'interface'), param('ns3::Mac48Address', 'peerAddress'), param('ns3::Mac48Address', 'peerMeshPointAddress'), param('uint16_t', 'aid'), param('ns3::dot11s::IePeerManagement', 'peerManagementElement'), param('ns3::dot11s::IeConfiguration', 'meshConfig')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::Report(std::ostream & arg0) const [member function]
cls.add_method('Report',
'void',
[param('std::ostream &', 'arg0')],
is_const=True)
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::ResetStats() [member function]
cls.add_method('ResetStats',
'void',
[])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::SetBeaconCollisionAvoidance(bool enable) [member function]
cls.add_method('SetBeaconCollisionAvoidance',
'void',
[param('bool', 'enable')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::SetMeshId(std::string s) [member function]
cls.add_method('SetMeshId',
'void',
[param('std::string', 's')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::SetPeerLinkStatusCallback(ns3::Callback<void, ns3::Mac48Address, ns3::Mac48Address, unsigned int, bool, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> cb) [member function]
cls.add_method('SetPeerLinkStatusCallback',
'void',
[param('ns3::Callback< void, ns3::Mac48Address, ns3::Mac48Address, unsigned int, bool, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::TransmissionFailure(uint32_t interface, ns3::Mac48Address const peerAddress) [member function]
cls.add_method('TransmissionFailure',
'void',
[param('uint32_t', 'interface'), param('ns3::Mac48Address const', 'peerAddress')])
## peer-management-protocol.h (module 'mesh'): void ns3::dot11s::PeerManagementProtocol::TransmissionSuccess(uint32_t interface, ns3::Mac48Address const peerAddress) [member function]
cls.add_method('TransmissionSuccess',
'void',
[param('uint32_t', 'interface'), param('ns3::Mac48Address const', 'peerAddress')])
return
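# Once pybindgen turns these declarations into C++ wrapper code and the
# bindings are compiled, the dot11s classes registered above become callable
# from Python scripts. The sketch below is hypothetical usage only (it is
# never invoked by this module) and assumes the merged 'ns3' Python module
# exposes the C++ 'dot11s' namespace as 'ns3.dot11s', which may vary between
# ns-3 releases and binding configurations.
def _dot11s_usage_example():
    import ns3  # assumes the ns-3 Python bindings are built and importable
    mesh_id = ns3.dot11s.IeMeshId('my-mesh')   # std::string constructor above
    print(mesh_id.IsBroadcast())               # False for a named mesh id
    pm = ns3.dot11s.IePeerManagement()
    pm.SetPeerOpen(1)                          # open a link with local id 1
    print(pm.SubtypeIsOpen())                  # True after SetPeerOpen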
def register_Ns3FlameFlameHeader_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## flame-header.h (module 'mesh'): ns3::flame::FlameHeader::FlameHeader(ns3::flame::FlameHeader const & arg0) [copy constructor]
cls.add_constructor([param('ns3::flame::FlameHeader const &', 'arg0')])
## flame-header.h (module 'mesh'): ns3::flame::FlameHeader::FlameHeader() [constructor]
cls.add_constructor([])
## flame-header.h (module 'mesh'): void ns3::flame::FlameHeader::AddCost(uint8_t cost) [member function]
cls.add_method('AddCost',
'void',
[param('uint8_t', 'cost')])
## flame-header.h (module 'mesh'): uint32_t ns3::flame::FlameHeader::Deserialize(ns3::Buffer::Iterator start) [member function]
cls.add_method('Deserialize',
'uint32_t',
[param('ns3::Buffer::Iterator', 'start')],
is_virtual=True)
## flame-header.h (module 'mesh'): uint8_t ns3::flame::FlameHeader::GetCost() const [member function]
cls.add_method('GetCost',
'uint8_t',
[],
is_const=True)
## flame-header.h (module 'mesh'): ns3::TypeId ns3::flame::FlameHeader::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## flame-header.h (module 'mesh'): ns3::Mac48Address ns3::flame::FlameHeader::GetOrigDst() const [member function]
cls.add_method('GetOrigDst',
'ns3::Mac48Address',
[],
is_const=True)
## flame-header.h (module 'mesh'): ns3::Mac48Address ns3::flame::FlameHeader::GetOrigSrc() const [member function]
cls.add_method('GetOrigSrc',
'ns3::Mac48Address',
[],
is_const=True)
## flame-header.h (module 'mesh'): uint16_t ns3::flame::FlameHeader::GetProtocol() const [member function]
cls.add_method('GetProtocol',
'uint16_t',
[],
is_const=True)
## flame-header.h (module 'mesh'): uint16_t ns3::flame::FlameHeader::GetSeqno() const [member function]
cls.add_method('GetSeqno',
'uint16_t',
[],
is_const=True)
## flame-header.h (module 'mesh'): uint32_t ns3::flame::FlameHeader::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## flame-header.h (module 'mesh'): static ns3::TypeId ns3::flame::FlameHeader::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## flame-header.h (module 'mesh'): void ns3::flame::FlameHeader::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## flame-header.h (module 'mesh'): void ns3::flame::FlameHeader::Serialize(ns3::Buffer::Iterator start) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::Buffer::Iterator', 'start')],
is_const=True, is_virtual=True)
## flame-header.h (module 'mesh'): void ns3::flame::FlameHeader::SetOrigDst(ns3::Mac48Address dst) [member function]
cls.add_method('SetOrigDst',
'void',
[param('ns3::Mac48Address', 'dst')])
## flame-header.h (module 'mesh'): void ns3::flame::FlameHeader::SetOrigSrc(ns3::Mac48Address OrigSrc) [member function]
cls.add_method('SetOrigSrc',
'void',
[param('ns3::Mac48Address', 'OrigSrc')])
## flame-header.h (module 'mesh'): void ns3::flame::FlameHeader::SetProtocol(uint16_t protocol) [member function]
cls.add_method('SetProtocol',
'void',
[param('uint16_t', 'protocol')])
## flame-header.h (module 'mesh'): void ns3::flame::FlameHeader::SetSeqno(uint16_t seqno) [member function]
cls.add_method('SetSeqno',
'void',
[param('uint16_t', 'seqno')])
return
def register_Ns3FlameFlameProtocol_methods(root_module, cls):
## flame-protocol.h (module 'mesh'): ns3::flame::FlameProtocol::FlameProtocol() [constructor]
cls.add_constructor([])
## flame-protocol.h (module 'mesh'): void ns3::flame::FlameProtocol::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## flame-protocol.h (module 'mesh'): ns3::Mac48Address ns3::flame::FlameProtocol::GetAddress() [member function]
cls.add_method('GetAddress',
'ns3::Mac48Address',
[])
## flame-protocol.h (module 'mesh'): static ns3::TypeId ns3::flame::FlameProtocol::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## flame-protocol.h (module 'mesh'): bool ns3::flame::FlameProtocol::Install(ns3::Ptr<ns3::MeshPointDevice> arg0) [member function]
cls.add_method('Install',
'bool',
[param('ns3::Ptr< ns3::MeshPointDevice >', 'arg0')])
## flame-protocol.h (module 'mesh'): bool ns3::flame::FlameProtocol::RemoveRoutingStuff(uint32_t fromIface, ns3::Mac48Address const source, ns3::Mac48Address const destination, ns3::Ptr<ns3::Packet> packet, uint16_t & protocolType) [member function]
cls.add_method('RemoveRoutingStuff',
'bool',
[param('uint32_t', 'fromIface'), param('ns3::Mac48Address const', 'source'), param('ns3::Mac48Address const', 'destination'), param('ns3::Ptr< ns3::Packet >', 'packet'), param('uint16_t &', 'protocolType')],
is_virtual=True)
## flame-protocol.h (module 'mesh'): void ns3::flame::FlameProtocol::Report(std::ostream & arg0) const [member function]
cls.add_method('Report',
'void',
[param('std::ostream &', 'arg0')],
is_const=True)
## flame-protocol.h (module 'mesh'): bool ns3::flame::FlameProtocol::RequestRoute(uint32_t sourceIface, ns3::Mac48Address const source, ns3::Mac48Address const destination, ns3::Ptr<ns3::Packet const> packet, uint16_t protocolType, ns3::Callback<void, bool, ns3::Ptr<ns3::Packet>, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty> routeReply) [member function]
cls.add_method('RequestRoute',
'bool',
[param('uint32_t', 'sourceIface'), param('ns3::Mac48Address const', 'source'), param('ns3::Mac48Address const', 'destination'), param('ns3::Ptr< ns3::Packet const >', 'packet'), param('uint16_t', 'protocolType'), param('ns3::Callback< void, bool, ns3::Ptr< ns3::Packet >, ns3::Mac48Address, ns3::Mac48Address, unsigned short, unsigned int, ns3::empty, ns3::empty, ns3::empty >', 'routeReply')],
is_virtual=True)
## flame-protocol.h (module 'mesh'): void ns3::flame::FlameProtocol::ResetStats() [member function]
cls.add_method('ResetStats',
'void',
[])
return
def register_Ns3FlameFlameProtocolMac_methods(root_module, cls):
## flame-protocol-mac.h (module 'mesh'): ns3::flame::FlameProtocolMac::FlameProtocolMac(ns3::flame::FlameProtocolMac const & arg0) [copy constructor]
cls.add_constructor([param('ns3::flame::FlameProtocolMac const &', 'arg0')])
## flame-protocol-mac.h (module 'mesh'): ns3::flame::FlameProtocolMac::FlameProtocolMac(uint32_t arg0, ns3::Ptr<ns3::flame::FlameProtocol> arg1) [constructor]
cls.add_constructor([param('uint32_t', 'arg0'), param('ns3::Ptr< ns3::flame::FlameProtocol >', 'arg1')])
## flame-protocol-mac.h (module 'mesh'): uint16_t ns3::flame::FlameProtocolMac::GetChannelId() const [member function]
cls.add_method('GetChannelId',
'uint16_t',
[],
is_const=True)
## flame-protocol-mac.h (module 'mesh'): bool ns3::flame::FlameProtocolMac::Receive(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader const & header) [member function]
cls.add_method('Receive',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader const &', 'header')],
is_virtual=True)
## flame-protocol-mac.h (module 'mesh'): void ns3::flame::FlameProtocolMac::Report(std::ostream & arg0) const [member function]
cls.add_method('Report',
'void',
[param('std::ostream &', 'arg0')],
is_const=True)
## flame-protocol-mac.h (module 'mesh'): void ns3::flame::FlameProtocolMac::ResetStats() [member function]
cls.add_method('ResetStats',
'void',
[])
## flame-protocol-mac.h (module 'mesh'): void ns3::flame::FlameProtocolMac::SetParent(ns3::Ptr<ns3::MeshWifiInterfaceMac> parent) [member function]
cls.add_method('SetParent',
'void',
[param('ns3::Ptr< ns3::MeshWifiInterfaceMac >', 'parent')],
is_virtual=True)
## flame-protocol-mac.h (module 'mesh'): void ns3::flame::FlameProtocolMac::UpdateBeacon(ns3::MeshWifiBeacon & beacon) const [member function]
cls.add_method('UpdateBeacon',
'void',
[param('ns3::MeshWifiBeacon &', 'beacon')],
is_const=True, is_virtual=True)
## flame-protocol-mac.h (module 'mesh'): bool ns3::flame::FlameProtocolMac::UpdateOutcomingFrame(ns3::Ptr<ns3::Packet> packet, ns3::WifiMacHeader & header, ns3::Mac48Address from, ns3::Mac48Address to) [member function]
cls.add_method('UpdateOutcomingFrame',
'bool',
[param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::WifiMacHeader &', 'header'), param('ns3::Mac48Address', 'from'), param('ns3::Mac48Address', 'to')],
is_virtual=True)
return
def register_Ns3FlameFlameRtable_methods(root_module, cls):
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::FlameRtable() [constructor]
cls.add_constructor([])
## flame-rtable.h (module 'mesh'): void ns3::flame::FlameRtable::AddPath(ns3::Mac48Address const destination, ns3::Mac48Address const retransmitter, uint32_t const interface, uint8_t const cost, uint16_t const seqnum) [member function]
cls.add_method('AddPath',
'void',
[param('ns3::Mac48Address const', 'destination'), param('ns3::Mac48Address const', 'retransmitter'), param('uint32_t const', 'interface'), param('uint8_t const', 'cost'), param('uint16_t const', 'seqnum')])
## flame-rtable.h (module 'mesh'): void ns3::flame::FlameRtable::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
is_virtual=True)
## flame-rtable.h (module 'mesh'): static ns3::TypeId ns3::flame::FlameRtable::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::LookupResult ns3::flame::FlameRtable::Lookup(ns3::Mac48Address destination) [member function]
cls.add_method('Lookup',
'ns3::flame::FlameRtable::LookupResult',
[param('ns3::Mac48Address', 'destination')])
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::INTERFACE_ANY [variable]
cls.add_static_attribute('INTERFACE_ANY', 'uint32_t const', is_const=True)
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::MAX_COST [variable]
cls.add_static_attribute('MAX_COST', 'uint32_t const', is_const=True)
return
def register_Ns3FlameFlameRtableLookupResult_methods(root_module, cls):
cls.add_binary_comparison_operator('==')
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::LookupResult::LookupResult(ns3::flame::FlameRtable::LookupResult const & arg0) [copy constructor]
cls.add_constructor([param('ns3::flame::FlameRtable::LookupResult const &', 'arg0')])
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::LookupResult::LookupResult(ns3::Mac48Address r=ns3::Mac48Address::GetBroadcast(), uint32_t i=ns3::flame::FlameRtable::INTERFACE_ANY, uint8_t c=ns3::flame::FlameRtable::MAX_COST, uint16_t s=0) [constructor]
cls.add_constructor([param('ns3::Mac48Address', 'r', default_value='ns3::Mac48Address::GetBroadcast()'), param('uint32_t', 'i', default_value='ns3::flame::FlameRtable::INTERFACE_ANY'), param('uint8_t', 'c', default_value='ns3::flame::FlameRtable::MAX_COST'), param('uint16_t', 's', default_value='0')])
## flame-rtable.h (module 'mesh'): bool ns3::flame::FlameRtable::LookupResult::IsValid() const [member function]
cls.add_method('IsValid',
'bool',
[],
is_const=True)
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::LookupResult::cost [variable]
cls.add_instance_attribute('cost', 'uint8_t', is_const=False)
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::LookupResult::ifIndex [variable]
cls.add_instance_attribute('ifIndex', 'uint32_t', is_const=False)
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::LookupResult::retransmitter [variable]
cls.add_instance_attribute('retransmitter', 'ns3::Mac48Address', is_const=False)
## flame-rtable.h (module 'mesh'): ns3::flame::FlameRtable::LookupResult::seqnum [variable]
cls.add_instance_attribute('seqnum', 'uint16_t', is_const=False)
return
def register_Ns3FlameFlameTag_methods(root_module, cls):
## flame-protocol.h (module 'mesh'): ns3::flame::FlameTag::FlameTag(ns3::flame::FlameTag const & arg0) [copy constructor]
cls.add_constructor([param('ns3::flame::FlameTag const &', 'arg0')])
## flame-protocol.h (module 'mesh'): ns3::flame::FlameTag::FlameTag(ns3::Mac48Address a=ns3::Mac48Address()) [constructor]
cls.add_constructor([param('ns3::Mac48Address', 'a', default_value='ns3::Mac48Address()')])
## flame-protocol.h (module 'mesh'): void ns3::flame::FlameTag::Deserialize(ns3::TagBuffer i) [member function]
cls.add_method('Deserialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_virtual=True)
## flame-protocol.h (module 'mesh'): ns3::TypeId ns3::flame::FlameTag::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## flame-protocol.h (module 'mesh'): uint32_t ns3::flame::FlameTag::GetSerializedSize() const [member function]
cls.add_method('GetSerializedSize',
'uint32_t',
[],
is_const=True, is_virtual=True)
## flame-protocol.h (module 'mesh'): static ns3::TypeId ns3::flame::FlameTag::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## flame-protocol.h (module 'mesh'): void ns3::flame::FlameTag::Print(std::ostream & os) const [member function]
cls.add_method('Print',
'void',
[param('std::ostream &', 'os')],
is_const=True, is_virtual=True)
## flame-protocol.h (module 'mesh'): void ns3::flame::FlameTag::Serialize(ns3::TagBuffer i) const [member function]
cls.add_method('Serialize',
'void',
[param('ns3::TagBuffer', 'i')],
is_const=True, is_virtual=True)
## flame-protocol.h (module 'mesh'): ns3::flame::FlameTag::receiver [variable]
cls.add_instance_attribute('receiver', 'ns3::Mac48Address', is_const=False)
## flame-protocol.h (module 'mesh'): ns3::flame::FlameTag::transmitter [variable]
cls.add_instance_attribute('transmitter', 'ns3::Mac48Address', is_const=False)
return
def register_functions(root_module):
module = root_module
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_dot11s(module.get_submodule('dot11s'), root_module)
register_functions_ns3_flame(module.get_submodule('flame'), root_module)
register_functions_ns3_internal(module.get_submodule('internal'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_dot11s(module, root_module):
## ie-dot11s-id.h (module 'mesh'): extern ns3::Ptr<ns3::AttributeChecker const> ns3::dot11s::MakeIeMeshIdChecker() [free function]
module.add_function('MakeIeMeshIdChecker',
'ns3::Ptr< ns3::AttributeChecker const >',
[])
return
def register_functions_ns3_flame(module, root_module):
return
def register_functions_ns3_internal(module, root_module):
return
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
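# Running this script directly writes the pybindgen-generated binding code to
# stdout (main() wraps sys.stdout in a FileCodeSink), so a typical use is to
# redirect that output into a C++ source file, e.g. (file names are
# illustrative only):
#   python modulegen_script.py > ns3-mesh-bindings.cc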
|
gpl-2.0
|
AlanZatarain/django-photologue
|
photologue/management/commands/plcache.py
|
25
|
1406
|
from django.core.management.base import BaseCommand, CommandError
from optparse import make_option
from photologue.models import PhotoSize, ImageModel
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--reset', '-r', action='store_true', dest='reset', help='Reset photo cache before generating'),
)
help = ('Manages Photologue cache file for the given sizes.')
args = '[sizes]'
requires_model_validation = True
can_import_settings = True
def handle(self, *args, **options):
return create_cache(args, options)
def create_cache(sizes, options):
"""
Creates the cache for the given files
"""
reset = options.get('reset', None)
size_list = [size.strip(' ,') for size in sizes]
if len(size_list) < 1:
sizes = PhotoSize.objects.filter(pre_cache=True)
else:
sizes = PhotoSize.objects.filter(name__in=size_list)
if not len(sizes):
raise CommandError('No photo sizes were found.')
print 'Caching photos, this may take a while...'
for cls in ImageModel.__subclasses__():
for photosize in sizes:
            print 'Caching %s size images' % photosize.name
for obj in cls.objects.all():
if reset:
obj.remove_size(photosize)
obj.create_size(photosize)
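# Illustrative invocations (the size names below are hypothetical and must
# exist as PhotoSize rows; with no size arguments only pre_cache=True sizes
# are processed):
#   ./manage.py plcache thumbnail display
#   ./manage.py plcache --reset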
|
bsd-3-clause
|
SergeyKubrak/django-rosetta
|
rosetta/polib.py
|
1
|
59163
|
# -*- coding: utf-8 -*-
#
# License: MIT (see LICENSE file provided)
# vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4:
"""
**polib** allows you to manipulate, create, and modify gettext files (pot, po
and mo files). You can load existing files, iterate through their entries, add
or modify entries, comments or metadata, etc., or create new po files from
scratch.
**polib** provides a simple and pythonic API via the :func:`~polib.pofile` and
:func:`~polib.mofile` convenience functions.
"""
__author__ = 'David Jean Louis <izimobil@gmail.com>'
__version__ = '1.0.3'
__all__ = ['pofile', 'POFile', 'POEntry', 'mofile', 'MOFile', 'MOEntry',
'default_encoding', 'escape', 'unescape', 'detect_encoding', ]
import array
import codecs
import os
import re
import struct
import sys
import textwrap
# the default encoding to use when encoding cannot be detected
default_encoding = 'utf-8'
# python 2/3 compatibility helpers {{{
if sys.version_info[:2] < (3, 0):
PY3 = False
text_type = unicode
def b(s):
return s
def u(s):
return unicode(s, "unicode_escape")
else:
PY3 = True
text_type = str
def b(s):
return s.encode("latin-1")
def u(s):
return s
# }}}
# _pofile_or_mofile {{{
def _pofile_or_mofile(f, type, **kwargs):
"""
Internal function used by :func:`polib.pofile` and :func:`polib.mofile` to
honor the DRY concept.
"""
# get the file encoding
enc = kwargs.get('encoding')
if enc is None:
enc = detect_encoding(f, type == 'mofile')
# parse the file
kls = type == 'pofile' and _POFileParser or _MOFileParser
parser = kls(
f,
encoding=enc,
check_for_duplicates=kwargs.get('check_for_duplicates', False),
klass=kwargs.get('klass')
)
instance = parser.parse()
instance.wrapwidth = kwargs.get('wrapwidth', 78)
return instance
# }}}
# function pofile() {{{
def pofile(pofile, **kwargs):
"""
Convenience function that parses the po or pot file ``pofile`` and returns
a :class:`~polib.POFile` instance.
Arguments:
``pofile``
string, full or relative path to the po/pot file or its content (data).
``wrapwidth``
integer, the wrap width, only useful when the ``-w`` option was passed
to xgettext (optional, default: ``78``).
``encoding``
string, the encoding to use (e.g. "utf-8") (default: ``None``, the
encoding will be auto-detected).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file (optional, default: ``False``).
``klass``
class which is used to instantiate the return value (optional,
        default: ``None``, the return value will be a :class:`~polib.POFile`
instance).
"""
return _pofile_or_mofile(pofile, 'pofile', **kwargs)
# }}}
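# Minimal usage sketch showing how the POFile returned by pofile() is
# typically inspected (the file path is hypothetical):
#   po = pofile('locale/fr/LC_MESSAGES/messages.po')
#   print(po.percent_translated())
#   for entry in po.untranslated_entries():
#       print(entry.msgid)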
# function mofile() {{{
def mofile(mofile, **kwargs):
"""
Convenience function that parses the mo file ``mofile`` and returns a
:class:`~polib.MOFile` instance.
Arguments:
``mofile``
string, full or relative path to the mo file or its content (data).
``wrapwidth``
integer, the wrap width, only useful when the ``-w`` option was passed
to xgettext to generate the po file that was used to format the mo file
(optional, default: ``78``).
``encoding``
string, the encoding to use (e.g. "utf-8") (default: ``None``, the
encoding will be auto-detected).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file (optional, default: ``False``).
``klass``
class which is used to instantiate the return value (optional,
        default: ``None``, the return value will be a :class:`~polib.MOFile`
instance).
"""
return _pofile_or_mofile(mofile, 'mofile', **kwargs)
# }}}
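# Usage sketch for mofile() (the path is hypothetical); the returned MOFile
# can be written back out as a po file:
#   mo = mofile('locale/fr/LC_MESSAGES/messages.mo')
#   mo.save_as_pofile('messages_recovered.po')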
# function detect_encoding() {{{
def detect_encoding(file, binary_mode=False):
"""
Try to detect the encoding used by the ``file``. The ``file`` argument can
be a PO or MO file path or a string containing the contents of the file.
If the encoding cannot be detected, the function will return the value of
``default_encoding``.
Arguments:
``file``
string, full or relative path to the po/mo file or its content.
``binary_mode``
boolean, set this to True if ``file`` is a mo file.
"""
PATTERN = r'"?Content-Type:.+? charset=([\w_\-:\.]+)'
rxt = re.compile(u(PATTERN))
rxb = re.compile(b(PATTERN))
def charset_exists(charset):
"""Check whether ``charset`` is valid or not."""
try:
codecs.lookup(charset)
except LookupError:
return False
return True
try:
is_file = os.path.exists(file)
except (ValueError, UnicodeEncodeError):
is_file = False
if not is_file:
match = rxt.search(file)
if match:
enc = match.group(1).strip()
if charset_exists(enc):
return enc
else:
# For PY3, always treat as binary
if binary_mode or PY3:
mode = 'rb'
rx = rxb
else:
mode = 'r'
rx = rxt
f = open(file, mode)
for l in f.readlines():
match = rx.search(l)
if match:
f.close()
enc = match.group(1).strip()
if not isinstance(enc, text_type):
enc = enc.decode('utf-8')
if charset_exists(enc):
return enc
f.close()
return default_encoding
# }}}
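# Sketch: given PO content whose header contains, e.g.,
#   "Content-Type: text/plain; charset=UTF-8\n"
# detect_encoding() returns 'UTF-8'; otherwise it falls back to
# default_encoding.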
# function escape() {{{
def escape(st):
"""
Escapes the characters ``\\\\``, ``\\t``, ``\\n``, ``\\r`` and ``"`` in
the given string ``st`` and returns it.
"""
return st.replace('\\', r'\\')\
.replace('\t', r'\t')\
.replace('\r', r'\r')\
.replace('\n', r'\n')\
.replace('\"', r'\"')
# }}}
# function unescape() {{{
def unescape(st):
"""
Unescapes the characters ``\\\\``, ``\\t``, ``\\n``, ``\\r`` and ``"`` in
the given string ``st`` and returns it.
"""
def unescape_repl(m):
m = m.group(1)
if m == 'n':
return '\n'
if m == 't':
return '\t'
if m == 'r':
return '\r'
if m == '\\':
return '\\'
return m # handles escaped double quote
return re.sub(r'\\(\\|n|t|r|")', unescape_repl, st)
# }}}
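# Round-trip sketch (illustrative): escape() and unescape() are inverses for
# the five characters they handle:
#   escape('a\tb"c')            -> 'a\\tb\\"c'
#   unescape(escape('a\tb"c'))  -> 'a\tb"c'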
# class _BaseFile {{{
class _BaseFile(list):
"""
Common base class for the :class:`~polib.POFile` and :class:`~polib.MOFile`
    classes. This class should **not** be instantiated directly.
"""
def __init__(self, *args, **kwargs):
"""
Constructor, accepts the following keyword arguments:
``pofile``
string, the path to the po or mo file, or its content as a string.
``wrapwidth``
integer, the wrap width, only useful when the ``-w`` option was
passed to xgettext (optional, default: ``78``).
``encoding``
string, the encoding to use, defaults to ``default_encoding``
global variable (optional).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file, (optional, default: ``False``).
"""
list.__init__(self)
# the opened file handle
pofile = kwargs.get('pofile', None)
if pofile and os.path.exists(pofile):
self.fpath = pofile
else:
self.fpath = kwargs.get('fpath')
# the width at which lines should be wrapped
self.wrapwidth = kwargs.get('wrapwidth', 78)
# the file encoding
self.encoding = kwargs.get('encoding', default_encoding)
# whether to check for duplicate entries or not
self.check_for_duplicates = kwargs.get('check_for_duplicates', False)
# header
self.header = ''
# both po and mo files have metadata
self.metadata = {}
self.metadata_is_fuzzy = 0
def __unicode__(self):
"""
Returns the unicode representation of the file.
"""
ret = []
entries = [self.metadata_as_entry()] + \
[e for e in self if not e.obsolete]
for entry in entries:
ret.append(entry.__unicode__(self.wrapwidth))
for entry in self.obsolete_entries():
ret.append(entry.__unicode__(self.wrapwidth))
ret = u('\n').join(ret)
assert isinstance(ret, text_type)
#if type(ret) != text_type:
# return unicode(ret, self.encoding)
return ret
if PY3:
def __str__(self):
return self.__unicode__()
else:
def __str__(self):
"""
Returns the string representation of the file.
"""
return unicode(self).encode(self.encoding)
def __contains__(self, entry):
"""
        Overridden ``list`` method to implement the membership test (in and
not in).
The method considers that an entry is in the file if it finds an entry
that has the same msgid (the test is **case sensitive**) and the same
msgctxt (or none for both entries).
Argument:
``entry``
an instance of :class:`~polib._BaseEntry`.
"""
return self.find(entry.msgid, by='msgid', msgctxt=entry.msgctxt) \
is not None
def __eq__(self, other):
return str(self) == str(other)
def append(self, entry):
"""
        Overridden method to check for duplicate entries; if a user tries to
add an entry that is already in the file, the method will raise a
``ValueError`` exception.
Argument:
``entry``
an instance of :class:`~polib._BaseEntry`.
"""
if self.check_for_duplicates and entry in self:
raise ValueError('Entry "%s" already exists' % entry.msgid)
super(_BaseFile, self).append(entry)
def insert(self, index, entry):
"""
        Overridden method to check for duplicate entries; if a user tries to
add an entry that is already in the file, the method will raise a
``ValueError`` exception.
Arguments:
``index``
index at which the entry should be inserted.
``entry``
an instance of :class:`~polib._BaseEntry`.
"""
if self.check_for_duplicates and entry in self:
raise ValueError('Entry "%s" already exists' % entry.msgid)
super(_BaseFile, self).insert(index, entry)
def metadata_as_entry(self):
"""
Returns the file metadata as a :class:`~polib.POFile` instance.
"""
e = POEntry(msgid='')
mdata = self.ordered_metadata()
if mdata:
strs = []
for name, value in mdata:
# Strip whitespace off each line in a multi-line entry
strs.append('%s: %s' % (name, value))
e.msgstr = '\n'.join(strs) + '\n'
if self.metadata_is_fuzzy:
e.flags.append('fuzzy')
return e
def save(self, fpath=None, repr_method='__unicode__'):
"""
Saves the po file to ``fpath``.
If it is an existing file and no ``fpath`` is provided, then the
existing file is rewritten with the modified data.
Keyword arguments:
``fpath``
string, full or relative path to the file.
``repr_method``
string, the method to use for output.
"""
if self.fpath is None and fpath is None:
raise IOError('You must provide a file path to save() method')
contents = getattr(self, repr_method)()
if fpath is None:
fpath = self.fpath
if repr_method == 'to_binary':
fhandle = open(fpath, 'wb')
else:
fhandle = codecs.open(fpath, 'w', self.encoding)
if not isinstance(contents, text_type):
contents = contents.decode(self.encoding)
fhandle.write(contents)
fhandle.close()
# set the file path if not set
if self.fpath is None and fpath:
self.fpath = fpath
def find(self, st, by='msgid', include_obsolete_entries=False,
msgctxt=False):
"""
Find the entry which msgid (or property identified by the ``by``
argument) matches the string ``st``.
Keyword arguments:
``st``
string, the string to search for.
``by``
string, the property to use for comparison (default: ``msgid``).
``include_obsolete_entries``
boolean, whether to also search in entries that are obsolete.
``msgctxt``
string, allows to specify a specific message context for the
search.
"""
if include_obsolete_entries:
entries = self[:]
else:
entries = [e for e in self if not e.obsolete]
for e in entries:
if getattr(e, by) == st:
if msgctxt is not False and e.msgctxt != msgctxt:
continue
return e
return None
def ordered_metadata(self):
"""
Convenience method that returns an ordered version of the metadata
        dictionary. The return value is a list of tuples (metadata name,
metadata_value).
"""
# copy the dict first
metadata = self.metadata.copy()
data_order = [
'Project-Id-Version',
'Report-Msgid-Bugs-To',
'POT-Creation-Date',
'PO-Revision-Date',
'Last-Translator',
'Language-Team',
'MIME-Version',
'Content-Type',
'Content-Transfer-Encoding'
]
ordered_data = []
for data in data_order:
try:
value = metadata.pop(data)
ordered_data.append((data, value))
except KeyError:
pass
# the rest of the metadata will be alphabetically ordered since there
# are no specs for this AFAIK
for data in sorted(metadata.keys()):
value = metadata[data]
ordered_data.append((data, value))
return ordered_data
def to_binary(self):
"""
Return the binary representation of the file.
"""
offsets = []
entries = self.translated_entries()
# the keys are sorted in the .mo file
def cmp(_self, other):
# msgfmt compares entries with msgctxt if it exists
self_msgid = _self.msgctxt and _self.msgctxt or _self.msgid
other_msgid = other.msgctxt and other.msgctxt or other.msgid
if self_msgid > other_msgid:
return 1
elif self_msgid < other_msgid:
return -1
else:
return 0
# add metadata entry
entries.sort(key=lambda o: o.msgctxt or o.msgid)
mentry = self.metadata_as_entry()
#mentry.msgstr = mentry.msgstr.replace('\\n', '').lstrip()
entries = [mentry] + entries
entries_len = len(entries)
ids, strs = b(''), b('')
for e in entries:
# For each string, we need size and file offset. Each string is
# NUL terminated; the NUL does not count into the size.
msgid = b('')
if e.msgctxt:
# Contexts are stored by storing the concatenation of the
# context, a <EOT> byte, and the original string
msgid = self._encode(e.msgctxt + '\4')
if e.msgid_plural:
msgstr = []
for index in sorted(e.msgstr_plural.keys()):
msgstr.append(e.msgstr_plural[index])
msgid += self._encode(e.msgid + '\0' + e.msgid_plural)
msgstr = self._encode('\0'.join(msgstr))
else:
msgid += self._encode(e.msgid)
msgstr = self._encode(e.msgstr)
offsets.append((len(ids), len(msgid), len(strs), len(msgstr)))
ids += msgid + b('\0')
strs += msgstr + b('\0')
# The header is 7 32-bit unsigned integers.
keystart = 7 * 4 + 16 * entries_len
# and the values start after the keys
valuestart = keystart + len(ids)
koffsets = []
voffsets = []
# The string table first has the list of keys, then the list of values.
# Each entry has first the size of the string, then the file offset.
for o1, l1, o2, l2 in offsets:
koffsets += [l1, o1 + keystart]
voffsets += [l2, o2 + valuestart]
offsets = koffsets + voffsets
# check endianness for magic number
if struct.pack('@h', 1) == struct.pack('<h', 1):
magic_number = MOFile.LITTLE_ENDIAN
else:
magic_number = MOFile.BIG_ENDIAN
output = struct.pack(
"Iiiiiii",
# Magic number
magic_number,
# Version
0,
# number of entries
entries_len,
# start of key index
7 * 4,
# start of value index
7 * 4 + entries_len * 8,
# size and offset of hash table, we don't use hash tables
0, keystart
)
if PY3 and sys.version_info.minor > 1: # python 3.2 or superior
output += array.array("i", offsets).tobytes()
else:
output += array.array("i", offsets).tostring()
output += ids
output += strs
return output
def _encode(self, mixed):
"""
Encodes the given ``mixed`` argument with the file encoding if and
        only if it is a unicode string and returns the encoded string.
"""
if isinstance(mixed, text_type):
mixed = mixed.encode(self.encoding)
return mixed
# }}}
# class POFile {{{
class POFile(_BaseFile):
"""
Po (or Pot) file reader/writer.
This class inherits the :class:`~polib._BaseFile` class and, by extension,
the python ``list`` type.
"""
def __unicode__(self):
"""
Returns the unicode representation of the po file.
"""
ret, headers = '', self.header.split('\n')
for header in headers:
if header[:1] in [',', ':']:
ret += '#%s\n' % header
else:
ret += '# %s\n' % header
if not isinstance(ret, text_type):
ret = ret.decode(self.encoding)
return ret + _BaseFile.__unicode__(self)
def save_as_mofile(self, fpath):
"""
Saves the binary representation of the file to given ``fpath``.
Keyword argument:
``fpath``
string, full or relative path to the mo file.
"""
_BaseFile.save(self, fpath, 'to_binary')
def percent_translated(self):
"""
Convenience method that returns the percentage of translated
messages.
"""
total = len([e for e in self if not e.obsolete])
if total == 0:
return 100
translated = len(self.translated_entries())
return int((100.00 / float(total)) * translated)
def translated_entries(self):
"""
Convenience method that returns the list of translated entries.
"""
return [e for e in self if e.translated()]
def untranslated_entries(self):
"""
Convenience method that returns the list of untranslated entries.
"""
return [e for e in self if not e.translated() and not e.obsolete
and not 'fuzzy' in e.flags]
def fuzzy_entries(self):
"""
Convenience method that returns the list of fuzzy entries.
"""
return [e for e in self if 'fuzzy' in e.flags]
def obsolete_entries(self):
"""
Convenience method that returns the list of obsolete entries.
"""
return [e for e in self if e.obsolete]
def merge(self, refpot):
"""
Convenience method that merges the current pofile with the pot file
provided. It behaves exactly as the gettext msgmerge utility:
* comments of this file will be preserved, but extracted comments and
occurrences will be discarded;
* any translations or comments in the file will be discarded, however,
dot comments and file positions will be preserved;
* the fuzzy flags are preserved.
Keyword argument:
``refpot``
object POFile, the reference catalog.
"""
# Store entries in dict/set for faster access
self_entries = dict((entry.msgid, entry) for entry in self)
refpot_msgids = set(entry.msgid for entry in refpot)
# Merge entries that are in the refpot
for entry in refpot:
e = self_entries.get(entry.msgid)
if e is None:
e = POEntry()
self.append(e)
e.merge(entry)
# ok, now we must "obsolete" entries that are not in the refpot anymore
for entry in self:
if entry.msgid not in refpot_msgids:
entry.obsolete = True
# }}}
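# Typical editing workflow with a POFile (sketch; the path and msgids are
# hypothetical):
#   po = pofile('messages.po')
#   entry = po.find('Hello world')
#   if entry is not None:
#       entry.msgstr = 'Bonjour le monde'
#   po.save()
#   po.save_as_mofile('messages.mo')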
# class MOFile {{{
class MOFile(_BaseFile):
"""
Mo file reader/writer.
This class inherits the :class:`~polib._BaseFile` class and, by
extension, the python ``list`` type.
"""
BIG_ENDIAN = 0xde120495
LITTLE_ENDIAN = 0x950412de
def __init__(self, *args, **kwargs):
"""
Constructor, accepts all keywords arguments accepted by
:class:`~polib._BaseFile` class.
"""
_BaseFile.__init__(self, *args, **kwargs)
self.magic_number = None
self.version = 0
def save_as_pofile(self, fpath):
"""
Saves the mofile as a pofile to ``fpath``.
Keyword argument:
``fpath``
string, full or relative path to the file.
"""
_BaseFile.save(self, fpath)
def save(self, fpath=None):
"""
Saves the mofile to ``fpath``.
Keyword argument:
``fpath``
string, full or relative path to the file.
"""
_BaseFile.save(self, fpath, 'to_binary')
def percent_translated(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return 100
def translated_entries(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return self
def untranslated_entries(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return []
def fuzzy_entries(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return []
def obsolete_entries(self):
"""
Convenience method to keep the same interface with POFile instances.
"""
return []
# }}}
# class _BaseEntry {{{
class _BaseEntry(object):
"""
Base class for :class:`~polib.POEntry` and :class:`~polib.MOEntry` classes.
    This class should **not** be instantiated directly.
"""
def __init__(self, *args, **kwargs):
"""
Constructor, accepts the following keyword arguments:
``msgid``
string, the entry msgid.
``msgstr``
string, the entry msgstr.
``msgid_plural``
string, the entry msgid_plural.
``msgstr_plural``
list, the entry msgstr_plural lines.
``msgctxt``
string, the entry context (msgctxt).
``obsolete``
bool, whether the entry is "obsolete" or not.
``encoding``
string, the encoding to use, defaults to ``default_encoding``
global variable (optional).
"""
self.msgid = kwargs.get('msgid', '')
self.msgstr = kwargs.get('msgstr', '')
self.msgid_plural = kwargs.get('msgid_plural', '')
self.msgstr_plural = kwargs.get('msgstr_plural', {})
self.msgctxt = kwargs.get('msgctxt', None)
self.obsolete = kwargs.get('obsolete', False)
self.encoding = kwargs.get('encoding', default_encoding)
def __unicode__(self, wrapwidth=78):
"""
Returns the unicode representation of the entry.
"""
if self.obsolete:
delflag = '#~ '
else:
delflag = ''
ret = []
# write the msgctxt if any
if self.msgctxt is not None:
ret += self._str_field("msgctxt", delflag, "", self.msgctxt,
wrapwidth)
# write the msgid
ret += self._str_field("msgid", delflag, "", self.msgid, wrapwidth)
# write the msgid_plural if any
if self.msgid_plural:
ret += self._str_field("msgid_plural", delflag, "",
self.msgid_plural, wrapwidth)
if self.msgstr_plural:
# write the msgstr_plural if any
msgstrs = self.msgstr_plural
keys = list(msgstrs)
keys.sort()
for index in keys:
msgstr = msgstrs[index]
plural_index = '[%s]' % index
ret += self._str_field("msgstr", delflag, plural_index, msgstr,
wrapwidth)
else:
# otherwise write the msgstr
ret += self._str_field("msgstr", delflag, "", self.msgstr,
wrapwidth)
ret.append('')
ret = u('\n').join(ret)
return ret
if PY3:
def __str__(self):
return self.__unicode__()
else:
def __str__(self):
"""
Returns the string representation of the entry.
"""
return unicode(self).encode(self.encoding)
def __eq__(self, other):
return str(self) == str(other)
def _str_field(self, fieldname, delflag, plural_index, field,
wrapwidth=78):
lines = field.splitlines(True)
if len(lines) > 1:
lines = [''] + lines # start with initial empty line
else:
escaped_field = escape(field)
specialchars_count = 0
for c in ['\\', '\n', '\r', '\t', '"']:
specialchars_count += field.count(c)
# comparison must take into account fieldname length + one space
# + 2 quotes (eg. msgid "<string>")
flength = len(fieldname) + 3
if plural_index:
flength += len(plural_index)
real_wrapwidth = wrapwidth - flength + specialchars_count
if wrapwidth > 0 and len(field) > real_wrapwidth:
# Wrap the line but take field name into account
lines = [''] + [unescape(item) for item in wrap(
escaped_field,
wrapwidth - 2, # 2 for quotes ""
drop_whitespace=False,
break_long_words=False
)]
else:
lines = [field]
if fieldname.startswith('previous_'):
# quick and dirty trick to get the real field name
fieldname = fieldname[9:]
ret = ['%s%s%s "%s"' % (delflag, fieldname, plural_index,
escape(lines.pop(0)))]
for mstr in lines:
#import pdb; pdb.set_trace()
ret.append('%s"%s"' % (delflag, escape(mstr)))
return ret
# }}}
# class POEntry {{{
class POEntry(_BaseEntry):
"""
Represents a po file entry.
"""
def __init__(self, *args, **kwargs):
"""
Constructor, accepts the following keyword arguments:
``comment``
string, the entry comment.
``tcomment``
string, the entry translator comment.
``occurrences``
list, the entry occurrences.
``flags``
list, the entry flags.
``previous_msgctxt``
string, the entry previous context.
``previous_msgid``
string, the entry previous msgid.
``previous_msgid_plural``
string, the entry previous msgid_plural.
"""
_BaseEntry.__init__(self, *args, **kwargs)
self.comment = kwargs.get('comment', '')
self.tcomment = kwargs.get('tcomment', '')
self.occurrences = kwargs.get('occurrences', [])
self.flags = kwargs.get('flags', [])
self.previous_msgctxt = kwargs.get('previous_msgctxt', None)
self.previous_msgid = kwargs.get('previous_msgid', None)
self.previous_msgid_plural = kwargs.get('previous_msgid_plural', None)
def __unicode__(self, wrapwidth=78):
"""
Returns the unicode representation of the entry.
"""
if self.obsolete:
return _BaseEntry.__unicode__(self, wrapwidth)
ret = []
# comments first, if any (with text wrapping as xgettext does)
comments = [('comment', '#. '), ('tcomment', '# ')]
for c in comments:
val = getattr(self, c[0])
if val:
for comment in val.split('\n'):
if wrapwidth > 0 and len(comment) + len(c[1]) > wrapwidth:
ret += wrap(
comment,
wrapwidth,
initial_indent=c[1],
subsequent_indent=c[1],
break_long_words=False
)
else:
ret.append('%s%s' % (c[1], comment))
# occurrences (with text wrapping as xgettext does)
if self.occurrences:
filelist = []
for fpath, lineno in self.occurrences:
if lineno:
filelist.append('%s:%s' % (fpath, lineno))
else:
filelist.append(fpath)
filestr = ' '.join(filelist)
if wrapwidth > 0 and len(filestr) + 3 > wrapwidth:
                # textwrap splits words that contain a hyphen, which is not
                # what we want for filenames, so the dirty hack is to
                # temporarily replace hyphens with a char that a filename
                # cannot contain, like "*"
ret += [l.replace('*', '-') for l in wrap(
filestr.replace('-', '*'),
wrapwidth,
initial_indent='#: ',
subsequent_indent='#: ',
break_long_words=False
)]
else:
ret.append('#: ' + filestr)
# flags (TODO: wrapping ?)
if self.flags:
ret.append('#, %s' % ', '.join(self.flags))
# previous context and previous msgid/msgid_plural
fields = ['previous_msgctxt', 'previous_msgid',
'previous_msgid_plural']
for f in fields:
val = getattr(self, f)
if val:
ret += self._str_field(f, "#| ", "", val, wrapwidth)
ret.append(_BaseEntry.__unicode__(self, wrapwidth))
ret = u('\n').join(ret)
assert isinstance(ret, text_type)
#if type(ret) != types.UnicodeType:
# return unicode(ret, self.encoding)
return ret
def __cmp__(self, other):
"""
Called by comparison operations if rich comparison is not defined.
"""
# First: Obsolete test
if self.obsolete != other.obsolete:
if self.obsolete:
return -1
else:
return 1
# Work on a copy to protect original
occ1 = sorted(self.occurrences[:])
occ2 = sorted(other.occurrences[:])
pos = 0
for entry1 in occ1:
try:
entry2 = occ2[pos]
except IndexError:
return 1
pos = pos + 1
if entry1[0] != entry2[0]:
if entry1[0] > entry2[0]:
return 1
else:
return -1
if entry1[1] != entry2[1]:
if entry1[1] > entry2[1]:
return 1
else:
return -1
# Finally: Compare message ID
if self.msgid > other.msgid:
return 1
elif self.msgid < other.msgid:
return -1
return 0
def __gt__(self, other):
return self.__cmp__(other) > 0
def __lt__(self, other):
return self.__cmp__(other) < 0
def __ge__(self, other):
return self.__cmp__(other) >= 0
def __le__(self, other):
return self.__cmp__(other) <= 0
def __eq__(self, other):
return self.__cmp__(other) == 0
def __ne__(self, other):
return self.__cmp__(other) != 0
def __hash__(self):
return hash((self.msgid, self.msgstr))
def translated(self):
"""
Returns ``True`` if the entry has been translated or ``False``
otherwise.
"""
if self.obsolete or 'fuzzy' in self.flags:
return False
if self.msgstr != '':
return True
if self.msgstr_plural:
for pos in self.msgstr_plural:
if self.msgstr_plural[pos] == '':
return False
return True
return False
def merge(self, other):
"""
Merge the current entry with the given pot entry.
"""
self.msgid = other.msgid
self.msgctxt = other.msgctxt
self.occurrences = other.occurrences
self.comment = other.comment
fuzzy = 'fuzzy' in self.flags
self.flags = other.flags[:] # clone flags
if fuzzy:
self.flags.append('fuzzy')
self.msgid_plural = other.msgid_plural
self.obsolete = other.obsolete
self.previous_msgctxt = other.previous_msgctxt
self.previous_msgid = other.previous_msgid
self.previous_msgid_plural = other.previous_msgid_plural
if other.msgstr_plural:
for pos in other.msgstr_plural:
try:
# keep existing translation at pos if any
self.msgstr_plural[pos]
except KeyError:
self.msgstr_plural[pos] = ''
# }}}
# class MOEntry {{{
class MOEntry(_BaseEntry):
"""
Represents a mo file entry.
"""
pass
# }}}
# class _POFileParser {{{
class _POFileParser(object):
"""
    A finite state machine to efficiently and correctly parse the po
    file format.
"""
def __init__(self, pofile, *args, **kwargs):
"""
Constructor.
Keyword arguments:
``pofile``
string, path to the po file or its content
``encoding``
string, the encoding to use, defaults to ``default_encoding``
global variable (optional).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file (optional, default: ``False``).
"""
enc = kwargs.get('encoding', default_encoding)
if os.path.exists(pofile):
try:
self.fhandle = codecs.open(pofile, 'rU', enc)
except LookupError:
enc = default_encoding
self.fhandle = codecs.open(pofile, 'rU', enc)
else:
self.fhandle = pofile.splitlines()
klass = kwargs.get('klass')
if klass is None:
klass = POFile
self.instance = klass(
pofile=pofile,
encoding=enc,
check_for_duplicates=kwargs.get('check_for_duplicates', False)
)
self.transitions = {}
self.current_entry = POEntry()
self.current_state = 'ST'
self.current_token = None
# two memo flags used in handlers
self.msgstr_index = 0
self.entry_obsolete = 0
# Configure the state machine, by adding transitions.
# Signification of symbols:
# * ST: Beginning of the file (start)
# * HE: Header
# * TC: a translation comment
# * GC: a generated comment
        # * OC: a file/line occurrence
# * FL: a flags line
# * CT: a message context
# * PC: a previous msgctxt
# * PM: a previous msgid
# * PP: a previous msgid_plural
# * MI: a msgid
# * MP: a msgid plural
# * MS: a msgstr
# * MX: a msgstr plural
# * MC: a msgid or msgstr continuation line
all = ['ST', 'HE', 'GC', 'OC', 'FL', 'CT', 'PC', 'PM', 'PP', 'TC',
'MS', 'MP', 'MX', 'MI']
self.add('TC', ['ST', 'HE'], 'HE')
self.add('TC', ['GC', 'OC', 'FL', 'TC', 'PC', 'PM', 'PP', 'MS',
'MP', 'MX', 'MI'], 'TC')
self.add('GC', all, 'GC')
self.add('OC', all, 'OC')
self.add('FL', all, 'FL')
self.add('PC', all, 'PC')
self.add('PM', all, 'PM')
self.add('PP', all, 'PP')
self.add('CT', ['ST', 'HE', 'GC', 'OC', 'FL', 'TC', 'PC', 'PM',
'PP', 'MS', 'MX'], 'CT')
self.add('MI', ['ST', 'HE', 'GC', 'OC', 'FL', 'CT', 'TC', 'PC',
'PM', 'PP', 'MS', 'MX'], 'MI')
self.add('MP', ['TC', 'GC', 'PC', 'PM', 'PP', 'MI'], 'MP')
self.add('MS', ['MI', 'MP', 'TC'], 'MS')
self.add('MX', ['MI', 'MX', 'MP', 'TC'], 'MX')
self.add('MC', ['CT', 'MI', 'MP', 'MS', 'MX', 'PM', 'PP', 'PC'], 'MC')
def parse(self):
"""
Run the state machine, parse the file line by line and call process()
with the current matched symbol.
"""
i = 0
keywords = {
'msgctxt': 'CT',
'msgid': 'MI',
'msgstr': 'MS',
'msgid_plural': 'MP',
}
prev_keywords = {
'msgid_plural': 'PP',
'msgid': 'PM',
'msgctxt': 'PC',
}
for line in self.fhandle:
i += 1
line = line.strip()
if line == '':
continue
tokens = line.split(None, 2)
nb_tokens = len(tokens)
if tokens[0] == '#~|':
continue
if tokens[0] == '#~' and nb_tokens > 1:
line = line[3:].strip()
tokens = tokens[1:]
nb_tokens -= 1
self.entry_obsolete = 1
else:
self.entry_obsolete = 0
# Take care of keywords like
# msgid, msgid_plural, msgctxt & msgstr.
if tokens[0] in keywords and nb_tokens > 1:
line = line[len(tokens[0]):].lstrip()
if re.search(r'([^\\]|^)"', line[1:-1]):
raise IOError('Syntax error in po file %s (line %s): '
'unescaped double quote found' %
(self.instance.fpath, i))
self.current_token = line
self.process(keywords[tokens[0]], i)
continue
self.current_token = line
if tokens[0] == '#:':
if nb_tokens <= 1:
continue
# we are on a occurrences line
self.process('OC', i)
elif line[:1] == '"':
# we are on a continuation line
if re.search(r'([^\\]|^)"', line[1:-1]):
raise IOError('Syntax error in po file %s (line %s): '
'unescaped double quote found' %
(self.instance.fpath, i))
self.process('MC', i)
elif line[:7] == 'msgstr[':
# we are on a msgstr plural
self.process('MX', i)
elif tokens[0] == '#,':
if nb_tokens <= 1:
continue
# we are on a flags line
self.process('FL', i)
elif tokens[0] == '#' or tokens[0].startswith('##'):
if line == '#':
line += ' '
# we are on a translator comment line
self.process('TC', i)
elif tokens[0] == '#.':
if nb_tokens <= 1:
continue
# we are on a generated comment line
self.process('GC', i)
elif tokens[0] == '#|':
if nb_tokens <= 1:
raise IOError('Syntax error in po file %s (line %s)' %
(self.instance.fpath, i))
# Remove the marker and any whitespace right after that.
line = line[2:].lstrip()
self.current_token = line
if tokens[1].startswith('"'):
# Continuation of previous metadata.
self.process('MC', i)
continue
if nb_tokens == 2:
# Invalid continuation line.
raise IOError('Syntax error in po file %s (line %s): '
'invalid continuation line' %
(self.instance.fpath, i))
# we are on a "previous translation" comment line,
if tokens[1] not in prev_keywords:
# Unknown keyword in previous translation comment.
raise IOError('Syntax error in po file %s (line %s): '
'unknown keyword %s' %
(self.instance.fpath, i, tokens[1]))
# Remove the keyword and any whitespace
# between it and the starting quote.
line = line[len(tokens[1]):].lstrip()
self.current_token = line
self.process(prev_keywords[tokens[1]], i)
else:
raise IOError('Syntax error in po file %s (line %s)' %
(self.instance.fpath, i))
if self.current_entry:
# since entries are added when another entry is found, we must add
# the last entry here (only if there are lines)
self.instance.append(self.current_entry)
# before returning the instance, check if there's metadata and if
# so extract it in a dict
metadataentry = self.instance.find('')
if metadataentry: # metadata found
# remove the entry
self.instance.remove(metadataentry)
self.instance.metadata_is_fuzzy = metadataentry.flags
key = None
for msg in metadataentry.msgstr.splitlines():
try:
key, val = msg.split(':', 1)
self.instance.metadata[key] = val.strip()
except (ValueError, KeyError):
if key is not None:
self.instance.metadata[key] += '\n' + msg.strip()
# close opened file
if not isinstance(self.fhandle, list): # must be file
self.fhandle.close()
return self.instance
def add(self, symbol, states, next_state):
"""
Add a transition to the state machine.
Keywords arguments:
``symbol``
string, the matched token (two chars symbol).
``states``
list, a list of states (two chars symbols).
``next_state``
the next state the fsm will have after the action.
"""
for state in states:
action = getattr(self, 'handle_%s' % next_state.lower())
self.transitions[(symbol, state)] = (action, next_state)
def process(self, symbol, linenum):
"""
Process the transition corresponding to the current state and the
symbol provided.
Keywords arguments:
``symbol``
string, the matched token (two chars symbol).
``linenum``
integer, the current line number of the parsed file.
"""
try:
(action, state) = self.transitions[(symbol, self.current_state)]
if action():
self.current_state = state
except Exception:
raise IOError('Syntax error in po file (line %s)' % linenum)
# state handlers
def handle_he(self):
"""Handle a header comment."""
if self.instance.header != '':
self.instance.header += '\n'
self.instance.header += self.current_token[2:]
return 1
def handle_tc(self):
"""Handle a translator comment."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
if self.current_entry.tcomment != '':
self.current_entry.tcomment += '\n'
tcomment = self.current_token.lstrip('#')
if tcomment.startswith(' '):
tcomment = tcomment[1:]
self.current_entry.tcomment += tcomment
return True
def handle_gc(self):
"""Handle a generated comment."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
if self.current_entry.comment != '':
self.current_entry.comment += '\n'
self.current_entry.comment += self.current_token[3:]
return True
def handle_oc(self):
"""Handle a file:num occurence."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
occurrences = self.current_token[3:].split()
for occurrence in occurrences:
if occurrence != '':
try:
fil, line = occurrence.split(':')
if not line.isdigit():
fil = fil + line
line = ''
self.current_entry.occurrences.append((fil, line))
except (ValueError, AttributeError):
self.current_entry.occurrences.append((occurrence, ''))
return True
def handle_fl(self):
"""Handle a flags line."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
self.current_entry.flags += self.current_token[3:].split(', ')
return True
def handle_pp(self):
"""Handle a previous msgid_plural line."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
self.current_entry.previous_msgid_plural = \
unescape(self.current_token[1:-1])
return True
def handle_pm(self):
"""Handle a previous msgid line."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
self.current_entry.previous_msgid = \
unescape(self.current_token[1:-1])
return True
def handle_pc(self):
"""Handle a previous msgctxt line."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
self.current_entry.previous_msgctxt = \
unescape(self.current_token[1:-1])
return True
def handle_ct(self):
"""Handle a msgctxt."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
self.current_entry.msgctxt = unescape(self.current_token[1:-1])
return True
def handle_mi(self):
"""Handle a msgid."""
if self.current_state in ['MC', 'MS', 'MX']:
self.instance.append(self.current_entry)
self.current_entry = POEntry()
self.current_entry.obsolete = self.entry_obsolete
self.current_entry.msgid = unescape(self.current_token[1:-1])
return True
def handle_mp(self):
"""Handle a msgid plural."""
self.current_entry.msgid_plural = unescape(self.current_token[1:-1])
return True
def handle_ms(self):
"""Handle a msgstr."""
self.current_entry.msgstr = unescape(self.current_token[1:-1])
return True
def handle_mx(self):
"""Handle a msgstr plural."""
index, value = self.current_token[7], self.current_token[11:-1]
self.current_entry.msgstr_plural[index] = unescape(value)
self.msgstr_index = index
return True
def handle_mc(self):
"""Handle a msgid or msgstr continuation line."""
token = unescape(self.current_token[1:-1])
if self.current_state == 'CT':
self.current_entry.msgctxt += token
elif self.current_state == 'MI':
self.current_entry.msgid += token
elif self.current_state == 'MP':
self.current_entry.msgid_plural += token
elif self.current_state == 'MS':
self.current_entry.msgstr += token
elif self.current_state == 'MX':
self.current_entry.msgstr_plural[self.msgstr_index] += token
elif self.current_state == 'PP':
token = token[3:]
self.current_entry.previous_msgid_plural += token
elif self.current_state == 'PM':
token = token[3:]
self.current_entry.previous_msgid += token
elif self.current_state == 'PC':
token = token[3:]
self.current_entry.previous_msgctxt += token
# don't change the current state
return False
# }}}
# class _MOFileParser {{{
class _MOFileParser(object):
"""
A class to parse binary mo files.
"""
def __init__(self, mofile, *args, **kwargs):
"""
Constructor.
Keyword arguments:
``mofile``
string, path to the mo file or its content
``encoding``
string, the encoding to use, defaults to ``default_encoding``
global variable (optional).
``check_for_duplicates``
whether to check for duplicate entries when adding entries to the
file (optional, default: ``False``).
"""
self.fhandle = open(mofile, 'rb')
klass = kwargs.get('klass')
if klass is None:
klass = MOFile
self.instance = klass(
fpath=mofile,
encoding=kwargs.get('encoding', default_encoding),
check_for_duplicates=kwargs.get('check_for_duplicates', False)
)
def parse(self):
"""
Build the instance with the file handle provided in the
constructor.
"""
# parse magic number
magic_number = self._readbinary('<I', 4)
if magic_number == MOFile.LITTLE_ENDIAN:
ii = '<II'
elif magic_number == MOFile.BIG_ENDIAN:
ii = '>II'
else:
raise IOError('Invalid mo file, magic number is incorrect !')
self.instance.magic_number = magic_number
# parse the version number and the number of strings
self.instance.version, numofstrings = self._readbinary(ii, 8)
# original strings and translation strings hash table offset
msgids_hash_offset, msgstrs_hash_offset = self._readbinary(ii, 8)
# move to msgid hash table and read length and offset of msgids
self.fhandle.seek(msgids_hash_offset)
msgids_index = []
for i in range(numofstrings):
msgids_index.append(self._readbinary(ii, 8))
# move to msgstr hash table and read length and offset of msgstrs
self.fhandle.seek(msgstrs_hash_offset)
msgstrs_index = []
for i in range(numofstrings):
msgstrs_index.append(self._readbinary(ii, 8))
# build entries
encoding = self.instance.encoding
for i in range(numofstrings):
self.fhandle.seek(msgids_index[i][1])
msgid = self.fhandle.read(msgids_index[i][0])
self.fhandle.seek(msgstrs_index[i][1])
msgstr = self.fhandle.read(msgstrs_index[i][0])
if i == 0: # metadata
raw_metadata, metadata = msgstr.split(b('\n')), {}
for line in raw_metadata:
tokens = line.split(b(':'), 1)
if tokens[0] != b(''):
try:
k = tokens[0].decode(encoding)
v = tokens[1].decode(encoding)
metadata[k] = v.strip()
except IndexError:
metadata[k] = u('')
self.instance.metadata = metadata
continue
# test if we have a plural entry
msgid_tokens = msgid.split(b('\0'))
if len(msgid_tokens) > 1:
entry = self._build_entry(
msgid=msgid_tokens[0],
msgid_plural=msgid_tokens[1],
msgstr_plural=dict((k, v) for k, v in
enumerate(msgstr.split(b('\0'))))
)
else:
entry = self._build_entry(msgid=msgid, msgstr=msgstr)
self.instance.append(entry)
# close opened file
self.fhandle.close()
return self.instance
def _build_entry(self, msgid, msgstr=None, msgid_plural=None,
msgstr_plural=None):
msgctxt_msgid = msgid.split(b('\x04'))
encoding = self.instance.encoding
if len(msgctxt_msgid) > 1:
kwargs = {
'msgctxt': msgctxt_msgid[0].decode(encoding),
'msgid': msgctxt_msgid[1].decode(encoding),
}
else:
kwargs = {'msgid': msgid.decode(encoding)}
if msgstr:
kwargs['msgstr'] = msgstr.decode(encoding)
if msgid_plural:
kwargs['msgid_plural'] = msgid_plural.decode(encoding)
if msgstr_plural:
for k in msgstr_plural:
msgstr_plural[k] = msgstr_plural[k].decode(encoding)
kwargs['msgstr_plural'] = msgstr_plural
return MOEntry(**kwargs)
def _readbinary(self, fmt, numbytes):
"""
        Private method that unpacks n bytes of data using format <fmt>.
        It returns a tuple, or a single value if the tuple length is 1.
"""
bytes = self.fhandle.read(numbytes)
tup = struct.unpack(fmt, bytes)
if len(tup) == 1:
return tup[0]
return tup
# }}}
# class TextWrapper {{{
class TextWrapper(textwrap.TextWrapper):
"""
    Subclass of textwrap.TextWrapper that backports the
drop_whitespace option.
"""
def __init__(self, *args, **kwargs):
drop_whitespace = kwargs.pop('drop_whitespace', True)
textwrap.TextWrapper.__init__(self, *args, **kwargs)
self.drop_whitespace = drop_whitespace
def _wrap_chunks(self, chunks):
"""_wrap_chunks(chunks : [string]) -> [string]
Wrap a sequence of text chunks and return a list of lines of
length 'self.width' or less. (If 'break_long_words' is false,
some lines may be longer than this.) Chunks correspond roughly
to words and the whitespace between them: each chunk is
indivisible (modulo 'break_long_words'), but a line break can
come between any two chunks. Chunks should not have internal
whitespace; ie. a chunk is either all whitespace or a "word".
Whitespace chunks will be removed from the beginning and end of
lines, but apart from that whitespace is preserved.
"""
lines = []
if self.width <= 0:
raise ValueError("invalid width %r (must be > 0)" % self.width)
# Arrange in reverse order so items can be efficiently popped
        # from a stack of chunks.
chunks.reverse()
while chunks:
# Start the list of chunks that will make up the current line.
# cur_len is just the length of all the chunks in cur_line.
cur_line = []
cur_len = 0
# Figure out which static string will prefix this line.
if lines:
indent = self.subsequent_indent
else:
indent = self.initial_indent
# Maximum width for this line.
width = self.width - len(indent)
# First chunk on line is whitespace -- drop it, unless this
# is the very beginning of the text (ie. no lines started yet).
if self.drop_whitespace and chunks[-1].strip() == '' and lines:
del chunks[-1]
while chunks:
l = len(chunks[-1])
# Can at least squeeze this chunk onto the current line.
if cur_len + l <= width:
cur_line.append(chunks.pop())
cur_len += l
# Nope, this line is full.
else:
break
# The current line is full, and the next chunk is too big to
# fit on *any* line (not just this one).
if chunks and len(chunks[-1]) > width:
self._handle_long_word(chunks, cur_line, cur_len, width)
# If the last chunk on this line is all whitespace, drop it.
if self.drop_whitespace and cur_line and not cur_line[-1].strip():
del cur_line[-1]
# Convert current line back to a string and store it in list
# of all lines (return value).
if cur_line:
lines.append(indent + ''.join(cur_line))
return lines
# }}}
# function wrap() {{{
def wrap(text, width=70, **kwargs):
"""
Wrap a single paragraph of text, returning a list of wrapped lines.
"""
if sys.version_info < (2, 6):
return TextWrapper(width=width, **kwargs).wrap(text)
return textwrap.wrap(text, width=width, **kwargs)
# }}}
|
mit
|
grace-/opencv-3.0.0-cvpr
|
opencv/platforms/android/service/test_native.py
|
5
|
1233
|
#!/usr/bin/python
import os
import sys
DEVICE_NAME = ""
DEVICE_STR = ""
DEVICE_ARCH = "armeabi"
LOCAL_LOG_PATH = os.path.join(os.getcwd(), "logs")
DEVICE_LOG_PATH = "/sdcard/OpenCVEngineLogs"
DEVICE_BIN_PATH = "/data/data/EngineTest"
def RunTestApp(AppName):
TestLog = os.path.join(DEVICE_LOG_PATH, AppName + "_" + DEVICE_ARCH + ".xml")
os.system("adb %s shell \"LD_LIBRARY_PATH=%s:$LD_LIBRARY_PATH;%s --gtest_output=\"xml:%s\"\"" % (DEVICE_STR, DEVICE_BIN_PATH, os.path.join(DEVICE_BIN_PATH, AppName), TestLog))
os.system("adb %s pull \"%s\" \"%s\"" % (DEVICE_STR, TestLog, LOCAL_LOG_PATH))
if (__name__ == "__main__"):
if (3 == len(sys.argv)):
DEVICE_ARCH = sys.argv[1]
DEVICE_NAME = sys.argv[2]
if (DEVICE_NAME != ""):
DEVICE_STR = "-s \"" + DEVICE_NAME + "\""
if (not os.path.exists(LOCAL_LOG_PATH)):
os.makedirs(LOCAL_LOG_PATH)
print("Waiting for device \"%s\" with arch \"%s\" ..." % (DEVICE_NAME, DEVICE_ARCH))
os.system("adb %s wait-for-device" % DEVICE_STR)
os.system("adb %s shell rm -r \"%s\"" % (DEVICE_STR, DEVICE_LOG_PATH))
os.system("adb %s shell mkdir -p \"%s\"" % (DEVICE_STR, DEVICE_LOG_PATH))
RunTestApp("OpenCVEngineTestApp")
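# Illustrative invocation (the device serial is hypothetical):
#   python test_native.py armeabi-v7a 0123456789ABCDEF
# Run with no arguments to use the defaults above (the default adb device and
# "armeabi" test binaries).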
|
bsd-3-clause
|
hernandito/SickRage
|
lib/unidecode/x074.py
|
252
|
4696
|
data = (
'Han ', # 0x00
'Xuan ', # 0x01
'Yan ', # 0x02
'Qiu ', # 0x03
'Quan ', # 0x04
'Lang ', # 0x05
'Li ', # 0x06
'Xiu ', # 0x07
'Fu ', # 0x08
'Liu ', # 0x09
'Ye ', # 0x0a
'Xi ', # 0x0b
'Ling ', # 0x0c
'Li ', # 0x0d
'Jin ', # 0x0e
'Lian ', # 0x0f
'Suo ', # 0x10
'Chiisai ', # 0x11
'[?] ', # 0x12
'Wan ', # 0x13
'Dian ', # 0x14
'Pin ', # 0x15
'Zhan ', # 0x16
'Cui ', # 0x17
'Min ', # 0x18
'Yu ', # 0x19
'Ju ', # 0x1a
'Chen ', # 0x1b
'Lai ', # 0x1c
'Wen ', # 0x1d
'Sheng ', # 0x1e
'Wei ', # 0x1f
'Dian ', # 0x20
'Chu ', # 0x21
'Zhuo ', # 0x22
'Pei ', # 0x23
'Cheng ', # 0x24
'Hu ', # 0x25
'Qi ', # 0x26
'E ', # 0x27
'Kun ', # 0x28
'Chang ', # 0x29
'Qi ', # 0x2a
'Beng ', # 0x2b
'Wan ', # 0x2c
'Lu ', # 0x2d
'Cong ', # 0x2e
'Guan ', # 0x2f
'Yan ', # 0x30
'Diao ', # 0x31
'Bei ', # 0x32
'Lin ', # 0x33
'Qin ', # 0x34
'Pi ', # 0x35
'Pa ', # 0x36
'Que ', # 0x37
'Zhuo ', # 0x38
'Qin ', # 0x39
'Fa ', # 0x3a
'[?] ', # 0x3b
'Qiong ', # 0x3c
'Du ', # 0x3d
'Jie ', # 0x3e
'Hun ', # 0x3f
'Yu ', # 0x40
'Mao ', # 0x41
'Mei ', # 0x42
'Chun ', # 0x43
'Xuan ', # 0x44
'Ti ', # 0x45
'Xing ', # 0x46
'Dai ', # 0x47
'Rou ', # 0x48
'Min ', # 0x49
'Zhen ', # 0x4a
'Wei ', # 0x4b
'Ruan ', # 0x4c
'Huan ', # 0x4d
'Jie ', # 0x4e
'Chuan ', # 0x4f
'Jian ', # 0x50
'Zhuan ', # 0x51
'Yang ', # 0x52
'Lian ', # 0x53
'Quan ', # 0x54
'Xia ', # 0x55
'Duan ', # 0x56
'Yuan ', # 0x57
'Ye ', # 0x58
'Nao ', # 0x59
'Hu ', # 0x5a
'Ying ', # 0x5b
'Yu ', # 0x5c
'Huang ', # 0x5d
'Rui ', # 0x5e
'Se ', # 0x5f
'Liu ', # 0x60
'Shi ', # 0x61
'Rong ', # 0x62
'Suo ', # 0x63
'Yao ', # 0x64
'Wen ', # 0x65
'Wu ', # 0x66
'Jin ', # 0x67
'Jin ', # 0x68
'Ying ', # 0x69
'Ma ', # 0x6a
'Tao ', # 0x6b
'Liu ', # 0x6c
'Tang ', # 0x6d
'Li ', # 0x6e
'Lang ', # 0x6f
'Gui ', # 0x70
'Zhen ', # 0x71
'Qiang ', # 0x72
'Cuo ', # 0x73
'Jue ', # 0x74
'Zhao ', # 0x75
'Yao ', # 0x76
'Ai ', # 0x77
'Bin ', # 0x78
'Tu ', # 0x79
'Chang ', # 0x7a
'Kun ', # 0x7b
'Zhuan ', # 0x7c
'Cong ', # 0x7d
'Jin ', # 0x7e
'Yi ', # 0x7f
'Cui ', # 0x80
'Cong ', # 0x81
'Qi ', # 0x82
'Li ', # 0x83
'Ying ', # 0x84
'Suo ', # 0x85
'Qiu ', # 0x86
'Xuan ', # 0x87
'Ao ', # 0x88
'Lian ', # 0x89
'Man ', # 0x8a
'Zhang ', # 0x8b
'Yin ', # 0x8c
'[?] ', # 0x8d
'Ying ', # 0x8e
'Zhi ', # 0x8f
'Lu ', # 0x90
'Wu ', # 0x91
'Deng ', # 0x92
'Xiou ', # 0x93
'Zeng ', # 0x94
'Xun ', # 0x95
'Qu ', # 0x96
'Dang ', # 0x97
'Lin ', # 0x98
'Liao ', # 0x99
'Qiong ', # 0x9a
'Su ', # 0x9b
'Huang ', # 0x9c
'Gui ', # 0x9d
'Pu ', # 0x9e
'Jing ', # 0x9f
'Fan ', # 0xa0
'Jin ', # 0xa1
'Liu ', # 0xa2
'Ji ', # 0xa3
'[?] ', # 0xa4
'Jing ', # 0xa5
'Ai ', # 0xa6
'Bi ', # 0xa7
'Can ', # 0xa8
'Qu ', # 0xa9
'Zao ', # 0xaa
'Dang ', # 0xab
'Jiao ', # 0xac
'Gun ', # 0xad
'Tan ', # 0xae
'Hui ', # 0xaf
'Huan ', # 0xb0
'Se ', # 0xb1
'Sui ', # 0xb2
'Tian ', # 0xb3
'[?] ', # 0xb4
'Yu ', # 0xb5
'Jin ', # 0xb6
'Lu ', # 0xb7
'Bin ', # 0xb8
'Shou ', # 0xb9
'Wen ', # 0xba
'Zui ', # 0xbb
'Lan ', # 0xbc
'Xi ', # 0xbd
'Ji ', # 0xbe
'Xuan ', # 0xbf
'Ruan ', # 0xc0
'Huo ', # 0xc1
'Gai ', # 0xc2
'Lei ', # 0xc3
'Du ', # 0xc4
'Li ', # 0xc5
'Zhi ', # 0xc6
'Rou ', # 0xc7
'Li ', # 0xc8
'Zan ', # 0xc9
'Qiong ', # 0xca
'Zhe ', # 0xcb
'Gui ', # 0xcc
'Sui ', # 0xcd
'La ', # 0xce
'Long ', # 0xcf
'Lu ', # 0xd0
'Li ', # 0xd1
'Zan ', # 0xd2
'Lan ', # 0xd3
'Ying ', # 0xd4
'Mi ', # 0xd5
'Xiang ', # 0xd6
'Xi ', # 0xd7
'Guan ', # 0xd8
'Dao ', # 0xd9
'Zan ', # 0xda
'Huan ', # 0xdb
'Gua ', # 0xdc
'Bo ', # 0xdd
'Die ', # 0xde
'Bao ', # 0xdf
'Hu ', # 0xe0
'Zhi ', # 0xe1
'Piao ', # 0xe2
'Ban ', # 0xe3
'Rang ', # 0xe4
'Li ', # 0xe5
'Wa ', # 0xe6
'Dekaguramu ', # 0xe7
'Jiang ', # 0xe8
'Qian ', # 0xe9
'Fan ', # 0xea
'Pen ', # 0xeb
'Fang ', # 0xec
'Dan ', # 0xed
'Weng ', # 0xee
'Ou ', # 0xef
'Deshiguramu ', # 0xf0
'Miriguramu ', # 0xf1
'Thon ', # 0xf2
'Hu ', # 0xf3
'Ling ', # 0xf4
'Yi ', # 0xf5
'Ping ', # 0xf6
'Ci ', # 0xf7
'Hekutogura ', # 0xf8
'Juan ', # 0xf9
'Chang ', # 0xfa
'Chi ', # 0xfb
'Sarake ', # 0xfc
'Dang ', # 0xfd
'Meng ', # 0xfe
'Pou ', # 0xff
)
|
gpl-3.0
|
ROM-Jeremy/android_kernel_lge_msm8226
|
tools/perf/scripts/python/futex-contention.py
|
11261
|
1486
|
# futex contention
# (c) 2010, Arnaldo Carvalho de Melo <acme@redhat.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Translation of:
#
# http://sourceware.org/systemtap/wiki/WSFutexContention
#
# to perf python scripting.
#
# Measures futex contention
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Util import *
process_names = {}
thread_thislock = {}
thread_blocktime = {}
lock_waits = {} # long-lived stats on (tid,lock) blockage elapsed time
process_names = {} # long-lived pid-to-execname mapping
def syscalls__sys_enter_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, uaddr, op, val, utime, uaddr2, val3):
cmd = op & FUTEX_CMD_MASK
if cmd != FUTEX_WAIT:
return # we don't care about originators of WAKE events
process_names[tid] = comm
thread_thislock[tid] = uaddr
thread_blocktime[tid] = nsecs(s, ns)
def syscalls__sys_exit_futex(event, ctxt, cpu, s, ns, tid, comm,
nr, ret):
if thread_blocktime.has_key(tid):
elapsed = nsecs(s, ns) - thread_blocktime[tid]
add_stats(lock_waits, (tid, thread_thislock[tid]), elapsed)
del thread_blocktime[tid]
del thread_thislock[tid]
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
for (tid, lock) in lock_waits:
min, max, avg, count = lock_waits[tid, lock]
print "%s[%d] lock %x contended %d times, %d avg ns" % \
(process_names[tid], tid, lock, count, avg)
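# Editor's note (hedged, not part of the original script): this is normally run
# through perf's scripting support -- roughly "perf script record futex-contention"
# to capture the futex syscall tracepoints, then "perf script report futex-contention"
# (or "perf script -s <path-to-this-file>") to print the per-lock contention summary.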
|
gpl-2.0
|
joshblum/django-with-audit
|
tests/regressiontests/utils/termcolors.py
|
138
|
7367
|
from django.utils import unittest
from django.utils.termcolors import parse_color_setting, PALETTES, DEFAULT_PALETTE, LIGHT_PALETTE, DARK_PALETTE, NOCOLOR_PALETTE
class TermColorTests(unittest.TestCase):
def test_empty_string(self):
self.assertEqual(parse_color_setting(''), PALETTES[DEFAULT_PALETTE])
def test_simple_palette(self):
self.assertEqual(parse_color_setting('light'), PALETTES[LIGHT_PALETTE])
self.assertEqual(parse_color_setting('dark'), PALETTES[DARK_PALETTE])
self.assertEqual(parse_color_setting('nocolor'), None)
def test_fg(self):
self.assertEqual(parse_color_setting('error=green'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
def test_fg_bg(self):
self.assertEqual(parse_color_setting('error=green/blue'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'bg':'blue'}))
def test_fg_opts(self):
self.assertEqual(parse_color_setting('error=green,blink'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'opts': ('blink',)}))
self.assertEqual(parse_color_setting('error=green,bold,blink'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'opts': ('blink','bold')}))
def test_fg_bg_opts(self):
self.assertEqual(parse_color_setting('error=green/blue,blink'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'bg':'blue', 'opts': ('blink',)}))
self.assertEqual(parse_color_setting('error=green/blue,bold,blink'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'bg':'blue', 'opts': ('blink','bold')}))
def test_override_palette(self):
self.assertEqual(parse_color_setting('light;error=green'),
dict(PALETTES[LIGHT_PALETTE],
ERROR={'fg':'green'}))
def test_override_nocolor(self):
self.assertEqual(parse_color_setting('nocolor;error=green'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg': 'green'}))
def test_reverse_override(self):
self.assertEqual(parse_color_setting('error=green;light'), PALETTES[LIGHT_PALETTE])
def test_multiple_roles(self):
self.assertEqual(parse_color_setting('error=green;sql_field=blue'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'},
SQL_FIELD={'fg':'blue'}))
def test_override_with_multiple_roles(self):
self.assertEqual(parse_color_setting('light;error=green;sql_field=blue'),
dict(PALETTES[LIGHT_PALETTE],
ERROR={'fg':'green'},
SQL_FIELD={'fg':'blue'}))
def test_empty_definition(self):
self.assertEqual(parse_color_setting(';'), None)
self.assertEqual(parse_color_setting('light;'), PALETTES[LIGHT_PALETTE])
self.assertEqual(parse_color_setting(';;;'), None)
def test_empty_options(self):
self.assertEqual(parse_color_setting('error=green,'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
self.assertEqual(parse_color_setting('error=green,,,'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
self.assertEqual(parse_color_setting('error=green,,blink,,'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'opts': ('blink',)}))
def test_bad_palette(self):
self.assertEqual(parse_color_setting('unknown'), None)
def test_bad_role(self):
self.assertEqual(parse_color_setting('unknown='), None)
self.assertEqual(parse_color_setting('unknown=green'), None)
self.assertEqual(parse_color_setting('unknown=green;sql_field=blue'),
dict(PALETTES[NOCOLOR_PALETTE],
SQL_FIELD={'fg':'blue'}))
def test_bad_color(self):
self.assertEqual(parse_color_setting('error='), None)
self.assertEqual(parse_color_setting('error=;sql_field=blue'),
dict(PALETTES[NOCOLOR_PALETTE],
SQL_FIELD={'fg':'blue'}))
self.assertEqual(parse_color_setting('error=unknown'), None)
self.assertEqual(parse_color_setting('error=unknown;sql_field=blue'),
dict(PALETTES[NOCOLOR_PALETTE],
SQL_FIELD={'fg':'blue'}))
self.assertEqual(parse_color_setting('error=green/unknown'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
self.assertEqual(parse_color_setting('error=green/blue/something'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'bg': 'blue'}))
self.assertEqual(parse_color_setting('error=green/blue/something,blink'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'bg': 'blue', 'opts': ('blink',)}))
def test_bad_option(self):
self.assertEqual(parse_color_setting('error=green,unknown'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
self.assertEqual(parse_color_setting('error=green,unknown,blink'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'opts': ('blink',)}))
def test_role_case(self):
self.assertEqual(parse_color_setting('ERROR=green'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
self.assertEqual(parse_color_setting('eRrOr=green'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
def test_color_case(self):
self.assertEqual(parse_color_setting('error=GREEN'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
self.assertEqual(parse_color_setting('error=GREEN/BLUE'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'bg':'blue'}))
self.assertEqual(parse_color_setting('error=gReEn'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green'}))
self.assertEqual(parse_color_setting('error=gReEn/bLuE'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'bg':'blue'}))
def test_opts_case(self):
self.assertEqual(parse_color_setting('error=green,BLINK'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'opts': ('blink',)}))
self.assertEqual(parse_color_setting('error=green,bLiNk'),
dict(PALETTES[NOCOLOR_PALETTE],
ERROR={'fg':'green', 'opts': ('blink',)}))
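# --- Editor's sketch (not part of the original test module): a small, hedged
# illustration of the colour-setting syntax these tests exercise, in the form a
# user might put in the DJANGO_COLORS environment variable. Runs only when this
# file is executed directly.
if __name__ == '__main__':
    # 'light' picks a base palette; 'error=yellow/blue,blink' overrides one role.
    print(parse_color_setting('light;error=yellow/blue,blink'))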
|
bsd-3-clause
|
Edraak/edraak-platform
|
lms/djangoapps/courseware/tests/test_middleware.py
|
19
|
1491
|
"""
Tests for courseware middleware
"""
from django.http import Http404
from django.test.client import RequestFactory
from nose.plugins.attrib import attr
from lms.djangoapps.courseware.exceptions import Redirect
from lms.djangoapps.courseware.middleware import RedirectMiddleware
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
@attr(shard=1)
class CoursewareMiddlewareTestCase(SharedModuleStoreTestCase):
"""Tests that courseware middleware is correctly redirected"""
@classmethod
def setUpClass(cls):
super(CoursewareMiddlewareTestCase, cls).setUpClass()
cls.course = CourseFactory.create()
def test_process_404(self):
"""A 404 should not trigger anything"""
request = RequestFactory().get("dummy_url")
response = RedirectMiddleware().process_exception(
request, Http404()
)
self.assertIsNone(response)
def test_redirect_exceptions(self):
"""
Unit tests for handling of Redirect exceptions.
"""
request = RequestFactory().get("dummy_url")
test_url = '/test_url'
exception = Redirect(test_url)
response = RedirectMiddleware().process_exception(
request, exception
)
self.assertEqual(response.status_code, 302)
target_url = response._headers['location'][1]
self.assertTrue(target_url.endswith(test_url))
|
agpl-3.0
|
mosbasik/buzhug
|
javasrc/lib/Jython/Lib/distutils/dir_util.py
|
7
|
8084
|
"""distutils.dir_util
Utility functions for manipulating directories and directory trees."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import os, sys
from types import *
from distutils.errors import DistutilsFileError, DistutilsInternalError
from distutils import log
# cache used by mkpath() -- in addition to cheapening redundant calls,
# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
_path_created = {}
# I don't use os.makedirs because a) it's new to Python 1.5.2, and
# b) it blows up if the directory already exists (I want to silently
# succeed in that case).
def mkpath (name, mode=0777, verbose=0, dry_run=0):
"""Create a directory and any missing ancestor directories. If the
directory already exists (or if 'name' is the empty string, which
means the current directory, which of course exists), then do
nothing. Raise DistutilsFileError if unable to create some
directory along the way (eg. some sub-path exists, but is a file
rather than a directory). If 'verbose' is true, print a one-line
summary of each mkdir to stdout. Return the list of directories
actually created."""
global _path_created
# Detect a common bug -- name is None
if not isinstance(name, StringTypes):
raise DistutilsInternalError, \
"mkpath: 'name' must be a string (got %r)" % (name,)
# XXX what's the better way to handle verbosity? print as we create
# each directory in the path (the current behaviour), or only announce
# the creation of the whole path? (quite easy to do the latter since
# we're not using a recursive algorithm)
name = os.path.normpath(name)
created_dirs = []
if os.path.isdir(name) or name == '':
return created_dirs
if _path_created.get(os.path.abspath(name)):
return created_dirs
(head, tail) = os.path.split(name)
tails = [tail] # stack of lone dirs to create
while head and tail and not os.path.isdir(head):
#print "splitting '%s': " % head,
(head, tail) = os.path.split(head)
#print "to ('%s','%s')" % (head, tail)
tails.insert(0, tail) # push next higher dir onto stack
#print "stack of tails:", tails
# now 'head' contains the deepest directory that already exists
# (that is, the child of 'head' in 'name' is the highest directory
# that does *not* exist)
for d in tails:
#print "head = %s, d = %s: " % (head, d),
head = os.path.join(head, d)
abs_head = os.path.abspath(head)
if _path_created.get(abs_head):
continue
log.info("creating %s", head)
if not dry_run:
try:
os.mkdir(head)
created_dirs.append(head)
except OSError, exc:
raise DistutilsFileError, \
"could not create '%s': %s" % (head, exc[-1])
_path_created[abs_head] = 1
return created_dirs
# mkpath ()
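# Editor's note (illustrative, not part of the original module): starting from
# an empty working directory, mkpath('foo/bar/baz') creates 'foo', 'foo/bar' and
# 'foo/bar/baz' (POSIX-style separators shown) and returns all three paths; a
# second identical call sees the directory already exists and returns [].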
def create_tree (base_dir, files, mode=0777, verbose=0, dry_run=0):
"""Create all the empty directories under 'base_dir' needed to
    put 'files' there. 'base_dir' is just the name of a directory
which doesn't necessarily exist yet; 'files' is a list of filenames
to be interpreted relative to 'base_dir'. 'base_dir' + the
directory portion of every file in 'files' will be created if it
doesn't already exist. 'mode', 'verbose' and 'dry_run' flags are as
for 'mkpath()'."""
# First get the list of directories to create
need_dir = {}
for file in files:
need_dir[os.path.join(base_dir, os.path.dirname(file))] = 1
need_dirs = need_dir.keys()
need_dirs.sort()
# Now create them
for dir in need_dirs:
mkpath(dir, mode, dry_run=dry_run)
# create_tree ()
def copy_tree (src, dst,
preserve_mode=1,
preserve_times=1,
preserve_symlinks=0,
update=0,
verbose=0,
dry_run=0):
"""Copy an entire directory tree 'src' to a new location 'dst'. Both
'src' and 'dst' must be directory names. If 'src' is not a
directory, raise DistutilsFileError. If 'dst' does not exist, it is
created with 'mkpath()'. The end result of the copy is that every
file in 'src' is copied to 'dst', and directories under 'src' are
recursively copied to 'dst'. Return the list of files that were
copied or might have been copied, using their output name. The
return value is unaffected by 'update' or 'dry_run': it is simply
the list of all files under 'src', with the names changed to be
under 'dst'.
'preserve_mode' and 'preserve_times' are the same as for
'copy_file'; note that they only apply to regular files, not to
directories. If 'preserve_symlinks' is true, symlinks will be
copied as symlinks (on platforms that support them!); otherwise
(the default), the destination of the symlink will be copied.
'update' and 'verbose' are the same as for 'copy_file'."""
from distutils.file_util import copy_file
if not dry_run and not os.path.isdir(src):
raise DistutilsFileError, \
"cannot copy tree '%s': not a directory" % src
try:
names = os.listdir(src)
except os.error, (errno, errstr):
if dry_run:
names = []
else:
raise DistutilsFileError, \
"error listing files in '%s': %s" % (src, errstr)
if not dry_run:
mkpath(dst)
outputs = []
for n in names:
src_name = os.path.join(src, n)
dst_name = os.path.join(dst, n)
if preserve_symlinks and os.path.islink(src_name):
link_dest = os.readlink(src_name)
log.info("linking %s -> %s", dst_name, link_dest)
if not dry_run:
os.symlink(link_dest, dst_name)
outputs.append(dst_name)
elif os.path.isdir(src_name):
outputs.extend(
copy_tree(src_name, dst_name, preserve_mode,
preserve_times, preserve_symlinks, update,
dry_run=dry_run))
else:
copy_file(src_name, dst_name, preserve_mode,
preserve_times, update, dry_run=dry_run)
outputs.append(dst_name)
return outputs
# copy_tree ()
# Helper for remove_tree()
def _build_cmdtuple(path, cmdtuples):
for f in os.listdir(path):
real_f = os.path.join(path,f)
if os.path.isdir(real_f) and not os.path.islink(real_f):
_build_cmdtuple(real_f, cmdtuples)
else:
cmdtuples.append((os.remove, real_f))
cmdtuples.append((os.rmdir, path))
def remove_tree (directory, verbose=0, dry_run=0):
"""Recursively remove an entire directory tree. Any errors are ignored
(apart from being reported to stdout if 'verbose' is true).
"""
from distutils.util import grok_environment_error
global _path_created
log.info("removing '%s' (and everything under it)", directory)
if dry_run:
return
cmdtuples = []
_build_cmdtuple(directory, cmdtuples)
for cmd in cmdtuples:
try:
apply(cmd[0], (cmd[1],))
# remove dir from cache if it's already there
abspath = os.path.abspath(cmd[1])
if _path_created.has_key(abspath):
del _path_created[abspath]
except (IOError, OSError), exc:
log.warn(grok_environment_error(
exc, "error removing %s: " % directory))
def ensure_relative (path):
"""Take the full path 'path', and make it a relative path so
it can be the second argument to os.path.join().
"""
drive, path = os.path.splitdrive(path)
if sys.platform == 'mac':
return os.sep + path
else:
if path[0:1] == os.sep:
path = drive + path[1:]
return path
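# --- Editor's sketch (not part of the original distutils module): a hedged,
# self-contained demonstration of mkpath()/copy_tree()/remove_tree() on a
# throwaway temporary directory. It runs only when this file is executed
# directly with a Python 2 interpreter (the module itself is Python 2 code).
if __name__ == '__main__':
    import tempfile
    _base = tempfile.mkdtemp()
    _src = os.path.join(_base, 'src', 'nested')
    mkpath(_src)                                  # creates _base/src and _base/src/nested
    open(os.path.join(_src, 'example.txt'), 'w').close()
    print(copy_tree(os.path.join(_base, 'src'),   # copies the tree and returns the
                    os.path.join(_base, 'dst')))  # list of files it copied
    remove_tree(_base)                            # clean up the scratch directory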
|
bsd-3-clause
|
modulexcite/catapult
|
dashboard/dashboard/edit_config_handler.py
|
5
|
8814
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A common base class for pages that are used to edit configs."""
from google.appengine.api import taskqueue
from dashboard import list_tests
from dashboard import request_handler
from dashboard import utils
from dashboard import xsrf
# Max number of entities to put in one request to /put_entities_task.
_MAX_TESTS_TO_PUT_AT_ONCE = 25
# The queue to use to re-put tests. Should be present in queue.yaml.
_TASK_QUEUE_NAME = 'edit-sheriffs-queue'
class EditConfigHandler(request_handler.RequestHandler):
"""Base class for handlers that are used to add or edit entities.
Specifically, this is a common base class for EditSheriffsHandler
and EditAnomalyConfigsHandler. Both of these kinds of entities
represent a configuration that can apply to a set of tests, where
the set of tests is specified with a list of test path patterns.
"""
# The webapp2 docs say that custom __init__ methods should call initialize()
# at the beginning of the method (rather than calling super __init__). See:
# https://webapp-improved.appspot.com/guide/handlers.html#overriding-init
# pylint: disable=super-init-not-called
def __init__(self, request, response, model_class):
"""Constructs a handler object for editing entities of the given class.
Args:
request: Request object (implicitly passed in by webapp2).
response: Response object (implicitly passed in by webapp2).
model_class: A subclass of ndb.Model.
"""
self.initialize(request, response)
self._model_class = model_class
@xsrf.TokenRequired
def post(self):
"""Updates the user-selected anomaly threshold configuration.
Request parameters:
add-edit: Either 'add' if adding a new config, or 'edit'.
add-name: A new anomaly config name, if adding one.
edit-name: An existing anomaly config name, if editing one.
patterns: Newline-separated list of test path patterns to monitor.
Depending on the specific sub-class, this will also take other
parameters for specific properties of the entity being edited.
"""
try:
edit_type = self.request.get('add-edit')
if edit_type == 'add':
self._AddEntity()
elif edit_type == 'edit':
self._EditEntity()
else:
raise request_handler.InvalidInputError('Invalid value for add-edit.')
except request_handler.InvalidInputError as error:
message = str(error) + ' Model class: ' + self._model_class.__name__
self.RenderHtml('result.html', {'errors': [message]})
def _AddEntity(self):
"""Adds adds a new entity according to the request parameters."""
name = self.request.get('add-name')
if not name:
      raise request_handler.InvalidInputError('No name given when adding a new entity.')
if self._model_class.get_by_id(name):
raise request_handler.InvalidInputError(
'Entity "%s" already exists, cannot add.' % name)
entity = self._model_class(id=name)
self._UpdateAndReportResults(entity)
def _EditEntity(self):
"""Edits an existing entity according to the request parameters."""
name = self.request.get('edit-name')
if not name:
raise request_handler.InvalidInputError('No name given.')
entity = self._model_class.get_by_id(name)
if not entity:
raise request_handler.InvalidInputError(
'Entity "%s" does not exist, cannot edit.' % name)
self._UpdateAndReportResults(entity)
def _UpdateAndReportResults(self, entity):
"""Updates the entity and reports the results of this updating."""
new_patterns = _SplitPatternLines(self.request.get('patterns'))
added_test_paths, removed_test_paths = _ChangeTestPatterns(
entity.patterns, new_patterns)
entity.patterns = new_patterns
self._UpdateFromRequestParameters(entity)
entity.put()
self._RenderResults(entity, added_test_paths, removed_test_paths)
def _UpdateFromRequestParameters(self, entity):
"""Updates the given entity based on query parameters.
This method does not need to put() the entity.
Args:
entity: The entity to update.
"""
raise NotImplementedError()
def _RenderResults(self, entity, added_test_paths, removed_test_paths):
"""Outputs results using the results.html template.
Args:
entity: The entity that was edited.
added_test_paths: New tests that this config now applies to.
removed_test_paths: Tests that this config no longer applies to.
"""
def ResultEntry(name, value):
"""Returns an entry in the results lists to embed on result.html."""
return {'name': name, 'value': value, 'class': 'results-pre'}
self.RenderHtml('result.html', {
'headline': ('Added or updated %s "%s".' %
(self._model_class.__name__, entity.key.string_id())),
'results': [
ResultEntry('Entity', str(entity)),
ResultEntry('Added tests', '\n'.join(added_test_paths)),
ResultEntry('Removed tests', '\n'.join(removed_test_paths)),
]
})
def _SplitPatternLines(patterns_string):
"""Splits up the given newline-separated patterns and validates them."""
test_path_patterns = sorted(p for p in patterns_string.splitlines() if p)
_ValidatePatterns(test_path_patterns)
return test_path_patterns
def _ValidatePatterns(test_path_patterns):
"""Raises an exception if any test path patterns are invalid."""
for pattern in test_path_patterns:
if not _IsValidTestPathPattern(pattern):
raise request_handler.InvalidInputError(
'Invalid test path pattern: "%s"' % pattern)
def _IsValidTestPathPattern(test_path_pattern):
"""Checks whether the given test path pattern string is OK."""
if '[' in test_path_pattern or ']' in test_path_pattern:
return False
# Valid test paths will have a Master, bot, and test suite, and will
# generally have a chart name and trace name after that.
return len(test_path_pattern.split('/')) >= 3
def _ChangeTestPatterns(old_patterns, new_patterns):
"""Updates tests that are different between old_patterns and new_patterns.
The two arguments both represent sets of test paths (i.e. sets of data
series). Any tests that are different between these two sets need to be
updated.
Some properties of Test entities are updated when they are put in the
|_pre_put_hook| method of Test, so any Test entity that might need to
be updated should be re-put.
Args:
old_patterns: An iterable of test path pattern strings.
new_patterns: Another iterable of test path pattern strings.
Returns:
A pair (added_test_paths, removed_test_paths), which are, respectively,
the test paths that are in the new set but not the old, and those that
are in the old set but not the new.
"""
added_patterns, removed_patterns = _ComputeDeltas(old_patterns, new_patterns)
added_test_paths = _AllTestPathsMatchingPatterns(added_patterns)
removed_test_paths = _AllTestPathsMatchingPatterns(removed_patterns)
_AddTestsToPutToTaskQueue(added_test_paths + removed_test_paths)
return _RemoveOverlapping(added_test_paths, removed_test_paths)
def _ComputeDeltas(old_items, new_items):
"""Finds the added and removed items in a new set compared to an old one.
Args:
old_items: A collection of existing items. Could be a list or set.
new_items: Another collection of items.
Returns:
A pair of sets (added, removed).
"""
old, new = set(old_items), set(new_items)
return new - old, old - new
def _RemoveOverlapping(added_items, removed_items):
"""Returns two sets of items with the common items removed."""
added, removed = set(added_items), set(removed_items)
return added - removed, removed - added
def _AllTestPathsMatchingPatterns(patterns_list):
"""Returns a list of all test paths matching the given list of patterns."""
test_paths = set()
for pattern in patterns_list:
test_paths |= set(list_tests.GetTestsMatchingPattern(pattern))
return sorted(test_paths)
def _AddTestsToPutToTaskQueue(test_paths):
"""Adds tests that we want to re-put in the datastore to a queue.
We need to re-put the tests so that Test._pre_put_hook is run, so that
the sheriff or alert threshold config of the Test is updated.
Args:
test_paths: List of test paths of tests to be re-put.
"""
for start_index in range(0, len(test_paths), _MAX_TESTS_TO_PUT_AT_ONCE):
group = test_paths[start_index:start_index + _MAX_TESTS_TO_PUT_AT_ONCE]
urlsafe_keys = [utils.TestKey(t).urlsafe() for t in group]
taskqueue.add(
url='/put_entities_task',
params={'keys': ','.join(urlsafe_keys)},
queue_name=_TASK_QUEUE_NAME)
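# --- Editor's sketch (not part of the original handler module): a hedged
# illustration of the pure helper functions above, using made-up test paths.
# Because of the module's App Engine imports it only actually runs in that
# environment; the logic shown is independent of the datastore.
if __name__ == '__main__':
  _added, _removed = _ComputeDeltas(
      ['ChromiumPerf/linux/sunspider/Total'],
      ['ChromiumPerf/linux/sunspider/Total', 'ChromiumPerf/linux/octane/Total'])
  print(sorted(_added))      # ['ChromiumPerf/linux/octane/Total']
  print(sorted(_removed))    # []
  print(_IsValidTestPathPattern('ChromiumPerf/*/sunspider/*'))  # True: 4 parts, no brackets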
|
bsd-3-clause
|
DerTeufel/dualboot_recovery
|
tools/ota/convert-to-bmp.py
|
32
|
2607
|
#!/usr/bin/python2.4
"""A simple script to convert asset images to BMP files, that supports
RGBA image."""
import struct
import Image
import sys
infile = sys.argv[1]
outfile = sys.argv[2]
if not outfile.endswith(".bmp"):
print >> sys.stderr, "Warning: I'm expecting to write BMP files."
im = Image.open(infile)
if im.mode == 'RGB':
im.save(outfile)
elif im.mode == 'RGBA':
# Python Imaging Library doesn't write RGBA BMP files, so we roll
# our own.
BMP_HEADER_FMT = ("<" # little-endian
"H" # signature
"L" # file size
"HH" # reserved (set to 0)
"L" # offset to start of bitmap data)
)
BITMAPINFO_HEADER_FMT= ("<" # little-endian
"L" # size of this struct
"L" # width
"L" # height
"H" # planes (set to 1)
"H" # bit count
"L" # compression (set to 0 for minui)
"L" # size of image data (0 if uncompressed)
"L" # x pixels per meter (1)
"L" # y pixels per meter (1)
"L" # colors used (0)
"L" # important colors (0)
)
fileheadersize = struct.calcsize(BMP_HEADER_FMT)
infoheadersize = struct.calcsize(BITMAPINFO_HEADER_FMT)
header = struct.pack(BMP_HEADER_FMT,
0x4d42, # "BM" in little-endian
(fileheadersize + infoheadersize +
im.size[0] * im.size[1] * 4),
0, 0,
fileheadersize + infoheadersize)
info = struct.pack(BITMAPINFO_HEADER_FMT,
infoheadersize,
im.size[0],
im.size[1],
1,
32,
0,
0,
1,
1,
0,
0)
f = open(outfile, "wb")
f.write(header)
f.write(info)
data = im.tostring()
for j in range(im.size[1]-1, -1, -1): # rows bottom-to-top
for i in range(j*im.size[0]*4, (j+1)*im.size[0]*4, 4):
f.write(data[i+2]) # B
f.write(data[i+1]) # G
f.write(data[i+0]) # R
f.write(data[i+3]) # A
f.close()
else:
print >> sys.stderr, "Don't know how to handle image mode '%s'." % (im.mode,)
|
bsd-2-clause
|
jagguli/intellij-community
|
python/lib/Lib/rfc822.py
|
89
|
33167
|
"""RFC 2822 message manipulation.
Note: This is only a very rough sketch of a full RFC-822 parser; in particular
the tokenizing of addresses does not adhere to all the quoting rules.
Note: RFC 2822 is a long awaited update to RFC 822. This module should
conform to RFC 2822, and is thus mis-named (it's not worth renaming it). Some
effort at RFC 2822 updates have been made, but a thorough audit has not been
performed. Consider any RFC 2822 non-conformance to be a bug.
RFC 2822: http://www.faqs.org/rfcs/rfc2822.html
RFC 822 : http://www.faqs.org/rfcs/rfc822.html (obsolete)
Directions for use:
To create a Message object: first open a file, e.g.:
fp = open(file, 'r')
You can use any other legal way of getting an open file object, e.g. use
sys.stdin or call os.popen(). Then pass the open file object to the Message()
constructor:
m = Message(fp)
This class can work with any input object that supports a readline method. If
the input object has seek and tell capability, the rewindbody method will
work; also illegal lines will be pushed back onto the input stream. If the
input object lacks seek but has an `unread' method that can push back a line
of input, Message will use that to push back illegal lines. Thus this class
can be used to parse messages coming from a buffered stream.
The optional `seekable' argument is provided as a workaround for certain stdio
libraries in which tell() discards buffered data before discovering that the
lseek() system call doesn't work. For maximum portability, you should set the
seekable argument to zero to prevent that initial tell() when passing in
an unseekable object such as a file object created from a socket object. If
it is 1 on entry -- which it is by default -- the tell() method of the open
file object is called once; if this raises an exception, seekable is reset to
0. For other nonzero values of seekable, this test is not made.
To get the text of a particular header there are several methods:
str = m.getheader(name)
str = m.getrawheader(name)
where name is the name of the header, e.g. 'Subject'. The difference is that
getheader() strips the leading and trailing whitespace, while getrawheader()
doesn't. Both functions retain embedded whitespace (including newlines)
exactly as they are specified in the header, and leave the case of the text
unchanged.
For addresses and address lists there are functions
realname, mailaddress = m.getaddr(name)
list = m.getaddrlist(name)
where the latter returns a list of (realname, mailaddr) tuples.
There is also a method
time = m.getdate(name)
which parses a Date-like field and returns a time-compatible tuple,
i.e. a tuple such as returned by time.localtime() or accepted by
time.mktime().
See the class definition for lower level access methods.
There are also some utility functions here.
"""
# Cleanup and extensions by Eric S. Raymond <esr@thyrsus.com>
import time
__all__ = ["Message","AddressList","parsedate","parsedate_tz","mktime_tz"]
_blanklines = ('\r\n', '\n') # Optimization for islast()
class Message:
"""Represents a single RFC 2822-compliant message."""
def __init__(self, fp, seekable = 1):
"""Initialize the class instance and read the headers."""
if seekable == 1:
# Exercise tell() to make sure it works
# (and then assume seek() works, too)
try:
fp.tell()
except (AttributeError, IOError):
seekable = 0
self.fp = fp
self.seekable = seekable
self.startofheaders = None
self.startofbody = None
#
if self.seekable:
try:
self.startofheaders = self.fp.tell()
except IOError:
self.seekable = 0
#
self.readheaders()
#
if self.seekable:
try:
self.startofbody = self.fp.tell()
except IOError:
self.seekable = 0
def rewindbody(self):
"""Rewind the file to the start of the body (if seekable)."""
if not self.seekable:
raise IOError, "unseekable file"
self.fp.seek(self.startofbody)
def readheaders(self):
"""Read header lines.
Read header lines up to the entirely blank line that terminates them.
The (normally blank) line that ends the headers is skipped, but not
included in the returned list. If a non-header line ends the headers,
(which is an error), an attempt is made to backspace over it; it is
never included in the returned list.
The variable self.status is set to the empty string if all went well,
otherwise it is an error message. The variable self.headers is a
completely uninterpreted list of lines contained in the header (so
printing them will reproduce the header exactly as it appears in the
file).
"""
self.dict = {}
self.unixfrom = ''
self.headers = lst = []
self.status = ''
headerseen = ""
firstline = 1
startofline = unread = tell = None
if hasattr(self.fp, 'unread'):
unread = self.fp.unread
elif self.seekable:
tell = self.fp.tell
while 1:
if tell:
try:
startofline = tell()
except IOError:
startofline = tell = None
self.seekable = 0
line = self.fp.readline()
if not line:
self.status = 'EOF in headers'
break
# Skip unix From name time lines
if firstline and line.startswith('From '):
self.unixfrom = self.unixfrom + line
continue
firstline = 0
if headerseen and line[0] in ' \t':
# It's a continuation line.
lst.append(line)
x = (self.dict[headerseen] + "\n " + line.strip())
self.dict[headerseen] = x.strip()
continue
elif self.iscomment(line):
# It's a comment. Ignore it.
continue
elif self.islast(line):
# Note! No pushback here! The delimiter line gets eaten.
break
headerseen = self.isheader(line)
if headerseen:
# It's a legal header line, save it.
lst.append(line)
self.dict[headerseen] = line[len(headerseen)+1:].strip()
continue
else:
# It's not a header line; throw it back and stop here.
if not self.dict:
self.status = 'No headers'
else:
self.status = 'Non-header line where header expected'
# Try to undo the read.
if unread:
unread(line)
elif tell:
self.fp.seek(startofline)
else:
self.status = self.status + '; bad seek'
break
def isheader(self, line):
"""Determine whether a given line is a legal header.
This method should return the header name, suitably canonicalized.
You may override this method in order to use Message parsing on tagged
data in RFC 2822-like formats with special header formats.
"""
i = line.find(':')
if i > 0:
return line[:i].lower()
return None
def islast(self, line):
"""Determine whether a line is a legal end of RFC 2822 headers.
You may override this method if your application wants to bend the
rules, e.g. to strip trailing whitespace, or to recognize MH template
separators ('--------'). For convenience (e.g. for code reading from
sockets) a line consisting of \r\n also matches.
"""
return line in _blanklines
def iscomment(self, line):
"""Determine whether a line should be skipped entirely.
You may override this method in order to use Message parsing on tagged
data in RFC 2822-like formats that support embedded comments or
free-text data.
"""
return False
def getallmatchingheaders(self, name):
"""Find all header lines matching a given header name.
Look through the list of headers and find all lines matching a given
header name (and their continuation lines). A list of the lines is
returned, without interpretation. If the header does not occur, an
empty list is returned. If the header occurs multiple times, all
occurrences are returned. Case is not important in the header name.
"""
name = name.lower() + ':'
n = len(name)
lst = []
hit = 0
for line in self.headers:
if line[:n].lower() == name:
hit = 1
elif not line[:1].isspace():
hit = 0
if hit:
lst.append(line)
return lst
def getfirstmatchingheader(self, name):
"""Get the first header line matching name.
This is similar to getallmatchingheaders, but it returns only the
first matching header (and its continuation lines).
"""
name = name.lower() + ':'
n = len(name)
lst = []
hit = 0
for line in self.headers:
if hit:
if not line[:1].isspace():
break
elif line[:n].lower() == name:
hit = 1
if hit:
lst.append(line)
return lst
def getrawheader(self, name):
"""A higher-level interface to getfirstmatchingheader().
Return a string containing the literal text of the header but with the
keyword stripped. All leading, trailing and embedded whitespace is
kept in the string, however. Return None if the header does not
occur.
"""
lst = self.getfirstmatchingheader(name)
if not lst:
return None
lst[0] = lst[0][len(name) + 1:]
return ''.join(lst)
def getheader(self, name, default=None):
"""Get the header value for a name.
This is the normal interface: it returns a stripped version of the
header value for a given header name, or None if it doesn't exist.
This uses the dictionary version which finds the *last* such header.
"""
return self.dict.get(name.lower(), default)
get = getheader
def getheaders(self, name):
"""Get all values for a header.
This returns a list of values for headers given more than once; each
value in the result list is stripped in the same way as the result of
getheader(). If the header is not given, return an empty list.
"""
result = []
current = ''
have_header = 0
for s in self.getallmatchingheaders(name):
if s[0].isspace():
if current:
current = "%s\n %s" % (current, s.strip())
else:
current = s.strip()
else:
if have_header:
result.append(current)
current = s[s.find(":") + 1:].strip()
have_header = 1
if have_header:
result.append(current)
return result
def getaddr(self, name):
"""Get a single address from a header, as a tuple.
An example return value:
('Guido van Rossum', 'guido@cwi.nl')
"""
# New, by Ben Escoto
alist = self.getaddrlist(name)
if alist:
return alist[0]
else:
return (None, None)
def getaddrlist(self, name):
"""Get a list of addresses from a header.
Retrieves a list of addresses from a header, where each address is a
tuple as returned by getaddr(). Scans all named headers, so it works
properly with multiple To: or Cc: headers for example.
"""
raw = []
for h in self.getallmatchingheaders(name):
if h[0] in ' \t':
raw.append(h)
else:
if raw:
raw.append(', ')
i = h.find(':')
if i > 0:
addr = h[i+1:]
raw.append(addr)
alladdrs = ''.join(raw)
a = AddressList(alladdrs)
return a.addresslist
def getdate(self, name):
"""Retrieve a date field from a header.
Retrieves a date field from the named header, returning a tuple
compatible with time.mktime().
"""
try:
data = self[name]
except KeyError:
return None
return parsedate(data)
def getdate_tz(self, name):
"""Retrieve a date field from a header as a 10-tuple.
The first 9 elements make up a tuple compatible with time.mktime(),
and the 10th is the offset of the poster's time zone from GMT/UTC.
"""
try:
data = self[name]
except KeyError:
return None
return parsedate_tz(data)
# Access as a dictionary (only finds *last* header of each type):
def __len__(self):
"""Get the number of headers in a message."""
return len(self.dict)
def __getitem__(self, name):
"""Get a specific header, as from a dictionary."""
return self.dict[name.lower()]
def __setitem__(self, name, value):
"""Set the value of a header.
Note: This is not a perfect inversion of __getitem__, because any
changed headers get stuck at the end of the raw-headers list rather
than where the altered header was.
"""
del self[name] # Won't fail if it doesn't exist
self.dict[name.lower()] = value
text = name + ": " + value
for line in text.split("\n"):
self.headers.append(line + "\n")
def __delitem__(self, name):
"""Delete all occurrences of a specific header, if it is present."""
name = name.lower()
if not name in self.dict:
return
del self.dict[name]
name = name + ':'
n = len(name)
lst = []
hit = 0
for i in range(len(self.headers)):
line = self.headers[i]
if line[:n].lower() == name:
hit = 1
elif not line[:1].isspace():
hit = 0
if hit:
lst.append(i)
for i in reversed(lst):
del self.headers[i]
def setdefault(self, name, default=""):
lowername = name.lower()
if lowername in self.dict:
return self.dict[lowername]
else:
text = name + ": " + default
for line in text.split("\n"):
self.headers.append(line + "\n")
self.dict[lowername] = default
return default
def has_key(self, name):
"""Determine whether a message contains the named header."""
return name.lower() in self.dict
def __contains__(self, name):
"""Determine whether a message contains the named header."""
return name.lower() in self.dict
def __iter__(self):
return iter(self.dict)
def keys(self):
"""Get all of a message's header field names."""
return self.dict.keys()
def values(self):
"""Get all of a message's header field values."""
return self.dict.values()
def items(self):
"""Get all of a message's headers.
Returns a list of name, value tuples.
"""
return self.dict.items()
def __str__(self):
return ''.join(self.headers)
# Utility functions
# -----------------
# XXX Should fix unquote() and quote() to be really conformant.
# XXX The inverses of the parse functions may also be useful.
def unquote(s):
"""Remove quotes from a string."""
if len(s) > 1:
if s.startswith('"') and s.endswith('"'):
return s[1:-1].replace('\\\\', '\\').replace('\\"', '"')
if s.startswith('<') and s.endswith('>'):
return s[1:-1]
return s
def quote(s):
"""Add quotes around a string."""
return s.replace('\\', '\\\\').replace('"', '\\"')
def parseaddr(address):
"""Parse an address into a (realname, mailaddr) tuple."""
a = AddressList(address)
lst = a.addresslist
if not lst:
return (None, None)
return lst[0]
class AddrlistClass:
"""Address parser class by Ben Escoto.
To understand what this class does, it helps to have a copy of
RFC 2822 in front of you.
http://www.faqs.org/rfcs/rfc2822.html
Note: this class interface is deprecated and may be removed in the future.
Use rfc822.AddressList instead.
"""
def __init__(self, field):
"""Initialize a new instance.
`field' is an unparsed address header field, containing one or more
addresses.
"""
self.specials = '()<>@,:;.\"[]'
self.pos = 0
self.LWS = ' \t'
self.CR = '\r\n'
self.atomends = self.specials + self.LWS + self.CR
# Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it
# is obsolete syntax. RFC 2822 requires that we recognize obsolete
# syntax, so allow dots in phrases.
self.phraseends = self.atomends.replace('.', '')
self.field = field
self.commentlist = []
def gotonext(self):
"""Parse up to the start of the next address."""
while self.pos < len(self.field):
if self.field[self.pos] in self.LWS + '\n\r':
self.pos = self.pos + 1
elif self.field[self.pos] == '(':
self.commentlist.append(self.getcomment())
else: break
def getaddrlist(self):
"""Parse all addresses.
Returns a list containing all of the addresses.
"""
result = []
ad = self.getaddress()
while ad:
result += ad
ad = self.getaddress()
return result
def getaddress(self):
"""Parse the next address."""
self.commentlist = []
self.gotonext()
oldpos = self.pos
oldcl = self.commentlist
plist = self.getphraselist()
self.gotonext()
returnlist = []
if self.pos >= len(self.field):
# Bad email address technically, no domain.
if plist:
returnlist = [(' '.join(self.commentlist), plist[0])]
elif self.field[self.pos] in '.@':
# email address is just an addrspec
# this isn't very efficient since we start over
self.pos = oldpos
self.commentlist = oldcl
addrspec = self.getaddrspec()
returnlist = [(' '.join(self.commentlist), addrspec)]
elif self.field[self.pos] == ':':
# address is a group
returnlist = []
fieldlen = len(self.field)
self.pos += 1
while self.pos < len(self.field):
self.gotonext()
if self.pos < fieldlen and self.field[self.pos] == ';':
self.pos += 1
break
returnlist = returnlist + self.getaddress()
elif self.field[self.pos] == '<':
# Address is a phrase then a route addr
routeaddr = self.getrouteaddr()
if self.commentlist:
returnlist = [(' '.join(plist) + ' (' + \
' '.join(self.commentlist) + ')', routeaddr)]
else: returnlist = [(' '.join(plist), routeaddr)]
else:
if plist:
returnlist = [(' '.join(self.commentlist), plist[0])]
elif self.field[self.pos] in self.specials:
self.pos += 1
self.gotonext()
if self.pos < len(self.field) and self.field[self.pos] == ',':
self.pos += 1
return returnlist
def getrouteaddr(self):
"""Parse a route address (Return-path value).
This method just skips all the route stuff and returns the addrspec.
"""
if self.field[self.pos] != '<':
return
expectroute = 0
self.pos += 1
self.gotonext()
adlist = ""
while self.pos < len(self.field):
if expectroute:
self.getdomain()
expectroute = 0
elif self.field[self.pos] == '>':
self.pos += 1
break
elif self.field[self.pos] == '@':
self.pos += 1
expectroute = 1
elif self.field[self.pos] == ':':
self.pos += 1
else:
adlist = self.getaddrspec()
self.pos += 1
break
self.gotonext()
return adlist
def getaddrspec(self):
"""Parse an RFC 2822 addr-spec."""
aslist = []
self.gotonext()
while self.pos < len(self.field):
if self.field[self.pos] == '.':
aslist.append('.')
self.pos += 1
elif self.field[self.pos] == '"':
aslist.append('"%s"' % self.getquote())
elif self.field[self.pos] in self.atomends:
break
else: aslist.append(self.getatom())
self.gotonext()
if self.pos >= len(self.field) or self.field[self.pos] != '@':
return ''.join(aslist)
aslist.append('@')
self.pos += 1
self.gotonext()
return ''.join(aslist) + self.getdomain()
def getdomain(self):
"""Get the complete domain name from an address."""
sdlist = []
while self.pos < len(self.field):
if self.field[self.pos] in self.LWS:
self.pos += 1
elif self.field[self.pos] == '(':
self.commentlist.append(self.getcomment())
elif self.field[self.pos] == '[':
sdlist.append(self.getdomainliteral())
elif self.field[self.pos] == '.':
self.pos += 1
sdlist.append('.')
elif self.field[self.pos] in self.atomends:
break
else: sdlist.append(self.getatom())
return ''.join(sdlist)
def getdelimited(self, beginchar, endchars, allowcomments = 1):
"""Parse a header fragment delimited by special characters.
`beginchar' is the start character for the fragment. If self is not
looking at an instance of `beginchar' then getdelimited returns the
empty string.
`endchars' is a sequence of allowable end-delimiting characters.
Parsing stops when one of these is encountered.
If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed
within the parsed fragment.
"""
if self.field[self.pos] != beginchar:
return ''
slist = ['']
quote = 0
self.pos += 1
while self.pos < len(self.field):
if quote == 1:
slist.append(self.field[self.pos])
quote = 0
elif self.field[self.pos] in endchars:
self.pos += 1
break
elif allowcomments and self.field[self.pos] == '(':
slist.append(self.getcomment())
continue # have already advanced pos from getcomment
elif self.field[self.pos] == '\\':
quote = 1
else:
slist.append(self.field[self.pos])
self.pos += 1
return ''.join(slist)
def getquote(self):
"""Get a quote-delimited fragment from self's field."""
return self.getdelimited('"', '"\r', 0)
def getcomment(self):
"""Get a parenthesis-delimited fragment from self's field."""
return self.getdelimited('(', ')\r', 1)
def getdomainliteral(self):
"""Parse an RFC 2822 domain-literal."""
return '[%s]' % self.getdelimited('[', ']\r', 0)
def getatom(self, atomends=None):
"""Parse an RFC 2822 atom.
Optional atomends specifies a different set of end token delimiters
(the default is to use self.atomends). This is used e.g. in
getphraselist() since phrase endings must not include the `.' (which
is legal in phrases)."""
atomlist = ['']
if atomends is None:
atomends = self.atomends
while self.pos < len(self.field):
if self.field[self.pos] in atomends:
break
else: atomlist.append(self.field[self.pos])
self.pos += 1
return ''.join(atomlist)
def getphraselist(self):
"""Parse a sequence of RFC 2822 phrases.
A phrase is a sequence of words, which are in turn either RFC 2822
atoms or quoted-strings. Phrases are canonicalized by squeezing all
runs of continuous whitespace into one space.
"""
plist = []
while self.pos < len(self.field):
if self.field[self.pos] in self.LWS:
self.pos += 1
elif self.field[self.pos] == '"':
plist.append(self.getquote())
elif self.field[self.pos] == '(':
self.commentlist.append(self.getcomment())
elif self.field[self.pos] in self.phraseends:
break
else:
plist.append(self.getatom(self.phraseends))
return plist
class AddressList(AddrlistClass):
"""An AddressList encapsulates a list of parsed RFC 2822 addresses."""
def __init__(self, field):
AddrlistClass.__init__(self, field)
if field:
self.addresslist = self.getaddrlist()
else:
self.addresslist = []
def __len__(self):
return len(self.addresslist)
def __str__(self):
return ", ".join(map(dump_address_pair, self.addresslist))
def __add__(self, other):
# Set union
newaddr = AddressList(None)
newaddr.addresslist = self.addresslist[:]
for x in other.addresslist:
if not x in self.addresslist:
newaddr.addresslist.append(x)
return newaddr
def __iadd__(self, other):
# Set union, in-place
for x in other.addresslist:
if not x in self.addresslist:
self.addresslist.append(x)
return self
def __sub__(self, other):
# Set difference
newaddr = AddressList(None)
for x in self.addresslist:
if not x in other.addresslist:
newaddr.addresslist.append(x)
return newaddr
def __isub__(self, other):
# Set difference, in-place
for x in other.addresslist:
if x in self.addresslist:
self.addresslist.remove(x)
return self
def __getitem__(self, index):
# Make indexing, slices, and 'in' work
return self.addresslist[index]
def dump_address_pair(pair):
"""Dump a (name, address) pair in a canonicalized form."""
if pair[0]:
return '"' + pair[0] + '" <' + pair[1] + '>'
else:
return pair[1]
# Parse a date field
_monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
'aug', 'sep', 'oct', 'nov', 'dec',
'january', 'february', 'march', 'april', 'may', 'june', 'july',
'august', 'september', 'october', 'november', 'december']
_daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
# The timezone table does not include the military time zones defined
# in RFC822, other than Z. According to RFC1123, the description in
# RFC822 gets the signs wrong, so we can't rely on any such time
# zones. RFC1123 recommends that numeric timezone indicators be used
# instead of timezone names.
_timezones = {'UT':0, 'UTC':0, 'GMT':0, 'Z':0,
'AST': -400, 'ADT': -300, # Atlantic (used in Canada)
'EST': -500, 'EDT': -400, # Eastern
'CST': -600, 'CDT': -500, # Central
'MST': -700, 'MDT': -600, # Mountain
'PST': -800, 'PDT': -700 # Pacific
}
def parsedate_tz(data):
"""Convert a date string to a time tuple.
Accounts for military timezones.
"""
if not data:
return None
data = data.split()
if data[0][-1] in (',', '.') or data[0].lower() in _daynames:
# There's a dayname here. Skip it
del data[0]
else:
# no space after the "weekday,"?
i = data[0].rfind(',')
if i >= 0:
data[0] = data[0][i+1:]
if len(data) == 3: # RFC 850 date, deprecated
stuff = data[0].split('-')
if len(stuff) == 3:
data = stuff + data[1:]
if len(data) == 4:
s = data[3]
i = s.find('+')
if i > 0:
data[3:] = [s[:i], s[i+1:]]
else:
data.append('') # Dummy tz
if len(data) < 5:
return None
data = data[:5]
[dd, mm, yy, tm, tz] = data
mm = mm.lower()
if not mm in _monthnames:
dd, mm = mm, dd.lower()
if not mm in _monthnames:
return None
mm = _monthnames.index(mm)+1
if mm > 12: mm = mm - 12
if dd[-1] == ',':
dd = dd[:-1]
i = yy.find(':')
if i > 0:
yy, tm = tm, yy
if yy[-1] == ',':
yy = yy[:-1]
if not yy[0].isdigit():
yy, tz = tz, yy
if tm[-1] == ',':
tm = tm[:-1]
tm = tm.split(':')
if len(tm) == 2:
[thh, tmm] = tm
tss = '0'
elif len(tm) == 3:
[thh, tmm, tss] = tm
else:
return None
try:
yy = int(yy)
dd = int(dd)
thh = int(thh)
tmm = int(tmm)
tss = int(tss)
except ValueError:
return None
tzoffset = None
tz = tz.upper()
if tz in _timezones:
tzoffset = _timezones[tz]
else:
try:
tzoffset = int(tz)
except ValueError:
pass
# Convert a timezone offset into seconds ; -0500 -> -18000
if tzoffset:
if tzoffset < 0:
tzsign = -1
tzoffset = -tzoffset
else:
tzsign = 1
tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60)
return (yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset)
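# Editor's note (illustrative, not part of the original module): for a typical
# date header such as 'Sun, 06 Nov 1994 08:49:37 GMT', parsedate_tz() returns
# (1994, 11, 6, 8, 49, 37, 0, 1, 0, 0) -- the first nine fields are compatible
# with time.mktime() and the tenth is the zone offset from UTC in seconds.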
def parsedate(data):
"""Convert a time string to a time tuple."""
t = parsedate_tz(data)
if t is None:
return t
return t[:9]
def mktime_tz(data):
"""Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp."""
if data[9] is None:
# No zone info, so localtime is better assumption than GMT
return time.mktime(data[:8] + (-1,))
else:
t = time.mktime(data[:8] + (0,))
return t - data[9] - time.timezone
def formatdate(timeval=None):
"""Returns time format preferred for Internet standards.
Sun, 06 Nov 1994 08:49:37 GMT ; RFC 822, updated by RFC 1123
According to RFC 1123, day and month names must always be in
English. If not for that, this code could use strftime(). It
    can't because strftime() honors the locale and could generate
non-English names.
"""
if timeval is None:
timeval = time.time()
timeval = time.gmtime(timeval)
return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")[timeval[6]],
timeval[2],
("Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec")[timeval[1]-1],
timeval[0], timeval[3], timeval[4], timeval[5])
# When used as script, run a small test program.
# The first command line argument must be a filename containing one
# message in RFC-822 format.
if __name__ == '__main__':
import sys, os
file = os.path.join(os.environ['HOME'], 'Mail/inbox/1')
if sys.argv[1:]: file = sys.argv[1]
f = open(file, 'r')
m = Message(f)
print 'From:', m.getaddr('from')
print 'To:', m.getaddrlist('to')
print 'Subject:', m.getheader('subject')
print 'Date:', m.getheader('date')
date = m.getdate_tz('date')
tz = date[-1]
date = time.localtime(mktime_tz(date))
if date:
print 'ParsedDate:', time.asctime(date),
hhmmss = tz
hhmm, ss = divmod(hhmmss, 60)
hh, mm = divmod(hhmm, 60)
print "%+03d%02d" % (hh, mm),
if ss: print ".%02d" % ss,
print
else:
print 'ParsedDate:', None
m.rewindbody()
n = 0
while f.readline():
n += 1
print 'Lines:', n
print '-'*70
print 'len =', len(m)
if 'Date' in m: print 'Date =', m['Date']
if 'X-Nonsense' in m: pass
print 'keys =', m.keys()
print 'values =', m.values()
print 'items =', m.items()
|
apache-2.0
|
chirilo/ana-ka-todo-app
|
node_modules/laravel-elixir/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/input.py
|
292
|
114315
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from compiler.ast import Const
from compiler.ast import Dict
from compiler.ast import Discard
from compiler.ast import List
from compiler.ast import Module
from compiler.ast import Node
from compiler.ast import Stmt
import compiler
import gyp.common
import gyp.simple_copy
import multiprocessing
import optparse
import os.path
import re
import shlex
import signal
import subprocess
import sys
import threading
import time
import traceback
from gyp.common import GypError
from gyp.common import OrderedSet
# A list of types that are treated as linkable.
linkable_types = ['executable', 'shared_library', 'loadable_module']
# A list of sections that contain links to other targets.
dependency_sections = ['dependencies', 'export_dependent_settings']
# base_path_sections is a list of sections defined by GYP that contain
# pathnames. The generators can provide more keys, the two lists are merged
# into path_sections, but you should call IsPathSection instead of using either
# list directly.
base_path_sections = [
'destination',
'files',
'include_dirs',
'inputs',
'libraries',
'outputs',
'sources',
]
path_sections = set()
# These per-process dictionaries are used to cache build file data when loading
# in parallel mode.
per_process_data = {}
per_process_aux_data = {}
def IsPathSection(section):
# If section ends in one of the '=+?!' characters, it's applied to a section
# without the trailing characters. '/' is notably absent from this list,
# because there's no way for a regular expression to be treated as a path.
while section[-1:] in '=+?!':
section = section[:-1]
if section in path_sections:
return True
  # Sections matching the regexp '_(dir|file|path)s?$' are also
# considered PathSections. Using manual string matching since that
# is much faster than the regexp and this can be called hundreds of
# thousands of times so micro performance matters.
if "_" in section:
tail = section[-6:]
if tail[-1] == 's':
tail = tail[:-1]
if tail[-5:] in ('_file', '_path'):
return True
return tail[-4:] == '_dir'
return False
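# Editor's note (illustrative, not part of the original file): with the default
# (empty) path_sections set, IsPathSection('include_dirs') and
# IsPathSection('foo_dir') are both True via the '_dir' suffix heuristic, while
# IsPathSection('defines') is False because the name contains no underscore.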
# base_non_configuration_keys is a list of key names that belong in the target
# itself and should not be propagated into its configurations. It is merged
# with a list that can come from the generator to
# create non_configuration_keys.
base_non_configuration_keys = [
# Sections that must exist inside targets and not configurations.
'actions',
'configurations',
'copies',
'default_configuration',
'dependencies',
'dependencies_original',
'libraries',
'postbuilds',
'product_dir',
'product_extension',
'product_name',
'product_prefix',
'rules',
'run_as',
'sources',
'standalone_static_library',
'suppress_wildcard',
'target_name',
'toolset',
'toolsets',
'type',
# Sections that can be found inside targets or configurations, but that
# should not be propagated from targets into their configurations.
'variables',
]
non_configuration_keys = []
# Keys that do not belong inside a configuration dictionary.
invalid_configuration_keys = [
'actions',
'all_dependent_settings',
'configurations',
'dependencies',
'direct_dependent_settings',
'libraries',
'link_settings',
'sources',
'standalone_static_library',
'target_name',
'type',
]
# Controls whether or not the generator supports multiple toolsets.
multiple_toolsets = False
# Paths for converting filelist paths to output paths: {
# toplevel,
# qualified_output_dir,
# }
generator_filelist_paths = None
def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
"""Return a list of all build files included into build_file_path.
The returned list will contain build_file_path as well as all other files
that it included, either directly or indirectly. Note that the list may
contain files that were included into a conditional section that evaluated
to false and was not merged into build_file_path's dict.
aux_data is a dict containing a key for each build file or included build
file. Those keys provide access to dicts whose "included" keys contain
lists of all other files included by the build file.
included should be left at its default None value by external callers. It
is used for recursion.
The returned list will not contain any duplicate entries. Each build file
in the list will be relative to the current directory.
"""
if included == None:
included = []
if build_file_path in included:
return included
included.append(build_file_path)
for included_build_file in aux_data[build_file_path].get('included', []):
GetIncludedBuildFiles(included_build_file, aux_data, included)
return included
def CheckedEval(file_contents):
"""Return the eval of a gyp file.
The gyp file is restricted to dictionaries and lists only, and
repeated keys are not allowed.
Note that this is slower than eval() is.
"""
ast = compiler.parse(file_contents)
assert isinstance(ast, Module)
c1 = ast.getChildren()
assert c1[0] is None
assert isinstance(c1[1], Stmt)
c2 = c1[1].getChildren()
assert isinstance(c2[0], Discard)
c3 = c2[0].getChildren()
assert len(c3) == 1
return CheckNode(c3[0], [])
def CheckNode(node, keypath):
if isinstance(node, Dict):
c = node.getChildren()
dict = {}
for n in range(0, len(c), 2):
assert isinstance(c[n], Const)
key = c[n].getChildren()[0]
if key in dict:
raise GypError("Key '" + key + "' repeated at level " +
repr(len(keypath) + 1) + " with key path '" +
'.'.join(keypath) + "'")
kp = list(keypath) # Make a copy of the list for descending this node.
kp.append(key)
dict[key] = CheckNode(c[n + 1], kp)
return dict
elif isinstance(node, List):
c = node.getChildren()
children = []
for index, child in enumerate(c):
kp = list(keypath) # Copy list.
kp.append(repr(index))
children.append(CheckNode(child, kp))
return children
elif isinstance(node, Const):
return node.getChildren()[0]
else:
raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
"': " + repr(node))
def LoadOneBuildFile(build_file_path, data, aux_data, includes,
is_target, check):
if build_file_path in data:
return data[build_file_path]
if os.path.exists(build_file_path):
build_file_contents = open(build_file_path).read()
else:
raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
build_file_data = None
try:
if check:
build_file_data = CheckedEval(build_file_contents)
else:
build_file_data = eval(build_file_contents, {'__builtins__': None},
None)
except SyntaxError, e:
e.filename = build_file_path
raise
except Exception, e:
gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
raise
if type(build_file_data) is not dict:
raise GypError("%s does not evaluate to a dictionary." % build_file_path)
data[build_file_path] = build_file_data
aux_data[build_file_path] = {}
# Scan for includes and merge them in.
if ('skip_includes' not in build_file_data or
not build_file_data['skip_includes']):
try:
if is_target:
LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
aux_data, includes, check)
else:
LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
aux_data, None, check)
except Exception, e:
gyp.common.ExceptionAppend(e,
'while reading includes of ' + build_file_path)
raise
return build_file_data
def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
includes, check):
includes_list = []
if includes != None:
includes_list.extend(includes)
if 'includes' in subdict:
for include in subdict['includes']:
# "include" is specified relative to subdict_path, so compute the real
# path to include by appending the provided "include" to the directory
# in which subdict_path resides.
relative_include = \
os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
includes_list.append(relative_include)
# Unhook the includes list, it's no longer needed.
del subdict['includes']
# Merge in the included files.
for include in includes_list:
if not 'included' in aux_data[subdict_path]:
aux_data[subdict_path]['included'] = []
aux_data[subdict_path]['included'].append(include)
gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
MergeDicts(subdict,
LoadOneBuildFile(include, data, aux_data, None, False, check),
subdict_path, include)
# Recurse into subdictionaries.
for k, v in subdict.iteritems():
if type(v) is dict:
LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
None, check)
elif type(v) is list:
LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
check)
# This recurses into lists so that it can look for dicts.
def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
for item in sublist:
if type(item) is dict:
LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
None, check)
elif type(item) is list:
LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
# Processes toolsets in all the targets. This recurses into condition entries
# since they can contain toolsets as well.
def ProcessToolsetsInDict(data):
if 'targets' in data:
target_list = data['targets']
new_target_list = []
for target in target_list:
# If this target already has an explicit 'toolset', and no 'toolsets'
# list, don't modify it further.
if 'toolset' in target and 'toolsets' not in target:
new_target_list.append(target)
continue
if multiple_toolsets:
toolsets = target.get('toolsets', ['target'])
else:
toolsets = ['target']
# Make sure this 'toolsets' definition is only processed once.
if 'toolsets' in target:
del target['toolsets']
if len(toolsets) > 0:
# Optimization: only do copies if more than one toolset is specified.
for build in toolsets[1:]:
new_target = gyp.simple_copy.deepcopy(target)
new_target['toolset'] = build
new_target_list.append(new_target)
target['toolset'] = toolsets[0]
new_target_list.append(target)
data['targets'] = new_target_list
if 'conditions' in data:
for condition in data['conditions']:
if type(condition) is list:
for condition_dict in condition[1:]:
if type(condition_dict) is dict:
ProcessToolsetsInDict(condition_dict)
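# Sketch of the expansion performed by ProcessToolsetsInDict when
# multiple_toolsets is True (hypothetical target, names for illustration only):
#   {'target_name': 'foo', 'toolsets': ['target', 'host']}
# is replaced by two targets:
#   {'target_name': 'foo', 'toolset': 'host'}
#   {'target_name': 'foo', 'toolset': 'target'}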
# TODO(mark): I don't love this name. It just means that it's going to load
# a build file that contains targets and is expected to provide a targets dict
# that contains the targets...
def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
depth, check, load_dependencies):
# If depth is set, predefine the DEPTH variable to be a relative path from
# this build file's directory to the directory identified by depth.
if depth:
# TODO(dglazkov) The backslash/forward-slash replacement at the end is a
# temporary measure. This should really be addressed by keeping all paths
# in POSIX until actual project generation.
d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
if d == '':
variables['DEPTH'] = '.'
else:
variables['DEPTH'] = d.replace('\\', '/')
# The 'target_build_files' key is only set when loading target build files in
# the non-parallel code path, where LoadTargetBuildFile is called
# recursively. In the parallel code path, we don't need to check whether the
# |build_file_path| has already been loaded, because the 'scheduled' set in
# ParallelState guarantees that we never load the same |build_file_path|
# twice.
if 'target_build_files' in data:
if build_file_path in data['target_build_files']:
# Already loaded.
return False
data['target_build_files'].add(build_file_path)
gyp.DebugOutput(gyp.DEBUG_INCLUDES,
"Loading Target Build File '%s'", build_file_path)
build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
includes, True, check)
# Store DEPTH for later use in generators.
build_file_data['_DEPTH'] = depth
# Set up the included_files key indicating which .gyp files contributed to
# this target dict.
if 'included_files' in build_file_data:
raise GypError(build_file_path + ' must not contain included_files key')
included = GetIncludedBuildFiles(build_file_path, aux_data)
build_file_data['included_files'] = []
for included_file in included:
# included_file is relative to the current directory, but it needs to
# be made relative to build_file_path's directory.
included_relative = \
gyp.common.RelativePath(included_file,
os.path.dirname(build_file_path))
build_file_data['included_files'].append(included_relative)
# Do a first round of toolsets expansion so that conditions can be defined
# per toolset.
ProcessToolsetsInDict(build_file_data)
# Apply "pre"/"early" variable expansions and condition evaluations.
ProcessVariablesAndConditionsInDict(
build_file_data, PHASE_EARLY, variables, build_file_path)
# Since some toolsets might have been defined conditionally, perform
# a second round of toolsets expansion now.
ProcessToolsetsInDict(build_file_data)
# Look at each project's target_defaults dict, and merge settings into
# targets.
if 'target_defaults' in build_file_data:
if 'targets' not in build_file_data:
raise GypError("Unable to find targets in build file %s" %
build_file_path)
index = 0
while index < len(build_file_data['targets']):
# This procedure needs to give the impression that target_defaults is
# used as defaults, and the individual targets inherit from that.
# The individual targets need to be merged into the defaults. Make
# a deep copy of the defaults for each target, merge the target dict
# as found in the input file into that copy, and then hook up the
# copy with the target-specific data merged into it as the replacement
# target dict.
old_target_dict = build_file_data['targets'][index]
new_target_dict = gyp.simple_copy.deepcopy(
build_file_data['target_defaults'])
MergeDicts(new_target_dict, old_target_dict,
build_file_path, build_file_path)
build_file_data['targets'][index] = new_target_dict
index += 1
# No longer needed.
del build_file_data['target_defaults']
# Look for dependencies. This means that dependency resolution occurs
# after "pre" conditionals and variable expansion, but before "post" -
# in other words, you can't put a "dependencies" section inside a "post"
# conditional within a target.
dependencies = []
if 'targets' in build_file_data:
for target_dict in build_file_data['targets']:
if 'dependencies' not in target_dict:
continue
for dependency in target_dict['dependencies']:
dependencies.append(
gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
if load_dependencies:
for dependency in dependencies:
try:
LoadTargetBuildFile(dependency, data, aux_data, variables,
includes, depth, check, load_dependencies)
except Exception, e:
gyp.common.ExceptionAppend(
e, 'while loading dependencies of %s' % build_file_path)
raise
else:
return (build_file_path, dependencies)
def CallLoadTargetBuildFile(global_flags,
build_file_path, variables,
includes, depth, check,
generator_input_info):
"""Wrapper around LoadTargetBuildFile for parallel processing.
This wrapper is used when LoadTargetBuildFile is executed in
a worker process.
"""
try:
signal.signal(signal.SIGINT, signal.SIG_IGN)
# Apply globals so that the worker process behaves the same.
for key, value in global_flags.iteritems():
globals()[key] = value
SetGeneratorGlobals(generator_input_info)
result = LoadTargetBuildFile(build_file_path, per_process_data,
per_process_aux_data, variables,
includes, depth, check, False)
if not result:
return result
(build_file_path, dependencies) = result
# We can safely pop the build_file_data from per_process_data because it
# will never be referenced by this process again, so we don't need to keep
# it in the cache.
build_file_data = per_process_data.pop(build_file_path)
# This gets serialized and sent back to the main process via a pipe.
# It's handled in LoadTargetBuildFileCallback.
return (build_file_path,
build_file_data,
dependencies)
except GypError, e:
sys.stderr.write("gyp: %s\n" % e)
return None
except Exception, e:
print >>sys.stderr, 'Exception:', e
print >>sys.stderr, traceback.format_exc()
return None
class ParallelProcessingError(Exception):
pass
class ParallelState(object):
"""Class to keep track of state when processing input files in parallel.
If build files are loaded in parallel, use this to keep track of
state during farming out and processing parallel jobs. It's stored
in a global so that the callback function can have access to it.
"""
def __init__(self):
# The multiprocessing pool.
self.pool = None
# The condition variable used to protect this object and notify
# the main loop when there might be more data to process.
self.condition = None
# The "data" dict that was passed to LoadTargetBuildFileParallel
self.data = None
# The number of parallel calls outstanding; decremented when a response
# was received.
self.pending = 0
# The set of all build files that have been scheduled, so we don't
# schedule the same one twice.
self.scheduled = set()
# A list of dependency build file paths that haven't been scheduled yet.
self.dependencies = []
# Flag to indicate if there was an error in a child process.
self.error = False
def LoadTargetBuildFileCallback(self, result):
"""Handle the results of running LoadTargetBuildFile in another process.
"""
self.condition.acquire()
if not result:
self.error = True
self.condition.notify()
self.condition.release()
return
(build_file_path0, build_file_data0, dependencies0) = result
self.data[build_file_path0] = build_file_data0
self.data['target_build_files'].add(build_file_path0)
for new_dependency in dependencies0:
if new_dependency not in self.scheduled:
self.scheduled.add(new_dependency)
self.dependencies.append(new_dependency)
self.pending -= 1
self.condition.notify()
self.condition.release()
def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
check, generator_input_info):
parallel_state = ParallelState()
parallel_state.condition = threading.Condition()
# Make copies of the build_files argument that we can modify while working.
parallel_state.dependencies = list(build_files)
parallel_state.scheduled = set(build_files)
parallel_state.pending = 0
parallel_state.data = data
try:
parallel_state.condition.acquire()
while parallel_state.dependencies or parallel_state.pending:
if parallel_state.error:
break
if not parallel_state.dependencies:
parallel_state.condition.wait()
continue
dependency = parallel_state.dependencies.pop()
parallel_state.pending += 1
global_flags = {
'path_sections': globals()['path_sections'],
'non_configuration_keys': globals()['non_configuration_keys'],
'multiple_toolsets': globals()['multiple_toolsets']}
if not parallel_state.pool:
parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
parallel_state.pool.apply_async(
CallLoadTargetBuildFile,
args = (global_flags, dependency,
variables, includes, depth, check, generator_input_info),
callback = parallel_state.LoadTargetBuildFileCallback)
except KeyboardInterrupt, e:
parallel_state.pool.terminate()
raise e
parallel_state.condition.release()
parallel_state.pool.close()
parallel_state.pool.join()
parallel_state.pool = None
if parallel_state.error:
sys.exit(1)
# Look for the bracket that matches the first bracket seen in a
# string, and return the start and end as a tuple. For example, if
# the input is something like "<(foo <(bar)) blah", then it would
# return (1, 13), indicating the entire string except for the leading
# "<" and trailing " blah".
LBRACKETS = set('{[(')
BRACKETS = {'}': '{', ']': '[', ')': '('}
def FindEnclosingBracketGroup(input_str):
stack = []
start = -1
for index, char in enumerate(input_str):
if char in LBRACKETS:
stack.append(char)
if start == -1:
start = index
elif char in BRACKETS:
if not stack:
return (-1, -1)
if stack.pop() != BRACKETS[char]:
return (-1, -1)
if not stack:
return (start, index + 1)
return (-1, -1)
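# Illustrative calls (hypothetical inputs):
#   FindEnclosingBracketGroup('<(foo <(bar)) blah')  -> (1, 13)
#   FindEnclosingBracketGroup('no brackets here')    -> (-1, -1)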
def IsStrCanonicalInt(string):
"""Returns True if |string| is in its canonical integer form.
The canonical form is such that str(int(string)) == string.
"""
if type(string) is str:
# This function is called a lot so for maximum performance, avoid
# involving regexps which would otherwise make the code much
# shorter. Regexps would need twice the time of this function.
if string:
if string == "0":
return True
if string[0] == "-":
string = string[1:]
if not string:
return False
if '1' <= string[0] <= '9':
return string.isdigit()
return False
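# A few illustrative cases for IsStrCanonicalInt:
#   IsStrCanonicalInt('10')   -> True
#   IsStrCanonicalInt('-5')   -> True
#   IsStrCanonicalInt('010')  -> False   (leading zero; str(int('010')) != '010')
#   IsStrCanonicalInt('-0')   -> False
#   IsStrCanonicalInt(10)     -> False   (only str instances qualify)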
# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
# In the last case, the inner "<()" is captured in match['content'].
early_variable_re = re.compile(
r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
r'\((?P<is_array>\s*\[?)'
r'(?P<content>.*?)(\]?)\))')
# This matches the same as early_variable_re, but with '>' instead of '<'.
late_variable_re = re.compile(
r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
r'\((?P<is_array>\s*\[?)'
r'(?P<content>.*?)(\]?)\))')
# This matches the same as early_variable_re, but with '^' instead of '<'.
latelate_variable_re = re.compile(
r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
r'\((?P<is_array>\s*\[?)'
r'(?P<content>.*?)(\]?)\))')
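# For reference, a match of early_variable_re against the hypothetical input
# '<!@(python -c x)' populates the named groups roughly as follows:
#   type           -> '<!@'   (command expansion, list context)
#   command_string -> None
#   is_array       -> ''
#   content        -> 'python -c x'
# The exact replacement span is later widened by FindEnclosingBracketGroup
# when the content itself contains parentheses.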
# Global cache of results from running commands so they don't have to be run
# more then once.
cached_command_results = {}
def FixupPlatformCommand(cmd):
if sys.platform == 'win32':
if type(cmd) is list:
cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
else:
cmd = re.sub('^cat ', 'type ', cmd)
return cmd
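# Example of the win32 rewrite above (illustrative):
#   FixupPlatformCommand('cat filelist.txt')  -> 'type filelist.txt'  (win32)
#   FixupPlatformCommand('cat filelist.txt')  -> 'cat filelist.txt'   (elsewhere)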
PHASE_EARLY = 0
PHASE_LATE = 1
PHASE_LATELATE = 2
def ExpandVariables(input, phase, variables, build_file):
# Look for the pattern that gets expanded into variables
if phase == PHASE_EARLY:
variable_re = early_variable_re
expansion_symbol = '<'
elif phase == PHASE_LATE:
variable_re = late_variable_re
expansion_symbol = '>'
elif phase == PHASE_LATELATE:
variable_re = latelate_variable_re
expansion_symbol = '^'
else:
assert False
input_str = str(input)
if IsStrCanonicalInt(input_str):
return int(input_str)
# Do a quick scan to determine if an expensive regex search is warranted.
if expansion_symbol not in input_str:
return input_str
# Get the entire list of matches as a list of MatchObject instances.
# (using findall here would return strings instead of MatchObjects).
matches = list(variable_re.finditer(input_str))
if not matches:
return input_str
output = input_str
# Reverse the list of matches so that replacements are done right-to-left.
# That ensures that earlier replacements won't mess up the string in a
# way that causes later calls to find the earlier substituted text instead
# of what's intended for replacement.
matches.reverse()
for match_group in matches:
match = match_group.groupdict()
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
# match['replace'] is the substring to look for, match['type']
# is the character code for the replacement type (< > <! >! <| >| <@
# >@ <!@ >!@), match['is_array'] contains a '[' for command
# arrays, and match['content'] is the name of the variable (< >)
# or command to run (<! >!). match['command_string'] is an optional
# command string. Currently, only 'pymod_do_main' is supported.
# run_command is true if a ! variant is used.
run_command = '!' in match['type']
command_string = match['command_string']
# file_list is true if a | variant is used.
file_list = '|' in match['type']
# Capture these now so we can adjust them later.
replace_start = match_group.start('replace')
replace_end = match_group.end('replace')
# Find the ending paren, and re-evaluate the contained string.
(c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
# Adjust the replacement range to match the entire command
# found by FindEnclosingBracketGroup (since the variable_re
# probably doesn't match the entire command if it contained
# nested variables).
replace_end = replace_start + c_end
# Find the "real" replacement, matching the appropriate closing
# paren, and adjust the replacement start and end.
replacement = input_str[replace_start:replace_end]
# Figure out what the contents of the variable parens are.
contents_start = replace_start + c_start + 1
contents_end = replace_end - 1
contents = input_str[contents_start:contents_end]
# Do filter substitution now for <|().
# Admittedly, this is different than the evaluation order in other
# contexts. However, since filtration has no chance to run on <|(),
# this seems like the only obvious way to give them access to filters.
if file_list:
processed_variables = gyp.simple_copy.deepcopy(variables)
ProcessListFiltersInDict(contents, processed_variables)
# Recurse to expand variables in the contents
contents = ExpandVariables(contents, phase,
processed_variables, build_file)
else:
# Recurse to expand variables in the contents
contents = ExpandVariables(contents, phase, variables, build_file)
# Strip off leading/trailing whitespace so that variable matches are
# simpler below (and because they are rarely needed).
contents = contents.strip()
# expand_to_list is true if an @ variant is used. In that case,
    # the expansion should result in a list. Note that the caller must then
    # be expecting a list in return; not all callers are, because not all are
    # working in list context. Also, for list
# expansions, there can be no other text besides the variable
# expansion in the input string.
expand_to_list = '@' in match['type'] and input_str == replacement
if run_command or file_list:
# Find the build file's directory, so commands can be run or file lists
# generated relative to it.
build_file_dir = os.path.dirname(build_file)
if build_file_dir == '' and not file_list:
# If build_file is just a leaf filename indicating a file in the
# current directory, build_file_dir might be an empty string. Set
# it to None to signal to subprocess.Popen that it should run the
# command in the current directory.
build_file_dir = None
# Support <|(listfile.txt ...) which generates a file
# containing items from a gyp list, generated at gyp time.
# This works around actions/rules which have more inputs than will
# fit on the command line.
if file_list:
if type(contents) is list:
contents_list = contents
else:
contents_list = contents.split(' ')
replacement = contents_list[0]
if os.path.isabs(replacement):
raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
if not generator_filelist_paths:
path = os.path.join(build_file_dir, replacement)
else:
if os.path.isabs(build_file_dir):
toplevel = generator_filelist_paths['toplevel']
rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
else:
rel_build_file_dir = build_file_dir
qualified_out_dir = generator_filelist_paths['qualified_out_dir']
path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
gyp.common.EnsureDirExists(path)
replacement = gyp.common.RelativePath(path, build_file_dir)
f = gyp.common.WriteOnDiff(path)
for i in contents_list[1:]:
f.write('%s\n' % i)
f.close()
elif run_command:
use_shell = True
if match['is_array']:
contents = eval(contents)
use_shell = False
# Check for a cached value to avoid executing commands, or generating
# file lists more than once. The cache key contains the command to be
# run as well as the directory to run it from, to account for commands
# that depend on their current directory.
# TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
# someone could author a set of GYP files where each time the command
# is invoked it produces different output by design. When the need
# arises, the syntax should be extended to support no caching off a
# command's output so it is run every time.
cache_key = (str(contents), build_file_dir)
cached_value = cached_command_results.get(cache_key, None)
if cached_value is None:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Executing command '%s' in directory '%s'",
contents, build_file_dir)
replacement = ''
if command_string == 'pymod_do_main':
# <!pymod_do_main(modulename param eters) loads |modulename| as a
# python module and then calls that module's DoMain() function,
# passing ["param", "eters"] as a single list argument. For modules
# that don't load quickly, this can be faster than
# <!(python modulename param eters). Do this in |build_file_dir|.
oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir.
if build_file_dir: # build_file_dir may be None (see above).
os.chdir(build_file_dir)
try:
parsed_contents = shlex.split(contents)
try:
py_module = __import__(parsed_contents[0])
except ImportError as e:
raise GypError("Error importing pymod_do_main"
"module (%s): %s" % (parsed_contents[0], e))
replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
finally:
os.chdir(oldwd)
assert replacement != None
elif command_string:
raise GypError("Unknown command string '%s' in '%s'." %
(command_string, contents))
else:
# Fix up command with platform specific workarounds.
contents = FixupPlatformCommand(contents)
p = subprocess.Popen(contents, shell=use_shell,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
stdin=subprocess.PIPE,
cwd=build_file_dir)
p_stdout, p_stderr = p.communicate('')
if p.wait() != 0 or p_stderr:
sys.stderr.write(p_stderr)
# Simulate check_call behavior, since check_call only exists
# in python 2.5 and later.
raise GypError("Call to '%s' returned exit status %d." %
(contents, p.returncode))
replacement = p_stdout.rstrip()
cached_command_results[cache_key] = replacement
else:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Had cache value for command '%s' in directory '%s'",
                        contents, build_file_dir)
replacement = cached_value
else:
if not contents in variables:
if contents[-1] in ['!', '/']:
# In order to allow cross-compiles (nacl) to happen more naturally,
# we will allow references to >(sources/) etc. to resolve to
        # an empty list if undefined. This allows actions to:
# 'action!': [
# '>@(_sources!)',
# ],
# 'action/': [
# '>@(_sources/)',
# ],
replacement = []
else:
raise GypError('Undefined variable ' + contents +
' in ' + build_file)
else:
replacement = variables[contents]
if type(replacement) is list:
for item in replacement:
if not contents[-1] == '/' and type(item) not in (str, int):
raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' +
'list contains a ' +
item.__class__.__name__)
# Run through the list and handle variable expansions in it. Since
# the list is guaranteed not to contain dicts, this won't do anything
# with conditions sections.
ProcessVariablesAndConditionsInList(replacement, phase, variables,
build_file)
elif type(replacement) not in (str, int):
raise GypError('Variable ' + contents +
' must expand to a string or list of strings; ' +
'found a ' + replacement.__class__.__name__)
if expand_to_list:
# Expanding in list context. It's guaranteed that there's only one
# replacement to do in |input_str| and that it's this replacement. See
# above.
if type(replacement) is list:
# If it's already a list, make a copy.
output = replacement[:]
else:
# Split it the same way sh would split arguments.
output = shlex.split(str(replacement))
else:
# Expanding in string context.
encoded_replacement = ''
if type(replacement) is list:
# When expanding a list into string context, turn the list items
# into a string in a way that will work with a subprocess call.
#
# TODO(mark): This isn't completely correct. This should
# call a generator-provided function that observes the
# proper list-to-argument quoting rules on a specific
# platform instead of just calling the POSIX encoding
# routine.
encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
else:
encoded_replacement = replacement
output = output[:replace_start] + str(encoded_replacement) + \
output[replace_end:]
# Prepare for the next match iteration.
input_str = output
if output == input:
gyp.DebugOutput(gyp.DEBUG_VARIABLES,
"Found only identity matches on %r, avoiding infinite "
"recursion.",
output)
else:
# Look for more matches now that we've replaced some, to deal with
# expanding local variables (variables defined in the same
# variables block as this one).
gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
if type(output) is list:
if output and type(output[0]) is list:
# Leave output alone if it's a list of lists.
# We don't want such lists to be stringified.
pass
else:
new_output = []
for item in output:
new_output.append(
ExpandVariables(item, phase, variables, build_file))
output = new_output
else:
output = ExpandVariables(output, phase, variables, build_file)
# Convert all strings that are canonically-represented integers into integers.
if type(output) is list:
for index in xrange(0, len(output)):
if IsStrCanonicalInt(output[index]):
output[index] = int(output[index])
elif IsStrCanonicalInt(output):
output = int(output)
return output
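# A compact illustration of the expansion forms handled by ExpandVariables,
# assuming hypothetical variables {'foo': 'bar', 'mylist': ['a', 'b']}:
#   '<(foo)'      -> 'bar'        (string context)
#   '<@(mylist)'  -> ['a', 'b']   (list context; must be the entire input)
#   '<!(echo hi)' -> 'hi'         (runs the command and substitutes its stdout)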
# The same condition is often evaluated over and over again so it
# makes sense to cache as much as possible between evaluations.
cached_conditions_asts = {}
def EvalCondition(condition, conditions_key, phase, variables, build_file):
"""Returns the dict that should be used or None if the result was
that nothing should be used."""
if type(condition) is not list:
raise GypError(conditions_key + ' must be a list')
if len(condition) < 2:
# It's possible that condition[0] won't work in which case this
# attempt will raise its own IndexError. That's probably fine.
raise GypError(conditions_key + ' ' + condition[0] +
' must be at least length 2, not ' + str(len(condition)))
i = 0
result = None
while i < len(condition):
cond_expr = condition[i]
true_dict = condition[i + 1]
if type(true_dict) is not dict:
raise GypError('{} {} must be followed by a dictionary, not {}'.format(
conditions_key, cond_expr, type(true_dict)))
if len(condition) > i + 2 and type(condition[i + 2]) is dict:
false_dict = condition[i + 2]
i = i + 3
if i != len(condition):
raise GypError('{} {} has {} unexpected trailing items'.format(
conditions_key, cond_expr, len(condition) - i))
else:
false_dict = None
i = i + 2
if result == None:
result = EvalSingleCondition(
cond_expr, true_dict, false_dict, phase, variables, build_file)
return result
def EvalSingleCondition(
cond_expr, true_dict, false_dict, phase, variables, build_file):
"""Returns true_dict if cond_expr evaluates to true, and false_dict
otherwise."""
  # Do expansions on the condition itself. Since the condition can naturally
# contain variable references without needing to resort to GYP expansion
# syntax, this is of dubious value for variables, but someone might want to
# use a command expansion directly inside a condition.
cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
build_file)
if type(cond_expr_expanded) not in (str, int):
raise ValueError(
'Variable expansion in this context permits str and int ' + \
'only, found ' + cond_expr_expanded.__class__.__name__)
try:
if cond_expr_expanded in cached_conditions_asts:
ast_code = cached_conditions_asts[cond_expr_expanded]
else:
ast_code = compile(cond_expr_expanded, '<string>', 'eval')
cached_conditions_asts[cond_expr_expanded] = ast_code
if eval(ast_code, {'__builtins__': None}, variables):
return true_dict
return false_dict
except SyntaxError, e:
syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
'at character %d.' %
(str(e.args[0]), e.text, build_file, e.offset),
e.filename, e.lineno, e.offset, e.text)
raise syntax_error
except NameError, e:
gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
(cond_expr_expanded, build_file))
raise GypError(e)
def ProcessConditionsInDict(the_dict, phase, variables, build_file):
# Process a 'conditions' or 'target_conditions' section in the_dict,
# depending on phase.
# early -> conditions
# late -> target_conditions
# latelate -> no conditions
#
# Each item in a conditions list consists of cond_expr, a string expression
# evaluated as the condition, and true_dict, a dict that will be merged into
# the_dict if cond_expr evaluates to true. Optionally, a third item,
# false_dict, may be present. false_dict is merged into the_dict if
# cond_expr evaluates to false.
#
# Any dict merged into the_dict will be recursively processed for nested
# conditionals and other expansions, also according to phase, immediately
# prior to being merged.
if phase == PHASE_EARLY:
conditions_key = 'conditions'
elif phase == PHASE_LATE:
conditions_key = 'target_conditions'
elif phase == PHASE_LATELATE:
return
else:
assert False
if not conditions_key in the_dict:
return
conditions_list = the_dict[conditions_key]
# Unhook the conditions list, it's no longer needed.
del the_dict[conditions_key]
for condition in conditions_list:
merge_dict = EvalCondition(condition, conditions_key, phase, variables,
build_file)
if merge_dict != None:
      # Expand variables and nested conditionals in the merge_dict before
# merging it.
ProcessVariablesAndConditionsInDict(merge_dict, phase,
variables, build_file)
MergeDicts(the_dict, merge_dict, build_file, build_file)
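# Shape of a typical entry handled by ProcessConditionsInDict (hypothetical
# values):
#   'conditions': [
#     ['OS=="mac"', {'defines': ['MAC']}, {'defines': ['NOT_MAC']}],
#   ]
# The first dict is merged into the_dict when the expression evaluates to
# true; the optional second dict is merged when it evaluates to false.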
def LoadAutomaticVariablesFromDict(variables, the_dict):
# Any keys with plain string values in the_dict become automatic variables.
# The variable name is the key name with a "_" character prepended.
for key, value in the_dict.iteritems():
if type(value) in (str, int, list):
variables['_' + key] = value
def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
# Any keys in the_dict's "variables" dict, if it has one, becomes a
# variable. The variable name is the key name in the "variables" dict.
# Variables that end with the % character are set only if they are unset in
# the variables dict. the_dict_key is the name of the key that accesses
# the_dict in the_dict's parent dict. If the_dict's parent is not a dict
# (it could be a list or it could be parentless because it is a root dict),
# the_dict_key will be None.
for key, value in the_dict.get('variables', {}).iteritems():
if type(value) not in (str, int, list):
continue
if key.endswith('%'):
variable_name = key[:-1]
if variable_name in variables:
# If the variable is already set, don't set it.
continue
      if the_dict_key == 'variables' and variable_name in the_dict:
        # If the variable is set without a % in the_dict, and the_dict is a
        # variables dict (making |variables| a variables sub-dict of a
# variables dict), use the_dict's definition.
value = the_dict[variable_name]
else:
variable_name = key
variables[variable_name] = value
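# Illustration of the '%' handling above (hypothetical variables dict):
#   {'variables': {'use_goma%': 0}}
# defines 'use_goma' only if it is not already present in |variables|, for
# example because the caller supplied a value earlier.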
def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
build_file, the_dict_key=None):
"""Handle all variable and command expansion and conditional evaluation.
This function is the public entry point for all variable expansions and
conditional evaluations. The variables_in dictionary will not be modified
by this function.
"""
# Make a copy of the variables_in dict that can be modified during the
# loading of automatics and the loading of the variables dict.
variables = variables_in.copy()
LoadAutomaticVariablesFromDict(variables, the_dict)
if 'variables' in the_dict:
# Make sure all the local variables are added to the variables
# list before we process them so that you can reference one
# variable from another. They will be fully expanded by recursion
# in ExpandVariables.
for key, value in the_dict['variables'].iteritems():
variables[key] = value
# Handle the associated variables dict first, so that any variable
# references within can be resolved prior to using them as variables.
# Pass a copy of the variables dict to avoid having it be tainted.
# Otherwise, it would have extra automatics added for everything that
# should just be an ordinary variable in this scope.
ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
variables, build_file, 'variables')
LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
for key, value in the_dict.iteritems():
# Skip "variables", which was already processed if present.
if key != 'variables' and type(value) is str:
expanded = ExpandVariables(value, phase, variables, build_file)
if type(expanded) not in (str, int):
raise ValueError(
'Variable expansion in this context permits str and int ' + \
'only, found ' + expanded.__class__.__name__ + ' for ' + key)
the_dict[key] = expanded
# Variable expansion may have resulted in changes to automatics. Reload.
# TODO(mark): Optimization: only reload if no changes were made.
variables = variables_in.copy()
LoadAutomaticVariablesFromDict(variables, the_dict)
LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
# Process conditions in this dict. This is done after variable expansion
# so that conditions may take advantage of expanded variables. For example,
# if the_dict contains:
# {'type': '<(library_type)',
# 'conditions': [['_type=="static_library"', { ... }]]},
# _type, as used in the condition, will only be set to the value of
# library_type if variable expansion is performed before condition
# processing. However, condition processing should occur prior to recursion
# so that variables (both automatic and "variables" dict type) may be
# adjusted by conditions sections, merged into the_dict, and have the
# intended impact on contained dicts.
#
# This arrangement means that a "conditions" section containing a "variables"
# section will only have those variables effective in subdicts, not in
# the_dict. The workaround is to put a "conditions" section within a
# "variables" section. For example:
# {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
# 'defines': ['<(define)'],
# 'my_subdict': {'defines': ['<(define)']}},
# will not result in "IS_MAC" being appended to the "defines" list in the
# current scope but would result in it being appended to the "defines" list
# within "my_subdict". By comparison:
# {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
# 'defines': ['<(define)'],
# 'my_subdict': {'defines': ['<(define)']}},
# will append "IS_MAC" to both "defines" lists.
# Evaluate conditions sections, allowing variable expansions within them
# as well as nested conditionals. This will process a 'conditions' or
# 'target_conditions' section, perform appropriate merging and recursive
# conditional and variable processing, and then remove the conditions section
# from the_dict if it is present.
ProcessConditionsInDict(the_dict, phase, variables, build_file)
# Conditional processing may have resulted in changes to automatics or the
# variables dict. Reload.
variables = variables_in.copy()
LoadAutomaticVariablesFromDict(variables, the_dict)
LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
# Recurse into child dicts, or process child lists which may result in
# further recursion into descendant dicts.
for key, value in the_dict.iteritems():
# Skip "variables" and string values, which were already processed if
# present.
if key == 'variables' or type(value) is str:
continue
if type(value) is dict:
# Pass a copy of the variables dict so that subdicts can't influence
# parents.
ProcessVariablesAndConditionsInDict(value, phase, variables,
build_file, key)
elif type(value) is list:
# The list itself can't influence the variables dict, and
# ProcessVariablesAndConditionsInList will make copies of the variables
# dict if it needs to pass it to something that can influence it. No
# copy is necessary here.
ProcessVariablesAndConditionsInList(value, phase, variables,
build_file)
elif type(value) is not int:
raise TypeError('Unknown type ' + value.__class__.__name__ + \
' for ' + key)
def ProcessVariablesAndConditionsInList(the_list, phase, variables,
build_file):
# Iterate using an index so that new values can be assigned into the_list.
index = 0
while index < len(the_list):
item = the_list[index]
if type(item) is dict:
# Make a copy of the variables dict so that it won't influence anything
# outside of its own scope.
ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
elif type(item) is list:
ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
elif type(item) is str:
expanded = ExpandVariables(item, phase, variables, build_file)
if type(expanded) in (str, int):
the_list[index] = expanded
elif type(expanded) is list:
the_list[index:index+1] = expanded
index += len(expanded)
# index now identifies the next item to examine. Continue right now
# without falling into the index increment below.
continue
      else:
        raise ValueError(
              'Variable expansion in this context permits strings and ' + \
              'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
              str(index))
    elif type(item) is not int:
      raise TypeError('Unknown type ' + item.__class__.__name__ + \
                      ' at index ' + str(index))
index = index + 1
def BuildTargetsDict(data):
"""Builds a dict mapping fully-qualified target names to their target dicts.
|data| is a dict mapping loaded build files by pathname relative to the
current directory. Values in |data| are build file contents. For each
|data| value with a "targets" key, the value of the "targets" key is taken
as a list containing target dicts. Each target's fully-qualified name is
constructed from the pathname of the build file (|data| key) and its
"target_name" property. These fully-qualified names are used as the keys
in the returned dict. These keys provide access to the target dicts,
the dicts in the "targets" lists.
"""
targets = {}
for build_file in data['target_build_files']:
for target in data[build_file].get('targets', []):
target_name = gyp.common.QualifiedTarget(build_file,
target['target_name'],
target['toolset'])
if target_name in targets:
raise GypError('Duplicate target definitions for ' + target_name)
targets[target_name] = target
return targets
def QualifyDependencies(targets):
"""Make dependency links fully-qualified relative to the current directory.
|targets| is a dict mapping fully-qualified target names to their target
dicts. For each target in this dict, keys known to contain dependency
links are examined, and any dependencies referenced will be rewritten
so that they are fully-qualified and relative to the current directory.
All rewritten dependencies are suitable for use as keys to |targets| or a
similar dict.
"""
all_dependency_sections = [dep + op
for dep in dependency_sections
for op in ('', '!', '/')]
for target, target_dict in targets.iteritems():
target_build_file = gyp.common.BuildFile(target)
toolset = target_dict['toolset']
for dependency_key in all_dependency_sections:
dependencies = target_dict.get(dependency_key, [])
for index in xrange(0, len(dependencies)):
dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
target_build_file, dependencies[index], toolset)
if not multiple_toolsets:
# Ignore toolset specification in the dependency if it is specified.
dep_toolset = toolset
dependency = gyp.common.QualifiedTarget(dep_file,
dep_target,
dep_toolset)
dependencies[index] = dependency
# Make sure anything appearing in a list other than "dependencies" also
# appears in the "dependencies" list.
if dependency_key != 'dependencies' and \
dependency not in target_dict['dependencies']:
raise GypError('Found ' + dependency + ' in ' + dependency_key +
' of ' + target + ', but not in dependencies')
def ExpandWildcardDependencies(targets, data):
"""Expands dependencies specified as build_file:*.
For each target in |targets|, examines sections containing links to other
targets. If any such section contains a link of the form build_file:*, it
is taken as a wildcard link, and is expanded to list each target in
build_file. The |data| dict provides access to build file dicts.
Any target that does not wish to be included by wildcard can provide an
optional "suppress_wildcard" key in its target dict. When present and
true, a wildcard dependency link will not include such targets.
All dependency names, including the keys to |targets| and the values in each
dependency list, must be qualified when this function is called.
"""
for target, target_dict in targets.iteritems():
toolset = target_dict['toolset']
target_build_file = gyp.common.BuildFile(target)
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
# Loop this way instead of "for dependency in" or "for index in xrange"
# because the dependencies list will be modified within the loop body.
index = 0
while index < len(dependencies):
(dependency_build_file, dependency_target, dependency_toolset) = \
gyp.common.ParseQualifiedTarget(dependencies[index])
if dependency_target != '*' and dependency_toolset != '*':
# Not a wildcard. Keep it moving.
index = index + 1
continue
if dependency_build_file == target_build_file:
# It's an error for a target to depend on all other targets in
# the same file, because a target cannot depend on itself.
raise GypError('Found wildcard in ' + dependency_key + ' of ' +
target + ' referring to same build file')
# Take the wildcard out and adjust the index so that the next
# dependency in the list will be processed the next time through the
# loop.
del dependencies[index]
index = index - 1
# Loop through the targets in the other build file, adding them to
# this target's list of dependencies in place of the removed
# wildcard.
dependency_target_dicts = data[dependency_build_file]['targets']
for dependency_target_dict in dependency_target_dicts:
if int(dependency_target_dict.get('suppress_wildcard', False)):
continue
dependency_target_name = dependency_target_dict['target_name']
if (dependency_target != '*' and
dependency_target != dependency_target_name):
continue
dependency_target_toolset = dependency_target_dict['toolset']
if (dependency_toolset != '*' and
dependency_toolset != dependency_target_toolset):
continue
dependency = gyp.common.QualifiedTarget(dependency_build_file,
dependency_target_name,
dependency_target_toolset)
index = index + 1
dependencies.insert(index, dependency)
index = index + 1
def Unify(l):
"""Removes duplicate elements from l, keeping the first element."""
seen = {}
return [seen.setdefault(e, e) for e in l if e not in seen]
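# Small illustrative case for the helper above:
#   Unify(['a', 'b', 'a', 'c'])  -> ['a', 'b', 'c']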
def RemoveDuplicateDependencies(targets):
"""Makes sure every dependency appears only once in all targets's dependency
lists."""
for target_name, target_dict in targets.iteritems():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
if dependencies:
target_dict[dependency_key] = Unify(dependencies)
def Filter(l, item):
"""Removes item from l."""
res = {}
return [res.setdefault(e, e) for e in l if e != item]
def RemoveSelfDependencies(targets):
"""Remove self dependencies from targets that have the prune_self_dependency
variable set."""
for target_name, target_dict in targets.iteritems():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
if dependencies:
for t in dependencies:
if t == target_name:
if targets[t].get('variables', {}).get('prune_self_dependency', 0):
target_dict[dependency_key] = Filter(dependencies, target_name)
def RemoveLinkDependenciesFromNoneTargets(targets):
"""Remove dependencies having the 'link_dependency' attribute from the 'none'
targets."""
for target_name, target_dict in targets.iteritems():
for dependency_key in dependency_sections:
dependencies = target_dict.get(dependency_key, [])
if dependencies:
for t in dependencies:
if target_dict.get('type', None) == 'none':
if targets[t].get('variables', {}).get('link_dependency', 0):
target_dict[dependency_key] = \
Filter(target_dict[dependency_key], t)
class DependencyGraphNode(object):
"""
Attributes:
ref: A reference to an object that this DependencyGraphNode represents.
dependencies: List of DependencyGraphNodes on which this one depends.
dependents: List of DependencyGraphNodes that depend on this one.
"""
class CircularException(GypError):
pass
def __init__(self, ref):
self.ref = ref
self.dependencies = []
self.dependents = []
def __repr__(self):
return '<DependencyGraphNode: %r>' % self.ref
def FlattenToList(self):
# flat_list is the sorted list of dependencies - actually, the list items
# are the "ref" attributes of DependencyGraphNodes. Every target will
# appear in flat_list after all of its dependencies, and before all of its
# dependents.
flat_list = OrderedSet()
# in_degree_zeros is the list of DependencyGraphNodes that have no
# dependencies not in flat_list. Initially, it is a copy of the children
# of this node, because when the graph was built, nodes with no
# dependencies were made implicit dependents of the root node.
in_degree_zeros = set(self.dependents[:])
while in_degree_zeros:
# Nodes in in_degree_zeros have no dependencies not in flat_list, so they
# can be appended to flat_list. Take these nodes out of in_degree_zeros
# as work progresses, so that the next node to process from the list can
# always be accessed at a consistent position.
node = in_degree_zeros.pop()
flat_list.add(node.ref)
# Look at dependents of the node just added to flat_list. Some of them
# may now belong in in_degree_zeros.
for node_dependent in node.dependents:
is_in_degree_zero = True
# TODO: We want to check through the
# node_dependent.dependencies list but if it's long and we
# always start at the beginning, then we get O(n^2) behaviour.
for node_dependent_dependency in node_dependent.dependencies:
if not node_dependent_dependency.ref in flat_list:
          # The dependent has one or more dependencies not in flat_list. There
# will be more chances to add it to flat_list when examining
# it again as a dependent of those other dependencies, provided
# that there are no cycles.
is_in_degree_zero = False
break
if is_in_degree_zero:
# All of the dependent's dependencies are already in flat_list. Add
# it to in_degree_zeros where it will be processed in a future
# iteration of the outer loop.
in_degree_zeros.add(node_dependent)
return list(flat_list)
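  # Illustrative ordering produced by FlattenToList on a hypothetical graph:
  # if target 'b' depends on 'a' and 'c' depends on 'b', the returned list is
  # ['a', 'b', 'c']; every ref appears after all of its dependencies.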
def FindCycles(self):
"""
Returns a list of cycles in the graph, where each cycle is its own list.
"""
results = []
visited = set()
def Visit(node, path):
for child in node.dependents:
if child in path:
results.append([child] + path[:path.index(child) + 1])
elif not child in visited:
visited.add(child)
Visit(child, [child] + path)
visited.add(self)
Visit(self, [self])
return results
def DirectDependencies(self, dependencies=None):
"""Returns a list of just direct dependencies."""
if dependencies == None:
dependencies = []
for dependency in self.dependencies:
# Check for None, corresponding to the root node.
if dependency.ref != None and dependency.ref not in dependencies:
dependencies.append(dependency.ref)
return dependencies
def _AddImportedDependencies(self, targets, dependencies=None):
"""Given a list of direct dependencies, adds indirect dependencies that
other dependencies have declared to export their settings.
This method does not operate on self. Rather, it operates on the list
of dependencies in the |dependencies| argument. For each dependency in
that list, if any declares that it exports the settings of one of its
own dependencies, those dependencies whose settings are "passed through"
are added to the list. As new items are added to the list, they too will
be processed, so it is possible to import settings through multiple levels
of dependencies.
This method is not terribly useful on its own, it depends on being
"primed" with a list of direct dependencies such as one provided by
DirectDependencies. DirectAndImportedDependencies is intended to be the
public entry point.
"""
if dependencies == None:
dependencies = []
index = 0
while index < len(dependencies):
dependency = dependencies[index]
dependency_dict = targets[dependency]
# Add any dependencies whose settings should be imported to the list
# if not already present. Newly-added items will be checked for
# their own imports when the list iteration reaches them.
# Rather than simply appending new items, insert them after the
# dependency that exported them. This is done to more closely match
# the depth-first method used by DeepDependencies.
add_index = 1
for imported_dependency in \
dependency_dict.get('export_dependent_settings', []):
if imported_dependency not in dependencies:
dependencies.insert(index + add_index, imported_dependency)
add_index = add_index + 1
index = index + 1
return dependencies
def DirectAndImportedDependencies(self, targets, dependencies=None):
"""Returns a list of a target's direct dependencies and all indirect
dependencies that a dependency has advertised settings should be exported
through the dependency for.
"""
dependencies = self.DirectDependencies(dependencies)
return self._AddImportedDependencies(targets, dependencies)
def DeepDependencies(self, dependencies=None):
"""Returns an OrderedSet of all of a target's dependencies, recursively."""
if dependencies is None:
# Using a list to get ordered output and a set to do fast "is it
# already added" checks.
dependencies = OrderedSet()
for dependency in self.dependencies:
# Check for None, corresponding to the root node.
if dependency.ref is None:
continue
if dependency.ref not in dependencies:
dependencies.add(dependency.ref)
dependency.DeepDependencies(dependencies)
return dependencies
def _LinkDependenciesInternal(self, targets, include_shared_libraries,
dependencies=None, initial=True):
"""Returns an OrderedSet of dependency targets that are linked
into this target.
This function has a split personality, depending on the setting of
|initial|. Outside callers should always leave |initial| at its default
setting.
When adding a target to the list of dependencies, this function will
recurse into itself with |initial| set to False, to collect dependencies
that are linked into the linkable target for which the list is being built.
If |include_shared_libraries| is False, the resulting dependencies will not
include shared_library targets that are linked into this target.
"""
if dependencies is None:
# Using a list to get ordered output and a set to do fast "is it
# already added" checks.
dependencies = OrderedSet()
# Check for None, corresponding to the root node.
if self.ref is None:
return dependencies
# It's kind of sucky that |targets| has to be passed into this function,
# but that's presently the easiest way to access the target dicts so that
# this function can find target types.
if 'target_name' not in targets[self.ref]:
raise GypError("Missing 'target_name' field in target.")
if 'type' not in targets[self.ref]:
raise GypError("Missing 'type' field in target %s" %
targets[self.ref]['target_name'])
target_type = targets[self.ref]['type']
is_linkable = target_type in linkable_types
if initial and not is_linkable:
# If this is the first target being examined and it's not linkable,
# return an empty list of link dependencies, because the link
# dependencies are intended to apply to the target itself (initial is
# True) and this target won't be linked.
return dependencies
# Don't traverse 'none' targets if explicitly excluded.
if (target_type == 'none' and
not targets[self.ref].get('dependencies_traverse', True)):
dependencies.add(self.ref)
return dependencies
# Executables and loadable modules are already fully and finally linked.
# Nothing else can be a link dependency of them, there can only be
# dependencies in the sense that a dependent target might run an
# executable or load the loadable_module.
if not initial and target_type in ('executable', 'loadable_module'):
return dependencies
# Shared libraries are already fully linked. They should only be included
# in |dependencies| when adjusting static library dependencies (in order to
# link against the shared_library's import lib), but should not be included
# in |dependencies| when propagating link_settings.
# The |include_shared_libraries| flag controls which of these two cases we
# are handling.
if (not initial and target_type == 'shared_library' and
not include_shared_libraries):
return dependencies
# The target is linkable, add it to the list of link dependencies.
if self.ref not in dependencies:
dependencies.add(self.ref)
if initial or not is_linkable:
# If this is a subsequent target and it's linkable, don't look any
# further for linkable dependencies, as they'll already be linked into
# this target linkable. Always look at dependencies of the initial
# target, and always look at dependencies of non-linkables.
for dependency in self.dependencies:
dependency._LinkDependenciesInternal(targets,
include_shared_libraries,
dependencies, False)
return dependencies
def DependenciesForLinkSettings(self, targets):
"""
Returns a list of dependency targets whose link_settings should be merged
into this target.
"""
# TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
# link_settings are propagated. So for now, we will allow it, unless the
# 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
# False. Once chrome is fixed, we can remove this flag.
include_shared_libraries = \
targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
return self._LinkDependenciesInternal(targets, include_shared_libraries)
def DependenciesToLinkAgainst(self, targets):
"""
Returns a list of dependency targets that are linked into this target.
"""
return self._LinkDependenciesInternal(targets, True)
def BuildDependencyList(targets):
# Create a DependencyGraphNode for each target. Put it into a dict for easy
# access.
dependency_nodes = {}
for target, spec in targets.iteritems():
if target not in dependency_nodes:
dependency_nodes[target] = DependencyGraphNode(target)
# Set up the dependency links. Targets that have no dependencies are treated
# as dependent on root_node.
root_node = DependencyGraphNode(None)
for target, spec in targets.iteritems():
target_node = dependency_nodes[target]
target_build_file = gyp.common.BuildFile(target)
dependencies = spec.get('dependencies')
if not dependencies:
target_node.dependencies = [root_node]
root_node.dependents.append(target_node)
else:
for dependency in dependencies:
dependency_node = dependency_nodes.get(dependency)
if not dependency_node:
raise GypError("Dependency '%s' not found while "
"trying to load target %s" % (dependency, target))
target_node.dependencies.append(dependency_node)
dependency_node.dependents.append(target_node)
flat_list = root_node.FlattenToList()
# If there's anything left unvisited, there must be a circular dependency
# (cycle).
if len(flat_list) != len(targets):
if not root_node.dependents:
# If all targets have dependencies, add the first target as a dependent
# of root_node so that the cycle can be discovered from root_node.
target = targets.keys()[0]
target_node = dependency_nodes[target]
target_node.dependencies.append(root_node)
root_node.dependents.append(target_node)
cycles = []
for cycle in root_node.FindCycles():
paths = [node.ref for node in cycle]
cycles.append('Cycle: %s' % ' -> '.join(paths))
raise DependencyGraphNode.CircularException(
'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
return [dependency_nodes, flat_list]
def VerifyNoGYPFileCircularDependencies(targets):
# Create a DependencyGraphNode for each gyp file containing a target. Put
# it into a dict for easy access.
dependency_nodes = {}
for target in targets.iterkeys():
build_file = gyp.common.BuildFile(target)
if not build_file in dependency_nodes:
dependency_nodes[build_file] = DependencyGraphNode(build_file)
# Set up the dependency links.
for target, spec in targets.iteritems():
build_file = gyp.common.BuildFile(target)
build_file_node = dependency_nodes[build_file]
target_dependencies = spec.get('dependencies', [])
for dependency in target_dependencies:
try:
dependency_build_file = gyp.common.BuildFile(dependency)
except GypError, e:
gyp.common.ExceptionAppend(
e, 'while computing dependencies of .gyp file %s' % build_file)
raise
if dependency_build_file == build_file:
# A .gyp file is allowed to refer back to itself.
continue
dependency_node = dependency_nodes.get(dependency_build_file)
if not dependency_node:
raise GypError("Dependancy '%s' not found" % dependency_build_file)
if dependency_node not in build_file_node.dependencies:
build_file_node.dependencies.append(dependency_node)
dependency_node.dependents.append(build_file_node)
# Files that have no dependencies are treated as dependent on root_node.
root_node = DependencyGraphNode(None)
for build_file_node in dependency_nodes.itervalues():
if len(build_file_node.dependencies) == 0:
build_file_node.dependencies.append(root_node)
root_node.dependents.append(build_file_node)
flat_list = root_node.FlattenToList()
# If there's anything left unvisited, there must be a circular dependency
# (cycle).
if len(flat_list) != len(dependency_nodes):
if not root_node.dependents:
# If all files have dependencies, add the first file as a dependent
# of root_node so that the cycle can be discovered from root_node.
file_node = dependency_nodes.values()[0]
file_node.dependencies.append(root_node)
root_node.dependents.append(file_node)
cycles = []
for cycle in root_node.FindCycles():
paths = [node.ref for node in cycle]
cycles.append('Cycle: %s' % ' -> '.join(paths))
raise DependencyGraphNode.CircularException(
'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
def DoDependentSettings(key, flat_list, targets, dependency_nodes):
# key should be one of all_dependent_settings, direct_dependent_settings,
# or link_settings.
for target in flat_list:
target_dict = targets[target]
build_file = gyp.common.BuildFile(target)
if key == 'all_dependent_settings':
dependencies = dependency_nodes[target].DeepDependencies()
elif key == 'direct_dependent_settings':
dependencies = \
dependency_nodes[target].DirectAndImportedDependencies(targets)
elif key == 'link_settings':
dependencies = \
dependency_nodes[target].DependenciesForLinkSettings(targets)
else:
raise GypError("DoDependentSettings doesn't know how to determine "
'dependencies for ' + key)
for dependency in dependencies:
dependency_dict = targets[dependency]
if not key in dependency_dict:
continue
dependency_build_file = gyp.common.BuildFile(dependency)
MergeDicts(target_dict, dependency_dict[key],
build_file, dependency_build_file)
def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
sort_dependencies):
# Recompute target "dependencies" properties. For each static library
# target, remove "dependencies" entries referring to other static libraries,
# unless the dependency has the "hard_dependency" attribute set. For each
# linkable target, add a "dependencies" entry referring to all of the
  # target's computed list of link dependencies (including static libraries)
  # if no such entry is already present.
for target in flat_list:
target_dict = targets[target]
target_type = target_dict['type']
if target_type == 'static_library':
if not 'dependencies' in target_dict:
continue
target_dict['dependencies_original'] = target_dict.get(
'dependencies', [])[:]
# A static library should not depend on another static library unless
# the dependency relationship is "hard," which should only be done when
# a dependent relies on some side effect other than just the build
# product, like a rule or action output. Further, if a target has a
# non-hard dependency, but that dependency exports a hard dependency,
# the non-hard dependency can safely be removed, but the exported hard
# dependency must be added to the target to keep the same dependency
# ordering.
dependencies = \
dependency_nodes[target].DirectAndImportedDependencies(targets)
index = 0
while index < len(dependencies):
dependency = dependencies[index]
dependency_dict = targets[dependency]
# Remove every non-hard static library dependency and remove every
# non-static library dependency that isn't a direct dependency.
if (dependency_dict['type'] == 'static_library' and \
not dependency_dict.get('hard_dependency', False)) or \
(dependency_dict['type'] != 'static_library' and \
not dependency in target_dict['dependencies']):
# Take the dependency out of the list, and don't increment index
# because the next dependency to analyze will shift into the index
# formerly occupied by the one being removed.
del dependencies[index]
else:
index = index + 1
# Update the dependencies. If the dependencies list is empty, it's not
# needed, so unhook it.
if len(dependencies) > 0:
target_dict['dependencies'] = dependencies
else:
del target_dict['dependencies']
elif target_type in linkable_types:
# Get a list of dependency targets that should be linked into this
# target. Add them to the dependencies list if they're not already
# present.
link_dependencies = \
dependency_nodes[target].DependenciesToLinkAgainst(targets)
for dependency in link_dependencies:
if dependency == target:
continue
if not 'dependencies' in target_dict:
target_dict['dependencies'] = []
if not dependency in target_dict['dependencies']:
target_dict['dependencies'].append(dependency)
# Sort the dependencies list in the order from dependents to dependencies.
# e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
# Note: flat_list is already sorted in the order from dependencies to
# dependents.
if sort_dependencies and 'dependencies' in target_dict:
target_dict['dependencies'] = [dep for dep in reversed(flat_list)
if dep in target_dict['dependencies']]
# Initialize this here to speed up MakePathRelative.
exception_re = re.compile(r'''["']?[-/$<>^]''')
def MakePathRelative(to_file, fro_file, item):
# If item is a relative path, it's relative to the build file dict that it's
# coming from. Fix it up to make it relative to the build file dict that
# it's going into.
# Exception: any |item| that begins with these special characters is
# returned without modification.
# / Used when a path is already absolute (shortcut optimization;
# such paths would be returned as absolute anyway)
# $ Used for build environment variables
# - Used for some build environment flags (such as -lapr-1 in a
# "libraries" section)
# < Used for our own variable and command expansions (see ExpandVariables)
# > Used for our own variable and command expansions (see ExpandVariables)
# ^ Used for our own variable and command expansions (see ExpandVariables)
#
# "/' Used when a value is quoted. If these are present, then we
# check the second character instead.
#
if to_file == fro_file or exception_re.match(item):
return item
else:
# TODO(dglazkov) The backslash/forward-slash replacement at the end is a
# temporary measure. This should really be addressed by keeping all paths
# in POSIX until actual project generation.
ret = os.path.normpath(os.path.join(
gyp.common.RelativePath(os.path.dirname(fro_file),
os.path.dirname(to_file)),
item)).replace('\\', '/')
if item[-1] == '/':
ret += '/'
return ret
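# A minimal usage sketch of MakePathRelative (hypothetical paths, not part of
# the original module):
#
#   MakePathRelative('dir_a/a.gyp', 'dir_b/b.gyp', 'src/foo.cc')
#   # -> '../dir_b/src/foo.cc'
#   MakePathRelative('dir_a/a.gyp', 'dir_b/b.gyp', '<(DEPTH)/foo.cc')
#   # -> '<(DEPTH)/foo.cc'   (starts with an exception character, untouched)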
def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
  # The Python documentation recommends that objects which do not support
  # hashing set __hash__ to None; standard library objects follow this rule.
is_hashable = lambda val: val.__hash__
# If x is hashable, returns whether x is in s. Else returns whether x is in l.
def is_in_set_or_list(x, s, l):
if is_hashable(x):
return x in s
return x in l
prepend_index = 0
# Make membership testing of hashables in |to| (in particular, strings)
# faster.
hashable_to_set = set(x for x in to if is_hashable(x))
for item in fro:
singleton = False
if type(item) in (str, int):
# The cheap and easy case.
if is_paths:
to_item = MakePathRelative(to_file, fro_file, item)
else:
to_item = item
if not (type(item) is str and item.startswith('-')):
# Any string that doesn't begin with a "-" is a singleton - it can
# only appear once in a list, to be enforced by the list merge append
# or prepend.
singleton = True
elif type(item) is dict:
# Make a copy of the dictionary, continuing to look for paths to fix.
# The other intelligent aspects of merge processing won't apply because
# item is being merged into an empty dict.
to_item = {}
MergeDicts(to_item, item, to_file, fro_file)
elif type(item) is list:
# Recurse, making a copy of the list. If the list contains any
# descendant dicts, path fixing will occur. Note that here, custom
# values for is_paths and append are dropped; those are only to be
# applied to |to| and |fro|, not sublists of |fro|. append shouldn't
# matter anyway because the new |to_item| list is empty.
to_item = []
MergeLists(to_item, item, to_file, fro_file)
else:
raise TypeError(
'Attempt to merge list item of unsupported type ' + \
item.__class__.__name__)
if append:
# If appending a singleton that's already in the list, don't append.
# This ensures that the earliest occurrence of the item will stay put.
if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
to.append(to_item)
if is_hashable(to_item):
hashable_to_set.add(to_item)
else:
# If prepending a singleton that's already in the list, remove the
# existing instance and proceed with the prepend. This ensures that the
# item appears at the earliest possible position in the list.
while singleton and to_item in to:
to.remove(to_item)
# Don't just insert everything at index 0. That would prepend the new
# items to the list in reverse order, which would be an unwelcome
# surprise.
to.insert(prepend_index, to_item)
if is_hashable(to_item):
hashable_to_set.add(to_item)
prepend_index = prepend_index + 1
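# A minimal sketch of the singleton rule in MergeLists (hypothetical values,
# not part of the original module):
#
#   to = ['-Wall', 'foo.cc']
#   MergeLists(to, ['foo.cc', '-Wall'], 'a.gyp', 'a.gyp')
#   # to == ['-Wall', 'foo.cc', '-Wall']
#   # 'foo.cc' is a singleton and is not appended a second time; '-Wall'
#   # begins with '-', so it is not a singleton and may repeat.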
def MergeDicts(to, fro, to_file, fro_file):
# I wanted to name the parameter "from" but it's a Python keyword...
for k, v in fro.iteritems():
# It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
# copy semantics. Something else may want to merge from the |fro| dict
# later, and having the same dict ref pointed to twice in the tree isn't
# what anyone wants considering that the dicts may subsequently be
# modified.
if k in to:
bad_merge = False
if type(v) in (str, int):
if type(to[k]) not in (str, int):
bad_merge = True
elif type(v) is not type(to[k]):
bad_merge = True
if bad_merge:
raise TypeError(
'Attempt to merge dict value of type ' + v.__class__.__name__ + \
' into incompatible type ' + to[k].__class__.__name__ + \
' for key ' + k)
if type(v) in (str, int):
# Overwrite the existing value, if any. Cheap and easy.
is_path = IsPathSection(k)
if is_path:
to[k] = MakePathRelative(to_file, fro_file, v)
else:
to[k] = v
elif type(v) is dict:
# Recurse, guaranteeing copies will be made of objects that require it.
if not k in to:
to[k] = {}
MergeDicts(to[k], v, to_file, fro_file)
elif type(v) is list:
# Lists in dicts can be merged with different policies, depending on
# how the key in the "from" dict (k, the from-key) is written.
#
# If the from-key has ...the to-list will have this action
# this character appended:... applied when receiving the from-list:
# = replace
# + prepend
# ? set, only if to-list does not yet exist
# (none) append
#
# This logic is list-specific, but since it relies on the associated
# dict key, it's checked in this dict-oriented function.
ext = k[-1]
append = True
if ext == '=':
list_base = k[:-1]
lists_incompatible = [list_base, list_base + '?']
to[list_base] = []
elif ext == '+':
list_base = k[:-1]
lists_incompatible = [list_base + '=', list_base + '?']
append = False
elif ext == '?':
list_base = k[:-1]
lists_incompatible = [list_base, list_base + '=', list_base + '+']
else:
list_base = k
lists_incompatible = [list_base + '=', list_base + '?']
# Some combinations of merge policies appearing together are meaningless.
# It's stupid to replace and append simultaneously, for example. Append
# and prepend are the only policies that can coexist.
for list_incompatible in lists_incompatible:
if list_incompatible in fro:
raise GypError('Incompatible list policies ' + k + ' and ' +
list_incompatible)
if list_base in to:
if ext == '?':
# If the key ends in "?", the list will only be merged if it doesn't
# already exist.
continue
elif type(to[list_base]) is not list:
# This may not have been checked above if merging in a list with an
# extension character.
raise TypeError(
'Attempt to merge dict value of type ' + v.__class__.__name__ + \
' into incompatible type ' + to[list_base].__class__.__name__ + \
' for key ' + list_base + '(' + k + ')')
else:
to[list_base] = []
# Call MergeLists, which will make copies of objects that require it.
# MergeLists can recurse back into MergeDicts, although this will be
# to make copies of dicts (with paths fixed), there will be no
# subsequent dict "merging" once entering a list because lists are
# always replaced, appended to, or prepended to.
is_paths = IsPathSection(list_base)
MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
else:
raise TypeError(
'Attempt to merge dict value of unsupported type ' + \
v.__class__.__name__ + ' for key ' + k)
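# A minimal sketch of the list-merge policies handled by MergeDicts
# (hypothetical keys and values, not part of the original module):
#
#   to = {'defines': ['A']}
#   fro = {'defines+': ['B'], 'cflags=': ['-O2'], 'libraries?': ['-lfoo']}
#   MergeDicts(to, fro, 'a.gyp', 'a.gyp')
#   # to == {'defines': ['B', 'A'],    # '+' prepends to the existing list
#   #        'cflags': ['-O2'],        # '=' replaces (here: creates) the list
#   #        'libraries': ['-lfoo']}   # '?' sets the list only if absent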
def MergeConfigWithInheritance(new_configuration_dict, build_file,
target_dict, configuration, visited):
  # Skip if previously visited.
if configuration in visited:
return
# Look at this configuration.
configuration_dict = target_dict['configurations'][configuration]
# Merge in parents.
for parent in configuration_dict.get('inherit_from', []):
MergeConfigWithInheritance(new_configuration_dict, build_file,
target_dict, parent, visited + [configuration])
# Merge it into the new config.
MergeDicts(new_configuration_dict, configuration_dict,
build_file, build_file)
# Drop abstract.
if 'abstract' in new_configuration_dict:
del new_configuration_dict['abstract']
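# A minimal sketch of configuration inheritance (hypothetical configurations,
# not part of the original module):
#
#   target_dict = {'configurations': {
#       'Common': {'abstract': 1, 'defines': ['COMMON']},
#       'Debug': {'inherit_from': ['Common'], 'defines': ['DEBUG']}}}
#   new_config = {}
#   MergeConfigWithInheritance(new_config, 'a.gyp', target_dict, 'Debug', [])
#   # new_config == {'inherit_from': ['Common'],
#   #                'defines': ['COMMON', 'DEBUG']}
#   # 'abstract' is merged from Common and then dropped.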
def SetUpConfigurations(target, target_dict):
# key_suffixes is a list of key suffixes that might appear on key names.
# These suffixes are handled in conditional evaluations (for =, +, and ?)
# and rules/exclude processing (for ! and /). Keys with these suffixes
# should be treated the same as keys without.
key_suffixes = ['=', '+', '?', '!', '/']
build_file = gyp.common.BuildFile(target)
# Provide a single configuration by default if none exists.
  # TODO(mark): Signal an error if default_configuration exists but
  # configurations does not.
if not 'configurations' in target_dict:
target_dict['configurations'] = {'Default': {}}
if not 'default_configuration' in target_dict:
concrete = [i for (i, config) in target_dict['configurations'].iteritems()
if not config.get('abstract')]
target_dict['default_configuration'] = sorted(concrete)[0]
merged_configurations = {}
configs = target_dict['configurations']
for (configuration, old_configuration_dict) in configs.iteritems():
# Skip abstract configurations (saves work only).
if old_configuration_dict.get('abstract'):
continue
# Configurations inherit (most) settings from the enclosing target scope.
# Get the inheritance relationship right by making a copy of the target
# dict.
new_configuration_dict = {}
for (key, target_val) in target_dict.iteritems():
key_ext = key[-1:]
if key_ext in key_suffixes:
key_base = key[:-1]
else:
key_base = key
if not key_base in non_configuration_keys:
new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
# Merge in configuration (with all its parents first).
MergeConfigWithInheritance(new_configuration_dict, build_file,
target_dict, configuration, [])
merged_configurations[configuration] = new_configuration_dict
# Put the new configurations back into the target dict as a configuration.
for configuration in merged_configurations.keys():
target_dict['configurations'][configuration] = (
merged_configurations[configuration])
# Now drop all the abstract ones.
for configuration in target_dict['configurations'].keys():
old_configuration_dict = target_dict['configurations'][configuration]
if old_configuration_dict.get('abstract'):
del target_dict['configurations'][configuration]
# Now that all of the target's configurations have been built, go through
# the target dict's keys and remove everything that's been moved into a
# "configurations" section.
delete_keys = []
for key in target_dict:
key_ext = key[-1:]
if key_ext in key_suffixes:
key_base = key[:-1]
else:
key_base = key
if not key_base in non_configuration_keys:
delete_keys.append(key)
for key in delete_keys:
del target_dict[key]
# Check the configurations to see if they contain invalid keys.
for configuration in target_dict['configurations'].keys():
configuration_dict = target_dict['configurations'][configuration]
for key in configuration_dict.keys():
if key in invalid_configuration_keys:
raise GypError('%s not allowed in the %s configuration, found in '
'target %s' % (key, configuration, target))
def ProcessListFiltersInDict(name, the_dict):
"""Process regular expression and exclusion-based filters on lists.
An exclusion list is in a dict key named with a trailing "!", like
"sources!". Every item in such a list is removed from the associated
  main list, which in this example would be "sources". Removed items are
placed into a "sources_excluded" list in the dict.
Regular expression (regex) filters are contained in dict keys named with a
trailing "/", such as "sources/" to operate on the "sources" list. Regex
filters in a dict take the form:
'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
['include', '_mac\\.cc$'] ],
The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
_win.cc. The second filter then includes all files ending in _mac.cc that
are now or were once in the "sources" list. Items matching an "exclude"
filter are subject to the same processing as would occur if they were listed
by name in an exclusion list (ending in "!"). Items matching an "include"
filter are brought back into the main list if previously excluded by an
exclusion list or exclusion regex filter. Subsequent matching "exclude"
patterns can still cause items to be excluded after matching an "include".
"""
# Look through the dictionary for any lists whose keys end in "!" or "/".
# These are lists that will be treated as exclude lists and regular
# expression-based exclude/include lists. Collect the lists that are
# needed first, looking for the lists that they operate on, and assemble
  # them into |lists|. This is done in a separate loop up front, because
# the _included and _excluded keys need to be added to the_dict, and that
# can't be done while iterating through it.
lists = []
del_lists = []
for key, value in the_dict.iteritems():
operation = key[-1]
if operation != '!' and operation != '/':
continue
if type(value) is not list:
raise ValueError(name + ' key ' + key + ' must be list, not ' + \
value.__class__.__name__)
list_key = key[:-1]
if list_key not in the_dict:
# This happens when there's a list like "sources!" but no corresponding
# "sources" list. Since there's nothing for it to operate on, queue up
# the "sources!" list for deletion now.
del_lists.append(key)
continue
if type(the_dict[list_key]) is not list:
value = the_dict[list_key]
raise ValueError(name + ' key ' + list_key + \
' must be list, not ' + \
value.__class__.__name__ + ' when applying ' + \
{'!': 'exclusion', '/': 'regex'}[operation])
if not list_key in lists:
lists.append(list_key)
# Delete the lists that are known to be unneeded at this point.
for del_list in del_lists:
del the_dict[del_list]
for list_key in lists:
the_list = the_dict[list_key]
# Initialize the list_actions list, which is parallel to the_list. Each
# item in list_actions identifies whether the corresponding item in
# the_list should be excluded, unconditionally preserved (included), or
# whether no exclusion or inclusion has been applied. Items for which
# no exclusion or inclusion has been applied (yet) have value -1, items
# excluded have value 0, and items included have value 1. Includes and
# excludes override previous actions. All items in list_actions are
# initialized to -1 because no excludes or includes have been processed
# yet.
list_actions = list((-1,) * len(the_list))
exclude_key = list_key + '!'
if exclude_key in the_dict:
for exclude_item in the_dict[exclude_key]:
for index in xrange(0, len(the_list)):
if exclude_item == the_list[index]:
# This item matches the exclude_item, so set its action to 0
# (exclude).
list_actions[index] = 0
# The "whatever!" list is no longer needed, dump it.
del the_dict[exclude_key]
regex_key = list_key + '/'
if regex_key in the_dict:
for regex_item in the_dict[regex_key]:
[action, pattern] = regex_item
pattern_re = re.compile(pattern)
if action == 'exclude':
          # An exclude pattern: matching items will get action 0 (exclude).
action_value = 0
elif action == 'include':
          # An include pattern: matching items will get action 1 (include).
action_value = 1
else:
# This is an action that doesn't make any sense.
raise ValueError('Unrecognized action ' + action + ' in ' + name + \
' key ' + regex_key)
for index in xrange(0, len(the_list)):
list_item = the_list[index]
if list_actions[index] == action_value:
# Even if the regex matches, nothing will change so continue (regex
# searches are expensive).
continue
if pattern_re.search(list_item):
# Regular expression match.
list_actions[index] = action_value
# The "whatever/" list is no longer needed, dump it.
del the_dict[regex_key]
# Add excluded items to the excluded list.
#
# Note that exclude_key ("sources!") is different from excluded_key
# ("sources_excluded"). The exclude_key list is input and it was already
# processed and deleted; the excluded_key list is output and it's about
# to be created.
excluded_key = list_key + '_excluded'
if excluded_key in the_dict:
raise GypError(name + ' key ' + excluded_key +
' must not be present prior '
' to applying exclusion/regex filters for ' + list_key)
excluded_list = []
# Go backwards through the list_actions list so that as items are deleted,
# the indices of items that haven't been seen yet don't shift. That means
# that things need to be prepended to excluded_list to maintain them in the
# same order that they existed in the_list.
for index in xrange(len(list_actions) - 1, -1, -1):
if list_actions[index] == 0:
# Dump anything with action 0 (exclude). Keep anything with action 1
# (include) or -1 (no include or exclude seen for the item).
excluded_list.insert(0, the_list[index])
del the_list[index]
# If anything was excluded, put the excluded list into the_dict at
# excluded_key.
if len(excluded_list) > 0:
the_dict[excluded_key] = excluded_list
# Now recurse into subdicts and lists that may contain dicts.
for key, value in the_dict.iteritems():
if type(value) is dict:
ProcessListFiltersInDict(key, value)
elif type(value) is list:
ProcessListFiltersInList(key, value)
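# A minimal sketch of ProcessListFiltersInDict (hypothetical file names, not
# part of the original module):
#
#   d = {'sources': ['a_linux.cc', 'a_mac.cc', 'a_win.cc', 'b.cc'],
#        'sources!': ['b.cc'],
#        'sources/': [['exclude', '_(linux|win)\\.cc$']]}
#   ProcessListFiltersInDict('example', d)
#   # d == {'sources': ['a_mac.cc'],
#   #       'sources_excluded': ['a_linux.cc', 'a_win.cc', 'b.cc']}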
def ProcessListFiltersInList(name, the_list):
for item in the_list:
if type(item) is dict:
ProcessListFiltersInDict(name, item)
elif type(item) is list:
ProcessListFiltersInList(name, item)
def ValidateTargetType(target, target_dict):
"""Ensures the 'type' field on the target is one of the known types.
Arguments:
target: string, name of target.
target_dict: dict, target spec.
Raises an exception on error.
"""
VALID_TARGET_TYPES = ('executable', 'loadable_module',
'static_library', 'shared_library',
'none')
target_type = target_dict.get('type', None)
if target_type not in VALID_TARGET_TYPES:
raise GypError("Target %s has an invalid target type '%s'. "
"Must be one of %s." %
(target, target_type, '/'.join(VALID_TARGET_TYPES)))
if (target_dict.get('standalone_static_library', 0) and
not target_type == 'static_library'):
raise GypError('Target %s has type %s but standalone_static_library flag is'
' only valid for static_library type.' % (target,
target_type))
def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
"""Ensures that the rules sections in target_dict are valid and consistent,
and determines which sources they apply to.
Arguments:
target: string, name of target.
target_dict: dict, target spec containing "rules" and "sources" lists.
extra_sources_for_rules: a list of keys to scan for rule matches in
addition to 'sources'.
"""
# Dicts to map between values found in rules' 'rule_name' and 'extension'
# keys and the rule dicts themselves.
rule_names = {}
rule_extensions = {}
rules = target_dict.get('rules', [])
for rule in rules:
# Make sure that there's no conflict among rule names and extensions.
rule_name = rule['rule_name']
if rule_name in rule_names:
raise GypError('rule %s exists in duplicate, target %s' %
(rule_name, target))
rule_names[rule_name] = rule
rule_extension = rule['extension']
if rule_extension.startswith('.'):
rule_extension = rule_extension[1:]
if rule_extension in rule_extensions:
raise GypError(('extension %s associated with multiple rules, ' +
'target %s rules %s and %s') %
(rule_extension, target,
rule_extensions[rule_extension]['rule_name'],
rule_name))
rule_extensions[rule_extension] = rule
# Make sure rule_sources isn't already there. It's going to be
# created below if needed.
if 'rule_sources' in rule:
raise GypError(
'rule_sources must not exist in input, target %s rule %s' %
(target, rule_name))
rule_sources = []
source_keys = ['sources']
source_keys.extend(extra_sources_for_rules)
for source_key in source_keys:
for source in target_dict.get(source_key, []):
(source_root, source_extension) = os.path.splitext(source)
if source_extension.startswith('.'):
source_extension = source_extension[1:]
if source_extension == rule_extension:
rule_sources.append(source)
if len(rule_sources) > 0:
rule['rule_sources'] = rule_sources
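# A minimal sketch of how rule_sources is built (hypothetical rule and
# sources, not part of the original module):
#
#   td = {'rules': [{'rule_name': 'idl', 'extension': 'idl'}],
#         'sources': ['api.idl', 'main.cc']}
#   ValidateRulesInTarget('demo', td, [])
#   # td['rules'][0]['rule_sources'] == ['api.idl']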
def ValidateRunAsInTarget(target, target_dict, build_file):
target_name = target_dict.get('target_name')
run_as = target_dict.get('run_as')
if not run_as:
return
if type(run_as) is not dict:
raise GypError("The 'run_as' in target %s from file %s should be a "
"dictionary." %
(target_name, build_file))
action = run_as.get('action')
if not action:
raise GypError("The 'run_as' in target %s from file %s must have an "
"'action' section." %
(target_name, build_file))
if type(action) is not list:
raise GypError("The 'action' for 'run_as' in target %s from file %s "
"must be a list." %
(target_name, build_file))
working_directory = run_as.get('working_directory')
if working_directory and type(working_directory) is not str:
raise GypError("The 'working_directory' for 'run_as' in target %s "
"in file %s should be a string." %
(target_name, build_file))
environment = run_as.get('environment')
if environment and type(environment) is not dict:
raise GypError("The 'environment' for 'run_as' in target %s "
"in file %s should be a dictionary." %
(target_name, build_file))
def ValidateActionsInTarget(target, target_dict, build_file):
'''Validates the inputs to the actions in a target.'''
target_name = target_dict.get('target_name')
actions = target_dict.get('actions', [])
for action in actions:
action_name = action.get('action_name')
if not action_name:
raise GypError("Anonymous action in target %s. "
"An action must have an 'action_name' field." %
target_name)
inputs = action.get('inputs', None)
if inputs is None:
raise GypError('Action in target %s has no inputs.' % target_name)
action_command = action.get('action')
if action_command and not action_command[0]:
raise GypError("Empty action as command in target %s." % target_name)
def TurnIntIntoStrInDict(the_dict):
"""Given dict the_dict, recursively converts all integers into strings.
"""
  # Use items instead of iteritems: items() returns a copy, so the dict can
  # be mutated (keys deleted and reinserted) safely while iterating, and
  # the reinserted keys need not be revisited.
for k, v in the_dict.items():
if type(v) is int:
v = str(v)
the_dict[k] = v
elif type(v) is dict:
TurnIntIntoStrInDict(v)
elif type(v) is list:
TurnIntIntoStrInList(v)
if type(k) is int:
del the_dict[k]
the_dict[str(k)] = v
def TurnIntIntoStrInList(the_list):
"""Given list the_list, recursively converts all integers into strings.
"""
for index in xrange(0, len(the_list)):
item = the_list[index]
if type(item) is int:
the_list[index] = str(item)
elif type(item) is dict:
TurnIntIntoStrInDict(item)
elif type(item) is list:
TurnIntIntoStrInList(item)
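# A minimal sketch of the int-to-str conversion (hypothetical values, not
# part of the original module):
#
#   d = {1: 2, 'list': [3, {'x': 4}]}
#   TurnIntIntoStrInDict(d)
#   # d == {'1': '2', 'list': ['3', {'x': '4'}]}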
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
data):
"""Return only the targets that are deep dependencies of |root_targets|."""
qualified_root_targets = []
for target in root_targets:
target = target.strip()
qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
if not qualified_targets:
raise GypError("Could not find target %s" % target)
qualified_root_targets.extend(qualified_targets)
wanted_targets = {}
for target in qualified_root_targets:
wanted_targets[target] = targets[target]
for dependency in dependency_nodes[target].DeepDependencies():
wanted_targets[dependency] = targets[dependency]
wanted_flat_list = [t for t in flat_list if t in wanted_targets]
# Prune unwanted targets from each build_file's data dict.
for build_file in data['target_build_files']:
if not 'targets' in data[build_file]:
continue
new_targets = []
for target in data[build_file]['targets']:
qualified_name = gyp.common.QualifiedTarget(build_file,
target['target_name'],
target['toolset'])
if qualified_name in wanted_targets:
new_targets.append(target)
data[build_file]['targets'] = new_targets
return wanted_targets, wanted_flat_list
def VerifyNoCollidingTargets(targets):
"""Verify that no two targets in the same directory share the same name.
Arguments:
targets: A list of targets in the form 'path/to/file.gyp:target_name'.
"""
# Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
used = {}
for target in targets:
    # Separate out 'path/to/file.gyp', 'target_name' from
# 'path/to/file.gyp:target_name'.
path, name = target.rsplit(':', 1)
# Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
subdir, gyp = os.path.split(path)
# Use '.' for the current directory '', so that the error messages make
# more sense.
if not subdir:
subdir = '.'
# Prepare a key like 'path/to:target_name'.
key = subdir + ':' + name
if key in used:
# Complain if this target is already used.
raise GypError('Duplicate target name "%s" in directory "%s" used both '
'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
used[key] = gyp
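# A minimal sketch of the collision check (hypothetical target names, not
# part of the original module):
#
#   VerifyNoCollidingTargets(['a/foo.gyp:lib', 'a/bar.gyp:lib'])
#   # raises GypError: target name 'lib' is used in directory 'a' by both
#   # 'foo.gyp' and 'bar.gyp'.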
def SetGeneratorGlobals(generator_input_info):
# Set up path_sections and non_configuration_keys with the default data plus
# the generator-specific data.
global path_sections
path_sections = set(base_path_sections)
path_sections.update(generator_input_info['path_sections'])
global non_configuration_keys
non_configuration_keys = base_non_configuration_keys[:]
non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
global multiple_toolsets
multiple_toolsets = generator_input_info[
'generator_supports_multiple_toolsets']
global generator_filelist_paths
generator_filelist_paths = generator_input_info['generator_filelist_paths']
def Load(build_files, variables, includes, depth, generator_input_info, check,
circular_check, parallel, root_targets):
SetGeneratorGlobals(generator_input_info)
# A generator can have other lists (in addition to sources) be processed
# for rules.
extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
# Load build files. This loads every target-containing build file into
# the |data| dictionary such that the keys to |data| are build file names,
# and the values are the entire build file contents after "early" or "pre"
# processing has been done and includes have been resolved.
# NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
# well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
# track of the keys corresponding to "target" files.
data = {'target_build_files': set()}
# Normalize paths everywhere. This is important because paths will be
# used as keys to the data dict and for references between input files.
build_files = set(map(os.path.normpath, build_files))
if parallel:
LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
check, generator_input_info)
else:
aux_data = {}
for build_file in build_files:
try:
LoadTargetBuildFile(build_file, data, aux_data,
variables, includes, depth, check, True)
except Exception, e:
gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
raise
# Build a dict to access each target's subdict by qualified name.
targets = BuildTargetsDict(data)
# Fully qualify all dependency links.
QualifyDependencies(targets)
# Remove self-dependencies from targets that have 'prune_self_dependencies'
# set to 1.
RemoveSelfDependencies(targets)
# Expand dependencies specified as build_file:*.
ExpandWildcardDependencies(targets, data)
# Remove all dependencies marked as 'link_dependency' from the targets of
# type 'none'.
RemoveLinkDependenciesFromNoneTargets(targets)
# Apply exclude (!) and regex (/) list filters only for dependency_sections.
for target_name, target_dict in targets.iteritems():
tmp_dict = {}
for key_base in dependency_sections:
for op in ('', '!', '/'):
key = key_base + op
if key in target_dict:
tmp_dict[key] = target_dict[key]
del target_dict[key]
ProcessListFiltersInDict(target_name, tmp_dict)
# Write the results back to |target_dict|.
for key in tmp_dict:
target_dict[key] = tmp_dict[key]
# Make sure every dependency appears at most once.
RemoveDuplicateDependencies(targets)
if circular_check:
# Make sure that any targets in a.gyp don't contain dependencies in other
# .gyp files that further depend on a.gyp.
VerifyNoGYPFileCircularDependencies(targets)
[dependency_nodes, flat_list] = BuildDependencyList(targets)
if root_targets:
# Remove, from |targets| and |flat_list|, the targets that are not deep
# dependencies of the targets specified in |root_targets|.
targets, flat_list = PruneUnwantedTargets(
targets, flat_list, dependency_nodes, root_targets, data)
# Check that no two targets in the same directory have the same name.
VerifyNoCollidingTargets(flat_list)
# Handle dependent settings of various types.
for settings_type in ['all_dependent_settings',
'direct_dependent_settings',
'link_settings']:
DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
# Take out the dependent settings now that they've been published to all
# of the targets that require them.
for target in flat_list:
if settings_type in targets[target]:
del targets[target][settings_type]
# Make sure static libraries don't declare dependencies on other static
# libraries, but that linkables depend on all unlinked static libraries
# that they need so that their link steps will be correct.
gii = generator_input_info
if gii['generator_wants_static_library_dependencies_adjusted']:
AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
gii['generator_wants_sorted_dependencies'])
# Apply "post"/"late"/"target" variable expansions and condition evaluations.
for target in flat_list:
target_dict = targets[target]
build_file = gyp.common.BuildFile(target)
ProcessVariablesAndConditionsInDict(
target_dict, PHASE_LATE, variables, build_file)
# Move everything that can go into a "configurations" section into one.
for target in flat_list:
target_dict = targets[target]
SetUpConfigurations(target, target_dict)
# Apply exclude (!) and regex (/) list filters.
for target in flat_list:
target_dict = targets[target]
ProcessListFiltersInDict(target, target_dict)
# Apply "latelate" variable expansions and condition evaluations.
for target in flat_list:
target_dict = targets[target]
build_file = gyp.common.BuildFile(target)
ProcessVariablesAndConditionsInDict(
target_dict, PHASE_LATELATE, variables, build_file)
# Make sure that the rules make sense, and build up rule_sources lists as
# needed. Not all generators will need to use the rule_sources lists, but
# some may, and it seems best to build the list in a common spot.
# Also validate actions and run_as elements in targets.
for target in flat_list:
target_dict = targets[target]
build_file = gyp.common.BuildFile(target)
ValidateTargetType(target, target_dict)
ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
ValidateRunAsInTarget(target, target_dict, build_file)
ValidateActionsInTarget(target, target_dict, build_file)
# Generators might not expect ints. Turn them into strs.
TurnIntIntoStrInDict(data)
# TODO(mark): Return |data| for now because the generator needs a list of
# build files that came in. In the future, maybe it should just accept
# a list, and not the whole data dict.
return [flat_list, targets, data]
|
mit
|
VladimirShe/in100gram
|
in100gram/in100gram/settings.py
|
1
|
2137
|
"""
Django settings for in100gram project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '22q%is3ng$=h^cx8(8%&(9)@7e9yi(m^lk#w5a#j8ym5!-mlua'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'in100gram.urls'
WSGI_APPLICATION = 'in100gram.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'mysql.connector.django',
'NAME': 'in100gram',
'USER': 'in100gram',
'PASSWORD': 'in100gram',
'HOST': '127.0.0.1',
'PORT': '3306',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
|
apache-2.0
|
yujikato/DIRAC
|
src/DIRAC/DataManagementSystem/Client/test/mock_DM.py
|
2
|
1432
|
""" A mock of the DataManager, used for testing purposes
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=protected-access, missing-docstring, invalid-name, line-too-long
from mock import MagicMock
from DIRAC import S_OK
dm_mock = MagicMock()
dm_mock.getReplicas.return_value = S_OK({'Successful': {'/a/lfn/1.txt': {'SE1': '/a/lfn/at/SE1.1.txt',
'SE2': '/a/lfn/at/SE2.1.txt'},
'/a/lfn/2.txt': {'SE1': '/a/lfn/at/SE1.1.txt'}},
'Failed': {}})
dm_mock.getActiveReplicas.return_value = dm_mock.getReplicas.return_value
dm_mock.getReplicasForJobs.return_value = dm_mock.getReplicas.return_value
dm_mock.getCatalogFileMetadata.return_value = {'OK': True, 'Value': {'Successful': {'pippo': 'metadataPippo'},
'Failed': None}}
dm_mock.removeFile.return_value = {'OK': True, 'Value': {'Failed': False}}
dm_mock.putStorageDirectory.return_value = {'OK': True, 'Value': {'Failed': False}}
dm_mock.addCatalogFile.return_value = {'OK': True, 'Value': {'Failed': False}}
dm_mock.putAndRegister.return_value = {'OK': True, 'Value': {'Failed': False}}
dm_mock.getFile.return_value = {'OK': True, 'Value': {'Failed': False}}
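# A minimal usage sketch (hypothetical test code, not part of the original
# module): the mock can be injected wherever a real DataManager is expected.
#
#   replicas = dm_mock.getReplicas(['/a/lfn/1.txt'])['Value']['Successful']
#   # replicas['/a/lfn/1.txt'] == {'SE1': '/a/lfn/at/SE1.1.txt',
#   #                              'SE2': '/a/lfn/at/SE2.1.txt'}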
|
gpl-3.0
|
gopal1cloud/neutron
|
neutron/tests/unit/ml2/drivers/cisco/apic/test_cisco_apic_client.py
|
8
|
12242
|
# Copyright (c) 2014 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Henry Gessau, Cisco Systems
import mock
import requests
import requests.exceptions
from neutron.plugins.ml2.drivers.cisco.apic import apic_client as apic
from neutron.plugins.ml2.drivers.cisco.apic import exceptions as cexc
from neutron.tests import base
from neutron.tests.unit.ml2.drivers.cisco.apic import (
test_cisco_apic_common as mocked)
class TestCiscoApicClient(base.BaseTestCase, mocked.ControllerMixin):
def setUp(self):
super(TestCiscoApicClient, self).setUp()
self.set_up_mocks()
self.apic = apic.RestClient(mocked.APIC_HOST)
self.addCleanup(mock.patch.stopall)
def _mock_authenticate(self, timeout=300):
self.reset_reponses()
self.mock_apic_manager_login_responses(timeout=timeout)
self.apic.login(mocked.APIC_USR, mocked.APIC_PWD)
def test_login_by_instantiation(self):
self.reset_reponses()
self.mock_apic_manager_login_responses()
apic2 = apic.RestClient(mocked.APIC_HOST,
usr=mocked.APIC_USR, pwd=mocked.APIC_PWD)
self.assertIsNotNone(apic2.authentication)
self.assertEqual(apic2.username, mocked.APIC_USR)
def test_client_session_login_ok(self):
self._mock_authenticate()
self.assertEqual(
self.apic.authentication['userName'], mocked.APIC_USR)
self.assertTrue(self.apic.api_base.startswith('http://'))
self.assertEqual(self.apic.username, mocked.APIC_USR)
self.assertIsNotNone(self.apic.authentication)
self.apic = apic.RestClient(mocked.APIC_HOST, mocked.APIC_PORT,
ssl=True)
self.assertTrue(self.apic.api_base.startswith('https://'))
def test_client_session_login_fail(self):
self.mock_error_post_response(requests.codes.unauthorized,
code='599',
text=u'Fake error')
self.assertRaises(cexc.ApicResponseNotOk, self.apic.login,
mocked.APIC_USR, mocked.APIC_PWD)
def test_client_session_login_timeout(self):
self.response['post'].append(requests.exceptions.Timeout)
self.assertRaises(cexc.ApicHostNoResponse, self.apic.login,
mocked.APIC_USR, mocked.APIC_PWD)
def test_client_session_logout_ok(self):
self.mock_response_for_post('aaaLogout')
self.apic.logout()
self.assertIsNone(self.apic.authentication)
# Multiple signouts should not cause an error
self.apic.logout()
self.assertIsNone(self.apic.authentication)
def test_client_session_logout_fail(self):
self._mock_authenticate()
self.mock_error_post_response(requests.codes.timeout,
code='123', text='failed')
self.assertRaises(cexc.ApicResponseNotOk, self.apic.logout)
def test_query_not_logged_in(self):
self.apic.authentication = None
self.assertRaises(cexc.ApicSessionNotLoggedIn,
self.apic.fvTenant.get, mocked.APIC_TENANT)
def test_query_no_response(self):
self._mock_authenticate()
requests.Session.get = mock.Mock(return_value=None)
self.assertRaises(cexc.ApicHostNoResponse,
self.apic.fvTenant.get, mocked.APIC_TENANT)
def test_query_error_response_no_data(self):
self._mock_authenticate()
self.mock_error_get_response(requests.codes.bad) # No error attrs.
self.assertRaises(cexc.ApicResponseNotOk,
self.apic.fvTenant.get, mocked.APIC_TENANT)
def test_generic_get_data(self):
self._mock_authenticate()
self.mock_response_for_get('topSystem', name='ifc1')
top_system = self.apic.get_data('class/topSystem')
self.assertIsNotNone(top_system)
name = top_system[0]['topSystem']['attributes']['name']
self.assertEqual(name, 'ifc1')
def test_session_timeout_refresh_ok(self):
self._mock_authenticate(timeout=-1)
# Client will do refresh before getting tenant
self.mock_response_for_get('aaaLogin', token='ok',
refreshTimeoutSeconds=300)
self.mock_response_for_get('fvTenant', name=mocked.APIC_TENANT)
tenant = self.apic.fvTenant.get(mocked.APIC_TENANT)
self.assertEqual(tenant['name'], mocked.APIC_TENANT)
def test_session_timeout_refresh_no_cookie(self):
self._mock_authenticate(timeout=-1)
# Client will do refresh before getting tenant
self.mock_response_for_get('aaaLogin', notoken='test')
self.assertRaises(cexc.ApicResponseNoCookie,
self.apic.fvTenant.get, mocked.APIC_TENANT)
def test_session_timeout_refresh_error(self):
self._mock_authenticate(timeout=-1)
self.mock_error_get_response(requests.codes.timeout,
code='503', text=u'timed out')
self.assertRaises(cexc.ApicResponseNotOk,
self.apic.fvTenant.get, mocked.APIC_TENANT)
def test_session_timeout_refresh_timeout_error(self):
self._mock_authenticate(timeout=-1)
# Client will try to get refresh, we fake a refresh error.
self.mock_error_get_response(requests.codes.bad_request,
code='403',
text=u'Token was invalid. Expired.')
# Client will then try to re-login.
self.mock_apic_manager_login_responses()
# Finally the client will try to get the tenant.
self.mock_response_for_get('fvTenant', name=mocked.APIC_TENANT)
tenant = self.apic.fvTenant.get(mocked.APIC_TENANT)
self.assertEqual(tenant['name'], mocked.APIC_TENANT)
def test_lookup_mo_bad_token_retry(self):
self._mock_authenticate()
# For the first get request we mock a bad token.
self.mock_error_get_response(requests.codes.bad_request,
code='403',
text=u'Token was invalid. Expired.')
# Client will then try to re-login.
self.mock_apic_manager_login_responses()
# Then the client will retry to get the tenant.
self.mock_response_for_get('fvTenant', name=mocked.APIC_TENANT)
tenant = self.apic.fvTenant.get(mocked.APIC_TENANT)
self.assertEqual(tenant['name'], mocked.APIC_TENANT)
def test_use_unsupported_managed_object(self):
self._mock_authenticate()
# unittest.assertRaises cannot catch exceptions raised in
# __getattr__, so we need to defer the evaluation using lambda.
self.assertRaises(cexc.ApicManagedObjectNotSupported,
lambda: self.apic.nonexistentObject)
def test_lookup_nonexistant_mo(self):
self._mock_authenticate()
self.mock_response_for_get('fvTenant')
self.assertIsNone(self.apic.fvTenant.get(mocked.APIC_TENANT))
def test_lookup_existing_mo(self):
self._mock_authenticate()
self.mock_response_for_get('fvTenant', name='infra')
tenant = self.apic.fvTenant.get('infra')
self.assertEqual(tenant['name'], 'infra')
def test_list_mos_ok(self):
self._mock_authenticate()
self.mock_response_for_get('fvTenant', name='t1')
self.mock_append_to_response('fvTenant', name='t2')
tlist = self.apic.fvTenant.list_all()
self.assertIsNotNone(tlist)
self.assertEqual(len(tlist), 2)
self.assertIn({'name': 't1'}, tlist)
self.assertIn({'name': 't2'}, tlist)
def test_list_mo_names_ok(self):
self._mock_authenticate()
self.mock_response_for_get('fvTenant', name='t1')
self.mock_append_to_response('fvTenant', name='t2')
tnlist = self.apic.fvTenant.list_names()
self.assertIsNotNone(tnlist)
self.assertEqual(len(tnlist), 2)
self.assertIn('t1', tnlist)
self.assertIn('t2', tnlist)
def test_list_mos_split_class_fail(self):
self._mock_authenticate()
self.mock_response_for_get('fvnsEncapBlk', name='Blk1')
encap_blks = self.apic.fvnsEncapBlk__vlan.list_all()
self.assertEqual(len(encap_blks), 1)
def test_delete_mo_ok(self):
self._mock_authenticate()
self.mock_response_for_post('fvTenant')
self.assertTrue(self.apic.fvTenant.delete(mocked.APIC_TENANT))
def test_create_mo_ok(self):
self._mock_authenticate()
self.mock_response_for_post('fvTenant', name=mocked.APIC_TENANT)
self.mock_response_for_get('fvTenant', name=mocked.APIC_TENANT)
self.apic.fvTenant.create(mocked.APIC_TENANT)
tenant = self.apic.fvTenant.get(mocked.APIC_TENANT)
self.assertEqual(tenant['name'], mocked.APIC_TENANT)
def test_create_mo_already_exists(self):
self._mock_authenticate()
self.mock_error_post_response(requests.codes.bad_request,
code='103',
text=u'Fake 103 error')
self.assertRaises(cexc.ApicResponseNotOk,
self.apic.vmmProvP.create, mocked.APIC_VMMP)
def test_create_mo_with_prereq(self):
self._mock_authenticate()
self.mock_response_for_post('fvTenant', name=mocked.APIC_TENANT)
self.mock_response_for_post('fvBD', name=mocked.APIC_NETWORK)
self.mock_response_for_get('fvBD', name=mocked.APIC_NETWORK)
bd_args = mocked.APIC_TENANT, mocked.APIC_NETWORK
self.apic.fvBD.create(*bd_args)
network = self.apic.fvBD.get(*bd_args)
self.assertEqual(network['name'], mocked.APIC_NETWORK)
def test_create_mo_prereq_exists(self):
self._mock_authenticate()
self.mock_response_for_post('vmmDomP', name=mocked.APIC_DOMAIN)
self.mock_response_for_get('vmmDomP', name=mocked.APIC_DOMAIN)
self.apic.vmmDomP.create(mocked.APIC_VMMP, mocked.APIC_DOMAIN)
dom = self.apic.vmmDomP.get(mocked.APIC_VMMP, mocked.APIC_DOMAIN)
self.assertEqual(dom['name'], mocked.APIC_DOMAIN)
def test_create_mo_fails(self):
self._mock_authenticate()
self.mock_response_for_post('fvTenant', name=mocked.APIC_TENANT)
self.mock_error_post_response(requests.codes.bad_request,
code='not103',
text=u'Fake not103 error')
bd_args = mocked.APIC_TENANT, mocked.APIC_NETWORK
self.assertRaises(cexc.ApicResponseNotOk,
self.apic.fvBD.create, *bd_args)
def test_update_mo(self):
self._mock_authenticate()
self.mock_response_for_post('fvTenant', name=mocked.APIC_TENANT)
self.mock_response_for_get('fvTenant', name=mocked.APIC_TENANT,
more='extra')
self.apic.fvTenant.update(mocked.APIC_TENANT, more='extra')
tenant = self.apic.fvTenant.get(mocked.APIC_TENANT)
self.assertEqual(tenant['name'], mocked.APIC_TENANT)
self.assertEqual(tenant['more'], 'extra')
def test_attr_fail_empty_list(self):
self._mock_authenticate()
self.mock_response_for_get('fvTenant') # No attrs for tenant.
self.assertIsNone(self.apic.fvTenant.get(mocked.APIC_TENANT))
def test_attr_fail_other_obj(self):
self._mock_authenticate()
self.mock_response_for_get('other', name=mocked.APIC_TENANT)
self.assertIsNone(self.apic.fvTenant.get(mocked.APIC_TENANT))
|
apache-2.0
|
joaormatos/anaconda
|
Anaconda/pyglet/image/codecs/gdkpixbuf2.py
|
9
|
5863
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from ctypes import *
from pyglet.gl import *
from pyglet.image import *
from pyglet.image.codecs import *
from pyglet.image.codecs import gif
import pyglet.lib
import pyglet.window
gdk = pyglet.lib.load_library('gdk-x11-2.0')
gdkpixbuf = pyglet.lib.load_library('gdk_pixbuf-2.0')
GdkPixbufLoader = c_void_p
GdkPixbuf = c_void_p
gdkpixbuf.gdk_pixbuf_loader_new.restype = GdkPixbufLoader
gdkpixbuf.gdk_pixbuf_loader_get_pixbuf.restype = GdkPixbuf
gdkpixbuf.gdk_pixbuf_get_pixels.restype = c_void_p
gdkpixbuf.gdk_pixbuf_loader_get_animation.restype = c_void_p
gdkpixbuf.gdk_pixbuf_animation_get_iter.restype = c_void_p
gdkpixbuf.gdk_pixbuf_animation_iter_get_pixbuf.restype = GdkPixbuf
class GTimeVal(Structure):
_fields_ = [
('tv_sec', c_long),
('tv_usec', c_long)
]
class GdkPixbuf2ImageDecoder(ImageDecoder):
def get_file_extensions(self):
return ['.png', '.xpm', '.jpg', '.jpeg', '.tif', '.tiff', '.pnm',
'.ras', '.bmp', '.gif']
def get_animation_file_extensions(self):
return ['.gif', '.ani']
def _load(self, file, filename, load_func):
data = file.read()
loader = gdkpixbuf.gdk_pixbuf_loader_new()
gdkpixbuf.gdk_pixbuf_loader_write(loader, data, len(data), None)
if not gdkpixbuf.gdk_pixbuf_loader_close(loader, None):
raise ImageDecodeException(filename)
result = load_func(loader)
if not result:
raise ImageDecodeException('Unable to load: %s' % filename)
return result
def _pixbuf_to_image(self, pixbuf):
# Get format and dimensions
width = gdkpixbuf.gdk_pixbuf_get_width(pixbuf)
height = gdkpixbuf.gdk_pixbuf_get_height(pixbuf)
channels = gdkpixbuf.gdk_pixbuf_get_n_channels(pixbuf)
rowstride = gdkpixbuf.gdk_pixbuf_get_rowstride(pixbuf)
#has_alpha = gdkpixbuf.gdk_pixbuf_get_has_alpha(pixbuf)
pixels = gdkpixbuf.gdk_pixbuf_get_pixels(pixbuf)
# Copy pixel data.
buffer = (c_ubyte * (rowstride * height))()
memmove(buffer, pixels, rowstride * (height - 1) + width * channels)
# Release pixbuf
gdk.g_object_unref(pixbuf)
# Determine appropriate GL type
if channels == 3:
format = 'RGB'
else:
format = 'RGBA'
return ImageData(width, height, format, buffer, -rowstride)
def decode(self, file, filename):
pixbuf = self._load(file, filename,
gdkpixbuf.gdk_pixbuf_loader_get_pixbuf)
return self._pixbuf_to_image(pixbuf)
def decode_animation(self, file, filename):
# Extract GIF control data. If it's not a GIF, this method will
# raise.
gif_stream = gif.read(file)
delays = [image.delay for image in gif_stream.images]
# Get GDK animation iterator
file.seek(0)
anim = self._load(file, filename,
gdkpixbuf.gdk_pixbuf_loader_get_animation)
time = GTimeVal(0, 0)
iter = gdkpixbuf.gdk_pixbuf_animation_get_iter(anim, byref(time))
frames = []
# Extract each image
for control_delay in delays:
pixbuf = gdkpixbuf.gdk_pixbuf_animation_iter_get_pixbuf(iter)
image = self._pixbuf_to_image(pixbuf)
frames.append(AnimationFrame(image, control_delay))
gdk_delay = gdkpixbuf.gdk_pixbuf_animation_iter_get_delay_time(iter)
gdk_delay *= 1000 # milliseconds to microseconds
# Compare gdk_delay to control_delay for interest only.
#print control_delay, gdk_delay / 1000000.
if gdk_delay == -1:
break
us = time.tv_usec + gdk_delay
time.tv_sec += us // 1000000
time.tv_usec = us % 1000000
gdkpixbuf.gdk_pixbuf_animation_iter_advance(iter, byref(time))
return Animation(frames)
def get_decoders():
return [GdkPixbuf2ImageDecoder()]
def get_encoders():
return []
def init():
gdk.g_type_init()
init()
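# A minimal usage sketch, not part of the original module: it assumes an
# 'example.png' file exists on disk and that the GDK libraries loaded above
# are available. decode() handles still images; decode_animation() handles
# GIF/ANI files.
def _example_decode(path='example.png'):
    decoder = GdkPixbuf2ImageDecoder()
    with open(path, 'rb') as f:
        return decoder.decode(f, path)  # returns a pyglet ImageData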
|
gpl-3.0
|
ytflzq/laravel-learn-demo
|
vendor/mockery/mockery/docs/conf.py
|
468
|
8442
|
# -*- coding: utf-8 -*-
#
# Mockery Docs documentation build configuration file, created by
# sphinx-quickstart on Mon Mar 3 14:04:26 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
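# For example, with a hypothetical layout where the documented sources live in
# a sibling 'src' directory, the line above would become:
#sys.path.insert(0, os.path.abspath('../src'))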
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.todo',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Mockery Docs'
copyright = u'2014, Pádraic Brady, Dave Marshall, Wouter, Graham Campbell'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.9'
# The full version, including alpha/beta/rc tags.
release = '0.9'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MockeryDocsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index2', 'MockeryDocs.tex', u'Mockery Docs Documentation',
u'Pádraic Brady, Dave Marshall, Wouter, Graham Campbell', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index2', 'mockerydocs', u'Mockery Docs Documentation',
[u'Pádraic Brady, Dave Marshall, Wouter, Graham Campbell'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index2', 'MockeryDocs', u'Mockery Docs Documentation',
u'Pádraic Brady, Dave Marshall, Wouter, Graham Campbell', 'MockeryDocs', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# on_rtd is whether we are building on readthedocs.org; this line of code was grabbed from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
print sphinx_rtd_theme.get_html_theme_path()
|
mit
|
mtdx/ml-algorithms
|
neural-networks/cnn.py
|
1
|
2621
|
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
n_classes = 10
batch_size = 128
x = tf.placeholder('float', [None, 784])
y = tf.placeholder('float')
keep_rate = 0.8
keep_prob = tf.placeholder(tf.float32)
def conv2d(x, W):
return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
def maxpool2d(x):
    # ksize is the 2x2 pooling window; strides moves that window two pixels at a time
return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
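# Shape bookkeeping behind the 7 * 7 * 64 figure used below: each image is
# reshaped to 28x28x1, conv1 keeps the spatial size ('SAME' padding) and
# maxpool2d halves it to 14x14, then conv2 plus maxpool2d halve it again to
# 7x7 with 64 feature maps, so the flattened tensor feeding the fully
# connected layer carries 7 * 7 * 64 = 3136 values per example.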
def convolutional_neural_network(x):
weights = {'W_conv1': tf.Variable(tf.random_normal([5, 5, 1, 32])),
'W_conv2': tf.Variable(tf.random_normal([5, 5, 32, 64])),
'W_fc': tf.Variable(tf.random_normal([7 * 7 * 64, 1024])),
'out': tf.Variable(tf.random_normal([1024, n_classes]))}
biases = {'b_conv1': tf.Variable(tf.random_normal([32])),
'b_conv2': tf.Variable(tf.random_normal([64])),
'b_fc': tf.Variable(tf.random_normal([1024])),
'out': tf.Variable(tf.random_normal([n_classes]))}
x = tf.reshape(x, shape=[-1, 28, 28, 1])
conv1 = tf.nn.relu(conv2d(x, weights['W_conv1']) + biases['b_conv1'])
conv1 = maxpool2d(conv1)
conv2 = tf.nn.relu(conv2d(conv1, weights['W_conv2']) + biases['b_conv2'])
conv2 = maxpool2d(conv2)
fc = tf.reshape(conv2, [-1, 7 * 7 * 64])
fc = tf.nn.relu(tf.matmul(fc, weights['W_fc']) + biases['b_fc'])
fc = tf.nn.dropout(fc, keep_rate)
output = tf.matmul(fc, weights['out']) + biases['out']
return output
def train_neural_network(x):
prediction = convolutional_neural_network(x)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(prediction, y))
optimizer = tf.train.AdamOptimizer().minimize(cost)
hm_epochs = 10
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for epoch in range(hm_epochs):
epoch_loss = 0
for _ in range(int(mnist.train.num_examples / batch_size)):
epoch_x, epoch_y = mnist.train.next_batch(batch_size)
_, c = sess.run([optimizer, cost], feed_dict={x: epoch_x, y: epoch_y})
epoch_loss += c
print('Epoch', epoch, 'completed out of', hm_epochs, 'loss:', epoch_loss)
correct = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
print('Accuracy:', accuracy.eval({x: mnist.test.images, y: mnist.test.labels}))
train_neural_network(x)
|
mit
|
petecummings/NewsBlur
|
vendor/tweepy/utils.py
|
32
|
1355
|
# Tweepy
# Copyright 2010 Joshua Roesslein
# See LICENSE for details.
from datetime import datetime
import time
import htmlentitydefs
import re
import locale
from urllib import quote
from email.utils import parsedate
def parse_datetime(string):
return datetime(*(parsedate(string)[:6]))
def parse_html_value(html):
return html[html.find('>')+1:html.rfind('<')]
def parse_a_href(atag):
start = atag.find('"') + 1
end = atag.find('"', start)
return atag[start:end]
def convert_to_utf8_str(arg):
# written by Michael Norton (http://docondev.blogspot.com/)
if isinstance(arg, unicode):
arg = arg.encode('utf-8')
elif not isinstance(arg, str):
arg = str(arg)
return arg
def import_simplejson():
try:
import simplejson as json
except ImportError:
try:
import json # Python 2.6+
except ImportError:
try:
from django.utils import simplejson as json # Google App Engine
except ImportError:
raise ImportError, "Can't load a json library"
return json
def list_to_csv(item_list):
if item_list:
return ','.join([str(i) for i in item_list])
def urlencode_noplus(query):
return '&'.join(['%s=%s' % (quote(str(k), ''), quote(str(v), '')) \
for k, v in query.iteritems()])
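# A small illustrative sketch, not part of the original module, showing what
# the helpers above produce for a few sample inputs.
def _example_helpers():
    assert list_to_csv([1, 2, 3]) == '1,2,3'
    assert urlencode_noplus({'q': 'a b'}) == 'q=a%20b'  # spaces become %20, never '+'
    return parse_html_value('<a href="x">hello</a>')  # -> 'hello'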
|
mit
|
eckardm/archivematica
|
src/MCPClient/lib/clientScripts/createEvent.py
|
2
|
2433
|
#!/usr/bin/env python2
# This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage archivematicaClientScript
# @author Joseph Perry <joseph@artefactual.com>
from optparse import OptionParser
import uuid
# databaseFunctions requires Django to be set up
import django
django.setup()
# archivematicaCommon
from custom_handlers import get_script_logger
from databaseFunctions import insertIntoEvents
if __name__ == '__main__':
logger = get_script_logger("archivematica.mcp.client.createEvent")
parser = OptionParser()
parser.add_option("-i", "--fileUUID", action="store", dest="fileUUID", default="")
parser.add_option("-t", "--eventType", action="store", dest="eventType", default="")
parser.add_option("-d", "--eventDateTime", action="store", dest="eventDateTime", default="")
parser.add_option("-e", "--eventDetail", action="store", dest="eventDetail", default="")
parser.add_option("-o", "--eventOutcome", action="store", dest="eventOutcome", default="")
parser.add_option("-n", "--eventOutcomeDetailNote", action="store", dest="eventOutcomeDetailNote", default="")
parser.add_option("-u", "--eventIdentifierUUID", action="store", dest="eventIdentifierUUID", default="")
(opts, args) = parser.parse_args()
insertIntoEvents(fileUUID=opts.fileUUID, \
eventIdentifierUUID=str(uuid.uuid4()), \
eventType=opts.eventType, \
eventDateTime=opts.eventDateTime, \
eventDetail=opts.eventDetail, \
eventOutcome=opts.eventOutcome, \
eventOutcomeDetailNote=opts.eventOutcomeDetailNote)
|
agpl-3.0
|
snazy/cassandra-dtest
|
cassandra-thrift/v11/ttypes.py
|
11
|
123631
|
#
# Autogenerated by Thrift Compiler (0.8.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class ConsistencyLevel:
"""
The ConsistencyLevel is an enum that controls both read and write
behavior based on the ReplicationFactor of the keyspace. The
different consistency levels have different meanings, depending on
if you're doing a write or read operation.
If W + R > ReplicationFactor, where W is the number of nodes to
block for on write, and R the number to block for on reads, you
will have strongly consistent behavior; that is, readers will
always see the most recent write. Of these, the most interesting is
to do QUORUM reads and writes, which gives you consistency while
still allowing availability in the face of node failures up to half
of <ReplicationFactor>. Of course if latency is more important than
consistency then you can use lower values for either or both.
Some ConsistencyLevels (ONE, TWO, THREE) refer to a specific number
of replicas rather than a logical concept that adjusts
automatically with the replication factor. Of these, only ONE is
commonly used; TWO and (even more rarely) THREE are only useful
when you care more about guaranteeing a certain level of
durability, than consistency.
Write consistency levels make the following guarantees before reporting success to the client:
ANY Ensure that the write has been written once somewhere, including possibly being hinted in a non-target node.
ONE Ensure that the write has been written to at least 1 node's commit log and memory table
TWO Ensure that the write has been written to at least 2 node's commit log and memory table
THREE Ensure that the write has been written to at least 3 node's commit log and memory table
QUORUM Ensure that the write has been written to <ReplicationFactor> / 2 + 1 nodes
LOCAL_QUORUM Ensure that the write has been written to <ReplicationFactor> / 2 + 1 nodes, within the local datacenter (requires NetworkTopologyStrategy)
EACH_QUORUM Ensure that the write has been written to <ReplicationFactor> / 2 + 1 nodes in each datacenter (requires NetworkTopologyStrategy)
ALL Ensure that the write is written to <code><ReplicationFactor></code> nodes before responding to the client.
Read consistency levels make the following guarantees before returning successful results to the client:
ANY Not supported. You probably want ONE instead.
ONE Returns the record obtained from a single replica.
TWO Returns the record with the most recent timestamp once two replicas have replied.
THREE Returns the record with the most recent timestamp once three replicas have replied.
QUORUM Returns the record with the most recent timestamp once a majority of replicas have replied.
LOCAL_QUORUM Returns the record with the most recent timestamp once a majority of replicas within the local datacenter have replied.
EACH_QUORUM Returns the record with the most recent timestamp once a majority of replicas within each datacenter have replied.
  ALL Returns the record with the most recent timestamp once all replicas have replied (implies no replica may be down). A small worked example of the W + R arithmetic follows this class definition.
"""
ONE = 1
QUORUM = 2
LOCAL_QUORUM = 3
EACH_QUORUM = 4
ALL = 5
ANY = 6
TWO = 7
THREE = 8
_VALUES_TO_NAMES = {
1: "ONE",
2: "QUORUM",
3: "LOCAL_QUORUM",
4: "EACH_QUORUM",
5: "ALL",
6: "ANY",
7: "TWO",
8: "THREE",
}
_NAMES_TO_VALUES = {
"ONE": 1,
"QUORUM": 2,
"LOCAL_QUORUM": 3,
"EACH_QUORUM": 4,
"ALL": 5,
"ANY": 6,
"TWO": 7,
"THREE": 8,
}
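# A small worked example of the consistency arithmetic described in the
# docstring above (illustrative only, not part of the generated module): with
# ReplicationFactor = 3, QUORUM blocks on 3 // 2 + 1 = 2 replicas, so QUORUM
# writes (W = 2) plus QUORUM reads (R = 2) give W + R = 4 > 3 and reads see
# the latest acknowledged write; ONE writes with ONE reads give W + R = 2,
# which is not > 3, so stale reads are possible.
def _example_quorum_arithmetic(replication_factor=3):
  quorum = replication_factor // 2 + 1
  return quorum + quorum > replication_factor  # True for any replication_factor >= 1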
class IndexOperator:
EQ = 0
GTE = 1
GT = 2
LTE = 3
LT = 4
_VALUES_TO_NAMES = {
0: "EQ",
1: "GTE",
2: "GT",
3: "LTE",
4: "LT",
}
_NAMES_TO_VALUES = {
"EQ": 0,
"GTE": 1,
"GT": 2,
"LTE": 3,
"LT": 4,
}
class IndexType:
KEYS = 0
CUSTOM = 1
_VALUES_TO_NAMES = {
0: "KEYS",
1: "CUSTOM",
}
_NAMES_TO_VALUES = {
"KEYS": 0,
"CUSTOM": 1,
}
class Compression:
"""
CQL query compression
"""
GZIP = 1
NONE = 2
_VALUES_TO_NAMES = {
1: "GZIP",
2: "NONE",
}
_NAMES_TO_VALUES = {
"GZIP": 1,
"NONE": 2,
}
class CqlResultType:
ROWS = 1
VOID = 2
INT = 3
_VALUES_TO_NAMES = {
1: "ROWS",
2: "VOID",
3: "INT",
}
_NAMES_TO_VALUES = {
"ROWS": 1,
"VOID": 2,
"INT": 3,
}
class Column:
"""
Basic unit of data within a ColumnFamily.
@param name, the name by which this column is set and retrieved. Maximum 64KB long.
@param value. The data associated with the name. Maximum 2GB long, but in practice you should limit it to small numbers of MB (since Thrift must read the full value into memory to operate on it).
@param timestamp. The timestamp is used for conflict detection/resolution when two columns with same name need to be compared.
@param ttl. An optional, positive delay (in seconds) after which the column will be automatically deleted.
Attributes:
- name
- value
- timestamp
- ttl
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'value', None, None, ), # 2
(3, TType.I64, 'timestamp', None, None, ), # 3
(4, TType.I32, 'ttl', None, None, ), # 4
)
def __init__(self, name=None, value=None, timestamp=None, ttl=None,):
self.name = name
self.value = value
self.timestamp = timestamp
self.ttl = ttl
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.value = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I64:
self.timestamp = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.ttl = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Column')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.value is not None:
oprot.writeFieldBegin('value', TType.STRING, 2)
oprot.writeString(self.value)
oprot.writeFieldEnd()
if self.timestamp is not None:
oprot.writeFieldBegin('timestamp', TType.I64, 3)
oprot.writeI64(self.timestamp)
oprot.writeFieldEnd()
if self.ttl is not None:
oprot.writeFieldBegin('ttl', TType.I32, 4)
oprot.writeI32(self.ttl)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
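# A minimal construction sketch with illustrative values, not part of the
# generated module; by convention the timestamp is microseconds since the
# epoch and ttl is an optional positive number of seconds.
def _example_column():
  import time
  return Column(name='city', value='Berlin',
                timestamp=int(time.time() * 1e6), ttl=3600)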
class SuperColumn:
"""
A named list of columns.
@param name. see Column.name.
@param columns. A collection of standard Columns. The columns within a super column are defined in an adhoc manner.
Columns within a super column do not have to have matching structures (similarly named child columns).
Attributes:
- name
- columns
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.LIST, 'columns', (TType.STRUCT,(Column, Column.thrift_spec)), None, ), # 2
)
def __init__(self, name=None, columns=None,):
self.name = name
self.columns = columns
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.columns = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in xrange(_size0):
_elem5 = Column()
_elem5.read(iprot)
self.columns.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SuperColumn')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.columns is not None:
oprot.writeFieldBegin('columns', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.columns))
for iter6 in self.columns:
iter6.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
if self.columns is None:
raise TProtocol.TProtocolException(message='Required field columns is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CounterColumn:
"""
Attributes:
- name
- value
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.I64, 'value', None, None, ), # 2
)
def __init__(self, name=None, value=None,):
self.name = name
self.value = value
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I64:
self.value = iprot.readI64();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CounterColumn')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.value is not None:
oprot.writeFieldBegin('value', TType.I64, 2)
oprot.writeI64(self.value)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
if self.value is None:
raise TProtocol.TProtocolException(message='Required field value is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CounterSuperColumn:
"""
Attributes:
- name
- columns
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.LIST, 'columns', (TType.STRUCT,(CounterColumn, CounterColumn.thrift_spec)), None, ), # 2
)
def __init__(self, name=None, columns=None,):
self.name = name
self.columns = columns
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.columns = []
(_etype10, _size7) = iprot.readListBegin()
for _i11 in xrange(_size7):
_elem12 = CounterColumn()
_elem12.read(iprot)
self.columns.append(_elem12)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CounterSuperColumn')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.columns is not None:
oprot.writeFieldBegin('columns', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.columns))
for iter13 in self.columns:
iter13.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
if self.columns is None:
raise TProtocol.TProtocolException(message='Required field columns is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ColumnOrSuperColumn:
"""
Methods for fetching rows/records from Cassandra will return either a single instance of ColumnOrSuperColumn or a list
of ColumnOrSuperColumns (get_slice()). If you're looking up a SuperColumn (or list of SuperColumns) then the resulting
instances of ColumnOrSuperColumn will have the requested SuperColumn in the attribute super_column. For queries resulting
in Columns, those values will be in the attribute column. This change was made between 0.3 and 0.4 to standardize on
single query methods that may return either a SuperColumn or Column.
If the query was on a counter column family, you will either get a counter_column (instead of a column) or a
counter_super_column (instead of a super_column)
@param column. The Column returned by get() or get_slice().
@param super_column. The SuperColumn returned by get() or get_slice().
  @param counter_column. The CounterColumn returned by get() or get_slice().
@param counter_super_column. The CounterSuperColumn returned by get() or get_slice().
Attributes:
- column
- super_column
- counter_column
- counter_super_column
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'column', (Column, Column.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'super_column', (SuperColumn, SuperColumn.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'counter_column', (CounterColumn, CounterColumn.thrift_spec), None, ), # 3
(4, TType.STRUCT, 'counter_super_column', (CounterSuperColumn, CounterSuperColumn.thrift_spec), None, ), # 4
)
def __init__(self, column=None, super_column=None, counter_column=None, counter_super_column=None,):
self.column = column
self.super_column = super_column
self.counter_column = counter_column
self.counter_super_column = counter_super_column
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.column = Column()
self.column.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.super_column = SuperColumn()
self.super_column.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.counter_column = CounterColumn()
self.counter_column.read(iprot)
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.counter_super_column = CounterSuperColumn()
self.counter_super_column.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ColumnOrSuperColumn')
if self.column is not None:
oprot.writeFieldBegin('column', TType.STRUCT, 1)
self.column.write(oprot)
oprot.writeFieldEnd()
if self.super_column is not None:
oprot.writeFieldBegin('super_column', TType.STRUCT, 2)
self.super_column.write(oprot)
oprot.writeFieldEnd()
if self.counter_column is not None:
oprot.writeFieldBegin('counter_column', TType.STRUCT, 3)
self.counter_column.write(oprot)
oprot.writeFieldEnd()
if self.counter_super_column is not None:
oprot.writeFieldBegin('counter_super_column', TType.STRUCT, 4)
self.counter_super_column.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
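# A small helper sketch, not part of the generated module, showing how the
# docstring above is typically applied: only one of the four attributes is
# populated, so callers unwrap the result by checking which one is set.
def _unwrap_column_or_super_column(cosc):
  for attr in ('column', 'super_column', 'counter_column', 'counter_super_column'):
    value = getattr(cosc, attr)
    if value is not None:
      return attr, value
  return None, None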
class NotFoundException(TException):
"""
A specific column was requested that does not exist.
"""
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('NotFoundException')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class InvalidRequestException(TException):
"""
Invalid request could mean keyspace or column family does not exist, required parameters are missing, or a parameter is malformed.
why contains an associated error message.
Attributes:
- why
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'why', None, None, ), # 1
)
def __init__(self, why=None,):
self.why = why
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.why = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('InvalidRequestException')
if self.why is not None:
oprot.writeFieldBegin('why', TType.STRING, 1)
oprot.writeString(self.why)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.why is None:
raise TProtocol.TProtocolException(message='Required field why is unset!')
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class UnavailableException(TException):
"""
Not all the replicas required could be created and/or read.
"""
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('UnavailableException')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TimedOutException(TException):
"""
  RPC timeout was exceeded. Either a node failed mid-operation, or load was too high, or the requested op was too large.
"""
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TimedOutException')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class AuthenticationException(TException):
"""
invalid authentication request (invalid keyspace, user does not exist, or credentials invalid)
Attributes:
- why
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'why', None, None, ), # 1
)
def __init__(self, why=None,):
self.why = why
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.why = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('AuthenticationException')
if self.why is not None:
oprot.writeFieldBegin('why', TType.STRING, 1)
oprot.writeString(self.why)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.why is None:
raise TProtocol.TProtocolException(message='Required field why is unset!')
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class AuthorizationException(TException):
"""
invalid authorization request (user does not have access to keyspace)
Attributes:
- why
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'why', None, None, ), # 1
)
def __init__(self, why=None,):
self.why = why
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.why = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('AuthorizationException')
if self.why is not None:
oprot.writeFieldBegin('why', TType.STRING, 1)
oprot.writeString(self.why)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.why is None:
raise TProtocol.TProtocolException(message='Required field why is unset!')
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class SchemaDisagreementException(TException):
"""
schemas are not in agreement across all nodes
"""
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SchemaDisagreementException')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __str__(self):
return repr(self)
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ColumnParent:
"""
ColumnParent is used when selecting groups of columns from the same ColumnFamily. In directory structure terms, imagine
ColumnParent as ColumnPath + '/../'.
See also <a href="cassandra.html#Struct_ColumnPath">ColumnPath</a>
Attributes:
- column_family
- super_column
"""
thrift_spec = (
None, # 0
None, # 1
None, # 2
(3, TType.STRING, 'column_family', None, None, ), # 3
(4, TType.STRING, 'super_column', None, None, ), # 4
)
def __init__(self, column_family=None, super_column=None,):
self.column_family = column_family
self.super_column = super_column
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 3:
if ftype == TType.STRING:
self.column_family = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.super_column = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ColumnParent')
if self.column_family is not None:
oprot.writeFieldBegin('column_family', TType.STRING, 3)
oprot.writeString(self.column_family)
oprot.writeFieldEnd()
if self.super_column is not None:
oprot.writeFieldBegin('super_column', TType.STRING, 4)
oprot.writeString(self.super_column)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.column_family is None:
raise TProtocol.TProtocolException(message='Required field column_family is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ColumnPath:
"""
The ColumnPath is the path to a single column in Cassandra. It might make sense to think of ColumnPath and
ColumnParent in terms of a directory structure.
  ColumnPath is used to look up a single column.
@param column_family. The name of the CF of the column being looked up.
@param super_column. The super column name.
@param column. The column name.
Attributes:
- column_family
- super_column
- column
"""
thrift_spec = (
None, # 0
None, # 1
None, # 2
(3, TType.STRING, 'column_family', None, None, ), # 3
(4, TType.STRING, 'super_column', None, None, ), # 4
(5, TType.STRING, 'column', None, None, ), # 5
)
def __init__(self, column_family=None, super_column=None, column=None,):
self.column_family = column_family
self.super_column = super_column
self.column = column
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 3:
if ftype == TType.STRING:
self.column_family = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.super_column = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.column = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ColumnPath')
if self.column_family is not None:
oprot.writeFieldBegin('column_family', TType.STRING, 3)
oprot.writeString(self.column_family)
oprot.writeFieldEnd()
if self.super_column is not None:
oprot.writeFieldBegin('super_column', TType.STRING, 4)
oprot.writeString(self.super_column)
oprot.writeFieldEnd()
if self.column is not None:
oprot.writeFieldBegin('column', TType.STRING, 5)
oprot.writeString(self.column)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.column_family is None:
raise TProtocol.TProtocolException(message='Required field column_family is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
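# A construction sketch with illustrative names, not part of the generated
# module, contrasting the two structs above: ColumnParent addresses a group
# of columns (the "directory"), while ColumnPath pins down a single column
# (the "file").
def _example_paths():
  parent = ColumnParent(column_family='Users', super_column=None)
  path = ColumnPath(column_family='Users', super_column=None, column='email')
  return parent, path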
class SliceRange:
"""
A slice range is a structure that stores basic range, ordering and limit information for a query that will return
multiple columns. It could be thought of as Cassandra's version of LIMIT and ORDER BY
@param start. The column name to start the slice with. This attribute is not required, though there is no default value,
and can be safely set to '', i.e., an empty byte array, to start with the first column name. Otherwise, it
  must be a valid value under the rules of the Comparator defined for the given ColumnFamily.
@param finish. The column name to stop the slice at. This attribute is not required, though there is no default value,
and can be safely set to an empty byte array to not stop until 'count' results are seen. Otherwise, it
must also be a valid value to the ColumnFamily Comparator.
@param reversed. Whether the results should be ordered in reversed order. Similar to ORDER BY blah DESC in SQL.
@param count. How many columns to return. Similar to LIMIT in SQL. May be arbitrarily large, but Thrift will
materialize the whole result into memory before returning it to the client, so be aware that you may
be better served by iterating through slices by passing the last value of one call in as the 'start'
  of the next instead of increasing 'count' arbitrarily large (a short paging sketch following this class definition illustrates the pattern).
Attributes:
- start
- finish
- reversed
- count
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'start', None, None, ), # 1
(2, TType.STRING, 'finish', None, None, ), # 2
(3, TType.BOOL, 'reversed', None, False, ), # 3
(4, TType.I32, 'count', None, 100, ), # 4
)
def __init__(self, start=None, finish=None, reversed=thrift_spec[3][4], count=thrift_spec[4][4],):
self.start = start
self.finish = finish
self.reversed = reversed
self.count = count
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.start = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.finish = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BOOL:
self.reversed = iprot.readBool();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.count = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SliceRange')
if self.start is not None:
oprot.writeFieldBegin('start', TType.STRING, 1)
oprot.writeString(self.start)
oprot.writeFieldEnd()
if self.finish is not None:
oprot.writeFieldBegin('finish', TType.STRING, 2)
oprot.writeString(self.finish)
oprot.writeFieldEnd()
if self.reversed is not None:
oprot.writeFieldBegin('reversed', TType.BOOL, 3)
oprot.writeBool(self.reversed)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 4)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.start is None:
raise TProtocol.TProtocolException(message='Required field start is unset!')
if self.finish is None:
raise TProtocol.TProtocolException(message='Required field finish is unset!')
if self.reversed is None:
raise TProtocol.TProtocolException(message='Required field reversed is unset!')
if self.count is None:
raise TProtocol.TProtocolException(message='Required field count is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
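# A paging sketch for the pattern recommended in the docstring above. The
# fetch_page callable is a stand-in for an actual client call (for example a
# get_slice wrapper) and is an assumption, not part of the generated module:
# rather than one huge 'count', request modest pages and reuse the last
# column name seen as the next 'start', skipping the duplicated first column
# of each subsequent page.
def _page_columns(fetch_page, page_size=100):
  start = ''
  while True:
    columns = fetch_page(SliceRange(start=start, finish='',
                                    reversed=False, count=page_size))
    for column in columns:
      if start == '' or column.name != start:
        yield column
    if len(columns) < page_size:
      break
    start = columns[-1].name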
class SlicePredicate:
"""
A SlicePredicate is similar to a mathematic predicate (see http://en.wikipedia.org/wiki/Predicate_(mathematical_logic)),
which is described as "a property that the elements of a set have in common."
SlicePredicate's in Cassandra are described with either a list of column_names or a SliceRange. If column_names is
specified, slice_range is ignored.
  @param column_names. A list of column names to retrieve. This can be used similarly to Memcached's "multi-get" feature
to fetch N known column names. For instance, if you know you wish to fetch columns 'Joe', 'Jack',
and 'Jim' you can pass those column names as a list to fetch all three at once.
@param slice_range. A SliceRange describing how to range, order, and/or limit the slice.
Attributes:
- column_names
- slice_range
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'column_names', (TType.STRING,None), None, ), # 1
(2, TType.STRUCT, 'slice_range', (SliceRange, SliceRange.thrift_spec), None, ), # 2
)
def __init__(self, column_names=None, slice_range=None,):
self.column_names = column_names
self.slice_range = slice_range
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.column_names = []
(_etype17, _size14) = iprot.readListBegin()
for _i18 in xrange(_size14):
_elem19 = iprot.readString();
self.column_names.append(_elem19)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.slice_range = SliceRange()
self.slice_range.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('SlicePredicate')
if self.column_names is not None:
oprot.writeFieldBegin('column_names', TType.LIST, 1)
oprot.writeListBegin(TType.STRING, len(self.column_names))
for iter20 in self.column_names:
oprot.writeString(iter20)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.slice_range is not None:
oprot.writeFieldBegin('slice_range', TType.STRUCT, 2)
self.slice_range.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
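# A construction sketch with illustrative names, not part of the generated
# module, showing the two mutually exclusive forms described above; when
# column_names is set, slice_range is ignored.
def _example_predicates():
  by_name = SlicePredicate(column_names=['Joe', 'Jack', 'Jim'])
  by_range = SlicePredicate(
      slice_range=SliceRange(start='', finish='', reversed=False, count=50))
  return by_name, by_range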
class IndexExpression:
"""
Attributes:
- column_name
- op
- value
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'column_name', None, None, ), # 1
(2, TType.I32, 'op', None, None, ), # 2
(3, TType.STRING, 'value', None, None, ), # 3
)
def __init__(self, column_name=None, op=None, value=None,):
self.column_name = column_name
self.op = op
self.value = value
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.column_name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.op = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.value = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('IndexExpression')
if self.column_name is not None:
oprot.writeFieldBegin('column_name', TType.STRING, 1)
oprot.writeString(self.column_name)
oprot.writeFieldEnd()
if self.op is not None:
oprot.writeFieldBegin('op', TType.I32, 2)
oprot.writeI32(self.op)
oprot.writeFieldEnd()
if self.value is not None:
oprot.writeFieldBegin('value', TType.STRING, 3)
oprot.writeString(self.value)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.column_name is None:
raise TProtocol.TProtocolException(message='Required field column_name is unset!')
if self.op is None:
raise TProtocol.TProtocolException(message='Required field op is unset!')
if self.value is None:
raise TProtocol.TProtocolException(message='Required field value is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class IndexClause:
"""
@Deprecated: use a KeyRange with row_filter in get_range_slices instead
Attributes:
- expressions
- start_key
- count
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'expressions', (TType.STRUCT,(IndexExpression, IndexExpression.thrift_spec)), None, ), # 1
(2, TType.STRING, 'start_key', None, None, ), # 2
(3, TType.I32, 'count', None, 100, ), # 3
)
def __init__(self, expressions=None, start_key=None, count=thrift_spec[3][4],):
self.expressions = expressions
self.start_key = start_key
self.count = count
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.expressions = []
(_etype24, _size21) = iprot.readListBegin()
for _i25 in xrange(_size21):
_elem26 = IndexExpression()
_elem26.read(iprot)
self.expressions.append(_elem26)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.start_key = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.count = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('IndexClause')
if self.expressions is not None:
oprot.writeFieldBegin('expressions', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.expressions))
for iter27 in self.expressions:
iter27.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.start_key is not None:
oprot.writeFieldBegin('start_key', TType.STRING, 2)
oprot.writeString(self.start_key)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 3)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.expressions is None:
raise TProtocol.TProtocolException(message='Required field expressions is unset!')
if self.start_key is None:
raise TProtocol.TProtocolException(message='Required field start_key is unset!')
if self.count is None:
raise TProtocol.TProtocolException(message='Required field count is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
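# Illustrative sketch (editorial addition): the deprecated IndexClause path pairs a list of
# IndexExpressions with a paging start_key. The op value assumes the IndexOperator enum
# generated elsewhere in this module (EQ == 0); KeyRange.row_filter is the recommended
# replacement, per the @Deprecated note above.
#
#   expr = IndexExpression(column_name='birth_year', op=0, value='1978')  # 0 == EQ (assumed)
#   clause = IndexClause(expressions=[expr], start_key='', count=100)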
class KeyRange:
"""
The semantics of start keys and tokens are slightly different.
Keys are start-inclusive; tokens are start-exclusive. Token
ranges may also wrap -- that is, the end token may be less
than the start one. Thus, a range from keyX to keyX is a
one-element range, but a range from tokenY to tokenY is the
full ring.
Attributes:
- start_key
- end_key
- start_token
- end_token
- row_filter
- count
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'start_key', None, None, ), # 1
(2, TType.STRING, 'end_key', None, None, ), # 2
(3, TType.STRING, 'start_token', None, None, ), # 3
(4, TType.STRING, 'end_token', None, None, ), # 4
(5, TType.I32, 'count', None, 100, ), # 5
(6, TType.LIST, 'row_filter', (TType.STRUCT,(IndexExpression, IndexExpression.thrift_spec)), None, ), # 6
)
def __init__(self, start_key=None, end_key=None, start_token=None, end_token=None, row_filter=None, count=thrift_spec[5][4],):
self.start_key = start_key
self.end_key = end_key
self.start_token = start_token
self.end_token = end_token
self.row_filter = row_filter
self.count = count
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.start_key = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.end_key = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.start_token = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.end_token = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.LIST:
self.row_filter = []
(_etype31, _size28) = iprot.readListBegin()
for _i32 in xrange(_size28):
_elem33 = IndexExpression()
_elem33.read(iprot)
self.row_filter.append(_elem33)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.I32:
self.count = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('KeyRange')
if self.start_key is not None:
oprot.writeFieldBegin('start_key', TType.STRING, 1)
oprot.writeString(self.start_key)
oprot.writeFieldEnd()
if self.end_key is not None:
oprot.writeFieldBegin('end_key', TType.STRING, 2)
oprot.writeString(self.end_key)
oprot.writeFieldEnd()
if self.start_token is not None:
oprot.writeFieldBegin('start_token', TType.STRING, 3)
oprot.writeString(self.start_token)
oprot.writeFieldEnd()
if self.end_token is not None:
oprot.writeFieldBegin('end_token', TType.STRING, 4)
oprot.writeString(self.end_token)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 5)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
if self.row_filter is not None:
oprot.writeFieldBegin('row_filter', TType.LIST, 6)
oprot.writeListBegin(TType.STRUCT, len(self.row_filter))
for iter34 in self.row_filter:
iter34.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.count is None:
raise TProtocol.TProtocolException(message='Required field count is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
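# Illustrative sketch (editorial addition): fill in either the key pair or the token pair,
# not both, keeping in mind the start-inclusive key / start-exclusive token semantics
# described in the docstring above.
#
#   key_range = KeyRange(start_key='', end_key='', count=100)          # all rows, paged by count
#   token_range = KeyRange(start_token='0', end_token='0', count=100)  # tokenY..tokenY == full ring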
class KeySlice:
"""
  A KeySlice is a key followed by the data it maps to. A collection of KeySlices is returned by the get_range_slices operation.
@param key. a row key
@param columns. List of data represented by the key. Typically, the list is pared down to only the columns specified by
a SlicePredicate.
Attributes:
- key
- columns
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'key', None, None, ), # 1
(2, TType.LIST, 'columns', (TType.STRUCT,(ColumnOrSuperColumn, ColumnOrSuperColumn.thrift_spec)), None, ), # 2
)
def __init__(self, key=None, columns=None,):
self.key = key
self.columns = columns
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.columns = []
(_etype38, _size35) = iprot.readListBegin()
for _i39 in xrange(_size35):
_elem40 = ColumnOrSuperColumn()
_elem40.read(iprot)
self.columns.append(_elem40)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('KeySlice')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key)
oprot.writeFieldEnd()
if self.columns is not None:
oprot.writeFieldBegin('columns', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.columns))
for iter41 in self.columns:
iter41.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.key is None:
raise TProtocol.TProtocolException(message='Required field key is unset!')
if self.columns is None:
raise TProtocol.TProtocolException(message='Required field columns is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class KeyCount:
"""
Attributes:
- key
- count
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'key', None, None, ), # 1
(2, TType.I32, 'count', None, None, ), # 2
)
def __init__(self, key=None, count=None,):
self.key = key
self.count = count
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.count = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('KeyCount')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 2)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.key is None:
raise TProtocol.TProtocolException(message='Required field key is unset!')
if self.count is None:
raise TProtocol.TProtocolException(message='Required field count is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class Deletion:
"""
  Note that the timestamp is optional only in the case of a counter deletion.
Attributes:
- timestamp
- super_column
- predicate
"""
thrift_spec = (
None, # 0
(1, TType.I64, 'timestamp', None, None, ), # 1
(2, TType.STRING, 'super_column', None, None, ), # 2
(3, TType.STRUCT, 'predicate', (SlicePredicate, SlicePredicate.thrift_spec), None, ), # 3
)
def __init__(self, timestamp=None, super_column=None, predicate=None,):
self.timestamp = timestamp
self.super_column = super_column
self.predicate = predicate
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I64:
self.timestamp = iprot.readI64();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.super_column = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.predicate = SlicePredicate()
self.predicate.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Deletion')
if self.timestamp is not None:
oprot.writeFieldBegin('timestamp', TType.I64, 1)
oprot.writeI64(self.timestamp)
oprot.writeFieldEnd()
if self.super_column is not None:
oprot.writeFieldBegin('super_column', TType.STRING, 2)
oprot.writeString(self.super_column)
oprot.writeFieldEnd()
if self.predicate is not None:
oprot.writeFieldBegin('predicate', TType.STRUCT, 3)
self.predicate.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
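# Illustrative sketch (editorial addition): a Deletion with no predicate and no super_column
# removes the whole row; adding a predicate narrows it to specific columns. The timestamp
# values are hypothetical microsecond timestamps.
#
#   delete_row = Deletion(timestamp=1234567890000000)
#   delete_cols = Deletion(timestamp=1234567890000000,
#                          predicate=SlicePredicate(column_names=['Joe']))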
class Mutation:
"""
A Mutation is either an insert (represented by filling column_or_supercolumn) or a deletion (represented by filling the deletion attribute).
  @param column_or_supercolumn. An insert to a column or supercolumn (possibly a counter column or supercolumn)
@param deletion. A deletion of a column or supercolumn
Attributes:
- column_or_supercolumn
- deletion
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'column_or_supercolumn', (ColumnOrSuperColumn, ColumnOrSuperColumn.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'deletion', (Deletion, Deletion.thrift_spec), None, ), # 2
)
def __init__(self, column_or_supercolumn=None, deletion=None,):
self.column_or_supercolumn = column_or_supercolumn
self.deletion = deletion
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.column_or_supercolumn = ColumnOrSuperColumn()
self.column_or_supercolumn.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.deletion = Deletion()
self.deletion.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('Mutation')
if self.column_or_supercolumn is not None:
oprot.writeFieldBegin('column_or_supercolumn', TType.STRUCT, 1)
self.column_or_supercolumn.write(oprot)
oprot.writeFieldEnd()
if self.deletion is not None:
oprot.writeFieldBegin('deletion', TType.STRUCT, 2)
self.deletion.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
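# Illustrative sketch (editorial addition): exactly one of the two attributes is set per
# Mutation. Column and ColumnOrSuperColumn are the structs generated earlier in this module
# (constructor keywords assumed), and such mutations are typically grouped into the
# {key: {column_family: [Mutation, ...]}} map expected by the batch_mutate RPC in the
# companion generated client.
#
#   insert = Mutation(column_or_supercolumn=ColumnOrSuperColumn(
#       column=Column(name='full_name', value='Jane Doe', timestamp=1234567890000000)))
#   remove = Mutation(deletion=Deletion(timestamp=1234567890000001,
#                                       predicate=SlicePredicate(column_names=['full_name'])))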
class EndpointDetails:
"""
Attributes:
- host
- datacenter
- rack
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'host', None, None, ), # 1
(2, TType.STRING, 'datacenter', None, None, ), # 2
(3, TType.STRING, 'rack', None, None, ), # 3
)
def __init__(self, host=None, datacenter=None, rack=None,):
self.host = host
self.datacenter = datacenter
self.rack = rack
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.host = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.datacenter = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.rack = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('EndpointDetails')
if self.host is not None:
oprot.writeFieldBegin('host', TType.STRING, 1)
oprot.writeString(self.host)
oprot.writeFieldEnd()
if self.datacenter is not None:
oprot.writeFieldBegin('datacenter', TType.STRING, 2)
oprot.writeString(self.datacenter)
oprot.writeFieldEnd()
if self.rack is not None:
oprot.writeFieldBegin('rack', TType.STRING, 3)
oprot.writeString(self.rack)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class TokenRange:
"""
  A TokenRange describes part of the Cassandra ring: it is a mapping from a range to the
  endpoints responsible for that range.
@param start_token The first token in the range
@param end_token The last token in the range
@param endpoints The endpoints responsible for the range (listed by their configured listen_address)
@param rpc_endpoints The endpoints responsible for the range (listed by their configured rpc_address)
Attributes:
- start_token
- end_token
- endpoints
- rpc_endpoints
- endpoint_details
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'start_token', None, None, ), # 1
(2, TType.STRING, 'end_token', None, None, ), # 2
(3, TType.LIST, 'endpoints', (TType.STRING,None), None, ), # 3
(4, TType.LIST, 'rpc_endpoints', (TType.STRING,None), None, ), # 4
(5, TType.LIST, 'endpoint_details', (TType.STRUCT,(EndpointDetails, EndpointDetails.thrift_spec)), None, ), # 5
)
def __init__(self, start_token=None, end_token=None, endpoints=None, rpc_endpoints=None, endpoint_details=None,):
self.start_token = start_token
self.end_token = end_token
self.endpoints = endpoints
self.rpc_endpoints = rpc_endpoints
self.endpoint_details = endpoint_details
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.start_token = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.end_token = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.endpoints = []
(_etype45, _size42) = iprot.readListBegin()
for _i46 in xrange(_size42):
_elem47 = iprot.readString();
self.endpoints.append(_elem47)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.LIST:
self.rpc_endpoints = []
(_etype51, _size48) = iprot.readListBegin()
for _i52 in xrange(_size48):
_elem53 = iprot.readString();
self.rpc_endpoints.append(_elem53)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.endpoint_details = []
(_etype57, _size54) = iprot.readListBegin()
for _i58 in xrange(_size54):
_elem59 = EndpointDetails()
_elem59.read(iprot)
self.endpoint_details.append(_elem59)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('TokenRange')
if self.start_token is not None:
oprot.writeFieldBegin('start_token', TType.STRING, 1)
oprot.writeString(self.start_token)
oprot.writeFieldEnd()
if self.end_token is not None:
oprot.writeFieldBegin('end_token', TType.STRING, 2)
oprot.writeString(self.end_token)
oprot.writeFieldEnd()
if self.endpoints is not None:
oprot.writeFieldBegin('endpoints', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.endpoints))
for iter60 in self.endpoints:
oprot.writeString(iter60)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.rpc_endpoints is not None:
oprot.writeFieldBegin('rpc_endpoints', TType.LIST, 4)
oprot.writeListBegin(TType.STRING, len(self.rpc_endpoints))
for iter61 in self.rpc_endpoints:
oprot.writeString(iter61)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.endpoint_details is not None:
oprot.writeFieldBegin('endpoint_details', TType.LIST, 5)
oprot.writeListBegin(TType.STRUCT, len(self.endpoint_details))
for iter62 in self.endpoint_details:
iter62.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.start_token is None:
raise TProtocol.TProtocolException(message='Required field start_token is unset!')
if self.end_token is None:
raise TProtocol.TProtocolException(message='Required field end_token is unset!')
if self.endpoints is None:
raise TProtocol.TProtocolException(message='Required field endpoints is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
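# Illustrative sketch (editorial addition): TokenRange instances are normally returned by
# the describe_ring RPC in the companion generated client rather than built by hand; a
# hypothetical inspection loop over such a result:
#
#   for tr in ring:  # ring: list of TokenRange (assumed to come from describe_ring)
#       print '%s -> %s served by %s' % (tr.start_token, tr.end_token, ', '.join(tr.endpoints))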
class AuthenticationRequest:
"""
  Authentication requests can contain any data, depending on the IAuthenticator used
Attributes:
- credentials
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'credentials', (TType.STRING,None,TType.STRING,None), None, ), # 1
)
def __init__(self, credentials=None,):
self.credentials = credentials
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.credentials = {}
(_ktype64, _vtype65, _size63 ) = iprot.readMapBegin()
for _i67 in xrange(_size63):
_key68 = iprot.readString();
_val69 = iprot.readString();
self.credentials[_key68] = _val69
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('AuthenticationRequest')
if self.credentials is not None:
oprot.writeFieldBegin('credentials', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.credentials))
for kiter70,viter71 in self.credentials.items():
oprot.writeString(kiter70)
oprot.writeString(viter71)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.credentials is None:
raise TProtocol.TProtocolException(message='Required field credentials is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
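# Illustrative sketch (editorial addition): credentials is a free-form string map; the keys
# an IAuthenticator expects vary, with 'username'/'password' being the conventional pair
# (assumed here).
#
#   auth = AuthenticationRequest(credentials={'username': 'jsmith', 'password': 'secret'})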
class ColumnDef:
"""
Attributes:
- name
- validation_class
- index_type
- index_name
- index_options
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'validation_class', None, None, ), # 2
(3, TType.I32, 'index_type', None, None, ), # 3
(4, TType.STRING, 'index_name', None, None, ), # 4
(5, TType.MAP, 'index_options', (TType.STRING,None,TType.STRING,None), None, ), # 5
)
def __init__(self, name=None, validation_class=None, index_type=None, index_name=None, index_options=None,):
self.name = name
self.validation_class = validation_class
self.index_type = index_type
self.index_name = index_name
self.index_options = index_options
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.validation_class = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.index_type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.index_name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.MAP:
self.index_options = {}
(_ktype73, _vtype74, _size72 ) = iprot.readMapBegin()
for _i76 in xrange(_size72):
_key77 = iprot.readString();
_val78 = iprot.readString();
self.index_options[_key77] = _val78
iprot.readMapEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ColumnDef')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.validation_class is not None:
oprot.writeFieldBegin('validation_class', TType.STRING, 2)
oprot.writeString(self.validation_class)
oprot.writeFieldEnd()
if self.index_type is not None:
oprot.writeFieldBegin('index_type', TType.I32, 3)
oprot.writeI32(self.index_type)
oprot.writeFieldEnd()
if self.index_name is not None:
oprot.writeFieldBegin('index_name', TType.STRING, 4)
oprot.writeString(self.index_name)
oprot.writeFieldEnd()
if self.index_options is not None:
oprot.writeFieldBegin('index_options', TType.MAP, 5)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.index_options))
for kiter79,viter80 in self.index_options.items():
oprot.writeString(kiter79)
oprot.writeString(viter80)
oprot.writeMapEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
if self.validation_class is None:
raise TProtocol.TProtocolException(message='Required field validation_class is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
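# Illustrative sketch (editorial addition): a ColumnDef that also declares a secondary
# index. The index_type value assumes the IndexType enum generated elsewhere in this module
# (KEYS == 0), and the short validation_class name assumes server-side alias resolution.
#
#   col = ColumnDef(name='birth_year', validation_class='LongType',
#                   index_type=0, index_name='birth_year_idx')  # 0 == IndexType.KEYS (assumed)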
class CfDef:
"""
Attributes:
- keyspace
- name
- column_type
- comparator_type
- subcomparator_type
- comment
- read_repair_chance
- column_metadata
- gc_grace_seconds
- default_validation_class
- id
- min_compaction_threshold
- max_compaction_threshold
- replicate_on_write
- key_validation_class
- key_alias
- compaction_strategy
- compaction_strategy_options
- compression_options
- bloom_filter_fp_chance
- caching
- column_aliases
- value_alias
- dclocal_read_repair_chance
- row_cache_size: @deprecated
- key_cache_size: @deprecated
- row_cache_save_period_in_seconds: @deprecated
- key_cache_save_period_in_seconds: @deprecated
- memtable_flush_after_mins: @deprecated
- memtable_throughput_in_mb: @deprecated
- memtable_operations_in_millions: @deprecated
- merge_shards_chance: @deprecated
- row_cache_provider: @deprecated
- row_cache_keys_to_save: @deprecated
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'keyspace', None, None, ), # 1
(2, TType.STRING, 'name', None, None, ), # 2
(3, TType.STRING, 'column_type', None, "Standard", ), # 3
None, # 4
(5, TType.STRING, 'comparator_type', None, "BytesType", ), # 5
(6, TType.STRING, 'subcomparator_type', None, None, ), # 6
None, # 7
(8, TType.STRING, 'comment', None, None, ), # 8
(9, TType.DOUBLE, 'row_cache_size', None, None, ), # 9
None, # 10
(11, TType.DOUBLE, 'key_cache_size', None, None, ), # 11
(12, TType.DOUBLE, 'read_repair_chance', None, None, ), # 12
(13, TType.LIST, 'column_metadata', (TType.STRUCT,(ColumnDef, ColumnDef.thrift_spec)), None, ), # 13
(14, TType.I32, 'gc_grace_seconds', None, None, ), # 14
(15, TType.STRING, 'default_validation_class', None, None, ), # 15
(16, TType.I32, 'id', None, None, ), # 16
(17, TType.I32, 'min_compaction_threshold', None, None, ), # 17
(18, TType.I32, 'max_compaction_threshold', None, None, ), # 18
(19, TType.I32, 'row_cache_save_period_in_seconds', None, None, ), # 19
(20, TType.I32, 'key_cache_save_period_in_seconds', None, None, ), # 20
(21, TType.I32, 'memtable_flush_after_mins', None, None, ), # 21
(22, TType.I32, 'memtable_throughput_in_mb', None, None, ), # 22
(23, TType.DOUBLE, 'memtable_operations_in_millions', None, None, ), # 23
(24, TType.BOOL, 'replicate_on_write', None, None, ), # 24
(25, TType.DOUBLE, 'merge_shards_chance', None, None, ), # 25
(26, TType.STRING, 'key_validation_class', None, None, ), # 26
(27, TType.STRING, 'row_cache_provider', None, None, ), # 27
(28, TType.STRING, 'key_alias', None, None, ), # 28
(29, TType.STRING, 'compaction_strategy', None, None, ), # 29
(30, TType.MAP, 'compaction_strategy_options', (TType.STRING,None,TType.STRING,None), None, ), # 30
(31, TType.I32, 'row_cache_keys_to_save', None, None, ), # 31
(32, TType.MAP, 'compression_options', (TType.STRING,None,TType.STRING,None), None, ), # 32
(33, TType.DOUBLE, 'bloom_filter_fp_chance', None, None, ), # 33
(34, TType.STRING, 'caching', None, "keys_only", ), # 34
(35, TType.LIST, 'column_aliases', (TType.STRING,None), None, ), # 35
(36, TType.STRING, 'value_alias', None, None, ), # 36
(37, TType.DOUBLE, 'dclocal_read_repair_chance', None, 0, ), # 37
)
def __init__(self, keyspace=None, name=None, column_type=thrift_spec[3][4], comparator_type=thrift_spec[5][4], subcomparator_type=None, comment=None, read_repair_chance=None, column_metadata=None, gc_grace_seconds=None, default_validation_class=None, id=None, min_compaction_threshold=None, max_compaction_threshold=None, replicate_on_write=None, key_validation_class=None, key_alias=None, compaction_strategy=None, compaction_strategy_options=None, compression_options=None, bloom_filter_fp_chance=None, caching=thrift_spec[34][4], column_aliases=None, value_alias=None, dclocal_read_repair_chance=thrift_spec[37][4], row_cache_size=None, key_cache_size=None, row_cache_save_period_in_seconds=None, key_cache_save_period_in_seconds=None, memtable_flush_after_mins=None, memtable_throughput_in_mb=None, memtable_operations_in_millions=None, merge_shards_chance=None, row_cache_provider=None, row_cache_keys_to_save=None,):
self.keyspace = keyspace
self.name = name
self.column_type = column_type
self.comparator_type = comparator_type
self.subcomparator_type = subcomparator_type
self.comment = comment
self.read_repair_chance = read_repair_chance
self.column_metadata = column_metadata
self.gc_grace_seconds = gc_grace_seconds
self.default_validation_class = default_validation_class
self.id = id
self.min_compaction_threshold = min_compaction_threshold
self.max_compaction_threshold = max_compaction_threshold
self.replicate_on_write = replicate_on_write
self.key_validation_class = key_validation_class
self.key_alias = key_alias
self.compaction_strategy = compaction_strategy
self.compaction_strategy_options = compaction_strategy_options
self.compression_options = compression_options
self.bloom_filter_fp_chance = bloom_filter_fp_chance
self.caching = caching
self.column_aliases = column_aliases
self.value_alias = value_alias
self.dclocal_read_repair_chance = dclocal_read_repair_chance
self.row_cache_size = row_cache_size
self.key_cache_size = key_cache_size
self.row_cache_save_period_in_seconds = row_cache_save_period_in_seconds
self.key_cache_save_period_in_seconds = key_cache_save_period_in_seconds
self.memtable_flush_after_mins = memtable_flush_after_mins
self.memtable_throughput_in_mb = memtable_throughput_in_mb
self.memtable_operations_in_millions = memtable_operations_in_millions
self.merge_shards_chance = merge_shards_chance
self.row_cache_provider = row_cache_provider
self.row_cache_keys_to_save = row_cache_keys_to_save
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.keyspace = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.column_type = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.comparator_type = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRING:
self.subcomparator_type = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 8:
if ftype == TType.STRING:
self.comment = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 12:
if ftype == TType.DOUBLE:
self.read_repair_chance = iprot.readDouble();
else:
iprot.skip(ftype)
elif fid == 13:
if ftype == TType.LIST:
self.column_metadata = []
(_etype84, _size81) = iprot.readListBegin()
for _i85 in xrange(_size81):
_elem86 = ColumnDef()
_elem86.read(iprot)
self.column_metadata.append(_elem86)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 14:
if ftype == TType.I32:
self.gc_grace_seconds = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 15:
if ftype == TType.STRING:
self.default_validation_class = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 16:
if ftype == TType.I32:
self.id = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 17:
if ftype == TType.I32:
self.min_compaction_threshold = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 18:
if ftype == TType.I32:
self.max_compaction_threshold = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 24:
if ftype == TType.BOOL:
self.replicate_on_write = iprot.readBool();
else:
iprot.skip(ftype)
elif fid == 26:
if ftype == TType.STRING:
self.key_validation_class = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 28:
if ftype == TType.STRING:
self.key_alias = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 29:
if ftype == TType.STRING:
self.compaction_strategy = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 30:
if ftype == TType.MAP:
self.compaction_strategy_options = {}
(_ktype88, _vtype89, _size87 ) = iprot.readMapBegin()
for _i91 in xrange(_size87):
_key92 = iprot.readString();
_val93 = iprot.readString();
self.compaction_strategy_options[_key92] = _val93
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 32:
if ftype == TType.MAP:
self.compression_options = {}
(_ktype95, _vtype96, _size94 ) = iprot.readMapBegin()
for _i98 in xrange(_size94):
_key99 = iprot.readString();
_val100 = iprot.readString();
self.compression_options[_key99] = _val100
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 33:
if ftype == TType.DOUBLE:
self.bloom_filter_fp_chance = iprot.readDouble();
else:
iprot.skip(ftype)
elif fid == 34:
if ftype == TType.STRING:
self.caching = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 35:
if ftype == TType.LIST:
self.column_aliases = []
(_etype104, _size101) = iprot.readListBegin()
for _i105 in xrange(_size101):
_elem106 = iprot.readString();
self.column_aliases.append(_elem106)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 36:
if ftype == TType.STRING:
self.value_alias = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 37:
if ftype == TType.DOUBLE:
self.dclocal_read_repair_chance = iprot.readDouble();
else:
iprot.skip(ftype)
elif fid == 9:
if ftype == TType.DOUBLE:
self.row_cache_size = iprot.readDouble();
else:
iprot.skip(ftype)
elif fid == 11:
if ftype == TType.DOUBLE:
self.key_cache_size = iprot.readDouble();
else:
iprot.skip(ftype)
elif fid == 19:
if ftype == TType.I32:
self.row_cache_save_period_in_seconds = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 20:
if ftype == TType.I32:
self.key_cache_save_period_in_seconds = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 21:
if ftype == TType.I32:
self.memtable_flush_after_mins = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 22:
if ftype == TType.I32:
self.memtable_throughput_in_mb = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 23:
if ftype == TType.DOUBLE:
self.memtable_operations_in_millions = iprot.readDouble();
else:
iprot.skip(ftype)
elif fid == 25:
if ftype == TType.DOUBLE:
self.merge_shards_chance = iprot.readDouble();
else:
iprot.skip(ftype)
elif fid == 27:
if ftype == TType.STRING:
self.row_cache_provider = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 31:
if ftype == TType.I32:
self.row_cache_keys_to_save = iprot.readI32();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CfDef')
if self.keyspace is not None:
oprot.writeFieldBegin('keyspace', TType.STRING, 1)
oprot.writeString(self.keyspace)
oprot.writeFieldEnd()
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 2)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.column_type is not None:
oprot.writeFieldBegin('column_type', TType.STRING, 3)
oprot.writeString(self.column_type)
oprot.writeFieldEnd()
if self.comparator_type is not None:
oprot.writeFieldBegin('comparator_type', TType.STRING, 5)
oprot.writeString(self.comparator_type)
oprot.writeFieldEnd()
if self.subcomparator_type is not None:
oprot.writeFieldBegin('subcomparator_type', TType.STRING, 6)
oprot.writeString(self.subcomparator_type)
oprot.writeFieldEnd()
if self.comment is not None:
oprot.writeFieldBegin('comment', TType.STRING, 8)
oprot.writeString(self.comment)
oprot.writeFieldEnd()
if self.row_cache_size is not None:
oprot.writeFieldBegin('row_cache_size', TType.DOUBLE, 9)
oprot.writeDouble(self.row_cache_size)
oprot.writeFieldEnd()
if self.key_cache_size is not None:
oprot.writeFieldBegin('key_cache_size', TType.DOUBLE, 11)
oprot.writeDouble(self.key_cache_size)
oprot.writeFieldEnd()
if self.read_repair_chance is not None:
oprot.writeFieldBegin('read_repair_chance', TType.DOUBLE, 12)
oprot.writeDouble(self.read_repair_chance)
oprot.writeFieldEnd()
if self.column_metadata is not None:
oprot.writeFieldBegin('column_metadata', TType.LIST, 13)
oprot.writeListBegin(TType.STRUCT, len(self.column_metadata))
for iter107 in self.column_metadata:
iter107.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.gc_grace_seconds is not None:
oprot.writeFieldBegin('gc_grace_seconds', TType.I32, 14)
oprot.writeI32(self.gc_grace_seconds)
oprot.writeFieldEnd()
if self.default_validation_class is not None:
oprot.writeFieldBegin('default_validation_class', TType.STRING, 15)
oprot.writeString(self.default_validation_class)
oprot.writeFieldEnd()
if self.id is not None:
oprot.writeFieldBegin('id', TType.I32, 16)
oprot.writeI32(self.id)
oprot.writeFieldEnd()
if self.min_compaction_threshold is not None:
oprot.writeFieldBegin('min_compaction_threshold', TType.I32, 17)
oprot.writeI32(self.min_compaction_threshold)
oprot.writeFieldEnd()
if self.max_compaction_threshold is not None:
oprot.writeFieldBegin('max_compaction_threshold', TType.I32, 18)
oprot.writeI32(self.max_compaction_threshold)
oprot.writeFieldEnd()
if self.row_cache_save_period_in_seconds is not None:
oprot.writeFieldBegin('row_cache_save_period_in_seconds', TType.I32, 19)
oprot.writeI32(self.row_cache_save_period_in_seconds)
oprot.writeFieldEnd()
if self.key_cache_save_period_in_seconds is not None:
oprot.writeFieldBegin('key_cache_save_period_in_seconds', TType.I32, 20)
oprot.writeI32(self.key_cache_save_period_in_seconds)
oprot.writeFieldEnd()
if self.memtable_flush_after_mins is not None:
oprot.writeFieldBegin('memtable_flush_after_mins', TType.I32, 21)
oprot.writeI32(self.memtable_flush_after_mins)
oprot.writeFieldEnd()
if self.memtable_throughput_in_mb is not None:
oprot.writeFieldBegin('memtable_throughput_in_mb', TType.I32, 22)
oprot.writeI32(self.memtable_throughput_in_mb)
oprot.writeFieldEnd()
if self.memtable_operations_in_millions is not None:
oprot.writeFieldBegin('memtable_operations_in_millions', TType.DOUBLE, 23)
oprot.writeDouble(self.memtable_operations_in_millions)
oprot.writeFieldEnd()
if self.replicate_on_write is not None:
oprot.writeFieldBegin('replicate_on_write', TType.BOOL, 24)
oprot.writeBool(self.replicate_on_write)
oprot.writeFieldEnd()
if self.merge_shards_chance is not None:
oprot.writeFieldBegin('merge_shards_chance', TType.DOUBLE, 25)
oprot.writeDouble(self.merge_shards_chance)
oprot.writeFieldEnd()
if self.key_validation_class is not None:
oprot.writeFieldBegin('key_validation_class', TType.STRING, 26)
oprot.writeString(self.key_validation_class)
oprot.writeFieldEnd()
if self.row_cache_provider is not None:
oprot.writeFieldBegin('row_cache_provider', TType.STRING, 27)
oprot.writeString(self.row_cache_provider)
oprot.writeFieldEnd()
if self.key_alias is not None:
oprot.writeFieldBegin('key_alias', TType.STRING, 28)
oprot.writeString(self.key_alias)
oprot.writeFieldEnd()
if self.compaction_strategy is not None:
oprot.writeFieldBegin('compaction_strategy', TType.STRING, 29)
oprot.writeString(self.compaction_strategy)
oprot.writeFieldEnd()
if self.compaction_strategy_options is not None:
oprot.writeFieldBegin('compaction_strategy_options', TType.MAP, 30)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.compaction_strategy_options))
for kiter108,viter109 in self.compaction_strategy_options.items():
oprot.writeString(kiter108)
oprot.writeString(viter109)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.row_cache_keys_to_save is not None:
oprot.writeFieldBegin('row_cache_keys_to_save', TType.I32, 31)
oprot.writeI32(self.row_cache_keys_to_save)
oprot.writeFieldEnd()
if self.compression_options is not None:
oprot.writeFieldBegin('compression_options', TType.MAP, 32)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.compression_options))
for kiter110,viter111 in self.compression_options.items():
oprot.writeString(kiter110)
oprot.writeString(viter111)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.bloom_filter_fp_chance is not None:
oprot.writeFieldBegin('bloom_filter_fp_chance', TType.DOUBLE, 33)
oprot.writeDouble(self.bloom_filter_fp_chance)
oprot.writeFieldEnd()
if self.caching is not None:
oprot.writeFieldBegin('caching', TType.STRING, 34)
oprot.writeString(self.caching)
oprot.writeFieldEnd()
if self.column_aliases is not None:
oprot.writeFieldBegin('column_aliases', TType.LIST, 35)
oprot.writeListBegin(TType.STRING, len(self.column_aliases))
for iter112 in self.column_aliases:
oprot.writeString(iter112)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.value_alias is not None:
oprot.writeFieldBegin('value_alias', TType.STRING, 36)
oprot.writeString(self.value_alias)
oprot.writeFieldEnd()
if self.dclocal_read_repair_chance is not None:
oprot.writeFieldBegin('dclocal_read_repair_chance', TType.DOUBLE, 37)
oprot.writeDouble(self.dclocal_read_repair_chance)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.keyspace is None:
raise TProtocol.TProtocolException(message='Required field keyspace is unset!')
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
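# Illustrative sketch (editorial addition): only keyspace and name are required (see
# validate above); the other attributes fall back to the generated defaults such as
# column_type='Standard' and comparator_type='BytesType'.
#
#   cf = CfDef(keyspace='demo', name='users',
#              comparator_type='UTF8Type',
#              column_metadata=[ColumnDef(name='full_name', validation_class='UTF8Type')])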
class KsDef:
"""
Attributes:
- name
- strategy_class
- strategy_options
- replication_factor: @deprecated, ignored
- cf_defs
- durable_writes
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'name', None, None, ), # 1
(2, TType.STRING, 'strategy_class', None, None, ), # 2
(3, TType.MAP, 'strategy_options', (TType.STRING,None,TType.STRING,None), None, ), # 3
(4, TType.I32, 'replication_factor', None, None, ), # 4
(5, TType.LIST, 'cf_defs', (TType.STRUCT,(CfDef, CfDef.thrift_spec)), None, ), # 5
(6, TType.BOOL, 'durable_writes', None, True, ), # 6
)
def __init__(self, name=None, strategy_class=None, strategy_options=None, replication_factor=None, cf_defs=None, durable_writes=thrift_spec[6][4],):
self.name = name
self.strategy_class = strategy_class
self.strategy_options = strategy_options
self.replication_factor = replication_factor
self.cf_defs = cf_defs
self.durable_writes = durable_writes
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.strategy_class = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.MAP:
self.strategy_options = {}
(_ktype114, _vtype115, _size113 ) = iprot.readMapBegin()
for _i117 in xrange(_size113):
_key118 = iprot.readString();
_val119 = iprot.readString();
self.strategy_options[_key118] = _val119
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.I32:
self.replication_factor = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.LIST:
self.cf_defs = []
(_etype123, _size120) = iprot.readListBegin()
for _i124 in xrange(_size120):
_elem125 = CfDef()
_elem125.read(iprot)
self.cf_defs.append(_elem125)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.BOOL:
self.durable_writes = iprot.readBool();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('KsDef')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name)
oprot.writeFieldEnd()
if self.strategy_class is not None:
oprot.writeFieldBegin('strategy_class', TType.STRING, 2)
oprot.writeString(self.strategy_class)
oprot.writeFieldEnd()
if self.strategy_options is not None:
oprot.writeFieldBegin('strategy_options', TType.MAP, 3)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.strategy_options))
for kiter126,viter127 in self.strategy_options.items():
oprot.writeString(kiter126)
oprot.writeString(viter127)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.replication_factor is not None:
oprot.writeFieldBegin('replication_factor', TType.I32, 4)
oprot.writeI32(self.replication_factor)
oprot.writeFieldEnd()
if self.cf_defs is not None:
oprot.writeFieldBegin('cf_defs', TType.LIST, 5)
oprot.writeListBegin(TType.STRUCT, len(self.cf_defs))
for iter128 in self.cf_defs:
iter128.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.durable_writes is not None:
oprot.writeFieldBegin('durable_writes', TType.BOOL, 6)
oprot.writeBool(self.durable_writes)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.name is None:
raise TProtocol.TProtocolException(message='Required field name is unset!')
if self.strategy_class is None:
raise TProtocol.TProtocolException(message='Required field strategy_class is unset!')
if self.cf_defs is None:
raise TProtocol.TProtocolException(message='Required field cf_defs is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
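# Illustrative sketch (editorial addition): name, strategy_class and cf_defs are required
# (see validate above); the strategy class name and the 'replication_factor' option key
# follow the usual SimpleStrategy convention and are assumptions here.
#
#   ks = KsDef(name='demo',
#              strategy_class='org.apache.cassandra.locator.SimpleStrategy',
#              strategy_options={'replication_factor': '1'},
#              cf_defs=[])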
class CqlRow:
"""
Row returned from a CQL query
Attributes:
- key
- columns
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'key', None, None, ), # 1
(2, TType.LIST, 'columns', (TType.STRUCT,(Column, Column.thrift_spec)), None, ), # 2
)
def __init__(self, key=None, columns=None,):
self.key = key
self.columns = columns
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.columns = []
(_etype132, _size129) = iprot.readListBegin()
for _i133 in xrange(_size129):
_elem134 = Column()
_elem134.read(iprot)
self.columns.append(_elem134)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CqlRow')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key)
oprot.writeFieldEnd()
if self.columns is not None:
oprot.writeFieldBegin('columns', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.columns))
for iter135 in self.columns:
iter135.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.key is None:
raise TProtocol.TProtocolException(message='Required field key is unset!')
if self.columns is None:
raise TProtocol.TProtocolException(message='Required field columns is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CqlMetadata:
"""
Attributes:
- name_types
- value_types
- default_name_type
- default_value_type
"""
thrift_spec = (
None, # 0
(1, TType.MAP, 'name_types', (TType.STRING,None,TType.STRING,None), None, ), # 1
(2, TType.MAP, 'value_types', (TType.STRING,None,TType.STRING,None), None, ), # 2
(3, TType.STRING, 'default_name_type', None, None, ), # 3
(4, TType.STRING, 'default_value_type', None, None, ), # 4
)
def __init__(self, name_types=None, value_types=None, default_name_type=None, default_value_type=None,):
self.name_types = name_types
self.value_types = value_types
self.default_name_type = default_name_type
self.default_value_type = default_value_type
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.MAP:
self.name_types = {}
(_ktype137, _vtype138, _size136 ) = iprot.readMapBegin()
for _i140 in xrange(_size136):
_key141 = iprot.readString();
_val142 = iprot.readString();
self.name_types[_key141] = _val142
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.MAP:
self.value_types = {}
(_ktype144, _vtype145, _size143 ) = iprot.readMapBegin()
for _i147 in xrange(_size143):
_key148 = iprot.readString();
_val149 = iprot.readString();
self.value_types[_key148] = _val149
iprot.readMapEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.default_name_type = iprot.readString();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.default_value_type = iprot.readString();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CqlMetadata')
if self.name_types is not None:
oprot.writeFieldBegin('name_types', TType.MAP, 1)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.name_types))
for kiter150,viter151 in self.name_types.items():
oprot.writeString(kiter150)
oprot.writeString(viter151)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.value_types is not None:
oprot.writeFieldBegin('value_types', TType.MAP, 2)
oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.value_types))
for kiter152,viter153 in self.value_types.items():
oprot.writeString(kiter152)
oprot.writeString(viter153)
oprot.writeMapEnd()
oprot.writeFieldEnd()
if self.default_name_type is not None:
oprot.writeFieldBegin('default_name_type', TType.STRING, 3)
oprot.writeString(self.default_name_type)
oprot.writeFieldEnd()
if self.default_value_type is not None:
oprot.writeFieldBegin('default_value_type', TType.STRING, 4)
oprot.writeString(self.default_value_type)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.name_types is None:
raise TProtocol.TProtocolException(message='Required field name_types is unset!')
if self.value_types is None:
raise TProtocol.TProtocolException(message='Required field value_types is unset!')
if self.default_name_type is None:
raise TProtocol.TProtocolException(message='Required field default_name_type is unset!')
if self.default_value_type is None:
raise TProtocol.TProtocolException(message='Required field default_value_type is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CqlResult:
"""
Attributes:
- type
- rows
- num
- schema
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'type', None, None, ), # 1
(2, TType.LIST, 'rows', (TType.STRUCT,(CqlRow, CqlRow.thrift_spec)), None, ), # 2
(3, TType.I32, 'num', None, None, ), # 3
(4, TType.STRUCT, 'schema', (CqlMetadata, CqlMetadata.thrift_spec), None, ), # 4
)
def __init__(self, type=None, rows=None, num=None, schema=None,):
self.type = type
self.rows = rows
self.num = num
self.schema = schema
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.type = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.rows = []
(_etype157, _size154) = iprot.readListBegin()
for _i158 in xrange(_size154):
_elem159 = CqlRow()
_elem159.read(iprot)
self.rows.append(_elem159)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.num = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.schema = CqlMetadata()
self.schema.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CqlResult')
if self.type is not None:
oprot.writeFieldBegin('type', TType.I32, 1)
oprot.writeI32(self.type)
oprot.writeFieldEnd()
if self.rows is not None:
oprot.writeFieldBegin('rows', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.rows))
for iter160 in self.rows:
iter160.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.num is not None:
oprot.writeFieldBegin('num', TType.I32, 3)
oprot.writeI32(self.num)
oprot.writeFieldEnd()
if self.schema is not None:
oprot.writeFieldBegin('schema', TType.STRUCT, 4)
self.schema.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.type is None:
raise TProtocol.TProtocolException(message='Required field type is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class CqlPreparedResult:
"""
Attributes:
- itemId
- count
- variable_types
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'itemId', None, None, ), # 1
(2, TType.I32, 'count', None, None, ), # 2
(3, TType.LIST, 'variable_types', (TType.STRING,None), None, ), # 3
)
def __init__(self, itemId=None, count=None, variable_types=None,):
self.itemId = itemId
self.count = count
self.variable_types = variable_types
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.itemId = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.count = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.variable_types = []
(_etype164, _size161) = iprot.readListBegin()
for _i165 in xrange(_size161):
_elem166 = iprot.readString();
self.variable_types.append(_elem166)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('CqlPreparedResult')
if self.itemId is not None:
oprot.writeFieldBegin('itemId', TType.I32, 1)
oprot.writeI32(self.itemId)
oprot.writeFieldEnd()
if self.count is not None:
oprot.writeFieldBegin('count', TType.I32, 2)
oprot.writeI32(self.count)
oprot.writeFieldEnd()
if self.variable_types is not None:
oprot.writeFieldBegin('variable_types', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.variable_types))
for iter167 in self.variable_types:
oprot.writeString(iter167)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.itemId is None:
raise TProtocol.TProtocolException(message='Required field itemId is unset!')
if self.count is None:
raise TProtocol.TProtocolException(message='Required field count is unset!')
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
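# --- Editorial usage sketch (not part of the generated Thrift code) ---------
# A minimal example of how these generated structs are typically serialized
# and deserialized through an in-memory transport.  The field values are
# illustrative assumptions only.
def _example_cql_prepared_result_roundtrip():
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    original = CqlPreparedResult(itemId=1, count=2,
                                 variable_types=['UTF8Type', 'Int32Type'])
    # Serialize the struct into a byte buffer.
    write_buffer = TTransport.TMemoryBuffer()
    original.write(TBinaryProtocol.TBinaryProtocol(write_buffer))
    payload = write_buffer.getvalue()
    # Deserialize the same bytes back into a fresh struct.
    restored = CqlPreparedResult()
    restored.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(payload)))
    assert restored == original
    return restored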
|
apache-2.0
|
m4sth0/sauventory
|
tests/test_ranktransform.py
|
1
|
4574
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2016
# Author(s):
# Thomas Leppelt <thomas.leppelt@dwd.de>
# This file is part of sauventory.
# Spatial Autocorrelated Uncertainty of Inventories
# sauventory is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# sauventory is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# sauventory comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
# This is free software, and you are welcome to redistribute it
# under certain conditions; type `show c' for details.
"""
This module performs unit tests for the rank transformation module.
Testing the module with the example from Iman & Conover 1981:
https://www.uio.no/studier/emner/matnat/math/STK4400/v05/undervisningsmateriale
/A%20distribution-free%20approach%20to%20rank%20correlation.pdf
"""
import numpy as np
import unittest
from sauventory import ranktransform
class RankTransformTest(unittest.TestCase):
def setUp(self):
# Matrix R with n independent sampled columns k, R = kxn.
self.r = np.array([[1.534, 1.534, -1.534, -1.534, .489, -.319],
[-.887, -.489, .887, -.887, -.157, .674],
[-.489, .674, -.489, 1.150, 1.534, -.489],
[.887, 0.000, -.674, .319, 0.000, -1.534],
[1.150, -.319, .489, .674, .157, 1.150],
[.157, -1.534, -.887, -.674, -.319, .157],
[-1.150, -.674, -.157, .157, -1.534, -.157],
[0.000, -.887, .157, -.319, -.674, .887],
[.319, -.157, .674, .887, .574, 1.534],
[-.319, .157, -.319, -1.150, 1.150, -.887],
[-1.534, .887, 1.150, 1.534, -.489, -1.150],
[-.157, -1.150, 1.534, -.157, -1.150, -.674],
[.489, .489, -1.150, .489, -.887, 0.000],
[.674, .319, .319, 0.000, .887, .319],
[-.674, 1.150, 0.000, -.489, .319, .489]])
# Example target correlation matrix.
self.c_star = np.array([[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, .75, -.70],
[0, 0, 0, .75, 1, -.95],
[0, 0, 0, -.70, -.95, 1]])
# Resulting sample columns rearranged to match the given correlation matrix.
self.res = np.array([[1.534, 1.534, -1.534, -1.534, -0.887, 0.489],
[-0.887, -0.489, 0.887, -0.887, -0.674, 1.15],
[-0.489, 0.674, -0.489, 1.15, 1.534, -1.534],
[0.887, 0., -0.674, 0.319, 0.319, -0.887],
[1.15, -0.319, 0.489, 0.674, 0.574, -0.319],
[0.157, -1.534, -0.887, -0.674, -0.489, 0.674],
[-1.15, -0.674, -0.157, 0.157, -1.534, 0.887],
[0., -0.887, 0.157, -0.319, -0.319, 1.534],
[0.319, -0.157, 0.674, 0.887, 1.15, -0.674],
[-0.319, 0.157, -0.319, -1.15, 0.157, -0.157],
[-1.534, 0.887, 1.15, 1.534, 0.887, -1.15],
[-0.157, -1.15, 1.534, -0.157, -1.15, 0.319],
[0.489, 0.489, -1.15, 0.489, -0.157, 0.],
[0.674, 0.319, 0.319, 0., 0.489, -0.489],
[-0.674, 1.15, 0., -0.489, 0., 0.157]])
def tearDown(self):
pass
def test_conover(self):
r_cor = ranktransform.transform_by_corrmat(self.r, self.c_star)
compare = r_cor == self.res
self.assertEqual(compare.all(), True)
def suite():
loader = unittest.TestLoader()
suite = unittest.TestSuite()
suite.addTest(loader.loadTestsFromTestCase(RankTransformTest))
return suite
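# --- Editorial usage sketch (not part of the original test module) ----------
# The module docstring above describes the Iman & Conover rank-correlation
# approach; this sketch shows how transform_by_corrmat might be called
# directly.  The sample shape and target correlation below are illustrative
# assumptions only.
def _example_transform_by_corrmat():
    rng = np.random.RandomState(42)
    r = rng.normal(size=(15, 2))            # two independent sample columns
    c_star = np.array([[1.0, 0.8],
                       [0.8, 1.0]])         # desired rank correlation matrix
    r_cor = ranktransform.transform_by_corrmat(r, c_star)
    # The empirical correlation of the rearranged columns should be close
    # to c_star.
    return np.corrcoef(r_cor, rowvar=False)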
if __name__ == "__main__":
unittest.TextTestRunner(verbosity=2).run(suite())
|
gpl-3.0
|
petabricks/petabricks
|
scripts/misc/graphgen.py
|
2
|
3272
|
#!/usr/bin/python
import csv,optparse,os,sys
import pbutil
import storagedirs
import candidatetester
from candidatetester import Candidate, CandidateTester
from sgatuner import mainname
from configtool import ConfigFile
from traininginfo import TrainingInfo
import tunerwarnings
import warnings
options=None
def main(benchmark, n, filename):
if os.path.isdir(filename):
filename=os.path.join(filename, 'stats/candidatelog.csv')
f = open(filename)
infoxml = TrainingInfo(pbutil.benchmarkToInfo(benchmark))
main = mainname([pbutil.benchmarkToBin(benchmark)])
infoxml = infoxml.transform(main)
binpath=pbutil.benchmarkToBin(benchmark)
tester = CandidateTester(benchmark, n)
root = os.path.dirname(filename)
def findconfig(c):
if c[0]=='/':
c=c[1:]
if os.path.isfile(os.path.join(root, c)):
return os.path.join(root, c)
if os.path.isfile(os.path.join(root, '..', c)):
return os.path.join(root, '..', c)
return None
rows = list(csv.DictReader(f))
for i, row in enumerate(rows):
if options.onlyrounds \
and i+1<len(rows) \
and row.has_key('round_number') \
and rows[i+1]['round_number']==row['round_number']:
continue
config = findconfig(row['config_path'])
row['tests'] = int(row['tests_complete'])+int(row['tests_timeout'])+int(row['tests_crashed'])
candidate = Candidate(ConfigFile(config), infoxml)
while candidate.numTests(n)<options.trials:
try:
tester.testN(candidate, options.trials, options.timeout)
except candidatetester.CrashException, e:
print >>sys.stderr, e
try:
row['minperf'] = candidate.metrics[0][n].min()
row['perf_on_%d'%n], row['perf_on_%d_ci'%n] = candidate.metrics[0][n].interval(options.confidence)
row['invperf']=1.0/row['perf_on_%d'%n]
except Exception,e:
row['minperf'] = -1
row['perf_on_%d'%n] = -1
print >>sys.stderr, e
row['include']=True
print >>sys.stderr, row['time'], row['minperf'], row['perf_on_%d'%n], ' (%d of %d)'%(i, len(rows))
rows = filter(lambda x: x.has_key('include'), rows)
headers = ['time','minperf', 'perf_on_%d'%n, 'perf_on_%d_ci'%n, 'tests', 'candidates', 'input_size', 'invperf', 'tests_timeout']
print '#',','.join(headers)
t=csv.DictWriter(sys.stdout, headers, extrasaction='ignore')
t.writerows(rows)
if __name__ == "__main__":
from optparse import OptionParser
parser = OptionParser(usage="usage: graphgen.py [options] benchmark candidatelog.csv")
parser.add_option('--trials', type='int', default=10)
parser.add_option('--confidence', type='float', default=.95)
parser.add_option('--timeout', type='float', default=5.0)
parser.add_option('--onlyrounds', type='int', default=True)
parser.add_option('-n', type='int', default=1024)
warnings.simplefilter('ignore', tunerwarnings.TooManyTrials)
(options, args) = parser.parse_args()
if len(args)!=2:
parser.print_usage()
sys.exit(1)
benchmark=args[0]
config=os.path.abspath(args[1])
pbutil.chdirToPetabricksRoot();
#pbutil.compilePetabricks();
benchmark=pbutil.normalizeBenchmarkName(benchmark)
#pbutil.compileBenchmarks([benchmark])
storagedirs.callWithLogDir(lambda: main(benchmark, options.n, config), '/tmp', True)
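# Editorial usage note (not part of the original script; the invocation shown
# is an assumption based on the OptionParser usage string above):
#   ./scripts/misc/graphgen.py --trials 10 --timeout 5.0 -n 1024 <benchmark> <path/to/candidatelog.csv>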
|
mit
|
0Chencc/CTFCrackTools
|
Lib/test/test_macostools.py
|
91
|
2956
|
# Copyright (C) 2003 Python Software Foundation
import unittest
import os
import sys
from test import test_support
MacOS = test_support.import_module('MacOS')
#The following modules should exist if MacOS exists.
import Carbon.File
import macostools
TESTFN2 = test_support.TESTFN + '2'
class TestMacostools(unittest.TestCase):
def setUp(self):
fp = open(test_support.TESTFN, 'w')
fp.write('hello world\n')
fp.close()
rfp = MacOS.openrf(test_support.TESTFN, '*wb')
rfp.write('goodbye world\n')
rfp.close()
def tearDown(self):
test_support.unlink(test_support.TESTFN)
test_support.unlink(TESTFN2)
def compareData(self):
fp = open(test_support.TESTFN, 'r')
data1 = fp.read()
fp.close()
fp = open(TESTFN2, 'r')
data2 = fp.read()
fp.close()
if data1 != data2:
return 'Data forks differ'
rfp = MacOS.openrf(test_support.TESTFN, '*rb')
data1 = rfp.read(1000)
rfp.close()
rfp = MacOS.openrf(TESTFN2, '*rb')
data2 = rfp.read(1000)
rfp.close()
if data1 != data2:
return 'Resource forks differ'
return ''
def test_touched(self):
# This really only tests that nothing unforeseen happens.
with test_support.check_warnings(('macostools.touched*',
DeprecationWarning), quiet=True):
macostools.touched(test_support.TESTFN)
if sys.maxint < 2**32:
def test_copy(self):
test_support.unlink(TESTFN2)
macostools.copy(test_support.TESTFN, TESTFN2)
self.assertEqual(self.compareData(), '')
if sys.maxint < 2**32:
def test_mkalias(self):
test_support.unlink(TESTFN2)
macostools.mkalias(test_support.TESTFN, TESTFN2)
fss, _, _ = Carbon.File.ResolveAliasFile(TESTFN2, 0)
self.assertEqual(fss.as_pathname(), os.path.realpath(test_support.TESTFN))
def test_mkalias_relative(self):
test_support.unlink(TESTFN2)
# If the directory doesn't exist, then chances are this is a new
# install of Python so don't create it since the user might end up
# running ``sudo make install`` and creating the directory here won't
# leave it with the proper permissions.
if not os.path.exists(sys.prefix):
return
macostools.mkalias(test_support.TESTFN, TESTFN2, sys.prefix)
fss, _, _ = Carbon.File.ResolveAliasFile(TESTFN2, 0)
self.assertEqual(fss.as_pathname(), os.path.realpath(test_support.TESTFN))
def test_main():
# Skip on wide unicode
if len(u'\0'.encode('unicode-internal')) == 4:
raise unittest.SkipTest("test_macostools is broken in UCS4")
test_support.run_unittest(TestMacostools)
if __name__ == '__main__':
test_main()
|
gpl-3.0
|
T2DREAM/t2dream-portal
|
src/encoded/upgrade/file.py
|
1
|
22524
|
from snovault import upgrade_step
from pyramid.traversal import find_root
from datetime import datetime, time
@upgrade_step('file', '', '2')
def file_0_2(value, system):
# http://redmine.encodedcc.org/issues/1295
# http://redmine.encodedcc.org/issues/1307
if 'status' in value:
value['status'] = value['status'].lower()
@upgrade_step('file', '2', '3')
def file_2_3(value, system):
# http://redmine.encodedcc.org/issues/1572
file_format = value.get('file_format')
file_name = value['download_path'].rsplit('/', 1)[-1]
file_ext = file_name[file_name.find('.'):]
# REJECTIONS
if file_ext in ['.gtf.bigBed', '.pdf', '.pdf.gz', '.gff.bigBed', '.spikeins']:
value['status'] = 'deleted'
# Find the miscategorized bedMethyls
if file_ext == '.bed.bigBed' and 'MethylRrbs' in value.get('submitted_file_name'):
value['file_format'] = 'bedMethyl'
if file_ext == '.bed.gz' and 'MethylRrbs' in value.get('submitted_file_name'):
value['file_format'] = 'bed_bedMethyl'
unknownDict = {'.CEL.gz': 'CEL',
'.bb': 'bedMethyl',
'.bed': 'bed',
'.bed.gz': 'bed',
'.bed.bigBed': 'bigBed',
'.bigBed': 'bigBed',
'.bed9': 'bedMethyl',
'.bed9.gz': 'bed_bedMethyl',
'.bedCluster.bigBed': 'bigBed',
'.bedLogR.bigBed': 'bedLogR',
'.bedRnaElements.bigBed': 'bedRnaElements',
'.bedRrbs.bigBed': 'bedMethyl',
'.broadPeak.gz': 'bed_broadPeak',
'.bigBed': 'bigBed',
'.hic': 'hic',
'.bedgraph': 'bedgraph',
'.csfasta.gz': 'csfasta',
'.csqual.gz': 'csqual',
'.fasta.gz': 'fasta',
'.gff.bigBed': 'bigBed',
'.gff.gz': 'gtf',
'.gp.bigBed': 'bigBed',
'.matrix.gz': 'tsv',
'.matrix.tgz': 'tar',
'.narrowPeak': 'bed_narrowPeak',
'.narrowPeak.gz': 'bed_narrowPeak',
'.pdf': 'tsv', # These are going to be obsolete
'.pdf.gz': 'tsv', # These are going to be obsolete
'.peaks.gz': 'tsv',
'.peptideMapping.bigBed': 'bigBed',
'.shortFrags.bigBed': 'bigBed',
'.sorted.bigBed': 'bigBed',
'.tab.gz': 'tsv',
'.tgz': 'tar',
'.txt': 'tsv',
'.xlsx': 'tsv', # These need to be converted to tsv
}
if file_format in ['unknown', 'customTrack']:
value['file_format'] = unknownDict[file_ext]
# http://redmine.encodedcc.org/issues/1429
context = system['context']
root = find_root(context)
dataset = root.get_by_uuid(value['dataset']).upgrade_properties()
dataset_status = dataset.get('status')
status = value.get('status')
if status == 'current':
if dataset_status == 'released':
value['status'] = 'released'
else:
value['status'] = 'in progress'
if status == 'obsolete':
if dataset_status in ['released', 'revoked']:
value['status'] = 'revoked'
else:
value['status'] = 'deleted'
# http://redmine.encodedcc.org/issues/568
output_type_dict = {
'': 'raw data',
'Alignments': 'alignments',
'bigBed': 'sites',
'bigWig': 'sites',
'bedgraph': 'sites',
'hic': 'sites',
'Clusters': 'clusters',
'Contigs': 'contigs',
'FastqRd1': 'reads',
'FastqRd2': 'reads',
'forebrain_enhancers': 'enhancers_forebrain',
'heart_enhancers': 'enhancers_heart',
'GreenIdat': 'idat green file',
'hotspot_broad_peaks': 'hotspots',
'hotspot_narrow_peaks': 'hotspots',
'hotspot_signal': 'hotspots',
'Hotspots': 'hotspots',
'Interactions': 'interactions',
'MinusRawSignal': 'raw minus signal',
'PlusRawSignal': 'raw plus signal',
'macs2_dnase_peaks': 'peaks',
'macs2_dnase_signal': 'signal',
'MinusSignal': 'minus signal',
'minusSignal': 'minus signal',
'MultiMinus': 'multi-read minus signal',
'MultiPlus': 'multi-read plus signal',
'MultiSignal': 'multi-read signal',
'MultiUnstranded': 'multi-read signal',
'RawData2': 'reads',
'RedIdat': 'idat red file',
'peak': 'peaks',
'PeakCalls': 'peaks',
'Peaks': 'peaks',
'PlusSignal': 'plus signal',
'plusSignal': 'plus signal',
'predicted_enhancers_heart': 'enhancers_heart',
'RawSignal': 'raw signal',
'RawData': 'raw data',
'rcc': 'raw data',
'Read': 'reads',
'read': 'reads',
'read1': 'reads',
'rejected_reads': 'rejected reads',
'RepPeaks': 'peaks',
'RepSignal': 'signal',
'Signal': 'signal',
'SimpleSignal': 'signal',
'Sites': 'sites',
'Spikeins': 'spike-ins',
'Spikes': 'spike-ins',
'Splices': 'splice junctions',
'uniqueReads': 'unique signal',
'UniqueSignal': 'unique signal',
'uniqueSignal': 'unique signal',
'UniqueMinus': 'unique minus signal',
'uniqueMinusSignal': 'unique minus signal',
'UniquePlus': 'unique plus signal',
'uniquePlusSignal': 'unique plus signal',
'UniqueUnstranded': 'unique signal',
'UnstrandedSignal': 'signal',
'dataset_used': 'enhancers',
'TRAINING_DATA_MOUSE_VISTA': 'enhancers',
'method_description': 'enhancers',
'unknown': 'enhancers',
'Protocol': 'raw data',
}
current_output_type = value['output_type']
if current_output_type in output_type_dict:
value['output_type'] = output_type_dict[current_output_type]
# Help the raw data problem
if value['output_type'] == 'raw data' and value['file_format'] == 'fastq':
value['output_type'] = 'reads'
@upgrade_step('file', '3', '4')
def file_3_4(value, system):
# http://redmine.encodedcc.org/issues/1714
context = system['context']
root = find_root(context)
dataset = root.get_by_uuid(value['dataset']).upgrade_properties()
if 'download_path' in value:
value.pop('download_path')
value['lab'] = dataset['lab']
value['award'] = dataset['award']
# EDW User
if value.get('submitted_by') == '0e04cd39-006b-4b4a-afb3-b6d76c4182ff':
value['lab'] = 'fb0af3d0-3a4c-4e96-b67a-f273fe527b04'
value['award'] = '8bafd685-aa17-43fe-95aa-37bc1c90074a'
@upgrade_step('file', '4', '5')
def file_4_5(value, system):
# http://redmine.encodedcc.org/issues/2566
# http://redmine.encodedcc.org/issues/2565
# we need to remember bedRnaElements,
bed_files = {
'bed_bedLogR': 'bedLogR',
'bed_bedMethyl': 'bedMethyl',
'bed_broadPeak': 'broadPeak',
'bed_gappedPeak': 'gappedPeak',
'bed_narrowPeak': 'narrowPeak',
'bed_bedRnaElements': 'bedRnaElements'
}
bigBed_files = [
'bedLogR',
'bedMethyl',
'broadPeak',
'narrowPeak',
'gappedPeak',
'bedRnaElements'
]
current = value['file_format']
if current in ['bed', 'bigBed']:
value['file_format_type'] = 'unknown'
# we do not know what those formats were, wranglers will need to investigate
elif current in bigBed_files:
value['file_format_type'] = current
value['file_format'] = 'bigBed'
elif current in bed_files:
value['file_format_type'] = bed_files[current]
value['file_format'] = 'bed'
elif current in ['gff']:
value['file_format_type'] = 'unknown'
# all gffs to date were in gff3, but we wouldn't know without wranglers checking
# classify the peptide stuff
if value['output_type'] in ['mPepMapGcFt', 'mPepMapGcUnFt']:
value['file_format_type'] = 'modPepMap'
elif value['output_type'] in ['pepMapGcFt', 'pepMapGcUnFt']:
value['file_format_type'] = 'pepMap'
# http://redmine.encodedcc.org/issues/2565
output_mapping = {
# Category: Raw data
'idat green file': 'idat green channel',
'idat red file': 'idat red channel',
'reads': 'reads',
'rejected reads': 'rejected reads',
'rcc': 'reporter code counts',
'CEL': 'intensity values',
'raw data': 'raw data',
'alignments': 'alignments',
'transcriptome alignments': 'transcriptome alignments',
'spike-ins': 'spike-in alignments',
'multi-read minus signal': 'minus strand signal of multi-mapped reads',
'multi-read plus signal': 'plus strand signal of multi-mapped reads',
'multi-read signal': 'signal of multi-mapped reads',
'multi-read normalized signal': 'normalized signal of multi-mapped reads',
'raw minus signal': 'raw minus strand signal',
'raw plus signal': 'raw plus strand signal',
'raw signal': 'raw signal',
'raw normalized signal': 'raw normalized signal',
'unique minus signal': 'minus strand signal of unique reads',
'unique plus signal': 'plus strand signal of unique reads',
'unique signal': 'signal of unique reads',
'signal': 'signal',
'minus signal': 'minus strand signal',
'plus signal': 'plus strand signal',
'Base_Overlap_Signal': 'base overlap signal',
'PctSignal': 'percentage normalized signal',
'SumSignal': 'summed densities signal',
'WaveSignal': 'wavelet-smoothed signal',
'signal p-value': 'signal p-value',
'fold change over control': 'fold change over control',
'gene read counts': 'gene read counts',
'enrichment': 'enrichment',
'exon quantifications': 'exon quantifications',
'ExonsDeNovo': 'exon quantifications',
'ExonsEnsV65IAcuff': 'exon quantifications',
'ExonsGencV10': 'exon quantifications',
'ExonsGencV3c': 'exon quantifications',
'ExonsGencV7': 'exon quantifications',
'GeneDeNovo': 'gene quantifications',
'GeneEnsV65IAcuff': 'gene quantifications',
'GeneGencV10': 'gene quantifications',
'GeneGencV3c': 'gene quantifications',
'GeneGencV7': 'gene quantifications',
'genome quantifications': 'gene quantifications',
'library_fraction': 'library fraction',
'transcript quantifications': 'transcript quantifications',
'TranscriptDeNovo': 'transcript quantifications',
'TranscriptEnsV65IAcuff': 'transcript quantifications',
'TranscriptGencV10': 'transcript quantifications',
'TranscriptGencV3c': 'transcript quantifications',
'TranscriptGencV7': 'transcript quantifications',
'mPepMapGcFt': 'filtered modified peptide quantification',
'mPepMapGcUnFt': 'unfiltered modified peptide quantification',
'pepMapGcFt': 'filtered peptide quantification',
'pepMapGcUnFt': 'unfiltered peptide quantification',
'clusters': 'clusters',
'CNV': 'copy number variation',
'contigs': 'contigs',
'enhancer validation': 'enhancer validation',
'FiltTransfrags': 'filtered transcribed fragments',
'hotspots': 'hotspots',
'Junctions': 'splice junctions',
'interactions': 'long range chromatin interactions',
'Matrix': 'long range chromatin interactions',
'PrimerPeaks': 'long range chromatin interactions',
'sites': 'methylation state at CpG',
'methyl CG': 'methylation state at CpG',
'methyl CHG': 'methylation state at CHG',
'methyl CHH': 'methylation state at CHH',
'peaks': 'peaks',
'replicated peaks': 'replicated peaks',
'RbpAssocRna': 'RNA-binding protein associated mRNAs',
'splice junctions': 'splice junctions',
'Transfrags': 'transcribed fragments',
'TssGencV3c': 'transcription start sites',
'TssGencV7': 'transcription start sites',
'Valleys': 'valleys',
'Alignability': 'sequence alignability',
'Excludable': 'blacklisted regions',
'Uniqueness': 'sequence uniqueness',
'genome index': 'genome index',
'genome reference': 'genome reference',
'Primer': 'primer sequence',
'spike-in sequence': 'spike-in sequence',
'reference': 'reference',
'enhancers': 'predicted enhancers',
'enhancers_forebrain': 'predicted forebrain enhancers',
'enhancers_heart': 'predicted heart enhancers',
'enhancers_wholebrain': 'predicted whole brain enhancers',
'TssHmm': 'predicted transcription start sites',
'UniformlyProcessedPeakCalls': 'optimal idr thresholded peaks',
'Validation': 'validation',
'HMM': 'HMM predicted chromatin state'
}
old_output_type = value['output_type']
# The peptide mapping files from UCSC all assumed V10 hg19
if old_output_type in ['mPepMapGcFt', 'mPepMapGcUnFt', 'pepMapGcFt', 'pepMapGcUnFt']:
value['genome_annotation'] = 'V10'
value['assembly'] = 'hg19'
elif old_output_type in ['ExonsEnsV65IAcuff', 'GeneEnsV65IAcuff', 'TranscriptEnsV65IAcuff']:
value['genome_annotation'] = 'ENSEMBL V65'
elif old_output_type in ['ExonsGencV3c', 'GeneGencV3c', 'TranscriptGencV3c', 'TssGencV3c']:
value['genome_annotation'] = 'V3c'
elif old_output_type in ['ExonsGencV7', 'GeneGencV7', 'TranscriptGencV7', 'TssGencV7']:
value['genome_annotation'] = 'V7'
elif old_output_type in ['ExonsGencV10', 'GeneGencV10', 'TranscriptGencV10', 'TssGencV10']:
value['genome_annotation'] = 'V10'
elif old_output_type in ['spike-ins'] and value['file_format'] == 'fasta':
old_output_type = 'spike-in sequence'
elif old_output_type in ['raw data'] and value['file_format'] in ['fastq', 'csfasta', 'csqual', 'fasta']:
old_output_type = 'reads'
elif old_output_type in ['raw data'] and value['file_format'] in ['CEL', 'tar']:
old_output_type = 'CEL'
elif old_output_type in ['raw data'] and value['file_format'] in ['rcc']:
old_output_type = 'rcc'
elif old_output_type in ['raw data'] and value['lab'] == '/labs/timothy-hubbard/':
old_output_type = 'reference'
elif old_output_type in ['raw data']:
if 'These are protocol documents' in value.get('notes', ''):
old_output_type = 'reference'
elif old_output_type == 'sites' and value['file_format'] == 'tsv':
old_output_type = 'interactions'
elif old_output_type in ['Validation'] and value['file_format'] == '2bit':
old_output_type = 'genome reference'
value['output_type'] = output_mapping[old_output_type]
# label the lost bedRnaElements files #2940
bedRnaElements_files = [
'transcript quantifications',
'gene quantifications',
'exon quantifications'
]
if (
value['output_type'] in bedRnaElements_files
and value['status'] in ['deleted', 'replaced']
and value['file_format'] == 'bigBed'
and value['file_format_type'] == 'unknown'
):
value['file_format_type'] = 'bedRnaElements'
# Get the replicate information
if value.get('file_format') in ['fastq', 'fasta', 'csfasta']:
context = system['context']
root = find_root(context)
if 'replicate' in value:
replicate = root.get_by_uuid(value['replicate']).upgrade_properties()
if 'read_length' not in value:
value['read_length'] = replicate.get('read_length')
if value['read_length'] is None:
del value['read_length']
run_type_dict = {
True: 'paired-ended',
False: 'single-ended',
None: 'unknown'
}
if 'run_type' not in value:
value['run_type'] = run_type_dict[replicate.get('paired_ended')]
if value.get('paired_end') in ['2']:
value['run_type'] = 'paired-ended'
# Backfill content_md5sum #2683
if 'content_md5sum' not in value:
md5sum_content_md5sum = system['registry'].get('backfill_2683', {})
if value['md5sum'] in md5sum_content_md5sum:
value['content_md5sum'] = md5sum_content_md5sum[value['md5sum']]
@upgrade_step('file', '5', '6')
def file_5_6(value, system):
# http://redmine.encodedcc.org/issues/3019
import re
if value.get('output_type') in [
'minus strand signal of multi-mapped reads',
'plus strand signal of multi-mapped reads',
'signal of multi-mapped reads',
'normalized signal of multi-mapped reads'
]:
value['output_type'] = re.sub('multi-mapped', 'all', value['output_type'])
@upgrade_step('file', '6', '7')
def file_6_7(value, system):
# http://redmine.encodedcc.org/issues/3063
if 'file_format_specifications' in value:
value['file_format_specifications'] = list(set(value['file_format_specifications']))
if 'controlled_by' in value:
value['controlled_by'] = list(set(value['controlled_by']))
if 'derived_from' in value:
value['derived_from'] = list(set(value['derived_from']))
if 'supercedes' in value:
value['supercedes'] = list(set(value['supercedes']))
if 'aliases' in value:
value['aliases'] = list(set(value['aliases']))
@upgrade_step('file', '7', '8')
def file_7_8(value, system):
return
@upgrade_step('file', '8', '9')
def file_8_9(value, system):
# http://redmine.encodedcc.org/issues/4183
if (value['file_format'] == 'fastq') and ('assembly' in value):
value.pop('assembly')
# http://redmine.encodedcc.org/issues/1859
if 'supercedes' in value:
value['supersedes'] = value['supercedes']
value.pop('supercedes', None)
def set_to_midnight(date_string):
release_date = datetime.strptime(date_string, '%Y-%m-%d')
min_pub_date_time = datetime.combine(release_date, time.min)
return '{:%Y-%m-%dT%H:%M:%S.%f+00:00}'.format(min_pub_date_time)
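# Editorial worked example (not part of the original file):
#     set_to_midnight('2016-01-01') == '2016-01-01T00:00:00.000000+00:00'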
# NOTE (editorial): this step was registered as a duplicate of the
# '10' -> '11' step below; given the surrounding sequence (8 -> 9 above,
# 11 -> 12 below) it is restored here as the 9 -> 10 step.
@upgrade_step('file', '9', '10')
def file_9_10(value, system):
# http://redmine.encodedcc.org/issues/5021
# http://redmine.encodedcc.org/issues/4929
# http://redmine.encodedcc.org/issues/4927
# http://redmine.encodedcc.org/issues/4903
# http://redmine.encodedcc.org/issues/4904
date_created = value.get('date_created')
if date_created.find('T') == -1:
value['date_created'] = set_to_midnight(date_created)
# http://redmine.encodedcc.org/issues/4748
aliases = []
if 'aliases' in value and value['aliases']:
aliases = value['aliases']
else:
return
aliases_to_remove = []
for i in range(0, len(aliases)):
new_alias = ''
if 'roadmap-epigenomics' in aliases[i]:
if '||' in aliases[i]:
scrub_parts = aliases[i].split('||')
date_split = scrub_parts[1].split(' ')
date = "-".join([date_split[1].strip(),
date_split[2].strip(),
date_split[5].strip()])
scrubbed_list = [scrub_parts[0].strip(), date.strip(), scrub_parts[2].strip()]
if len(scrub_parts) == 4:
scrubbed_list.append(scrub_parts[3].strip())
new_alias = '_'.join(scrubbed_list)
parts = aliases[i].split(':') if not new_alias else new_alias.split(':')
namespace = parts[0]
if namespace in ['ucsc_encode_db', 'UCSC_encode_db', 'versionof']:
# Remove the alias with the bad namespace
aliases_to_remove.append(aliases[i])
namespace = 'encode'
if namespace in ['CGC']:
namespace = namespace.lower()
rest = '_'.join(parts[1:]).strip()
# Remove or substitute bad characters and multiple whitespaces
import re
if any(c in rest for c in '"#@!$^&|~;`/\\'):
rest = re.sub(r'[\"#@!$^&|~;`\/\\]', '', rest)
rest = ' '.join(rest.split())
if '%' in rest:
rest = re.sub(r'%', 'pct', rest)
if '[' in rest or '{' in rest:
rest = re.sub('[\[{]', '(', rest)
if ']' in rest or '}' in rest:
rest = re.sub('[\]}]', ')', rest)
new_alias = ':'.join([namespace, rest])
if new_alias not in aliases:
aliases[i] = new_alias
if aliases_to_remove and aliases:
for a in aliases_to_remove:
if a in aliases:
aliases.remove(a)
@upgrade_step('file', '10', '11')
def file_10_11(value, system):
# http://redmine.encodedcc.org/issues/5081
# http://redmine.encodedcc.org/issues/5049
# http://redmine.encodedcc.org/issues/4924
if not value.get('no_file_available'):
value['no_file_available'] = False
@upgrade_step('file', '11', '12')
def file_11_12(value, system):
# https://encodedcc.atlassian.net/browse/ENCD-3347
return
|
mit
|
vineodd/PIMSim
|
GEM5Simulation/gem5/src/arch/arm/ArmNativeTrace.py
|
42
|
1917
|
# Copyright (c) 2009 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
from m5.SimObject import SimObject
from m5.params import *
from CPUTracers import NativeTrace
class ArmNativeTrace(NativeTrace):
type = 'ArmNativeTrace'
cxx_class = 'Trace::ArmNativeTrace'
cxx_header = "arch/arm/nativetrace.hh"
stop_on_pc_error = Param.Bool(True,
"Stop M5 if it and statetrace's pcs are different")
|
gpl-3.0
|
Brocade-OpenSource/OpenStack-DNRM-Nova
|
nova/virt/hyperv/vmops.py
|
3
|
14493
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Cloud.com, Inc
# Copyright 2012 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for basic VM operations.
"""
import os
from oslo.config import cfg
from nova.api.metadata import base as instance_metadata
from nova import exception
from nova.openstack.common import excutils
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova import utils
from nova.virt import configdrive
from nova.virt.hyperv import constants
from nova.virt.hyperv import hostutils
from nova.virt.hyperv import imagecache
from nova.virt.hyperv import pathutils
from nova.virt.hyperv import vhdutils
from nova.virt.hyperv import vmutils
from nova.virt.hyperv import volumeops
LOG = logging.getLogger(__name__)
hyperv_opts = [
cfg.BoolOpt('limit_cpu_features',
default=False,
help='Required for live migration among '
'hosts with different CPU features'),
cfg.BoolOpt('config_drive_inject_password',
default=False,
help='Sets the admin password in the config drive image'),
cfg.StrOpt('qemu_img_cmd',
default="qemu-img.exe",
help='qemu-img is used to convert between '
'different image types'),
cfg.BoolOpt('config_drive_cdrom',
default=False,
help='Attaches the Config Drive image as a cdrom drive '
'instead of a disk drive')
]
CONF = cfg.CONF
CONF.register_opts(hyperv_opts, 'hyperv')
CONF.import_opt('use_cow_images', 'nova.virt.driver')
CONF.import_opt('network_api_class', 'nova.network')
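# Editorial note (not part of the original module): the options above are
# registered under the 'hyperv' group, so an operator would set them in
# nova.conf roughly as follows (values shown are illustrative):
#
#   [hyperv]
#   limit_cpu_features = False
#   config_drive_inject_password = True
#   qemu_img_cmd = qemu-img.exe
#   config_drive_cdrom = False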
class VMOps(object):
_vif_driver_class_map = {
'nova.network.neutronv2.api.API':
'nova.virt.hyperv.vif.HyperVNeutronVIFDriver',
'nova.network.api.API':
'nova.virt.hyperv.vif.HyperVNovaNetworkVIFDriver',
}
def __init__(self):
self._hostutils = hostutils.HostUtils()
self._vmutils = vmutils.VMUtils()
self._vhdutils = vhdutils.VHDUtils()
self._pathutils = pathutils.PathUtils()
self._volumeops = volumeops.VolumeOps()
self._imagecache = imagecache.ImageCache()
self._vif_driver = None
self._load_vif_driver_class()
def _load_vif_driver_class(self):
try:
class_name = self._vif_driver_class_map[CONF.network_api_class]
self._vif_driver = importutils.import_object(class_name)
except KeyError:
raise TypeError(_("VIF driver not found for "
"network_api_class: %s") %
CONF.network_api_class)
def list_instances(self):
return self._vmutils.list_instances()
def get_info(self, instance):
"""Get information about the VM."""
LOG.debug(_("get_info called for instance"), instance=instance)
instance_name = instance['name']
if not self._vmutils.vm_exists(instance_name):
raise exception.InstanceNotFound(instance_id=instance['uuid'])
info = self._vmutils.get_vm_summary_info(instance_name)
state = constants.HYPERV_POWER_STATE[info['EnabledState']]
return {'state': state,
'max_mem': info['MemoryUsage'],
'mem': info['MemoryUsage'],
'num_cpu': info['NumberOfProcessors'],
'cpu_time': info['UpTime']}
def _create_root_vhd(self, context, instance):
base_vhd_path = self._imagecache.get_cached_image(context, instance)
root_vhd_path = self._pathutils.get_vhd_path(instance['name'])
try:
if CONF.use_cow_images:
LOG.debug(_("Creating differencing VHD. Parent: "
"%(base_vhd_path)s, Target: %(root_vhd_path)s"),
{'base_vhd_path': base_vhd_path,
'root_vhd_path': root_vhd_path})
self._vhdutils.create_differencing_vhd(root_vhd_path,
base_vhd_path)
else:
LOG.debug(_("Copying VHD image %(base_vhd_path)s to target: "
"%(root_vhd_path)s"),
{'base_vhd_path': base_vhd_path,
'root_vhd_path': root_vhd_path})
self._pathutils.copyfile(base_vhd_path, root_vhd_path)
base_vhd_info = self._vhdutils.get_vhd_info(base_vhd_path)
base_vhd_size = base_vhd_info['MaxInternalSize']
root_vhd_size = instance['root_gb'] * 1024 ** 3
if root_vhd_size < base_vhd_size:
raise vmutils.HyperVException(_("Cannot resize a VHD to a "
"smaller size"))
elif root_vhd_size > base_vhd_size:
LOG.debug(_("Resizing VHD %(root_vhd_path)s to new "
"size %(root_vhd_size)s"),
{'base_vhd_path': base_vhd_path,
'root_vhd_path': root_vhd_path})
self._vhdutils.resize_vhd(root_vhd_path, root_vhd_size)
except Exception:
with excutils.save_and_reraise_exception():
if self._pathutils.exists(root_vhd_path):
self._pathutils.remove(root_vhd_path)
return root_vhd_path
def spawn(self, context, instance, image_meta, injected_files,
admin_password, network_info, block_device_info=None):
"""Create a new VM and start it."""
LOG.info(_("Spawning new instance"), instance=instance)
instance_name = instance['name']
if self._vmutils.vm_exists(instance_name):
raise exception.InstanceExists(name=instance_name)
# Make sure we're starting with a clean slate.
self._delete_disk_files(instance_name)
if self._volumeops.ebs_root_in_block_devices(block_device_info):
root_vhd_path = None
else:
root_vhd_path = self._create_root_vhd(context, instance)
try:
self.create_instance(instance, network_info, block_device_info,
root_vhd_path)
if configdrive.required_by(instance):
self._create_config_drive(instance, injected_files,
admin_password)
self.power_on(instance)
except Exception as ex:
LOG.exception(ex)
self.destroy(instance)
raise vmutils.HyperVException(_('Spawn instance failed'))
def create_instance(self, instance, network_info,
block_device_info, root_vhd_path):
instance_name = instance['name']
self._vmutils.create_vm(instance_name,
instance['memory_mb'],
instance['vcpus'],
CONF.hyperv.limit_cpu_features)
if root_vhd_path:
self._vmutils.attach_ide_drive(instance_name,
root_vhd_path,
0,
0,
constants.IDE_DISK)
self._vmutils.create_scsi_controller(instance_name)
self._volumeops.attach_volumes(block_device_info,
instance_name,
root_vhd_path is None)
for vif in network_info:
LOG.debug(_('Creating nic for instance: %s'), instance_name)
self._vmutils.create_nic(instance_name,
vif['id'],
vif['address'])
self._vif_driver.plug(instance, vif)
def _create_config_drive(self, instance, injected_files, admin_password):
if CONF.config_drive_format != 'iso9660':
raise vmutils.HyperVException(_('Invalid config_drive_format "%s"') %
CONF.config_drive_format)
LOG.info(_('Using config drive for instance: %s'), instance=instance)
extra_md = {}
if admin_password and CONF.hyperv.config_drive_inject_password:
extra_md['admin_pass'] = admin_password
inst_md = instance_metadata.InstanceMetadata(instance,
content=injected_files,
extra_md=extra_md)
instance_path = self._pathutils.get_instance_dir(
instance['name'])
configdrive_path_iso = os.path.join(instance_path, 'configdrive.iso')
LOG.info(_('Creating config drive at %(path)s'),
{'path': configdrive_path_iso}, instance=instance)
with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
try:
cdb.make_drive(configdrive_path_iso)
except processutils.ProcessExecutionError as e:
with excutils.save_and_reraise_exception():
LOG.error(_('Creating config drive failed with error: %s'),
e, instance=instance)
if not CONF.hyperv.config_drive_cdrom:
drive_type = constants.IDE_DISK
configdrive_path = os.path.join(instance_path,
'configdrive.vhd')
utils.execute(CONF.hyperv.qemu_img_cmd,
'convert',
'-f',
'raw',
'-O',
'vpc',
configdrive_path_iso,
configdrive_path,
attempts=1)
self._pathutils.remove(configdrive_path_iso)
else:
drive_type = constants.IDE_DVD
configdrive_path = configdrive_path_iso
self._vmutils.attach_ide_drive(instance['name'], configdrive_path,
1, 0, drive_type)
def _disconnect_volumes(self, volume_drives):
for volume_drive in volume_drives:
self._volumeops.disconnect_volume(volume_drive)
def _delete_disk_files(self, instance_name):
self._pathutils.get_instance_dir(instance_name,
create_dir=False,
remove_dir=True)
def destroy(self, instance, network_info=None, block_device_info=None,
destroy_disks=True):
instance_name = instance['name']
LOG.info(_("Got request to destroy instance: %s"), instance_name)
try:
if self._vmutils.vm_exists(instance_name):
#Stop the VM first.
self.power_off(instance)
storage = self._vmutils.get_vm_storage_paths(instance_name)
(disk_files, volume_drives) = storage
self._vmutils.destroy_vm(instance_name)
self._disconnect_volumes(volume_drives)
else:
LOG.debug(_("Instance not found: %s"), instance_name)
if destroy_disks:
self._delete_disk_files(instance_name)
except Exception as ex:
LOG.exception(ex)
raise vmutils.HyperVException(_('Failed to destroy instance: %s') %
instance_name)
def reboot(self, instance, network_info, reboot_type):
"""Reboot the specified instance."""
LOG.debug(_("reboot instance"), instance=instance)
self._set_vm_state(instance['name'],
constants.HYPERV_VM_STATE_REBOOT)
def pause(self, instance):
"""Pause VM instance."""
LOG.debug(_("Pause instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_PAUSED)
def unpause(self, instance):
"""Unpause paused VM instance."""
LOG.debug(_("Unpause instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_ENABLED)
def suspend(self, instance):
"""Suspend the specified instance."""
LOG.debug(_("Suspend instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_SUSPENDED)
def resume(self, instance):
"""Resume the suspended VM instance."""
LOG.debug(_("Resume instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_ENABLED)
def power_off(self, instance):
"""Power off the specified instance."""
LOG.debug(_("Power off instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_DISABLED)
def power_on(self, instance):
"""Power on the specified instance."""
LOG.debug(_("Power on instance"), instance=instance)
self._set_vm_state(instance["name"],
constants.HYPERV_VM_STATE_ENABLED)
def _set_vm_state(self, vm_name, req_state):
try:
self._vmutils.set_vm_state(vm_name, req_state)
LOG.debug(_("Successfully changed state of VM %(vm_name)s"
" to: %(req_state)s"),
{'vm_name': vm_name, 'req_state': req_state})
except Exception as ex:
LOG.exception(ex)
msg = (_("Failed to change vm state of %(vm_name)s"
" to %(req_state)s") %
{'vm_name': vm_name, 'req_state': req_state})
raise vmutils.HyperVException(msg)
|
apache-2.0
|
tensorflow/ranking
|
tensorflow_ranking/python/keras/canned/dnn.py
|
1
|
5354
|
# Copyright 2021 The TensorFlow Ranking Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""DNN Ranking network in Keras."""
import tensorflow.compat.v2 as tf
from tensorflow_ranking.python.keras import network as network_lib
class DNNRankingNetwork(network_lib.UnivariateRankingNetwork):
"""Deep Neural Network (DNN) scoring based univariate ranking network."""
def __init__(self,
context_feature_columns=None,
example_feature_columns=None,
hidden_layer_dims=None,
activation=None,
use_batch_norm=True,
batch_norm_moment=0.999,
dropout=0.5,
name='dnn_ranking_network',
**kwargs):
"""Initializes an instance of DNN ranking network.
This network consists of feedforward linear units passed through a
non-linear
activation. The hidden size of the linear units and the activation are
specified by the user.
Args:
context_feature_columns: A dict containing all the context feature columns
used by the network. Keys are feature names, and values are instances of
classes derived from `_FeatureColumn`.
example_feature_columns: A dict containing all the example feature columns
used by the network. Keys are feature names, and values are instances of
classes derived from `_FeatureColumn`.
hidden_layer_dims: Iterable of the number of hidden units per layer. All layers
are fully connected. Ex. `[64, 32]` means first layer has 64 nodes and
second one has 32.
activation: Activation function applied to each layer. If `None`, will use
an identity activation, which is default behavior in Keras activations.
use_batch_norm: Whether to use batch normalization after each hidden
layer.
batch_norm_moment: Momentum for the moving average in batch normalization.
dropout: When not `None`, the probability we will drop out a given
coordinate.
name: name of Keras network.
**kwargs: keyword arguments.
Raises:
`ValueError` if `example_feature_columns` or `hidden_layer_dims` is empty.
"""
if not example_feature_columns or not hidden_layer_dims:
raise ValueError('example_feature_columns or hidden_layer_dims must not '
'be empty.')
super(DNNRankingNetwork, self).__init__(
context_feature_columns=context_feature_columns,
example_feature_columns=example_feature_columns,
name=name,
**kwargs)
self._hidden_layer_dims = [int(d) for d in hidden_layer_dims]
self._activation = activation
self._use_batch_norm = use_batch_norm
self._batch_norm_moment = batch_norm_moment
self._dropout = dropout
layers = []
if self._use_batch_norm:
layers.append(
tf.keras.layers.BatchNormalization(momentum=self._batch_norm_moment))
for _, layer_width in enumerate(self._hidden_layer_dims):
layers.append(tf.keras.layers.Dense(units=layer_width))
if self._use_batch_norm:
layers.append(
tf.keras.layers.BatchNormalization(
momentum=self._batch_norm_moment))
layers.append(tf.keras.layers.Activation(activation=self._activation))
layers.append(tf.keras.layers.Dropout(rate=self._dropout))
self._scoring_layers = layers
self._output_score_layer = tf.keras.layers.Dense(units=1)
def score(self, context_features=None, example_features=None, training=True):
"""Univariate scoring of context and one example to generate a score.
Args:
context_features: (dict) context feature names to 2D tensors of shape
[batch_size, ...].
example_features: (dict) example feature names to 2D tensors of shape
[batch_size, ...].
training: (bool) whether in training or inference mode.
Returns:
(tf.Tensor) A score tensor of shape [batch_size, 1].
"""
context_input = [
tf.keras.layers.Flatten()(context_features[name])
for name in sorted(self.context_feature_columns)
]
example_input = [
tf.keras.layers.Flatten()(example_features[name])
for name in sorted(self.example_feature_columns)
]
inputs = tf.concat(context_input + example_input, 1)
outputs = inputs
for layer in self._scoring_layers:
outputs = layer(outputs, training=training)
return self._output_score_layer(outputs, training=training)
def get_config(self):
config = super(DNNRankingNetwork, self).get_config()
config.update({
'hidden_layer_dims': self._hidden_layer_dims,
'activation': self._activation,
'use_batch_norm': self._use_batch_norm,
'batch_norm_moment': self._batch_norm_moment,
'dropout': self._dropout,
})
return config
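# --- Editorial usage sketch (not part of the original module) ---------------
# Shows how this network might be constructed.  The feature-column names and
# shapes below are illustrative assumptions, not part of TF-Ranking's API.
def _example_dnn_ranking_network():
  context_feature_columns = {
      'query_length':
          tf.feature_column.numeric_column('query_length', shape=(1,))
  }
  example_feature_columns = {
      'document_score':
          tf.feature_column.numeric_column('document_score', shape=(1,))
  }
  return DNNRankingNetwork(
      context_feature_columns=context_feature_columns,
      example_feature_columns=example_feature_columns,
      hidden_layer_dims=[64, 32],
      activation=tf.nn.relu,
      use_batch_norm=True,
      dropout=0.5)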
|
apache-2.0
|
Intel-Corporation/tensorflow
|
tensorflow/python/tools/saved_model_cli_test.py
|
41
|
22291
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SavedModelCLI tool.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import os
import pickle
import shutil
import sys
import numpy as np
from six import StringIO
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.debug.wrappers import local_cli_wrapper
from tensorflow.python.platform import test
from tensorflow.python.tools import saved_model_cli
SAVED_MODEL_PATH = ('cc/saved_model/testdata/half_plus_two/00000123')
@contextlib.contextmanager
def captured_output():
new_out, new_err = StringIO(), StringIO()
old_out, old_err = sys.stdout, sys.stderr
try:
sys.stdout, sys.stderr = new_out, new_err
yield sys.stdout, sys.stderr
finally:
sys.stdout, sys.stderr = old_out, old_err
class SavedModelCLITestCase(test.TestCase):
def testShowCommandAll(self):
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
self.parser = saved_model_cli.create_parser()
args = self.parser.parse_args(['show', '--dir', base_path, '--all'])
with captured_output() as (out, err):
saved_model_cli.show(args)
output = out.getvalue().strip()
# pylint: disable=line-too-long
exp_out = """MetaGraphDef with tag-set: 'serve' contains the following SignatureDefs:
signature_def['classify_x2_to_y3']:
The given SavedModel SignatureDef contains the following input(s):
inputs['inputs'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: x2:0
The given SavedModel SignatureDef contains the following output(s):
outputs['scores'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: y3:0
Method name is: tensorflow/serving/classify
signature_def['classify_x_to_y']:
The given SavedModel SignatureDef contains the following input(s):
inputs['inputs'] tensor_info:
dtype: DT_STRING
shape: unknown_rank
name: tf_example:0
The given SavedModel SignatureDef contains the following output(s):
outputs['scores'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: y:0
Method name is: tensorflow/serving/classify
signature_def['regress_x2_to_y3']:
The given SavedModel SignatureDef contains the following input(s):
inputs['inputs'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: x2:0
The given SavedModel SignatureDef contains the following output(s):
outputs['outputs'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: y3:0
Method name is: tensorflow/serving/regress
signature_def['regress_x_to_y']:
The given SavedModel SignatureDef contains the following input(s):
inputs['inputs'] tensor_info:
dtype: DT_STRING
shape: unknown_rank
name: tf_example:0
The given SavedModel SignatureDef contains the following output(s):
outputs['outputs'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: y:0
Method name is: tensorflow/serving/regress
signature_def['regress_x_to_y2']:
The given SavedModel SignatureDef contains the following input(s):
inputs['inputs'] tensor_info:
dtype: DT_STRING
shape: unknown_rank
name: tf_example:0
The given SavedModel SignatureDef contains the following output(s):
outputs['outputs'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: y2:0
Method name is: tensorflow/serving/regress
signature_def['serving_default']:
The given SavedModel SignatureDef contains the following input(s):
inputs['x'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: x:0
The given SavedModel SignatureDef contains the following output(s):
outputs['y'] tensor_info:
dtype: DT_FLOAT
shape: (-1, 1)
name: y:0
Method name is: tensorflow/serving/predict"""
# pylint: enable=line-too-long
self.maxDiff = None # Produce a useful error msg if the comparison fails
self.assertMultiLineEqual(output, exp_out)
self.assertEqual(err.getvalue().strip(), '')
def testShowCommandTags(self):
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
self.parser = saved_model_cli.create_parser()
args = self.parser.parse_args(['show', '--dir', base_path])
with captured_output() as (out, err):
saved_model_cli.show(args)
output = out.getvalue().strip()
exp_out = 'The given SavedModel contains the following tag-sets:\nserve'
self.assertMultiLineEqual(output, exp_out)
self.assertEqual(err.getvalue().strip(), '')
def testShowCommandSignature(self):
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
self.parser = saved_model_cli.create_parser()
args = self.parser.parse_args(
['show', '--dir', base_path, '--tag_set', 'serve'])
with captured_output() as (out, err):
saved_model_cli.show(args)
output = out.getvalue().strip()
exp_header = ('The given SavedModel MetaGraphDef contains SignatureDefs '
'with the following keys:')
exp_start = 'SignatureDef key: '
exp_keys = [
'"classify_x2_to_y3"', '"classify_x_to_y"', '"regress_x2_to_y3"',
'"regress_x_to_y"', '"regress_x_to_y2"', '"serving_default"'
]
# Order of signatures does not matter
self.assertMultiLineEqual(
output,
'\n'.join([exp_header] + [exp_start + exp_key for exp_key in exp_keys]))
self.assertEqual(err.getvalue().strip(), '')
def testShowCommandErrorNoTagSet(self):
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
self.parser = saved_model_cli.create_parser()
args = self.parser.parse_args(
['show', '--dir', base_path, '--tag_set', 'badtagset'])
with self.assertRaises(RuntimeError):
saved_model_cli.show(args)
def testShowCommandInputsOutputs(self):
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
self.parser = saved_model_cli.create_parser()
args = self.parser.parse_args([
'show', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'serving_default'
])
with captured_output() as (out, err):
saved_model_cli.show(args)
output = out.getvalue().strip()
expected_output = (
'The given SavedModel SignatureDef contains the following input(s):\n'
' inputs[\'x\'] tensor_info:\n'
' dtype: DT_FLOAT\n shape: (-1, 1)\n name: x:0\n'
'The given SavedModel SignatureDef contains the following output(s):\n'
' outputs[\'y\'] tensor_info:\n'
' dtype: DT_FLOAT\n shape: (-1, 1)\n name: y:0\n'
'Method name is: tensorflow/serving/predict')
self.assertEqual(output, expected_output)
self.assertEqual(err.getvalue().strip(), '')
def testPrintREFTypeTensor(self):
ref_tensor_info = meta_graph_pb2.TensorInfo()
ref_tensor_info.dtype = types_pb2.DT_FLOAT_REF
with captured_output() as (out, err):
saved_model_cli._print_tensor_info(ref_tensor_info)
self.assertTrue('DT_FLOAT_REF' in out.getvalue().strip())
self.assertEqual(err.getvalue().strip(), '')
def testInputPreProcessFormats(self):
input_str = 'input1=/path/file.txt[ab3];input2=file2'
input_expr_str = 'input3=np.zeros([2,2]);input4=[4,5]'
input_dict = saved_model_cli.preprocess_inputs_arg_string(input_str)
input_expr_dict = saved_model_cli.preprocess_input_exprs_arg_string(
input_expr_str)
self.assertTrue(input_dict['input1'] == ('/path/file.txt', 'ab3'))
self.assertTrue(input_dict['input2'] == ('file2', None))
print(input_expr_dict['input3'])
self.assertAllClose(input_expr_dict['input3'], np.zeros([2, 2]))
self.assertAllClose(input_expr_dict['input4'], [4, 5])
self.assertTrue(len(input_dict) == 2)
self.assertTrue(len(input_expr_dict) == 2)
def testInputPreProcessFileNames(self):
input_str = (r'inputx=C:\Program Files\data.npz[v:0];'
r'input:0=c:\PROGRA~1\data.npy')
input_dict = saved_model_cli.preprocess_inputs_arg_string(input_str)
self.assertTrue(input_dict['inputx'] == (r'C:\Program Files\data.npz',
'v:0'))
self.assertTrue(input_dict['input:0'] == (r'c:\PROGRA~1\data.npy', None))
def testInputPreProcessErrorBadFormat(self):
input_str = 'inputx=file[[v1]v2'
with self.assertRaises(RuntimeError):
saved_model_cli.preprocess_inputs_arg_string(input_str)
input_str = 'inputx:file'
with self.assertRaises(RuntimeError):
saved_model_cli.preprocess_inputs_arg_string(input_str)
input_str = 'inputx:np.zeros((5))'
with self.assertRaises(RuntimeError):
saved_model_cli.preprocess_input_exprs_arg_string(input_str)
def testInputParserNPY(self):
x0 = np.array([[1], [2]])
x1 = np.array(range(6)).reshape(2, 3)
input0_path = os.path.join(test.get_temp_dir(), 'input0.npy')
input1_path = os.path.join(test.get_temp_dir(), 'input1.npy')
np.save(input0_path, x0)
np.save(input1_path, x1)
input_str = 'x0=' + input0_path + '[x0];x1=' + input1_path
feed_dict = saved_model_cli.load_inputs_from_input_arg_string(
input_str, '', '')
self.assertTrue(np.all(feed_dict['x0'] == x0))
self.assertTrue(np.all(feed_dict['x1'] == x1))
def testInputParserNPZ(self):
x0 = np.array([[1], [2]])
input_path = os.path.join(test.get_temp_dir(), 'input.npz')
np.savez(input_path, a=x0)
input_str = 'x=' + input_path + '[a];y=' + input_path
feed_dict = saved_model_cli.load_inputs_from_input_arg_string(
input_str, '', '')
self.assertTrue(np.all(feed_dict['x'] == x0))
self.assertTrue(np.all(feed_dict['y'] == x0))
def testInputParserPickle(self):
pkl0 = {'a': 5, 'b': np.array(range(4))}
pkl1 = np.array([1])
pkl2 = np.array([[1], [3]])
input_path0 = os.path.join(test.get_temp_dir(), 'pickle0.pkl')
input_path1 = os.path.join(test.get_temp_dir(), 'pickle1.pkl')
input_path2 = os.path.join(test.get_temp_dir(), 'pickle2.pkl')
with open(input_path0, 'wb') as f:
pickle.dump(pkl0, f)
with open(input_path1, 'wb') as f:
pickle.dump(pkl1, f)
with open(input_path2, 'wb') as f:
pickle.dump(pkl2, f)
input_str = 'x=' + input_path0 + '[b];y=' + input_path1 + '[c];'
input_str += 'z=' + input_path2
feed_dict = saved_model_cli.load_inputs_from_input_arg_string(
input_str, '', '')
self.assertTrue(np.all(feed_dict['x'] == pkl0['b']))
self.assertTrue(np.all(feed_dict['y'] == pkl1))
self.assertTrue(np.all(feed_dict['z'] == pkl2))
def testInputParserPythonExpression(self):
x1 = np.ones([2, 10])
x2 = np.array([[1], [2], [3]])
x3 = np.mgrid[0:5, 0:5]
x4 = [[3], [4]]
input_expr_str = ('x1=np.ones([2,10]);x2=np.array([[1],[2],[3]]);'
'x3=np.mgrid[0:5,0:5];x4=[[3],[4]]')
feed_dict = saved_model_cli.load_inputs_from_input_arg_string(
'', input_expr_str, '')
self.assertTrue(np.all(feed_dict['x1'] == x1))
self.assertTrue(np.all(feed_dict['x2'] == x2))
self.assertTrue(np.all(feed_dict['x3'] == x3))
self.assertTrue(np.all(feed_dict['x4'] == x4))
def testInputParserBoth(self):
x0 = np.array([[1], [2]])
input_path = os.path.join(test.get_temp_dir(), 'input.npz')
np.savez(input_path, a=x0)
x1 = np.ones([2, 10])
input_str = 'x0=' + input_path + '[a]'
input_expr_str = 'x1=np.ones([2,10])'
feed_dict = saved_model_cli.load_inputs_from_input_arg_string(
input_str, input_expr_str, '')
self.assertTrue(np.all(feed_dict['x0'] == x0))
self.assertTrue(np.all(feed_dict['x1'] == x1))
def testInputParserBothDuplicate(self):
x0 = np.array([[1], [2]])
input_path = os.path.join(test.get_temp_dir(), 'input.npz')
np.savez(input_path, a=x0)
x1 = np.ones([2, 10])
input_str = 'x0=' + input_path + '[a]'
input_expr_str = 'x0=np.ones([2,10])'
feed_dict = saved_model_cli.load_inputs_from_input_arg_string(
input_str, input_expr_str, '')
self.assertTrue(np.all(feed_dict['x0'] == x1))
def testInputParserErrorNoName(self):
x0 = np.array([[1], [2]])
x1 = np.array(range(5))
input_path = os.path.join(test.get_temp_dir(), 'input.npz')
np.savez(input_path, a=x0, b=x1)
input_str = 'x=' + input_path
with self.assertRaises(RuntimeError):
saved_model_cli.load_inputs_from_input_arg_string(input_str, '', '')
def testInputParserErrorWrongName(self):
x0 = np.array([[1], [2]])
x1 = np.array(range(5))
input_path = os.path.join(test.get_temp_dir(), 'input.npz')
np.savez(input_path, a=x0, b=x1)
input_str = 'x=' + input_path + '[c]'
with self.assertRaises(RuntimeError):
saved_model_cli.load_inputs_from_input_arg_string(input_str, '', '')
def testRunCommandInputExamples(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
output_dir = os.path.join(test.get_temp_dir(), 'new_dir')
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'regress_x_to_y', '--input_examples',
'inputs=[{"x":[8.0],"x2":[5.0]}, {"x":[4.0],"x2":[3.0]}]', '--outdir',
output_dir
])
saved_model_cli.run(args)
y_actual = np.load(os.path.join(output_dir, 'outputs.npy'))
y_expected = np.array([[6.0], [4.0]])
self.assertAllEqual(y_expected, y_actual)
def testRunCommandExistingOutdir(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
x = np.array([[1], [2]])
x_notused = np.zeros((6, 3))
input_path = os.path.join(test.get_temp_dir(), 'testRunCommand_inputs.npz')
np.savez(input_path, x0=x, x1=x_notused)
output_file = os.path.join(test.get_temp_dir(), 'outputs.npy')
if os.path.exists(output_file):
os.remove(output_file)
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'regress_x2_to_y3', '--inputs', 'inputs=' + input_path + '[x0]',
'--outdir',
test.get_temp_dir()
])
saved_model_cli.run(args)
y_actual = np.load(output_file)
y_expected = np.array([[3.5], [4.0]])
self.assertAllClose(y_expected, y_actual)
def testRunCommandNewOutdir(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
x = np.array([[1], [2]])
x_notused = np.zeros((6, 3))
input_path = os.path.join(test.get_temp_dir(),
'testRunCommandNewOutdir_inputs.npz')
output_dir = os.path.join(test.get_temp_dir(), 'new_dir')
if os.path.isdir(output_dir):
shutil.rmtree(output_dir)
np.savez(input_path, x0=x, x1=x_notused)
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'serving_default', '--inputs', 'x=' + input_path + '[x0]', '--outdir',
output_dir
])
saved_model_cli.run(args)
y_actual = np.load(os.path.join(output_dir, 'y.npy'))
y_expected = np.array([[2.5], [3.0]])
self.assertAllClose(y_expected, y_actual)
def testRunCommandOutOverwrite(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
x = np.array([[1], [2]])
x_notused = np.zeros((6, 3))
input_path = os.path.join(test.get_temp_dir(),
'testRunCommandOutOverwrite_inputs.npz')
np.savez(input_path, x0=x, x1=x_notused)
output_file = os.path.join(test.get_temp_dir(), 'y.npy')
open(output_file, 'a').close()
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'serving_default', '--inputs', 'x=' + input_path + '[x0]', '--outdir',
test.get_temp_dir(), '--overwrite'
])
saved_model_cli.run(args)
y_actual = np.load(output_file)
y_expected = np.array([[2.5], [3.0]])
self.assertAllClose(y_expected, y_actual)
def testRunCommandInvalidInputKeyError(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'regress_x2_to_y3', '--input_exprs', 'x2=np.ones((3,1))'
])
with self.assertRaises(ValueError):
saved_model_cli.run(args)
def testRunCommandInputExamplesNotListError(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
output_dir = os.path.join(test.get_temp_dir(), 'new_dir')
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'regress_x_to_y', '--input_examples', 'inputs={"x":8.0,"x2":5.0}',
'--outdir', output_dir
])
with self.assertRaisesRegexp(ValueError, 'must be a list'):
saved_model_cli.run(args)
def testRunCommandInputExamplesFeatureValueNotListError(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
output_dir = os.path.join(test.get_temp_dir(), 'new_dir')
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'regress_x_to_y', '--input_examples', 'inputs=[{"x":8.0,"x2":5.0}]',
'--outdir', output_dir
])
with self.assertRaisesRegexp(ValueError, 'feature value must be a list'):
saved_model_cli.run(args)
def testRunCommandInputExamplesFeatureBadType(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
output_dir = os.path.join(test.get_temp_dir(), 'new_dir')
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'regress_x_to_y', '--input_examples', 'inputs=[{"x":[[1],[2]]}]',
'--outdir', output_dir
])
with self.assertRaisesRegexp(ValueError, 'is not supported'):
saved_model_cli.run(args)
def testRunCommandOutputFileExistError(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
x = np.array([[1], [2]])
x_notused = np.zeros((6, 3))
input_path = os.path.join(test.get_temp_dir(),
'testRunCommandOutOverwrite_inputs.npz')
np.savez(input_path, x0=x, x1=x_notused)
output_file = os.path.join(test.get_temp_dir(), 'y.npy')
open(output_file, 'a').close()
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'serving_default', '--inputs', 'x=' + input_path + '[x0]', '--outdir',
test.get_temp_dir()
])
with self.assertRaises(RuntimeError):
saved_model_cli.run(args)
def testRunCommandInputNotGivenError(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'serving_default'
])
with self.assertRaises(AttributeError):
saved_model_cli.run(args)
def testRunCommandWithDebuggerEnabled(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
x = np.array([[1], [2]])
x_notused = np.zeros((6, 3))
input_path = os.path.join(test.get_temp_dir(),
'testRunCommandNewOutdir_inputs.npz')
output_dir = os.path.join(test.get_temp_dir(), 'new_dir')
if os.path.isdir(output_dir):
shutil.rmtree(output_dir)
np.savez(input_path, x0=x, x1=x_notused)
args = self.parser.parse_args([
'run', '--dir', base_path, '--tag_set', 'serve', '--signature_def',
'serving_default', '--inputs', 'x=' + input_path + '[x0]', '--outdir',
output_dir, '--tf_debug'
])
def fake_wrapper_session(sess):
return sess
with test.mock.patch.object(local_cli_wrapper,
'LocalCLIDebugWrapperSession',
side_effect=fake_wrapper_session,
autospec=True) as fake:
saved_model_cli.run(args)
fake.assert_called_with(test.mock.ANY)
y_actual = np.load(os.path.join(output_dir, 'y.npy'))
y_expected = np.array([[2.5], [3.0]])
self.assertAllClose(y_expected, y_actual)
def testScanCommand(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
args = self.parser.parse_args(['scan', '--dir', base_path])
with captured_output() as (out, _):
saved_model_cli.scan(args)
output = out.getvalue().strip()
self.assertTrue('does not contain blacklisted ops' in output)
def testScanCommandFoundBlacklistedOp(self):
self.parser = saved_model_cli.create_parser()
base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
args = self.parser.parse_args(
['scan', '--dir', base_path, '--tag_set', 'serve'])
op_blacklist = saved_model_cli._OP_BLACKLIST
saved_model_cli._OP_BLACKLIST = set(['VariableV2'])
with captured_output() as (out, _):
saved_model_cli.scan(args)
saved_model_cli._OP_BLACKLIST = op_blacklist
output = out.getvalue().strip()
self.assertTrue('\'VariableV2\'' in output)
if __name__ == '__main__':
test.main()
|
apache-2.0
|
fraga/gnucash
|
src/optional/python-bindings/example_scripts/quotes_historic.py
|
13
|
2473
|
#!/usr/bin/env python
# quotes_historic.py -- Example Script to read historic quote data into gnucash
#
## @file
# @brief Example Script to read historic stock data into gnucash
# @author Peter Holtermann
# @date January 2011
# @ingroup python_bindings_examples
#
# Call the perl-script @code
# ./get_quotes.pl INTC
# @endcode first to fetch the quote data into the file INTC, which can then be imported into GnuCash using this script.

#
# For explanation of use have a look at the wiki:
# http://wiki.gnucash.org/wiki/Stocks/get_prices
#
from gnucash import Session, Account, Split
import gnucash
import datetime
from fractions import Fraction
from gnc_convenience import find_account
FILE = "./test.gnucash"
url = "xml://"+FILE
# Read data from file
f = open('INTC')
data = []
while 1:
tmp = f.readline()
if(len(tmp)<2):
break
data.append(tmp)
f.close()
stock_date = []
stock_price = []
for i in range(1,len(data)):
year = int(data[i].rsplit(',')[1].rsplit('/')[0])
month = int(data[i].rsplit(',')[1].rsplit('/')[1])
day = int(data[i].rsplit(',')[1].rsplit('/')[2])
stock_date.append(datetime.datetime(year,month,day))
stock_price.append(float(data[i].rsplit(',')[5]))
# Initialize Gnucash session
session = Session(url, True, False, False)
root = session.book.get_root_account()
book = session.book
account = book.get_root_account()
pdb = book.get_price_db()
comm_table = book.get_table()
ac = find_account(account,'Intel')[0]
stock = ac.GetCommodity()
# Add the prices
pdb = book.get_price_db()
if len(ac.GetSplitList())<1:
print 'Need at least one Split to get currency info ... '
raise SystemExit
cur = ac.GetSplitList()[0].GetParent().GetCurrency()
# Get stock data
pl = pdb.get_prices(stock,cur)
if len(pl)<1:
print 'Need at least one database entry to clone ...'
raise SystemExit
pl0 = pl[0]
for i in range(1,len(pl)):
pdb.remove_price(pl[i])
for i in range(0,len(stock_date)):
p_new = pl0.clone(book)
p_new = gnucash.GncPrice(instance=p_new)
print 'Adding',i,stock_date[i],stock_price[i]
p_new.set_time(stock_date[i])
v = p_new.get_value()
v.num = int(Fraction.from_float(stock_price[i]).limit_denominator(100000).numerator)
v.denom = int(Fraction.from_float(stock_price[i]).limit_denominator(100000).denominator)
p_new.set_value(v)
p_new.set_source("Finance::Quotes::Historic")
pdb.add_price(p_new)
# Clean up
session.save()
session.end()
session.destroy()
|
gpl-2.0
|
berjc/aus-senate-audit
|
aus_senate_audit/audits/bayesian_audit.py
|
1
|
5902
|
# -*- coding: utf-8 -*-
""" Implements the Bayesian Audit. """
from collections import Counter
from itertools import chain
from random import gammavariate
from random import seed as set_seed
from time import time
def get_new_ballot_weights(election, r):
""" Returns new ballot weights for the given election.
The new ballot weights are constructed using Gamma Variates to draw from a Dirichlet distribution over existing
ballots, based on existing ballot weights. The sum of the new ballot weights should be equal to :param:`r`
(approximately). Note that ballot weights are rounded down.
:param :class:`BaseSenateElection` election: The senate election to generate new ballot weights for.
:param int r: The sum of the new ballot weights.
:returns: The new ballot weights generated using Gamma Variates.
:rtype: dict
"""
new_ballot_weights = {}
total = 0
for ballot in election.get_ballots():
weight = election.get_ballot_weight(ballot)
new_ballot_weights[ballot] = gammavariate(weight, 1) if weight else 0
total += new_ballot_weights[ballot]
for ballot in election.get_ballots():
new_ballot_weights[ballot] = int(r * new_ballot_weights[ballot] / total)
return new_ballot_weights
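# Illustrative sketch (not part of the original audit code): normalising
# independent Gamma(weight, 1) draws, as done above, is a standard way to
# sample from a Dirichlet distribution. Assuming three ballots with weights
# 2, 3 and 5, one posterior draw of ballot shares looks like:
#
#   from random import gammavariate
#   weights = {'b1': 2, 'b2': 3, 'b3': 5}
#   draws = {b: gammavariate(w, 1) for b, w in weights.items()}
#   total = sum(draws.values())
#   shares = {b: d / total for b, d in draws.items()}  # shares sum to 1.0
#
# Multiplying each share by r and truncating to int gives the new weights.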
def audit(election, seed, unpopular_freq_threshold, stage_counter=0, alpha=0.05, trials=100, quick=False):
""" Runs a Bayesian audit on the given senate election.
:param :class:`BaseSenateElection` election: The senate election to audit.
:param int seed: The seed for the random number generator.
:param float unpopular_freq_threshold: The upper bound on the frequency of trials a candidate is elected in order
for the candidate to be deemed unpopular.
:param int stage_counter: The current audit stage (default: 0).
:param float alpha: The error tolerance for the given audit (default: 0.05).
:param int trials: The number of trials performed per sample (default: 100).
:param bool quick: A boolean indicating whether the audit should run to completion (True) or only run one stage
(False) (default: False).
"""
print(
'Audit of {} election.\n'.format(election.get_type()),
' Election ID: {}\n'.format(election.get_election_id()),
' Candidates: {}\n'.format(election.get_candidates()),
' Number of ballots cast: {}\n'.format(election.get_num_cast_ballots()),
' Number of seats being contested: {}\n'.format(election.get_num_seats()),
' Number of trials per sample: {}\n'.format(trials),
' Random number seed: {}'.format(seed),
)
start_time = time()
set_seed(seed)
# Cast one "prior" ballot for each candidate to establish a Bayesian prior. The prior ballot is a length-one partial
# ballot with just a first choice vote for that candidate.
for cid in election.get_candidate_ids():
election.add_ballot((cid,), 1)
# Mapping from candidates to the set of ballots that elected them.
candidate_to_ballots_map = {}
candidate_outcomes = None
done = False
while True:
stage_counter += 1
election.draw_ballots() # Increase sample of cast ballots.
print(
'\nAudit stage number: {}\n'.format(stage_counter),
' Sample size (including prior ballots): {}\n'.format(election.get_num_ballots_drawn()),
)
# -- Run trials in a Bayesian manner --
# Each outcome is a tuple of candidates who have been elected in lexicographical order (NOT the order in which
# they were elected).
print(' Performing {} Bayesian trials (posterior-based election simulations) in this stage.'.format(trials))
outcomes = []
for _ in range(trials):
new_ballot_weights = get_new_ballot_weights(election, election.get_num_cast_ballots())
outcome = election.get_outcome(new_ballot_weights)
for cid in outcome:
if cid not in candidate_to_ballots_map:
candidate_to_ballots_map[cid] = new_ballot_weights
outcomes.append(outcome)
best, freq = Counter(outcomes).most_common(1)[0]
print(
' Most common outcome ({} seats):\n'.format(election.get_num_seats()),
' {}\n'.format(best),
' Frequency of most common outcome: {} / {}'.format(freq, trials),
)
candidate_outcomes = Counter(chain(*outcomes))
print(
' Fraction present in outcome by candidate:\n {}'.format(
', '.join([
'{}: {}'.format(str(cid), cid_freq / trials)
for cid, cid_freq in sorted(candidate_outcomes.items(), key=lambda x: (x[1], x[0]))
]),
),
)
if freq >= trials * (1 - alpha):
print(
'Stopping because audit confirmed outcome:\n',
' {}\n'.format(best),
'Total number of ballots examined: {}'.format(election.get_num_ballots_drawn()),
)
done = True
break
if election.get_num_ballots_drawn() >= election.get_num_cast_ballots():
print('Audit has looked at all ballots. Done.')
done = True
break
if not quick:
break
if candidate_outcomes is not None and done:
for cid, cid_freq in sorted(candidate_outcomes.items(), key=lambda x: (x[1], x[0])):
if cid_freq / trials < unpopular_freq_threshold:
print(
' One set of ballots that elected low frequency '
'candidate {} which occurred in {} of {} trials\n'.format(str(cid), str(cid_freq), str(trials)),
' {}'.format(candidate_to_ballots_map[cid]),
)
print('Elapsed time: {} seconds.'.format(time() - start_time))
return done
|
apache-2.0
|
nemomobile-graveyard/mcompositor
|
tests/functional/test14.py
|
4
|
1543
|
#!/usr/bin/python
# Check that removal of _NET_WM_STATE_FULLSCREEN in _NET_WM_STATE is respected.
#* Test steps
# * show a fullscreen application window
# * verify that the window has fullscreen size
# * request removal of _NET_WM_STATE_FULLSCREEN for the window
#* Post-conditions
# * verify that the window now has non-fullscreen size (due to decorator)
import os, re, sys, time
if os.system('mcompositor-test-init.py'):
sys.exit(1)
def get_window_size(w):
ret = [0, 0]
size_re = re.compile('([0-9]+)x([0-9]+)x([0-9]+)')
fd = os.popen('windowstack v')
s = fd.read(5000)
for l in s.splitlines():
if re.search("%s " % w, l.strip()):
ret[0] = int(size_re.search(l.strip()).group(1))
ret[1] = int(size_re.search(l.strip()).group(2))
break
return ret
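# Illustrative example (assumed geometry format, based on size_re above): a
# 'windowstack v' line containing "864x480x32" for window w would yield
# ret == [864, 480].
#
#   m = re.search('([0-9]+)x([0-9]+)x([0-9]+)', '864x480x32')
#   assert (int(m.group(1)), int(m.group(2))) == (864, 480)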
(fs_w, fs_h) = [ int(x) for x in os.popen("windowctl D").readline().split() ]
# map fullscreen application window
fd = os.popen('windowctl fn')
old_win = fd.readline().strip()
time.sleep(1)
ret = 0
size = get_window_size(old_win)
if size[0] != fs_w or size[1] != fs_h:
print 'FAIL: old_win is not fullscreen'
ret = 1
# unfullscreen the application window
os.popen('windowctl N %s' % old_win)
time.sleep(1)
size = get_window_size(old_win)
if size[0] == fs_w and size[1] == fs_h:
print 'FAIL: old_win is fullscreen'
ret = 1
# cleanup
os.popen('pkill windowctl')
time.sleep(1)
if os.system('/usr/bin/gconftool-2 --type bool --set /desktop/meego/notifications/previews_enabled true'):
print 'cannot re-enable notifications'
sys.exit(ret)
|
lgpl-2.1
|
labordoc/labordoc-next
|
modules/bibindex/lib/bibindex_engine_unit_tests.py
|
2
|
6760
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for the indexing engine."""
__revision__ = \
"$Id$"
from invenio.importutils import lazy_import
from invenio.testutils import make_test_suite, run_test_suite, InvenioTestCase
bibindex_engine = lazy_import('invenio.bibindex_engine')
class TestListSetOperations(InvenioTestCase):
"""Tests for list set operations."""
def test_list_union(self):
"""bibindex engine - list union"""
self.assertEqual([1, 2, 3, 4],
bibindex_engine.list_union([1, 2, 3],
[1, 3, 4]))
class TestWashIndexTerm(InvenioTestCase):
"""Tests for washing index terms, useful for both searching and indexing."""
def test_wash_index_term_short(self):
"""bibindex engine - wash index term, short word"""
self.assertEqual("ellis",
bibindex_engine.wash_index_term("ellis"))
def test_wash_index_term_long(self):
"""bibindex engine - wash index term, long word"""
self.assertEqual(50*"e",
bibindex_engine.wash_index_term(1234*"e"))
def test_wash_index_term_case(self):
"""bibindex engine - wash index term, lower the case"""
self.assertEqual("ellis",
bibindex_engine.wash_index_term("Ellis"))
def test_wash_index_term_unicode(self):
"""bibindex engine - wash index term, unicode"""
self.assertEqual("ελληνικό αλφάβητο",
bibindex_engine.wash_index_term("Ελληνικό αλφάβητο"))
class TestGetWordsFromPhrase(InvenioTestCase):
"""Tests for getting words from phrase."""
def test_easy_phrase(self):
"""bibindex engine - getting words from `word1 word2' phrase"""
test_phrase = 'word1 word2'
l_words_expected = ['word1', 'word2']
l_words_obtained = bibindex_engine.get_words_from_phrase(test_phrase)
l_words_obtained.sort()
self.assertEqual(l_words_obtained, l_words_expected)
def test_stemming_phrase(self):
"""bibindex engine - getting stemmed words from l'anthropologie"""
test_phrase = "l'anthropologie"
l_words_not_expected = ['anthropolog', 'l', "l'anthropolog", "l'anthropologi"]
l_words_expected = ['anthropologi', 'l', "l'anthropologi"]
l_words_obtained = bibindex_engine.get_words_from_phrase(test_phrase, 'en')
l_words_obtained.sort()
self.assertNotEqual(l_words_obtained, l_words_not_expected)
self.assertEqual(l_words_obtained, l_words_expected)
def test_dashed_phrase(self):
"""bibindex engine - getting words from `word1-word2' phrase"""
test_phrase = 'word1-word2'
l_words_expected = ['word1', 'word1-word2', 'word2']
l_words_obtained = bibindex_engine.get_words_from_phrase(test_phrase)
l_words_obtained.sort()
self.assertEqual(l_words_obtained, l_words_expected)
def test_arXiv_good(self):
"""bibindex engine - getting words from `arXiv:1007.5048' phrase"""
test_phrase = 'arXiv:1007.5048'
l_words_expected = ['1007', '1007.5048', '5048', 'arxiv', 'arxiv:1007.5048']
l_words_obtained = bibindex_engine.get_words_from_phrase(test_phrase)
l_words_obtained.sort()
self.assertEqual(l_words_obtained, l_words_expected)
def test_arXiv_bad(self):
"""bibindex engine - getting words from `arXiv:1xy7.5z48' phrase"""
test_phrase = 'arXiv:1xy7.5z48'
l_words_expected = ['1xy7', '5z48', 'arxiv', 'arxiv:1xy7.5z48']
l_words_obtained = bibindex_engine.get_words_from_phrase(test_phrase)
l_words_obtained.sort()
self.assertEqual(l_words_obtained, l_words_expected)
class TestGetWordsFromDateTag(InvenioTestCase):
"""Tests for getting words for date-like tag."""
def test_dateindex_yyyy(self):
"""bibindex engine - index date-like tag, yyyy"""
self.assertEqual(["2010"],
bibindex_engine.get_words_from_date_tag("2010"))
def test_dateindex_yyyy_mm(self):
"""bibindex engine - index date-like tag, yyyy-mm"""
self.assertEqual(["2010-03", "2010"],
bibindex_engine.get_words_from_date_tag("2010-03"))
def test_dateindex_yyyy_mm_dd(self):
"""bibindex engine - index date-like tag, yyyy-mm-dd"""
self.assertEqual(["2010-03-08", "2010", "2010-03", ],
bibindex_engine.get_words_from_date_tag("2010-03-08"))
def test_dateindex_freetext(self):
"""bibindex engine - index date-like tag, yyyy-mm-dd"""
self.assertEqual(["dd", "mon", "yyyy"],
bibindex_engine.get_words_from_date_tag("dd mon yyyy"))
class TestGetAuthorFamilyNameWords(InvenioTestCase):
"""Tests for getting family name words from author names."""
def test_authornames_john_doe(self):
"""bibindex engine - get author family name words for John Doe"""
self.assertEqual(['doe',],
bibindex_engine.get_author_family_name_words_from_phrase('John Doe'))
def test_authornames_doe_john(self):
"""bibindex engine - get author family name words for Doe, John"""
self.assertEqual(['doe',],
bibindex_engine.get_author_family_name_words_from_phrase('Doe, John'))
def test_authornames_campbell_wilson(self):
"""bibindex engine - get author family name words for Campbell-Wilson, D"""
self.assertEqual(['campbell', 'wilson', 'campbell-wilson'],
bibindex_engine.get_author_family_name_words_from_phrase('Campbell-Wilson, D'))
TEST_SUITE = make_test_suite(TestListSetOperations,
TestWashIndexTerm,
TestGetWordsFromPhrase,
TestGetWordsFromDateTag,
TestGetAuthorFamilyNameWords)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
|
gpl-2.0
|
zarboz/Evita_UL_422-JB
|
tools/perf/scripts/python/net_dropmonitor.py
|
4235
|
1554
|
# Monitor the system for dropped packets and produce a report of drop locations and counts
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
drop_log = {}
kallsyms = []
def get_kallsyms_table():
global kallsyms
try:
f = open("/proc/kallsyms", "r")
linecount = 0
for line in f:
linecount = linecount+1
f.seek(0)
except:
return
j = 0
for line in f:
loc = int(line.split()[0], 16)
name = line.split()[2]
j = j +1
if ((j % 100) == 0):
print "\r" + str(j) + "/" + str(linecount),
kallsyms.append({ 'loc': loc, 'name' : name})
print "\r" + str(j) + "/" + str(linecount)
kallsyms.sort()
return
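# Note added for clarity (kallsyms format assumed from the parsing above): each
# /proc/kallsyms line has the form "<hex address> <type> <symbol name>", e.g.
#
#   c1045d20 T do_page_fault
#
# which is why split()[0] is taken as the location and split()[2] as the name.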
def get_sym(sloc):
loc = int(sloc)
for i in kallsyms:
if (i['loc'] >= loc):
return (i['name'], i['loc']-loc)
return (None, 0)
def print_drop_table():
print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT")
for i in drop_log.keys():
(sym, off) = get_sym(i)
if sym == None:
sym = i
print "%25s %25s %25s" % (sym, off, drop_log[i])
def trace_begin():
print "Starting trace (Ctrl-C to dump results)"
def trace_end():
print "Gathering kallsyms data"
get_kallsyms_table()
print_drop_table()
# called from perf, when it finds a corresponding event
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
slocation = str(location)
try:
drop_log[slocation] = drop_log[slocation] + 1
except:
drop_log[slocation] = 1
|
gpl-2.0
|
thaumos/ansible
|
test/sanity/validate-modules/utils.py
|
15
|
5660
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Matt Martz <matt@sivel.net>
# Copyright (C) 2015 Rackspace US, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ast
import os
import sys
from io import BytesIO, TextIOWrapper
import yaml
import yaml.reader
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import AnsibleModule
class AnsibleTextIOWrapper(TextIOWrapper):
def write(self, s):
super(AnsibleTextIOWrapper, self).write(to_text(s, self.encoding, errors='replace'))
def find_executable(executable, cwd=None, path=None):
"""Finds the full path to the executable specified"""
# This is mostly a copy from test/runner/lib/util.py. Should be removed once validate-modules has been integrated
# into ansible-test
match = None
real_cwd = os.getcwd()
if not cwd:
cwd = real_cwd
if os.path.dirname(executable):
target = os.path.join(cwd, executable)
if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK):
match = executable
else:
path = os.environ.get('PATH', os.path.defpath)
path_dirs = path.split(os.path.pathsep)
seen_dirs = set()
for path_dir in path_dirs:
if path_dir in seen_dirs:
continue
seen_dirs.add(path_dir)
if os.path.abspath(path_dir) == real_cwd:
path_dir = cwd
candidate = os.path.join(path_dir, executable)
if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
match = candidate
break
return match
def find_globals(g, tree):
"""Uses AST to find globals in an ast tree"""
for child in tree:
if hasattr(child, 'body') and isinstance(child.body, list):
find_globals(g, child.body)
elif isinstance(child, (ast.FunctionDef, ast.ClassDef)):
g.add(child.name)
continue
elif isinstance(child, ast.Assign):
try:
g.add(child.targets[0].id)
except (IndexError, AttributeError):
pass
elif isinstance(child, ast.Import):
g.add(child.names[0].name)
elif isinstance(child, ast.ImportFrom):
for name in child.names:
g_name = name.asname or name.name
if g_name == '*':
continue
g.add(g_name)
class CaptureStd():
"""Context manager to handle capturing stderr and stdout"""
def __enter__(self):
self.sys_stdout = sys.stdout
self.sys_stderr = sys.stderr
sys.stdout = self.stdout = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stdout.encoding)
sys.stderr = self.stderr = AnsibleTextIOWrapper(BytesIO(), encoding=self.sys_stderr.encoding)
return self
def __exit__(self, exc_type, exc_value, traceback):
sys.stdout = self.sys_stdout
sys.stderr = self.sys_stderr
def get(self):
"""Return ``(stdout, stderr)``"""
return self.stdout.buffer.getvalue(), self.stderr.buffer.getvalue()
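# Illustrative usage (not part of the original module): capture output produced
# while the context manager is active, then read it back as bytes.
#
#   with CaptureStd() as capture:
#       print('hello')
#       sys.stdout.flush()  # make sure the text wrapper pushes to the buffer
#   stdout_bytes, stderr_bytes = capture.get()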
def parse_yaml(value, lineno, module, name, load_all=False):
traces = []
errors = []
data = None
if load_all:
loader = yaml.safe_load_all
else:
loader = yaml.safe_load
try:
data = loader(value)
if load_all:
data = list(data)
except yaml.MarkedYAMLError as e:
e.problem_mark.line += lineno - 1
e.problem_mark.name = '%s.%s' % (module, name)
errors.append({
'msg': '%s is not valid YAML' % name,
'line': e.problem_mark.line + 1,
'column': e.problem_mark.column + 1
})
traces.append(e)
except yaml.reader.ReaderError as e:
traces.append(e)
# TODO: Better line/column detection
errors.append({
'msg': ('%s is not valid YAML. Character '
'0x%x at position %d.' % (name, e.character, e.position)),
'line': lineno
})
except yaml.YAMLError as e:
traces.append(e)
errors.append({
'msg': '%s is not valid YAML: %s: %s' % (name, type(e), e),
'line': lineno
})
return data, errors, traces
def is_empty(value):
"""Evaluate null like values excluding False"""
if value is False:
return False
return not bool(value)
def compare_unordered_lists(a, b):
"""Safe list comparisons
Supports:
- unordered lists
- unhashable elements
"""
return len(a) == len(b) and all(x in b for x in a)
class NoArgsAnsibleModule(AnsibleModule):
"""AnsibleModule that does not actually load params. This is used to get access to the
methods within AnsibleModule without having to fake a bunch of data
"""
def _load_params(self):
self.params = {'_ansible_selinux_special_fs': [], '_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False, '_ansible_check_mode': False}
|
gpl-3.0
|
nwiizo/workspace_2017
|
ansible-modules-core/cloud/rackspace/rax_cdb.py
|
13
|
8278
|
#!/usr/bin/python -tt
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: rax_cdb
short_description: create/delete or resize a Rackspace Cloud Databases instance
description:
- creates / deletes or resize a Rackspace Cloud Databases instance
and optionally waits for it to be 'running'. The name option needs to be
unique since it's used to identify the instance.
version_added: "1.8"
options:
name:
description:
- Name of the databases server instance
default: null
flavor:
description:
- flavor to use for the instance 1 to 6 (i.e. 512MB to 16GB)
default: 1
volume:
description:
- Volume size of the database 1-150GB
default: 2
cdb_type:
description:
- type of instance (i.e. MySQL, MariaDB, Percona)
default: MySQL
version_added: "2.0"
aliases: ['type']
cdb_version:
description:
- version of database (MySQL supports 5.1 and 5.6, MariaDB supports 10, Percona supports 5.6)
choices: ['5.1', '5.6', '10']
version_added: "2.0"
aliases: ['version']
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
wait:
description:
- wait for the instance to be in state 'running' before returning
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
author: "Simon JAILLET (@jails)"
extends_documentation_fragment: rackspace
'''
EXAMPLES = '''
- name: Build a Cloud Databases
gather_facts: False
tasks:
- name: Server build request
local_action:
module: rax_cdb
credentials: ~/.raxpub
region: IAD
name: db-server1
flavor: 1
volume: 2
cdb_type: MySQL
cdb_version: 5.6
wait: yes
state: present
register: rax_db_server
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def find_instance(name):
cdb = pyrax.cloud_databases
instances = cdb.list()
if instances:
for instance in instances:
if instance.name == name:
return instance
return False
def save_instance(module, name, flavor, volume, cdb_type, cdb_version, wait,
wait_timeout):
for arg, value in dict(name=name, flavor=flavor,
volume=volume, type=cdb_type, version=cdb_version
).iteritems():
if not value:
module.fail_json(msg='%s is required for the "rax_cdb"'
' module' % arg)
if not 1 <= volume <= 150:
module.fail_json(msg='volume is required to be between 1 and 150')
cdb = pyrax.cloud_databases
flavors = []
for item in cdb.list_flavors():
flavors.append(item.id)
if not (flavor in flavors):
module.fail_json(msg='nonexistent flavor reference "%s"' % str(flavor))
changed = False
instance = find_instance(name)
if not instance:
action = 'create'
try:
instance = cdb.create(name=name, flavor=flavor, volume=volume,
type=cdb_type, version=cdb_version)
except Exception as e:
module.fail_json(msg='%s' % e.message)
else:
changed = True
else:
action = None
if instance.volume.size != volume:
action = 'resize'
if instance.volume.size > volume:
module.fail_json(changed=False, action=action,
msg='The new volume size must be larger than '
'the current volume size',
cdb=rax_to_dict(instance))
instance.resize_volume(volume)
changed = True
if int(instance.flavor.id) != flavor:
action = 'resize'
pyrax.utils.wait_until(instance, 'status', 'ACTIVE',
attempts=wait_timeout)
instance.resize(flavor)
changed = True
if wait:
pyrax.utils.wait_until(instance, 'status', 'ACTIVE',
attempts=wait_timeout)
if wait and instance.status != 'ACTIVE':
module.fail_json(changed=changed, action=action,
cdb=rax_to_dict(instance),
msg='Timeout waiting for "%s" databases instance to '
'be created' % name)
module.exit_json(changed=changed, action=action, cdb=rax_to_dict(instance))
def delete_instance(module, name, wait, wait_timeout):
if not name:
module.fail_json(msg='name is required for the "rax_cdb" module')
changed = False
instance = find_instance(name)
if not instance:
module.exit_json(changed=False, action='delete')
try:
instance.delete()
except Exception as e:
module.fail_json(msg='%s' % e.message)
else:
changed = True
if wait:
pyrax.utils.wait_until(instance, 'status', 'SHUTDOWN',
attempts=wait_timeout)
if wait and instance.status != 'SHUTDOWN':
module.fail_json(changed=changed, action='delete',
cdb=rax_to_dict(instance),
msg='Timeout waiting for "%s" databases instance to '
'be deleted' % name)
module.exit_json(changed=changed, action='delete',
cdb=rax_to_dict(instance))
def rax_cdb(module, state, name, flavor, volume, cdb_type, cdb_version, wait,
wait_timeout):
# act on the state
if state == 'present':
save_instance(module, name, flavor, volume, cdb_type, cdb_version, wait,
wait_timeout)
elif state == 'absent':
delete_instance(module, name, wait, wait_timeout)
def main():
argument_spec = rax_argument_spec()
argument_spec.update(
dict(
name=dict(type='str', required=True),
flavor=dict(type='int', default=1),
volume=dict(type='int', default=2),
cdb_type=dict(type='str', default='MySQL', aliases=['type']),
cdb_version=dict(type='str', default='5.6', aliases=['version']),
state=dict(default='present', choices=['present', 'absent']),
wait=dict(type='bool', default=False),
wait_timeout=dict(type='int', default=300),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
required_together=rax_required_together(),
)
if not HAS_PYRAX:
module.fail_json(msg='pyrax is required for this module')
name = module.params.get('name')
flavor = module.params.get('flavor')
volume = module.params.get('volume')
cdb_type = module.params.get('cdb_type')
cdb_version = module.params.get('cdb_version')
state = module.params.get('state')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
setup_rax_module(module, pyrax)
rax_cdb(module, state, name, flavor, volume, cdb_type, cdb_version, wait, wait_timeout)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
if __name__ == '__main__':
main()
|
mit
|
ksrajkumar/openerp-6.1
|
openerp/tools/amount_to_text.py
|
15
|
7742
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#-------------------------------------------------------------
# French
#-------------------------------------------------------------
to_19_fr = ( 'zéro', 'un', 'deux', 'trois', 'quatre', 'cinq', 'six',
'sept', 'huit', 'neuf', 'dix', 'onze', 'douze', 'treize',
'quatorze', 'quinze', 'seize', 'dix-sept', 'dix-huit', 'dix-neuf' )
tens_fr = ( 'vingt', 'trente', 'quarante', 'Cinquante', 'Soixante', 'Soixante-dix', 'Quatre-vingts', 'Quatre-vingt Dix')
denom_fr = ( '',
'Mille', 'Millions', 'Milliards', 'Billions', 'Quadrillions',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Décillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Icosillion', 'Vigintillion' )
def _convert_nn_fr(val):
""" convert a value < 100 to French
"""
if val < 20:
return to_19_fr[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_fr)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_fr[val % 10]
return dcap
def _convert_nnn_fr(val):
""" convert a value < 1000 to french
special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_fr[rem] + ' Cent'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn_fr(mod)
return word
def french_number(val):
if val < 100:
return _convert_nn_fr(val)
if val < 1000:
return _convert_nnn_fr(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_fr))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_fr(l) + ' ' + denom_fr[didx]
if r > 0:
ret = ret + ', ' + french_number(r)
return ret
def amount_to_text_fr(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = french_number(abs(int(list[0])))
end_word = french_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and ' Cents' or ' Cent'
final_result = start_word +' '+units_name+' '+ end_word +' '+cents_name
return final_result
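# Illustrative usage (added for clarity): the integer and decimal parts are
# converted separately, so a call such as
#
#   amount_to_text_fr(1654.42, 'Euros')
#
# returns a string of the form
# '<integer part in words> Euros <decimal part in words> Cents'.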
#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------
to_19_nl = ( 'Nul', 'Een', 'Twee', 'Drie', 'Vier', 'Vijf', 'Zes',
'Zeven', 'Acht', 'Negen', 'Tien', 'Elf', 'Twaalf', 'Dertien',
'Veertien', 'Vijftien', 'Zestien', 'Zeventien', 'Achttien', 'Negentien' )
tens_nl = ( 'Twintig', 'Dertig', 'Veertig', 'Vijftig', 'Zestig', 'Zeventig', 'Tachtig', 'Negentig')
denom_nl = ( '',
'Duizend', 'Miljoen', 'Miljard', 'Triljoen', 'Quadriljoen',
'Quintillion', 'Sextiljoen', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn_nl(val):
""" convert a value < 100 to Dutch
"""
if val < 20:
return to_19_nl[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_nl)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19_nl[val % 10]
return dcap
def _convert_nnn_nl(val):
""" convert a value < 1000 to Dutch
special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19_nl[rem] + ' Honderd'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn_nl(mod)
return word
def dutch_number(val):
if val < 100:
return _convert_nn_nl(val)
if val < 1000:
return _convert_nnn_nl(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_nl))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn_nl(l) + ' ' + denom_nl[didx]
if r > 0:
ret = ret + ', ' + dutch_number(r)
return ret
def amount_to_text_nl(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = dutch_number(int(list[0]))
end_word = dutch_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and 'cent' or 'cent'
final_result = start_word +' '+units_name+' '+ end_word +' '+cents_name
return final_result
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
_translate_funcs = {'fr' : amount_to_text_fr, 'nl' : amount_to_text_nl}
def add_amount_to_text_function(lang, func):
_translate_funcs[lang] = func
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
""" Converts an integer to its textual representation, using the language set in the context if any.
Example::
1654: mille six cent cinquante-quatre.
"""
# if nbr > 1000000:
##TODO: use logger
# print "WARNING: number too large '%d', can't translate it!" % (nbr,)
# return str(nbr)
if not _translate_funcs.has_key(lang):
#TODO: use logger
print "WARNING: no translation function found for lang: '%s'" % (lang,)
#TODO: (default should be en) same as above
lang = 'fr'
return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", amount_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", amount_to_text(i, lang)
else:
print amount_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
markkerzner/nn_kove
|
hadoop/src/contrib/hod/hodlib/Common/xmlrpc.py
|
182
|
2374
|
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import xmlrpclib, time, random, signal
from hodlib.Common.util import hodInterrupt, HodInterruptException
class hodXRClient(xmlrpclib.ServerProxy):
def __init__(self, uri, transport=None, encoding=None, verbose=0,
allow_none=0, installSignalHandlers=1, retryRequests=True, timeOut=15):
xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, verbose,
allow_none)
self.__retryRequests = retryRequests
self.__timeOut = timeOut
if (installSignalHandlers!=0):
self.__set_alarm()
def __set_alarm(self):
def alarm_handler(sigNum, sigHandler):
raise Exception("XML-RPC socket timeout.")
signal.signal(signal.SIGALRM, alarm_handler)
def __request(self, methodname, params):
response = None
retryWaitTime = 5 + random.randint(0, 5)
for i in range(0, 30):
signal.alarm(self.__timeOut)
try:
response = self._ServerProxy__request(methodname, params)
signal.alarm(0)
break
except Exception:
if self.__retryRequests:
if hodInterrupt.isSet():
raise HodInterruptException()
time.sleep(retryWaitTime)
else:
raise Exception("hodXRClientTimeout")
return response
def __getattr__(self, name):
# magic method dispatcher
return xmlrpclib._Method(self.__request, name)
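# Illustrative usage (assumed, not part of the original module): the client is
# used like a plain xmlrpclib.ServerProxy, except that each call is retried up
# to 30 times with a 5-10 second back-off and a per-attempt alarm timeout.
#
#   client = hodXRClient('http://localhost:8030', timeOut=15)
#   result = client.some_remote_method('arg')  # hypothetical remote method name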
|
apache-2.0
|
Affirm/moto
|
moto/dynamodb2/comparisons.py
|
3
|
15123
|
from __future__ import unicode_literals
import re
import six
# TODO add tests for all of these
EQ_FUNCTION = lambda item_value, test_value: item_value == test_value # flake8: noqa
NE_FUNCTION = lambda item_value, test_value: item_value != test_value # flake8: noqa
LE_FUNCTION = lambda item_value, test_value: item_value <= test_value # flake8: noqa
LT_FUNCTION = lambda item_value, test_value: item_value < test_value # flake8: noqa
GE_FUNCTION = lambda item_value, test_value: item_value >= test_value # flake8: noqa
GT_FUNCTION = lambda item_value, test_value: item_value > test_value # flake8: noqa
COMPARISON_FUNCS = {
'EQ': EQ_FUNCTION,
'=': EQ_FUNCTION,
'NE': NE_FUNCTION,
'!=': NE_FUNCTION,
'LE': LE_FUNCTION,
'<=': LE_FUNCTION,
'LT': LT_FUNCTION,
'<': LT_FUNCTION,
'GE': GE_FUNCTION,
'>=': GE_FUNCTION,
'GT': GT_FUNCTION,
'>': GT_FUNCTION,
'NULL': lambda item_value: item_value is None,
'NOT_NULL': lambda item_value: item_value is not None,
'CONTAINS': lambda item_value, test_value: test_value in item_value,
'NOT_CONTAINS': lambda item_value, test_value: test_value not in item_value,
'BEGINS_WITH': lambda item_value, test_value: item_value.startswith(test_value),
'IN': lambda item_value, *test_values: item_value in test_values,
'BETWEEN': lambda item_value, lower_test_value, upper_test_value: lower_test_value <= item_value <= upper_test_value,
}
def get_comparison_func(range_comparison):
return COMPARISON_FUNCS.get(range_comparison)
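# Illustrative examples (not part of the original module):
#
#   get_comparison_func('BETWEEN')(5, 1, 10)             # True: 1 <= 5 <= 10
#   get_comparison_func('BEGINS_WITH')('foobar', 'foo')  # True
#   get_comparison_func('NULL')(None)                    # True (item value only)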
class RecursionStopIteration(StopIteration):
pass
def get_filter_expression(expr, names, values):
# Examples
# expr = 'Id > 5 AND attribute_exists(test) AND Id BETWEEN 5 AND 6 OR length < 6 AND contains(test, 1) AND 5 IN (4,5, 6) OR (Id < 5 AND 5 > Id)'
# expr = 'Id > 5 AND Subs < 7'
if names is None:
names = {}
if values is None:
values = {}
# Do substitutions
for key, value in names.items():
expr = expr.replace(key, value)
# Store correct types of values for use later
values_map = {}
for key, value in values.items():
if 'N' in value:
values_map[key] = float(value['N'])
elif 'BOOL' in value:
values_map[key] = value['BOOL']
elif 'S' in value:
values_map[key] = value['S']
elif 'NS' in value:
values_map[key] = tuple(value['NS'])
elif 'SS' in value:
values_map[key] = tuple(value['SS'])
elif 'L' in value:
values_map[key] = tuple(value['L'])
else:
raise NotImplementedError()
# Remove all spaces, tbf we could just skip them in the next step.
# The number of known options is really small so we can do a fair bit of cheating
expr = list(expr.strip())
# DodgyTokenisation stage 1
def is_value(val):
return val not in ('<', '>', '=', '(', ')')
def contains_keyword(val):
for kw in ('BETWEEN', 'IN', 'AND', 'OR', 'NOT'):
if kw in val:
return kw
return None
def is_function(val):
return val in ('attribute_exists', 'attribute_not_exists', 'attribute_type', 'begins_with', 'contains', 'size')
# Does the main part of splitting between sections of characters
tokens = []
stack = ''
while len(expr) > 0:
current_char = expr.pop(0)
if current_char == ' ':
if len(stack) > 0:
tokens.append(stack)
stack = ''
elif current_char == ',': # Split params ,
if len(stack) > 0:
tokens.append(stack)
stack = ''
elif is_value(current_char):
stack += current_char
kw = contains_keyword(stack)
if kw is not None:
# We have a kw in the stack, could be AND or something like 5AND
tmp = stack.replace(kw, '')
if len(tmp) > 0:
tokens.append(tmp)
tokens.append(kw)
stack = ''
else:
if len(stack) > 0:
tokens.append(stack)
tokens.append(current_char)
stack = ''
if len(stack) > 0:
tokens.append(stack)
def is_op(val):
return val in ('<', '>', '=', '>=', '<=', '<>', 'BETWEEN', 'IN', 'AND', 'OR', 'NOT')
# DodgyTokenisation stage 2, it groups together some elements to make RPN'ing it later easier.
def handle_token(token, tokens2, token_iterator):
# ok so this essentially groups up some tokens to make later parsing easier,
# when it encounters brackets it will recurse and then unrecurse when RecursionStopIteration is raised.
if token == ')':
raise RecursionStopIteration() # Should be recursive so this should work
elif token == '(':
temp_list = []
try:
while True:
next_token = six.next(token_iterator)
handle_token(next_token, temp_list, token_iterator)
except RecursionStopIteration:
pass # Continue
except StopIteration:
raise ValueError('Malformed filter expression, type1')
# Sigh, we only want to group a tuple if it doesn't contain operators
if any([is_op(item) for item in temp_list]):
# Its an expression
tokens2.append('(')
tokens2.extend(temp_list)
tokens2.append(')')
else:
tokens2.append(tuple(temp_list))
elif token == 'BETWEEN':
field = tokens2.pop()
# if values map contains a number, it would be a float
# so we need to int() it anyway
op1 = six.next(token_iterator)
op1 = int(values_map.get(op1, op1))
and_op = six.next(token_iterator)
assert and_op == 'AND'
op2 = six.next(token_iterator)
op2 = int(values_map.get(op2, op2))
tokens2.append(['between', field, op1, op2])
elif is_function(token):
function_list = [token]
lbracket = six.next(token_iterator)
assert lbracket == '('
next_token = six.next(token_iterator)
while next_token != ')':
function_list.append(next_token)
next_token = six.next(token_iterator)
tokens2.append(function_list)
else:
# Convert tokens back to real types
if token in values_map:
token = values_map[token]
# Need to join >= <= <>
if len(tokens2) > 0 and ((tokens2[-1] == '>' and token == '=') or (tokens2[-1] == '<' and token == '=') or (tokens2[-1] == '<' and token == '>')):
tokens2.append(tokens2.pop() + token)
else:
tokens2.append(token)
tokens2 = []
token_iterator = iter(tokens)
for token in token_iterator:
handle_token(token, tokens2, token_iterator)
# Start of the Shunting-Yard algorithm. <-- Proper beast algorithm!
def is_number(val):
return val not in ('<', '>', '=', '>=', '<=', '<>', 'BETWEEN', 'IN', 'AND', 'OR', 'NOT')
OPS = {'<': 5, '>': 5, '=': 5, '>=': 5, '<=': 5, '<>': 5, 'IN': 8, 'AND': 11, 'OR': 12, 'NOT': 10, 'BETWEEN': 9, '(': 100, ')': 100}
def shunting_yard(token_list):
output = []
op_stack = []
        # Converts the infix token list to reverse Polish notation, which removes any
        # ambiguity about the order in which operators are applied.
while len(token_list) > 0:
token = token_list.pop(0)
if token == '(':
op_stack.append(token)
elif token == ')':
while len(op_stack) > 0 and op_stack[-1] != '(':
output.append(op_stack.pop())
lbracket = op_stack.pop()
assert lbracket == '('
elif is_number(token):
output.append(token)
else:
# Must be operator kw
# Cheat, NOT is our only RIGHT associative operator, should really have dict of operator associativity
while len(op_stack) > 0 and OPS[op_stack[-1]] <= OPS[token] and op_stack[-1] != 'NOT':
output.append(op_stack.pop())
op_stack.append(token)
while len(op_stack) > 0:
output.append(op_stack.pop())
return output
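    # For illustration (hedged), an infix token list such as
    #   ['Id', '>', 5.0, 'AND', 'Subs', '<', 7.0]
    # comes out in reverse Polish notation as
    #   ['Id', 5.0, '>', 'Subs', 7.0, '<', 'AND']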
output = shunting_yard(tokens2)
# Hacky function to convert dynamo functions (which are represented as lists) to their Class equivalent
def to_func(val):
if isinstance(val, list):
func_name = val.pop(0)
# Expand rest of the list to arguments
val = FUNC_CLASS[func_name](*val)
return val
    # Simple reverse Polish notation execution. Builds up a nested filter object.
    # The filter object then takes a dynamo item and returns true/false.
stack = []
for token in output:
if is_op(token):
op_cls = OP_CLASS[token]
if token == 'NOT':
op1 = stack.pop()
op2 = True
else:
op2 = stack.pop()
op1 = stack.pop()
stack.append(op_cls(op1, op2))
else:
stack.append(to_func(token))
result = stack.pop(0)
if len(stack) > 0:
raise ValueError('Malformed filter expression, type2')
return result
class Op(object):
"""
Base class for a FilterExpression operator
"""
OP = ''
def __init__(self, lhs, rhs):
self.lhs = lhs
self.rhs = rhs
def _lhs(self, item):
"""
:type item: moto.dynamodb2.models.Item
"""
lhs = self.lhs
if isinstance(self.lhs, (Op, Func)):
lhs = self.lhs.expr(item)
elif isinstance(self.lhs, six.string_types):
try:
lhs = item.attrs[self.lhs].cast_value
except Exception:
pass
return lhs
def _rhs(self, item):
rhs = self.rhs
if isinstance(self.rhs, (Op, Func)):
rhs = self.rhs.expr(item)
elif isinstance(self.rhs, six.string_types):
try:
rhs = item.attrs[self.rhs].cast_value
except Exception:
pass
return rhs
def expr(self, item):
return True
def __repr__(self):
return '({0} {1} {2})'.format(self.lhs, self.OP, self.rhs)
class Func(object):
"""
Base class for a FilterExpression function
"""
FUNC = 'Unknown'
def expr(self, item):
return True
def __repr__(self):
        return '{0}(...)'.format(self.FUNC)
class OpNot(Op):
OP = 'NOT'
def expr(self, item):
lhs = self._lhs(item)
return not lhs
def __str__(self):
return '({0} {1})'.format(self.OP, self.lhs)
class OpAnd(Op):
OP = 'AND'
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
return lhs and rhs
class OpLessThan(Op):
OP = '<'
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
return lhs < rhs
class OpGreaterThan(Op):
OP = '>'
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
return lhs > rhs
class OpEqual(Op):
OP = '='
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
return lhs == rhs
class OpNotEqual(Op):
OP = '<>'
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
        return lhs != rhs
class OpLessThanOrEqual(Op):
OP = '<='
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
return lhs <= rhs
class OpGreaterThanOrEqual(Op):
OP = '>='
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
return lhs >= rhs
class OpOr(Op):
OP = 'OR'
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
return lhs or rhs
class OpIn(Op):
OP = 'IN'
def expr(self, item):
lhs = self._lhs(item)
rhs = self._rhs(item)
return lhs in rhs
class FuncAttrExists(Func):
FUNC = 'attribute_exists'
def __init__(self, attribute):
self.attr = attribute
def expr(self, item):
return self.attr in item.attrs
class FuncAttrNotExists(Func):
FUNC = 'attribute_not_exists'
def __init__(self, attribute):
self.attr = attribute
def expr(self, item):
return self.attr not in item.attrs
class FuncAttrType(Func):
FUNC = 'attribute_type'
def __init__(self, attribute, _type):
self.attr = attribute
self.type = _type
def expr(self, item):
return self.attr in item.attrs and item.attrs[self.attr].type == self.type
class FuncBeginsWith(Func):
FUNC = 'begins_with'
def __init__(self, attribute, substr):
self.attr = attribute
self.substr = substr
def expr(self, item):
return self.attr in item.attrs and item.attrs[self.attr].type == 'S' and item.attrs[self.attr].value.startswith(self.substr)
class FuncContains(Func):
FUNC = 'contains'
def __init__(self, attribute, operand):
self.attr = attribute
self.operand = operand
def expr(self, item):
if self.attr not in item.attrs:
return False
if item.attrs[self.attr].type in ('S', 'SS', 'NS', 'BS', 'L', 'M'):
return self.operand in item.attrs[self.attr].value
return False
class FuncSize(Func):
    FUNC = 'size'
def __init__(self, attribute):
self.attr = attribute
def expr(self, item):
if self.attr not in item.attrs:
raise ValueError('Invalid attribute name {0}'.format(self.attr))
if item.attrs[self.attr].type in ('S', 'SS', 'NS', 'B', 'BS', 'L', 'M'):
return len(item.attrs[self.attr].value)
raise ValueError('Invalid filter expression')
class FuncBetween(Func):
FUNC = 'between'
def __init__(self, attribute, start, end):
self.attr = attribute
self.start = start
self.end = end
def expr(self, item):
if self.attr not in item.attrs:
raise ValueError('Invalid attribute name {0}'.format(self.attr))
return self.start <= item.attrs[self.attr].cast_value <= self.end
OP_CLASS = {
'NOT': OpNot,
'AND': OpAnd,
'OR': OpOr,
'IN': OpIn,
'<': OpLessThan,
'>': OpGreaterThan,
'<=': OpLessThanOrEqual,
'>=': OpGreaterThanOrEqual,
'=': OpEqual,
'<>': OpNotEqual
}
FUNC_CLASS = {
'attribute_exists': FuncAttrExists,
'attribute_not_exists': FuncAttrNotExists,
'attribute_type': FuncAttrType,
'begins_with': FuncBeginsWith,
'contains': FuncContains,
'size': FuncSize,
'between': FuncBetween
}
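# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of how get_filter_expression() above can be used: it
# builds a nested Op/Func tree from a DynamoDB-style filter expression. The
# expected output and the Item evaluation step are assumptions based on the
# classes defined in this module, not documented moto behaviour.
if __name__ == '__main__':
    filter_expr = get_filter_expression(
        'Id > :low AND attribute_exists(Name)',
        names=None,
        values={':low': {'N': '5'}},
    )
    # Should print something like: ((Id > 5.0) AND attribute_exists(...))
    print(filter_expr)
    # filter_expr.expr(item) would then evaluate the expression against a
    # moto.dynamodb2.models.Item instance and return True/False.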
|
apache-2.0
|
petricm/DIRAC
|
FrameworkSystem/scripts/dirac-proxy-destroy.py
|
7
|
4939
|
#!/usr/bin/env python
"""
command line tool to remove local and remote proxies
"""
import sys
import os
import DIRAC
from DIRAC import gLogger, S_OK
from DIRAC.Core.Security import Locations
from DIRAC.Core.Base import Script
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
from DIRAC.Core.Security import ProxyInfo
from DIRAC.ConfigurationSystem.Client.Helpers import Registry
__RCSID__ = "$Id$"
class Params( object ):
"""
handles input options for dirac-proxy-destroy
"""
def __init__( self ):
"""
creates a Params class with default values
"""
self.vos = []
self.delete_all = False
def addVO( self, voname ):
"""
adds a VO to be deleted from remote proxies
"""
self.vos.append( voname )
return S_OK()
def setDeleteAll( self, _ ):
"""
deletes local and remote proxies
"""
self.delete_all = True
return S_OK()
def needsValidProxy( self ):
"""
returns true if any remote operations are required
"""
return self.vos or self.delete_all
  # note: the trailing ':' and '=' mark switches that take a value
def registerCLISwitches( self ):
"""
add options to dirac option parser
"""
Script.setUsageMessage( "Script to delete a dirac proxy. Default: delete local proxy only." )
Script.registerSwitch( "a", "all", "Delete the local and all uploaded proxies (the nuclear option)", self.setDeleteAll )
Script.registerSwitch( "v:", "vo=", "Delete uploaded proxy for vo name given", self.addVO )
def getProxyGroups():
"""
Returns a set of all remote proxy groups stored on the dirac server for the user invoking the command.
"""
proxies = gProxyManager.getUserProxiesInfo()
if not proxies[ 'OK' ]:
raise RuntimeError( 'Could not retrieve uploaded proxy info.' )
user_groups = set()
for dn in proxies[ 'Value' ]:
dn_groups = set( proxies[ 'Value' ][ dn ].keys() )
user_groups.update( dn_groups )
return user_groups
def mapVoToGroups( voname ):
"""
Returns all groups available for a given VO as a set.
"""
vo_dict = Registry.getGroupsForVO( voname )
if not vo_dict[ 'OK' ]:
raise RuntimeError( 'Could not retrieve groups for vo %s.' % voname )
return set( vo_dict[ 'Value' ] )
def deleteRemoteProxy( userdn, vogroup ):
"""
  Deletes the proxy for a vogroup for the user invoking this function.
  Logs a notice on success and an error on failure.
"""
rpcClient = RPCClient( "Framework/ProxyManager" )
retVal = rpcClient.deleteProxyBundle( [ ( userdn, vogroup ) ] )
if retVal[ 'OK' ]:
gLogger.notice( 'Deleted proxy for %s.' % vogroup )
else:
gLogger.error( 'Failed to delete proxy for %s.' % vogroup )
def deleteLocalProxy( proxyLoc ):
"""
Deletes the local proxy.
  Logs an error if the proxy file cannot be removed.
"""
try:
os.unlink( proxyLoc )
except IOError:
gLogger.error( 'IOError: Failed to delete local proxy.' )
return
except OSError:
gLogger.error( 'OSError: Failed to delete local proxy.' )
return
gLogger.notice( 'Local proxy deleted.' )
def main():
"""
main program entry point
"""
options = Params()
options.registerCLISwitches()
Script.parseCommandLine( ignoreErrors = True )
if options.delete_all and options.vos:
gLogger.error( "-a and -v options are mutually exclusive. Please pick one or the other." )
return 1
proxyLoc = Locations.getDefaultProxyLocation()
if not os.path.exists( proxyLoc ):
gLogger.error( "No local proxy found in %s, exiting." % proxyLoc )
return 1
result = ProxyInfo.getProxyInfo( proxyLoc, True )
if not result[ 'OK' ]:
raise RuntimeError( 'Failed to get local proxy info.' )
if result[ 'Value' ][ 'secondsLeft' ] < 60 and options.needsValidProxy():
raise RuntimeError( 'Lifetime of local proxy too short, please renew proxy.' )
  userDN = result[ 'Value' ][ 'identity' ]
if options.delete_all:
# delete remote proxies
remote_groups = getProxyGroups()
if not remote_groups:
gLogger.notice( 'No remote proxies found.' )
for vo_group in remote_groups:
deleteRemoteProxy( userDN, vo_group )
# delete local proxy
deleteLocalProxy( proxyLoc )
elif options.vos:
vo_groups = set()
for voname in options.vos:
vo_groups.update(mapVoToGroups( voname ) )
# filter set of all groups to only contain groups for which there is a user proxy
user_groups = getProxyGroups()
vo_groups.intersection_update( user_groups )
if not vo_groups:
gLogger.notice( 'You have no proxies registered for any of the specified VOs.' )
for group in vo_groups:
deleteRemoteProxy( userDN, group )
else:
deleteLocalProxy( proxyLoc )
return 0
if __name__ == "__main__":
try:
retval = main()
DIRAC.exit( retval )
except RuntimeError as rtError:
gLogger.error( 'Operation failed: %s' % str( rtError ) )
DIRAC.exit( 1 )
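# Illustrative invocations (hedged; the exact option spelling comes from
# registerCLISwitches above):
#   dirac-proxy-destroy              # delete the local proxy only
#   dirac-proxy-destroy -a           # also delete all uploaded proxies
#   dirac-proxy-destroy -v myvo      # delete the uploaded proxy for VO 'myvo'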
|
gpl-3.0
|
snakeleon/YouCompleteMe-x86
|
third_party/ycmd/third_party/JediHTTP/vendor/jedi/jedi/refactoring.py
|
49
|
7033
|
"""
Introduce some basic refactoring functions to |jedi|. This module is still in a
very early development stage and needs much testing and improvement.
.. warning:: I won't do too much here, but if anyone wants to step in, please
do. Refactoring is none of my priorities
It uses the |jedi| `API <plugin-api.html>`_ and supports currently the
following functions (sometimes bug-prone):
- rename
- extract variable
- inline variable
"""
import difflib
from jedi import common
from jedi.evaluate import helpers
from jedi.parser import tree as pt
class Refactoring(object):
def __init__(self, change_dct):
"""
:param change_dct: dict(old_path=(new_path, old_lines, new_lines))
"""
self.change_dct = change_dct
def old_files(self):
dct = {}
for old_path, (new_path, old_l, new_l) in self.change_dct.items():
            dct[old_path] = '\n'.join(old_l)
return dct
def new_files(self):
dct = {}
for old_path, (new_path, old_l, new_l) in self.change_dct.items():
dct[new_path] = '\n'.join(new_l)
return dct
def diff(self):
texts = []
for old_path, (new_path, old_l, new_l) in self.change_dct.items():
if old_path:
udiff = difflib.unified_diff(old_l, new_l)
else:
udiff = difflib.unified_diff(old_l, new_l, old_path, new_path)
texts.append('\n'.join(udiff))
return '\n'.join(texts)
def rename(script, new_name):
""" The `args` / `kwargs` params are the same as in `api.Script`.
:param operation: The refactoring operation to execute.
:type operation: str
:type source: str
:return: list of changed lines/changed files
"""
return Refactoring(_rename(script.usages(), new_name))
def _rename(names, replace_str):
""" For both rename and inline. """
order = sorted(names, key=lambda x: (x.module_path, x.line, x.column),
reverse=True)
def process(path, old_lines, new_lines):
if new_lines is not None: # goto next file, save last
dct[path] = path, old_lines, new_lines
dct = {}
current_path = object()
new_lines = old_lines = None
for name in order:
if name.in_builtin_module():
continue
if current_path != name.module_path:
current_path = name.module_path
process(current_path, old_lines, new_lines)
if current_path is not None:
# None means take the source that is a normal param.
with open(current_path) as f:
source = f.read()
new_lines = common.splitlines(common.source_to_unicode(source))
old_lines = new_lines[:]
nr, indent = name.line, name.column
line = new_lines[nr - 1]
new_lines[nr - 1] = line[:indent] + replace_str + \
line[indent + len(name.name):]
process(current_path, old_lines, new_lines)
return dct
def extract(script, new_name):
""" The `args` / `kwargs` params are the same as in `api.Script`.
:param operation: The refactoring operation to execute.
:type operation: str
:type source: str
:return: list of changed lines/changed files
"""
new_lines = common.splitlines(common.source_to_unicode(script.source))
old_lines = new_lines[:]
user_stmt = script._parser.user_stmt()
# TODO care for multiline extracts
dct = {}
if user_stmt:
pos = script._pos
line_index = pos[0] - 1
arr, index = helpers.array_for_pos(user_stmt, pos)
if arr is not None:
start_pos = arr[index].start_pos
end_pos = arr[index].end_pos
# take full line if the start line is different from end line
e = end_pos[1] if end_pos[0] == start_pos[0] else None
start_line = new_lines[start_pos[0] - 1]
text = start_line[start_pos[1]:e]
for l in range(start_pos[0], end_pos[0] - 1):
                text += '\n' + new_lines[l]
if e is None:
end_line = new_lines[end_pos[0] - 1]
text += '\n' + end_line[:end_pos[1]]
# remove code from new lines
t = text.lstrip()
del_start = start_pos[1] + len(text) - len(t)
text = t.rstrip()
del_end = len(t) - len(text)
if e is None:
new_lines[end_pos[0] - 1] = end_line[end_pos[1] - del_end:]
e = len(start_line)
else:
e = e - del_end
start_line = start_line[:del_start] + new_name + start_line[e:]
new_lines[start_pos[0] - 1] = start_line
new_lines[start_pos[0]:end_pos[0] - 1] = []
# add parentheses in multiline case
open_brackets = ['(', '[', '{']
close_brackets = [')', ']', '}']
if '\n' in text and not (text[0] in open_brackets and text[-1] ==
close_brackets[open_brackets.index(text[0])]):
text = '(%s)' % text
# add new line before statement
indent = user_stmt.start_pos[1]
new = "%s%s = %s" % (' ' * indent, new_name, text)
new_lines.insert(line_index, new)
dct[script.path] = script.path, old_lines, new_lines
return Refactoring(dct)
def inline(script):
"""
:type script: api.Script
"""
new_lines = common.splitlines(common.source_to_unicode(script.source))
dct = {}
definitions = script.goto_assignments()
with common.ignored(AssertionError):
assert len(definitions) == 1
stmt = definitions[0]._definition
usages = script.usages()
inlines = [r for r in usages
if not stmt.start_pos <= (r.line, r.column) <= stmt.end_pos]
inlines = sorted(inlines, key=lambda x: (x.module_path, x.line, x.column),
reverse=True)
expression_list = stmt.expression_list()
# don't allow multiline refactorings for now.
assert stmt.start_pos[0] == stmt.end_pos[0]
index = stmt.start_pos[0] - 1
line = new_lines[index]
replace_str = line[expression_list[0].start_pos[1]:stmt.end_pos[1] + 1]
replace_str = replace_str.strip()
# tuples need parentheses
    if expression_list and isinstance(expression_list[0], pt.Array):
arr = expression_list[0]
if replace_str[0] not in ['(', '[', '{'] and len(arr) > 1:
replace_str = '(%s)' % replace_str
# if it's the only assignment, remove the statement
if len(stmt.get_defined_names()) == 1:
line = line[:stmt.start_pos[1]] + line[stmt.end_pos[1]:]
dct = _rename(inlines, replace_str)
# remove the empty line
new_lines = dct[script.path][2]
if line.strip():
new_lines[index] = line
else:
new_lines.pop(index)
return Refactoring(dct)
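# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of driving rename() above. It assumes a jedi release
# that still ships this module and the old jedi.Script(source, line, column, path)
# signature; the temporary file and cursor position are made up for the example.
if __name__ == '__main__':
    import os
    import tempfile
    import jedi
    source = "def foo():\n    return 1\n\nfoo()\n"
    handle, path = tempfile.mkstemp(suffix='.py')
    try:
        with os.fdopen(handle, 'w') as f:
            f.write(source)
        script = jedi.Script(source, 1, 4, path)   # cursor on 'foo'
        refactoring_obj = rename(script, 'bar')    # returns a Refactoring object
        print(refactoring_obj.diff())              # unified diff of the change
    finally:
        os.remove(path)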
|
gpl-3.0
|
ofayans/freeipa
|
ipatests/test_xmlrpc/test_privilege_plugin.py
|
8
|
14607
|
# Authors:
# Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2010 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test the `ipaserver/plugins/privilege.py` module.
"""
from ipalib import api, errors
from ipatests.test_xmlrpc import objectclasses
from ipatests.test_xmlrpc.xmlrpc_test import Declarative
from ipapython.dn import DN
import pytest
permission1 = u'testperm'
permission1_dn = DN(('cn',permission1),
api.env.container_permission,api.env.basedn)
permission2 = u'testperm2'
permission2_dn = DN(('cn',permission2),
api.env.container_permission,api.env.basedn)
privilege1 = u'testpriv1'
privilege1_dn = DN(('cn',privilege1),
api.env.container_privilege,api.env.basedn)
users_dn = DN(api.env.container_user, api.env.basedn)
@pytest.mark.tier1
class test_privilege(Declarative):
cleanup_commands = [
('permission_del', [permission1], {}),
('permission_del', [permission2], {}),
('privilege_del', [privilege1], {}),
]
tests = [
dict(
desc='Try to retrieve non-existent %r' % privilege1,
command=('privilege_show', [privilege1], {}),
expected=errors.NotFound(
reason=u'%s: privilege not found' % privilege1),
),
dict(
desc='Try to update non-existent %r' % privilege1,
command=('privilege_mod', [privilege1], dict(description=u'Foo')),
expected=errors.NotFound(
reason=u'%s: privilege not found' % privilege1),
),
dict(
desc='Try to delete non-existent %r' % privilege1,
command=('privilege_del', [privilege1], {}),
expected=errors.NotFound(
reason=u'%s: privilege not found' % privilege1),
),
dict(
desc='Search for non-existent %r' % privilege1,
command=('privilege_find', [privilege1], {}),
expected=dict(
count=0,
truncated=False,
summary=u'0 privileges matched',
result=[],
),
),
dict(
desc='Create %r' % permission1,
command=(
'permission_add', [permission1], dict(
type=u'user',
ipapermright=[u'add', u'delete'],
)
),
expected=dict(
value=permission1,
summary=u'Added permission "%s"' % permission1,
result=dict(
dn=permission1_dn,
cn=[permission1],
objectclass=objectclasses.permission,
type=[u'user'],
ipapermright=[u'add', u'delete'],
ipapermbindruletype=[u'permission'],
ipapermissiontype=[u'SYSTEM', u'V2'],
ipapermlocation=[users_dn],
),
),
),
dict(
desc='Create %r' % privilege1,
command=('privilege_add', [privilege1],
dict(description=u'privilege desc. 1')
),
expected=dict(
value=privilege1,
summary=u'Added privilege "%s"' % privilege1,
result=dict(
dn=privilege1_dn,
cn=[privilege1],
description=[u'privilege desc. 1'],
objectclass=objectclasses.privilege,
),
),
),
dict(
desc='Add permission %r to privilege %r' % (permission1, privilege1),
command=('privilege_add_permission', [privilege1],
dict(permission=permission1)
),
expected=dict(
completed=1,
failed=dict(
member=dict(
permission=[],
),
),
result={
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
'memberof_permission': [permission1],
}
),
),
dict(
desc='Retrieve %r' % privilege1,
command=('privilege_show', [privilege1], {}),
expected=dict(
value=privilege1,
summary=None,
result={
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
'memberof_permission': [permission1],
},
),
),
dict(
desc='Search for %r with members' % privilege1,
command=('privilege_find', [privilege1], {'no_members': False}),
expected=dict(
count=1,
truncated=False,
summary=u'1 privilege matched',
result=[
{
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
'memberof_permission': [permission1],
},
],
),
),
dict(
desc='Search for %r' % privilege1,
command=('privilege_find', [privilege1], {}),
expected=dict(
count=1,
truncated=False,
summary=u'1 privilege matched',
result=[
{
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
},
],
),
),
dict(
desc='Search for %r with members' % privilege1,
command=('privilege_find', [privilege1], {'no_members': False}),
expected=dict(
count=1,
truncated=False,
summary=u'1 privilege matched',
result=[
{
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
'memberof_permission': [permission1],
},
],
),
),
dict(
desc='Search for %r' % privilege1,
command=('privilege_find', [privilege1], {}),
expected=dict(
count=1,
truncated=False,
summary=u'1 privilege matched',
result=[
{
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
},
],
),
),
dict(
desc='Create %r' % permission2,
command=(
'permission_add', [permission2], dict(
type=u'user',
ipapermright=u'write',
)
),
expected=dict(
value=permission2,
summary=u'Added permission "%s"' % permission2,
result=dict(
dn=permission2_dn,
cn=[permission2],
objectclass=objectclasses.permission,
type=[u'user'],
ipapermright=[u'write'],
ipapermbindruletype=[u'permission'],
ipapermissiontype=[u'SYSTEM', u'V2'],
ipapermlocation=[users_dn],
),
),
),
dict(
desc='Add permission %r to privilege %r' % (permission2, privilege1),
command=('privilege_add_permission', [privilege1],
dict(permission=permission2)
),
expected=dict(
completed=1,
failed=dict(
member=dict(
permission=[],
),
),
result={
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
'memberof_permission': [permission1, permission2],
}
),
),
dict(
desc='Add permission %r to privilege %r again' % (permission2, privilege1),
command=('privilege_add_permission', [privilege1],
dict(permission=permission2)
),
expected=dict(
completed=0,
failed=dict(
member=dict(
permission=[(u'testperm2', u'This entry is already a member'),],
),
),
result={
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
'memberof_permission': [permission1, permission2],
}
),
),
dict(
            desc='Search for %r with members' % privilege1,
command=('privilege_find', [privilege1], {'no_members': False}),
expected=dict(
count=1,
truncated=False,
summary=u'1 privilege matched',
result=[
{
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
'memberof_permission': [permission1, permission2],
},
],
),
),
dict(
desc='Search for %r' % privilege1,
command=('privilege_find', [privilege1], {}),
expected=dict(
count=1,
truncated=False,
summary=u'1 privilege matched',
result=[
{
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'privilege desc. 1'],
},
],
),
),
dict(
desc='Update %r' % privilege1,
command=(
'privilege_mod', [privilege1], dict(description=u'New desc 1')
),
expected=dict(
value=privilege1,
summary=u'Modified privilege "%s"' % privilege1,
result=dict(
cn=[privilege1],
description=[u'New desc 1'],
memberof_permission=[permission1, permission2],
),
),
),
dict(
desc='Remove permission from %r' % privilege1,
command=('privilege_remove_permission', [privilege1],
dict(permission=permission1),
),
expected=dict(
completed=1,
failed=dict(
member=dict(
permission=[],
),
),
result={
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'New desc 1'],
'memberof_permission': [permission2],
}
),
),
dict(
desc='Remove permission from %r again' % privilege1,
command=('privilege_remove_permission', [privilege1],
dict(permission=permission1),
),
expected=dict(
completed=0,
failed=dict(
member=dict(
permission=[(u'testperm', u'This entry is not a member'),],
),
),
result={
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'New desc 1'],
'memberof_permission': [permission2],
}
),
),
dict(
desc='Add zero permissions to %r' % privilege1,
command=('privilege_add_permission', [privilege1],
dict(permission=None),
),
expected=dict(
completed=0,
failed=dict(
member=dict(
permission=[],
),
),
result={
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'New desc 1'],
'memberof_permission': [permission2],
}
),
),
dict(
desc='Remove zero permissions from %r' % privilege1,
command=('privilege_remove_permission', [privilege1],
dict(permission=None),
),
expected=dict(
completed=0,
failed=dict(
member=dict(
permission=[],
),
),
result={
'dn': privilege1_dn,
'cn': [privilege1],
'description': [u'New desc 1'],
'memberof_permission': [permission2],
}
),
),
dict(
desc='Delete %r' % privilege1,
command=('privilege_del', [privilege1], {}),
expected=dict(
result=dict(failed=[]),
value=[privilege1],
summary=u'Deleted privilege "%s"' % privilege1,
)
),
]
|
gpl-3.0
|
zenofewords/zenofewords
|
tests/utils.py
|
1
|
1705
|
from copy import copy
from django.conf import settings
from django.urls import reverse
def get_permissions(response_mapping, custom_mapping):
"""
Build permission mappings.
:param response_mapping: usually a predefined permission template (FORBIDDEN, NOT_FOUND, etc.)
:type response_mapping: dict
:param custom_mapping: key/value pairs which need to be customised
:type custom_mapping: dict
:returns: a new response method and status code mapping
:rtype: dict
"""
response_mapping = copy(response_mapping)
response_mapping.update(custom_mapping)
return response_mapping
def assert_permissions(client_type, response_code_mapping, client_mapping, url_reverse):
"""
Test URL response depending on client type.
:param client_type: type of client (anonymous, user, admin, etc.)
:type client_type: string
:param response_code_mapping: request type with a matching response code
:type response_code_mapping: dict
:param client_mapping: a fixture that contains client types
:type client_mapping: dict
:param url_reverse: tuple of reverse strings for URLs which receive requests
:type url_reverse: tuple
"""
for method in response_code_mapping.keys():
for url in url_reverse:
response_code = getattr(
client_mapping[client_type], method
)(reverse(url), secure=not settings.DEBUG).status_code
assert response_code == response_code_mapping[method], print(
'client: {}, method: {}, received: {}, expected: {}'.format(
client_type, method, response_code, response_code_mapping[method]
)
)
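# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of combining the two helpers in a pytest test.
# FORBIDDEN, the 'client_mapping' fixture and the 'home' URL name are assumptions
# made for the example, not things this module provides.
FORBIDDEN = {'get': 403, 'post': 403, 'put': 403, 'delete': 403}
def test_anonymous_access(client_mapping):
    # Anonymous clients may read the home page but must not modify anything.
    mapping = get_permissions(FORBIDDEN, {'get': 200})
    assert_permissions('anonymous', mapping, client_mapping, ('home',))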
|
mit
|
yelongyu/chihu
|
venv/lib/python2.7/site-packages/alembic/autogenerate/api.py
|
26
|
14714
|
"""Provide the 'autogenerate' feature which can produce migration operations
automatically."""
from ..operations import ops
from . import render
from . import compare
from .. import util
from sqlalchemy.engine.reflection import Inspector
import contextlib
def compare_metadata(context, metadata):
"""Compare a database schema to that given in a
:class:`~sqlalchemy.schema.MetaData` instance.
The database connection is presented in the context
of a :class:`.MigrationContext` object, which
provides database connectivity as well as optional
comparison functions to use for datatypes and
server defaults - see the "autogenerate" arguments
at :meth:`.EnvironmentContext.configure`
for details on these.
The return format is a list of "diff" directives,
each representing individual differences::
from alembic.migration import MigrationContext
from alembic.autogenerate import compare_metadata
from sqlalchemy.schema import SchemaItem
from sqlalchemy.types import TypeEngine
from sqlalchemy import (create_engine, MetaData, Column,
Integer, String, Table)
import pprint
engine = create_engine("sqlite://")
engine.execute('''
create table foo (
id integer not null primary key,
old_data varchar,
x integer
)''')
engine.execute('''
create table bar (
data varchar
)''')
metadata = MetaData()
Table('foo', metadata,
Column('id', Integer, primary_key=True),
Column('data', Integer),
Column('x', Integer, nullable=False)
)
Table('bat', metadata,
Column('info', String)
)
mc = MigrationContext.configure(engine.connect())
diff = compare_metadata(mc, metadata)
pprint.pprint(diff, indent=2, width=20)
Output::
[ ( 'add_table',
Table('bat', MetaData(bind=None),
Column('info', String(), table=<bat>), schema=None)),
( 'remove_table',
Table(u'bar', MetaData(bind=None),
Column(u'data', VARCHAR(), table=<bar>), schema=None)),
( 'add_column',
None,
'foo',
Column('data', Integer(), table=<foo>)),
( 'remove_column',
None,
'foo',
Column(u'old_data', VARCHAR(), table=None)),
[ ( 'modify_nullable',
None,
'foo',
u'x',
{ 'existing_server_default': None,
'existing_type': INTEGER()},
True,
False)]]
:param context: a :class:`.MigrationContext`
instance.
:param metadata: a :class:`~sqlalchemy.schema.MetaData`
instance.
.. seealso::
:func:`.produce_migrations` - produces a :class:`.MigrationScript`
structure based on metadata comparison.
"""
migration_script = produce_migrations(context, metadata)
return migration_script.upgrade_ops.as_diffs()
def produce_migrations(context, metadata):
"""Produce a :class:`.MigrationScript` structure based on schema
comparison.
This function does essentially what :func:`.compare_metadata` does,
but then runs the resulting list of diffs to produce the full
:class:`.MigrationScript` object. For an example of what this looks like,
see the example in :ref:`customizing_revision`.
.. versionadded:: 0.8.0
.. seealso::
:func:`.compare_metadata` - returns more fundamental "diff"
data from comparing a schema.
"""
autogen_context = AutogenContext(context, metadata=metadata)
migration_script = ops.MigrationScript(
rev_id=None,
upgrade_ops=ops.UpgradeOps([]),
downgrade_ops=ops.DowngradeOps([]),
)
compare._populate_migration_script(autogen_context, migration_script)
return migration_script
def render_python_code(
up_or_down_op,
sqlalchemy_module_prefix='sa.',
alembic_module_prefix='op.',
render_as_batch=False,
imports=(),
render_item=None,
):
"""Render Python code given an :class:`.UpgradeOps` or
:class:`.DowngradeOps` object.
This is a convenience function that can be used to test the
autogenerate output of a user-defined :class:`.MigrationScript` structure.
"""
opts = {
'sqlalchemy_module_prefix': sqlalchemy_module_prefix,
'alembic_module_prefix': alembic_module_prefix,
'render_item': render_item,
'render_as_batch': render_as_batch,
}
autogen_context = AutogenContext(None, opts=opts)
autogen_context.imports = set(imports)
return render._indent(render._render_cmd_body(
up_or_down_op, autogen_context))
def _render_migration_diffs(context, template_args):
"""legacy, used by test_autogen_composition at the moment"""
autogen_context = AutogenContext(context)
upgrade_ops = ops.UpgradeOps([])
compare._produce_net_changes(autogen_context, upgrade_ops)
migration_script = ops.MigrationScript(
rev_id=None,
upgrade_ops=upgrade_ops,
downgrade_ops=upgrade_ops.reverse(),
)
render._render_python_into_templatevars(
autogen_context, migration_script, template_args
)
class AutogenContext(object):
"""Maintains configuration and state that's specific to an
autogenerate operation."""
metadata = None
"""The :class:`~sqlalchemy.schema.MetaData` object
representing the destination.
This object is the one that is passed within ``env.py``
to the :paramref:`.EnvironmentContext.configure.target_metadata`
parameter. It represents the structure of :class:`.Table` and other
objects as stated in the current database model, and represents the
destination structure for the database being examined.
While the :class:`~sqlalchemy.schema.MetaData` object is primarily
known as a collection of :class:`~sqlalchemy.schema.Table` objects,
it also has an :attr:`~sqlalchemy.schema.MetaData.info` dictionary
that may be used by end-user schemes to store additional schema-level
objects that are to be compared in custom autogeneration schemes.
"""
connection = None
"""The :class:`~sqlalchemy.engine.base.Connection` object currently
connected to the database backend being compared.
This is obtained from the :attr:`.MigrationContext.bind` and is
    ultimately set up in the ``env.py`` script.
"""
dialect = None
"""The :class:`~sqlalchemy.engine.Dialect` object currently in use.
This is normally obtained from the
:attr:`~sqlalchemy.engine.base.Connection.dialect` attribute.
"""
imports = None
"""A ``set()`` which contains string Python import directives.
The directives are to be rendered into the ``${imports}`` section
of a script template. The set is normally empty and can be modified
within hooks such as the :paramref:`.EnvironmentContext.configure.render_item`
hook.
.. versionadded:: 0.8.3
.. seealso::
:ref:`autogen_render_types`
"""
migration_context = None
"""The :class:`.MigrationContext` established by the ``env.py`` script."""
def __init__(
self, migration_context, metadata=None,
opts=None, autogenerate=True):
if autogenerate and \
migration_context is not None and migration_context.as_sql:
raise util.CommandError(
"autogenerate can't use as_sql=True as it prevents querying "
"the database for schema information")
if opts is None:
opts = migration_context.opts
self.metadata = metadata = opts.get('target_metadata', None) \
if metadata is None else metadata
if metadata is None and \
migration_context is not None and \
migration_context.script is not None:
raise util.CommandError(
"Can't proceed with --autogenerate option; environment "
"script %s does not provide "
"a MetaData object to the context." % (
migration_context.script.env_py_location
))
include_symbol = opts.get('include_symbol', None)
include_object = opts.get('include_object', None)
object_filters = []
if include_symbol:
def include_symbol_filter(
object, name, type_, reflected, compare_to):
if type_ == "table":
return include_symbol(name, object.schema)
else:
return True
object_filters.append(include_symbol_filter)
if include_object:
object_filters.append(include_object)
self._object_filters = object_filters
self.migration_context = migration_context
if self.migration_context is not None:
self.connection = self.migration_context.bind
self.dialect = self.migration_context.dialect
self.imports = set()
self.opts = opts
self._has_batch = False
@util.memoized_property
def inspector(self):
return Inspector.from_engine(self.connection)
@contextlib.contextmanager
def _within_batch(self):
self._has_batch = True
yield
self._has_batch = False
def run_filters(self, object_, name, type_, reflected, compare_to):
"""Run the context's object filters and return True if the targets
should be part of the autogenerate operation.
This method should be run for every kind of object encountered within
an autogenerate operation, giving the environment the chance
to filter what objects should be included in the comparison.
The filters here are produced directly via the
:paramref:`.EnvironmentContext.configure.include_object`
and :paramref:`.EnvironmentContext.configure.include_symbol`
functions, if present.
"""
for fn in self._object_filters:
if not fn(object_, name, type_, reflected, compare_to):
return False
else:
return True
class RevisionContext(object):
"""Maintains configuration and state that's specific to a revision
file generation operation."""
def __init__(self, config, script_directory, command_args):
self.config = config
self.script_directory = script_directory
self.command_args = command_args
self.template_args = {
'config': config # Let templates use config for
# e.g. multiple databases
}
self.generated_revisions = [
self._default_revision()
]
def _to_script(self, migration_script):
template_args = {}
for k, v in self.template_args.items():
template_args.setdefault(k, v)
if getattr(migration_script, '_needs_render', False):
autogen_context = self._last_autogen_context
# clear out existing imports if we are doing multiple
# renders
autogen_context.imports = set()
if migration_script.imports:
                autogen_context.imports.update(migration_script.imports)
render._render_python_into_templatevars(
autogen_context, migration_script, template_args
)
return self.script_directory.generate_revision(
migration_script.rev_id,
migration_script.message,
refresh=True,
head=migration_script.head,
splice=migration_script.splice,
branch_labels=migration_script.branch_label,
version_path=migration_script.version_path,
depends_on=migration_script.depends_on,
**template_args)
def run_autogenerate(self, rev, migration_context):
self._run_environment(rev, migration_context, True)
def run_no_autogenerate(self, rev, migration_context):
self._run_environment(rev, migration_context, False)
def _run_environment(self, rev, migration_context, autogenerate):
if autogenerate:
if self.command_args['sql']:
raise util.CommandError(
"Using --sql with --autogenerate does not make any sense")
if set(self.script_directory.get_revisions(rev)) != \
set(self.script_directory.get_revisions("heads")):
raise util.CommandError("Target database is not up to date.")
upgrade_token = migration_context.opts['upgrade_token']
downgrade_token = migration_context.opts['downgrade_token']
migration_script = self.generated_revisions[-1]
if not getattr(migration_script, '_needs_render', False):
migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token
migration_script.downgrade_ops_list[-1].downgrade_token = \
downgrade_token
migration_script._needs_render = True
else:
migration_script._upgrade_ops.append(
ops.UpgradeOps([], upgrade_token=upgrade_token)
)
migration_script._downgrade_ops.append(
ops.DowngradeOps([], downgrade_token=downgrade_token)
)
self._last_autogen_context = autogen_context = \
AutogenContext(migration_context, autogenerate=autogenerate)
if autogenerate:
compare._populate_migration_script(
autogen_context, migration_script)
hook = migration_context.opts['process_revision_directives']
if hook:
hook(migration_context, rev, self.generated_revisions)
for migration_script in self.generated_revisions:
migration_script._needs_render = True
def _default_revision(self):
op = ops.MigrationScript(
rev_id=self.command_args['rev_id'] or util.rev_id(),
message=self.command_args['message'],
upgrade_ops=ops.UpgradeOps([]),
downgrade_ops=ops.DowngradeOps([]),
head=self.command_args['head'],
splice=self.command_args['splice'],
branch_label=self.command_args['branch_label'],
version_path=self.command_args['version_path'],
depends_on=self.command_args['depends_on']
)
return op
def generate_scripts(self):
for generated_revision in self.generated_revisions:
yield self._to_script(generated_revision)
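# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of the public helpers in this module: compare an
# (empty) MetaData against an in-memory SQLite database and render the resulting
# upgrade operations as Python code. A real project would pass its application
# MetaData and a connection to its own database.
if __name__ == '__main__':
    from sqlalchemy import create_engine, MetaData
    from alembic.migration import MigrationContext
    engine = create_engine('sqlite://')
    mc = MigrationContext.configure(engine.connect())
    script = produce_migrations(mc, MetaData())
    print(render_python_code(script.upgrade_ops))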
|
gpl-3.0
|
benthomasson/ansible
|
lib/ansible/modules/windows/win_firewall_rule.py
|
37
|
3713
|
#!/usr/bin/env python
# (c) 2014, Timothy Vandenbrande <timothy.vandenbrande@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_firewall_rule
version_added: "2.0"
author: Timothy Vandenbrande
short_description: Windows firewall automation
description:
- Allows you to create/remove/update firewall rules
options:
enabled:
description:
- Is this firewall rule enabled or disabled
default: 'yes'
choices: [ 'no', 'yes' ]
aliases: [ 'enable' ]
state:
description:
- Should this rule be added or removed
default: "present"
choices: ['present', 'absent']
name:
description:
      - The rule's name
required: true
direction:
description:
- Is this rule for inbound or outbound traffic
required: true
choices: ['in', 'out']
action:
description:
- What to do with the items this rule is for
required: true
choices: ['allow', 'block', 'bypass']
description:
description:
- Description for the firewall rule
localip:
description:
- The local ip address this rule applies to
default: 'any'
remoteip:
description:
- The remote ip address/range this rule applies to
default: 'any'
localport:
description:
- The local port this rule applies to
remoteport:
description:
- The remote port this rule applies to
program:
description:
- The program this rule applies to
service:
description:
- The service this rule applies to
protocol:
description:
- The protocol this rule applies to
default: 'any'
profiles:
description:
- The profile this rule applies to
default: 'domain,private,public'
aliases: [ 'profile' ]
force:
description:
- Replace any existing rule by removing it first.
default: 'no'
choices: [ 'no', 'yes' ]
notes:
- The implementation uses C(netsh advfirewall) underneath, a pure-Powershell
reimplementation would be more powerful.
- Modifying existing firewall rules is not possible, the module does allow
replacing complete rules based on name, but that works by removing the
existing rule completely, and recreating it with provided information
(when using C(force)).
'''
EXAMPLES = r'''
- name: Firewall rule to allow SMTP on TCP port 25
win_firewall_rule:
name: SMTP
localport: 25
action: allow
direction: in
protocol: tcp
state: present
enabled: yes
- name: Firewall rule to allow RDP on TCP port 3389
win_firewall_rule:
name: Remote Desktop
localport: 3389
action: allow
direction: in
protocol: tcp
profiles: private
state: present
enabled: yes
'''
|
gpl-3.0
|
romain-dartigues/ansible
|
lib/ansible/modules/windows/win_say.py
|
28
|
4090
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016, Jon Hawkesworth (@jhawkesworth) <figs@unity.demon.co.uk>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_say
version_added: "2.3"
short_description: Text to speech module for Windows to speak messages and optionally play sounds
description:
- Uses .NET libraries to convert text to speech and optionally play .wav sounds. Audio Service needs to be running and some kind of speakers or
headphones need to be attached to the windows target(s) for the speech to be audible.
options:
msg:
description:
- The text to be spoken.
- Use either C(msg) or C(msg_file).
- Optional so that you can use this module just to play sounds.
msg_file:
description:
      - Full path to a windows format text file containing the text to be spoken.
- Use either C(msg) or C(msg_file).
- Optional so that you can use this module just to play sounds.
type: path
voice:
description:
- Which voice to use. See notes for how to discover installed voices.
- If the requested voice is not available the default voice will be used.
Example voice names from Windows 10 are C(Microsoft Zira Desktop) and C(Microsoft Hazel Desktop).
default: system default voice
speech_speed:
description:
- How fast or slow to speak the text.
- Must be an integer value in the range -10 to 10.
- -10 is slowest, 10 is fastest.
type: int
default: 0
start_sound_path:
description:
- Full path to a C(.wav) file containing a sound to play before the text is spoken.
- Useful on conference calls to alert other speakers that ansible has something to say.
type: path
end_sound_path:
description:
- Full path to a C(.wav) file containing a sound to play after the text has been spoken.
- Useful on conference calls to alert other speakers that ansible has finished speaking.
type: path
author:
- Jon Hawkesworth (@jhawkesworth)
notes:
- Needs speakers or headphones to do anything useful.
- |
To find which voices are installed, run the following Powershell commands.
Add-Type -AssemblyName System.Speech
$speech = New-Object -TypeName System.Speech.Synthesis.SpeechSynthesizer
$speech.GetInstalledVoices() | ForEach-Object { $_.VoiceInfo }
$speech.Dispose()
- Speech can be surprisingly slow, so it's best to keep message text short.
'''
EXAMPLES = r'''
- name: Warn of impending deployment
win_say:
msg: Warning, deployment commencing in 5 minutes, please log out.
- name: Using a different voice and a start sound
win_say:
start_sound_path: C:\Windows\Media\ding.wav
msg: Warning, deployment commencing in 5 minutes, please log out.
voice: Microsoft Hazel Desktop
- name: With start and end sound
win_say:
start_sound_path: C:\Windows\Media\Windows Balloon.wav
msg: New software installed
end_sound_path: C:\Windows\Media\chimes.wav
- name: Text from file example
win_say:
start_sound_path: C:\Windows\Media\Windows Balloon.wav
msg_file: AppData\Local\Temp\morning_report.txt
end_sound_path: C:\Windows\Media\chimes.wav
'''
RETURN = r'''
message_text:
description: the text that the module attempted to speak
returned: success
type: string
sample: "Warning, deployment commencing in 5 minutes."
voice:
description: the voice used to speak the text.
returned: success
type: string
sample: Microsoft Hazel Desktop
voice_info:
description: the voice used to speak the text.
returned: when requested voice could not be loaded
type: string
sample: Could not load voice TestVoice, using system default voice
'''
|
gpl-3.0
|
Plain-Andy-legacy/android_external_chromium_org
|
tools/telemetry/telemetry/timeline/async_slice.py
|
45
|
1147
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import telemetry.timeline.event as event
class AsyncSlice(event.TimelineEvent):
  ''' An AsyncSlice represents an interval of time during which an
asynchronous operation is in progress. An AsyncSlice consumes no CPU time
itself and so is only associated with Threads at its start and end point.
'''
def __init__(self, category, name, timestamp, args=None,
duration=0, start_thread=None, end_thread=None,
thread_start=None, thread_duration=None):
super(AsyncSlice, self).__init__(
category, name, timestamp, duration, thread_start, thread_duration,
args)
self.parent_slice = None
self.start_thread = start_thread
self.end_thread = end_thread
self.sub_slices = []
self.id = None
def AddSubSlice(self, sub_slice):
assert sub_slice.parent_slice == self
self.sub_slices.append(sub_slice)
def IterEventsInThisContainerRecrusively(self):
for sub_slice in self.sub_slices:
yield sub_slice
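# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of composing async slices. Note that AddSubSlice()
# asserts that parent_slice has already been set on the child.
if __name__ == '__main__':
  parent = AsyncSlice('cat', 'outer', timestamp=0, duration=10)
  child = AsyncSlice('cat', 'inner', timestamp=2, duration=4)
  child.parent_slice = parent
  parent.AddSubSlice(child)
  for sub in parent.IterEventsInThisContainerRecrusively():
    print(sub.name)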
|
bsd-3-clause
|
lmazuel/azure-sdk-for-python
|
azure-servicefabric/azure/servicefabric/models/application_health_state_filter.py
|
1
|
6307
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationHealthStateFilter(Model):
"""Defines matching criteria to determine whether a application should be
included in the cluster health chunk.
One filter can match zero, one or multiple applications, depending on its
properties.
.
:param application_name_filter: The name of the application that matches
the filter, as a fabric uri. The filter is applied only to the specified
application, if it exists.
If the application doesn't exist, no application is returned in the
cluster health chunk based on this filter.
If the application exists, it is included in the cluster health chunk if
it respects the other filter properties.
If not specified, all applications are matched against the other filter
members, like health state filter.
:type application_name_filter: str
:param application_type_name_filter: The name of the application type that
matches the filter.
If specified, the filter is applied only to applications of the selected
application type, if any exists.
If no applications of the specified application type exists, no
application is returned in the cluster health chunk based on this filter.
Each application of the specified application type is included in the
cluster health chunk if it respects the other filter properties.
If not specified, all applications are matched against the other filter
members, like health state filter.
:type application_type_name_filter: str
:param health_state_filter: The filter for the health state of the
applications. It allows selecting applications if they match the desired
health states.
The possible values are integer value of one of the following health
states. Only applications that match the filter are returned. All
applications are used to evaluate the cluster aggregated health state.
If not specified, default value is None, unless the application name or
the application type name are specified. If the filter has default value
and application name is specified, the matching application is returned.
The state values are flag based enumeration, so the value could be a
combination of these values obtained using bitwise 'OR' operator.
For example, if the provided value is 6, it matches applications with
HealthState value of OK (2) and Warning (4).
- Default - Default value. Matches any HealthState. The value is zero.
- None - Filter that doesn't match any HealthState value. Used in order to
return no results on a given collection of states. The value is 1.
- Ok - Filter that matches input with HealthState value Ok. The value is
2.
- Warning - Filter that matches input with HealthState value Warning. The
value is 4.
- Error - Filter that matches input with HealthState value Error. The
value is 8.
- All - Filter that matches input with any HealthState value. The value is
65535.
. Default value: 0 .
:type health_state_filter: int
:param service_filters: Defines a list of filters that specify which
services to be included in the returned cluster health chunk as children
of the application. The services are returned only if the parent
application matches a filter.
If the list is empty, no services are returned. All the services are used
to evaluate the parent application aggregated health state, regardless of
the input filters.
The application filter may specify multiple service filters.
For example, it can specify a filter to return all services with health
state Error and another filter to always include a service identified by
its service name.
:type service_filters:
list[~azure.servicefabric.models.ServiceHealthStateFilter]
:param deployed_application_filters: Defines a list of filters that
specify which deployed applications to be included in the returned cluster
health chunk as children of the application. The deployed applications are
returned only if the parent application matches a filter.
If the list is empty, no deployed applications are returned. All the
deployed applications are used to evaluate the parent application
aggregated health state, regardless of the input filters.
The application filter may specify multiple deployed application filters.
For example, it can specify a filter to return all deployed applications
with health state Error and another filter to always include a deployed
application on a specified node.
:type deployed_application_filters:
list[~azure.servicefabric.models.DeployedApplicationHealthStateFilter]
"""
_attribute_map = {
'application_name_filter': {'key': 'ApplicationNameFilter', 'type': 'str'},
'application_type_name_filter': {'key': 'ApplicationTypeNameFilter', 'type': 'str'},
'health_state_filter': {'key': 'HealthStateFilter', 'type': 'int'},
'service_filters': {'key': 'ServiceFilters', 'type': '[ServiceHealthStateFilter]'},
'deployed_application_filters': {'key': 'DeployedApplicationFilters', 'type': '[DeployedApplicationHealthStateFilter]'},
}
def __init__(self, application_name_filter=None, application_type_name_filter=None, health_state_filter=0, service_filters=None, deployed_application_filters=None):
super(ApplicationHealthStateFilter, self).__init__()
self.application_name_filter = application_name_filter
self.application_type_name_filter = application_type_name_filter
self.health_state_filter = health_state_filter
self.service_filters = service_filters
self.deployed_application_filters = deployed_application_filters
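# --- Illustrative usage sketch (not part of the generated model) ---
# A minimal, hedged example: health_state_filter is flag-based, so
# 2 (Ok) | 4 (Warning) == 6 matches applications that are either Ok or Warning.
# The application name below is made up for the example.
if __name__ == '__main__':
    app_filter = ApplicationHealthStateFilter(
        application_name_filter='fabric:/MyApp',
        health_state_filter=2 | 4,
    )
    print(app_filter.health_state_filter)  # prints 6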
|
mit
|
efortuna/AndroidSDKClone
|
ndk_experimental/prebuilt/linux-x86_64/lib/python2.7/test/test_cmath.py
|
84
|
18637
|
from test.test_support import run_unittest
from test.test_math import parse_testfile, test_file
import unittest
import cmath, math
from cmath import phase, polar, rect, pi
INF = float('inf')
NAN = float('nan')
complex_zeros = [complex(x, y) for x in [0.0, -0.0] for y in [0.0, -0.0]]
complex_infinities = [complex(x, y) for x, y in [
(INF, 0.0), # 1st quadrant
(INF, 2.3),
(INF, INF),
(2.3, INF),
(0.0, INF),
(-0.0, INF), # 2nd quadrant
(-2.3, INF),
(-INF, INF),
(-INF, 2.3),
(-INF, 0.0),
(-INF, -0.0), # 3rd quadrant
(-INF, -2.3),
(-INF, -INF),
(-2.3, -INF),
(-0.0, -INF),
(0.0, -INF), # 4th quadrant
(2.3, -INF),
(INF, -INF),
(INF, -2.3),
(INF, -0.0)
]]
complex_nans = [complex(x, y) for x, y in [
(NAN, -INF),
(NAN, -2.3),
(NAN, -0.0),
(NAN, 0.0),
(NAN, 2.3),
(NAN, INF),
(-INF, NAN),
(-2.3, NAN),
(-0.0, NAN),
(0.0, NAN),
(2.3, NAN),
(INF, NAN)
]]
class CMathTests(unittest.TestCase):
# list of all functions in cmath
test_functions = [getattr(cmath, fname) for fname in [
'acos', 'acosh', 'asin', 'asinh', 'atan', 'atanh',
'cos', 'cosh', 'exp', 'log', 'log10', 'sin', 'sinh',
'sqrt', 'tan', 'tanh']]
# test first and second arguments independently for 2-argument log
test_functions.append(lambda x : cmath.log(x, 1729. + 0j))
test_functions.append(lambda x : cmath.log(14.-27j, x))
def setUp(self):
self.test_values = open(test_file)
def tearDown(self):
self.test_values.close()
def rAssertAlmostEqual(self, a, b, rel_err = 2e-15, abs_err = 5e-323,
msg=None):
"""Fail if the two floating-point numbers are not almost equal.
Determine whether floating-point values a and b are equal to within
a (small) rounding error. The default values for rel_err and
abs_err are chosen to be suitable for platforms where a float is
represented by an IEEE 754 double. They allow an error of between
9 and 19 ulps.
"""
# special values testing
if math.isnan(a):
if math.isnan(b):
return
self.fail(msg or '{!r} should be nan'.format(b))
if math.isinf(a):
if a == b:
return
self.fail(msg or 'finite result where infinity expected: '
'expected {!r}, got {!r}'.format(a, b))
# if both a and b are zero, check whether they have the same sign
# (in theory there are examples where it would be legitimate for a
# and b to have opposite signs; in practice these hardly ever
# occur).
if not a and not b:
if math.copysign(1., a) != math.copysign(1., b):
self.fail(msg or 'zero has wrong sign: expected {!r}, '
'got {!r}'.format(a, b))
# if a-b overflows, or b is infinite, return False. Again, in
# theory there are examples where a is within a few ulps of the
# max representable float, and then b could legitimately be
# infinite. In practice these examples are rare.
try:
absolute_error = abs(b-a)
except OverflowError:
pass
else:
# test passes if either the absolute error or the relative
# error is sufficiently small. The defaults amount to an
# error of between 9 ulps and 19 ulps on an IEEE-754 compliant
# machine.
if absolute_error <= max(abs_err, rel_err * abs(a)):
return
self.fail(msg or
'{!r} and {!r} are not sufficiently close'.format(a, b))
def test_constants(self):
e_expected = 2.71828182845904523536
pi_expected = 3.14159265358979323846
self.assertAlmostEqual(cmath.pi, pi_expected, places=9,
msg="cmath.pi is {}; should be {}".format(cmath.pi, pi_expected))
self.assertAlmostEqual(cmath.e, e_expected, places=9,
msg="cmath.e is {}; should be {}".format(cmath.e, e_expected))
def test_user_object(self):
# Test automatic calling of __complex__ and __float__ by cmath
# functions
# some random values to use as test values; we avoid values
# for which any of the functions in cmath is undefined
# (i.e. 0., 1., -1., 1j, -1j) or would cause overflow
cx_arg = 4.419414439 + 1.497100113j
flt_arg = -6.131677725
# a variety of non-complex numbers, used to check that
# non-complex return values from __complex__ give an error
non_complexes = ["not complex", 1, 5L, 2., None,
object(), NotImplemented]
# Now we introduce a variety of classes whose instances might
# end up being passed to the cmath functions
# usual case: new-style class implementing __complex__
class MyComplex(object):
def __init__(self, value):
self.value = value
def __complex__(self):
return self.value
# old-style class implementing __complex__
class MyComplexOS:
def __init__(self, value):
self.value = value
def __complex__(self):
return self.value
# classes for which __complex__ raises an exception
class SomeException(Exception):
pass
class MyComplexException(object):
def __complex__(self):
raise SomeException
class MyComplexExceptionOS:
def __complex__(self):
raise SomeException
# some classes not providing __float__ or __complex__
class NeitherComplexNorFloat(object):
pass
class NeitherComplexNorFloatOS:
pass
class MyInt(object):
def __int__(self): return 2
def __long__(self): return 2L
def __index__(self): return 2
class MyIntOS:
def __int__(self): return 2
def __long__(self): return 2L
def __index__(self): return 2
# other possible combinations of __float__ and __complex__
# that should work
class FloatAndComplex(object):
def __float__(self):
return flt_arg
def __complex__(self):
return cx_arg
class FloatAndComplexOS:
def __float__(self):
return flt_arg
def __complex__(self):
return cx_arg
class JustFloat(object):
def __float__(self):
return flt_arg
class JustFloatOS:
def __float__(self):
return flt_arg
for f in self.test_functions:
# usual usage
self.assertEqual(f(MyComplex(cx_arg)), f(cx_arg))
self.assertEqual(f(MyComplexOS(cx_arg)), f(cx_arg))
# other combinations of __float__ and __complex__
self.assertEqual(f(FloatAndComplex()), f(cx_arg))
self.assertEqual(f(FloatAndComplexOS()), f(cx_arg))
self.assertEqual(f(JustFloat()), f(flt_arg))
self.assertEqual(f(JustFloatOS()), f(flt_arg))
# TypeError should be raised for classes not providing
# either __complex__ or __float__, even if they provide
# __int__, __long__ or __index__. An old-style class
# currently raises AttributeError instead of a TypeError;
# this could be considered a bug.
self.assertRaises(TypeError, f, NeitherComplexNorFloat())
self.assertRaises(TypeError, f, MyInt())
self.assertRaises(Exception, f, NeitherComplexNorFloatOS())
self.assertRaises(Exception, f, MyIntOS())
# non-complex return value from __complex__ -> TypeError
for bad_complex in non_complexes:
self.assertRaises(TypeError, f, MyComplex(bad_complex))
self.assertRaises(TypeError, f, MyComplexOS(bad_complex))
# exceptions in __complex__ should be propagated correctly
self.assertRaises(SomeException, f, MyComplexException())
self.assertRaises(SomeException, f, MyComplexExceptionOS())
def test_input_type(self):
# ints and longs should be acceptable inputs to all cmath
# functions, by virtue of providing a __float__ method
for f in self.test_functions:
for arg in [2, 2L, 2.]:
self.assertEqual(f(arg), f(arg.__float__()))
# but strings should give a TypeError
for f in self.test_functions:
for arg in ["a", "long_string", "0", "1j", ""]:
self.assertRaises(TypeError, f, arg)
def test_cmath_matches_math(self):
# check that corresponding cmath and math functions are equal
# for floats in the appropriate range
# test_values in (0, 1)
test_values = [0.01, 0.1, 0.2, 0.5, 0.9, 0.99]
# test_values for functions defined on [-1., 1.]
unit_interval = test_values + [-x for x in test_values] + \
[0., 1., -1.]
# test_values for log, log10, sqrt
positive = test_values + [1.] + [1./x for x in test_values]
nonnegative = [0.] + positive
# test_values for functions defined on the whole real line
real_line = [0.] + positive + [-x for x in positive]
test_functions = {
'acos' : unit_interval,
'asin' : unit_interval,
'atan' : real_line,
'cos' : real_line,
'cosh' : real_line,
'exp' : real_line,
'log' : positive,
'log10' : positive,
'sin' : real_line,
'sinh' : real_line,
'sqrt' : nonnegative,
'tan' : real_line,
'tanh' : real_line}
for fn, values in test_functions.items():
float_fn = getattr(math, fn)
complex_fn = getattr(cmath, fn)
for v in values:
z = complex_fn(v)
self.rAssertAlmostEqual(float_fn(v), z.real)
self.assertEqual(0., z.imag)
# test two-argument version of log with various bases
for base in [0.5, 2., 10.]:
for v in positive:
z = cmath.log(v, base)
self.rAssertAlmostEqual(math.log(v, base), z.real)
self.assertEqual(0., z.imag)
def test_specific_values(self):
if not float.__getformat__("double").startswith("IEEE"):
return
def rect_complex(z):
"""Wrapped version of rect that accepts a complex number instead of
two float arguments."""
return cmath.rect(z.real, z.imag)
def polar_complex(z):
"""Wrapped version of polar that returns a complex number instead of
two floats."""
return complex(*polar(z))
for id, fn, ar, ai, er, ei, flags in parse_testfile(test_file):
arg = complex(ar, ai)
expected = complex(er, ei)
if fn == 'rect':
function = rect_complex
elif fn == 'polar':
function = polar_complex
else:
function = getattr(cmath, fn)
if 'divide-by-zero' in flags or 'invalid' in flags:
try:
actual = function(arg)
except ValueError:
continue
else:
self.fail('ValueError not raised in test '
'{}: {}(complex({!r}, {!r}))'.format(id, fn, ar, ai))
if 'overflow' in flags:
try:
actual = function(arg)
except OverflowError:
continue
else:
self.fail('OverflowError not raised in test '
'{}: {}(complex({!r}, {!r}))'.format(id, fn, ar, ai))
actual = function(arg)
if 'ignore-real-sign' in flags:
actual = complex(abs(actual.real), actual.imag)
expected = complex(abs(expected.real), expected.imag)
if 'ignore-imag-sign' in flags:
actual = complex(actual.real, abs(actual.imag))
expected = complex(expected.real, abs(expected.imag))
# for the real part of the log function, we allow an
# absolute error of up to 2e-15.
if fn in ('log', 'log10'):
real_abs_err = 2e-15
else:
real_abs_err = 5e-323
error_message = (
'{}: {}(complex({!r}, {!r}))\n'
'Expected: complex({!r}, {!r})\n'
'Received: complex({!r}, {!r})\n'
'Received value insufficiently close to expected value.'
).format(id, fn, ar, ai,
expected.real, expected.imag,
actual.real, actual.imag)
self.rAssertAlmostEqual(expected.real, actual.real,
abs_err=real_abs_err,
msg=error_message)
self.rAssertAlmostEqual(expected.imag, actual.imag,
msg=error_message)
def assertCISEqual(self, a, b):
eps = 1E-7
if abs(a[0] - b[0]) > eps or abs(a[1] - b[1]) > eps:
self.fail((a ,b))
def test_polar(self):
self.assertCISEqual(polar(0), (0., 0.))
self.assertCISEqual(polar(1.), (1., 0.))
self.assertCISEqual(polar(-1.), (1., pi))
self.assertCISEqual(polar(1j), (1., pi/2))
self.assertCISEqual(polar(-1j), (1., -pi/2))
def test_phase(self):
self.assertAlmostEqual(phase(0), 0.)
self.assertAlmostEqual(phase(1.), 0.)
self.assertAlmostEqual(phase(-1.), pi)
self.assertAlmostEqual(phase(-1.+1E-300j), pi)
self.assertAlmostEqual(phase(-1.-1E-300j), -pi)
self.assertAlmostEqual(phase(1j), pi/2)
self.assertAlmostEqual(phase(-1j), -pi/2)
# zeros
self.assertEqual(phase(complex(0.0, 0.0)), 0.0)
self.assertEqual(phase(complex(0.0, -0.0)), -0.0)
self.assertEqual(phase(complex(-0.0, 0.0)), pi)
self.assertEqual(phase(complex(-0.0, -0.0)), -pi)
# infinities
self.assertAlmostEqual(phase(complex(-INF, -0.0)), -pi)
self.assertAlmostEqual(phase(complex(-INF, -2.3)), -pi)
self.assertAlmostEqual(phase(complex(-INF, -INF)), -0.75*pi)
self.assertAlmostEqual(phase(complex(-2.3, -INF)), -pi/2)
self.assertAlmostEqual(phase(complex(-0.0, -INF)), -pi/2)
self.assertAlmostEqual(phase(complex(0.0, -INF)), -pi/2)
self.assertAlmostEqual(phase(complex(2.3, -INF)), -pi/2)
self.assertAlmostEqual(phase(complex(INF, -INF)), -pi/4)
self.assertEqual(phase(complex(INF, -2.3)), -0.0)
self.assertEqual(phase(complex(INF, -0.0)), -0.0)
self.assertEqual(phase(complex(INF, 0.0)), 0.0)
self.assertEqual(phase(complex(INF, 2.3)), 0.0)
self.assertAlmostEqual(phase(complex(INF, INF)), pi/4)
self.assertAlmostEqual(phase(complex(2.3, INF)), pi/2)
self.assertAlmostEqual(phase(complex(0.0, INF)), pi/2)
self.assertAlmostEqual(phase(complex(-0.0, INF)), pi/2)
self.assertAlmostEqual(phase(complex(-2.3, INF)), pi/2)
self.assertAlmostEqual(phase(complex(-INF, INF)), 0.75*pi)
self.assertAlmostEqual(phase(complex(-INF, 2.3)), pi)
self.assertAlmostEqual(phase(complex(-INF, 0.0)), pi)
# real or imaginary part NaN
for z in complex_nans:
self.assertTrue(math.isnan(phase(z)))
def test_abs(self):
# zeros
for z in complex_zeros:
self.assertEqual(abs(z), 0.0)
# infinities
for z in complex_infinities:
self.assertEqual(abs(z), INF)
# real or imaginary part NaN
self.assertEqual(abs(complex(NAN, -INF)), INF)
self.assertTrue(math.isnan(abs(complex(NAN, -2.3))))
self.assertTrue(math.isnan(abs(complex(NAN, -0.0))))
self.assertTrue(math.isnan(abs(complex(NAN, 0.0))))
self.assertTrue(math.isnan(abs(complex(NAN, 2.3))))
self.assertEqual(abs(complex(NAN, INF)), INF)
self.assertEqual(abs(complex(-INF, NAN)), INF)
self.assertTrue(math.isnan(abs(complex(-2.3, NAN))))
self.assertTrue(math.isnan(abs(complex(-0.0, NAN))))
self.assertTrue(math.isnan(abs(complex(0.0, NAN))))
self.assertTrue(math.isnan(abs(complex(2.3, NAN))))
self.assertEqual(abs(complex(INF, NAN)), INF)
self.assertTrue(math.isnan(abs(complex(NAN, NAN))))
# result overflows
if float.__getformat__("double").startswith("IEEE"):
self.assertRaises(OverflowError, abs, complex(1.4e308, 1.4e308))
def assertCEqual(self, a, b):
eps = 1E-7
if abs(a.real - b[0]) > eps or abs(a.imag - b[1]) > eps:
self.fail((a ,b))
def test_rect(self):
self.assertCEqual(rect(0, 0), (0, 0))
self.assertCEqual(rect(1, 0), (1., 0))
self.assertCEqual(rect(1, -pi), (-1., 0))
self.assertCEqual(rect(1, pi/2), (0, 1.))
self.assertCEqual(rect(1, -pi/2), (0, -1.))
def test_isnan(self):
self.assertFalse(cmath.isnan(1))
self.assertFalse(cmath.isnan(1j))
self.assertFalse(cmath.isnan(INF))
self.assertTrue(cmath.isnan(NAN))
self.assertTrue(cmath.isnan(complex(NAN, 0)))
self.assertTrue(cmath.isnan(complex(0, NAN)))
self.assertTrue(cmath.isnan(complex(NAN, NAN)))
self.assertTrue(cmath.isnan(complex(NAN, INF)))
self.assertTrue(cmath.isnan(complex(INF, NAN)))
def test_isinf(self):
self.assertFalse(cmath.isinf(1))
self.assertFalse(cmath.isinf(1j))
self.assertFalse(cmath.isinf(NAN))
self.assertTrue(cmath.isinf(INF))
self.assertTrue(cmath.isinf(complex(INF, 0)))
self.assertTrue(cmath.isinf(complex(0, INF)))
self.assertTrue(cmath.isinf(complex(INF, INF)))
self.assertTrue(cmath.isinf(complex(NAN, INF)))
self.assertTrue(cmath.isinf(complex(INF, NAN)))
def test_main():
run_unittest(CMathTests)
if __name__ == "__main__":
test_main()
|
apache-2.0
|
gamesbrewer/kegger
|
kegger/myapp/libs/webob/datetime_utils.py
|
60
|
2552
|
import calendar
from datetime import (
date,
datetime,
timedelta,
tzinfo,
)
from email.utils import (
formatdate,
mktime_tz,
parsedate_tz,
)
import time
from webob.compat import (
integer_types,
long,
native_,
text_type,
)
__all__ = [
'UTC', 'timedelta_to_seconds',
'year', 'month', 'week', 'day', 'hour', 'minute', 'second',
'parse_date', 'serialize_date',
'parse_date_delta', 'serialize_date_delta',
]
_now = datetime.now # hook point for unit tests
class _UTC(tzinfo):
def dst(self, dt):
return timedelta(0)
def utcoffset(self, dt):
return timedelta(0)
def tzname(self, dt):
return 'UTC'
def __repr__(self):
return 'UTC'
UTC = _UTC()
def timedelta_to_seconds(td):
"""
Converts a timedelta instance to seconds.
"""
return td.seconds + (td.days*24*60*60)
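# Hedged example (editor's addition, not in the original module):
#     timedelta_to_seconds(timedelta(days=1, seconds=30))  # -> 86430
# Note that, as written, microseconds are ignored and negative timedeltas
# follow Python's days/seconds normalisation.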
day = timedelta(days=1)
week = timedelta(weeks=1)
hour = timedelta(hours=1)
minute = timedelta(minutes=1)
second = timedelta(seconds=1)
# Estimate, I know; good enough for expirations
month = timedelta(days=30)
year = timedelta(days=365)
def parse_date(value):
if not value:
return None
try:
value = native_(value)
except:
return None
t = parsedate_tz(value)
if t is None:
# Could not parse
return None
if t[-1] is None:
# No timezone given. None would mean local time, but we'll force UTC
t = t[:9] + (0,)
t = mktime_tz(t)
return datetime.fromtimestamp(t, UTC)
def serialize_date(dt):
if isinstance(dt, (bytes, text_type)):
return native_(dt)
if isinstance(dt, timedelta):
dt = _now() + dt
if isinstance(dt, (datetime, date)):
dt = dt.timetuple()
if isinstance(dt, (tuple, time.struct_time)):
dt = calendar.timegm(dt)
if not (isinstance(dt, float) or isinstance(dt, integer_types)):
raise ValueError(
"You must pass in a datetime, date, time tuple, or integer object, "
"not %r" % dt)
return formatdate(dt, usegmt=True)
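# Hedged illustration (editor's addition): parse_date() and serialize_date()
# are roughly inverse operations on RFC 1123 / HTTP-date strings, e.g.
#     serialize_date(parse_date('Tue, 15 Nov 1994 08:12:31 GMT'))
# should give back the same string, and serialize_date(0) formats the Unix
# epoch as 'Thu, 01 Jan 1970 00:00:00 GMT'.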
def parse_date_delta(value):
"""
like parse_date, but also handle delta seconds
"""
if not value:
return None
try:
value = int(value)
except ValueError:
return parse_date(value)
else:
return _now() + timedelta(seconds=value)
def serialize_date_delta(value):
if isinstance(value, (float, int, long)):
return str(int(value))
else:
return serialize_date(value)
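# Hedged smoke test of the delta helpers (editor's addition; only runs when
# this module is executed directly, never on import):
if __name__ == '__main__':
    assert serialize_date_delta(3600) == '3600'
    assert parse_date_delta(None) is None
    # a bare integer string is interpreted as "now + that many seconds"
    assert parse_date_delta('60') - _now() <= timedelta(seconds=61)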
|
cc0-1.0
|
guillaume-philippon/aquilon
|
lib/aquilon/worker/formats/chassis.py
|
1
|
1715
|
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2013,2014,2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Chassis formatter."""
from aquilon.aqdb.model import Chassis
from aquilon.worker.formats.formatters import ObjectFormatter
from aquilon.worker.formats.hardware_entity import HardwareEntityFormatter
class ChassisFormatter(HardwareEntityFormatter):
def format_raw(self, chassis, indent="", embedded=True,
indirect_attrs=True):
details = [super(ChassisFormatter, self).format_raw(chassis, indent)]
for slot in chassis.slots:
if slot.machine:
if slot.machine.primary_name:
hostname = slot.machine.primary_name
else:
hostname = "no hostname"
details.append(indent + " Slot #%d: %s (%s)" %
(slot.slot_number, slot.machine.label, hostname))
else:
details.append(indent + " Slot #%d: Empty" % slot.slot_number)
return "\n".join(details)
ObjectFormatter.handlers[Chassis] = ChassisFormatter()
|
apache-2.0
|
lenovor/bosen
|
app/dnn/script/predict.py
|
13
|
1559
|
#!/usr/bin/env python
"""
This script starts a process locally, using <client-id> <hostfile> as inputs.
"""
import os
from os.path import dirname
from os.path import join
import time
import sys
if len(sys.argv) != 3:
print "usage: %s <client-id> <hostfile>" % sys.argv[0]
sys.exit(1)
# Please set the FULL app dir path here
app_dir = "/home/user/bosen/app/dnn"
client_id = sys.argv[1]
hostfile = sys.argv[2]
proj_dir = dirname(dirname(app_dir))
params = {
"parafile": join(app_dir, "datasets/para_imnet.txt")
, "data_ptt_file": join(app_dir, "datasets/data_ptt_file.txt")
, "model_weight_file": join(app_dir, "datasets/weights.txt")
, "model_bias_file": join(app_dir, "datasets/biases.txt")
}
petuum_params = {
"hostfile": hostfile,
}
prog_name = "DNN_PRED"
prog_path = join(app_dir, "bin", prog_name)
hadoop_path = os.popen('hadoop classpath --glob').read()
env_params = (
"GLOG_logtostderr=true "
"GLOG_v=-1 "
"GLOG_minloglevel=0 "
)
# Get host IPs
with open(hostfile, "r") as f:
hostlines = f.read().splitlines()
host_ips = [line.split()[1] for line in hostlines]
petuum_params["num_clients"] = len(host_ips)
cmd = "killall -q " + prog_name
# os.system is synchronous call.
os.system(cmd)
print "Done killing"
cmd = "export CLASSPATH=`hadoop classpath --glob`:$CLASSPATH; "
cmd += env_params + prog_path
petuum_params["client_id"] = client_id
cmd += "".join([" --%s=%s" % (k,v) for k,v in petuum_params.items()])
cmd += "".join([" --%s=%s" % (k,v) for k,v in params.items()])
print cmd
os.system(cmd)
|
bsd-3-clause
|
aurusov/rdo_studio
|
scripts/python/utils.py
|
4
|
1404
|
###############################################################################
# Copyright (c) 2013 Evgeny Proydakov <lord.tiran@gmail.com>
###############################################################################
# -*- coding: utf-8 -*-
###############################################################################
ENC = ''
DEC = ''
import os, sys
def enc_print(*args):
flag = False
for arg in args:
if flag:
sys.stdout.write(' ')
if len(ENC)*len(DEC):
sys.stdout.write(str(arg).encode(ENC).decode(DEC))
else:
sys.stdout.write(str(arg))
flag = True
sys.stdout.write('\n')
def print_list_by_line(list):
for string in list:
enc_print (string)
def get_files_list(dir):
dirs = []
nfile = []
files = []
for dirname, dirnames, filenames in os.walk(dir):
dirs.append(dirname)
for subdirname in dirnames:
dirs.append(os.path.join(dirname, subdirname))
for filename in filenames:
files.append(os.path.join(dirname, filename))
return files
def cut_slash(list):
for index in range(len(list)):
list[index] = list[index].strip()
list[index] = list[index].replace('\n', '')
list[index] = list[index].rstrip()
def wrap_the_string_in_quotes(string):
new_string = '"' + string + '"'
return new_string
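# Hedged usage sketch (editor's addition; the directory is just an
# illustration; only runs when this file is executed directly):
if __name__ == '__main__':
    files = get_files_list('.')
    cut_slash(files)
    print_list_by_line([wrap_the_string_in_quotes(f) for f in files])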
|
mit
|
bcarr092/CAHAL
|
test/test_driver.py
|
1
|
1595
|
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
import cahal_tests
import unittest
import sys
from test_cahal import TestsCAHAL
from test_cahal_device import TestsCAHALDevice
from test_cahal_device_stream import TestsCAHALDeviceStream
from test_cahal_audio_format_description import \
TestsCAHALAudioFormatDescription
cahal_tests.cpc_log_set_log_level( cahal_tests.CPC_LOG_LEVEL_NO_LOGGING )
cahal_tests.python_cahal_initialize()
alltests = unittest.TestSuite ( [ \
unittest.TestLoader().loadTestsFromTestCase( TestsCAHAL ), \
unittest.TestLoader().loadTestsFromTestCase( TestsCAHALDevice ), \
unittest.TestLoader().loadTestsFromTestCase( TestsCAHALDeviceStream ), \
unittest.TestLoader().loadTestsFromTestCase ( \
TestsCAHALAudioFormatDescription \
) \
] )
result = unittest.TextTestRunner( verbosity=2 ).run( alltests )
cahal_tests.cahal_terminate()
if( len( result.errors ) == 0 and len( result.failures ) == 0 ):
sys.exit( 0 )
else:
sys.exit( -1 )
|
apache-2.0
|
nguyentran/openviber
|
tools/scons-local/scons-local-2.0.1/SCons/Tool/hplink.py
|
61
|
2390
|
"""SCons.Tool.hplink
Tool-specific initialization for the HP linker.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/hplink.py 5134 2010/08/16 23:02:40 bdeegan"
import os
import os.path
import SCons.Util
import link
ccLinker = None
# search for the acc compiler and linker front end
try:
dirs = os.listdir('/opt')
except (IOError, OSError):
# Not being able to read the directory because it doesn't exist
# (IOError) or isn't readable (OSError) is okay.
dirs = []
for dir in dirs:
linker = '/opt/' + dir + '/bin/aCC'
if os.path.exists(linker):
ccLinker = linker
break
def generate(env):
"""
    Add Builders and construction variables for the HP aCC linker to
    an Environment.
"""
link.generate(env)
env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,+s -Wl,+vnocompatwarnings')
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -b')
env['SHLIBSUFFIX'] = '.sl'
def exists(env):
return ccLinker
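# Hedged usage sketch (editor's addition): this tool is normally loaded via the
# generic SCons tool lookup rather than imported directly, e.g.
#     env = Environment(tools=['default', 'hplink'])
# which calls generate(env) above and leaves LINKFLAGS, SHLINKFLAGS and
# SHLIBSUFFIX configured for the HP aCC linker front end.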
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
mit
|
redhat-openstack/nova
|
nova/api/openstack/compute/plugins/v3/access_ips.py
|
23
|
4161
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack.compute.schemas.v3 import access_ips
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
ALIAS = "os-access-ips"
authorize = extensions.soft_extension_authorizer('compute', 'v3:' + ALIAS)
class AccessIPsController(wsgi.Controller):
def _extend_server(self, req, server):
db_instance = req.get_db_instance(server['id'])
ip_v4 = db_instance.get('access_ip_v4')
ip_v6 = db_instance.get('access_ip_v6')
server['accessIPv4'] = (
str(ip_v4) if ip_v4 is not None else '')
server['accessIPv6'] = (
str(ip_v6) if ip_v6 is not None else '')
@wsgi.extends
def create(self, req, resp_obj, body):
context = req.environ['nova.context']
if authorize(context) and 'server' in resp_obj.obj:
server = resp_obj.obj['server']
self._extend_server(req, server)
@wsgi.extends
def show(self, req, resp_obj, id):
context = req.environ['nova.context']
if authorize(context):
server = resp_obj.obj['server']
self._extend_server(req, server)
@wsgi.extends
def update(self, req, resp_obj, id, body):
context = req.environ['nova.context']
if authorize(context):
server = resp_obj.obj['server']
self._extend_server(req, server)
@wsgi.extends(action='rebuild')
def rebuild(self, req, resp_obj, id, body):
context = req.environ['nova.context']
if authorize(context):
server = resp_obj.obj['server']
self._extend_server(req, server)
@wsgi.extends
def detail(self, req, resp_obj):
context = req.environ['nova.context']
if authorize(context):
servers = resp_obj.obj['servers']
for server in servers:
self._extend_server(req, server)
class AccessIPs(extensions.V3APIExtensionBase):
"""Access IPs support."""
name = "AccessIPs"
alias = ALIAS
version = 1
v4_key = 'accessIPv4'
v6_key = 'accessIPv6'
def get_controller_extensions(self):
controller = AccessIPsController()
extension = extensions.ControllerExtension(self, 'servers',
controller)
return [extension]
def get_resources(self):
return []
    # NOTE(gmann): This function is not supposed to use the
    # 'body_deprecated_param' parameter; it exists only to handle the
    # scheduler_hint extension for v2.1. It is kept optional to avoid changes
    # to server_update & server_rebuild.
def server_create(self, server_dict, create_kwargs,
body_deprecated_param=None):
if AccessIPs.v4_key in server_dict:
access_ip_v4 = server_dict.get(AccessIPs.v4_key)
if access_ip_v4:
create_kwargs['access_ip_v4'] = access_ip_v4
else:
create_kwargs['access_ip_v4'] = None
if AccessIPs.v6_key in server_dict:
access_ip_v6 = server_dict.get(AccessIPs.v6_key)
if access_ip_v6:
create_kwargs['access_ip_v6'] = access_ip_v6
else:
create_kwargs['access_ip_v6'] = None
server_update = server_create
server_rebuild = server_create
def get_server_create_schema(self):
return access_ips.server_create
get_server_update_schema = get_server_create_schema
get_server_rebuild_schema = get_server_create_schema
|
apache-2.0
|
mlperf/training_results_v0.5
|
v0.5.0/nvidia/submission/code/rnn_translator/pytorch/seq2seq/data/sampler.py
|
1
|
5047
|
import logging
import torch
from torch.utils.data.sampler import Sampler
from mlperf_compliance import mlperf_log
from seq2seq.utils import gnmt_print
from seq2seq.utils import get_world_size, get_rank
class BucketingSampler(Sampler):
"""
Distributed data sampler supporting bucketing by sequence length.
"""
def __init__(self, dataset, batch_size, seeds, bucketing=True,
world_size=None, rank=None):
"""
Constructor for the BucketingSampler.
:param dataset: dataset
:param batch_size: batch size
:param bucketing: if True enables bucketing by sequence length
:param world_size: number of processes participating in distributed
training
:param rank: rank of the current process within world_size
"""
if world_size is None:
world_size = get_world_size()
if rank is None:
rank = get_rank()
self.dataset = dataset
self.world_size = world_size
self.rank = rank
self.epoch = 0
self.bucketing = bucketing
self.seeds = seeds
self.batch_size = batch_size
self.global_batch_size = batch_size * world_size
self.data_len = len(self.dataset)
self.num_samples = self.data_len // self.global_batch_size \
* self.global_batch_size
def __iter__(self):
gnmt_print(key=mlperf_log.INPUT_ORDER)
# deterministically shuffle based on epoch
g = torch.Generator()
seed = self.seeds[self.epoch]
logging.info(f'Sampler for epoch {self.epoch} uses seed {seed}')
g.manual_seed(seed)
# generate permutation
indices = torch.randperm(self.data_len, generator=g)
# make indices evenly divisible by (batch_size * world_size)
indices = indices[:self.num_samples]
# splits the dataset into chunks of 'batches_in_shard' global batches
# each, sorts by (src + tgt) sequence length within each chunk,
# reshuffles all global batches
if self.bucketing:
batches_in_shard = 80
shard_size = self.global_batch_size * batches_in_shard
gnmt_print(key=mlperf_log.INPUT_SHARD, value=shard_size)
nshards = (self.num_samples + shard_size - 1) // shard_size
lengths = self.dataset.lengths[indices]
shards = [indices[i * shard_size:(i+1) * shard_size] for i in range(nshards)]
len_shards = [lengths[i * shard_size:(i+1) * shard_size] for i in range(nshards)]
indices = []
for len_shard in len_shards:
_, ind = len_shard.sort()
indices.append(ind)
output = tuple(shard[idx] for shard, idx in zip(shards, indices))
indices = torch.cat(output)
# global reshuffle
indices = indices.view(-1, self.global_batch_size)
order = torch.randperm(indices.shape[0], generator=g)
indices = indices[order, :]
indices = indices.view(-1)
assert len(indices) == self.num_samples
# build indices for each individual worker
# consecutive ranks are getting consecutive batches,
# default pytorch DistributedSampler assigns strided batches
# with offset = length / world_size
indices = indices.view(-1, self.batch_size)
indices = indices[self.rank::self.world_size].contiguous()
indices = indices.view(-1)
indices = indices.tolist()
assert len(indices) == self.num_samples // self.world_size
return iter(indices)
def __len__(self):
return self.num_samples // self.world_size
def set_epoch(self, epoch):
"""
        Sets the current epoch index. This value is used to seed the RNG in the
        __iter__() method.
:param epoch: index of current epoch
"""
self.epoch = epoch
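# Hedged usage sketch (editor's addition; dataset, batch size and seeds are
# placeholders, not part of the original file):
#     sampler = BucketingSampler(train_data, batch_size=128, seeds=seeds)
#     loader = torch.utils.data.DataLoader(train_data, batch_size=128,
#                                          sampler=sampler, drop_last=True)
#     for epoch in range(len(seeds)):
#         sampler.set_epoch(epoch)   # re-seeds the per-epoch shuffle
#         for batch in loader:
#             ...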
class StaticDistributedSampler(Sampler):
def __init__(self, dataset, batch_size, pad, world_size=None, rank=None):
if world_size is None:
world_size = get_world_size()
if rank is None:
rank = get_rank()
self.world_size = world_size
global_batch_size = batch_size * world_size
data_len = len(dataset)
num_samples = (data_len + global_batch_size - 1) \
// global_batch_size * global_batch_size
self.num_samples = num_samples
indices = list(range(data_len))
if pad:
indices += [0] * (num_samples - len(indices))
else:
indices += [-1] * (num_samples - len(indices))
indices = torch.tensor(indices)
indices = indices.view(-1, batch_size)
indices = indices[rank::world_size].contiguous()
indices = indices.view(-1)
indices = indices[indices != -1]
indices = indices.tolist()
self.indices = indices
def __iter__(self):
return iter(self.indices)
def __len__(self):
return len(self.indices)
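# Hedged usage sketch (editor's addition): StaticDistributedSampler is intended
# for evaluation, where every rank gets a fixed (optionally padded) slice of
# the data, e.g.
#     sampler = StaticDistributedSampler(test_data, batch_size=32, pad=True)
#     loader = torch.utils.data.DataLoader(test_data, batch_size=32,
#                                          sampler=sampler)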
|
apache-2.0
|
pwendell/mesos
|
third_party/boto-2.0b2/boto/ec2/spotinstancerequest.py
|
8
|
3747
|
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Represents an EC2 Spot Instance Request
"""
from boto.ec2.ec2object import EC2Object
from boto.ec2.launchspecification import LaunchSpecification
class SpotInstanceStateFault(object):
def __init__(self, code=None, message=None):
self.code = code
self.message = message
def __repr__(self):
return '(%s, %s)' % (self.code, self.message)
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'code':
self.code = value
elif name == 'message':
self.message = value
setattr(self, name, value)
class SpotInstanceRequest(EC2Object):
def __init__(self, connection=None):
EC2Object.__init__(self, connection)
self.id = None
self.price = None
self.type = None
self.state = None
self.fault = None
self.valid_from = None
self.valid_until = None
self.launch_group = None
self.product_description = None
self.availability_zone_group = None
self.create_time = None
self.launch_specification = None
self.instance_id = None
def __repr__(self):
return 'SpotInstanceRequest:%s' % self.id
def startElement(self, name, attrs, connection):
if name == 'launchSpecification':
self.launch_specification = LaunchSpecification(connection)
return self.launch_specification
elif name == 'fault':
self.fault = SpotInstanceStateFault()
return self.fault
else:
return None
def endElement(self, name, value, connection):
if name == 'spotInstanceRequestId':
self.id = value
elif name == 'spotPrice':
self.price = float(value)
elif name == 'type':
self.type = value
elif name == 'state':
self.state = value
elif name == 'productDescription':
self.product_description = value
elif name == 'validFrom':
self.valid_from = value
elif name == 'validUntil':
self.valid_until = value
elif name == 'launchGroup':
self.launch_group = value
elif name == 'availabilityZoneGroup':
self.availability_zone_group = value
elif name == 'createTime':
self.create_time = value
elif name == 'instanceId':
self.instance_id = value
else:
setattr(self, name, value)
def cancel(self):
self.connection.cancel_spot_instance_requests([self.id])
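# Hedged usage sketch (editor's addition, not part of boto itself; credentials
# and region come from the usual boto configuration):
#     import boto
#     conn = boto.connect_ec2()
#     for req in conn.get_all_spot_instance_requests():
#         print req.id, req.state, req.price
#         if req.state == 'open':
#             req.cancel()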
|
apache-2.0
|
Corsol/server
|
dep/ACE_wrappers/bin/svn_props.py
|
95
|
1292
|
#!/usr/bin/python
import sys
import re
import os
import string
print """WARNING: this script is dumb. I mean, really, really dumb. Every file is treated
as a text file, so if you are checking in any binary files, YOU MUST set a non-text
MIME type by hand, otherwise it WILL be corrupted by the checkout process.
A better approach would be to add the unmatched files to the config file in
ACE/docs/svn/config (and update yours!) so others won't have to put up with them
in the future.
To use this program, copy and paste the output from the svn command into standard
input.
"""
foo = raw_input("That being said, if you want to continue, press enter")
sin, sout = os.popen2 ("svn info")
sin.close ()
os.wait ()
url = ""
root = ""
path = ""
for line in sout.readlines ():
if line.startswith ("URL: "):
url = line.replace ("URL: ", "")[:-1]
if line.startswith ("Repository Root: "):
root = line.replace ("Repository Root: ", "")[:-1]
path = url.replace (root, "")[1:] + '/'
files = ""
eol_style = " svn ps svn:eol-style native "
keywords = " svn ps svn:keywords 'Author Date Id Revision' "
for line in sys.stdin.readlines ():
ln = line[0:line.find (':')] + ' '
ln = ln.replace (path,"")
os.system (eol_style + ln)
os.system (keywords + ln)
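# Hedged usage note (editor's addition): run the script from the working-copy
# root, press enter at the prompt, then paste the output of the relevant svn
# command (e.g. "svn add ...") and close stdin with Ctrl-D; each listed path is
# then given svn:eol-style and svn:keywords properties via "svn ps".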
|
gpl-2.0
|
kjc88/sl4a
|
python/src/Demo/rpc/mountclient.py
|
42
|
6632
|
# Mount RPC client -- RFC 1094 (NFS), Appendix A
# This module demonstrates how to write your own RPC client in Python.
# When this example was written, there was no RPC compiler for
# Python. Without such a compiler, you must first create classes
# derived from Packer and Unpacker to handle the data types for the
# server you want to interface to. You then write the client class.
# If you want to support both the TCP and the UDP version of a
# protocol, use multiple inheritance as shown below.
import rpc
from rpc import Packer, Unpacker, TCPClient, UDPClient
# Program number and version for the mount protocol
MOUNTPROG = 100005
MOUNTVERS = 1
# Size of the 'fhandle' opaque structure
FHSIZE = 32
# Packer derived class for Mount protocol clients.
# The only thing we need to pack beyond basic types is an 'fhandle'
class MountPacker(Packer):
def pack_fhandle(self, fhandle):
self.pack_fopaque(FHSIZE, fhandle)
# Unpacker derived class for Mount protocol clients.
# The important types we need to unpack are fhandle, fhstatus,
# mountlist and exportlist; mountstruct, exportstruct and groups are
# used to unpack components of mountlist and exportlist and the
# corresponding functions are passed as function argument to the
# generic unpack_list function.
class MountUnpacker(Unpacker):
def unpack_fhandle(self):
return self.unpack_fopaque(FHSIZE)
def unpack_fhstatus(self):
status = self.unpack_uint()
if status == 0:
fh = self.unpack_fhandle()
else:
fh = None
return status, fh
def unpack_mountlist(self):
return self.unpack_list(self.unpack_mountstruct)
def unpack_mountstruct(self):
hostname = self.unpack_string()
directory = self.unpack_string()
return (hostname, directory)
def unpack_exportlist(self):
return self.unpack_list(self.unpack_exportstruct)
def unpack_exportstruct(self):
filesys = self.unpack_string()
groups = self.unpack_groups()
return (filesys, groups)
def unpack_groups(self):
return self.unpack_list(self.unpack_string)
# These are the procedures specific to the Mount client class.
# Think of this as a derived class of either TCPClient or UDPClient.
class PartialMountClient:
# This method is called by Client.__init__ to initialize
# self.packer and self.unpacker
def addpackers(self):
self.packer = MountPacker()
self.unpacker = MountUnpacker('')
# This method is called by Client.__init__ to bind the socket
# to a particular network interface and port. We use the
# default network interface, but if we're running as root,
# we want to bind to a reserved port
def bindsocket(self):
import os
try:
uid = os.getuid()
except AttributeError:
uid = 1
if uid == 0:
port = rpc.bindresvport(self.sock, '')
# 'port' is not used
else:
self.sock.bind(('', 0))
    # This method is called to produce a suitable authentication
    # object for a remote procedure call.
def mkcred(self):
if self.cred is None:
self.cred = rpc.AUTH_UNIX, rpc.make_auth_unix_default()
return self.cred
# The methods Mnt, Dump etc. each implement one Remote
# Procedure Call. This is done by calling self.make_call()
# with as arguments:
#
# - the procedure number
# - the arguments (or None)
# - the "packer" function for the arguments (or None)
# - the "unpacker" function for the return value (or None)
#
# The packer and unpacker function, if not None, *must* be
# methods of self.packer and self.unpacker, respectively.
# A value of None means that there are no arguments or is no
# return value, respectively.
#
# The return value from make_call() is the return value from
# the remote procedure call, as unpacked by the "unpacker"
# function, or None if the unpacker function is None.
#
# (Even if you expect a result of None, you should still
# return the return value from make_call(), since this may be
# needed by a broadcasting version of the class.)
#
# If the call fails, make_call() raises an exception
# (this includes time-outs and invalid results).
#
# Note that (at least with the UDP protocol) there is no
# guarantee that a call is executed at most once. When you do
# get a reply, you know it has been executed at least once;
# when you don't get a reply, you know nothing.
def Mnt(self, directory):
return self.make_call(1, directory, \
self.packer.pack_string, \
self.unpacker.unpack_fhstatus)
def Dump(self):
return self.make_call(2, None, \
None, self.unpacker.unpack_mountlist)
def Umnt(self, directory):
return self.make_call(3, directory, \
self.packer.pack_string, None)
def Umntall(self):
return self.make_call(4, None, None, None)
def Export(self):
return self.make_call(5, None, \
None, self.unpacker.unpack_exportlist)
# We turn the partial Mount client into a full one for either protocol
# by use of multiple inheritance. (In general, when class C has base
# classes B1...Bn, if x is an instance of class C, methods of x are
# searched first in C, then in B1, then in B2, ..., finally in Bn.)
class TCPMountClient(PartialMountClient, TCPClient):
def __init__(self, host):
TCPClient.__init__(self, host, MOUNTPROG, MOUNTVERS)
class UDPMountClient(PartialMountClient, UDPClient):
def __init__(self, host):
UDPClient.__init__(self, host, MOUNTPROG, MOUNTVERS)
# A little test program for the Mount client. This takes a host as
# command line argument (default the local machine), prints its export
# list, and attempts to mount and unmount each exported files system.
# An optional first argument of -t or -u specifies the protocol to use
# (TCP or UDP), default is UDP.
def test():
import sys
if sys.argv[1:] and sys.argv[1] == '-t':
C = TCPMountClient
del sys.argv[1]
elif sys.argv[1:] and sys.argv[1] == '-u':
C = UDPMountClient
del sys.argv[1]
else:
C = UDPMountClient
if sys.argv[1:]: host = sys.argv[1]
else: host = ''
mcl = C(host)
list = mcl.Export()
for item in list:
print item
try:
mcl.Mnt(item[0])
except:
print 'Sorry'
continue
mcl.Umnt(item[0])
|
apache-2.0
|
ubirch/aws-tools
|
virtual-env/lib/python2.7/site-packages/boto/file/__init__.py
|
153
|
1258
|
# Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import boto
from boto.file.connection import FileConnection as Connection
from boto.file.key import Key
from boto.file.bucket import Bucket
__all__ = ['Connection', 'Key', 'Bucket']
|
apache-2.0
|