repo_name | path | copies | size | content | license
---|---|---|---|---|---
CloudServer/nova
|
nova/version.py
|
61
|
2298
|
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from nova.i18n import _LE
NOVA_VENDOR = "OpenStack Foundation"
NOVA_PRODUCT = "OpenStack Nova"
NOVA_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr.version.VersionInfo('nova')
version_string = version_info.version_string
def _load_config():
# Don't load in global context, since we can't assume
# these modules are accessible when distutils uses
# this module
from six.moves import configparser
from oslo_config import cfg
import logging
global loaded, NOVA_VENDOR, NOVA_PRODUCT, NOVA_PACKAGE
if loaded:
return
loaded = True
cfgfile = cfg.CONF.find_file("release")
if cfgfile is None:
return
try:
cfg = configparser.RawConfigParser()
cfg.read(cfgfile)
if cfg.has_option("Nova", "vendor"):
NOVA_VENDOR = cfg.get("Nova", "vendor")
if cfg.has_option("Nova", "product"):
NOVA_PRODUCT = cfg.get("Nova", "product")
if cfg.has_option("Nova", "package"):
NOVA_PACKAGE = cfg.get("Nova", "package")
except Exception as ex:
LOG = logging.getLogger(__name__)
LOG.error(_LE("Failed to load %(cfgfile)s: %(ex)s"),
{'cfgfile': cfgfile, 'ex': ex})
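# Hedged sketch of the optional "release" file consumed above (INI format read
# with RawConfigParser; all keys are optional and the values are illustrative):
#
#   [Nova]
#   vendor = Acme Cloud Inc.
#   product = Acme Compute
#   package = 1.el7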
def vendor_string():
_load_config()
return NOVA_VENDOR
def product_string():
_load_config()
return NOVA_PRODUCT
def package_string():
_load_config()
return NOVA_PACKAGE
def version_string_with_package():
if package_string() is None:
return version_info.version_string()
else:
return "%s-%s" % (version_info.version_string(), package_string())
|
apache-2.0
|
disruptek/boto
|
boto/dynamodb/layer2.py
|
18
|
33732
|
# Copyright (c) 2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.dynamodb.layer1 import Layer1
from boto.dynamodb.table import Table
from boto.dynamodb.schema import Schema
from boto.dynamodb.item import Item
from boto.dynamodb.batch import BatchList, BatchWriteList
from boto.dynamodb.types import get_dynamodb_type, Dynamizer, \
LossyFloatDynamizer
class TableGenerator(object):
"""
This is an object that wraps up the table_generator function.
The only real reason to have this is that we want to be able
to accumulate and return the ConsumedCapacityUnits element that
is part of each response.
:ivar last_evaluated_key: A sequence representing the key(s)
of the item last evaluated, or None if no additional
results are available.
:ivar remaining: The remaining quantity of results requested.
:ivar table: The table to which the call was made.
"""
def __init__(self, table, callable, remaining, item_class, kwargs):
self.table = table
self.callable = callable
self.remaining = -1 if remaining is None else remaining
self.item_class = item_class
self.kwargs = kwargs
self._consumed_units = 0.0
self.last_evaluated_key = None
self._count = 0
self._scanned_count = 0
self._response = None
@property
def count(self):
"""
The total number of items retrieved thus far. This value changes as
iteration proceeds; even when issuing a call with count=True, the
iteration must be completed before the count value is accurate.
"""
self.response
return self._count
@property
def scanned_count(self):
"""
As above, but representing the total number of items scanned by
DynamoDB, without regard to any filters.
"""
self.response
return self._scanned_count
@property
def consumed_units(self):
"""
Returns a float representing the ConsumedCapacityUnits accumulated.
"""
self.response
return self._consumed_units
@property
def response(self):
"""
The current response to the call from DynamoDB.
"""
return self.next_response() if self._response is None else self._response
def next_response(self):
"""
Issue a call and return the result. You can invoke this method
while iterating over the TableGenerator in order to skip to the
next "page" of results.
"""
# preserve any existing limit in case the user alters self.remaining
limit = self.kwargs.get('limit')
if (self.remaining > 0 and (limit is None or limit > self.remaining)):
self.kwargs['limit'] = self.remaining
self._response = self.callable(**self.kwargs)
self.kwargs['limit'] = limit
self._consumed_units += self._response.get('ConsumedCapacityUnits', 0.0)
self._count += self._response.get('Count', 0)
self._scanned_count += self._response.get('ScannedCount', 0)
# at the expense of a possibly gratuitous dynamize, ensure that
# early generator termination won't result in bad LEK values
if 'LastEvaluatedKey' in self._response:
lek = self._response['LastEvaluatedKey']
esk = self.table.layer2.dynamize_last_evaluated_key(lek)
self.kwargs['exclusive_start_key'] = esk
lektuple = (lek['HashKeyElement'],)
if 'RangeKeyElement' in lek:
lektuple += (lek['RangeKeyElement'],)
self.last_evaluated_key = lektuple
else:
self.last_evaluated_key = None
return self._response
def __iter__(self):
while self.remaining != 0:
response = self.response
for item in response.get('Items', []):
self.remaining -= 1
yield self.item_class(self.table, attrs=item)
if self.remaining == 0:
break
if response is not self._response:
break
else:
if self.last_evaluated_key is not None:
self.next_response()
continue
break
if response is not self._response:
continue
break
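# Illustrative use of TableGenerator (connection, table, and limit are
# hypothetical): the generator pages through results transparently and
# accumulates totals that can be read after (or during) iteration.
#
#   gen = conn.scan(table, max_results=500)
#   for item in gen:
#       process(item)
#   print(gen.count, gen.scanned_count, gen.consumed_units)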
class Layer2(object):
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
debug=0, security_token=None, region=None,
validate_certs=True, dynamizer=LossyFloatDynamizer,
profile_name=None):
self.layer1 = Layer1(aws_access_key_id, aws_secret_access_key,
is_secure, port, proxy, proxy_port,
debug, security_token, region,
validate_certs=validate_certs,
profile_name=profile_name)
self.dynamizer = dynamizer()
def use_decimals(self):
"""
Use the ``decimal.Decimal`` type for encoding/decoding numeric types.
By default, ints/floats are used to represent numeric types
('N', 'NS') received from DynamoDB. Using the ``Decimal``
type is recommended to prevent loss of precision.
"""
# Eventually this should be made the default dynamizer.
self.dynamizer = Dynamizer()
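# Hedged example (assuming boto's dynamodb connect_to_region helper; region
# name illustrative): opt in to Decimal handling right after creating the
# connection, before any reads or writes.
#
#   import boto.dynamodb
#   conn = boto.dynamodb.connect_to_region('us-east-1')  # returns a Layer2
#   conn.use_decimals()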
def dynamize_attribute_updates(self, pending_updates):
"""
Convert a set of pending item updates into the structure
required by Layer1.
"""
d = {}
for attr_name in pending_updates:
action, value = pending_updates[attr_name]
if value is None:
# DELETE without an attribute value
d[attr_name] = {"Action": action}
else:
d[attr_name] = {"Action": action,
"Value": self.dynamizer.encode(value)}
return d
def dynamize_item(self, item):
d = {}
for attr_name in item:
d[attr_name] = self.dynamizer.encode(item[attr_name])
return d
def dynamize_range_key_condition(self, range_key_condition):
"""
Convert a layer2 range_key_condition parameter into the
structure required by Layer1.
"""
return range_key_condition.to_dict()
def dynamize_scan_filter(self, scan_filter):
"""
Convert a layer2 scan_filter parameter into the
structure required by Layer1.
"""
d = None
if scan_filter:
d = {}
for attr_name in scan_filter:
condition = scan_filter[attr_name]
d[attr_name] = condition.to_dict()
return d
def dynamize_expected_value(self, expected_value):
"""
Convert an expected_value parameter into the data structure
required for Layer1.
"""
d = None
if expected_value:
d = {}
for attr_name in expected_value:
attr_value = expected_value[attr_name]
if attr_value is True:
attr_value = {'Exists': True}
elif attr_value is False:
attr_value = {'Exists': False}
else:
val = self.dynamizer.encode(expected_value[attr_name])
attr_value = {'Value': val}
d[attr_name] = attr_value
return d
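# Sketch of the conversion performed above (attribute names illustrative):
#
#   conn.dynamize_expected_value({'views': 10, 'deleted': False})
#   # -> {'views': {'Value': {'N': '10'}},
#   #     'deleted': {'Exists': False}}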
def dynamize_last_evaluated_key(self, last_evaluated_key):
"""
Convert a last_evaluated_key parameter into the data structure
required for Layer1.
"""
d = None
if last_evaluated_key:
hash_key = last_evaluated_key['HashKeyElement']
d = {'HashKeyElement': self.dynamizer.encode(hash_key)}
if 'RangeKeyElement' in last_evaluated_key:
range_key = last_evaluated_key['RangeKeyElement']
d['RangeKeyElement'] = self.dynamizer.encode(range_key)
return d
def build_key_from_values(self, schema, hash_key, range_key=None):
"""
Build a Key structure to be used for accessing items
in Amazon DynamoDB. This method takes the supplied hash_key
and optional range_key and validates them against the
schema. If there is a mismatch, a TypeError is raised.
Otherwise, a Python dict version of an Amazon DynamoDB Key
data structure is returned.
:type hash_key: int|float|str|unicode|Binary
:param hash_key: The hash key of the item you are looking for.
The type of the hash key should match the type defined in
the schema.
:type range_key: int|float|str|unicode|Binary
:param range_key: The range key of the item you are looking for.
This should be supplied only if the schema requires a
range key. The type of the range key should match the
type defined in the schema.
"""
dynamodb_key = {}
dynamodb_value = self.dynamizer.encode(hash_key)
if list(dynamodb_value.keys())[0] != schema.hash_key_type:
msg = 'Hashkey must be of type: %s' % schema.hash_key_type
raise TypeError(msg)
dynamodb_key['HashKeyElement'] = dynamodb_value
if range_key is not None:
dynamodb_value = self.dynamizer.encode(range_key)
if list(dynamodb_value.keys())[0] != schema.range_key_type:
msg = 'RangeKey must be of type: %s' % schema.range_key_type
raise TypeError(msg)
dynamodb_key['RangeKeyElement'] = dynamodb_value
return dynamodb_key
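# Sketch of the resulting key structure (assuming a string hash key and a
# numeric range key in the table's schema; values illustrative):
#
#   key = conn.build_key_from_values(table.schema, 'thread-1', 42)
#   # key == {'HashKeyElement': {'S': 'thread-1'},
#   #         'RangeKeyElement': {'N': '42'}}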
def new_batch_list(self):
"""
Return a new, empty :class:`boto.dynamodb.batch.BatchList`
object.
"""
return BatchList(self)
def new_batch_write_list(self):
"""
Return a new, empty :class:`boto.dynamodb.batch.BatchWriteList`
object.
"""
return BatchWriteList(self)
def list_tables(self, limit=None):
"""
Return a list of the names of all tables associated with the
current account and region.
:type limit: int
:param limit: The maximum number of tables to return.
"""
tables = []
start_table = None
while not limit or len(tables) < limit:
this_round_limit = None
if limit:
this_round_limit = limit - len(tables)
this_round_limit = min(this_round_limit, 100)
result = self.layer1.list_tables(limit=this_round_limit, start_table=start_table)
tables.extend(result.get('TableNames', []))
start_table = result.get('LastEvaluatedTableName', None)
if not start_table:
break
return tables
def describe_table(self, name):
"""
Retrieve information about an existing table.
:type name: str
:param name: The name of the desired table.
"""
return self.layer1.describe_table(name)
def table_from_schema(self, name, schema):
"""
Create a Table object from a schema.
This method will create a Table object without
making any API calls. If you know the name and schema
of the table, you can use this method instead of
``get_table``.
Example usage::
table = layer2.table_from_schema(
'tablename',
Schema.create(hash_key=('foo', 'N')))
:type name: str
:param name: The name of the table.
:type schema: :class:`boto.dynamodb.schema.Schema`
:param schema: The schema associated with the table.
:rtype: :class:`boto.dynamodb.table.Table`
:return: A Table object representing the table.
"""
return Table.create_from_schema(self, name, schema)
def get_table(self, name):
"""
Retrieve the Table object for an existing table.
:type name: str
:param name: The name of the desired table.
:rtype: :class:`boto.dynamodb.table.Table`
:return: A Table object representing the table.
"""
response = self.layer1.describe_table(name)
return Table(self, response)
lookup = get_table
def create_table(self, name, schema, read_units, write_units):
"""
Create a new Amazon DynamoDB table.
:type name: str
:param name: The name of the desired table.
:type schema: :class:`boto.dynamodb.schema.Schema`
:param schema: The Schema object that defines the schema used
by this table.
:type read_units: int
:param read_units: The value for ReadCapacityUnits.
:type write_units: int
:param write_units: The value for WriteCapacityUnits.
:rtype: :class:`boto.dynamodb.table.Table`
:return: A Table object representing the new Amazon DynamoDB table.
"""
response = self.layer1.create_table(name, schema.dict,
{'ReadCapacityUnits': read_units,
'WriteCapacityUnits': write_units})
return Table(self, response)
def update_throughput(self, table, read_units, write_units):
"""
Update the ProvisionedThroughput for the Amazon DynamoDB Table.
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object whose throughput is being updated.
:type read_units: int
:param read_units: The new value for ReadCapacityUnits.
:type write_units: int
:param write_units: The new value for WriteCapacityUnits.
"""
response = self.layer1.update_table(table.name,
{'ReadCapacityUnits': read_units,
'WriteCapacityUnits': write_units})
table.update_from_response(response)
def delete_table(self, table):
"""
Delete this table and all items in it. After calling this,
the Table object's status attribute will be set to 'DELETING'.
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object that is being deleted.
"""
response = self.layer1.delete_table(table.name)
table.update_from_response(response)
def create_schema(self, hash_key_name, hash_key_proto_value,
range_key_name=None, range_key_proto_value=None):
"""
Create a Schema object used when creating a Table.
:type hash_key_name: str
:param hash_key_name: The name of the HashKey for the schema.
:type hash_key_proto_value: int|long|float|str|unicode|Binary
:param hash_key_proto_value: A sample or prototype of the type
of value you want to use for the HashKey. Alternatively,
you can also just pass in the Python type (e.g. int, float, etc.).
:type range_key_name: str
:param range_key_name: The name of the RangeKey for the schema.
This parameter is optional.
:type range_key_proto_value: int|long|float|str|unicode|Binary
:param range_key_proto_value: A sample or prototype of the type
of value you want to use for the RangeKey. Alternatively,
you can also pass in the Python type (e.g. int, float, etc.)
This parameter is optional.
"""
hash_key = (hash_key_name, get_dynamodb_type(hash_key_proto_value))
if range_key_name and range_key_proto_value is not None:
range_key = (range_key_name,
get_dynamodb_type(range_key_proto_value))
else:
range_key = None
return Schema.create(hash_key, range_key)
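# Hedged usage sketch (table and attribute names illustrative): build a schema
# from prototype values, then create the table with it.
#
#   schema = conn.create_schema(hash_key_name='forum',
#                               hash_key_proto_value='a-string',
#                               range_key_name='posted_at',
#                               range_key_proto_value='a-string')
#   table = conn.create_table('threads', schema, read_units=10, write_units=5)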
def get_item(self, table, hash_key, range_key=None,
attributes_to_get=None, consistent_read=False,
item_class=Item):
"""
Retrieve an existing item from the table.
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object from which the item is retrieved.
:type hash_key: int|long|float|str|unicode|Binary
:param hash_key: The HashKey of the requested item. The
type of the value must match the type defined in the
schema for the table.
:type range_key: int|long|float|str|unicode|Binary
:param range_key: The optional RangeKey of the requested item.
The type of the value must match the type defined in the
schema for the table.
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
:type consistent_read: bool
:param consistent_read: If True, a consistent read
request is issued. Otherwise, an eventually consistent
request is issued.
:type item_class: Class
:param item_class: Allows you to override the class used
to generate the items. This should be a subclass of
:class:`boto.dynamodb.item.Item`
"""
key = self.build_key_from_values(table.schema, hash_key, range_key)
response = self.layer1.get_item(table.name, key,
attributes_to_get, consistent_read,
object_hook=self.dynamizer.decode)
item = item_class(table, hash_key, range_key, response['Item'])
if 'ConsumedCapacityUnits' in response:
item.consumed_units = response['ConsumedCapacityUnits']
return item
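# Hedged example (key and attribute names illustrative): fetch an item by hash
# key with a strongly consistent read.
#
#   item = conn.get_item(table, 'thread-1', consistent_read=True)
#   print(item['title'], item.consumed_units)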
def batch_get_item(self, batch_list):
"""
Return a set of attributes for multiple items in
multiple tables using their primary keys.
:type batch_list: :class:`boto.dynamodb.batch.BatchList`
:param batch_list: A BatchList object which consists of a
list of :class:`boto.dynamodb.batch.Batch` objects.
Each Batch object contains the information about one
batch of objects that you wish to retrieve in this
request.
"""
request_items = batch_list.to_dict()
return self.layer1.batch_get_item(request_items,
object_hook=self.dynamizer.decode)
def batch_write_item(self, batch_list):
"""
Performs multiple Puts and Deletes in one batch.
:type batch_list: :class:`boto.dynamodb.batch.BatchWriteList`
:param batch_list: A BatchWriteList object which consists of a
list of :class:`boto.dynamodb.batch.BatchWrite` objects.
Each Batch object contains the information about one
batch of objects that you wish to put or delete.
"""
request_items = batch_list.to_dict()
return self.layer1.batch_write_item(request_items,
object_hook=self.dynamizer.decode)
def put_item(self, item, expected_value=None, return_values=None):
"""
Store a new item or completely replace an existing item
in Amazon DynamoDB.
:type item: :class:`boto.dynamodb.item.Item`
:param item: The Item to write to Amazon DynamoDB.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expecting or False if you expect the attribute not to
exist.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before they were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
"""
expected_value = self.dynamize_expected_value(expected_value)
response = self.layer1.put_item(item.table.name,
self.dynamize_item(item),
expected_value, return_values,
object_hook=self.dynamizer.decode)
if 'ConsumedCapacityUnits' in response:
item.consumed_units = response['ConsumedCapacityUnits']
return response
def update_item(self, item, expected_value=None, return_values=None):
"""
Commit pending item updates to Amazon DynamoDB.
:type item: :class:`boto.dynamodb.item.Item`
:param item: The Item to update in Amazon DynamoDB. It is expected
that you would have called the add_attribute, put_attribute
and/or delete_attribute methods on this Item prior to calling
this method. Those queued changes are what will be updated.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you
expect. This dictionary should have name/value pairs where the
name is the name of the attribute and the value is either the
value you are expecting or False if you expect the attribute
not to exist.
:type return_values: str
:param return_values: Controls the return of attribute name/value pairs
before they were updated. Possible values are: None, 'ALL_OLD',
'UPDATED_OLD', 'ALL_NEW' or 'UPDATED_NEW'. If 'ALL_OLD' is
specified and the item is overwritten, the content of the old item
is returned. If 'ALL_NEW' is specified, then all the attributes of
the new version of the item are returned. If 'UPDATED_NEW' is
specified, the new versions of only the updated attributes are
returned.
"""
expected_value = self.dynamize_expected_value(expected_value)
key = self.build_key_from_values(item.table.schema,
item.hash_key, item.range_key)
attr_updates = self.dynamize_attribute_updates(item._updates)
response = self.layer1.update_item(item.table.name, key,
attr_updates,
expected_value, return_values,
object_hook=self.dynamizer.decode)
item._updates.clear()
if 'ConsumedCapacityUnits' in response:
item.consumed_units = response['ConsumedCapacityUnits']
return response
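# Hedged usage sketch (key and attribute names illustrative): queue changes on
# the Item via put_attribute/add_attribute/delete_attribute, then commit them
# here in a single call.
#
#   item = conn.get_item(table, 'thread-1')
#   item.put_attribute('status', 'closed')
#   item.add_attribute('views', 1)
#   conn.update_item(item, return_values='UPDATED_NEW')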
def delete_item(self, item, expected_value=None, return_values=None):
"""
Delete the item from Amazon DynamoDB.
:type item: :class:`boto.dynamodb.item.Item`
:param item: The Item to delete from Amazon DynamoDB.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expecting or False if you expect the attribute not to
exist.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before they were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
"""
expected_value = self.dynamize_expected_value(expected_value)
key = self.build_key_from_values(item.table.schema,
item.hash_key, item.range_key)
return self.layer1.delete_item(item.table.name, key,
expected=expected_value,
return_values=return_values,
object_hook=self.dynamizer.decode)
def query(self, table, hash_key, range_key_condition=None,
attributes_to_get=None, request_limit=None,
max_results=None, consistent_read=False,
scan_index_forward=True, exclusive_start_key=None,
item_class=Item, count=False):
"""
Perform a query on the table.
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object that is being queried.
:type hash_key: int|long|float|str|unicode|Binary
:param hash_key: The HashKey of the requested item. The
type of the value must match the type defined in the
schema for the table.
:type range_key_condition: :class:`boto.dynamodb.condition.Condition`
:param range_key_condition: A Condition object.
Condition object can be one of the following types:
EQ|LE|LT|GE|GT|BEGINS_WITH|BETWEEN
The only condition which expects or will accept two
values is 'BETWEEN', otherwise a single value should
be passed to the Condition constructor.
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
:type request_limit: int
:param request_limit: The maximum number of items to retrieve
from Amazon DynamoDB on each request. You may want to set
a specific request_limit based on the provisioned throughput
of your table. The default behavior is to retrieve as many
results as possible per request.
:type max_results: int
:param max_results: The maximum number of results that will
be retrieved from Amazon DynamoDB in total. For example,
if you only wanted to see the first 100 results from the
query, regardless of how many were actually available, you
could set max_results to 100 and the generator returned
from the query method will yield at most 100 results.
:type consistent_read: bool
:param consistent_read: If True, a consistent read
request is issued. Otherwise, an eventually consistent
request is issued.
:type scan_index_forward: bool
:param scan_index_forward: Specifies forward or backward
traversal of the index. Default is forward (True).
:type count: bool
:param count: If True, Amazon DynamoDB returns a total
number of items for the Query operation, even if the
operation has no matching items for the assigned filter.
If count is True, the actual items are not returned and
the count is accessible as the ``count`` attribute of
the returned object.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
which to continue an earlier query. This would be
provided as the LastEvaluatedKey in that query.
:type item_class: Class
:param item_class: Allows you to override the class used
to generate the items. This should be a subclass of
:class:`boto.dynamodb.item.Item`
:rtype: :class:`boto.dynamodb.layer2.TableGenerator`
"""
if range_key_condition:
rkc = self.dynamize_range_key_condition(range_key_condition)
else:
rkc = None
if exclusive_start_key:
esk = self.build_key_from_values(table.schema,
*exclusive_start_key)
else:
esk = None
kwargs = {'table_name': table.name,
'hash_key_value': self.dynamizer.encode(hash_key),
'range_key_conditions': rkc,
'attributes_to_get': attributes_to_get,
'limit': request_limit,
'count': count,
'consistent_read': consistent_read,
'scan_index_forward': scan_index_forward,
'exclusive_start_key': esk,
'object_hook': self.dynamizer.decode}
return TableGenerator(table, self.layer1.query,
max_results, item_class, kwargs)
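# Hedged query sketch (key values and condition illustrative): range key
# conditions come from boto.dynamodb.condition.
#
#   from boto.dynamodb.condition import BEGINS_WITH
#   results = conn.query(table, hash_key='thread-1',
#                        range_key_condition=BEGINS_WITH('2012-'),
#                        max_results=100)
#   for item in results:
#       print(item)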
def scan(self, table, scan_filter=None,
attributes_to_get=None, request_limit=None, max_results=None,
exclusive_start_key=None, item_class=Item, count=False):
"""
Perform a scan of DynamoDB.
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object that is being scanned.
:type scan_filter: A dict
:param scan_filter: A dictionary where the key is the
attribute name and the value is a
:class:`boto.dynamodb.condition.Condition` object.
Valid Condition objects include:
* EQ - equal (1)
* NE - not equal (1)
* LE - less than or equal (1)
* LT - less than (1)
* GE - greater than or equal (1)
* GT - greater than (1)
* NOT_NULL - attribute exists (0, use None)
* NULL - attribute does not exist (0, use None)
* CONTAINS - substring or value in list (1)
* NOT_CONTAINS - absence of substring or value in list (1)
* BEGINS_WITH - substring prefix (1)
* IN - exact match in list (N)
* BETWEEN - >= first value, <= second value (2)
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
:type request_limit: int
:param request_limit: The maximum number of items to retrieve
from Amazon DynamoDB on each request. You may want to set
a specific request_limit based on the provisioned throughput
of your table. The default behavior is to retrieve as many
results as possible per request.
:type max_results: int
:param max_results: The maximum number of results that will
be retrieved from Amazon DynamoDB in total. For example,
if you only wanted to see the first 100 results from the
scan, regardless of how many were actually available, you
could set max_results to 100 and the generator returned
from the scan method will yield at most 100 results.
:type count: bool
:param count: If True, Amazon DynamoDB returns a total
number of items for the Scan operation, even if the
operation has no matching items for the assigned filter.
If count is True, the actual items are not returned and
the count is accessible as the ``count`` attribute of
the returned object.
:type exclusive_start_key: list or tuple
:param exclusive_start_key: Primary key of the item from
which to continue an earlier scan. This would be
provided as the LastEvaluatedKey in that scan.
:type item_class: Class
:param item_class: Allows you to override the class used
to generate the items. This should be a subclass of
:class:`boto.dynamodb.item.Item`
:rtype: :class:`boto.dynamodb.layer2.TableGenerator`
"""
if exclusive_start_key:
esk = self.build_key_from_values(table.schema,
*exclusive_start_key)
else:
esk = None
kwargs = {'table_name': table.name,
'scan_filter': self.dynamize_scan_filter(scan_filter),
'attributes_to_get': attributes_to_get,
'limit': request_limit,
'count': count,
'exclusive_start_key': esk,
'object_hook': self.dynamizer.decode}
return TableGenerator(table, self.layer1.scan,
max_results, item_class, kwargs)
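# Hedged scan sketch (attribute name and threshold illustrative): scan_filter
# maps attribute names to Condition objects from boto.dynamodb.condition.
#
#   from boto.dynamodb.condition import GT
#   results = conn.scan(table, scan_filter={'view_count': GT(100)},
#                       max_results=200)
#   for item in results:
#       print(item)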
|
mit
|
xupit3r/askpgh
|
askbot/migrations/0034_auto__add_field_user_avatar_url.py
|
13
|
26131
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from askbot.migrations_api import safe_add_column
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'User.has_custom_avatar'
safe_add_column(u'auth_user', 'has_custom_avatar', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'User.has_custom_avatar'
db.delete_column(u'auth_user', 'has_custom_avatar')
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'through': "'ActivityAuditStatus'", 'to': "orm['auth.User']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.answerrevision': {
'Meta': {'object_name': 'AnswerRevision', 'db_table': "u'answer_revision'"},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answerrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'through': "'Award'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'askbot.comment': {
'Meta': {'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'through': "'FavoriteQuestion'", 'to': "orm['auth.User']"}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionrevision': {
'Meta': {'object_name': 'QuestionRevision', 'db_table': "u'question_revision'"},
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questionrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'revisions'", 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'blank': 'True'}),
'has_custom_avatar': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'hide_ignored_questions': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'tag_filter_setting': ('django.db.models.fields.CharField', [], {'default': "'ignored'", 'max_length': '16'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
|
gpl-3.0
|
waheedahmed/edx-platform
|
common/djangoapps/student/management/tests/test_manage_group.py
|
50
|
7602
|
"""
Unit tests for user_management management commands.
"""
import sys
import ddt
from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management import call_command, CommandError
from django.test import TestCase
TEST_EMAIL = 'test@example.com'
TEST_GROUP = 'test-group'
TEST_USERNAME = 'test-user'
TEST_DATA = (
{},
{
TEST_GROUP: ['add_group', 'change_group', 'change_group'],
},
{
'other-group': ['add_group', 'change_group', 'change_group'],
},
)
@ddt.ddt
class TestManageGroupCommand(TestCase):
"""
Tests the `manage_group` command.
"""
def set_group_permissions(self, group_permissions):
"""
Sets up a before-state for groups and permissions in tests, which
can be checked afterward to ensure that a failed atomic
operation has not had any side effects.
"""
content_type = ContentType.objects.get_for_model(Group)
for group_name, permission_codenames in group_permissions.items():
group = Group.objects.create(name=group_name)
for codename in permission_codenames:
group.permissions.add(
Permission.objects.get(content_type=content_type, codename=codename) # pylint: disable=no-member
)
def check_group_permissions(self, group_permissions):
"""
Checks that the current state of the database matches the specified groups and
permissions.
"""
self.check_groups(group_permissions.keys())
for group_name, permission_codenames in group_permissions.items():
self.check_permissions(group_name, permission_codenames)
def check_groups(self, group_names):
"""
DRY helper.
"""
self.assertEqual(set(group_names), {g.name for g in Group.objects.all()}) # pylint: disable=no-member
def check_permissions(self, group_name, permission_codenames):
"""
DRY helper.
"""
self.assertEqual(
set(permission_codenames),
{p.codename for p in Group.objects.get(name=group_name).permissions.all()} # pylint: disable=no-member
)
@ddt.data(
*(
(data, args, exception)
for data in TEST_DATA
for args, exception in (
((), 'too few arguments' if sys.version_info.major == 2 else 'required: group_name'), # no group name
(('x' * 81,), 'invalid group name'), # invalid group name
((TEST_GROUP, 'some-other-group'), 'unrecognized arguments'), # multiple arguments
((TEST_GROUP, '--some-option', 'dummy'), 'unrecognized arguments') # unexpected option name
)
)
)
@ddt.unpack
def test_invalid_input(self, initial_group_permissions, command_args, exception_message):
"""
Ensures that invalid inputs result in errors with relevant output,
and that no persistent state is changed.
"""
self.set_group_permissions(initial_group_permissions)
with self.assertRaises(CommandError) as exc_context:
call_command('manage_group', *command_args)
self.assertIn(exception_message, str(exc_context.exception).lower())
self.check_group_permissions(initial_group_permissions)
@ddt.data(*TEST_DATA)
def test_invalid_permission(self, initial_group_permissions):
"""
Ensures that a permission that cannot be parsed or resolved results in
an error and that no persistent state is changed.
"""
self.set_group_permissions(initial_group_permissions)
# not parseable
with self.assertRaises(CommandError) as exc_context:
call_command('manage_group', TEST_GROUP, '--permissions', 'fail')
self.assertIn('invalid permission option', str(exc_context.exception).lower())
self.check_group_permissions(initial_group_permissions)
# not parseable
with self.assertRaises(CommandError) as exc_context:
call_command('manage_group', TEST_GROUP, '--permissions', 'f:a:i:l')
self.assertIn('invalid permission option', str(exc_context.exception).lower())
self.check_group_permissions(initial_group_permissions)
# invalid app label
with self.assertRaises(CommandError) as exc_context:
call_command('manage_group', TEST_GROUP, '--permissions', 'nonexistent-label:dummy-model:dummy-perm')
self.assertIn('no installed app', str(exc_context.exception).lower())
self.assertIn('nonexistent-label', str(exc_context.exception).lower())
self.check_group_permissions(initial_group_permissions)
# invalid model name
with self.assertRaises(CommandError) as exc_context:
call_command('manage_group', TEST_GROUP, '--permissions', 'auth:nonexistent-model:dummy-perm')
self.assertIn('nonexistent-model', str(exc_context.exception).lower())
self.check_group_permissions(initial_group_permissions)
# invalid permission codename
with self.assertRaises(CommandError) as exc_context:
call_command('manage_group', TEST_GROUP, '--permissions', 'auth:Group:nonexistent-perm')
self.assertIn('invalid permission codename', str(exc_context.exception).lower())
self.assertIn('nonexistent-perm', str(exc_context.exception).lower())
self.check_group_permissions(initial_group_permissions)
def test_group(self):
"""
Ensures that groups are created if they don't exist and reused if they do.
"""
self.check_groups([])
call_command('manage_group', TEST_GROUP)
self.check_groups([TEST_GROUP])
# check idempotency
call_command('manage_group', TEST_GROUP)
self.check_groups([TEST_GROUP])
def test_group_remove(self):
"""
Ensures that groups are removed if they exist and we exit cleanly otherwise.
"""
self.set_group_permissions({TEST_GROUP: ['add_group']})
self.check_groups([TEST_GROUP])
call_command('manage_group', TEST_GROUP, '--remove')
self.check_groups([])
# check idempotency
call_command('manage_group', TEST_GROUP, '--remove')
self.check_groups([])
def test_permissions(self):
"""
Ensures that permissions are set on the group as specified.
"""
self.check_groups([])
call_command('manage_group', TEST_GROUP, '--permissions', 'auth:Group:add_group')
self.check_groups([TEST_GROUP])
self.check_permissions(TEST_GROUP, ['add_group'])
# check idempotency
call_command('manage_group', TEST_GROUP, '--permissions', 'auth:Group:add_group')
self.check_groups([TEST_GROUP])
self.check_permissions(TEST_GROUP, ['add_group'])
# check adding a permission
call_command('manage_group', TEST_GROUP, '--permissions', 'auth:Group:add_group', 'auth:Group:change_group')
self.check_groups([TEST_GROUP])
self.check_permissions(TEST_GROUP, ['add_group', 'change_group'])
# check removing a permission
call_command('manage_group', TEST_GROUP, '--permissions', 'auth:Group:change_group')
self.check_groups([TEST_GROUP])
self.check_permissions(TEST_GROUP, ['change_group'])
# check removing all permissions
call_command('manage_group', TEST_GROUP)
self.check_groups([TEST_GROUP])
self.check_permissions(TEST_GROUP, [])
|
agpl-3.0
|
prampey/servo
|
tests/wpt/web-platform-tests/tools/pytest/testing/test_nose.py
|
173
|
10146
|
import pytest
def setup_module(mod):
mod.nose = pytest.importorskip("nose")
def test_nose_setup(testdir):
p = testdir.makepyfile("""
l = []
from nose.tools import with_setup
@with_setup(lambda: l.append(1), lambda: l.append(2))
def test_hello():
assert l == [1]
def test_world():
assert l == [1,2]
test_hello.setup = lambda: l.append(1)
test_hello.teardown = lambda: l.append(2)
""")
result = testdir.runpytest(p, '-p', 'nose')
result.assert_outcomes(passed=2)
def test_setup_func_with_setup_decorator():
from _pytest.nose import call_optional
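# a pytest fixture on the class must not be picked up by call_optional as a
# nose-style setup function, so the list is expected to stay empty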
l = []
class A:
@pytest.fixture(autouse=True)
def f(self):
l.append(1)
call_optional(A(), "f")
assert not l
def test_setup_func_not_callable():
from _pytest.nose import call_optional
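# a non-callable attribute named like a setup hook should simply be ignored
# by call_optional instead of raising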
class A:
f = 1
call_optional(A(), "f")
def test_nose_setup_func(testdir):
p = testdir.makepyfile("""
from nose.tools import with_setup
l = []
def my_setup():
a = 1
l.append(a)
def my_teardown():
b = 2
l.append(b)
@with_setup(my_setup, my_teardown)
def test_hello():
print (l)
assert l == [1]
def test_world():
print (l)
assert l == [1,2]
""")
result = testdir.runpytest(p, '-p', 'nose')
result.assert_outcomes(passed=2)
def test_nose_setup_func_failure(testdir):
p = testdir.makepyfile("""
from nose.tools import with_setup
l = []
my_setup = lambda x: 1
my_teardown = lambda x: 2
@with_setup(my_setup, my_teardown)
def test_hello():
print (l)
assert l == [1]
def test_world():
print (l)
assert l == [1,2]
""")
result = testdir.runpytest(p, '-p', 'nose')
result.stdout.fnmatch_lines([
"*TypeError: <lambda>()*"
])
def test_nose_setup_func_failure_2(testdir):
testdir.makepyfile("""
l = []
my_setup = 1
my_teardown = 2
def test_hello():
assert l == []
test_hello.setup = my_setup
test_hello.teardown = my_teardown
""")
reprec = testdir.inline_run()
reprec.assertoutcome(passed=1)
def test_nose_setup_partial(testdir):
pytest.importorskip("functools")
p = testdir.makepyfile("""
from functools import partial
l = []
def my_setup(x):
a = x
l.append(a)
def my_teardown(x):
b = x
l.append(b)
my_setup_partial = partial(my_setup, 1)
my_teardown_partial = partial(my_teardown, 2)
def test_hello():
print (l)
assert l == [1]
def test_world():
print (l)
assert l == [1,2]
test_hello.setup = my_setup_partial
test_hello.teardown = my_teardown_partial
""")
result = testdir.runpytest(p, '-p', 'nose')
result.stdout.fnmatch_lines([
"*2 passed*"
])
def test_nose_test_generator_fixtures(testdir):
p = testdir.makepyfile("""
# taken from nose-0.11.1 unit_tests/test_generator_fixtures.py
from nose.tools import eq_
called = []
def outer_setup():
called.append('outer_setup')
def outer_teardown():
called.append('outer_teardown')
def inner_setup():
called.append('inner_setup')
def inner_teardown():
called.append('inner_teardown')
def test_gen():
called[:] = []
for i in range(0, 5):
yield check, i
def check(i):
expect = ['outer_setup']
for x in range(0, i):
expect.append('inner_setup')
expect.append('inner_teardown')
expect.append('inner_setup')
eq_(called, expect)
test_gen.setup = outer_setup
test_gen.teardown = outer_teardown
check.setup = inner_setup
check.teardown = inner_teardown
class TestClass(object):
def setup(self):
print ("setup called in %s" % self)
self.called = ['setup']
def teardown(self):
print ("teardown called in %s" % self)
eq_(self.called, ['setup'])
self.called.append('teardown')
def test(self):
print ("test called in %s" % self)
for i in range(0, 5):
yield self.check, i
def check(self, i):
print ("check called in %s" % self)
expect = ['setup']
#for x in range(0, i):
# expect.append('setup')
# expect.append('teardown')
#expect.append('setup')
eq_(self.called, expect)
""")
result = testdir.runpytest(p, '-p', 'nose')
result.stdout.fnmatch_lines([
"*10 passed*"
])
def test_module_level_setup(testdir):
testdir.makepyfile("""
from nose.tools import with_setup
items = {}
def setup():
items[1]=1
def teardown():
del items[1]
def setup2():
items[2] = 2
def teardown2():
del items[2]
def test_setup_module_setup():
assert items[1] == 1
@with_setup(setup2, teardown2)
def test_local_setup():
assert items[2] == 2
assert 1 not in items
""")
result = testdir.runpytest('-p', 'nose')
result.stdout.fnmatch_lines([
"*2 passed*",
])
def test_nose_style_setup_teardown(testdir):
testdir.makepyfile("""
l = []
def setup_module():
l.append(1)
def teardown_module():
del l[0]
def test_hello():
assert l == [1]
def test_world():
assert l == [1]
""")
result = testdir.runpytest('-p', 'nose')
result.stdout.fnmatch_lines([
"*2 passed*",
])
def test_nose_setup_ordering(testdir):
testdir.makepyfile("""
def setup_module(mod):
mod.visited = True
class TestClass:
def setup(self):
assert visited
def test_first(self):
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*1 passed*",
])
def test_apiwrapper_problem_issue260(testdir):
# this would end up trying to call an optional teardown on the class
# for plain unittests we don't want nose behaviour
testdir.makepyfile("""
import unittest
class TestCase(unittest.TestCase):
def setup(self):
#should not be called in unittest testcases
assert 0, 'setup'
def teardown(self):
#should not be called in unittest testcases
assert 0, 'teardown'
def setUp(self):
print('setup')
def tearDown(self):
print('teardown')
def test_fun(self):
pass
""")
result = testdir.runpytest()
result.assert_outcomes(passed=1)
def test_setup_teardown_linking_issue265(testdir):
# we accidentally didn't integrate nose setupstate with normal setupstate
# this test ensures that won't happen again
testdir.makepyfile('''
import pytest
class TestGeneric(object):
def test_nothing(self):
"""Tests the API of the implementation (for generic and specialized)."""
@pytest.mark.skipif("True", reason=
"Skip tests to check if teardown is skipped as well.")
class TestSkipTeardown(TestGeneric):
def setup(self):
"""Sets up my specialized implementation for $COOL_PLATFORM."""
raise Exception("should not call setup for skipped tests")
def teardown(self):
"""Undoes the setup."""
raise Exception("should not call teardown for skipped tests")
''')
reprec = testdir.runpytest()
reprec.assert_outcomes(passed=1, skipped=1)
def test_SkipTest_during_collection(testdir):
p = testdir.makepyfile("""
import nose
raise nose.SkipTest("during collection")
def test_failing():
assert False
""")
result = testdir.runpytest(p)
result.assert_outcomes(skipped=1)
def test_SkipTest_in_test(testdir):
testdir.makepyfile("""
import nose
def test_skipping():
raise nose.SkipTest("in test")
""")
reprec = testdir.inline_run()
reprec.assertoutcome(skipped=1)
def test_istest_function_decorator(testdir):
p = testdir.makepyfile("""
import nose.tools
@nose.tools.istest
def not_test_prefix():
pass
""")
result = testdir.runpytest(p)
result.assert_outcomes(passed=1)
def test_nottest_function_decorator(testdir):
testdir.makepyfile("""
import nose.tools
@nose.tools.nottest
def test_prefix():
pass
""")
reprec = testdir.inline_run()
assert not reprec.getfailedcollections()
calls = reprec.getreports("pytest_runtest_logreport")
assert not calls
def test_istest_class_decorator(testdir):
p = testdir.makepyfile("""
import nose.tools
@nose.tools.istest
class NotTestPrefix:
def test_method(self):
pass
""")
result = testdir.runpytest(p)
result.assert_outcomes(passed=1)
def test_nottest_class_decorator(testdir):
testdir.makepyfile("""
import nose.tools
@nose.tools.nottest
class TestPrefix:
def test_method(self):
pass
""")
reprec = testdir.inline_run()
assert not reprec.getfailedcollections()
calls = reprec.getreports("pytest_runtest_logreport")
assert not calls
|
mpl-2.0
|
xiaotianyi/INTELLI-City
|
docs/refer_project/wx_with_web/wenpl/divide.py
|
1
|
10289
|
# encoding=utf-8
'''
Program entry point: showReply
'''
import jieba.posseg as pseg
import jieba
import sys
import urllib2
import json
import re
import copy
import datetime
import time
import calendar
from parsedate import parseDate
from getdata import*
from showAll import*
# load extra user dictionaries; the absolute paths used here should be changed later
jieba.load_userdict('wendata/dict/dict1.txt')
jieba.load_userdict('wendata/dict/dict_manual.txt')
jieba.load_userdict('wendata/dict/dict_date.txt')
jieba.load_userdict('wendata/dict/dict2.txt')
# jieba.load_userdict('/root/wechat/wx/wendata/dict/dict2.txt')
reload(sys)
sys.setdefaultencoding('utf-8')
def mergePositions(l):
'''
Tag device/town/city/station entries uniformly as 'position'
'''
positions = {}
for x in l:
for y in x:
positions[y] = 'position'
return positions
def divide(str):
'''
Segment the input sentence and return each word together with its part-of-speech tag
'''
words = pseg.cut(str)
li = []
for w in words:
li.append([w.word.encode('utf-8'), w.flag.encode('utf-8')])
return li
def filt(li, type):
'''Part-of-speech filtering; currently unused.
# get the specific words depending on the type you want
'''
rli = []
for w in li:
if w[1] == type:
rli.append(w[0])
return rli
def paraFilter(store):
'''
# check which parameter tokens appear in each store key
'''
dictionary = {}
for x in store.keys():
dictionary[x] = []
for y in x.split(" "):
j = []
j = re.findall(r'\w+', y)
if j != []:
dictionary[x].append(j)
return dictionary
def getQueryTypeSet(li, dictionary, para, pro, paraCategory):
'''
After the input sentence has been segmented, check whether it contains any keyword from pro; if not, return 0, meaning the sentence is outside the query scope and should be answered by the external service. Also collect the parameter words: people, position.
# calculate the types of the query words
'''
qType = []
Nkey = 0
hasPosition = 0
hasName = 0
paradic = {}
# print pro
for w in li:
word = w[0]
if word in dictionary.keys():
qType.append(dictionary[word])
if word in pro:
Nkey += 1
if word in paraCategory.keys():
paradic[paraCategory[word]] = word
for x in paradic.values():
para.append(x)
if Nkey == 0:
return 0
return qType
def pointquery(li,points,devices,stations,para):
'''
#"获取某个监测点的数据"
'''
point=""
device=""
station=""
for w in li:
word=w[0]
# print 1
if points.has_key(word):
point=word
elif devices.has_key(word):
device=word
elif stations.has_key(word):
station=word
if point!="" and station!="" and device!="":
url ="/data/point_info_with_real_time?station_name="+station+"&device_name="+device+"&point_name="+point
return getResult(url)
else:
return 0
def getPrefixHit(qType, store):
'''
Count the hits
# calculate how many query-type words hit each prefix sentence in the store
'''
count = {}
setType = set(qType)
for i in range(len(store.keys())):
setStore = set(store.keys()[i].split(' '))
count[store.keys()[i]] = len(setStore & setType)
return count
def ranking(count, qType):
'''
Calculate the hit ratio
# calculate the probability
'''
setType = set(qType)
N = len(setType)
p = {}
for x in count.keys():
p[x] = float(count[x] / float(N))
p = sort(p)
return p
def sort(p):
'''
# sort entries by hit ratio, descending
'''
dicts = sorted(p.iteritems(), key=lambda d: d[1], reverse=True)
return dicts
# print dicts
def revranking(count):
'''
Calculate the recall (hit count divided by the length of the prefix sentence)
# showDict(count)
'''
p = {}
for x in count.keys():
p[x] = float(count[x] / float(len(x.split(" "))))
# showDict(p)
p = sort(p)
# print p
return p
def excuteREST(p, rp, st, para, paraDict, qType,remember):
'''
# execute the query; parameters are handled in priority order here and can be tuned later
# p: store match list ranked by hit ratio
# rp: store match list ranked in reverse order (by recall)
# st: the store dictionary
# para: parameter list extracted from the input sentence
# paraDict: parameter list of each store entry
# print showList()
# p[[[],[]],[]]
# st{:}
'''
p = resort(p, rp)  # when hit ratios tie, use the recall ranking to decide the order
# print p
url=""
if len(para) == 0:
for x in p:
if len(paraDict[x[0]]) == 0:
url = st[x[0]]
remember.append(x)
break
elif len(para) == 1:
for x in p:
if len(paraDict[x[0]]) == 1:
# print paraDict[x[0]][0][0]
if qType.count(paraDict[x[0]][0][0]) == 1:
url = st[x[0]] + para[0]
remember.append(x)
break
if url=="":
return 0
elif len(para) == 2:
for x in p:
if len(paraDict[x[0]]) == 2:
url = st[x[0]][0] + para[0] + st[x[0]][1] + para[1][0]+st[x[0]][2]+para[1][1]
remember.append(x)
break
if url=="":
return 0
return getResult(url)
def getResult(url):
'''
# connect to the data server, fetch the JSON response and return it; turl should also be changed to a relative path
'''
turl = '/root/INTELLI-City/docs/refer_project/wx/wendata/token'
fin1 = open(turl, 'r+')
token = fin1.read()
url = 'http://www.intellense.com:3080' + url
print url
fin1.close()
req = urllib2.Request(url)
req.add_header('authorization', token)
try:
response = urllib2.urlopen(req)
except Exception as e:
return 0
# print response.read()
return response.read()
def resort(l1, l2):
'''
# reverse-check the match ranking: re-order entries that tie in the forward ranking using the reverse ranking
'''
l1 = copy.deepcopy(l1)
l2 = copy.deepcopy(l2)
nl = []
g = -1
group = -1
gdict = {}
newlist = []
for x in l1:
if g != x[1]:
group += 1
g = x[1]
nl.append([])
nl[group].append(x)
else:
nl[group].append(x)
for g in nl:
for x in g:
for y in range(len(l2)):
if x[0] == l1[y][0]:
gdict[x] = y
break
sublist = sort(gdict)
for x in sublist:
newlist.append(x[0])
return newlist
def connectTuring(a):
'''
# call the external Q&A (Turing) service when no stored query matches
'''
kurl = '/root/INTELLI-City/docs/refer_project/wx/wendata/turkey'
fin = open(kurl, 'r+')
key = fin.read()
url = r'http://www.tuling123.com/openapi/api?key=' + key + '&info=' + a
reson = urllib2.urlopen(url)
reson = json.loads(reson.read())
fin.close()
# print reson['text'],'\n'
return reson['text']
def toUTF8(origin):
# change unicode type dict to UTF-8
result = {}
for x in origin.keys():
val = origin[x].encode('utf-8')
x = x.encode('utf-8')
result[x] = val
return result
def showDict(l):
for x in l.keys():
print x + ' ' + str(l[x])
def showList(l):
for x in l:
print x
def test(sentence):
sentence = sentence.replace(' ', '')
people = getPeople()
cities = getPosition('cities')
towns = getPosition('towns')
stations = getPosition('stations')
devices = getPosition('devices')
positions = mergePositions([cities, towns, stations, devices])
points=getPoints()
pro = getPros()
general = getGenerals()
paraCategory = dict(positions, **people)
dict1 = dict(general, **pro)
dict2 = dict(dict1, **paraCategory)
st = getStore() # store dict
para = []
keyphrase = pro.keys()
paraDict = paraFilter(st)
date = parseDate(sentence)
ftype=0
remember=[]
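# overall flow: segment the sentence, classify its words, try an exact
# monitoring-point query first, then match against the stored query templates
# and call the REST backend, and finally fall back to the external Q&A service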
divideResult = divide(sentence) # list
sentenceResult = getQueryTypeSet(
divideResult,
dict2,
para,
pro,
paraCategory) # set
pointResult=pointquery(divideResult,points,devices,stations,para)
if pointResult!=0:
# print "-----------------------------这是结果哦--------------------------------"
# print get_point_info_with_real_time(json.loads(pointResult))
return get_point_info_with_real_time(json.loads(pointResult))
elif sentenceResult == 0:
# print "-----------------------------这是结果哦--------------------------------"
# print connectTuring(sentence)
return connectTuring(sentence)
else:
if date!=0:
sentenceResult.append('time')
hitResult = getPrefixHit(sentenceResult, st) # dict
rankResult = ranking(hitResult, sentenceResult) # dict
rerankingResult = revranking(hitResult)
if date!=0:
para.append(date)
excuteResult = excuteREST(
rankResult,
rerankingResult,
st,
para,
paraDict,
sentenceResult,remember)
if excuteResult==0:
# print "-----------------------------这是结果哦--------------------------------"
# print connectTuring(sentence)
return connectTuring(sentence)
# b=filt(a,'v')
else:
reinfo=showResult(json.loads(excuteResult),remember[0])
if reinfo=="":
# print "-----------------------------这是结果哦--------------------------------"
# print '没有相关数据信息'
return '没有相关数据信息'
else:
# print "-----------------------------这是结果哦--------------------------------"
# print reinfo
return reinfo
# test()
def showReply(sentence):
'''Program entry point'''
sentence=str(sentence)
try:
return test(sentence)
except Exception as e:
# print "-----------------------------这是结果哦--------------------------------"
# print '我好像不太明白·_·'
return '我好像不太明白·_·'
# print showReply("查询工单")
|
mit
|
ujenmr/ansible
|
contrib/inventory/ec2.py
|
18
|
73063
|
#!/usr/bin/env python
'''
EC2 external inventory script
=================================
Generates inventory that Ansible can understand by making API request to
AWS EC2 using the Boto library.
NOTE: This script assumes Ansible is being executed where the environment
variables needed for Boto have already been set:
export AWS_ACCESS_KEY_ID='AK123'
export AWS_SECRET_ACCESS_KEY='abc123'
Optional region environment variable if region is 'auto'
This script also assumes that there is an ec2.ini file alongside it. To specify a
different path to ec2.ini, define the EC2_INI_PATH environment variable:
export EC2_INI_PATH=/path/to/my_ec2.ini
If you're using eucalyptus you need to set the above variables and
you need to define:
export EC2_URL=http://hostname_of_your_cc:port/services/Eucalyptus
If you're using boto profiles (requires boto>=2.24.0) you can choose a profile
using the --boto-profile command line argument (e.g. ec2.py --boto-profile prod) or using
the AWS_PROFILE variable:
AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.yml
For more details, see: http://docs.pythonboto.org/en/latest/boto_config_tut.html
You can filter for specific EC2 instances by creating an environment variable
named EC2_INSTANCE_FILTERS, which has the same format as the instance_filters
entry documented in ec2.ini. For example, to find all hosts whose name begins
with 'webserver', one might use:
export EC2_INSTANCE_FILTERS='tag:Name=webserver*'
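Multiple filter sets may be given as a comma-separated list, and conditions
within a single set can be combined with '&' (all of them must then match).
An illustrative example with made-up tag values:
export EC2_INSTANCE_FILTERS='tag:Name=webserver*&instance-state-name=running, tag:env=staging'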
When run against a specific host, this script returns the following variables:
- ec2_ami_launch_index
- ec2_architecture
- ec2_association
- ec2_attachTime
- ec2_attachment
- ec2_attachmentId
- ec2_block_devices
- ec2_client_token
- ec2_deleteOnTermination
- ec2_description
- ec2_deviceIndex
- ec2_dns_name
- ec2_eventsSet
- ec2_group_name
- ec2_hypervisor
- ec2_id
- ec2_image_id
- ec2_instanceState
- ec2_instance_type
- ec2_ipOwnerId
- ec2_ip_address
- ec2_item
- ec2_kernel
- ec2_key_name
- ec2_launch_time
- ec2_monitored
- ec2_monitoring
- ec2_networkInterfaceId
- ec2_ownerId
- ec2_persistent
- ec2_placement
- ec2_platform
- ec2_previous_state
- ec2_private_dns_name
- ec2_private_ip_address
- ec2_publicIp
- ec2_public_dns_name
- ec2_ramdisk
- ec2_reason
- ec2_region
- ec2_requester_id
- ec2_root_device_name
- ec2_root_device_type
- ec2_security_group_ids
- ec2_security_group_names
- ec2_shutdown_state
- ec2_sourceDestCheck
- ec2_spot_instance_request_id
- ec2_state
- ec2_state_code
- ec2_state_reason
- ec2_status
- ec2_subnet_id
- ec2_tenancy
- ec2_virtualization_type
- ec2_vpc_id
These variables are pulled out of a boto.ec2.instance object. There is a lack of
consistency with variable spellings (camelCase and underscores) since this
just loops through all variables the object exposes. It is preferred to use the
ones with underscores when multiple exist.
In addition, if an instance has AWS tags associated with it, each tag is a new
variable named:
- ec2_tag_[Key] = [Value]
Security groups are comma-separated in 'ec2_security_group_ids' and
'ec2_security_group_names'.
When destination_format and destination_format_tags are specified
the destination_format can be built from the instance tags and attributes.
The behavior will first check the user defined tags, then proceed to
check instance attributes, and finally if neither are found 'nil' will
be used instead.
'my_instance': {
'region': 'us-east-1', # attribute
'availability_zone': 'us-east-1a', # attribute
'private_dns_name': '172.31.0.1', # attribute
'ec2_tag_deployment': 'blue', # tag
'ec2_tag_clusterid': 'ansible', # tag
'ec2_tag_Name': 'webserver', # tag
...
}
Inside of the ec2.ini file the following settings are specified:
...
destination_format: {0}-{1}-{2}-{3}
destination_format_tags: Name,clusterid,deployment,private_dns_name
...
These settings would produce a destination_format as the following:
'webserver-ansible-blue-172.31.0.1'
'''
# (c) 2012, Peter Sankauskas
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import sys
import os
import argparse
import re
from time import time
from copy import deepcopy
import boto
from boto import ec2
from boto import rds
from boto import elasticache
from boto import route53
from boto import sts
import six
from ansible.module_utils import ec2 as ec2_utils
HAS_BOTO3 = False
try:
import boto3 # noqa
HAS_BOTO3 = True
except ImportError:
pass
from six.moves import configparser
from collections import defaultdict
import json
DEFAULTS = {
'all_elasticache_clusters': 'False',
'all_elasticache_nodes': 'False',
'all_elasticache_replication_groups': 'False',
'all_instances': 'False',
'all_rds_instances': 'False',
'aws_access_key_id': '',
'aws_secret_access_key': '',
'aws_security_token': '',
'boto_profile': '',
'cache_max_age': '300',
'cache_path': '~/.ansible/tmp',
'destination_variable': 'public_dns_name',
'elasticache': 'True',
'eucalyptus': 'False',
'eucalyptus_host': '',
'expand_csv_tags': 'False',
'group_by_ami_id': 'True',
'group_by_availability_zone': 'True',
'group_by_aws_account': 'False',
'group_by_elasticache_cluster': 'True',
'group_by_elasticache_engine': 'True',
'group_by_elasticache_parameter_group': 'True',
'group_by_elasticache_replication_group': 'True',
'group_by_instance_id': 'True',
'group_by_instance_state': 'False',
'group_by_instance_type': 'True',
'group_by_key_pair': 'True',
'group_by_platform': 'True',
'group_by_rds_engine': 'True',
'group_by_rds_parameter_group': 'True',
'group_by_region': 'True',
'group_by_route53_names': 'True',
'group_by_security_group': 'True',
'group_by_tag_keys': 'True',
'group_by_tag_none': 'True',
'group_by_vpc_id': 'True',
'hostname_variable': '',
'iam_role': '',
'include_rds_clusters': 'False',
'nested_groups': 'False',
'pattern_exclude': '',
'pattern_include': '',
'rds': 'False',
'regions': 'all',
'regions_exclude': 'us-gov-west-1, cn-north-1',
'replace_dash_in_groups': 'True',
'route53': 'False',
'route53_excluded_zones': '',
'route53_hostnames': '',
'stack_filters': 'False',
'vpc_destination_variable': 'ip_address'
}
class Ec2Inventory(object):
def _empty_inventory(self):
return {"_meta": {"hostvars": {}}}
def __init__(self):
''' Main execution path '''
# Inventory grouped by instance IDs, tags, security groups, regions,
# and availability zones
self.inventory = self._empty_inventory()
self.aws_account_id = None
# Index of hostname (address) to instance ID
self.index = {}
# Boto profile to use (if any)
self.boto_profile = None
# AWS credentials.
self.credentials = {}
# Read settings and parse CLI arguments
self.parse_cli_args()
self.read_settings()
# Make sure that profile_name is not passed at all if not set
# as pre 2.24 boto will fall over otherwise
if self.boto_profile:
if not hasattr(boto.ec2.EC2Connection, 'profile_name'):
self.fail_with_error("boto version must be >= 2.24 to use profile")
# Cache
if self.args.refresh_cache:
self.do_api_calls_update_cache()
elif not self.is_cache_valid():
self.do_api_calls_update_cache()
# Data to print
if self.args.host:
data_to_print = self.get_host_info()
elif self.args.list:
# Display list of instances for inventory
if self.inventory == self._empty_inventory():
data_to_print = self.get_inventory_from_cache()
else:
data_to_print = self.json_format_dict(self.inventory, True)
print(data_to_print)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if os.path.isfile(self.cache_path_index):
return True
return False
def read_settings(self):
''' Reads the settings from the ec2.ini file '''
scriptbasename = __file__
scriptbasename = os.path.basename(scriptbasename)
scriptbasename = scriptbasename.replace('.py', '')
defaults = {
'ec2': {
'ini_fallback': os.path.join(os.path.dirname(__file__), 'ec2.ini'),
'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename)
}
}
if six.PY3:
config = configparser.ConfigParser(DEFAULTS)
else:
config = configparser.SafeConfigParser(DEFAULTS)
ec2_ini_path = os.environ.get('EC2_INI_PATH', defaults['ec2']['ini_path'])
ec2_ini_path = os.path.expanduser(os.path.expandvars(ec2_ini_path))
if not os.path.isfile(ec2_ini_path):
ec2_ini_path = os.path.expanduser(defaults['ec2']['ini_fallback'])
if os.path.isfile(ec2_ini_path):
config.read(ec2_ini_path)
# Add empty sections if they don't exist
try:
config.add_section('ec2')
except configparser.DuplicateSectionError:
pass
try:
config.add_section('credentials')
except configparser.DuplicateSectionError:
pass
# is eucalyptus?
self.eucalyptus = config.getboolean('ec2', 'eucalyptus')
self.eucalyptus_host = config.get('ec2', 'eucalyptus_host')
# Regions
self.regions = []
config_regions = config.get('ec2', 'regions')
if (config_regions == 'all'):
if self.eucalyptus_host:
self.regions.append(boto.connect_euca(host=self.eucalyptus_host, **self.credentials).region.name)
else:
config_regions_exclude = config.get('ec2', 'regions_exclude')
for region_info in ec2.regions():
if region_info.name not in config_regions_exclude:
self.regions.append(region_info.name)
else:
self.regions = config_regions.split(",")
if 'auto' in self.regions:
env_region = os.environ.get('AWS_REGION')
if env_region is None:
env_region = os.environ.get('AWS_DEFAULT_REGION')
self.regions = [env_region]
# Destination addresses
self.destination_variable = config.get('ec2', 'destination_variable')
self.vpc_destination_variable = config.get('ec2', 'vpc_destination_variable')
self.hostname_variable = config.get('ec2', 'hostname_variable')
if config.has_option('ec2', 'destination_format') and \
config.has_option('ec2', 'destination_format_tags'):
self.destination_format = config.get('ec2', 'destination_format')
self.destination_format_tags = config.get('ec2', 'destination_format_tags').split(',')
else:
self.destination_format = None
self.destination_format_tags = None
# Route53
self.route53_enabled = config.getboolean('ec2', 'route53')
self.route53_hostnames = config.get('ec2', 'route53_hostnames')
self.route53_excluded_zones = []
self.route53_excluded_zones = [a for a in config.get('ec2', 'route53_excluded_zones').split(',') if a]
# Include RDS instances?
self.rds_enabled = config.getboolean('ec2', 'rds')
# Include RDS cluster instances?
self.include_rds_clusters = config.getboolean('ec2', 'include_rds_clusters')
# Include ElastiCache instances?
self.elasticache_enabled = config.getboolean('ec2', 'elasticache')
# Return all EC2 instances?
self.all_instances = config.getboolean('ec2', 'all_instances')
# Instance states to be gathered in inventory. Default is 'running'.
# Setting 'all_instances' to 'yes' overrides this option.
ec2_valid_instance_states = [
'pending',
'running',
'shutting-down',
'terminated',
'stopping',
'stopped'
]
self.ec2_instance_states = []
if self.all_instances:
self.ec2_instance_states = ec2_valid_instance_states
elif config.has_option('ec2', 'instance_states'):
for instance_state in config.get('ec2', 'instance_states').split(','):
instance_state = instance_state.strip()
if instance_state not in ec2_valid_instance_states:
continue
self.ec2_instance_states.append(instance_state)
else:
self.ec2_instance_states = ['running']
# Return all RDS instances? (if RDS is enabled)
self.all_rds_instances = config.getboolean('ec2', 'all_rds_instances')
# Return all ElastiCache replication groups? (if ElastiCache is enabled)
self.all_elasticache_replication_groups = config.getboolean('ec2', 'all_elasticache_replication_groups')
# Return all ElastiCache clusters? (if ElastiCache is enabled)
self.all_elasticache_clusters = config.getboolean('ec2', 'all_elasticache_clusters')
# Return all ElastiCache nodes? (if ElastiCache is enabled)
self.all_elasticache_nodes = config.getboolean('ec2', 'all_elasticache_nodes')
# boto configuration profile (prefer CLI argument then environment variables then config file)
self.boto_profile = self.args.boto_profile or \
os.environ.get('AWS_PROFILE') or \
config.get('ec2', 'boto_profile')
# AWS credentials (prefer environment variables)
if not (self.boto_profile or os.environ.get('AWS_ACCESS_KEY_ID') or
os.environ.get('AWS_PROFILE')):
aws_access_key_id = config.get('credentials', 'aws_access_key_id')
aws_secret_access_key = config.get('credentials', 'aws_secret_access_key')
aws_security_token = config.get('credentials', 'aws_security_token')
if aws_access_key_id:
self.credentials = {
'aws_access_key_id': aws_access_key_id,
'aws_secret_access_key': aws_secret_access_key
}
if aws_security_token:
self.credentials['security_token'] = aws_security_token
# Cache related
cache_dir = os.path.expanduser(config.get('ec2', 'cache_path'))
if self.boto_profile:
cache_dir = os.path.join(cache_dir, 'profile_' + self.boto_profile)
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
cache_name = 'ansible-ec2'
cache_id = self.boto_profile or os.environ.get('AWS_ACCESS_KEY_ID', self.credentials.get('aws_access_key_id'))
if cache_id:
cache_name = '%s-%s' % (cache_name, cache_id)
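# a short hash of this script's path is appended, presumably so different
# copies of the script keep separate cache files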
cache_name += '-' + str(abs(hash(__file__)))[1:7]
self.cache_path_cache = os.path.join(cache_dir, "%s.cache" % cache_name)
self.cache_path_index = os.path.join(cache_dir, "%s.index" % cache_name)
self.cache_max_age = config.getint('ec2', 'cache_max_age')
self.expand_csv_tags = config.getboolean('ec2', 'expand_csv_tags')
# Configure nested groups instead of flat namespace.
self.nested_groups = config.getboolean('ec2', 'nested_groups')
# Replace dash or not in group names
self.replace_dash_in_groups = config.getboolean('ec2', 'replace_dash_in_groups')
# IAM role to assume for connection
self.iam_role = config.get('ec2', 'iam_role')
# Configure which groups should be created.
group_by_options = [a for a in DEFAULTS if a.startswith('group_by')]
for option in group_by_options:
setattr(self, option, config.getboolean('ec2', option))
# Do we need to just include hosts that match a pattern?
self.pattern_include = config.get('ec2', 'pattern_include')
if self.pattern_include:
self.pattern_include = re.compile(self.pattern_include)
# Do we need to exclude hosts that match a pattern?
self.pattern_exclude = config.get('ec2', 'pattern_exclude')
if self.pattern_exclude:
self.pattern_exclude = re.compile(self.pattern_exclude)
# Do we want to stack multiple filters?
self.stack_filters = config.getboolean('ec2', 'stack_filters')
# Instance filters (see boto and EC2 API docs). Ignore invalid filters.
self.ec2_instance_filters = []
if config.has_option('ec2', 'instance_filters') or 'EC2_INSTANCE_FILTERS' in os.environ:
filters = os.getenv('EC2_INSTANCE_FILTERS', config.get('ec2', 'instance_filters') if config.has_option('ec2', 'instance_filters') else '')
if self.stack_filters and '&' in filters:
self.fail_with_error("AND filters along with stack_filter enabled is not supported.\n")
filter_sets = [f for f in filters.split(',') if f]
for filter_set in filter_sets:
filters = {}
filter_set = filter_set.strip()
for instance_filter in filter_set.split("&"):
instance_filter = instance_filter.strip()
if not instance_filter or '=' not in instance_filter:
continue
filter_key, filter_value = [x.strip() for x in instance_filter.split('=', 1)]
if not filter_key:
continue
filters[filter_key] = filter_value
self.ec2_instance_filters.append(filters.copy())
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on EC2')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to EC2 (default: False - use cache files)')
parser.add_argument('--profile', '--boto-profile', action='store', dest='boto_profile',
help='Use boto profile for connections to EC2')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
''' Do API calls to each region, and save data in cache files '''
if self.route53_enabled:
self.get_route53_records()
for region in self.regions:
self.get_instances_by_region(region)
if self.rds_enabled:
self.get_rds_instances_by_region(region)
if self.elasticache_enabled:
self.get_elasticache_clusters_by_region(region)
self.get_elasticache_replication_groups_by_region(region)
if self.include_rds_clusters:
self.include_rds_clusters_by_region(region)
self.write_to_cache(self.inventory, self.cache_path_cache)
self.write_to_cache(self.index, self.cache_path_index)
def connect(self, region):
''' create connection to api server'''
if self.eucalyptus:
conn = boto.connect_euca(host=self.eucalyptus_host, **self.credentials)
conn.APIVersion = '2010-08-31'
else:
conn = self.connect_to_aws(ec2, region)
return conn
def boto_fix_security_token_in_profile(self, connect_args):
''' monkey patch for boto issue boto/boto#2100 '''
profile = 'profile ' + self.boto_profile
if boto.config.has_option(profile, 'aws_security_token'):
connect_args['security_token'] = boto.config.get(profile, 'aws_security_token')
return connect_args
def connect_to_aws(self, module, region):
connect_args = deepcopy(self.credentials)
# only pass the profile name if it's set (as it is not supported by older boto versions)
if self.boto_profile:
connect_args['profile_name'] = self.boto_profile
self.boto_fix_security_token_in_profile(connect_args)
elif os.environ.get('AWS_SESSION_TOKEN'):
connect_args['security_token'] = os.environ.get('AWS_SESSION_TOKEN')
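# if an IAM role is configured, assume it via STS and use the temporary
# credentials it returns for the actual connection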
if self.iam_role:
sts_conn = sts.connect_to_region(region, **connect_args)
role = sts_conn.assume_role(self.iam_role, 'ansible_dynamic_inventory')
connect_args['aws_access_key_id'] = role.credentials.access_key
connect_args['aws_secret_access_key'] = role.credentials.secret_key
connect_args['security_token'] = role.credentials.session_token
conn = module.connect_to_region(region, **connect_args)
# connect_to_region will fail "silently" by returning None if the region name is wrong or not supported
if conn is None:
self.fail_with_error("region name: %s likely not supported, or AWS is down. connection to region failed." % region)
return conn
def get_instances_by_region(self, region):
''' Makes an AWS EC2 API call to get the list of instances in a particular
region '''
try:
conn = self.connect(region)
reservations = []
if self.ec2_instance_filters:
if self.stack_filters:
filters_dict = {}
for filters in self.ec2_instance_filters:
filters_dict.update(filters)
reservations.extend(conn.get_all_instances(filters=filters_dict))
else:
for filters in self.ec2_instance_filters:
reservations.extend(conn.get_all_instances(filters=filters))
else:
reservations = conn.get_all_instances()
# Pull the tags back in a second step
# AWS are on record as saying that the tags fetched in the first `get_all_instances` request are not
# reliable and may be missing, and the only way to guarantee they are there is by calling `get_all_tags`
instance_ids = []
for reservation in reservations:
instance_ids.extend([instance.id for instance in reservation.instances])
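# batch the resource-id filter values, presumably to stay under the EC2 API's
# limit on the number of values allowed per filter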
max_filter_value = 199
tags = []
for i in range(0, len(instance_ids), max_filter_value):
tags.extend(conn.get_all_tags(filters={'resource-type': 'instance', 'resource-id': instance_ids[i:i + max_filter_value]}))
tags_by_instance_id = defaultdict(dict)
for tag in tags:
tags_by_instance_id[tag.res_id][tag.name] = tag.value
if (not self.aws_account_id) and reservations:
self.aws_account_id = reservations[0].owner_id
for reservation in reservations:
for instance in reservation.instances:
instance.tags = tags_by_instance_id[instance.id]
self.add_instance(instance, region)
except boto.exception.BotoServerError as e:
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
else:
backend = 'Eucalyptus' if self.eucalyptus else 'AWS'
error = "Error connecting to %s backend.\n%s" % (backend, e.message)
self.fail_with_error(error, 'getting EC2 instances')
def tags_match_filters(self, tags):
''' return True if given tags match configured filters '''
if not self.ec2_instance_filters:
return True
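# with stack_filters enabled every tag filter must match (AND semantics);
# otherwise a single matching tag filter is enough (OR semantics)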
for filters in self.ec2_instance_filters:
for filter_name, filter_value in filters.items():
if filter_name[:4] != 'tag:':
continue
filter_name = filter_name[4:]
if filter_name not in tags:
if self.stack_filters:
return False
continue
if isinstance(filter_value, list):
if self.stack_filters and tags[filter_name] not in filter_value:
return False
if not self.stack_filters and tags[filter_name] in filter_value:
return True
if isinstance(filter_value, six.string_types):
if self.stack_filters and tags[filter_name] != filter_value:
return False
if not self.stack_filters and tags[filter_name] == filter_value:
return True
return self.stack_filters
def get_rds_instances_by_region(self, region):
''' Makes an AWS API call to get the list of RDS instances in a particular
region '''
if not HAS_BOTO3:
self.fail_with_error("Working with RDS instances requires boto3 - please install boto3 and try again",
"getting RDS instances")
client = ec2_utils.boto3_inventory_conn('client', 'rds', region, **self.credentials)
db_instances = client.describe_db_instances()
try:
conn = self.connect_to_aws(rds, region)
if conn:
marker = None
while True:
instances = conn.get_all_dbinstances(marker=marker)
marker = instances.marker
for index, instance in enumerate(instances):
# Add tags to instances.
instance.arn = db_instances['DBInstances'][index]['DBInstanceArn']
tags = client.list_tags_for_resource(ResourceName=instance.arn)['TagList']
instance.tags = {}
for tag in tags:
instance.tags[tag['Key']] = tag['Value']
if self.tags_match_filters(instance.tags):
self.add_rds_instance(instance, region)
if not marker:
break
except boto.exception.BotoServerError as e:
error = e.reason
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
elif e.error_code == "OptInRequired":
error = "RDS hasn't been enabled for this account yet. " \
"You must either log in to the RDS service through the AWS console to enable it, " \
"or set 'rds = False' in ec2.ini"
elif not e.reason == "Forbidden":
error = "Looks like AWS RDS is down:\n%s" % e.message
self.fail_with_error(error, 'getting RDS instances')
def include_rds_clusters_by_region(self, region):
if not HAS_BOTO3:
self.fail_with_error("Working with RDS clusters requires boto3 - please install boto3 and try again",
"getting RDS clusters")
client = ec2_utils.boto3_inventory_conn('client', 'rds', region, **self.credentials)
marker, clusters = '', []
while marker is not None:
resp = client.describe_db_clusters(Marker=marker)
clusters.extend(resp["DBClusters"])
marker = resp.get('Marker', None)
account_id = boto.connect_iam().get_user().arn.split(':')[4]
c_dict = {}
for c in clusters:
# remove these datetime objects as there is no serialisation to json
# currently in place and we don't need the data yet
if 'EarliestRestorableTime' in c:
del c['EarliestRestorableTime']
if 'LatestRestorableTime' in c:
del c['LatestRestorableTime']
if not self.ec2_instance_filters:
matches_filter = True
else:
matches_filter = False
try:
# arn:aws:rds:<region>:<account number>:<resourcetype>:<name>
tags = client.list_tags_for_resource(
ResourceName='arn:aws:rds:' + region + ':' + account_id + ':cluster:' + c['DBClusterIdentifier'])
c['Tags'] = tags['TagList']
if self.ec2_instance_filters:
for filters in self.ec2_instance_filters:
for filter_key, filter_values in filters.items():
# get AWS tag key e.g. tag:env will be 'env'
tag_name = filter_key.split(":", 1)[1]
# Filter values is a list (if you put multiple values for the same tag name)
matches_filter = any(d['Key'] == tag_name and d['Value'] in filter_values for d in c['Tags'])
if matches_filter:
# it matches a filter, so stop looking for further matches
break
if matches_filter:
break
except Exception as e:
if e.message.find('DBInstanceNotFound') >= 0:
# AWS RDS bug (2016-01-06) means deletion does not fully complete and leave an 'empty' cluster.
# Ignore errors when trying to find tags for these
pass
# ignore empty clusters caused by AWS bug
if len(c['DBClusterMembers']) == 0:
continue
elif matches_filter:
c_dict[c['DBClusterIdentifier']] = c
self.inventory['db_clusters'] = c_dict
def get_elasticache_clusters_by_region(self, region):
''' Makes an AWS API call to get the list of ElastiCache clusters (with
nodes' info) in a particular region.'''
# ElastiCache boto module doesn't provide a get_all_instances method,
# that's why we need to call describe directly (it would be called by
# the shorthand method anyway...)
clusters = []
try:
conn = self.connect_to_aws(elasticache, region)
if conn:
# show_cache_node_info = True
# because we also want nodes' information
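# _marker starts as a dummy truthy value so the loop body runs at least once;
# it is cleared before the first request and then follows the API's pagination marker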
_marker = 1
while _marker:
if _marker == 1:
_marker = None
response = conn.describe_cache_clusters(None, None, _marker, True)
_marker = response['DescribeCacheClustersResponse']['DescribeCacheClustersResult']['Marker']
try:
# Boto also doesn't provide wrapper classes to CacheClusters or
# CacheNodes. Because of that we can't make use of the get_list
# method in the AWSQueryConnection. Let's do the work manually
clusters = clusters + response['DescribeCacheClustersResponse']['DescribeCacheClustersResult']['CacheClusters']
except KeyError as e:
error = "ElastiCache query to AWS failed (unexpected format)."
self.fail_with_error(error, 'getting ElastiCache clusters')
except boto.exception.BotoServerError as e:
error = e.reason
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
elif e.error_code == "OptInRequired":
error = "ElastiCache hasn't been enabled for this account yet. " \
"You must either log in to the ElastiCache service through the AWS console to enable it, " \
"or set 'elasticache = False' in ec2.ini"
elif not e.reason == "Forbidden":
error = "Looks like AWS ElastiCache is down:\n%s" % e.message
self.fail_with_error(error, 'getting ElastiCache clusters')
for cluster in clusters:
self.add_elasticache_cluster(cluster, region)
def get_elasticache_replication_groups_by_region(self, region):
''' Makes an AWS API call to get the list of ElastiCache replication groups
in a particular region.'''
# ElastiCache boto module doesn't provide a get_all_instances method,
# that's why we need to call describe directly (it would be called by
# the shorthand method anyway...)
try:
conn = self.connect_to_aws(elasticache, region)
if conn:
response = conn.describe_replication_groups()
except boto.exception.BotoServerError as e:
error = e.reason
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
if not e.reason == "Forbidden":
error = "Looks like AWS ElastiCache [Replication Groups] is down:\n%s" % e.message
self.fail_with_error(error, 'getting ElastiCache clusters')
try:
# Boto also doesn't provide wrapper classes to ReplicationGroups
# Because of that we can't make use of the get_list method in the
# AWSQueryConnection. Let's do the work manually
replication_groups = response['DescribeReplicationGroupsResponse']['DescribeReplicationGroupsResult']['ReplicationGroups']
except KeyError as e:
error = "ElastiCache [Replication Groups] query to AWS failed (unexpected format)."
self.fail_with_error(error, 'getting ElastiCache clusters')
for replication_group in replication_groups:
self.add_elasticache_replication_group(replication_group, region)
def get_auth_error_message(self):
''' create an informative error message if there is an issue authenticating'''
errors = ["Authentication error retrieving ec2 inventory."]
if None in [os.environ.get('AWS_ACCESS_KEY_ID'), os.environ.get('AWS_SECRET_ACCESS_KEY')]:
errors.append(' - No AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY environment vars found')
else:
errors.append(' - AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment vars found but may not be correct')
boto_paths = ['/etc/boto.cfg', '~/.boto', '~/.aws/credentials']
boto_config_found = [p for p in boto_paths if os.path.isfile(os.path.expanduser(p))]
if len(boto_config_found) > 0:
errors.append(" - Boto configs found at '%s', but the credentials contained may not be correct" % ', '.join(boto_config_found))
else:
errors.append(" - No Boto config found at any expected location '%s'" % ', '.join(boto_paths))
return '\n'.join(errors)
def fail_with_error(self, err_msg, err_operation=None):
'''log an error to std err for ansible-playbook to consume and exit'''
if err_operation:
err_msg = 'ERROR: "{err_msg}", while: {err_operation}'.format(
err_msg=err_msg, err_operation=err_operation)
sys.stderr.write(err_msg)
sys.exit(1)
def get_instance(self, region, instance_id):
conn = self.connect(region)
reservations = conn.get_all_instances([instance_id])
for reservation in reservations:
for instance in reservation.instances:
return instance
def add_instance(self, instance, region):
''' Adds an instance to the inventory and index, as long as it is
addressable '''
# Only return instances with desired instance states
if instance.state not in self.ec2_instance_states:
return
# Select the best destination address
# When destination_format and destination_format_tags are specified
# the following code will attempt to find the instance tags first,
# then the instance attributes next, and finally if neither are found
# assign nil for the desired destination format attribute.
if self.destination_format and self.destination_format_tags:
dest_vars = []
inst_tags = getattr(instance, 'tags')
for tag in self.destination_format_tags:
if tag in inst_tags:
dest_vars.append(inst_tags[tag])
elif hasattr(instance, tag):
dest_vars.append(getattr(instance, tag))
else:
dest_vars.append('nil')
dest = self.destination_format.format(*dest_vars)
elif instance.subnet_id:
dest = getattr(instance, self.vpc_destination_variable, None)
if dest is None:
dest = getattr(instance, 'tags').get(self.vpc_destination_variable, None)
else:
dest = getattr(instance, self.destination_variable, None)
if dest is None:
dest = getattr(instance, 'tags').get(self.destination_variable, None)
if not dest:
# Skip instances we cannot address (e.g. private VPC subnet)
return
# Set the inventory name
hostname = None
if self.hostname_variable:
if self.hostname_variable.startswith('tag_'):
hostname = instance.tags.get(self.hostname_variable[4:], None)
else:
hostname = getattr(instance, self.hostname_variable)
# set the hostname from route53
if self.route53_enabled and self.route53_hostnames:
route53_names = self.get_instance_route53_names(instance)
for name in route53_names:
if name.endswith(self.route53_hostnames):
hostname = name
# If we can't get a nice hostname, use the destination address
if not hostname:
hostname = dest
# to_safe strips hostname characters like dots, so don't strip route53 hostnames
elif self.route53_enabled and self.route53_hostnames and hostname.endswith(self.route53_hostnames):
hostname = hostname.lower()
else:
hostname = self.to_safe(hostname).lower()
# if we only want to include hosts that match a pattern, skip those that don't
if self.pattern_include and not self.pattern_include.match(hostname):
return
# if we need to exclude hosts that match a pattern, skip those
if self.pattern_exclude and self.pattern_exclude.match(hostname):
return
# Add to index
self.index[hostname] = [region, instance.id]
# Inventory: Group by instance ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[instance.id] = [hostname]
if self.nested_groups:
self.push_group(self.inventory, 'instances', instance.id)
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone:
self.push(self.inventory, instance.placement, hostname)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, instance.placement)
self.push_group(self.inventory, 'zones', instance.placement)
# Inventory: Group by Amazon Machine Image (AMI) ID
if self.group_by_ami_id:
ami_id = self.to_safe(instance.image_id)
self.push(self.inventory, ami_id, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'images', ami_id)
# Inventory: Group by instance type
if self.group_by_instance_type:
type_name = self.to_safe('type_' + instance.instance_type)
self.push(self.inventory, type_name, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by instance state
if self.group_by_instance_state:
state_name = self.to_safe('instance_state_' + instance.state)
self.push(self.inventory, state_name, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'instance_states', state_name)
# Inventory: Group by platform
if self.group_by_platform:
if instance.platform:
platform = self.to_safe('platform_' + instance.platform)
else:
platform = self.to_safe('platform_undefined')
self.push(self.inventory, platform, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'platforms', platform)
# Inventory: Group by key pair
if self.group_by_key_pair and instance.key_name:
key_name = self.to_safe('key_' + instance.key_name)
self.push(self.inventory, key_name, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'keys', key_name)
# Inventory: Group by VPC
if self.group_by_vpc_id and instance.vpc_id:
vpc_id_name = self.to_safe('vpc_id_' + instance.vpc_id)
self.push(self.inventory, vpc_id_name, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'vpcs', vpc_id_name)
# Inventory: Group by security group
if self.group_by_security_group:
try:
for group in instance.groups:
key = self.to_safe("security_group_" + group.name)
self.push(self.inventory, key, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
except AttributeError:
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
'Please upgrade boto >= 2.3.0.']))
# Inventory: Group by AWS account ID
if self.group_by_aws_account:
self.push(self.inventory, self.aws_account_id, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'accounts', self.aws_account_id)
# Inventory: Group by tag keys
if self.group_by_tag_keys:
for k, v in instance.tags.items():
if self.expand_csv_tags and v and ',' in v:
values = map(lambda x: x.strip(), v.split(','))
else:
values = [v]
for v in values:
if v:
key = self.to_safe("tag_" + k + "=" + v)
else:
key = self.to_safe("tag_" + k)
self.push(self.inventory, key, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))
if v:
self.push_group(self.inventory, self.to_safe("tag_" + k), key)
# Inventory: Group by Route53 domain names if enabled
if self.route53_enabled and self.group_by_route53_names:
route53_names = self.get_instance_route53_names(instance)
for name in route53_names:
self.push(self.inventory, name, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'route53', name)
# Global Tag: instances without tags
if self.group_by_tag_none and len(instance.tags) == 0:
self.push(self.inventory, 'tag_none', hostname)
if self.nested_groups:
self.push_group(self.inventory, 'tags', 'tag_none')
# Global Tag: tag all EC2 instances
self.push(self.inventory, 'ec2', hostname)
self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)
self.inventory["_meta"]["hostvars"][hostname]['ansible_host'] = dest
def add_rds_instance(self, instance, region):
''' Adds an RDS instance to the inventory and index, as long as it is
addressable '''
# Only want available instances unless all_rds_instances is True
if not self.all_rds_instances and instance.status != 'available':
return
# Select the best destination address
dest = instance.endpoint[0]
if not dest:
# Skip instances we cannot address (e.g. private VPC subnet)
return
# Set the inventory name
hostname = None
if self.hostname_variable:
if self.hostname_variable.startswith('tag_'):
hostname = instance.tags.get(self.hostname_variable[4:], None)
else:
hostname = getattr(instance, self.hostname_variable)
# If we can't get a nice hostname, use the destination address
if not hostname:
hostname = dest
hostname = self.to_safe(hostname).lower()
# Add to index
self.index[hostname] = [region, instance.id]
# Inventory: Group by instance ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[instance.id] = [hostname]
if self.nested_groups:
self.push_group(self.inventory, 'instances', instance.id)
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone:
self.push(self.inventory, instance.availability_zone, hostname)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, instance.availability_zone)
self.push_group(self.inventory, 'zones', instance.availability_zone)
# Inventory: Group by instance type
if self.group_by_instance_type:
type_name = self.to_safe('type_' + instance.instance_class)
self.push(self.inventory, type_name, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by VPC
if self.group_by_vpc_id and instance.subnet_group and instance.subnet_group.vpc_id:
vpc_id_name = self.to_safe('vpc_id_' + instance.subnet_group.vpc_id)
self.push(self.inventory, vpc_id_name, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'vpcs', vpc_id_name)
# Inventory: Group by security group
if self.group_by_security_group:
try:
if instance.security_group:
key = self.to_safe("security_group_" + instance.security_group.name)
self.push(self.inventory, key, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
except AttributeError:
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
'Please upgrade boto >= 2.3.0.']))
# Inventory: Group by tag keys
if self.group_by_tag_keys:
for k, v in instance.tags.items():
if self.expand_csv_tags and v and ',' in v:
values = map(lambda x: x.strip(), v.split(','))
else:
values = [v]
for v in values:
if v:
key = self.to_safe("tag_" + k + "=" + v)
else:
key = self.to_safe("tag_" + k)
self.push(self.inventory, key, hostname)
if self.nested_groups:
self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))
if v:
self.push_group(self.inventory, self.to_safe("tag_" + k), key)
# Inventory: Group by engine
if self.group_by_rds_engine:
self.push(self.inventory, self.to_safe("rds_" + instance.engine), hostname)
if self.nested_groups:
self.push_group(self.inventory, 'rds_engines', self.to_safe("rds_" + instance.engine))
# Inventory: Group by parameter group
if self.group_by_rds_parameter_group:
self.push(self.inventory, self.to_safe("rds_parameter_group_" + instance.parameter_group.name), hostname)
if self.nested_groups:
self.push_group(self.inventory, 'rds_parameter_groups', self.to_safe("rds_parameter_group_" + instance.parameter_group.name))
# Global Tag: instances without tags
if self.group_by_tag_none and len(instance.tags) == 0:
self.push(self.inventory, 'tag_none', hostname)
if self.nested_groups:
self.push_group(self.inventory, 'tags', 'tag_none')
# Global Tag: all RDS instances
self.push(self.inventory, 'rds', hostname)
self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)
self.inventory["_meta"]["hostvars"][hostname]['ansible_host'] = dest
def add_elasticache_cluster(self, cluster, region):
''' Adds an ElastiCache cluster to the inventory and index, as long as
its nodes are addressable '''
# Only want available clusters unless all_elasticache_clusters is True
if not self.all_elasticache_clusters and cluster['CacheClusterStatus'] != 'available':
return
# Select the best destination address
if 'ConfigurationEndpoint' in cluster and cluster['ConfigurationEndpoint']:
# Memcached cluster
dest = cluster['ConfigurationEndpoint']['Address']
is_redis = False
else:
# Redis single node cluster
# Because all Redis clusters are single nodes, we'll merge the
# info from the cluster with info about the node
dest = cluster['CacheNodes'][0]['Endpoint']['Address']
is_redis = True
if not dest:
# Skip clusters we cannot address (e.g. private VPC subnet)
return
# Add to index
self.index[dest] = [region, cluster['CacheClusterId']]
# Inventory: Group by instance ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[cluster['CacheClusterId']] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', cluster['CacheClusterId'])
# Inventory: Group by region
if self.group_by_region and not is_redis:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone and not is_redis:
self.push(self.inventory, cluster['PreferredAvailabilityZone'], dest)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, cluster['PreferredAvailabilityZone'])
self.push_group(self.inventory, 'zones', cluster['PreferredAvailabilityZone'])
# Inventory: Group by node type
if self.group_by_instance_type and not is_redis:
type_name = self.to_safe('type_' + cluster['CacheNodeType'])
self.push(self.inventory, type_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by VPC (information not available in the current
# AWS API version for ElastiCache)
# Inventory: Group by security group
if self.group_by_security_group and not is_redis:
# Check for the existence of the 'SecurityGroups' key and also if
# this key has some value. When the cluster is not placed in a SG
# the query can return None here and cause an error.
if 'SecurityGroups' in cluster and cluster['SecurityGroups'] is not None:
for security_group in cluster['SecurityGroups']:
key = self.to_safe("security_group_" + security_group['SecurityGroupId'])
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
# Inventory: Group by engine
if self.group_by_elasticache_engine and not is_redis:
self.push(self.inventory, self.to_safe("elasticache_" + cluster['Engine']), dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_engines', self.to_safe(cluster['Engine']))
# Inventory: Group by parameter group
if self.group_by_elasticache_parameter_group:
self.push(self.inventory, self.to_safe("elasticache_parameter_group_" + cluster['CacheParameterGroup']['CacheParameterGroupName']), dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_parameter_groups', self.to_safe(cluster['CacheParameterGroup']['CacheParameterGroupName']))
# Inventory: Group by replication group
if self.group_by_elasticache_replication_group and 'ReplicationGroupId' in cluster and cluster['ReplicationGroupId']:
self.push(self.inventory, self.to_safe("elasticache_replication_group_" + cluster['ReplicationGroupId']), dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_replication_groups', self.to_safe(cluster['ReplicationGroupId']))
# Global Tag: all ElastiCache clusters
self.push(self.inventory, 'elasticache_clusters', cluster['CacheClusterId'])
host_info = self.get_host_info_dict_from_describe_dict(cluster)
self.inventory["_meta"]["hostvars"][dest] = host_info
# Add the nodes
for node in cluster['CacheNodes']:
self.add_elasticache_node(node, cluster, region)
def add_elasticache_node(self, node, cluster, region):
''' Adds an ElastiCache node to the inventory and index, as long as
it is addressable '''
# Only want available nodes unless all_elasticache_nodes is True
if not self.all_elasticache_nodes and node['CacheNodeStatus'] != 'available':
return
# Select the best destination address
dest = node['Endpoint']['Address']
if not dest:
# Skip nodes we cannot address (e.g. private VPC subnet)
return
node_id = self.to_safe(cluster['CacheClusterId'] + '_' + node['CacheNodeId'])
# Add to index
self.index[dest] = [region, node_id]
# Inventory: Group by node ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[node_id] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', node_id)
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone:
self.push(self.inventory, cluster['PreferredAvailabilityZone'], dest)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, cluster['PreferredAvailabilityZone'])
self.push_group(self.inventory, 'zones', cluster['PreferredAvailabilityZone'])
# Inventory: Group by node type
if self.group_by_instance_type:
type_name = self.to_safe('type_' + cluster['CacheNodeType'])
self.push(self.inventory, type_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by VPC (information not available in the current
# AWS API version for ElastiCache)
# Inventory: Group by security group
if self.group_by_security_group:
# Check for the existence of the 'SecurityGroups' key and also if
# this key has some value. When the cluster is not placed in a SG
# the query can return None here and cause an error.
if 'SecurityGroups' in cluster and cluster['SecurityGroups'] is not None:
for security_group in cluster['SecurityGroups']:
key = self.to_safe("security_group_" + security_group['SecurityGroupId'])
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
# Inventory: Group by engine
if self.group_by_elasticache_engine:
self.push(self.inventory, self.to_safe("elasticache_" + cluster['Engine']), dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_engines', self.to_safe("elasticache_" + cluster['Engine']))
# Inventory: Group by parameter group (done at cluster level)
# Inventory: Group by replication group (done at cluster level)
# Inventory: Group by ElastiCache Cluster
if self.group_by_elasticache_cluster:
self.push(self.inventory, self.to_safe("elasticache_cluster_" + cluster['CacheClusterId']), dest)
# Global Tag: all ElastiCache nodes
self.push(self.inventory, 'elasticache_nodes', dest)
host_info = self.get_host_info_dict_from_describe_dict(node)
if dest in self.inventory["_meta"]["hostvars"]:
self.inventory["_meta"]["hostvars"][dest].update(host_info)
else:
self.inventory["_meta"]["hostvars"][dest] = host_info
def add_elasticache_replication_group(self, replication_group, region):
''' Adds an ElastiCache replication group to the inventory and index '''
# Only want available clusters unless all_elasticache_replication_groups is True
if not self.all_elasticache_replication_groups and replication_group['Status'] != 'available':
return
# Skip clusters we cannot address (e.g. private VPC subnet or clustered redis)
if replication_group['NodeGroups'][0]['PrimaryEndpoint'] is None or \
replication_group['NodeGroups'][0]['PrimaryEndpoint']['Address'] is None:
return
# Select the best destination address (PrimaryEndpoint)
dest = replication_group['NodeGroups'][0]['PrimaryEndpoint']['Address']
# Add to index
self.index[dest] = [region, replication_group['ReplicationGroupId']]
# Inventory: Group by ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[replication_group['ReplicationGroupId']] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', replication_group['ReplicationGroupId'])
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone (doesn't apply to replication groups)
# Inventory: Group by node type (doesn't apply to replication groups)
# Inventory: Group by VPC (information not available in the current
# AWS API version for replication groups)
# Inventory: Group by security group (doesn't apply to replication groups)
# Check this value in cluster level
# Inventory: Group by engine (replication groups are always Redis)
if self.group_by_elasticache_engine:
self.push(self.inventory, 'elasticache_redis', dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_engines', 'redis')
# Global Tag: all ElastiCache replication groups
self.push(self.inventory, 'elasticache_replication_groups', replication_group['ReplicationGroupId'])
host_info = self.get_host_info_dict_from_describe_dict(replication_group)
self.inventory["_meta"]["hostvars"][dest] = host_info
def get_route53_records(self):
''' Get and store the map of resource records to domain names that
point to them. '''
if self.boto_profile:
r53_conn = route53.Route53Connection(profile_name=self.boto_profile)
else:
r53_conn = route53.Route53Connection()
all_zones = r53_conn.get_zones()
route53_zones = [zone for zone in all_zones if zone.name[:-1] not in self.route53_excluded_zones]
self.route53_records = {}
for zone in route53_zones:
rrsets = r53_conn.get_all_rrsets(zone.id)
for record_set in rrsets:
record_name = record_set.name
if record_name.endswith('.'):
record_name = record_name[:-1]
for resource in record_set.resource_records:
self.route53_records.setdefault(resource, set())
self.route53_records[resource].add(record_name)
def get_instance_route53_names(self, instance):
''' Check if an instance is referenced in the records we have from
Route53. If it is, return the list of domain names pointing to said
instance. If nothing points to it, return an empty list. '''
instance_attributes = ['public_dns_name', 'private_dns_name',
'ip_address', 'private_ip_address']
name_list = set()
for attrib in instance_attributes:
try:
value = getattr(instance, attrib)
except AttributeError:
continue
if value in self.route53_records:
name_list.update(self.route53_records[value])
return list(name_list)
def get_host_info_dict_from_instance(self, instance):
instance_vars = {}
for key in vars(instance):
value = getattr(instance, key)
key = self.to_safe('ec2_' + key)
# Handle complex types
# state/previous_state changed to properties in boto in https://github.com/boto/boto/commit/a23c379837f698212252720d2af8dec0325c9518
if key == 'ec2__state':
instance_vars['ec2_state'] = instance.state or ''
instance_vars['ec2_state_code'] = instance.state_code
elif key == 'ec2__previous_state':
instance_vars['ec2_previous_state'] = instance.previous_state or ''
instance_vars['ec2_previous_state_code'] = instance.previous_state_code
elif isinstance(value, (int, bool)):
instance_vars[key] = value
elif isinstance(value, six.string_types):
instance_vars[key] = value.strip()
elif value is None:
instance_vars[key] = ''
elif key == 'ec2_region':
instance_vars[key] = value.name
elif key == 'ec2__placement':
instance_vars['ec2_placement'] = value.zone
elif key == 'ec2_tags':
for k, v in value.items():
if self.expand_csv_tags and ',' in v:
v = list(map(lambda x: x.strip(), v.split(',')))
key = self.to_safe('ec2_tag_' + k)
instance_vars[key] = v
elif key == 'ec2_groups':
group_ids = []
group_names = []
for group in value:
group_ids.append(group.id)
group_names.append(group.name)
instance_vars["ec2_security_group_ids"] = ','.join([str(i) for i in group_ids])
instance_vars["ec2_security_group_names"] = ','.join([str(i) for i in group_names])
elif key == 'ec2_block_device_mapping':
instance_vars["ec2_block_devices"] = {}
for k, v in value.items():
instance_vars["ec2_block_devices"][os.path.basename(k)] = v.volume_id
else:
pass
# TODO Product codes if someone finds them useful
# print key
# print type(value)
# print value
instance_vars[self.to_safe('ec2_account_id')] = self.aws_account_id
return instance_vars
def get_host_info_dict_from_describe_dict(self, describe_dict):
''' Parses the dictionary returned by the API call into a flat list
of parameters. This method should be used only when 'describe' is
used directly because Boto doesn't provide specific classes. '''
# I really don't agree with prefixing everything with 'ec2'
# because EC2, RDS and ElastiCache are different services.
# I'm just following the pattern used until now to not break any
# compatibility.
host_info = {}
for key in describe_dict:
value = describe_dict[key]
key = self.to_safe('ec2_' + self.uncammelize(key))
# Handle complex types
# Target: Memcached Cache Clusters
if key == 'ec2_configuration_endpoint' and value:
host_info['ec2_configuration_endpoint_address'] = value['Address']
host_info['ec2_configuration_endpoint_port'] = value['Port']
# Target: Cache Nodes and Redis Cache Clusters (single node)
if key == 'ec2_endpoint' and value:
host_info['ec2_endpoint_address'] = value['Address']
host_info['ec2_endpoint_port'] = value['Port']
# Target: Redis Replication Groups
if key == 'ec2_node_groups' and value:
host_info['ec2_endpoint_address'] = value[0]['PrimaryEndpoint']['Address']
host_info['ec2_endpoint_port'] = value[0]['PrimaryEndpoint']['Port']
replica_count = 0
for node in value[0]['NodeGroupMembers']:
if node['CurrentRole'] == 'primary':
host_info['ec2_primary_cluster_address'] = node['ReadEndpoint']['Address']
host_info['ec2_primary_cluster_port'] = node['ReadEndpoint']['Port']
host_info['ec2_primary_cluster_id'] = node['CacheClusterId']
elif node['CurrentRole'] == 'replica':
host_info['ec2_replica_cluster_address_' + str(replica_count)] = node['ReadEndpoint']['Address']
host_info['ec2_replica_cluster_port_' + str(replica_count)] = node['ReadEndpoint']['Port']
host_info['ec2_replica_cluster_id_' + str(replica_count)] = node['CacheClusterId']
replica_count += 1
# Target: Redis Replication Groups
if key == 'ec2_member_clusters' and value:
host_info['ec2_member_clusters'] = ','.join([str(i) for i in value])
# Target: All Cache Clusters
elif key == 'ec2_cache_parameter_group':
host_info["ec2_cache_node_ids_to_reboot"] = ','.join([str(i) for i in value['CacheNodeIdsToReboot']])
host_info['ec2_cache_parameter_group_name'] = value['CacheParameterGroupName']
host_info['ec2_cache_parameter_apply_status'] = value['ParameterApplyStatus']
# Target: Almost everything
elif key == 'ec2_security_groups':
# Skip if SecurityGroups is None
# (it is possible to have the key defined but no value in it).
if value is not None:
sg_ids = []
for sg in value:
sg_ids.append(sg['SecurityGroupId'])
host_info["ec2_security_group_ids"] = ','.join([str(i) for i in sg_ids])
# Target: Everything
# Preserve booleans and integers
elif isinstance(value, (int, bool)):
host_info[key] = value
# Target: Everything
# Sanitize string values
elif isinstance(value, six.string_types):
host_info[key] = value.strip()
# Target: Everything
# Replace None by an empty string
elif value is None:
host_info[key] = ''
else:
# Remove non-processed complex types
pass
return host_info
def get_host_info(self):
''' Get variables about a specific host '''
if len(self.index) == 0:
# Need to load index from cache
self.load_index_from_cache()
if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)
(region, instance_id) = self.index[self.args.host]
instance = self.get_instance(region, instance_id)
return self.json_format_dict(self.get_host_info_dict_from_instance(instance), True)
def push(self, my_dict, key, element):
''' Push an element onto an array that may not have been defined in
the dict '''
group_info = my_dict.setdefault(key, [])
if isinstance(group_info, dict):
host_list = group_info.setdefault('hosts', [])
host_list.append(element)
else:
group_info.append(element)
def push_group(self, my_dict, key, element):
''' Push a group as a child of another group. '''
parent_group = my_dict.setdefault(key, {})
if not isinstance(parent_group, dict):
parent_group = my_dict[key] = {'hosts': parent_group}
child_groups = parent_group.setdefault('children', [])
if element not in child_groups:
child_groups.append(element)
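# Illustrative sketch (added comment, not part of the original script): starting
# from an empty dict, push() and push_group() build structures like this:
#   inv = {}
#   self.push(inv, 'us-east-1', 'host1')          # -> {'us-east-1': ['host1']}
#   self.push_group(inv, 'regions', 'us-east-1')
#   # -> {'us-east-1': ['host1'], 'regions': {'children': ['us-east-1']}}
# If the parent group already exists as a plain list, push_group() first wraps
# it as {'hosts': [...]} before appending to its 'children' list.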
def get_inventory_from_cache(self):
''' Reads the inventory from the cache file and returns it as a JSON
string '''
with open(self.cache_path_cache, 'r') as f:
json_inventory = f.read()
return json_inventory
def load_index_from_cache(self):
''' Reads the index from the cache file and sets self.index '''
with open(self.cache_path_index, 'rb') as f:
self.index = json.load(f)
def write_to_cache(self, data, filename):
''' Writes data in JSON format to a file '''
json_data = self.json_format_dict(data, True)
with open(filename, 'w') as f:
f.write(json_data)
def uncammelize(self, key):
temp = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', key)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', temp).lower()
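# Illustrative examples (added comment, not in the original source):
#   self.uncammelize('CacheClusterId') -> 'cache_cluster_id'
#   self.uncammelize('ARN')            -> 'arn'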
def to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
regex = r"[^A-Za-z0-9\_"
if not self.replace_dash_in_groups:
regex += r"\-"
return re.sub(regex + "]", "_", word)
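# Illustrative examples (added comment, not in the original source):
#   self.to_safe('tag_Name=web server') -> 'tag_Name_web_server'
#   self.to_safe('type_db.m3.medium')   -> 'type_db_m3_medium'
# Dashes are preserved unless replace_dash_in_groups is enabled.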
def json_format_dict(self, data, pretty=False):
''' Converts a dict to a JSON object and dumps it as a formatted
string '''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
if __name__ == '__main__':
# Run the script
Ec2Inventory()
|
gpl-3.0
|
prakritish/ansible
|
lib/ansible/parsing/dataloader.py
|
25
|
17727
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import os
import json
import tempfile
from yaml import YAMLError
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
from ansible.errors.yaml_strings import YAML_SYNTAX_ERROR
from ansible.module_utils.basic import is_executable
from ansible.module_utils.six import text_type, string_types
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.vault import VaultLib, b_HEADER, is_encrypted, is_encrypted_file
from ansible.parsing.quoting import unquote
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleUnicode
from ansible.utils.path import unfrackpath
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class DataLoader:
'''
The DataLoader class is used to load and parse YAML or JSON content,
either from a given file name or from a string that was previously
read in through other means. A Vault password can be specified, and
any vault-encrypted files will be decrypted.
Data read from files will also be cached, so the file will never be
read from disk more than once.
Usage:
dl = DataLoader()
# optionally: dl.set_vault_password('foo')
ds = dl.load('...')
ds = dl.load_from_file('/path/to/file')
'''
def __init__(self):
self._basedir = '.'
self._FILE_CACHE = dict()
self._tempfiles = set()
# initialize the vault stuff with an empty password
self.set_vault_password(None)
def set_vault_password(self, b_vault_password):
self._b_vault_password = b_vault_password
self._vault = VaultLib(b_password=b_vault_password)
def load(self, data, file_name='<string>', show_content=True):
'''
Creates a python datastructure from the given data, which can be either
a JSON or YAML string.
'''
new_data = None
try:
# we first try to load this data as JSON
new_data = json.loads(data)
except:
# must not be JSON, let the rest try
if isinstance(data, AnsibleUnicode):
# PyYAML's libyaml bindings use PyUnicode_CheckExact so
# they are unable to cope with our subclass.
# Unwrap and re-wrap the unicode so we can keep track of line
# numbers
in_data = text_type(data)
else:
in_data = data
try:
new_data = self._safe_load(in_data, file_name=file_name)
except YAMLError as yaml_exc:
self._handle_error(yaml_exc, file_name, show_content)
if isinstance(data, AnsibleUnicode):
new_data = AnsibleUnicode(new_data)
new_data.ansible_pos = data.ansible_pos
return new_data
def load_from_file(self, file_name):
''' Loads data from a file, which can contain either JSON or YAML. '''
file_name = self.path_dwim(file_name)
# if the file has already been read in and cached, we'll
# return those results to avoid more file/vault operations
if file_name in self._FILE_CACHE:
parsed_data = self._FILE_CACHE[file_name]
else:
# read the file contents and load the data structure from them
(b_file_data, show_content) = self._get_file_contents(file_name)
file_data = to_text(b_file_data, errors='surrogate_or_strict')
parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)
# cache the file contents for next time
self._FILE_CACHE[file_name] = parsed_data
# return a deep copy here, so the cache is not affected
return copy.deepcopy(parsed_data)
def path_exists(self, path):
path = self.path_dwim(path)
return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))
def is_file(self, path):
path = self.path_dwim(path)
return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull
def is_directory(self, path):
path = self.path_dwim(path)
return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))
def list_directory(self, path):
path = self.path_dwim(path)
return os.listdir(path)
def is_executable(self, path):
'''is the given path executable?'''
path = self.path_dwim(path)
return is_executable(path)
def _safe_load(self, stream, file_name=None):
''' Implements yaml.safe_load(), except using our custom loader class. '''
loader = AnsibleLoader(stream, file_name, self._b_vault_password)
try:
return loader.get_single_data()
finally:
try:
loader.dispose()
except AttributeError:
pass # older versions of yaml don't have dispose function, ignore
def _get_file_contents(self, file_name):
'''
Reads the file contents from the given file name, and will decrypt them
if they are found to be vault-encrypted.
'''
if not file_name or not isinstance(file_name, string_types):
raise AnsibleParserError("Invalid filename: '%s'" % str(file_name))
b_file_name = to_bytes(file_name)
if not self.path_exists(b_file_name) or not self.is_file(b_file_name):
raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % file_name)
show_content = True
try:
with open(b_file_name, 'rb') as f:
data = f.read()
if is_encrypted(data):
data = self._vault.decrypt(data, filename=b_file_name)
show_content = False
return (data, show_content)
except (IOError, OSError) as e:
raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, str(e)))
def _handle_error(self, yaml_exc, file_name, show_content):
'''
Optionally constructs an object (AnsibleBaseYAMLObject) to encapsulate the
file name/position where a YAML exception occurred, and raises an AnsibleParserError
to display the syntax exception information.
'''
# if the YAML exception contains a problem mark, use it to construct
# an object the error class can use to display the faulty line
err_obj = None
if hasattr(yaml_exc, 'problem_mark'):
err_obj = AnsibleBaseYAMLObject()
err_obj.ansible_pos = (file_name, yaml_exc.problem_mark.line + 1, yaml_exc.problem_mark.column + 1)
raise AnsibleParserError(YAML_SYNTAX_ERROR, obj=err_obj, show_content=show_content)
def get_basedir(self):
''' returns the current basedir '''
return self._basedir
def set_basedir(self, basedir):
''' sets the base directory, used to find files when a relative path is given '''
if basedir is not None:
self._basedir = to_text(basedir)
def path_dwim(self, given):
'''
make relative paths work like folks expect.
'''
given = unquote(given)
given = to_text(given, errors='surrogate_or_strict')
if given.startswith(u"/"):
return os.path.abspath(given)
elif given.startswith(u"~"):
return os.path.abspath(os.path.expanduser(given))
else:
basedir = to_text(self._basedir, errors='surrogate_or_strict')
return os.path.abspath(os.path.join(basedir, given))
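# Illustrative behaviour (added comment; assumes self._basedir == '/playbooks'):
#   path_dwim('/etc/hosts') -> '/etc/hosts'
#   path_dwim('~/vars.yml') -> '/home/<user>/vars.yml'
#   path_dwim('group_vars') -> '/playbooks/group_vars'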
def _is_role(self, path):
''' imperfect role detection, roles are still valid w/o main.yml/yaml/etc '''
isit = False
b_path = to_bytes(path, errors='surrogate_or_strict')
b_upath = to_bytes(unfrackpath(path), errors='surrogate_or_strict')
for suffix in (b'.yml', b'.yaml', b''):
b_main = b'main%s' % (suffix)
b_tasked = b'tasks/%s' % (b_main)
if b_path.endswith(b'tasks') and os.path.exists(os.path.join(b_path, b_main)) \
or os.path.exists(os.path.join(b_upath, b_tasked)) \
or os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked)):
isit = True
break
return isit
def path_dwim_relative(self, path, dirname, source, is_role=False):
'''
find one file in either a role or playbook dir with or without
explicitly named dirname subdirs
Used in action plugins and lookups to find supplemental files that
could be in either place.
'''
search = []
# I have full path, nothing else needs to be looked at
if source.startswith('~') or source.startswith(os.path.sep):
search.append(self.path_dwim(source))
else:
# base role/play path + templates/files/vars + relative filename
search.append(os.path.join(path, dirname, source))
basedir = unfrackpath(path)
# not told if role, but detect if it is a role and if so make sure you get correct base path
if not is_role:
is_role = self._is_role(path)
if is_role and path.endswith('tasks'):
basedir = unfrackpath(os.path.dirname(path))
cur_basedir = self._basedir
self.set_basedir(basedir)
# resolved base role/play path + templates/files/vars + relative filename
search.append(self.path_dwim(os.path.join(basedir, dirname, source)))
self.set_basedir(cur_basedir)
if is_role and not source.endswith(dirname):
# look in role's tasks dir w/o dirname
search.append(self.path_dwim(os.path.join(basedir, 'tasks', source)))
# try to create absolute path for loader basedir + templates/files/vars + filename
search.append(self.path_dwim(os.path.join(dirname,source)))
search.append(self.path_dwim(os.path.join(basedir, source)))
# try to create absolute path for loader basedir + filename
search.append(self.path_dwim(source))
for candidate in search:
if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
break
return candidate
def path_dwim_relative_stack(self, paths, dirname, source, is_role=False):
'''
find one file in first path in stack taking roles into account and adding play basedir as fallback
:arg paths: A list of text strings which are the paths to look for the filename in.
:arg dirname: A text string representing a directory. The directory
is prepended to the source to form the path to search for.
:arg source: A text string which is the filename to search for
:rtype: A text string
:returns: An absolute path to the filename ``source``
'''
b_dirname = to_bytes(dirname)
b_source = to_bytes(source)
result = None
if source is None:
display.warning('Invalid request to find a file that matches a "null" value')
elif source and (source.startswith('~') or source.startswith(os.path.sep)):
# path is absolute, no relative needed, check existence and return source
test_path = unfrackpath(b_source)
if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
result = test_path
else:
search = []
display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
for path in paths:
upath = unfrackpath(path)
b_upath = to_bytes(upath, errors='surrogate_or_strict')
b_mydir = os.path.dirname(b_upath)
# FIXME: this detection fails with non main.yml roles
# if path is in role and 'tasks' not there already, add it into the search
if is_role or self._is_role(path):
if b_mydir.endswith(b'tasks'):
search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
search.append(os.path.join(b_mydir, b_source))
else:
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != b_dirname:
search.append(os.path.join(b_upath, b_dirname, b_source))
search.append(os.path.join(b_upath, b_source))
elif b_dirname not in b_source.split(b'/'):
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != b_dirname:
search.append(os.path.join(b_upath, b_dirname, b_source))
search.append(os.path.join(b_upath, b_source))
# always append basedir as last resort
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != b_dirname:
search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
search.append(os.path.join(to_bytes(self.get_basedir()), b_source))
display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
for b_candidate in search:
display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
if os.path.exists(b_candidate):
result = to_text(b_candidate)
break
return result
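# Illustrative sketch (added comment; the paths below are hypothetical):
#   loader.path_dwim_relative_stack(['/play/roles/web/tasks'], 'templates', 'nginx.conf.j2')
# builds a list of candidate locations (role dirs first, then the play basedir
# with and without the 'templates' subdir) and returns the first one that
# exists on disk, or None if nothing matches.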
def _create_content_tempfile(self, content):
''' Create a tempfile containing defined content '''
fd, content_tempfile = tempfile.mkstemp()
f = os.fdopen(fd, 'wb')
content = to_bytes(content)
try:
f.write(content)
except Exception as err:
os.remove(content_tempfile)
raise Exception(err)
finally:
f.close()
return content_tempfile
def get_real_file(self, file_path, decrypt=True):
"""
If the file is vault encrypted return a path to a temporary decrypted file
If the file is not encrypted then the path is returned
Temporary files are cleaned up in the destructor
"""
if not file_path or not isinstance(file_path, string_types):
raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))
b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
raise AnsibleFileNotFound("the file_name '%s' does not exist, or is not readable" % to_native(file_path))
if not self._vault:
self._vault = VaultLib(b_password="")
real_path = self.path_dwim(file_path)
try:
if decrypt:
with open(to_bytes(real_path), 'rb') as f:
# Limit how much of the file is read since we do not know
# whether this is a vault file and therefore it could be very
# large.
if is_encrypted_file(f, count=len(b_HEADER)):
# if the file is encrypted and no password was specified,
# the decrypt call would throw an error, but we check first
# since the decrypt function doesn't know the file name
data = f.read()
if not self._b_vault_password:
raise AnsibleParserError("A vault password must be specified to decrypt %s" % file_path)
data = self._vault.decrypt(data, filename=real_path)
# Make a temp file
real_path = self._create_content_tempfile(data)
self._tempfiles.add(real_path)
return real_path
except (IOError, OSError) as e:
raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)))
def cleanup_tmp_file(self, file_path):
"""
Removes any temporary files created from a previous call to
get_real_file. file_path must be the path returned from a
previous call to get_real_file.
"""
if file_path in self._tempfiles:
os.unlink(file_path)
self._tempfiles.remove(file_path)
def cleanup_all_tmp_files(self):
for f in self._tempfiles:
try:
self.cleanup_tmp_file(f)
except:
pass # TODO: this should at least warn
|
gpl-3.0
|
schleichdi2/OPENNFR-6.3-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/utils/decorators.py
|
1
|
10411
|
#
# Copyright (C) 2013 Intel Corporation
#
# SPDX-License-Identifier: MIT
#
# Some custom decorators that can be used by unittests
# Most useful is skipUnlessPassed which can be used for
# creating dependencies between two test methods.
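# A minimal usage sketch (added comment; the test names are hypothetical):
#
#   class ExampleTests(unittest.TestCase):
#       def test_setup(self):
#           ...
#       @skipUnlessPassed('test_setup')
#       def test_feature(self):
#           # skipped automatically if test_setup failed, errored or was skipped
#           ...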
import os
import logging
import sys
import unittest
import threading
import signal
from functools import wraps
#get the "result" object from one of the upper frames provided that one of these upper frames is a unittest.case frame
class getResults(object):
def __init__(self):
#dynamically determine the unittest.case frame and use it to get the name of the test method
ident = threading.current_thread().ident
upperf = sys._current_frames()[ident]
while (upperf.f_globals['__name__'] != 'unittest.case'):
upperf = upperf.f_back
def handleList(items):
ret = []
# items is a list of tuples, (test, failure) or (_ErrorHandler(), Exception())
for i in items:
s = i[0].id()
#Handle the _ErrorHolder objects from skipModule failures
if "setUpModule (" in s:
ret.append(s.replace("setUpModule (", "").replace(")",""))
else:
ret.append(s)
# Append also the test without the full path
testname = s.split('.')[-1]
if testname:
ret.append(testname)
return ret
self.faillist = handleList(upperf.f_locals['result'].failures)
self.errorlist = handleList(upperf.f_locals['result'].errors)
self.skiplist = handleList(upperf.f_locals['result'].skipped)
def getFailList(self):
return self.faillist
def getErrorList(self):
return self.errorlist
def getSkipList(self):
return self.skiplist
class skipIfFailure(object):
def __init__(self,testcase):
self.testcase = testcase
def __call__(self,f):
@wraps(f)
def wrapped_f(*args, **kwargs):
res = getResults()
if self.testcase in res.getFailList() or self.testcase in res.getErrorList():
raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
return f(*args, **kwargs)
wrapped_f.__name__ = f.__name__
return wrapped_f
class skipIfSkipped(object):
def __init__(self,testcase):
self.testcase = testcase
def __call__(self,f):
@wraps(f)
def wrapped_f(*args, **kwargs):
res = getResults()
if self.testcase in res.getSkipList():
raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
return f(*args, **kwargs)
wrapped_f.__name__ = f.__name__
return wrapped_f
class skipUnlessPassed(object):
def __init__(self,testcase):
self.testcase = testcase
def __call__(self,f):
@wraps(f)
def wrapped_f(*args, **kwargs):
res = getResults()
if self.testcase in res.getSkipList() or \
self.testcase in res.getFailList() or \
self.testcase in res.getErrorList():
raise unittest.SkipTest("Testcase dependency not met: %s" % self.testcase)
return f(*args, **kwargs)
wrapped_f.__name__ = f.__name__
wrapped_f._depends_on = self.testcase
return wrapped_f
class testcase(object):
def __init__(self, test_case):
self.test_case = test_case
def __call__(self, func):
@wraps(func)
def wrapped_f(*args, **kwargs):
return func(*args, **kwargs)
wrapped_f.test_case = self.test_case
wrapped_f.__name__ = func.__name__
return wrapped_f
class NoParsingFilter(logging.Filter):
def filter(self, record):
return record.levelno == 100
import inspect
def LogResults(original_class):
orig_method = original_class.run
from time import strftime, gmtime
caller = os.path.basename(sys.argv[0])
timestamp = strftime('%Y%m%d%H%M%S',gmtime())
logfile = os.path.join(os.getcwd(),'results-'+caller+'.'+timestamp+'.log')
linkfile = os.path.join(os.getcwd(),'results-'+caller+'.log')
def get_class_that_defined_method(meth):
if inspect.ismethod(meth):
for cls in inspect.getmro(meth.__self__.__class__):
if cls.__dict__.get(meth.__name__) is meth:
return cls
meth = meth.__func__ # fallback to __qualname__ parsing
if inspect.isfunction(meth):
cls = getattr(inspect.getmodule(meth),
meth.__qualname__.split('.<locals>', 1)[0].rsplit('.', 1)[0])
if isinstance(cls, type):
return cls
return None
#rewrite the run method of unittest.TestCase to add testcase logging
def run(self, result, *args, **kws):
orig_method(self, result, *args, **kws)
passed = True
testMethod = getattr(self, self._testMethodName)
#if the test case is decorated then use its number, else use its name
try:
test_case = testMethod.test_case
except AttributeError:
test_case = self._testMethodName
class_name = str(get_class_that_defined_method(testMethod)).split("'")[1]
#create custom logging level for filtering.
custom_log_level = 100
logging.addLevelName(custom_log_level, 'RESULTS')
def results(self, message, *args, **kws):
if self.isEnabledFor(custom_log_level):
self.log(custom_log_level, message, *args, **kws)
logging.Logger.results = results
logging.basicConfig(filename=logfile,
filemode='w',
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
datefmt='%H:%M:%S',
level=custom_log_level)
for handler in logging.root.handlers:
handler.addFilter(NoParsingFilter())
local_log = logging.getLogger(caller)
#check status of tests and record it
tcid = self.id()
for (name, msg) in result.errors:
if tcid == name.id():
local_log.results("Testcase "+str(test_case)+": ERROR")
local_log.results("Testcase "+str(test_case)+":\n"+msg)
passed = False
for (name, msg) in result.failures:
if tcid == name.id():
local_log.results("Testcase "+str(test_case)+": FAILED")
local_log.results("Testcase "+str(test_case)+":\n"+msg)
passed = False
for (name, msg) in result.skipped:
if tcid == name.id():
local_log.results("Testcase "+str(test_case)+": SKIPPED")
passed = False
if passed:
local_log.results("Testcase "+str(test_case)+": PASSED")
# XXX: To avoid a race condition when testing whether the linkfile exists,
# use bb.utils.lockfile; a better solution would be to create a unique name
# for the link file.
try:
import bb
has_bb = True
lockfilename = linkfile + '.lock'
except ImportError:
has_bb = False
if has_bb:
lf = bb.utils.lockfile(lockfilename, block=True)
# Create symlink to the current log
if os.path.lexists(linkfile):
os.remove(linkfile)
os.symlink(logfile, linkfile)
if has_bb:
bb.utils.unlockfile(lf)
original_class.run = run
return original_class
class TimeOut(BaseException):
pass
def timeout(seconds):
def decorator(fn):
if hasattr(signal, 'alarm'):
@wraps(fn)
def wrapped_f(*args, **kw):
current_frame = sys._getframe()
def raiseTimeOut(signal, frame):
if frame is not current_frame:
raise TimeOut('%s seconds' % seconds)
prev_handler = signal.signal(signal.SIGALRM, raiseTimeOut)
try:
signal.alarm(seconds)
return fn(*args, **kw)
finally:
signal.alarm(0)
signal.signal(signal.SIGALRM, prev_handler)
return wrapped_f
else:
return fn
return decorator
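# Usage sketch (added comment): abort a test after 60 seconds on platforms that
# provide signal.alarm; on other platforms the decorator returns fn unchanged.
#
#   @timeout(60)
#   def test_long_running(self):
#       ...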
__tag_prefix = "tag__"
def tag(*args, **kwargs):
"""Decorator that adds attributes to classes or functions
for use with the Attribute (-a) plugin.
"""
def wrap_ob(ob):
for name in args:
setattr(ob, __tag_prefix + name, True)
for name, value in kwargs.items():
setattr(ob, __tag_prefix + name, value)
return ob
return wrap_ob
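# Usage sketch (added comment; the tag names are hypothetical):
#
#   @tag('wifi', feature='qemu')
#   def test_network(self):
#       ...
#
# The attributes are stored as 'tag__wifi' / 'tag__feature' and can be read
# back with gettag() or getAllTags() below.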
def gettag(obj, key, default=None):
key = __tag_prefix + key
if not isinstance(obj, unittest.TestCase):
return getattr(obj, key, default)
tc_method = getattr(obj, obj._testMethodName)
ret = getattr(tc_method, key, getattr(obj, key, default))
return ret
def getAllTags(obj):
def __gettags(o):
r = {k[len(__tag_prefix):]:getattr(o,k) for k in dir(o) if k.startswith(__tag_prefix)}
return r
if not isinstance(obj, unittest.TestCase):
return __gettags(obj)
tc_method = getattr(obj, obj._testMethodName)
ret = __gettags(obj)
ret.update(__gettags(tc_method))
return ret
def timeout_handler(seconds):
def decorator(fn):
if hasattr(signal, 'alarm'):
@wraps(fn)
def wrapped_f(self, *args, **kw):
current_frame = sys._getframe()
def raiseTimeOut(signal, frame):
if frame is not current_frame:
try:
self.target.restart()
raise TimeOut('%s seconds' % seconds)
except:
raise TimeOut('%s seconds' % seconds)
prev_handler = signal.signal(signal.SIGALRM, raiseTimeOut)
try:
signal.alarm(seconds)
return fn(self, *args, **kw)
finally:
signal.alarm(0)
signal.signal(signal.SIGALRM, prev_handler)
return wrapped_f
else:
return fn
return decorator
|
gpl-2.0
|
CINPLA/expipe-dev
|
python-neo/neo/io/elphyio.py
|
6
|
158913
|
# -*- coding: utf-8 -*-
"""
README
=====================================================================================
This is the implementation of the NEO IO for Elphy files.
IO dependencies:
- NEO
- types
- numpy
- quantities
Quick reference:
=====================================================================================
The class ElphyIO(), with methods read_block() and write_block(), is implemented.
This class represents the way to access and produce Elphy files
from NEO objects.
To read an existing Elphy file, start by initializing an IO class with it:
>>> import neo
>>> r = neo.io.ElphyIO( filename="Elphy.DAT" )
>>> r
<neo.io.elphyio.ElphyIO object at 0xa1e960c>
Read the file content into NEO object Block:
>>> bl = r.read_block(lazy=False, cascade=True)
>>> bl
<neo.core.block.Block object at 0x9e3d44c>
Now you can then read all Elphy data as NEO objects:
>>> bl.segments
[<neo.core.segment.Segment object at 0x9ed85cc>,
<neo.core.segment.Segment object at 0x9ed85ec>,
<neo.core.segment.Segment object at 0x9ed880c>,
<neo.core.segment.Segment object at 0x9ed89cc>]
>>> bl.segments[0].analogsignals[0]
<AnalogSignal(array([ 0. , -0.0061037 , -0.0061037 , ..., 0. ,
-0.0061037 , -0.01831111]) * mV, [0.0 s, 7226.2 s], sampling rate: 10.0 Hz)>
These functions return NEO objects, completely "detached" from the original Elphy file.
Changes to the runtime objects will not cause any changes in the file.
Having already existing NEO structures, it is possible to write them as an Elphy file.
For example, given a segment:
>>> s = neo.Segment()
filled with other NEO structures:
>>> import numpy as np
>>> import quantities as pq
>>> a = AnalogSignal( signal=np.random.rand(300), t_start=42*pq.ms)
>>> s.analogsignals.append( a )
and added to a newly created NEO Block:
>>> bl = neo.Block()
>>> bl.segments.append( s )
Then, it's easy to create an Elphy file:
>>> r = neo.io.ElphyIO( filename="ElphyNeoTest.DAT" )
>>> r.write_block( bl )
Author: Thierry Brizzi
Domenico Guarino
"""
# needed for python 3 compatibility
from __future__ import absolute_import
# python commons:
from datetime import datetime
from fractions import gcd
from os import path
import re
import struct
from time import time
# note neo.core needs only numpy and quantities
import numpy as np
import quantities as pq
# I need to subclass BaseIO
from neo.io.baseio import BaseIO
# to import from core
from neo.core import (Block, Segment, ChannelIndex, RecordingChannel,
AnalogSignal, Event, SpikeTrain)
# --------------------------------------------------------
# OBJECTS
class ElphyScaleFactor(object):
"""
Useful to retrieve real values from integer
ones that are stored in an Elphy file :
``scale`` : compute the actual value of a sample
with the following formula :
``delta`` * value + ``offset``
"""
def __init__(self, delta, offset):
self.delta = delta
self.offset = offset
def scale(self, value):
return value * self.delta + self.offset
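# Illustrative example (added comment, values are hypothetical):
#   sf = ElphyScaleFactor(delta=0.1, offset=-5.0)
#   sf.scale(100)  # -> 100 * 0.1 + (-5.0) = 5.0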
class BaseSignal(object):
"""
A descriptor storing main signal properties :
``layout`` : the :class:``ElphyLayout` object
that extracts data from a file.
``episode`` : the episode in which the signal
has been acquired.
``sampling_frequency`` : the sampling frequency
of the analog to digital converter.
``sampling_period`` : the sampling period of the
analog to digital converter computed from sampling_frequency.
``t_start`` : the start time of the signal acquisition.
``t_stop`` : the end time of the signal acquisition.
``duration`` : the duration of the signal acquisition
computed from t_start and t_stop.
``n_samples`` : the number of sample acquired during the
recording computed from the duration and the sampling period.
``name`` : a label to identify the signal.
``data`` : a property triggering data extraction.
"""
def __init__(self, layout, episode, sampling_frequency, start, stop, name=None):
self.layout = layout
self.episode = episode
self.sampling_frequency = sampling_frequency
self.sampling_period = 1 / sampling_frequency
self.t_start = start
self.t_stop = stop
self.duration = self.t_stop - self.t_start
self.n_samples = int(self.duration / self.sampling_period)
self.name = name
@property
def data(self):
raise NotImplementedError('must be overloaded in subclass')
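# Illustrative numbers (added comment, values are hypothetical): a signal created
# with sampling_frequency=512. (Hz), start=0. and stop=2. (s) ends up with
# sampling_period = 1/512 s, duration = 2. s and n_samples = 1024.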
class ElphySignal(BaseSignal):
"""
Subclass of :class:`BaseSignal` corresponding to Elphy's analog channels :
``channel`` : the identifier of the analog channel providing the signal.
``units`` : an array containing x and y coordinates units.
``x_unit`` : a property to access the x-coordinates unit.
``y_unit`` : a property to access the y-coordinates unit.
``data`` : a property that delegate data extraction to the
``get_signal_data`` function of the ```layout`` object.
"""
def __init__(self, layout, episode, channel, x_unit, y_unit, sampling_frequency, start, stop, name=None):
super(ElphySignal, self).__init__(layout, episode, sampling_frequency, start, stop, name)
self.channel = channel
self.units = [x_unit, y_unit]
def __str__(self):
return "%s ep_%s ch_%s [%s, %s]" % (self.layout.file.name, self.episode, self.channel, self.x_unit, self.y_unit)
def __repr__(self):
return self.__str__()
@property
def x_unit(self):
"""
Return the unit of the x-coordinate of the signal.
"""
return self.units[0]
@property
def y_unit(self):
"""
Return the unit of the y-coordinate of the signal.
"""
return self.units[1]
@property
def data(self):
return self.layout.get_signal_data(self.episode, self.channel)
class ElphyTag(BaseSignal):
"""
Subclass of :class:`BaseSignal` corresponding to Elphy's tag channels :
``number`` : the identifier of the tag channel.
``x_unit`` : the unit of the x-coordinate.
"""
def __init__(self, layout, episode, number, x_unit, sampling_frequency, start, stop, name=None):
super(ElphyTag, self).__init__(layout, episode, sampling_frequency, start, stop, name)
self.number = number
self.units = [x_unit, None]
def __str__(self):
return "%s : ep_%s tag_ch_%s [%s]" % (self.layout.file.name, self.episode, self.number, self.x_unit)
def __repr__(self):
return self.__str__()
@property
def x_unit(self):
"""
Return the unit of the x-coordinate of the signal.
"""
return self.units[0]
@property
def data(self):
return self.layout.get_tag_data(self.episode, self.number)
@property
def channel(self):
return self.number
class ElphyEvent(object):
"""
A descriptor that stores a set of event properties :
``layout`` : the :class:``ElphyLayout` object
that extracts data from a file.
``episode`` : the episode in which the signal
has been acquired.
``number`` : the identifier of the channel.
``x_unit`` : the unit of the x-coordinate.
``n_events`` : the number of events.
``name`` : a label to identify the event.
``times`` : a property triggering event times extraction.
"""
def __init__(self, layout, episode, number, x_unit, n_events, ch_number=None, name=None):
self.layout = layout
self.episode = episode
self.number = number
self.x_unit = x_unit
self.n_events = n_events
self.name = name
self.ch_number = ch_number
def __str__(self):
return "%s : ep_%s evt_ch_%s [%s]" % (self.layout.file.name, self.episode, self.number, self.x_unit)
def __repr__(self):
return self.__str__()
@property
def channel(self):
return self.number
@property
def times(self):
return self.layout.get_event_data(self.episode, self.number)
@property
def data(self):
return self.times
class ElphySpikeTrain(ElphyEvent):
"""
A descriptor that stores spiketrain properties :
``wf_samples`` : number of samples composing waveforms.
``wf_sampling_frequency`` : sampling frequency of waveforms.
``wf_sampling_period`` : sampling period of waveforms.
``wf_units`` : the units of the x and y coordinates of waveforms.
``t_start`` : the time before the arrival of the spike which
corresponds to the starting time of a waveform.
``name`` : a label to identify the event.
``times`` : a property triggering event times extraction.
``waveforms`` : a property triggering waveforms extraction.
"""
def __init__(self, layout, episode, number, x_unit, n_events, wf_sampling_frequency, wf_samples, unit_x_wf, unit_y_wf, t_start, name=None):
super(ElphySpikeTrain, self).__init__(layout, episode, number, x_unit, n_events, name)
self.wf_samples = wf_samples
self.wf_sampling_frequency = wf_sampling_frequency
assert wf_sampling_frequency, "bad sampling frequency"
self.wf_sampling_period = 1.0 / wf_sampling_frequency
self.wf_units = [unit_x_wf, unit_y_wf]
self.t_start = t_start
@property
def x_unit_wf(self):
"""
Return the unit of the x-coordinate of waveforms.
"""
return self.wf_units[0]
@property
def y_unit_wf(self):
"""
Return the unit of the y-coordinate of waveforms.
"""
return self.wf_units[1]
@property
def times(self):
return self.layout.get_spiketrain_data(self.episode, self.number)
@property
def waveforms(self):
return self.layout.get_waveform_data(self.episode, self.number) if self.wf_samples else None
# --------------------------------------------------------
# BLOCKS
class BaseBlock(object):
"""
Represent a chunk of file storing metadata or
raw data. A convenient class to break down the
structure of an Elphy file to several building
blocks :
``layout`` : the layout containing the block.
``identifier`` : the label that identified the block.
``size`` : the size of the block.
``start`` : the file index corresponding to the starting byte of the block.
``end`` : the file index corresponding to the ending byte of the block
NB : Subclassing this class is a convenient
way to set the properties using polymorphism
rather than a conditional structure. By this
way each :class:`BaseBlock` type know how to
iterate through the Elphy file and store
interesting data.
"""
def __init__(self, layout, identifier, start, size):
self.layout = layout
self.identifier = identifier
self.size = size
self.start = start
self.end = self.start + self.size - 1
class ElphyBlock(BaseBlock):
"""
A subclass of :class:`BaseBlock`. Useful to
store the location and size of interesting
data within a block :
``parent_block`` : the parent block containing the block.
``header_size`` : the size of the header permitting the
identification of the type of the block.
``data_offset`` : the file index located after the block header.
``data_size`` : the size of data located after the header.
``sub_blocks`` : the sub-blocks contained by the block.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="i", parent_block=None):
super(ElphyBlock, self).__init__(layout, identifier, start, size)
# a block may be a sub-block of another block
self.parent_block = parent_block
# the Pascal language stores strings in 2 different ways
# ... first, if in the program the size of the string is
# specified (fixed) then the file stores the length
# of the string and allocates a number of bytes equal
# to the specified size
# ... if this size is not specified the length of the
# string is also stored but the file allocates dynamically
# a number of bytes equal to the actual size of the string
l_ident = len(self.identifier)
if fixed_length :
l_ident += (fixed_length - l_ident)
self.header_size = l_ident + 1 + type_dict[size_format]
# starting point of data located in the block
self.data_offset = self.start + self.header_size
self.data_size = self.size - self.header_size
# a block may have sub-blocks
# it is to subclasses to initialize
# this property
self.sub_blocks = list()
def __repr__(self):
return "%s : size = %s, start = %s, end = %s" % (self.identifier, self.size, self.start, self.end)
def add_sub_block(self, block):
"""
Append a block to the sub-block list.
"""
self.sub_blocks.append(block)
class FileInfoBlock(ElphyBlock):
"""
Base class of all subclasses whose purpose is to
extract user file info stored in an Elphy file :
``header`` : the header block relative to the block.
``file`` : the file containing the block.
NB : User defined metadata are not really practical.
An Elphy script must know the order of metadata storage
to know exactly how to retrieve these data. That's why
it is necessary to subclass and reproduce elphy script
commands to extract metadata relative to a protocol.
Consequently, managing a new protocol implies refactoring
the file info extraction.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="i", parent_block=None):
super(FileInfoBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format, parent_block=parent_block)
self.header = None
self.file = self.layout.file
def get_protocol_and_version(self):
"""
Return a tuple useful to identify the
kind of protocol that has generated a
file during data acquisition.
"""
raise Exception("must be overloaded in a subclass")
def get_user_file_info(self):
"""
Return a dictionary containing all
user file info stored in the file.
"""
raise Exception("must be overloaded in a subclass")
def get_sparsenoise_revcor(self):
"""
Return 'REVCOR' user file info. This method is common
to :class:`ClassicFileInfo` and :class:`MultistimFileInfo`
because the latter is also able to store this kind of metadata.
"""
header = dict()
header['n_div_x'] = read_from_char(self.file, 'h')
header['n_div_y'] = read_from_char(self.file, 'h')
header['gray_levels'] = read_from_char(self.file, 'h')
header['position_x'] = read_from_char(self.file, 'ext')
header['position_y'] = read_from_char(self.file, 'ext')
header['length'] = read_from_char(self.file, 'ext')
header['width'] = read_from_char(self.file, 'ext')
header['orientation'] = read_from_char(self.file, 'ext')
header['expansion'] = read_from_char(self.file, 'h')
header['scotoma'] = read_from_char(self.file, 'h')
header['seed'] = read_from_char(self.file, 'h')
#dt_on and dt_off may not exist in old revcor formats
rollback = self.file.tell()
header['dt_on'] = read_from_char(self.file, 'ext')
if header['dt_on'] is None :
self.file.seek(rollback)
rollback = self.file.tell()
header['dt_off'] = read_from_char(self.file, 'ext')
if header['dt_off'] is None :
self.file.seek(rollback)
return header
class ClassicFileInfo(FileInfoBlock):
"""
Extract user file info stored into an Elphy file corresponding to
sparse noise (revcor), moving bar and flashbar protocols.
"""
def detect_protocol_from_name(self, path):
pattern = r"\d{4}(\d+|\D)\D"
codes = {
'r':'sparsenoise',
'o':'movingbar',
'f':'flashbar',
'm':'multistim' # here just for assertion
}
filename = path.split('/')[-1]
match = re.search(pattern, path)
if hasattr(match, 'end') :
code = codes.get(path[match.end() - 1].lower(), None)
assert code != 'm', "multistim file detected"
return code
elif 'spt' in filename.lower() :
return 'spontaneousactivity'
else :
return None
def get_protocol_and_version(self):
if self.layout and self.layout.info_block :
self.file.seek(self.layout.info_block.data_offset)
version = self.get_title()
if version in ['REVCOR1', 'REVCOR2', 'REVCOR + PAIRING'] :
name = "sparsenoise"
elif version in ['BARFLASH'] :
name = "flashbar"
elif version in ['ORISTIM', 'ORISTM', 'ORISTM1', 'ORITUN'] :
name = "movingbar"
else :
name = self.detect_protocol_from_name(self.file.name)
self.file.seek(0)
return name, version
return None, None
def get_title(self):
title_length, title = struct.unpack('<B20s', self.file.read(21))
return unicode(title[0:title_length])
def get_user_file_info(self):
header = dict()
if self.layout and self.layout.info_block :
self.file.seek(self.layout.info_block.data_offset)
header['title'] = self.get_title()
# test the protocol name to trigger
# the right header extraction
if self.layout.elphy_file.protocol == 'sparsenoise' :
header.update(self.get_sparsenoise_revcor())
elif self.layout.elphy_file.protocol == 'flashbar' :
header.update(self.get_flashbar_header())
elif self.layout.elphy_file.protocol == 'movingbar' :
header.update(self.get_movingbar_header())
self.file.seek(0)
return header
def get_flashbar_header(self):
header = dict()
orientations = list()
tmp = self.file.tell()
for _ in range(0, 50) :
l, ori = struct.unpack('<B5s', self.file.read(6))
try :
orientations.append(float(ori[0:l]))
except :
return header
header['orientations'] = orientations if orientations else None
self.file.seek(tmp + 50 * 6)
_tmp = read_from_char(self.file, 'h')
header['number_of_orientations'] = _tmp if tmp < 0 else None
_tmp = read_from_char(self.file, 'h')
header['number_of_repetitions'] = _tmp if tmp < 0 else None
header['position_x'] = read_from_char(self.file, 'ext')
header['position_y'] = read_from_char(self.file, 'ext')
header['length'] = read_from_char(self.file, 'ext')
header['width'] = read_from_char(self.file, 'ext')
header['orientation'] = read_from_char(self.file, 'ext')
header['excursion'] = read_from_char(self.file, 'i')
header['dt_on'] = None
return header
def get_movingbar_header(self):
header = dict()
orientations = list()
tmp = self.file.tell()
for _ in range(0, 50) :
l, ori = struct.unpack('<B5s', self.file.read(6))
orientations.append(float(ori[0:l]))
header['orientations'] = orientations if orientations else None
self.file.seek(tmp + 50 * 6)
_tmp = read_from_char(self.file, 'h')
header['number_of_orientations'] = _tmp if tmp < 0 else None
_tmp = read_from_char(self.file, 'h')
header['number_of_repetitions'] = _tmp if tmp < 0 else None
header['position_x'] = read_from_char(self.file, 'ext')
header['position_y'] = read_from_char(self.file, 'ext')
header['length'] = read_from_char(self.file, 'ext')
header['width'] = read_from_char(self.file, 'ext')
header['orientation'] = read_from_char(self.file, 'ext')
header['excursion'] = read_from_char(self.file, 'h')
header['speed'] = read_from_char(self.file, 'h')
header['dim_x'] = read_from_char(self.file, 'h')
header['dim_y'] = read_from_char(self.file, 'h')
return header
class MultistimFileInfo(FileInfoBlock):
def get_protocol_and_version(self):
# test if there is an available info_block
if self.layout and self.layout.info_block :
# go to the info_block
sub_block = self.layout.info_block
self.file.seek(sub_block.data_offset)
#get the first four parameters
#acqLGN = read_from_char(self.file, 'i')
center = read_from_char(self.file, 'i')
surround = read_from_char(self.file, 'i')
version = self.get_title()
# test the type of protocol from
# center and surround parameters
if (surround >= 2) :
name = None
version = None
else :
if center == 2 :
name = "sparsenoise"
elif center == 3 :
name = "densenoise"
elif center == 4 :
name = "densenoise"
elif center == 5 :
name = "grating"
else :
name = None
version = None
self.file.seek(0)
return name, version
return None, None
def get_title(self):
title_length = read_from_char(self.file, 'B')
title, = struct.unpack('<%ss' % title_length, self.file.read(title_length))
self.file.seek(self.file.tell() + 255 - title_length)
return unicode(title)
def get_user_file_info(self):
header = dict()
if self.layout and self.layout.info_block :
# go to the info_block
sub_block = self.layout.info_block
self.file.seek(sub_block.data_offset)
#get the first four parameters
acqLGN = read_from_char(self.file, 'i')
center = read_from_char(self.file, 'i')
surround = read_from_char(self.file, 'i')
#store info in the header
header['acqLGN'] = acqLGN
header['center'] = center
header['surround'] = surround
if not (header['surround'] >= 2) :
header.update(self.get_center_header(center))
self.file.seek(0)
return header
def get_center_header(self, code):
#get file info corresponding
#to the executed protocol
#for the center first ...
if code == 0 :
return self.get_sparsenoise_revcor()
elif code == 2 :
return self.get_sparsenoise_center()
elif code == 3 :
return self.get_densenoise_center(True)
elif code == 4 :
return self.get_densenoise_center(False)
elif code == 5 :
return dict()
# return self.get_grating_center()
else :
return dict()
def get_surround_header(self, code):
#then the surround
if code == 2 :
return self.get_sparsenoise_surround()
elif code == 3 :
return self.get_densenoise_surround(True)
elif code == 4 :
return self.get_densenoise_surround(False)
elif code == 5 :
raise NotImplementedError()
# return self.get_grating_surround()
else :
return dict()
def get_center_surround(self, center, surround):
header = dict()
header['stim_center'] = self.get_center_header(center)
header['stim_surround'] = self.get_surround_header(surround)
return header
def get_sparsenoise_center(self):
header = dict()
header['title'] = self.get_title()
header['number_of_sequences'] = read_from_char(self.file, 'i')
header['pretrigger_duration'] = read_from_char(self.file, 'ext')
header['n_div_x'] = read_from_char(self.file, 'h')
header['n_div_y'] = read_from_char(self.file, 'h')
header['gray_levels'] = read_from_char(self.file, 'h')
header['position_x'] = read_from_char(self.file, 'ext')
header['position_y'] = read_from_char(self.file, 'ext')
header['length'] = read_from_char(self.file, 'ext')
header['width'] = read_from_char(self.file, 'ext')
header['orientation'] = read_from_char(self.file, 'ext')
header['expansion'] = read_from_char(self.file, 'h')
header['scotoma'] = read_from_char(self.file, 'h')
header['seed'] = read_from_char(self.file, 'h')
header['luminance_1'] = read_from_char(self.file, 'ext')
header['luminance_2'] = read_from_char(self.file, 'ext')
header['dt_count'] = read_from_char(self.file, 'i')
dt_array = list()
for _ in range(0, header['dt_count']) :
dt_array.append(read_from_char(self.file, 'ext'))
header['dt_on'] = dt_array if dt_array else None
header['dt_off'] = read_from_char(self.file, 'ext')
return header
def get_sparsenoise_surround(self):
header = dict()
header['title_surround'] = self.get_title()
header['gap'] = read_from_char(self.file, 'ext')
header['n_div_x'] = read_from_char(self.file, 'h')
header['n_div_y'] = read_from_char(self.file, 'h')
header['gray_levels'] = read_from_char(self.file, 'h')
header['expansion'] = read_from_char(self.file, 'h')
header['scotoma'] = read_from_char(self.file, 'h')
header['seed'] = read_from_char(self.file, 'h')
header['luminance_1'] = read_from_char(self.file, 'ext')
header['luminance_2'] = read_from_char(self.file, 'ext')
header['dt_on'] = read_from_char(self.file, 'ext')
header['dt_off'] = read_from_char(self.file, 'ext')
return header
def get_densenoise_center(self, is_binary):
header = dict()
header['stimulus_type'] = "B" if is_binary else "T"
header['title'] = self.get_title()
_tmp = read_from_char(self.file, 'i')
header['number_of_sequences'] = _tmp if _tmp < 0 else None
rollback = self.file.tell()
header['stimulus_duration'] = read_from_char(self.file, 'ext')
if header['stimulus_duration'] is None :
self.file.seek(rollback)
header['pretrigger_duration'] = read_from_char(self.file, 'ext')
header['n_div_x'] = read_from_char(self.file, 'h')
header['n_div_y'] = read_from_char(self.file, 'h')
header['position_x'] = read_from_char(self.file, 'ext')
header['position_y'] = read_from_char(self.file, 'ext')
header['length'] = read_from_char(self.file, 'ext')
header['width'] = read_from_char(self.file, 'ext')
header['orientation'] = read_from_char(self.file, 'ext')
header['expansion'] = read_from_char(self.file, 'h')
header['seed'] = read_from_char(self.file, 'h')
header['luminance_1'] = read_from_char(self.file, 'ext')
header['luminance_2'] = read_from_char(self.file, 'ext')
header['dt_on'] = read_from_char(self.file, 'ext')
header['dt_off'] = read_from_char(self.file, 'ext')
return header
def get_densenoise_surround(self, is_binary):
header = dict()
header['title_surround'] = self.get_title()
header['gap'] = read_from_char(self.file, 'ext')
header['n_div_x'] = read_from_char(self.file, 'h')
header['n_div_y'] = read_from_char(self.file, 'h')
header['expansion'] = read_from_char(self.file, 'h')
header['seed'] = read_from_char(self.file, 'h')
header['luminance_1'] = read_from_char(self.file, 'ext')
header['luminance_2'] = read_from_char(self.file, 'ext')
header['dt_on'] = read_from_char(self.file, 'ext')
header['dt_off'] = read_from_char(self.file, 'ext')
return header
def get_grating_center(self):
pass
def get_grating_surround(self):
pass
class Header(ElphyBlock):
"""
A convenient subclass of :class:`Block` to store
Elphy file header properties.
NB : Subclassing this class is a convenient
way to set the properties of the header using
polymorphism rather than a conditional structure.
"""
def __init__(self, layout, identifier, size, fixed_length=None, size_format="i"):
super(Header, self).__init__(layout, identifier, 0, size, fixed_length, size_format)
class Acquis1Header(Header):
"""
A subclass of :class:`Header` used to
identify the 'ACQUIS1/GS/1991' format.
Whereas more recent format, the header
contains all data relative to episodes,
channels and traces :
``n_channels`` : the number of acquisition channels.
``nbpt`` and ``nbptEx`` : parameters useful to compute the number of samples per episode.
``tpData`` : the data format identifier used to compute sample size.
``x_unit`` : the x-coordinate unit for all channels in an episode.
``y_units`` : an array containing y-coordinate units for each channel in the episode.
``dX`` and ``X0`` : the scale factors necessary to retrieve the actual
times relative to each sample in a channel.
``dY_ar`` and ``Y0_ar``: arrays of scale factors necessary to retrieve
the actual values relative to samples.
``continuous`` : a boolean telling if the file has been acquired in
continuous mode.
``preSeqI`` : the size in bytes of the data preceding raw data.
``postSeqI`` : the size in bytes of the data following raw data.
``dat_length`` : the length in bytes of the data in the file.
``sample_size`` : the size in bytes of a sample.
``n_samples`` : the number of samples.
``ep_size`` : the size in bytes of an episode.
``n_episodes`` : the number of recording sequences stored in the file.
NB :
The size is read from the file,
the identifier is a string containing
15 characters and the size is encoded
as small integer.
See file 'FicDefAc1.pas' to identify
the parsed parameters.
"""
def __init__(self, layout):
fileobj = layout.file
super(Acquis1Header, self).__init__(layout, "ACQUIS1/GS/1991", 1024, 15, "h")
#parse the header to store interesting data about episodes and channels
fileobj.seek(18)
#extract episode properties
n_channels = read_from_char(fileobj, 'B')
assert not ((n_channels < 1) or (n_channels > 16)), "bad number of channels"
nbpt = read_from_char(fileobj, 'h')
l_xu, x_unit = struct.unpack('<B3s', fileobj.read(4))
#extract units for each channel
y_units = list()
for i in range(1, 7) :
l_yu, y_unit = struct.unpack('<B3s', fileobj.read(4))
y_units.append(y_unit[0:l_yu])
#extract i1, i2, x1, x2 and compute dX and X0
i1, i2 = struct.unpack('<hh', fileobj.read(4))
x1 = read_from_char(fileobj, 'ext')
x2 = read_from_char(fileobj, 'ext')
if (i1 != i2) and (x1 != x2) :
dX = (x2 - x1) / (i2 - i1)
X0 = x1 - i1 * dX
else :
dX = None
X0 = None
# raise Exception("bad X-scale parameters")
#extract j1 and j2, y1 and y2 and compute dY
j1 = struct.unpack('<hhhhhh', fileobj.read(12))
j2 = struct.unpack('<hhhhhh', fileobj.read(12))
y1 = list()
for i in range(1, 7) :
y1.append(read_from_char(fileobj, 'ext'))
y2 = list()
for i in range(1, 7) :
y2.append(read_from_char(fileobj, 'ext'))
dY_ar = list()
Y0_ar = list()
for i in range(0, n_channels) :
# detect division by zero
if (j1[i] != j2[i]) and (y1[i] != y2[i]) :
dY_ar.append((y2[i] - y1[i]) / (j2[i] - j1[i]))
Y0_ar.append(y1[i] - j1[i] * dY_ar[i])
else :
dY_ar.append(None)
Y0_ar.append(None)
NbMacq = read_from_char(fileobj, 'h')
#fileobj.read(300) #Macq:typeTabMarqueAcq; { 300 octets }
max_mark = 100
Macq = list()
for i in range(0, max_mark) :
Macq.append(list(struct.unpack('<ch', fileobj.read(3))))
#Xmini,Xmaxi,Ymini,Ymaxi:array[1..6] of float; #fileobj.read(240)
x_mini = list()
for i in range(0, 6) :
x_mini.append(read_from_char(fileobj, 'ext'))
x_maxi = list()
for i in range(0, 6) :
x_maxi.append(read_from_char(fileobj, 'ext'))
y_mini = list()
for i in range(0, 6) :
y_mini.append(read_from_char(fileobj, 'ext'))
y_maxi = list()
for i in range(0, 6) :
y_maxi.append(read_from_char(fileobj, 'ext'))
#modeA:array[1..6] of byte; #fileobj.read(6)
modeA = list(struct.unpack('<BBBBBB', fileobj.read(6)))
continuous = read_from_char(fileobj, '?')
preSeqI, postSeqI = struct.unpack('<hh', fileobj.read(4))
#EchelleSeqI:boolean; #fileobj.read(1)
ep_scaled = read_from_char(fileobj, '?')
nbptEx = read_from_char(fileobj, 'H')
x1s, x2s = struct.unpack('<ff', fileobj.read(8))
y1s = list()
for i in range(0, 6):
y1s.append(read_from_char(fileobj, 'f'))
y2s = list()
for i in range(0, 6):
y2s.append(read_from_char(fileobj, 'f'))
#fileobj.read(96) # Xminis,Xmaxis,Yminis,Ymaxis:array[1..6] of single;
x_minis = list()
for i in range(0, 6) :
x_minis.append(read_from_char(fileobj, 'f'))
x_maxis = list()
for i in range(0, 6) :
x_maxis.append(read_from_char(fileobj, 'f'))
y_minis = list()
for i in range(0, 6) :
y_minis.append(read_from_char(fileobj, 'f'))
y_maxis = list()
for i in range(0, 6) :
y_maxis.append(read_from_char(fileobj, 'f'))
n_ep = read_from_char(fileobj, 'h')
tpData = read_from_char(fileobj, 'h')
assert tpData in [3, 2, 1, 0], "bad sample size"
no_analog_data = read_from_char(fileobj, '?')
self.n_ep = n_ep
self.n_channels = n_channels
self.nbpt = nbpt
self.i1 = i1
self.i2 = i2
self.x1 = x1
self.x2 = x2
self.dX = dX
self.X0 = X0
self.x_unit = x_unit[0:l_xu]
self.dY_ar = dY_ar
self.Y0_ar = Y0_ar
self.y_units = y_units[0:n_channels]
self.NbMacq = NbMacq
self.Macq = Macq
self.x_mini = x_mini[0:n_channels]
self.x_maxi = x_maxi[0:n_channels]
self.y_mini = y_mini[0:n_channels]
self.y_maxi = y_maxi[0:n_channels]
self.modeA = modeA
self.continuous = continuous
self.preSeqI = preSeqI
self.postSeqI = postSeqI
self.ep_scaled = ep_scaled
self.nbptEx = nbptEx
self.x1s = x1s
self.x2s = x2s
self.y1s = y1s
self.y2s = y2s
self.x_minis = x_minis[0:n_channels]
self.x_maxis = x_maxis[0:n_channels]
self.y_minis = y_minis[0:n_channels]
self.y_maxis = y_maxis[0:n_channels]
self.tpData = 2 if not tpData else tpData
self.no_analog_data = no_analog_data
self.dat_length = self.layout.file_size - self.layout.data_offset
self.sample_size = type_dict[types[tpData]]
if self.continuous :
self.n_samples = self.dat_length / (self.n_channels * self.sample_size)
else :
self.n_samples = self.nbpt + self.nbptEx * 32768
ep_size = self.preSeqI + self.postSeqI
if not self.no_analog_data :
ep_size += self.n_samples * self.sample_size * self.n_channels
self.ep_size = ep_size
self.n_episodes = (self.dat_length / self.ep_size) if (self.n_samples != 0) else 0
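# Sketch of the episode geometry computed above (hypothetical values) :
# with n_channels = 2, a 2-byte sample size, n_samples = 1000 and
# preSeqI = postSeqI = 0, an episode occupies 2 * 2 * 1000 = 4000 bytes
# and n_episodes = dat_length / 4000 (integer division).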
class DAC2GSHeader(Header):
"""
A subclass of :class:`Header` used to
identify the 'DAC2/GS/2000' format.
NB : the size is fixed to 20 bytes,
the identifier is a string containing
15 characters and the size is encoded
as integer.
"""
def __init__(self, layout):
super(DAC2GSHeader, self).__init__(layout, "DAC2/GS/2000", 20, 15, "i")
class DAC2Header(Header):
"""
A subclass of :class:`Header` used to
identify the 'DAC2 objects' format.
NB : the size is fixed to 18 bytes,
the identifier is a string containing
15 characters and the size is encoded
as small integer.
"""
def __init__(self, layout):
super(DAC2Header, self).__init__(layout, "DAC2 objects", 18, 15, "h")
class DAC2GSMainBlock(ElphyBlock):
"""
Subclass of :class:`Block` useful to store data corresponding to
the 'Main' block stored in the DAC2/GS/2000 format :
``n_channels`` : the number of acquisition channels.
``nbpt`` : the number of samples per episode.
``tpData`` : the data format identifier used to compute sample size.
``x_unit`` : the x-coordinate unit for all channels in an episode.
``y_units`` : an array containing y-coordinate units for each channel in the episode.
``dX`` and ``X0`` : the scale factors necessary to retrieve the actual
times relative to each sample in a channel.
``dY_ar`` and ``Y0_ar``: arrays of scale factors necessary to retrieve
the actual values relative to samples.
``continuous`` : a boolean telling if the file has been acquired in
continuous mode.
``preSeqI`` : the size in bytes of the data preceding raw data.
``postSeqI`` : the size in bytes of the data following raw data.
``withTags`` : a boolean telling if tags are recorded.
``tagShift`` : the number of tag channels and the shift to apply
to encoded values to retrieve acquired values.
``dat_length`` : the length in bytes of the data in the file.
``sample_size`` : the size in bytes of a sample.
``n_samples`` : the number of samples.
``ep_size`` : the size in bytes of an episode.
``n_episodes`` : the number of recording sequences stored in the file.
NB : see file 'FdefDac2.pas' to identify the other parsed parameters.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="i"):
super(DAC2GSMainBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format)
#parse the file to retrieve episodes and channels properties
n_channels, nbpt, tpData = struct.unpack('<BiB', layout.file.read(6))
l_xu, xu, dX, X0 = struct.unpack('<B10sdd', layout.file.read(27))
y_units = list()
dY_ar = list()
Y0_ar = list()
for _ in range(0, 16) :
l_yu, yu, dY, Y0 = struct.unpack('<B10sdd', layout.file.read(27))
y_units.append(yu[0:l_yu])
dY_ar.append(dY)
Y0_ar.append(Y0)
preSeqI, postSeqI, continuous, varEp, withTags = struct.unpack('<ii???', layout.file.read(11))
#some files don't specify the tagShift
position = layout.file.tell()
if position >= self.end :
tagShift = 0
else :
tagShift = read_from_char(layout.file, 'B')
#setup object properties
self.n_channels = n_channels
self.nbpt = nbpt
self.tpData = tpData
self.x_unit = xu[0:l_xu]
self.dX = dX
self.X0 = X0
self.y_units = y_units[0:n_channels]
self.dY_ar = dY_ar[0:n_channels]
self.Y0_ar = Y0_ar[0:n_channels]
self.continuous = continuous
if self.continuous :
self.preSeqI = 0
self.postSeqI = 0
else :
self.preSeqI = preSeqI
self.postSeqI = postSeqI
self.varEp = varEp
self.withTags = withTags
if not self.withTags :
self.tagShift = 0
else :
if tagShift == 0 :
self.tagShift = 4
else :
self.tagShift = tagShift
self.sample_size = type_dict[types[self.tpData]]
self.dat_length = self.layout.file_size - self.layout.data_offset
if self.continuous :
if self.n_channels > 0 :
self.n_samples = self.dat_length / (self.n_channels * self.sample_size)
else :
self.n_samples = 0
else :
self.n_samples = self.nbpt
self.ep_size = self.preSeqI + self.postSeqI + self.n_samples * self.sample_size * self.n_channels
self.n_episodes = self.dat_length / self.ep_size if (self.n_samples != 0) else 0
class DAC2GSEpisodeBlock(ElphyBlock):
"""
Subclass of :class:`Block` useful to store data corresponding to
'DAC2SEQ' blocks stored in the DAC2/GS/2000 format.
``n_channels`` : the number of acquisition channels.
``nbpt`` : the number of samples per episode.
``tpData`` : the data format identifier used to compute the sample size.
``x_unit`` : the x-coordinate unit for all channels in an episode.
``y_units`` : an array containing y-coordinate units for each channel in the episode.
``dX`` and ``X0`` : the scale factors necessary to retrieve the actual
times relative to each sample in a channel.
``dY_ar`` and ``Y0_ar``: arrays of scale factors necessary to retrieve
the actual values relative to samples.
``postSeqI`` : the size in bytes of the data following raw data.
NB : see file 'FdefDac2.pas' to identify the parsed parameters.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="i"):
main = layout.main_block
n_channels, nbpt, tpData, postSeqI = struct.unpack('<BiBi', layout.file.read(10))
l_xu, xu, dX, X0 = struct.unpack('<B10sdd', layout.file.read(27))
y_units = list()
dY_ar = list()
Y0_ar = list()
for _ in range(0, 16) :
l_yu, yu, dY, Y0 = struct.unpack('<B10sdd', layout.file.read(27))
y_units.append(yu[0:l_yu])
dY_ar.append(dY)
Y0_ar.append(Y0)
super(DAC2GSEpisodeBlock, self).__init__(layout, identifier, start, layout.main_block.ep_size, fixed_length, size_format)
self.n_channels = main.n_channels
self.nbpt = main.nbpt
self.tpData = main.tpData
if not main.continuous :
self.postSeqI = postSeqI
self.x_unit = xu[0:l_xu]
self.dX = dX
self.X0 = X0
self.y_units = y_units[0:n_channels]
self.dY_ar = dY_ar[0:n_channels]
self.Y0_ar = Y0_ar[0:n_channels]
else :
self.postSeqI = 0
self.x_unit = main.x_unit
self.dX = main.dX
self.X0 = main.X0
self.y_units = main.y_units
self.dY_ar = main.dY_ar
self.Y0_ar = main.Y0_ar
class DAC2EpisodeBlock(ElphyBlock):
"""
Subclass of :class:`Block` useful to store data corresponding to
'B_Ep' blocks stored in the last version of Elphy format :
``ep_block`` : a shortcut to the 'Ep' sub-block.
``ch_block`` : a shortcut to the 'Adc' sub-block.
``ks_block`` : a shortcut to the 'Ksamp' sub-block.
``kt_block`` : a shortcut to the 'Ktype' sub-block.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
super(DAC2EpisodeBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format)
self.ep_block = None
self.ch_block = None
self.ks_block = None
self.kt_block = None
def set_episode_block(self):
blocks = self.layout.get_blocks_of_type('Ep', target_blocks=self.sub_blocks)
self.ep_block = blocks[0] if blocks else None
def set_channel_block(self):
blocks = self.layout.get_blocks_of_type('Adc', target_blocks=self.sub_blocks)
self.ch_block = blocks[0] if blocks else None
def set_sub_sampling_block(self):
blocks = self.layout.get_blocks_of_type('Ksamp', target_blocks=self.sub_blocks)
self.ks_block = blocks[0] if blocks else None
def set_sample_size_block(self):
blocks = self.layout.get_blocks_of_type('Ktype', target_blocks=self.sub_blocks)
self.kt_block = blocks[0] if blocks else None
class DummyDataBlock(BaseBlock):
"""
Subclass of :class:`BaseBlock` useful to
identify chunk of blocks that are actually
corresponding to acquired data.
"""
pass
class DAC2RDataBlock(ElphyBlock):
"""
Subclass of :class:`Block` useful to store data corresponding to
'RDATA' blocks stored in the last version of Elphy format :
``data_start`` : the starting point of raw data.
NB : This kind of block is preceded by a structure whose size is encoded
as a 2-byte unsigned short. Consequently, data starts at data_offset plus
that size.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
super(DAC2RDataBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format)
self.data_start = self.data_offset + read_from_char(layout.file, 'H')
class DAC2CyberTagBlock(ElphyBlock):
"""
Subclass of :class:`Block` useful to store data corresponding to
'RCyberTag' blocks stored in the last version of Elphy format :
``data_start`` : the starting point of raw data.
NB : This kind of block is preceded by a structure whose size is encoded
as a 2-byte unsigned short. Consequently, data starts at data_offset plus
that size.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
super(DAC2CyberTagBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format)
self.data_start = self.data_offset + read_from_char(layout.file, 'H')
class DAC2EventBlock(ElphyBlock):
"""
Subclass of :class:`Block` useful to store
data corresponding to 'REVT' blocks stored
in the last version of Elphy format :
``data_start`` : the starting point of raw data.
``n_evt_channels`` : the number of channels used to acquire events.
``n_events`` : an array containing the number of events for each event channel.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
super(DAC2EventBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format)
fileobj = self.layout.file
jump = self.data_offset + read_from_char(fileobj, 'H')
fileobj.seek(jump)
#extract the number of event channel
self.n_evt_channels = read_from_char(fileobj, 'i')
# extract for each event channel
# the corresponding number of events
n_events = list()
for _ in range(0, self.n_evt_channels) :
n_events.append(read_from_char(fileobj, 'i'))
self.n_events = n_events
self.data_start = fileobj.tell()
class DAC2SpikeBlock(DAC2EventBlock):
"""
Subclass of :class:`DAC2EventBlock` useful
to identify 'RSPK' and make the distinction
with 'REVT' blocks stored in the last version
of Elphy format.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
super(DAC2SpikeBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format)
fileobj = self.layout.file
jump = self.data_offset
fileobj.seek(jump) # go to SpikeBlock
jump = self.data_offset + read_from_char(fileobj, 'h')
fileobj.seek(jump)
#extract the number of event channel
self.n_evt_channels = read_from_char(fileobj, 'i')
# extract for each event channel
# the corresponding number of events
n_events = list()
for _ in range(0, self.n_evt_channels) :
n_events.append(read_from_char(fileobj, 'i'))
self.n_events = n_events
self.data_start = fileobj.tell()
class DAC2WaveFormBlock(ElphyBlock):
"""
Subclass of :class:`Block` useful to store data corresponding to
'RspkWave' blocks stored in the last version of Elphy format :
``data_start`` : the starting point of raw data.
``n_spk_channels`` : the number of channels used to acquire spiketrains.
``n_spikes`` : an array containing the number of spikes for each spiketrain.
``pre_trigger`` : the number of samples of a waveform arriving before a spike.
``wavelength`` : the number of samples in a waveform.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l"):
super(DAC2WaveFormBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format)
fileobj = self.layout.file
jump = self.data_offset + read_from_char(fileobj, 'H')
fileobj.seek(jump)
self.wavelength = read_from_char(fileobj, 'i')
self.pre_trigger = read_from_char(fileobj, 'i')
self.n_spk_channels = read_from_char(fileobj, 'i')
n_spikes = list()
for _ in range(0, self.n_spk_channels) :
n_spikes.append(read_from_char(fileobj, 'i'))
self.n_spikes = n_spikes
self.data_start = fileobj.tell()
class DAC2EpSubBlock(ElphyBlock):
"""
Subclass of :class:`Block` useful to retrieve data corresponding
to a 'Ep' sub-block stored in the last version of Elphy format :
``n_channels`` : the number of acquisition channels.
``nbpt`` : the number of samples per episode.
``tpData`` : the data format identifier used to store signal samples.
``x_unit`` : the x-coordinate unit for all channels in an episode.
``dX`` and ``X0`` : the scale factors necessary to retrieve the actual
times relative to each sample in a channel.
``continuous`` : a boolean telling if the file has been acquired in
continuous mode.
``tag_mode`` : identifies the way tags are stored in a file.
``tag_shift`` : the number of bits that tags occupy in a 16-bit sample
and the shift necessary to retrieve the actual value of the sample.
``dX_wf`` and ``X0_wf``: the scale factors necessary to retrieve the actual
times relative to each waveforms.
``dY_wf`` and ``Y0_wf``: the scale factors necessary to retrieve the actual
values relative to waveform samples.
``x_unit_wf`` and ``y_unit_wf``: the unit of x and y coordinates for all waveforms in an episode.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l", parent_block=None):
super(DAC2EpSubBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format, parent_block=parent_block)
fileobj = self.layout.file
n_channels, nbpt, tpData, l_xu, x_unit, dX, X0 = struct.unpack('<BiBB10sdd', fileobj.read(33))
continuous, tag_mode, tag_shift = struct.unpack('<?BB', fileobj.read(3))
DxuSpk, X0uSpk, nbSpk, DyuSpk, Y0uSpk, l_xuspk, unitXSpk, l_yuspk, unitYSpk = struct.unpack('<ddiddB10sB10s', fileobj.read(58))
cyber_time, pc_time = struct.unpack('<dI', fileobj.read(12))
# necessary properties to reconstruct
# signals stored into the file
self.n_channels = n_channels
self.nbpt = nbpt
self.tpData = tpData
self.x_unit = x_unit[0:l_xu]
self.dX = dX
self.X0 = X0
self.continuous = continuous
self.tag_mode = tag_mode
self.tag_shift = tag_shift if self.tag_mode == 1 else 0
# following properties are valid
# when using multielectrode system
# named BlackRock / Cyberkinetics
#if fileobj.tell() < self.end :
self.dX_wf = DxuSpk
self.X0_wf = X0uSpk
self.n_spikes = nbSpk
self.dY_wf = DyuSpk
self.Y0_wf = Y0uSpk
self.x_unit_wf = unitXSpk[0:l_xuspk]
self.y_unit_wf = unitYSpk[0:l_yuspk]
self.cyber_time = cyber_time
self.pc_time = pc_time
class DAC2AdcSubBlock(ElphyBlock):
"""
Subclass of :class:`SubBlock` useful to retrieve data corresponding
to a 'Adc' sub-block stored in the last version of Elphy format :
``y_units`` : an array containing all y-coordinates for each channel.
``dY_ar`` and ``Y0_ar`` : arrays containing scaling factors for each
channel useful to compute the actual value of a signal sample.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l", parent_block=None):
super(DAC2AdcSubBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format, parent_block=parent_block)
fileobj = self.layout.file
#fileobj.seek(start + len(identifier) + 1)
ep_block, = [k for k in self.parent_block.sub_blocks if k.identifier.startswith('Ep')]
n_channels = ep_block.n_channels
self.y_units = list()
self.dY_ar = list()
self.Y0_ar = list()
for _ in range(0, n_channels) :
l_yu, y_unit, dY, Y0 = struct.unpack('<B10sdd', fileobj.read(27))
self.y_units.append(y_unit[0:l_yu])
self.dY_ar.append(dY)
self.Y0_ar.append(Y0)
class DAC2KSampSubBlock(ElphyBlock):
"""
Subclass of :class:`SubBlock` useful to retrieve data corresponding
to a 'Ksamp' sub-block stored in the last version of Elphy format :
``k_sampling`` : an array containing all sub-sampling factors
corresponding to each acquired channel. If a factor is equal to
zero, then the channel has been converted into an event channel.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l", parent_block=None):
super(DAC2KSampSubBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format, parent_block=parent_block)
fileobj = self.layout.file
ep_block, = [k for k in self.parent_block.sub_blocks if k.identifier.startswith('Ep')]
n_channels = ep_block.n_channels
k_sampling = list()
for _ in range(0, n_channels) :
k_sampling.append(read_from_char(fileobj, "H"))
self.k_sampling = k_sampling
class DAC2KTypeSubBlock(ElphyBlock):
"""
Subclass of :class:`SubBlock` useful to retrieve data corresponding
to a 'Ktype' sub-block stored in the last version of Elphy format :
``k_types`` : an array containing all data formats identifier used
to compute sample size.
"""
def __init__(self, layout, identifier, start, size, fixed_length=None, size_format="l", parent_block=None):
super(DAC2KTypeSubBlock, self).__init__(layout, identifier, start, size, fixed_length, size_format, parent_block=parent_block)
fileobj = self.layout.file
ep_block, = [k for k in self.parent_block.sub_blocks if k.identifier.startswith('Ep')]
n_channels = ep_block.n_channels
k_types = list()
for _ in range(0, n_channels) :
k_types.append(read_from_char(fileobj, "B"))
self.k_types = k_types
# --------------------------------------------------------
# UTILS
#symbols of types that could
#encode a value in an elphy file
types = (
'B',
'b',
'h',
'H',
'l',
'f',
'real48',
'd',
'ext',
's_complex',
'd_complex',
'complex',
'none'
)
#a dictionary linking python.struct
#formats to their actual size in bytes
type_dict = {
'c':1,
'b':1,
'B':1,
'?':1,
'h':2,
'H':2,
'i':4,
'I':4,
'l':4,
'L':4,
'q':8,
'Q':8,
'f':4,
'd':8,
'H+l':6,
'ext':10,
'real48':6,
's_complex':8,
'd_complex':16,
'complex':20,
'none':0
}
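# How the two tables above combine : an Elphy type code such as
# tpData = 2 selects types[2] == 'h', and type_dict['h'] == 2 means
# each sample of that type occupies 2 bytes on disk.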
#a dictionary linking python.struct
#formats to numpy formats
numpy_map = {
'b':np.int8,
'B':np.uint8,
'h':np.int16,
'H':np.uint16,
'i':np.int32,
'I':np.uint32,
'l':np.int32,
'L':np.uint32,
'q':np.int64,
'Q':np.uint64,
'f':np.float32,
'd':np.float64,
'H+l':6,
'ext':10,
'real48':6,
'SComp':8,
'DComp':16,
'Comp':20,
'none':0
}
def read_from_char(data, type_char):
"""
Return the value corresponding
to the specified character type.
"""
n_bytes = type_dict[type_char]
ascii = data.read(n_bytes) if isinstance(data, file) else data
if type_char != 'ext':
try :
value = struct.unpack('<%s' % type_char, ascii)[0]
except :
# the value could not been read
# because the value is not compatible
# with the specified type
value = None
else :
try :
value = float(ascii)
except :
value = None
return value
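# Usage sketch (assuming an already opened Elphy file object f) :
# read_from_char(f, 'B') consumes 1 byte and returns an unsigned integer,
# while read_from_char(f, 'ext') consumes 10 bytes and returns None when
# they cannot be interpreted, which callers use to detect missing fields.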
def least_common_multiple(a, b):
"""
Return the value of the least common multiple.
"""
return (a * b) / gcd(a, b)
# --------------------------------------------------------
# LAYOUT
b_float = 'f8'
b_int = 'i2'
class ElphyLayout(object):
"""
A convenient class to know how data
are organised into an Elphy file :
``elphy_file`` : a :class:`ElphyFile`
providing file introspection.
``blocks`` : a set of :class:`BaseBlock`
objects partitioning a file and extracting
some useful metadata.
``info_block`` : a shortcut to a :class:`FileInfoBlock`
object containing metadata describing a recording
protocol (sparsenoise, densenoise, movingbar or flashbar).
``data_blocks`` : a shortcut to access directly
blocks containing raw data.
NB : Subclassing this class is a convenient
way to retrieve blocks constituting a file,
their relative information and location of
raw data using polymorphism rather than a
conditional structure.
"""
def __init__(self, elphy_file):
self.elphy_file = elphy_file
self.blocks = list()
self.info_block = None
self.data_blocks = None
@property
def file(self):
return self.elphy_file.file
@property
def file_size(self):
return self.elphy_file.file_size
def is_continuous(self):
raise NotImplementedError('must be overloaded in a subclass')
def add_block(self, block):
self.blocks.append(block)
@property
def header(self):
return self.blocks[0]
def get_blocks_of_type(self, identifier, target_blocks=None):
blocks = self.blocks if target_blocks is None else target_blocks
return [k for k in blocks if (k.identifier == identifier)]
def set_info_block(self):
raise NotImplementedError('must be overloaded in a subclass')
def set_data_blocks(self):
raise NotImplementedError('must be overloaded in a subclass')
def get_tag(self, episode, tag_channel):
raise NotImplementedError('must be overloaded in a subclass')
@property
def n_episodes(self):
raise NotImplementedError('must be overloaded in a subclass')
def n_channels(self, episode):
raise NotImplementedError('must be overloaded in a subclass')
def n_tags(self, episode):
raise NotImplementedError('must be overloaded in a subclass')
def n_samples(self, episode, channel):
raise NotImplementedError('must be overloaded in a subclass')
def sample_type(self, ep, ch):
raise NotImplementedError('must be overloaded in a subclass')
def sample_size(self, ep, ch):
symbol = self.sample_symbol(ep, ch)
return type_dict[symbol]
def sample_symbol(self, ep, ch):
tp = self.sample_type(ep, ch)
try:
return types[tp]
except :
return 'h'
def sampling_period(self, ep, ch):
raise NotImplementedError('must be overloaded in a subclass')
def x_scale_factors(self, ep, ch):
raise NotImplementedError('must be overloaded in a subclass')
def y_scale_factors(self, ep, ch):
raise NotImplementedError('must be overloaded in a subclass')
def x_tag_scale_factors(self, ep):
raise NotImplementedError('must be overloaded in a subclass')
def x_unit(self, ep, ch):
raise NotImplementedError('must be overloaded in a subclass')
def y_unit(self, ep, ch):
raise NotImplementedError('must be overloaded in a subclass')
def tag_shift(self, ep):
raise NotImplementedError('must be overloaded in a subclass')
def get_channel_for_tags(self, ep):
raise NotImplementedError('must be overloaded in a subclass')
def get_signal(self, episode, channel):
"""
Return the signal description relative
to the specified episode and channel.
"""
assert episode in range(1, self.n_episodes + 1)
assert channel in range(1, self.n_channels(episode) + 1)
t_start = 0
sampling_period = self.sampling_period(episode, channel)
t_stop = sampling_period * self.n_samples(episode, channel)
return ElphySignal(
self,
episode,
channel,
self.x_unit(episode, channel),
self.y_unit(episode, channel),
1 / sampling_period,
t_start,
t_stop
)
def create_channel_mask(self, ep):
"""
Return the minimal pattern of channel numbers
representing the succession of channels in the
multiplexed data. It is necessary to do the mapping
between a sample stored in the file and its relative
channel.
"""
raise NotImplementedError('must be overloaded in a subclass')
def get_data_blocks(self, ep):
"""
Return a set of :class:`DummyDataBlock` instances
that define the actual location of samples in blocks
encapsulating raw data.
"""
raise NotImplementedError('must be overloaded in a subclass')
def create_bit_mask(self, ep, ch):
"""
Build a mask to apply on the entire episode
in order to only keep values corresponding
to the specified channel.
"""
ch_mask = self.create_channel_mask(ep)
_mask = list()
for _ch in ch_mask :
size = self.sample_size(ep, _ch)
val = 1 if _ch == ch else 0
for _ in xrange(0, size) :
_mask.append(val)
return np.array(_mask)
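# Worked example (hypothetical layout) : with two channels of 2-byte
# samples the channel mask is [1, 2] ; asking for channel 1 yields the
# byte mask [1, 1, 0, 0], i.e. keep the two bytes of channel 1 and drop
# the two bytes of channel 2 in every aggregate.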
def load_bytes(self, data_blocks, dtype='<i1', start=None, end=None, expected_size=None):
"""
Return list of bytes contained
in the specified set of blocks.
NB : loads all data, as files cannot exceed 4 GB ;
other solutions to spare memory may be found later.
"""
chunks = list()
raw = ''
# keep only data blocks having
# a size greater than zero
blocks = [k for k in data_blocks if k.size > 0]
for data_block in blocks :
self.file.seek(data_block.start)
raw = self.file.read(data_block.size)[0:expected_size]
databytes = np.frombuffer(raw, dtype=dtype)
chunks.append(databytes)
# concatenate all chunks and return
# the specified slice
if len(chunks)>0 :
databytes = np.concatenate(chunks)
return databytes[start:end]
else :
return np.array([])
def reshape_bytes(self, databytes, reshape, datatypes, order='<'):
"""
Reshape a numpy array containing a set of databytes.
"""
assert datatypes and len(datatypes) == len(reshape), "datatypes are not well defined"
l_bytes = len(databytes)
#create the mask for each shape
shape_mask = list()
for shape in reshape :
for _ in xrange(1, shape + 1) :
shape_mask.append(shape)
#create a set of masks to extract data
bit_masks = list()
for shape in reshape :
bit_mask = list()
for value in shape_mask :
bit = 1 if (value == shape) else 0
bit_mask.append(bit)
bit_masks.append(np.array(bit_mask))
#extract data
n_samples = l_bytes / np.sum(reshape)
data = np.empty([len(reshape), n_samples], dtype=(int, int))
for index, bit_mask in enumerate(bit_masks) :
tmp = self.filter_bytes(databytes, bit_mask)
tp = '%s%s%s' % (order, datatypes[index], reshape[index])
data[index] = np.frombuffer(tmp, dtype=tp)
return data.T
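# Worked example : reshape_bytes(databytes, reshape=(4, 2), datatypes=('u', 'u'))
# splits every 6-byte record into a '<u4' column and a '<u2' column and
# returns them as rows of (value_1, value_2) pairs ; this is how the
# (time, value) pairs of cyber tag blocks are decoded further below.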
def filter_bytes(self, databytes, bit_mask):
"""
Detect from a bit mask which bytes
to keep to recompose the signal.
"""
n_bytes = len(databytes)
mask = np.ones(n_bytes, dtype=int)
np.putmask(mask, mask, bit_mask)
to_keep = np.where(mask > 0)[0]
return databytes.take(to_keep)
def load_channel_data(self, ep, ch):
"""
Return a numpy array containing the
list of bytes corresponding to the
specified episode and channel.
"""
#memorise the sample size and symbol
sample_size = self.sample_size(ep, ch)
sample_symbol = self.sample_symbol(ep, ch)
#create a bit mask to define which
#sample to keep from the file
bit_mask = self.create_bit_mask(ep, ch)
#load all bytes contained in an episode
data_blocks = self.get_data_blocks(ep)
databytes = self.load_bytes(data_blocks)
raw = self.filter_bytes(databytes, bit_mask)
#reshape bytes from the sample size
dt = np.dtype(numpy_map[sample_symbol])
dt = dt.newbyteorder('<')
return np.frombuffer(raw.reshape([len(raw) / sample_size, sample_size]), dt)
def apply_op(self, np_array, value, op_type):
"""
A convenient function to apply an operator
over all elements of a numpy array.
"""
if op_type == "shift_right" :
return np_array >> value
elif op_type == "shift_left" :
return np_array << value
elif op_type == "mask" :
return np_array & value
else :
return np_array
def get_tag_mask(self, tag_ch, tag_mode):
"""
Return a mask useful to retrieve
bits that encode a tag channel.
"""
if tag_mode == 1 :
tag_mask = 0b01 if (tag_ch == 1) else 0b10
elif tag_mode in [2, 3] :
ar_mask = np.zeros(16, dtype=int)
ar_mask[tag_ch - 1] = 1
st = "0b" + ''.join(np.array(np.flipud(ar_mask), dtype=str))
tag_mask = int(st, 2)
return tag_mask
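# Worked example : in digidata mode (tag_mode == 1) channel 1 maps to the
# mask 0b01 and channel 2 to 0b10 ; in the 16-bit modes (tag_mode 2 or 3)
# channel 3 gives the mask 0b0000000000000100, i.e. bit 2 of the sample.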
def load_encoded_tags(self, ep, tag_ch):
"""
Return a numpy array containing
bytes corresponding to the specified
episode and channel.
"""
tag_mode = self.tag_mode(ep)
tag_mask = self.get_tag_mask(tag_ch, tag_mode)
if tag_mode in [1, 2] :
#digidata or itc mode
#available for all formats
ch = self.get_channel_for_tags(ep)
raw = self.load_channel_data(ep, ch)
return self.apply_op(raw, tag_mask, "mask")
elif tag_mode == 3 :
#cyber k mode
#only available for DAC2 objects format
#store bytes corresponding to the blocks
#containing tags in a numpy array and reshape
#it to have a set of tuples (time, value)
ck_blocks = self.group_blocks_of_type(ep, 'RCyberTag')
databytes = self.load_bytes(ck_blocks)
raw = self.reshape_bytes(databytes, reshape=(4, 2), datatypes=('u', 'u'), order='<')
#keep only items that are compatible
#with the specified tag channel
raw[:, 1] = self.apply_op(raw[:, 1], tag_mask, "mask")
#computing numpy.diff is useful to know
#how many times a value is maintained
#and necessary to reconstruct the
#compressed signal ...
repeats = np.array(np.diff(raw[:, 0]), dtype=int)
data = np.repeat(raw[:-1, 1], repeats, axis=0)
# ... note that there is always
#a transition at t=0 for synchronisation
#purposes, consequently it is not necessary
#to complete with zeros when the first
#transition arrives ...
return data
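# Worked example of the decompression above (hypothetical tuples) : if the
# (time, value) pairs read from the blocks are (0, v0), (5, v1), (9, v2),
# np.diff gives the repeat counts [5, 4], so the reconstructed signal is
# v0 repeated 5 times followed by v1 repeated 4 times ; the last pair only
# marks the end of the final plateau.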
def load_encoded_data(self, ep, ch):
"""
Get encoded value of raw data from the elphy file.
"""
tag_shift = self.tag_shift(ep)
data = self.load_channel_data(ep, ch)
if tag_shift :
return self.apply_op(data, tag_shift, "shift_right")
else :
return data
def get_signal_data(self, ep, ch):
"""
Return a numpy array containing all samples of a
signal, acquired on an Elphy analog channel, formatted
as a list of (time, value) tuples.
"""
#get data from the file
y_data = self.load_encoded_data(ep, ch)
x_data = np.arange(0, len(y_data))
#create a recarray
data = np.recarray(len(y_data), dtype=[('x', b_float), ('y', b_float)])
#put in the recarray the scaled data
x_factors = self.x_scale_factors(ep, ch)
y_factors = self.y_scale_factors(ep, ch)
data['x'] = x_factors.scale(x_data)
data['y'] = y_factors.scale(y_data)
return data
def get_tag_data(self, ep, tag_ch):
"""
Return a numpy array containing all samples of a
signal, acquired on an Elphy tag channel, formatted
as a list of (time, value) tuples.
"""
#get data from the file
y_data = self.load_encoded_tags(ep, tag_ch)
x_data = np.arange(0, len(y_data))
#create a recarray
data = np.recarray(len(y_data), dtype=[('x', b_float), ('y', b_int)])
#put in the recarray the scaled data
factors = self.x_tag_scale_factors(ep)
data['x'] = factors.scale(x_data)
data['y'] = y_data
return data
class Acquis1Layout(ElphyLayout):
"""
A subclass of :class:`ElphyLayout` to know
how the 'ACQUIS1/GS/1991' format is organised.
Extends :class:`ElphyLayout` to store the
offset used to retrieve directly raw data :
``data_offset`` : an offset to jump directly
to the raw data.
"""
def __init__(self, fileobj, data_offset):
super(Acquis1Layout, self).__init__(fileobj)
self.data_offset = data_offset
self.data_blocks = None
def get_blocks_end(self):
return self.data_offset
def is_continuous(self):
return self.header.continuous
def get_episode_blocks(self):
raise NotImplementedError()
def set_info_block(self):
i_blks = self.get_blocks_of_type('USER INFO')
assert len(i_blks) < 2, 'too many info blocks'
if len(i_blks) :
self.info_block = i_blks[0]
def set_data_blocks(self):
data_blocks = list()
size = self.header.n_samples * self.header.sample_size * self.header.n_channels
for ep in range(0, self.header.n_episodes) :
start = self.data_offset + ep * self.header.ep_size + self.header.preSeqI
data_blocks.append(DummyDataBlock(self, 'Acquis1Data', start, size))
self.data_blocks = data_blocks
def get_data_blocks(self, ep):
return [self.data_blocks[ep - 1]]
@property
def n_episodes(self):
return self.header.n_episodes
def n_channels(self, episode):
return self.header.n_channels
def n_tags(self, episode):
return 0
def tag_mode(self, ep):
return 0
def tag_shift(self, ep):
return 0
def get_channel_for_tags(self, ep):
return None
@property
def no_analog_data(self):
return True if (self.n_episodes == 0) else self.header.no_analog_data
def sample_type(self, ep, ch):
return self.header.tpData
def sampling_period(self, ep, ch):
return self.header.dX
def n_samples(self, ep, ch):
return self.header.n_samples
def x_tag_scale_factors(self, ep):
return ElphyScaleFactor(
self.header.dX,
self.header.X0
)
def x_scale_factors(self, ep, ch):
return ElphyScaleFactor(
self.header.dX,
self.header.X0
)
def y_scale_factors(self, ep, ch):
dY = self.header.dY_ar[ch - 1]
Y0 = self.header.Y0_ar[ch - 1]
# TODO: see why this kind of exception exists
if dY is None or Y0 is None :
raise Exception('bad Y-scale factors for episode %s channel %s' % (ep, ch))
return ElphyScaleFactor(dY, Y0)
def x_unit(self, ep, ch):
return self.header.x_unit
def y_unit(self, ep, ch):
return self.header.y_units[ch - 1]
@property
def ep_size(self):
return self.header.ep_size
@property
def file_duration(self):
return self.header.dX * self.header.n_samples
def get_tag(self, episode, tag_channel):
return None
def create_channel_mask(self, ep):
return np.arange(1, self.header.n_channels + 1)
class DAC2GSLayout(ElphyLayout):
"""
A subclass of :class:`ElphyLayout` to know
how the 'DAC2 / GS / 2000' format is organised.
Extends :class:`ElphyLayout` to store the
offset used to retrieve directly raw data :
``data_offset`` : an offset to jump directly
after the 'MAIN' block where 'DAC2SEQ' blocks
start.
``main_block`` : a shortcut to access 'MAIN' block.
``episode_blocks`` : a shortcut to access blocks
corresponding to episodes.
"""
def __init__(self, fileobj, data_offset):
super(DAC2GSLayout, self).__init__(fileobj)
self.data_offset = data_offset
self.main_block = None
self.episode_blocks = None
def get_blocks_end(self):
return self.file_size #data_offset
def is_continuous(self):
main_block = self.main_block
return main_block.continuous if main_block else False
def get_episode_blocks(self):
raise NotImplementedError()
def set_main_block(self):
main_block = self.get_blocks_of_type('MAIN')
self.main_block = main_block[0] if main_block else None
def set_episode_blocks(self):
ep_blocks = self.get_blocks_of_type('DAC2SEQ')
self.episode_blocks = ep_blocks if ep_blocks else None
def set_info_block(self):
i_blks = self.get_blocks_of_type('USER INFO')
assert len(i_blks) < 2, "too many info blocks"
if len(i_blks) :
self.info_block = i_blks[0]
def set_data_blocks(self):
data_blocks = list()
identifier = 'DAC2GSData'
size = self.main_block.n_samples * self.main_block.sample_size * self.main_block.n_channels
if not self.is_continuous() :
blocks = self.get_blocks_of_type('DAC2SEQ')
for block in blocks :
start = block.start + self.main_block.preSeqI
data_blocks.append(DummyDataBlock(self, identifier, start, size))
else :
start = self.blocks[-1].end + 1 + self.main_block.preSeqI
data_blocks.append(DummyDataBlock(self, identifier, start, size))
self.data_blocks = data_blocks
def get_data_blocks(self, ep):
return [self.data_blocks[ep - 1]]
def episode_block(self, ep):
return self.main_block if self.is_continuous() else self.episode_blocks[ep - 1]
def tag_mode(self, ep):
return 1 if self.main_block.withTags else 0
def tag_shift(self, ep):
return self.main_block.tagShift
def get_channel_for_tags(self, ep):
return 1
def sample_type(self, ep, ch):
return self.main_block.tpData
def sample_size(self, ep, ch):
size = super(DAC2GSLayout, self).sample_size(ep, ch)
assert size == 2, "sample size is always 2 bytes for DAC2/GS/2000 format"
return size
def sampling_period(self, ep, ch):
block = self.episode_block(ep)
return block.dX
def x_tag_scale_factors(self, ep):
block = self.episode_block(ep)
return ElphyScaleFactor(
block.dX,
block.X0,
)
def x_scale_factors(self, ep, ch):
block = self.episode_block(ep)
return ElphyScaleFactor(
block.dX,
block.X0,
)
def y_scale_factors(self, ep, ch):
block = self.episode_block(ep)
return ElphyScaleFactor(
block.dY_ar[ch - 1],
block.Y0_ar[ch - 1]
)
def x_unit(self, ep, ch):
block = self.episode_block(ep)
return block.x_unit
def y_unit(self, ep, ch):
block = self.episode_block(ep)
return block.y_units[ch - 1]
def n_samples(self, ep, ch):
return self.main_block.n_samples
def ep_size(self, ep):
return self.main_block.ep_size
@property
def n_episodes(self):
return self.main_block.n_episodes
def n_channels(self, episode):
return self.main_block.n_channels
def n_tags(self, episode):
return 2 if self.main_block.withTags else 0
@property
def file_duration(self):
return self.main_block.dX * self.main_block.n_samples
def get_tag(self, episode, tag_channel):
assert episode in range(1, self.n_episodes + 1)
# there are none or 2 tag channels
if self.tag_mode(episode) == 1 :
assert tag_channel in range(1, 3), "DAC2/GS/2000 format supports only 2 tag channels"
block = self.episode_block(episode)
t_stop = self.main_block.n_samples * block.dX
return ElphyTag(self, episode, tag_channel, block.x_unit, 1.0 / block.dX, 0, t_stop)
else :
return None
def n_tag_samples(self, ep, tag_channel):
return self.main_block.n_samples
def get_tag_data(self, episode, tag_channel):
#memorise some useful properties
block = self.episode_block(episode)
sample_size = self.sample_size(episode, tag_channel)
sample_symbol = self.sample_symbol(episode, tag_channel)
#create a bit mask to define which
#sample to keep from the file
channel_mask = self.create_channel_mask(episode)
bit_mask = self.create_bit_mask(channel_mask, 1)
#get bytes from the file
data_block = self.data_blocks[episode - 1]
n_bytes = data_block.size
self.file.seek(data_block.start)
databytes = np.frombuffer(self.file.read(n_bytes), '<i1')
#detect which bits keep to recompose the tag
ep_mask = np.ones(n_bytes, dtype=int)
np.putmask(ep_mask, ep_mask, bit_mask)
to_keep = np.where(ep_mask > 0)[0]
raw = databytes.take(to_keep)
raw = raw.reshape([len(raw) / sample_size, sample_size])
#create a recarray containing data
dt = np.dtype(numpy_map[sample_symbol])
dt = dt.newbyteorder('<')
tag_mask = 0b01 if (tag_channel == 1) else 0b10
y_data = np.frombuffer(raw, dt) & tag_mask
x_data = np.arange(0, len(y_data)) * block.dX + block.X0
data = np.recarray(len(y_data), dtype=[('x', b_float), ('y', b_int)])
data['x'] = x_data
data['y'] = y_data
return data
def create_channel_mask(self, ep):
return np.arange(1, self.main_block.n_channels + 1)
class DAC2Layout(ElphyLayout):
"""
A subclass of :class:`ElphyLayout` to know
how the Elphy format is organised.
Whereas other formats store raw data at the
end of the file, the 'DAC2 objects' format spreads
them over multiple blocks :
``episode_blocks`` : a shortcut to access blocks
corresponding to episodes.
"""
def __init__(self, fileobj):
super(DAC2Layout, self).__init__(fileobj)
self.episode_blocks = None
def get_blocks_end(self):
return self.file_size
def is_continuous(self):
ep_blocks = [k for k in self.blocks if k.identifier.startswith('B_Ep')]
if ep_blocks :
ep_block = ep_blocks[0]
ep_sub_block = ep_block.sub_blocks[0]
return ep_sub_block.continuous
else :
return False
def set_episode_blocks(self):
self.episode_blocks = [k for k in self.blocks if k.identifier.startswith('B_Ep')]
def set_info_block(self):
#in fact the file info is contained in a single sub-block with a USR identifier
i_blks = self.get_blocks_of_type('B_Finfo')
assert len(i_blks) < 2, "too many info blocks"
if len(i_blks) :
i_blk = i_blks[0]
sub_blocks = i_blk.sub_blocks
if len(sub_blocks) :
self.info_block = sub_blocks[0]
def set_data_blocks(self):
data_blocks = list()
blocks = self.get_blocks_of_type('RDATA')
for block in blocks :
start = block.data_start
size = block.end + 1 - start
data_blocks.append(DummyDataBlock(self, 'RDATA', start, size))
self.data_blocks = data_blocks
def get_data_blocks(self, ep):
return self.group_blocks_of_type(ep, 'RDATA')
def group_blocks_of_type(self, ep, identifier):
ep_blocks = list()
blocks = [k for k in self.get_blocks_stored_in_episode(ep) if k.identifier == identifier]
for block in blocks :
start = block.data_start
size = block.end + 1 - start
ep_blocks.append(DummyDataBlock(self, identifier, start, size))
return ep_blocks
def get_blocks_stored_in_episode(self, ep):
data_blocks = [k for k in self.blocks if k.identifier == 'RDATA']
n_ep = self.n_episodes
blk_1 = self.episode_block(ep)
blk_2 = self.episode_block((ep + 1) % n_ep)
i_1 = self.blocks.index(blk_1)
i_2 = self.blocks.index(blk_2)
if (blk_1 == blk_2) or (i_2 < i_1) :
return [k for k in data_blocks if self.blocks.index(k) > i_1]
else :
return [k for k in data_blocks if self.blocks.index(k) in xrange(i_1, i_2)]
def set_cyberk_blocks(self):
ck_blocks = list()
blocks = self.get_blocks_of_type('RCyberTag')
for block in blocks :
start = block.data_start
size = block.end + 1 - start
ck_blocks.append(DummyDataBlock(self, 'RCyberTag', start, size))
self.ck_blocks = ck_blocks
def episode_block(self, ep):
return self.episode_blocks[ep - 1]
@property
def n_episodes(self):
return len(self.episode_blocks)
def analog_index(self, episode):
"""
Return indices relative to channels
used for analog signals.
"""
block = self.episode_block(episode)
tag_mode = block.ep_block.tag_mode
an_index = np.where(np.array(block.ks_block.k_sampling) > 0)
if tag_mode == 2 :
an_index = an_index[:-1]
return an_index
def n_channels(self, episode):
"""
Return the number of channels used
for analog signals but also events.
NB : in Elphy these 2 kinds of channels
are not differentiated.
"""
block = self.episode_block(episode)
tag_mode = block.ep_block.tag_mode
n_channels = len(block.ks_block.k_sampling)
return n_channels if tag_mode != 2 else n_channels - 1
def n_tags(self, episode):
block = self.episode_block(episode)
tag_mode = block.ep_block.tag_mode
tag_map = {0:0, 1:2, 2:16, 3:16}
return tag_map.get(tag_mode, 0)
def n_events(self, episode):
"""
Return the number of channels
dedicated to events.
"""
block = self.episode_block(episode)
return block.ks_block.k_sampling.count(0)
def n_spiketrains(self, episode):
spk_blocks = [k for k in self.blocks if k.identifier == 'RSPK']
return spk_blocks[0].n_evt_channels if spk_blocks else 0
def sub_sampling(self, ep, ch):
"""
Return the sub-sampling factor for
the specified episode and channel.
"""
block = self.episode_block(ep)
return block.ks_block.k_sampling[ch - 1] if block.ks_block else 1
def aggregate_size(self, block, ep):
ag_count = self.aggregate_sample_count(block)
ag_size = 0
for ch in range(1, ag_count + 1) :
if (block.ks_block.k_sampling[ch - 1] != 0) :
ag_size += self.sample_size(ep, ch)
return ag_size
def n_samples(self, ep, ch):
block = self.episode_block(ep)
if not block.ep_block.continuous :
return block.ep_block.nbpt / self.sub_sampling(ep, ch)
else :
# for continuous case there isn't any place
# in the file that contains the number of
# samples unlike the episode case ...
data_blocks = self.get_data_blocks(ep)
total_size = np.sum([k.size for k in data_blocks])
# count the number of samples in an
# aggregate and compute its size in order
# to determine the size of an aggregate
ag_count = self.aggregate_sample_count(block)
ag_size = self.aggregate_size(block, ep)
n_ag = total_size / ag_size
# the number of samples is equal
# to the number of aggregates ...
n_samples = n_ag
n_chunks = total_size % ag_size
# ... but not when there exists
# an incomplete aggregate at the
# end of the file, consequently
# the preceding computed number
# of samples must be incremented
# by one only if the channel map
# to a sample in the last aggregate
# ... maybe this last part should be
# deleted because the n_chunks is always
# null in continuous mode
if n_chunks :
last_ag_size = total_size - n_ag * ag_size
size = 0
for i in range(0, ch) :
size += self.sample_size(ep, i + 1)
if size <= last_ag_size :
n_samples += 1
return n_samples
def sample_type(self, ep, ch):
block = self.episode_block(ep)
return block.kt_block.k_types[ch - 1] if block.kt_block else block.ep_block.tpData
def sampling_period(self, ep, ch):
block = self.episode_block(ep)
return block.ep_block.dX * self.sub_sampling(ep, ch)
def x_tag_scale_factors(self, ep):
block = self.episode_block(ep)
return ElphyScaleFactor(
block.ep_block.dX,
block.ep_block.X0
)
def x_scale_factors(self, ep, ch):
block = self.episode_block(ep)
return ElphyScaleFactor(
block.ep_block.dX * block.ks_block.k_sampling[ch - 1],
block.ep_block.X0,
)
def y_scale_factors(self, ep, ch):
block = self.episode_block(ep)
return ElphyScaleFactor(
block.ch_block.dY_ar[ch - 1],
block.ch_block.Y0_ar[ch - 1]
)
def x_unit(self, ep, ch):
block = self.episode_block(ep)
return block.ep_block.x_unit
def y_unit(self, ep, ch):
block = self.episode_block(ep)
return block.ch_block.y_units[ch - 1]
def tag_mode(self, ep):
block = self.episode_block(ep)
return block.ep_block.tag_mode
def tag_shift(self, ep):
block = self.episode_block(ep)
return block.ep_block.tag_shift
def get_channel_for_tags(self, ep):
block = self.episode_block(ep)
tag_mode = self.tag_mode(ep)
if tag_mode == 1 :
ks = np.array(block.ks_block.k_sampling)
mins = np.where(ks == ks.min())[0] + 1
return mins[0]
elif tag_mode == 2 :
return block.ep_block.n_channels
else :
return None
def aggregate_sample_count(self, block):
"""
Return the number of samples in an aggregate.
"""
# compute the least common multiple
# for channels having block.ks_block.k_sampling[ch] > 0
lcm0 = 1
for i in range(0, block.ep_block.n_channels) :
if block.ks_block.k_sampling[i] > 0 :
lcm0 = least_common_multiple(lcm0, block.ks_block.k_sampling[i])
# sum quotients lcm / KSampling
count = 0
for i in range(0, block.ep_block.n_channels) :
if block.ks_block.k_sampling[i] > 0 :
count += lcm0 / block.ks_block.k_sampling[i]
return count
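# Worked example (hypothetical sampling factors) : with
# k_sampling = [1, 2, 0] the least common multiple over the non-zero
# factors is 2 and an aggregate holds 2/1 + 2/2 = 3 samples ; the third
# channel is an event channel and does not contribute.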
def create_channel_mask(self, ep):
"""
Return the minimal pattern of channel numbers
representing the succession of channels in the
multiplexed data. It is useful to do the mapping
between a sample stored in the file and its relative
channel.
NB : This function has been converted from the
'TseqBlock.BuildMask' method of the file 'ElphyFormat.pas'
stored in Elphy source code.
"""
block = self.episode_block(ep)
ag_count = self.aggregate_sample_count(block)
mask_ar = np.zeros(ag_count, dtype='i')
ag_size = 0
i = 0
k = 0
while k < ag_count :
for j in range(0, block.ep_block.n_channels) :
if (block.ks_block.k_sampling[j] != 0) and (i % block.ks_block.k_sampling[j] == 0) :
mask_ar[k] = j + 1
ag_size += self.sample_size(ep, j + 1)
k += 1
if k >= ag_count :
break
i += 1
return mask_ar
def get_signal(self, episode, channel):
block = self.episode_block(episode)
k_sampling = np.array(block.ks_block.k_sampling)
evt_channels = np.where(k_sampling == 0)[0]
if channel not in evt_channels :
return super(DAC2Layout, self).get_signal(episode, channel)
else :
k_sampling[channel - 1] = -1
return self.get_event(episode, channel, k_sampling)
def get_tag(self, episode, tag_channel):
"""
Return an :class:`ElphyTag` which is a
descriptor of the specified tag channel.
"""
assert episode in range(1, self.n_episodes + 1)
# there are none, 2 or 16 tag
# channels depending on tag_mode
tag_mode = self.tag_mode(episode)
if tag_mode :
block = self.episode_block(episode)
x_unit = block.ep_block.x_unit
# verify the validity of the tag channel
if tag_mode == 1 :
assert tag_channel in range(1, 3), "Elphy format supports only 2 tag channels for tag_mode == 1"
elif tag_mode == 2 :
assert tag_channel in range(1, 17), "Elphy format supports only 16 tag channels for tag_mode == 2"
elif tag_mode == 3 :
assert tag_channel in range(1, 17), "Elphy format supports only 16 tag channels for tag_mode == 3"
smp_period = block.ep_block.dX
smp_freq = 1.0 / smp_period
if tag_mode != 3 :
ch = self.get_channel_for_tags(episode)
n_samples = self.n_samples(episode, ch)
t_stop = (n_samples - 1) * smp_freq
else :
# take the maximum of n_samples multiplied by the
# sampling period over all analog channels, to avoid
# selecting a channel that carries no concrete signal
t_max = list()
for ch in self.analog_index(episode) :
n_samples = self.n_samples(episode, ch)
factors = self.x_scale_factors(episode, ch)
chtime = n_samples * factors.delta
t_max.append(chtime)
time_max = max(t_max)
# since t_stop = (n_samples_tag - 1) * dX_tag
# and time_max = n_samples_tag * dX_tag,
# the duration follows as
t_stop = time_max - smp_period
return ElphyTag(self, episode, tag_channel, x_unit, smp_freq, 0, t_stop)
else :
return None
def get_event(self, ep, ch, marked_ks):
"""
Return a :class:`ElphyEvent` which is a
descriptor of the specified event channel.
"""
assert ep in range(1, self.n_episodes + 1)
assert ch in range(1, self.n_channels + 1)
# find the event channel number
evt_channel = np.where(marked_ks == -1)[0][0]
assert evt_channel in range(1, self.n_events(ep) + 1)
block = self.episode_block(ep)
ep_blocks = self.get_blocks_stored_in_episode(ep)
evt_blocks = [k for k in ep_blocks if k.identifier == 'REVT']
n_events = np.sum([k.n_events[evt_channel - 1] for k in evt_blocks], dtype=int)
x_unit = block.ep_block.x_unit
return ElphyEvent(self, ep, evt_channel, x_unit, n_events, ch_number=ch)
def load_encoded_events(self, episode, evt_channel, identifier):
"""
Return times stored as 4-byte integers
in the specified event channel.
"""
data_blocks = self.group_blocks_of_type(episode, identifier)
ep_blocks = self.get_blocks_stored_in_episode(episode)
evt_blocks = [k for k in ep_blocks if k.identifier == identifier]
#compute events on each channel
n_events = np.sum([k.n_events for k in evt_blocks], dtype=int, axis=0)
pre_events = np.sum(n_events[0:evt_channel - 1], dtype=int)
start = pre_events
end = start + n_events[evt_channel - 1]
expected_size = 4 * np.sum(n_events, dtype=int)
return self.load_bytes(data_blocks, dtype='<i4', start=start, end=end, expected_size=expected_size)
def load_encoded_spikes(self, episode, evt_channel, identifier):
"""
Return times stored as 4-byte integers
in the specified spike channel.
NB: it is meant for Blackrock-type data, which carry an additional byte for each event time as a spike sorting label.
These additional bytes are appended after the times.
"""
# to load the requested spikes for the specified episode and event channel:
# get all the elphy blocks having as identifier 'RSPK' (or whatever)
all_rspk_blocks = [k for k in self.blocks if k.identifier == identifier]
rspk_block = all_rspk_blocks[episode-1]
# RDATA(h?dI) REVT(NbVeV:I, NbEv:256I ... spike data are 4byte integers
rspk_header = 4*( rspk_block.size - rspk_block.data_size-2 + len(rspk_block.n_events))
pre_events = np.sum(rspk_block.n_events[0:evt_channel-1], dtype=int, axis=0)
# the real start is after the header, the preceding events (4 bytes each) and the preceding labels (1 byte each)
start = rspk_header + (4*pre_events) + pre_events
end = start + 4*rspk_block.n_events[evt_channel-1]
raw = self.load_bytes( [rspk_block], dtype='<i1', start=start, end=end, expected_size=rspk_block.size )
# re-encoding after reading byte by byte
res = np.frombuffer(raw[0:(4*rspk_block.n_events[evt_channel-1])], dtype='<i4')
res.sort() # sometimes timings are not sorted
#print "load_encoded_data() - spikes:",res
return res
def get_episode_name( self, episode ):
episode_name = "episode %s" % episode
names = [k for k in self.blocks if k.identifier == 'COM']
if len(names) > 0 :
name = names[episode-1]
start = name.size+1 - name.data_size+1
end = name.end - name.start+1
chars = self.load_bytes([name], dtype='uint8', start=start, end=end, expected_size=name.size ).tolist()
#print "chars[%s:%s]: %s" % (start,end,chars)
episode_name = ''.join([chr(k) for k in chars])
return episode_name
def get_event_data(self, episode, evt_channel):
"""
Return times contained in the specified event channel.
This function is triggered when the 'times' property of
an :class:`ElphyEvent` descriptor instance is accessed.
"""
times = self.load_encoded_events(episode, evt_channel, "REVT")
block = self.episode_block(episode)
return times * block.ep_block.dX / len(block.ks_block.k_sampling)
def get_spiketrain(self, episode, electrode_id):
"""
Return a :class:`Spike` which is a
descriptor of the specified spike channel.
"""
assert episode in range(1, self.n_episodes + 1)
assert electrode_id in range(1, self.n_spiketrains(episode) + 1)
# get some properties stored in the episode sub-block
block = self.episode_block(episode)
x_unit = block.ep_block.x_unit
x_unit_wf = getattr(block.ep_block, 'x_unit_wf', None)
y_unit_wf = getattr(block.ep_block, 'y_unit_wf', None)
# number of spikes in the entire episode
spk_blocks = [k for k in self.blocks if k.identifier == 'RSPK']
n_events = np.sum([k.n_events[electrode_id - 1] for k in spk_blocks], dtype=int)
# number of samples in a waveform
wf_sampling_frequency = 1.0 / block.ep_block.dX
wf_blocks = [k for k in self.blocks if k.identifier == 'RspkWave']
if wf_blocks :
wf_samples = wf_blocks[0].wavelength
t_start = wf_blocks[0].pre_trigger * block.ep_block.dX
else:
wf_samples = 0
t_start = 0
return ElphySpikeTrain(self, episode, electrode_id, x_unit, n_events, wf_sampling_frequency, wf_samples, x_unit_wf, y_unit_wf, t_start)
def get_spiketrain_data(self, episode, electrode_id):
"""
Return times contained in the specified spike channel.
This function is triggered when the 'times' property of
an :class:`Spike` descriptor instance is accessed.
NB : The 'RSPK' block is not actually identical to the 'EVT' one,
because the unit relative to each time is stored directly after all
event times, 1 byte for each. This function doesn't return these
units, but they can be retrieved from the 'RspkWave' block with
the 'get_waveform_data' function.
"""
block = self.episode_block(episode)
times = self.load_encoded_spikes(episode, electrode_id, "RSPK")
return times * block.ep_block.dX
def load_encoded_waveforms(self, episode, electrode_id):
"""
Return times on which waveforms are defined
and a numpy recarray containing all the data
stored in the RspkWave block.
"""
# load data corresponding to the RspkWave block
identifier = "RspkWave"
data_blocks = self.group_blocks_of_type(episode, identifier)
databytes = self.load_bytes(data_blocks)
# select only data corresponding
# to the specified spk_channel
ep_blocks = self.get_blocks_stored_in_episode(episode)
wf_blocks = [k for k in ep_blocks if k.identifier == identifier]
wf_samples = wf_blocks[0].wavelength
events = np.sum([k.n_spikes for k in wf_blocks], dtype=int, axis=0)
n_events = events[electrode_id - 1]
pre_events = np.sum(events[0:electrode_id - 1], dtype=int)
start = pre_events
end = start + n_events
# data must be reshaped before
dtype = [
# the time of the spike arrival
('elphy_time', 'u4', (1,)),
('device_time', 'u4', (1,)),
# the identifier of the electrode;
# it would also carry the 'trodalness'
# but tetrode devices are not
# implemented in Elphy
('channel_id', 'u2', (1,)),
# the 'category' of the waveform
('unit_id', 'u1', (1,)),
# not used
('dummy', 'u1', (13,)),
# samples of the waveform
('waveform', 'i2', (wf_samples,))
]
x_start = wf_blocks[0].pre_trigger
x_stop = wf_samples - x_start
return np.arange(-x_start, x_stop), np.frombuffer(databytes, dtype=dtype)[start:end]
def get_waveform_data(self, episode, electrode_id):
"""
Return waveforms corresponding to the specified
spike channel. This function is triggered when the
``waveforms`` property of an :class:`Spike` descriptor
instance is accessed.
"""
block = self.episode_block(episode)
times, databytes = self.load_encoded_waveforms(episode, electrode_id)
n_events, = databytes.shape
wf_samples = databytes['waveform'].shape[1]
dtype = [
('time', float),
('electrode_id', int),
('unit_id', int),
('waveform', float, (wf_samples, 2))
]
data = np.empty(n_events, dtype=dtype)
data['electrode_id'] = databytes['channel_id'][:, 0]
data['unit_id'] = databytes['unit_id'][:, 0]
data['time'] = databytes['elphy_time'][:, 0] * block.ep_block.dX
data['waveform'][:, :, 0] = times * block.ep_block.dX
data['waveform'][:, :, 1] = databytes['waveform'] * block.ep_block.dY_wf + block.ep_block.Y0_wf
return data
def get_rspk_data(self, spk_channel):
"""
Return times stored as 4-byte integers
in the specified spike channel.
"""
evt_blocks = self.get_blocks_of_type('RSPK')
#compute events on each channel
n_events = np.sum([k.n_events for k in evt_blocks], dtype=int, axis=0)
pre_events = np.sum(n_events[0:spk_channel], dtype=int) # sum of array values up to spk_channel-1!!!!
start = pre_events + (7 + len(n_events))# rspk header
end = start + n_events[spk_channel]
expected_size = 4 * np.sum(n_events, dtype=int) # constant
return self.load_bytes(evt_blocks, dtype='<i4', start=start, end=end, expected_size=expected_size)
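# --- Illustrative sketch (hedged, not used by the reader above) ---
# It shows, on a hypothetical configuration, the multiplexing pattern that
# aggregate_sample_count() and create_channel_mask() derive from per-channel
# sub-sampling factors: with k_sampling = [1, 2, 2] one aggregate spans
# lcm(1, 2, 2) = 2 base periods and holds 2/1 + 2/2 + 2/2 = 4 samples,
# multiplexed as channels [1, 2, 3, 1].
def _toy_channel_mask(k_sampling):
    def _gcd(a, b):
        while b:
            a, b = b, a % b
        return a
    lcm = 1
    for k in k_sampling:
        if k > 0:
            lcm = lcm * k // _gcd(lcm, k)
    # one aggregate holds sum(lcm // k) samples over the channels with k > 0
    n_per_aggregate = sum(lcm // k for k in k_sampling if k > 0)
    mask = []
    i = 0
    while len(mask) < n_per_aggregate:
        for ch, k in enumerate(k_sampling, start=1):
            if k != 0 and i % k == 0:
                mask.append(ch)
        i += 1
    return mask
# _toy_channel_mask([1, 2, 2]) -> [1, 2, 3, 1]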
# ---------------------------------------------------------
# factories.py
class LayoutFactory(object):
"""
Generate base elements composing the layout of a file.
"""
def __init__(self, elphy_file):
self.elphy_file = elphy_file
self.pattern = r"\d{4}(\d+|\D)\D"
self.block_subclasses = dict()
@property
def file(self):
return self.elphy_file.file
def create_layout(self):
"""
Return the actual :class:`ElphyLayout` subclass
instance used in an :class:`ElphyFile` object.
"""
raise Exception('must be overloaded in a subclass')
def create_header(self, layout):
"""
Return the actual :class:`Header` instance used
in an :class:`ElphyLayout` subclass object.
"""
raise Exception('must be overloaded in a subclass')
def create_block(self, layout):
"""
Return a :class:`Block` instance composing
the :class:`ElphyLayout` subclass instance.
"""
raise Exception('must be overloaded in a subclass')
def create_sub_block(self, block, sub_offset):
"""
Return a set of sub-blocks stored
in DAC2 objects format files.
"""
self.file.seek(sub_offset)
sub_ident_size = read_from_char(self.file, 'B')
sub_identifier, = struct.unpack('<%ss' % sub_ident_size, self.file.read(sub_ident_size))
sub_data_size = read_from_char(self.file, 'H')
sub_data_offset = sub_offset + sub_ident_size + 3
size_format = "H"
if sub_data_size == 0xFFFF :
_ch = 'l'
sub_data_size = read_from_char(self.file, _ch)
size_format += "+%s" % (_ch)
sub_data_offset += 4
sub_size = len(sub_identifier) + 1 + type_dict[size_format] + sub_data_size
if sub_identifier == 'Ep' :
block_type = DAC2EpSubBlock
elif sub_identifier == 'Adc' :
block_type = DAC2AdcSubBlock
elif sub_identifier == 'Ksamp' :
block_type = DAC2KSampSubBlock
elif sub_identifier == 'Ktype' :
block_type = DAC2KTypeSubBlock
elif sub_identifier == 'USR' :
block_type = self.select_file_info_subclass()
else :
block_type = ElphyBlock
block = block_type(block.layout, sub_identifier, sub_offset, sub_size, size_format=size_format, parent_block=block)
self.file.seek(self.file.tell() + sub_data_size)
return block
def create_episode(self, block):
raise Exception('must be overloaded in a subclass')
def create_channel(self, block):
raise Exception('must be overloaded in a subclass')
def is_multistim(self, path):
"""
Return a boolean telling if the
specified file is a multistim one.
"""
match = re.search(self.pattern, path)
return hasattr(match, 'end') and path[match.end() - 1] in ['m', 'M']
def select_file_info_subclass(self):
"""
Detect the type of a file from its nomenclature
and return the corresponding :class:`ClassicFileInfo` or
:class:`MultistimFileInfo` class. Useful to transparently
access user file info stored in an Elphy file.
"""
if not self.is_multistim(self.file.name) :
return ClassicFileInfo
else :
return MultistimFileInfo
def select_block_subclass(self, identifier):
return self.block_subclasses.get(identifier, ElphyBlock)
class Acquis1Factory(LayoutFactory):
"""
Subclass of :class:`LayoutFactory` useful to
generate base elements composing the layout
of Acquis1 file format.
"""
def __init__(self, elphy_file):
super(Acquis1Factory, self).__init__(elphy_file)
self.file.seek(16)
self.data_offset = read_from_char(self.file, 'h')
self.file.seek(0)
# the set of interesting blocks useful
# to retrieve data stored in a file
self.block_subclasses = {
"USER INFO" : self.select_file_info_subclass()
}
def create_layout(self):
return Acquis1Layout(self.elphy_file, self.data_offset)
def create_header(self, layout):
return Acquis1Header(layout)
def create_block(self, layout, offset):
self.file.seek(offset)
ident_size, identifier = struct.unpack('<B15s', self.file.read(16))
identifier = identifier[0:ident_size]
size = read_from_char(self.file, 'h')
block_type = self.select_block_subclass(identifier)
block = block_type(layout, identifier, offset, size, fixed_length=15, size_format='h')
self.file.seek(0)
return block
class DAC2GSFactory(LayoutFactory):
"""
Subclass of :class:`LayoutFactory` useful to
generate base elements composing the layout
of DAC2/GS/2000 file format.
"""
def __init__(self, elphy_file):
super(DAC2GSFactory, self).__init__(elphy_file)
self.file.seek(16)
self.data_offset = read_from_char(self.file, 'i')
self.file.seek(0)
# the set of interesting blocks useful
# to retrieve data stored in a file
self.block_subclasses = {
"USER INFO" : self.select_file_info_subclass(),
"DAC2SEQ" : DAC2GSEpisodeBlock,
'MAIN' : DAC2GSMainBlock,
}
def create_layout(self):
return DAC2GSLayout(self.elphy_file, self.data_offset)
def create_header(self, layout):
return DAC2GSHeader(layout)
def create_block(self, layout, offset):
self.file.seek(offset)
ident_size, identifier = struct.unpack('<B15s', self.file.read(16))
# block title size is 7 or 15 bytes
# 7 is for sequence blocks
if identifier.startswith('DAC2SEQ') :
self.file.seek(self.file.tell() - 8)
length = 7
else :
length = 15
identifier = identifier[0:ident_size]
size = read_from_char(self.file, 'i')
block_type = self.select_block_subclass(identifier)
block = block_type(layout, identifier, offset, size, fixed_length=length, size_format='i')
self.file.seek(0)
return block
class DAC2Factory(LayoutFactory):
"""
Subclass of :class:`LayoutFactory` useful to
generate base elements composing the layout
of DAC2 objects file format.
"""
def __init__(self, elphy_file):
super(DAC2Factory, self).__init__(elphy_file)
# the set of interesting blocks useful
# to retrieve data stored in a file
self.block_subclasses = {
"B_Ep" : DAC2EpisodeBlock,
"RDATA" : DAC2RDataBlock,
"RCyberTag" : DAC2CyberTagBlock,
"REVT" : DAC2EventBlock,
"RSPK" : DAC2SpikeBlock,
"RspkWave" : DAC2WaveFormBlock
}
def create_layout(self):
return DAC2Layout(self.elphy_file)
def create_header(self, layout):
return DAC2Header(layout)
def create_block(self, layout, offset):
self.file.seek(offset)
size = read_from_char(self.file, 'l')
ident_size = read_from_char(self.file, 'B')
identifier, = struct.unpack('<%ss' % ident_size, self.file.read(ident_size))
block_type = self.select_block_subclass(identifier)
block = block_type(layout, identifier, offset, size, size_format='l')
self.file.seek(0)
return block
#caching all available layout factories
factories = {
"ACQUIS1/GS/1991" : Acquis1Factory,
"DAC2/GS/2000" : DAC2GSFactory,
"DAC2 objects" : DAC2Factory
}
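# --- Illustrative sketch (hedged, not part of the module) ---
# DAC2Factory.create_block() above reads each top-level block as a 4-byte
# size ('l'), a one-byte identifier length ('B') and the identifier itself,
# and ElphyFile.create_layout() advances by block.size to reach the next
# block. Assuming the stored size covers the whole block (header included),
# a minimal standalone walker over a 'DAC2 objects' file opened in 'rb' mode
# could look like this; start_offset and end_offset are hypothetical bounds
# supplied by the caller (e.g. header size and end of the laid-out blocks).
import struct

def _walk_dac2_blocks(f, start_offset, end_offset):
    idents = []
    offset = start_offset
    while offset < end_offset:
        f.seek(offset)
        size, ident_size = struct.unpack('<lB', f.read(5))
        identifier, = struct.unpack('<%ds' % ident_size, f.read(ident_size))
        idents.append((identifier, size))
        offset += size
    return idents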
# --------------------------------------------------------
# ELPHY FILE
"""
Classes useful to retrieve data from the
three major Elphy formats, i.e : Acquis1, DAC2/GS/2000, DAC2 objects.
The :class:`ElphyFile` class is useful to access raw data and the user info
that stores protocol metadata. Internally, it uses an :class:`ElphyLayout`
subclass to handle each kind of file format : :class:`Acquis1Layout`,
:class:`DAC2GSLayout` and :class:`DAC2Layout`.
These layouts decompose the file structure into several blocks of data, inheriting
from :class:`BaseBlock`, corresponding for example to the header of the file,
the user info, the raw data, the episode or channel properties. Each subclass of
:class:`BaseBlock` maps to a file chunk and is responsible for storing the metadata
contained in that chunk. These metadata can also be used to reconstruct raw data.
Consequently, when a request is made to an :class:`ElphyLayout` by its owning
:class:`ElphyFile`, it iterates through its :class:`BaseBlock` objects to retrieve
the requested data.
NB : The reader is not able to read Acquis1 and DAC2/GS/2000 event channels.
"""
class ElphyFile(object):
"""
A convenient class useful to read Elphy files.
It acts like a file reader that wraps up a python
file opened in 'rb' mode in order to retrieve
directly from an Elphy file raw data and metadata
relative to protocols.
``path`` : the path of the elphy file.
``file`` : the python file object that iterates
through the elphy file.
``file_size`` : the size of the elphy file on the
hard disk drive.
``nomenclature`` : the label that identifies the
kind of elphy format, i.e. 'Acquis1', 'DAC2/GS/2000',
'DAC2 objects'.
``factory`` : the :class:`LayoutFactory` object which
generates the base component of the elphy file layout.
``layout`` : the :class:`ElphyLayout` object which
decomposes the file structure into several blocks of
data (:class:`BaseBlock` objects). The :class:`ElphyFile`
object makes requests to this layout, which iterates through
these blocks before returning the requested data.
``protocol`` : the acquisition protocol which has generated
the file.
``version`` : the variant of the acquisition protocol.
NB : An elphy file can store several kinds of data :
(1) 'User defined' metadata which are stored in a block
called 'USER INFO' ('Acquis1' and 'DAC2/GS/2000') or 'USR'
('DAC2 objects') of the ``layout``. They could be used for
example to describe stimulation parameters.
(2) Raw data acquired on separate analog channels. Data
coming from each channel are multiplexed in blocks dedicated
to raw data storage :
- For Acquis1 format, raw data are stored directly
after the file header.
- For DAC2/GS/2000, in continuous mode they are stored
after all blocks composing the file else they are stored
in a 'DAC2SEQ' block.
- For 'DAC2 objects' they are stored in 'RDATA' blocks.
In continuous mode raw data can be spread over multiple
'RDATA' blocks, whereas in episode mode there is a single
'RDATA' block for each episode.
These raw data are placed under the 'channels' node of a
TDataFile object listed in Elphy's "Inspect" tool.
(3) ElphyEvents dedicated to threshold detection in analog
channels. ElphyEvents are only available for 'DAC2 objects'
format. For 'Acquis1' and 'DAC2/GS/2000' these events are
in fact stored in another kind of file format called
'event' format with the '.evt' extension which is opened
by Elphy at the same time as the '.dat' file. This 'event'
format is not yet implemented because it seems that it
was not really used.
These events are also placed under the 'channels' node
of a TDataFile object in Elphy's "Inspect" tool.
(4) ElphyTags that appeared after 'DAC2/GS/2000' release. They
are also present in 'DAC2 objects' format. Each tag occupies
a channel called 'tag' channel. Their encoding depends on the
kind of acquisition card :
- For 'digidata' cards (``tag_mode``=1) and if tags are acquired,
they are directly encoded in 2 (digidata 1322) or 4 (digidata 1200)
significant bits of 16-bits samples coming from an analog channel.
In all cases they are only 2 bits encoding the tag channels. The
sample value could be encoded on 16, 14 or 12 bits and retrieved by
applying a shift equal to ``tag_shift`` to the right.
- For ITC cards (``tag_mode``=2), tags are transmitted by a channel
fully dedicated to 'tag channels' providing 16-bits samples. In this
case, each bit corresponds to a 'tag channel'.
- For Blackrock/Cyberkinetics devices (``tag_mode``=3), tags are also
transmitted by a channel fully dedicated to tags, but the difference is
that only transitions are stored in 'RCyberTag' blocks. This case is only
available in 'DAC2 objects' format.
These tags are placed under the 'Vtags' node of a TDataFile
object in Elphy's "Inspect" tool.
(5) Spiketrains coming from an electrode of a Blackrock/Cyberkinetics
multi-electrode device. These data are only available in 'DAC2 objects'
format.
These spiketrains are placed under the 'Vspk' node of a TDataFile
object in Elphy's "Inspect" tool.
(6) Waveforms relative to each time of a spiketrain. These data are only
available in 'DAC2 objects' format. These waveforms are placed under the
'Wspk' node of a TDataFile object in Elphy's "Inspect" tool.
"""
def __init__(self, file_path) :
self.path = file_path
self.folder, self.filename = path.split(self.path)
self.file = None
self.file_size = None
self.nomenclature = None
self.factory = None
self.layout = None
# writing support
self.header_size = None
def __del__(self):
"""
Trigger closing of the file.
"""
self.close()
# super(ElphyFile, self).__del__()
def open(self):
"""
Setup the internal structure.
NB : Call this function before
extracting data from a file.
"""
if self.file :
self.file.close()
try :
self.file = open(self.path, 'rb')
except Exception as e:
raise Exception("python couldn't open file %s : %s" % (self.path, e))
self.file_size = path.getsize(self.file.name)
self.creation_date = datetime.fromtimestamp(path.getctime(self.file.name))
self.modification_date = datetime.fromtimestamp(path.getmtime(self.file.name))
self.nomenclature = self.get_nomenclature()
self.factory = self.get_factory()
self.layout = self.create_layout()
def close(self):
"""
Close the file.
"""
if self.file :
self.file.close()
def get_nomenclature(self):
"""
Return the title of the file header
giving the actual file format. This
title is encoded as a pascal string
containing 15 characters and stored
as 16 bytes of binary data.
"""
self.file.seek(0)
length, title = struct.unpack('<B15s', self.file.read(16))
self.file.seek(0)
title = title[0:length]
if not title in factories :
title = "format is not implemented ('%s' not in %s)" % (title, str(factories.keys()))
return title
def set_nomenclature(self):
"""
As in get_nomenclature, but set the title of the file header
in the file, encoded as a pascal string containing
15 characters and stored as 16 bytes of binary data.
"""
self.file.seek(0)
title = 'DAC2 objects'
st = struct.Struct( '<B15sH' )
header_rec = [len(title), title, 18] # constant header
header_chr = st.pack( *header_rec )
self.header_size = len( header_chr )
self.file.write( header_chr )
def get_factory(self):
"""
Return a subclass of :class:`LayoutFactory`
useful to build the file layout depending
on header title.
"""
return factories[self.nomenclature](self)
def write(self, data):
"""
Assume the blocks are already filled.
It is able to write several types of block: B_Ep, RDATA, ...
and subBlock: Adc, Ksamp, Ktype, dataRecord, ...
In the following shape:
B_Ep
|_ Ep
|_ Adc
|_ Adc
|_ ...
|_ Ktype
RDATA
|_ dataRecord+data
"""
# close if open and reopen for writing
if self.file :
self.file.close()
try :
self.file = open(self.path, 'wb')
except Exception as e:
raise Exception("python couldn't open file %s : %s" % (self.path, e))
self.file_size = 0
self.creation_date = datetime.now()
self.modification_date = datetime.now()
self.set_nomenclature()
# then call ElphyFile writing routines to write the serialized string
self.file.write( data ) # actual writing
# close file
self.close()
def create_layout(self):
"""
Build the :class:`Layout` object corresponding
to the file format and configure properties of
itself and then its blocks and sub-blocks.
NB : this function must be called before any kind
of request on the file because it also sets up
the internal properties of the :class:`ElphyLayout`
object and of some :class:`BaseBlock` objects. Consequently,
executing a function corresponding to a request on
the file before calling it is likely to give bad results.
"""
# create the layout
layout = self.factory.create_layout()
# create the header block and
# add it to the list of blocks
header = self.factory.create_header(layout)
layout.add_block(header)
# set the position of the cursor
# in order to be after the header
# block and then compute its last
# valid position to know when stop
# the iteration through the file
offset = header.size
offset_stop = layout.get_blocks_end()
# in continuous mode DAC2/GS/2000 raw data are not stored
# into several DAC2SEQ blocks, they are stored after all
# available blocks, that's why it is necessary to limit the
# loop to data_offset when it is a DAC2/GS/2000 format
is_continuous = False
detect_continuous = False
detect_main = False
while (offset < offset_stop) and not (is_continuous and (offset >= layout.data_offset)) :
block = self.factory.create_block(layout, offset)
# create the sub blocks if it is DAC2 objects format
# this is only done for B_Ep and B_Finfo blocks for
# DAC2 objects format, maybe it could be useful to
# spread this to other block types.
#if isinstance(header, DAC2Header) and (block.identifier in ['B_Ep']) :
if isinstance(header, DAC2Header) and (block.identifier in ['B_Ep', 'B_Finfo']) :
sub_offset = block.data_offset
while sub_offset < block.start + block.size :
sub_block = self.factory.create_sub_block(block, sub_offset)
block.add_sub_block(sub_block)
sub_offset += sub_block.size
# set up some properties of some DAC2Layout sub-blocks
if isinstance(sub_block, (DAC2EpSubBlock, DAC2AdcSubBlock, DAC2KSampSubBlock, DAC2KTypeSubBlock)) :
block.set_episode_block()
block.set_channel_block()
block.set_sub_sampling_block()
block.set_sample_size_block()
# SpikeTrain
#if isinstance(header, DAC2Header) and (block.identifier in ['RSPK']) :
#print "\nElphyFile.create_layout() - RSPK"
#print "ElphyFile.create_layout() - n_events",block.n_events
#print "ElphyFile.create_layout() - n_evt_channels",block.n_evt_channels
layout.add_block(block)
offset += block.size
# set up as soon as possible the shortcut
# to the main block of a DAC2GSLayout
if not detect_main and isinstance(layout, DAC2GSLayout) and isinstance(block, DAC2GSMainBlock) :
layout.set_main_block()
detect_main = True
# detect if the file is continuous when
# the 'MAIN' block has been parsed
if not detect_continuous :
is_continuous = isinstance(header, DAC2GSHeader) and layout.is_continuous()
# set up the shortcut to blocks corresponding
# to episodes, only available for DAC2Layout
# and also DAC2GSLayout if not continuous
if isinstance(layout, DAC2Layout) or (isinstance(layout, DAC2GSLayout) and not layout.is_continuous()) :
layout.set_episode_blocks()
layout.set_data_blocks()
# finally set up the user info block of the layout
layout.set_info_block()
self.file.seek(0)
return layout
def is_continuous(self):
return self.layout.is_continuous()
@property
def n_episodes(self):
"""
Return the number of recording sequences.
"""
return self.layout.n_episodes
def n_channels(self, episode):
"""
Return the number of recording
channels involved in data acquisition
and relative to the specified episode :
``episode`` : the recording sequence identifier.
"""
return self.layout.n_channels(episode)
def n_tags(self, episode):
"""
Return the number of tag channels
relative to the specified episode :
``episode`` : the recording sequence identifier.
"""
return self.layout.n_tags(episode)
def n_events(self, episode):
"""
Return the number of event channels
relative to the specified episode :
``episode`` : the recording sequence identifier.
"""
return self.layout.n_events(episode)
def n_spiketrains(self, episode):
"""
Return the number of spiketrain channels
relative to the specified episode :
``episode`` : the recording sequence identifier.
"""
return self.layout.n_spiketrains(episode)
def n_waveforms(self, episode):
"""
Return the number of waveform channels relative to the specified episode.
"""
return self.layout.n_waveforms(episode)
def get_signal(self, episode, channel):
"""
Return the signal or event descriptor relative
to the specified episode and channel :
``episode`` : the recording sequence identifier.
``channel`` : the analog channel identifier.
NB : For 'DAC2 objects' format, it could
be also used to retrieve events.
"""
return self.layout.get_signal(episode, channel)
def get_tag(self, episode, tag_channel):
"""
Return the tag descriptor relative to
the specified episode and tag channel :
``episode`` : the recording sequence identifier.
``tag_channel`` : the tag channel identifier.
NB : There isn't any tag channels for
'Acquis1' format. ElphyTag channels appeared
after 'DAC2/GS/2000' release. They are
also present in 'DAC2 objects' format.
"""
return self.layout.get_tag(episode, tag_channel)
def get_event(self, episode, evt_channel):
"""
Return the event relative to the
specified episode and event channel.
``episode`` : the recording sequence identifier.
``evt_channel`` : the event channel identifier.
"""
return self.layout.get_event(episode, evt_channel)
def get_spiketrain(self, episode, electrode_id):
"""
Return the spiketrain relative to the
specified episode and electrode_id.
``episode`` : the recording sequence identifier.
``electrode_id`` : the identifier of the electrode providing the spiketrain.
NB : Available only for 'DAC2 objects' format.
This descriptor can return the times of a spiketrain
and waveforms relative to each of these times.
"""
return self.layout.get_spiketrain(episode, electrode_id)
@property
def comments(self):
raise NotImplementedError()
def get_user_file_info(self):
"""
Return user defined file metadata.
"""
if not self.layout.info_block :
return dict()
else :
return self.layout.info_block.get_user_file_info()
@property
def episode_info(self, ep_number):
raise NotImplementedError()
def get_signals(self):
"""
Get all available analog or event channels stored into an Elphy file.
"""
signals = list()
for ep in range(1, self.n_episodes + 1) :
for ch in range(1, self.n_channels(ep) + 1) :
signal = self.get_signal(ep, ch)
signals.append(signal)
return signals
def get_tags(self):
"""
Get all available tag channels stored into an Elphy file.
"""
tags = list()
for ep in range(1, self.n_episodes + 1) :
for tg in range(1, self.n_tags(ep) + 1) :
tag = self.get_tag(ep, tg)
tags.append(tag)
return tags
def get_spiketrains(self):
"""
Get all available spiketrains stored into an Elphy file.
"""
spiketrains = list()
for ep in range(1, self.n_episodes + 1) :
for ch in range(1, self.n_spiketrains(ep) + 1) :
spiketrain = self.get_spiketrain(ep, ch)
spiketrains.append(spiketrain)
return spiketrains
def get_rspk_spiketrains(self):
"""
Get all available spiketrains stored into an Elphy file.
"""
spiketrains = list()
spk_blocks = self.layout.get_blocks_of_type('RSPK')
for bl in spk_blocks :
#print "ElphyFile.get_spiketrains() - identifier:",bl.identifier
for ch in range(0,bl.n_evt_channels) :
spiketrain = self.layout.get_rspk_data(ch)
spiketrains.append(spiketrain)
return spiketrains
def get_names( self ) :
com_blocks = list()
com_blocks = self.layout.get_blocks_of_type('COM')
return com_blocks
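# --- Hedged sketch (illustrative only, not used by ElphyFile) ---
# It unpacks the two tag encodings described in the class docstring from raw
# 16-bit samples. 'samples' is a hypothetical numpy integer array; tag_shift
# and tag_channel have the meanings given above. Treating the tag bits as the
# lowest tag_shift bits is an assumption implied by the right shift that the
# docstring says recovers the analog value.
import numpy as np

def _split_digidata_sample(samples, tag_shift):
    # tag_mode == 1 : tags sit in the least significant bits of each analog
    # sample; the analog value itself is recovered by shifting to the right
    analog = np.right_shift(samples, tag_shift)
    tag_bits = samples & ((1 << tag_shift) - 1)
    return analog, tag_bits

def _itc_tag_state(samples, tag_channel):
    # tag_mode == 2 : a channel fully dedicated to tags provides 16-bit
    # samples in which each bit is one tag channel (tag_channel is 1-based)
    return (samples >> (tag_channel - 1)) & 1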
# --------------------------------------------------------
class ElphyIO(BaseIO):
"""
Class for reading from and writing to an Elphy file.
It enables reading:
- :class:`Block`
- :class:`Segment`
- :class:`RecordingChannel`
- :class:`ChannelIndex`
- :class:`Event`
- :class:`SpikeTrain`
Usage:
>>> from neo import io
>>> r = io.ElphyIO(filename='ElphyExample.DAT')
>>> seg = r.read_block(lazy=False, cascade=True)
>>> print(seg.analogsignals) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
>>> print(seg.spiketrains) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
>>> print(seg.events) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
>>> print(anasig._data_description)
>>> anasig = r.read_analogsignal(lazy=False, cascade=False)
>>> bl = Block()
>>> # creating segments, their contents and append to bl
>>> r.write_block( bl )
"""
is_readable = True # This class can read data
is_writable = False # This class cannot write data
# This class is able to directly or indirectly handle the following objects
supported_objects = [ Block, Segment, AnalogSignal, SpikeTrain ]
# This class can return a Block
readable_objects = [ Block ]
# This class is not able to write objects
writeable_objects = [ ]
has_header = False
is_streameable = False
# This is for GUI stuff : a definition for parameters when reading.
# This dict should be keyed by object (`Block`). Each entry is a list
# of tuple. The first entry in each tuple is the parameter name. The
# second entry is a dict with keys 'value' (for default value),
# and 'label' (for a descriptive name).
# Note that if the highest-level object requires parameters,
# common_io_test will be skipped.
read_params = {
}
# writing is not supported, so no GUI stuff
write_params = {
}
name = 'Elphy IO'
extensions = [ 'DAT' ]
# mode can be 'file' or 'dir' or 'fake' or 'database'
mode = 'file'
# internal serialized representation of neo data
serialized = None
def __init__(self , filename = None) :
"""
Arguments:
filename : the filename to read
"""
BaseIO.__init__(self)
self.filename = filename
self.elphy_file = ElphyFile(self.filename)
def read_block(self,
# the 2 first key arguments are imposed by neo.io API
lazy = False,
cascade = True
):
"""
Return :class:`Block` filled or not depending on 'cascade' parameter.
Parameters:
lazy : postpone actual reading of the file.
cascade : normally you want this True, otherwise the method will only read the Block label.
"""
# basic
block = Block(name=None)
# laziness
if lazy:
return block
else:
# get analog and tag channels
try :
self.elphy_file.open()
except Exception as e:
self.elphy_file.close()
raise Exception("cannot open file %s : %s" % (self.filename, e))
# cascading
#print "\n\n==========================================\n"
#print "read_block() - n_episodes:",self.elphy_file.n_episodes
if cascade:
# create a segment containing all analog,
# tag and event channels for the episode
if self.elphy_file.n_episodes is None :
print("File '%s' appears to have no episodes" % (self.filename))
return block
for episode in range(1, self.elphy_file.n_episodes+1) :
segment = self.read_segment(episode)
segment.block = block
block.segments.append(segment)
# close file
self.elphy_file.close()
# result
return block
def write_block( self, block ):
"""
Write a given Neo Block to an Elphy file, its structure being, for example:
Neo -> Elphy
--------------------------------------------------------------
Block File
Segment Episode Block (B_Ep)
AnalogSignalArray Episode Descriptor (Ep + Adc + Ksamp + Ktype)
multichannel RDATA (with a ChannelMask multiplexing channels)
2D NumPy Array
...
AnalogSignalArray
AnalogSignal
AnalogSignal
...
...
SpikeTrain Event Block (RSPK)
SpikeTrain
...
Arguments::
block: the block to be saved
"""
# Serialize Neo structure into Elphy file
# each analog signal will be serialized as elphy Episode Block (with its subblocks)
# then all spiketrains will be serialized into an Rspk Block (an Event Block with addons).
# Serialize (and size) all Neo structures before writing them to file
# Since writing each Elphy Block requires knowing its size in advance,
# which includes that of its subblocks, the lowest-level
# structures must be serialized first.
# Iterate over block structures
elphy_limit = 256
All = ''
#print "\n\n--------------------------------------------\n"
#print "write_block() - n_segments:",len(block.segments)
for seg in block.segments:
analogsignals = 0 # init
nbchan = 0
nbpt = 0
chls = 0
Dxu = 1e-8 #0.0000001
Rxu = 1e+8 #10000000.0
X0uSpk = 0.0
CyberTime = 0.0
aa_units = []
NbEv = []
serialized_analog_data = ''
serialized_spike_data = ''
# AnalogSignals
# Neo signalarrays are 2D numpy arrays where each row is an array of samples for a channel:
# signalarray A = [[ 1, 2, 3, 4 ],
# [ 5, 6, 7, 8 ]]
# signalarray B = [[ 9, 10, 11, 12 ],
# [ 13, 14, 15, 16 ]]
# Neo Segments can have more than one signalarray.
# To be converted into Elphy analog channels they all need to be in a single 2D array, not in several 2D arrays.
# Concatenate all analogsignalarrays into one and then flatten it.
# Elphy RDATA blocks contain Fortran styled samples:
# 1, 5, 9, 13, 2, 6, 10, 14, 3, 7, 11, 15, 4, 8, 12, 16
# AnalogSignalArrays -> analogsignals
# get the first to have analogsignals with the right shape
# Annotations for an analogsignals array come as a list of ints holding source ids;
# here, put each source id in a separate dict entry so they can be matched afterwards
idx = 0
annotations = dict( )
# get all the others
#print "write_block() - n_analogsignals:",len(seg.analogsignals)
#print "write_block() - n_analogsignalarrays:",len(seg.analogsignalarrays)
for asigar in seg.analogsignalarrays :
idx,annotations = self.get_annotations_dict( annotations, "analogsignal", asigar.annotations.items(), asigar.name, idx )
# array structure
_,chls = asigar.shape
# units
for _ in range(chls) :
aa_units.append( asigar.units )
Dxu = asigar.sampling_period
Rxu = asigar.sampling_rate
if isinstance(analogsignals, np.ndarray) :
analogsignals = np.hstack( (analogsignals,asigar) )
else :
analogsignals = asigar # first time
# collect and reshape all analogsignals
if isinstance(analogsignals, np.ndarray) :
# transpose the matrix since in Neo channels are column-wise while in Elphy they are row-wise
analogsignals = analogsignals.T
# get dimensions
nbchan,nbpt = analogsignals.shape
# serialize AnalogSignal
analog_data_fmt = '<' + str(analogsignals.size) + 'f'
# serialize the flattened numpy channels in Fortran ('F') order
analog_data_64 = analogsignals.flatten('F')
# elphy normally uses float32 values (for performance reasons)
analog_data = np.array( analog_data_64, dtype=np.float32 )
serialized_analog_data += struct.pack( analog_data_fmt, *analog_data )
# SpikeTrains
# Neo spiketrains are stored as a one-dimensional array of times
# [ 0.11, 1.23, 2.34, 3.45, 4.56, 5.67, 6.78, 7.89 ... ]
# These are converted into Elphy Rspk Block which will contain all of them
# RDATA + NbVeV:integer for the number of channels (spiketrains)
# + NbEv:integer[] for the number of event per channel
# followed by the actual arrays of integer containing spike times
#spiketrains = seg.spiketrains
# ... but consider elphy loading limitation:
NbVeV = len( seg.spiketrains )
#print "write_block() - n_spiketrains:",NbVeV
if len(seg.spiketrains) > elphy_limit :
NbVeV = elphy_limit
# serialize format
spiketrain_data_fmt = '<'
spiketrains = []
for idx,train in enumerate(seg.spiketrains[:NbVeV]) :
#print "write_block() - train.size:", train.size,idx
#print "write_block() - train:", train
fake,annotations = self.get_annotations_dict( annotations,"spiketrain", train.annotations.items(), '', idx )
#annotations.update( dict( [("spiketrain-"+str(idx),train.annotations['source_id'])] ) )
#print "write_block() - train[%s].annotation['source_id']:%s" % (idx,train.annotations['source_id'])
# total number of events format + blackrock sorting mark (0 for neo)
spiketrain_data_fmt += str(train.size) + "i" + str(train.size) + "B"
# get starting time
X0uSpk = train.t_start.item()
CyberTime = train.t_stop.item()
# count number of events per train
NbEv.append( train.size )
# convert times to sample counts by multiplying by the sampling rate
train = train * Rxu
# all flattened spike train
# blackrock acquisition card also adds a byte for each event to sort it
spiketrains.extend( [spike.item() for spike in train] + [0 for _ in range(train.size)])
# Annotations
#print annotations
# using DBrecord elphy block, they will be available as values in elphy environment
# separate keys and values in two separate serialized strings
ST_sub = ''
st_fmt = ''
st_data = []
BUF_sub = ''
serialized_ST_data = ''
serialized_BUF_data = ''
for key in sorted(annotations.iterkeys()) :
# take all values, get their type and concatenate
fmt = ''
data = []
value = annotations[key]
if isinstance( value, (int,np.int32,np.int64) ) :
# elphy type 2
fmt = '<Bq'
data = [2, value]
elif type( value ) == str :
# elphy type 4
str_len = len(value)
fmt = '<BI'+str(str_len)+'s'
data = [4, str_len, value]
else :
print("ElphyIO.write_block() - unknown annotation type: %s" % type(value))
continue
# last, serialization
# BUF values
serialized_BUF_data += struct.pack( fmt, *data )
# ST values
# take each key and concatenate using 'crlf'
st_fmt += str(len(key))+'s2s'
st_data.extend( [ key, "\r\n" ] )
# ST keys
serialized_ST_data = struct.pack( st_fmt, *st_data )
# SpikeTrains
# serialized spike trains
serialized_spike_data += struct.pack( spiketrain_data_fmt, *spiketrains )
# ------------- Elphy Structures to be filled --------------
# 'Ep'
data_format = '<BiBB10sdd?BBddiddB10sB10sdI'
# setting values
uX = 'ms '
pc_time = datetime.now()
pc_time = pc_time.microsecond * 1000
data_values = [
nbchan, # nbchan : byte
nbpt, # nbpt : integer - nominal number of samples per channel
0, # tpData : byte - not used
10, # uX length
uX, # uX : string - time units
Dxu, # Dxu : double - sampling rate, scaling parameters on time axis
0.0, # X0u : double - starting, scaling parameters on time axis
False, # continuous : boolean
0, # TagMode : byte - 0: not a tag channel
0, # TagShift : byte
Dxu, # DxuSpk : double
X0uSpk, # X0uSpk : double
NbVeV, # nbSpk : integer
0.0, # DyuSpk : double
0.0, # Y0uSpk : double
10, # uX length
uX, # unitXSpk : string
10, # uX length
' ', # unitYSpk : string
CyberTime, # CyberTime : double
pc_time # PCtime : longword - time in milliseconds
]
Ep_chr = self.get_serialized( data_format, data_values )
Ep_sub = self.get_serialized_subblock( 'Ep', Ep_chr )
# 'Adc'
# Then, one or more (nbchan) Analog/Digital Channel will be, having their fixed data format
data_format = "<B10sdd"
# when Ep.tpdata is an integer type, Dyu and Y0u are parameters such that
# for an adc value j, the real value is y = Dyu*j + Y0u
Adc_chrl = ""
for dc in aa_units :
# create
Adc_chr = [] # init
Dyu,UnitY = '{}'.format(dc).split()
data_values = [
10, # size
UnitY+' ', # uY string : vertical units
float(Dyu), # Dyu double : scaling parameter
0.0 # Y0u double : scaling parameter
]
Adc_chr = self.get_serialized( data_format, data_values )
Adc_chrl += Adc_chr
Adc_sub = self.get_serialized_subblock( 'Adc', Adc_chrl )
#print "Adc size:",len(Adc_sub)
# 'Ksamp'
# subblock containing an array of nbchan bytes
# data_format = '<h...' # nbchan times Bytes
# data_values = [ 1, 1, ... ] # nbchan times 1
data_format = "<" + ("h" * nbchan)
data_values = [ 1 for _ in range(nbchan) ]
Ksamp_chr = self.get_serialized( data_format, data_values )
Ksamp_sub = self.get_serialized_subblock( 'Ksamp', Ksamp_chr )
#print "Ksamp size: %s" % (len(Ksamp_sub))
# 'Ktype'
# subblock containing an array of nbchan bytes
# data_format = '<B...' # nbchan times Bytes
# data_values = [ 2, ... ] # nbchan times ctype
# Possible values are:
# 0: byte
# 1: short
# 2: smallint
# 3: word
# 4: longint
# 5: single
# 6: real48
# 7: double
# 8: extended DATA
# array of nbchan bytes specifying type of data forthcoming
ctype = 5 # single float
data_format = "<" + ("B" * nbchan)
data_values = [ ctype for n in range(nbchan) ]
Ktype_chr = self.get_serialized( data_format, data_values )
Ktype_sub = self.get_serialized_subblock( 'Ktype', Ktype_chr )
#print "Ktype size: %s" % (len(Ktype_sub))
# Episode data serialization:
# concatenate all its data strings under a block
Ep_data = Ep_sub + Adc_sub + Ksamp_sub + Ktype_sub
#print "\n---- Finishing:\nEp subs size: %s" % (len(Ep_data))
Ep_blk = self.get_serialized_block( 'B_Ep', Ep_data )
#print "B_Ep size: %s" % (len(Ep_blk))
# 'RDATA'
# It produces a two part (header+data) content coming from analog/digital inputs.
pctime = time()
data_format = "<h?dI"
data_values = [ 15, True, pctime, 0 ]
RDATA_chr = self.get_serialized( data_format, data_values, serialized_analog_data )
RDATA_blk = self.get_serialized_block( 'RDATA', RDATA_chr )
#print "RDATA size: %s" % (len(RDATA_blk))
# 'Rspk'
# like an REVT block + addons
# It starts with a RDATA header, after an integer with the number of events,
# then the events per channel and finally all the events one after the other
data_format = "<h?dII" + str(NbVeV) + "I"
data_values = [ 15, True, pctime, 0, NbVeV ]
data_values.extend(NbEv)
Rspk_chr = self.get_serialized( data_format, data_values, serialized_spike_data )
Rspk_blk = self.get_serialized_block( 'RSPK', Rspk_chr )
#print "RSPK size: %s" % (len(Rspk_blk))
# 'DBrecord'
# like a block + subblocks
# serialization
ST_sub = self.get_serialized_subblock( 'ST', serialized_ST_data )
#print "ST size: %s" % (len(ST_sub))
BUF_sub = self.get_serialized_subblock( 'BUF', serialized_BUF_data )
#print "BUF size: %s" % (len(BUF_sub))
annotations_data = ST_sub + BUF_sub
#data_format = "<h?dI"
#data_values = [ 15, True, pctime, 0 ]
#DBrec_chr = self.get_serialized( data_format, data_values, annotations_data )
DBrec_blk = self.get_serialized_block( 'DBrecord', annotations_data )
#print "DBrecord size: %s" % (len(DBrec_blk))
# 'COM'
#print "write_block() - segment name:", seg.name
# name of the file - NEO Segment name
data_format = '<h'+str(len(seg.name))+'s'
data_values = [ len(seg.name), seg.name ]
SEG_COM_chr = self.get_serialized( data_format, data_values )
SEG_COM_blk = self.get_serialized_block( 'COM', SEG_COM_chr )
# Complete data serialization: concatenate all data strings
All += Ep_blk + RDATA_blk + Rspk_blk + DBrec_blk + SEG_COM_blk
# ElphyFile (open, write and close)
self.elphy_file.write( All )
def get_serialized( self, data_format, data_values, ext_data='' ):
data_chr = struct.pack( data_format, *data_values )
return data_chr + ext_data
def get_serialized_block( self, ident, data ):
"""
Generic Block Header
This function (without needing a layout and the rest) creates a binary serialized version of
the block containing the format string and the actual data for the following
Elphy Block Header structure:
size: longint // 4-byte integer
ident: string[XXX]; // a Pascal variable-length string
data: array[1..YYY] of byte;
For example:
'<IB22s' followed by an array of bytes as specified
"""
# endian 4byte ident
data_format = "<IB" + str(len(ident))+"s"
data_size = 4 + 1 + len(ident) + len(data) # all: <IBs...data...
data_values = [ data_size, len(ident), ident ]
data_chr = struct.pack( data_format, *data_values )
return data_chr + data
def get_serialized_subblock( self, ident, data ):
"""
Generic Sub-Block Header
This function (without needing a layout and the rest) creates a binary serialized version of
the block containing the format string and the actual data for the following
Elphy Sub-Block Header structure:
id: string[XXX]; // a Pascal variable-length string
size1: word // 2-byte unsigned integer
data: array[1..YYY] of byte;
For example:
'<B22sH4522L' followed by an array of bytes as specified
"""
data_size = len( data )
# endian size+string 2byte array of data_size bytes
data_format = "<B" + str(len(ident))+"s" + "h"
data_values = [ len(ident), ident, data_size ]
data_chr = struct.pack( data_format, *data_values )
return data_chr + data
def get_annotations_dict( self, annotations, prefix, items, name='', idx=0 ) :
"""
Helper function to retrieve annotations in a dictionary to be serialized as Elphy DBrecord
"""
for (key,value) in items :
#print "get_annotation_dict() - items[%s]" % (key)
if isinstance( value, (list,tuple,np.ndarray) ) :
for element in value :
annotations.update( dict( [(prefix+"-"+name+"-"+key+"-"+str(idx), element)] ) )
idx = idx+1
else :
annotations.update( dict( [(prefix+"-"+key+"-"+str(idx),value)] ) )
return (idx,annotations)
def read_segment( self, episode ):
"""
Internal method used to return :class:`Segment` data to the main read method.
Parameters:
elphy_file : is the elphy object.
episode : number of elphy episode, roughly corresponding to a segment
"""
#print "name:",self.elphy_file.layout.get_episode_name(episode)
episode_name = self.elphy_file.layout.get_episode_name(episode)
name = episode_name if len(episode_name)>0 else "episode %s" % str(episode + 1)
segment = Segment( name=name )
# create an analog signal for
# each channel in the episode
for channel in range(1, self.elphy_file.n_channels(episode)+1) :
signal = self.elphy_file.get_signal(episode, channel)
analog_signal = AnalogSignal(
signal.data['y'],
units = signal.y_unit,
t_start = signal.t_start * getattr(pq, signal.x_unit.strip()),
t_stop = signal.t_stop * getattr(pq, signal.x_unit.strip()),
#sampling_rate = signal.sampling_frequency * pq.kHz,
sampling_period = signal.sampling_period * getattr(pq, signal.x_unit.strip()),
channel_name="episode %s, channel %s" % ( int(episode+1), int(channel+1) )
)
analog_signal.segment = segment
segment.analogsignals.append(analog_signal)
# create a spiketrain for each
# spike channel in the episode
# in case of multi-electrode
# acquisition context
n_spikes = self.elphy_file.n_spiketrains(episode)
#print "read_segment() - n_spikes:",n_spikes
if n_spikes>0 :
for spk in range(1, n_spikes+1) :
spiketrain = self.read_spiketrain(episode, spk)
spiketrain.segment = segment
segment.spiketrains.append( spiketrain )
# segment
return segment
def read_channelindex( self, episode ):
"""
Internal method used to return :class:`ChannelIndex` info.
Parameters:
elphy_file : is the elphy object.
episode : number of elphy episode, roughly corresponding to a segment
"""
n_spikes = self.elphy_file.n_spiketrains(episode)
group = ChannelIndex(
name="episode %s, group of %s electrodes" % (episode, n_spikes)
)
for spk in range(0, n_spikes) :
channel = self.read_recordingchannel(episode, spk)
group.channel_indexes.append(channel)
return group
def read_recordingchannel( self, episode, chl ):
"""
Internal method used to return a :class:`RecordingChannel` label.
Parameters:
elphy_file : is the elphy object.
episode : number of elphy episode, roughly corresponding to a segment.
chl : electrode number.
"""
channel = RecordingChannel(
name="episode %s, electrodes %s" % (episode, chl)
)
return channel
def read_event( self, episode, evt ):
"""
Internal method used to return a list of elphy :class:`EventArray` acquired from event channels.
Parameters:
elphy_file : is the elphy object.
episode : number of elphy episode, roughly corresponding to a segment.
evt : index of the event.
"""
event = self.elphy_file.get_event(episode, evt)
neo_event = Event(
times=event.times * pq.s,
channel_name="episode %s, event channel %s" % (episode + 1, evt + 1)
)
return neo_event
def read_spiketrain( self, episode, spk ):
"""
Internal method used to return an elphy object :class:`SpikeTrain`.
Parameters:
elphy_file : is the elphy object.
episode : number of elphy episode, roughly corresponding to a segment.
spk : index of the spike array.
"""
block = self.elphy_file.layout.episode_block(episode)
spike = self.elphy_file.get_spiketrain(episode, spk)
spikes = spike.times * pq.s
#print "read_spiketrain() - spikes: %s" % (len(spikes))
#print "read_spiketrain() - spikes:",spikes
dct = {
'times':spikes,
't_start': block.ep_block.X0_wf if block.ep_block.X0_wf < spikes[0] else spikes[0], #check
't_stop': block.ep_block.cyber_time if block.ep_block.cyber_time > spikes[-1] else spikes[-1],
'units':'s',
# special keywords to identify the
# electrode providing the spiketrain
# even though it is redundant with
# waveforms
'label':"episode %s, electrode %s" % (episode, spk),
'electrode_id':spk
}
# new spiketrain
return SpikeTrain(**dct)
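# --- Hedged round-trip sketch (illustrative only) ---
# It mirrors the generic Elphy block header documented in
# get_serialized_block() above: a 4-byte total size, a Pascal-style
# identifier (length byte followed by the characters) and then the raw data.
# Python 3 bytes are used here for clarity; the writer above works on str.
import struct

def _pack_elphy_block(ident, data):
    size = 4 + 1 + len(ident) + len(data)
    return struct.pack('<IB%ds' % len(ident), size, len(ident), ident) + data

def _unpack_elphy_block(raw):
    size, ident_size = struct.unpack_from('<IB', raw)
    ident, = struct.unpack_from('<%ds' % ident_size, raw, 5)
    return ident, raw[5 + ident_size:size]
# e.g. _unpack_elphy_block(_pack_elphy_block(b'RDATA', b'\x00\x01'))
# returns (b'RDATA', b'\x00\x01')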
|
gpl-3.0
|
CiscoSystems/tempest
|
tempest/cli/simple_read_only/telemetry/test_ceilometer.py
|
7
|
1983
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import cli
from tempest import config
from tempest.openstack.common import log as logging
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class SimpleReadOnlyCeilometerClientTest(cli.ClientTestBase):
"""Basic, read-only tests for Ceilometer CLI client.
Checks return values and output of read-only commands.
These tests do not presume any content, nor do they create
their own. They only verify the structure of output if present.
"""
@classmethod
def resource_setup(cls):
if (not CONF.service_available.ceilometer):
msg = ("Skipping all Ceilometer cli tests because it is "
"not available")
raise cls.skipException(msg)
super(SimpleReadOnlyCeilometerClientTest, cls).resource_setup()
def ceilometer(self, *args, **kwargs):
return self.clients.ceilometer(
*args, endpoint_type=CONF.telemetry.endpoint_type, **kwargs)
def test_ceilometer_meter_list(self):
self.ceilometer('meter-list')
@test.attr(type='slow')
def test_ceilometer_resource_list(self):
self.ceilometer('resource-list')
def test_ceilometer_alarm_list(self):
self.ceilometer('alarm-list')
def test_ceilometer_version(self):
self.ceilometer('', flags='--version')
|
apache-2.0
|
pierrelb/RMG-Java
|
source/cclib/parser/data.py
|
11
|
9767
|
"""
cclib (http://cclib.sf.net) is (c) 2007, the cclib development team
and licensed under the LGPL (http://www.gnu.org/copyleft/lgpl.html).
gmagoon 4/5/10-4/6/10 (this notice added 4/29/10): Gregory Magoon modified this file from cclib 1.0
"""
import cPickle as pickle
import os
import sys
import numpy
class ccData(object):
"""Class for objects containing data from cclib parsers and methods.
Description of cclib attributes:
aonames -- atomic orbital names (list)
aooverlaps -- atomic orbital overlap matrix (array[2])
atombasis -- indices of atomic orbitals on each atom (list of lists)
atomcoords -- atom coordinates (array[3], angstroms)
atomnos -- atomic numbers (array[1])
charge -- net charge of the system (integer)
ccenergies -- molecular energies with Coupled-Cluster corrections (array[2], eV)
coreelectrons -- number of core electrons in atom pseudopotentials (array[1])
etenergies -- energies of electronic transitions (array[1], 1/cm)
etoscs -- oscillator strengths of electronic transitions (array[1])
etrotats -- rotatory strengths of electronic transitions (array[1], ??)
etsecs -- singly-excited configurations for electronic transitions (list of lists)
etsyms -- symmetries of electronic transitions (list)
fonames -- fragment orbital names (list)
fooverlaps -- fragment orbital overlap matrix (array[2])
fragnames -- names of fragments (list)
frags -- indices of atoms in a fragment (list of lists)
gbasis -- coefficients and exponents of Gaussian basis functions (PyQuante format)
geotargets -- targets for convergence of geometry optimization (array[1])
geovalues -- current values for convergence of geometry optimization (array[1])
homos -- molecular orbital indices of HOMO(s) (array[1])
mocoeffs -- molecular orbital coefficients (list of arrays[2])
moenergies -- molecular orbital energies (list of arrays[1], eV)
mosyms -- orbital symmetries (list of lists)
mpenergies -- molecular electronic energies with Moller-Plesset corrections (array[2], eV)
mult -- multiplicity of the system (integer)
natom -- number of atoms (integer)
nbasis -- number of basis functions (integer)
nmo -- number of molecular orbitals (integer)
nocoeffs -- natural orbital coefficients (array[2])
scfenergies -- molecular electronic energies after SCF (Hartree-Fock, DFT) (array[1], eV)
scftargets -- targets for convergence of the SCF (array[2])
scfvalues -- current values for convergence of the SCF (list of arrays[2])
stericenergy -- final steric energy (for MM4 calculations)
vibdisps -- cartesian displacement vectors (array[3], delta angstrom)
vibfreqs -- vibrational frequencies (array[1], 1/cm)
vibirs -- IR intensities (array[1], km/mol)
vibramans -- Raman intensities (array[1], A^4/Da)
vibsyms -- symmetries of vibrations (list)
(1) The term 'array' refers to a numpy array
(2) The number of dimensions of an array is given in square brackets
(3) Python indexes arrays/lists starting at zero, so if homos==[10], then
the 11th molecular orbital is the HOMO
"""
def __init__(self, attributes=None):
"""Initialize the cclibData object.
Normally called in the parse() method of a Logfile subclass.
Inputs:
attributes - dictionary of attributes to load
"""
# Names of all supported attributes.
self._attrlist = ['aonames', 'aooverlaps', 'atombasis',
'atomcoords', 'atomnos',
'ccenergies', 'charge', 'coreelectrons',
'etenergies', 'etoscs', 'etrotats', 'etsecs', 'etsyms',
'fonames', 'fooverlaps', 'fragnames', 'frags',
'gbasis', 'geotargets', 'geovalues', 'grads',
'hessian', 'homos',
'mocoeffs', 'moenergies', 'molmass', 'mosyms', 'mpenergies', 'mult',
'natom', 'nbasis', 'nmo', 'nocoeffs', 'rotcons', 'rotsymm',
'scfenergies', 'scftargets', 'scfvalues', 'stericenergy',
'vibdisps', 'vibfreqs', 'vibirs', 'vibramans', 'vibsyms']
# The expected types for all supported attributes.
#gmagoon 5/27/09: added rotsymm type above and below
#gmagoon 6/8/09: added molmass (previously (maybe 5/28) I had added rotcons)
self._attrtypes = { "aonames": list,
"aooverlaps": numpy.ndarray,
"atombasis": list,
"atomcoords": numpy.ndarray,
"atomnos": numpy.ndarray,
"charge": int,
"coreelectrons": numpy.ndarray,
"etenergies": numpy.ndarray,
"etoscs": numpy.ndarray,
"etrotats": numpy.ndarray,
"etsecs": list,
"etsyms": list,
'gbasis': list,
"geotargets": numpy.ndarray,
"geovalues": numpy.ndarray,
"grads": numpy.ndarray,
"hessian": numpy.ndarray,
"homos": numpy.ndarray,
"mocoeffs": list,
"moenergies": list,
"molmass": float,
"mosyms": list,
"mpenergies": numpy.ndarray,
"mult": int,
"natom": int,
"nbasis": int,
"nmo": int,
"nocoeffs": numpy.ndarray,
"rotcons": list,
"rotsymm": int,
"scfenergies": numpy.ndarray,
"scftargets": numpy.ndarray,
"scfvalues": list,
"stericenergy": float,
"vibdisps": numpy.ndarray,
"vibfreqs": numpy.ndarray,
"vibirs": numpy.ndarray,
"vibramans": numpy.ndarray,
"vibsyms": list,
}
# Arrays are double precision by default, but these will be integer arrays.
self._intarrays = ['atomnos', 'coreelectrons', 'homos']
# Attributes that should be lists of arrays (double precision).
self._listsofarrays = ['mocoeffs', 'moenergies', 'scfvalues', 'rotcons']#gmagoon 5/28/09: added rotcons
if attributes:
self.setattributes(attributes)
def listify(self):
"""Converts all attributes that are arrays or lists of arrays to lists."""
for k, v in self._attrtypes.iteritems():
if hasattr(self, k):
if v == numpy.ndarray:
setattr(self, k, getattr(self, k).tolist())
elif v == list and k in self._listsofarrays:
setattr(self, k, [x.tolist() for x in getattr(self, k)])
def arrayify(self):
"""Converts appropriate attributes to arrays or lists of arrays."""
for k, v in self._attrtypes.iteritems():
if hasattr(self, k):
precision = 'd'
if k in self._intarrays:
precision = 'i'
if v == numpy.ndarray:
setattr(self, k, numpy.array(getattr(self, k), precision))
elif v == list and k in self._listsofarrays:
setattr(self, k, [numpy.array(x, precision)
for x in getattr(self, k)])
def getattributes(self, tolists=False):
"""Returns a dictionary of existing data attributes.
Inputs:
tolists - flag to convert attributes to lists where applicable
"""
if tolists:
self.listify()
attributes = {}
for attr in self._attrlist:
if hasattr(self, attr):
attributes[attr] = getattr(self,attr)
if tolists:
self.arrayify()
return attributes
def setattributes(self, attributes):
"""Sets data attributes given in a dictionary.
Inputs:
attributes - dictionary of attributes to set
Outputs:
invalid - list of attributes names that were not set, which
means they are not specified in self._attrlist
"""
if type(attributes) is not dict:
raise TypeError, "attributes must be in a dictionary"
valid = [a for a in attributes if a in self._attrlist]
invalid = [a for a in attributes if a not in self._attrlist]
for attr in valid:
setattr(self, attr, attributes[attr])
self.arrayify()
return invalid
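# Illustrative usage sketch (added for clarity; not part of the original cclib
# source), assuming a parser has already produced an attribute dictionary:
#
#   data = ccData(attributes={"natom": 3, "charge": 0, "mult": 1})
#   as_lists = data.getattributes(tolists=True)   # arrays converted to plain lists
#   invalid = data.setattributes({"nbasis": 7})   # [] when every name is in _attrlist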
|
mit
|
GetBlimp/boards-backend
|
blimp_boards/comments/tests/test_permissions.py
|
2
|
2581
|
from django.contrib.auth.models import AnonymousUser
from rest_framework.test import APIRequestFactory
from rest_framework.views import APIView
from ...utils.tests import BaseTestCase
from ..models import Comment
from ..permissions import CommentPermission
class MockView(APIView):
pass
mock_view = MockView.as_view()
class CommentPermissionTestCase(BaseTestCase):
def setUp(self):
self.factory = APIRequestFactory()
self.perm_class = CommentPermission()
def test_should_return_true_for_authenticated_user(self):
"""
Tests that `.has_permission` returns `True` for an
authenticated user.
"""
self.create_user()
request = self.factory.post('/')
request.user = self.user
view = mock_view(request)
has_perm = self.perm_class.has_permission(request, view)
self.assertTrue(has_perm)
def test_should_return_false_for_anonymous_user(self):
"""
Tests that `.has_permission` returns `False` for an
unauthenticated user.
"""
request = self.factory.post('/')
request.user = AnonymousUser()
view = mock_view(request)
has_perm = self.perm_class.has_permission(request, view)
self.assertFalse(has_perm)
def test_returns_true_for_user_that_created_comment(self):
"""
Tests that `.has_object_permission` returns `True` for
the user that created the comment.
"""
self.create_user()
comment = Comment.objects.create(
content='A comment',
content_object=self.user,
created_by=self.user)
request = self.factory.post('/')
request.user = self.user
view = mock_view(request)
has_perm = self.perm_class.has_object_permission(
request, view, comment)
self.assertTrue(has_perm)
def test_returns_false_for_user_that_didnt_create_comment(self):
"""
Tests that `.has_object_permission` returns `False` for
a user other than the one that created the comment.
"""
self.create_user()
user = self.create_another_user(username='pedro')
comment = Comment.objects.create(
content='A comment',
content_object=self.user,
created_by=user)
request = self.factory.post('/')
request.user = self.user
view = mock_view(request)
has_perm = self.perm_class.has_object_permission(
request, view, comment)
self.assertFalse(has_perm)
|
agpl-3.0
|
nthien/pulp
|
server/test/unit/server/auth/test_authorization.py
|
16
|
1161
|
import unittest
from pulp.server.auth import authorization
class TestAuthorization(unittest.TestCase):
def test_module_level_attributes(self):
"""
Assert that the expected module level variables are correct.
"""
self.assertEqual(authorization.CREATE, 0)
self.assertEqual(authorization.READ, 1)
self.assertEqual(authorization.UPDATE, 2)
self.assertEqual(authorization.DELETE, 3)
self.assertEqual(authorization.EXECUTE, 4)
expected_op_names = ['CREATE', 'READ', 'UPDATE', 'DELETE', 'EXECUTE']
self.assertEqual(authorization.OPERATION_NAMES, expected_op_names)
def test__lookup_operation_name(self):
"""
Test the _lookup_operation_name function
"""
_lookup = authorization._lookup_operation_name
self.assertEqual(_lookup(0), 'CREATE')
self.assertEqual(_lookup(1), 'READ')
self.assertEqual(_lookup(2), 'UPDATE')
self.assertEqual(_lookup(3), 'DELETE')
self.assertEqual(_lookup(4), 'EXECUTE')
invalid_operation_value = 1000
self.assertRaises(KeyError, _lookup, invalid_operation_value)
|
gpl-2.0
|
nttks/jenkins-test
|
lms/lib/comment_client/comment.py
|
27
|
3334
|
from .utils import CommentClientRequestError, perform_request
from .thread import Thread, _url_for_flag_abuse_thread, _url_for_unflag_abuse_thread
import models
import settings
class Comment(models.Model):
accessible_fields = [
'id', 'body', 'anonymous', 'anonymous_to_peers', 'course_id',
'endorsed', 'parent_id', 'thread_id', 'username', 'votes', 'user_id',
'closed', 'created_at', 'updated_at', 'depth', 'at_position_list',
'type', 'commentable_id', 'abuse_flaggers', 'endorsement',
]
updatable_fields = [
'body', 'anonymous', 'anonymous_to_peers', 'course_id', 'closed',
'user_id', 'endorsed', 'endorsement_user_id',
]
initializable_fields = updatable_fields
metrics_tag_fields = ['course_id', 'endorsed', 'closed']
base_url = "{prefix}/comments".format(prefix=settings.PREFIX)
type = 'comment'
@property
def thread(self):
return Thread(id=self.thread_id, type='thread')
@classmethod
def url_for_comments(cls, params={}):
if params.get('thread_id'):
return _url_for_thread_comments(params['thread_id'])
else:
return _url_for_comment(params['parent_id'])
@classmethod
def url(cls, action, params={}):
if action in ['post']:
return cls.url_for_comments(params)
else:
return super(Comment, cls).url(action, params)
def flagAbuse(self, user, voteable):
if voteable.type == 'thread':
url = _url_for_flag_abuse_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_flag_abuse_comment(voteable.id)
else:
raise CommentClientRequestError("Can only flag/unflag threads or comments")
params = {'user_id': user.id}
response = perform_request(
'put',
url,
params,
metric_tags=self._metric_tags,
metric_action='comment.abuse.flagged'
)
voteable._update_from_response(response)
def unFlagAbuse(self, user, voteable, removeAll):
if voteable.type == 'thread':
url = _url_for_unflag_abuse_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_unflag_abuse_comment(voteable.id)
else:
raise CommentClientRequestError("Can only flag/unflag threads or comments")
params = {'user_id': user.id}
if removeAll:
params['all'] = True
response = perform_request(
'put',
url,
params,
metric_tags=self._metric_tags,
metric_action='comment.abuse.unflagged'
)
voteable._update_from_response(response)
def _url_for_thread_comments(thread_id):
return "{prefix}/threads/{thread_id}/comments".format(prefix=settings.PREFIX, thread_id=thread_id)
def _url_for_comment(comment_id):
return "{prefix}/comments/{comment_id}".format(prefix=settings.PREFIX, comment_id=comment_id)
def _url_for_flag_abuse_comment(comment_id):
return "{prefix}/comments/{comment_id}/abuse_flag".format(prefix=settings.PREFIX, comment_id=comment_id)
def _url_for_unflag_abuse_comment(comment_id):
return "{prefix}/comments/{comment_id}/abuse_unflag".format(prefix=settings.PREFIX, comment_id=comment_id)
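# Illustrative examples (added for clarity; not part of the original module).
# Assuming settings.PREFIX were "http://localhost:4567/api/v1", the helpers
# above would build URLs such as:
#   _url_for_thread_comments("42")   -> ".../threads/42/comments"
#   _url_for_flag_abuse_comment("7") -> ".../comments/7/abuse_flag"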
|
agpl-3.0
|
Huyuwei/tvm
|
tests/python/unittest/test_schedule_lstm.py
|
2
|
3710
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
def test_lstm_cell_inline():
num_step = 128
num_input = 256
num_hidden = 1152
batch_size = 4
# Global transition matrix
X = tvm.placeholder((num_step - 1, batch_size, num_input), name="X")
Wi2h = tvm.placeholder((4, num_hidden, num_input), name="Wi2h")
Wh2h = tvm.placeholder((4, num_hidden, num_hidden), name="Wh2h")
# h: output hidden state, c: cell state.
s_state_h = tvm.placeholder((num_step, batch_size, num_hidden))
s_state_c = tvm.placeholder((num_step, batch_size, num_hidden))
s_init_c = tvm.compute((1, batch_size, num_hidden),
lambda *i: 0.0, name="init_c")
s_init_h = tvm.compute((1, batch_size, num_hidden),
lambda *i: 0.0, name="init_h")
# LSTM transition
k = tvm.reduce_axis((0, num_input), name="ki2h")
s_i2h = tvm.compute(
(num_step, 4, batch_size, num_hidden),
lambda t, x, i, j: tvm.sum(X[t - 1, i, k] * Wi2h[x, j, k], axis=k),
name="s_i2h")
k = tvm.reduce_axis((0, num_hidden), name="ki2h")
s_h2h = tvm.compute(
(num_step, 4, batch_size, num_hidden),
lambda t, x, i, j: tvm.sum(s_state_h[t - 1, i, k] * Wh2h[x, j, k], axis=k),
name="s_h2h")
# Gate rules
gates = tvm.compute(s_i2h.shape, lambda *i:
s_i2h(*i) + s_h2h(*i), name="gates")
gshape = (num_step, batch_size, num_hidden)
in_gate = tvm.compute(gshape, lambda t, i, j: tvm.sigmoid(gates[t, 0, i, j]), name="in_gate")
in_transform = tvm.compute(gshape, lambda t, i, j: tvm.tanh(gates[t, 1, i, j]), name="in_transform")
forget_gate = tvm.compute(gshape, lambda t, i, j: tvm.sigmoid(gates[t, 2, i, j]), name="forget_gate")
out_gate = tvm.compute(gshape, lambda t, i, j: tvm.sigmoid(gates[t, 3, i, j]), name="out_gate")
next_c = tvm.compute(gshape,
lambda t, i, j:
forget_gate[t, i, j] * s_state_c[t - 1, i, j] +
in_gate[t, i, j] * in_transform[t, i, j], name="next_c")
next_h = tvm.compute(gshape,
lambda t, i, j: out_gate[t, i, j] * tvm.tanh(next_c[t, i, j]), name="next_h")
update_c = tvm.compute(gshape, lambda *i: next_c(*i), name="update_c")
update_h = tvm.compute(gshape, lambda *i: next_h(*i), name="update_h")
# schedule
scan_h, scan_c = tvm.scan(
[s_init_h, s_init_c],
[update_h, update_c],
[s_state_h, s_state_c],
inputs=[X],
name="lstm_scan")
# schedule
s = tvm.create_schedule(scan_h.op)
# Inline gate computations
s[gates].compute_inline()
s[in_gate].compute_inline()
s[in_transform].compute_inline()
s[forget_gate].compute_inline()
s[out_gate].compute_inline()
# verify we can lower correctly
tvm.lower(s, [X, Wi2h, Wh2h, scan_h, scan_c])
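# Illustrative note (added; not part of the original test): the lowered IR can
# be inspected by printing the result, e.g.
#   print(tvm.lower(s, [X, Wi2h, Wh2h, scan_h, scan_c], simple_mode=True))
# assuming the older tvm.lower signature with simple_mode is available.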
if __name__ == "__main__":
test_lstm_cell_inline()
|
apache-2.0
|
p0psicles/SickGear
|
lib/requests/packages/chardet/escsm.py
|
2930
|
7839
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
HZ_cls = (
1,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,4,0,5,2,0, # 78 - 7f
1,1,1,1,1,1,1,1, # 80 - 87
1,1,1,1,1,1,1,1, # 88 - 8f
1,1,1,1,1,1,1,1, # 90 - 97
1,1,1,1,1,1,1,1, # 98 - 9f
1,1,1,1,1,1,1,1, # a0 - a7
1,1,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,1,1,1,1,1,1, # c0 - c7
1,1,1,1,1,1,1,1, # c8 - cf
1,1,1,1,1,1,1,1, # d0 - d7
1,1,1,1,1,1,1,1, # d8 - df
1,1,1,1,1,1,1,1, # e0 - e7
1,1,1,1,1,1,1,1, # e8 - ef
1,1,1,1,1,1,1,1, # f0 - f7
1,1,1,1,1,1,1,1, # f8 - ff
)
HZ_st = (
eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17
5,eError, 6,eError, 5, 5, 4,eError,# 18-1f
4,eError, 4, 4, 4,eError, 4,eError,# 20-27
4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
)
HZCharLenTable = (0, 0, 0, 0, 0, 0)
HZSMModel = {'classTable': HZ_cls,
'classFactor': 6,
'stateTable': HZ_st,
'charLenTable': HZCharLenTable,
'name': "HZ-GB-2312"}
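# Illustrative note (added; not in the original file): these model dictionaries
# are consumed by chardet's CodingStateMachine, roughly as follows:
#   from .codingstatemachine import CodingStateMachine
#   sm = CodingStateMachine(HZSMModel)
#   state = sm.next_state(byte)  # eStart, eError, eItsMe or an intermediate state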
ISO2022CN_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,4,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022CN_st = (
eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
5, 6,eError,eError,eError,eError,eError,eError,# 28-2f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
)
ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
'classFactor': 9,
'stateTable': ISO2022CN_st,
'charLenTable': ISO2022CNCharLenTable,
'name': "ISO-2022-CN"}
ISO2022JP_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,2,2, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,7,0,0,0, # 20 - 27
3,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
6,0,4,0,8,0,0,0, # 40 - 47
0,9,5,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022JP_st = (
eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
eError, 5,eError,eError,eError, 4,eError,eError,# 20-27
eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
)
ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
'classFactor': 10,
'stateTable': ISO2022JP_st,
'charLenTable': ISO2022JPCharLenTable,
'name': "ISO-2022-JP"}
ISO2022KR_cls = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,3,0,0,0, # 20 - 27
0,4,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,5,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022KR_st = (
eStart, 3,eError,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17
eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f
eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
)
ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)
ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
'classFactor': 6,
'stateTable': ISO2022KR_st,
'charLenTable': ISO2022KRCharLenTable,
'name': "ISO-2022-KR"}
# flake8: noqa
|
gpl-3.0
|
sharescience/ardupilot
|
Tools/autotest/apmrover2.py
|
1
|
21420
|
#!/usr/bin/env python
# Drive APMrover2 in SITL
from __future__ import print_function
import os
import pexpect
import shutil
import time
from common import AutoTest
from pysim import util
from pymavlink import mavutil
# get location of scripts
testdir = os.path.dirname(os.path.realpath(__file__))
# HOME = mavutil.location(-35.362938, 149.165085, 584, 270)
HOME = mavutil.location(40.071374969556928,
-105.22978898137808,
1583.702759,
246)
class AutoTestRover(AutoTest):
def __init__(self,
binary,
valgrind=False,
gdb=False,
speedup=10,
frame=None,
params=None,
gdbserver=False,
**kwargs):
super(AutoTestRover, self).__init__(**kwargs)
self.binary = binary
self.valgrind = valgrind
self.gdb = gdb
self.frame = frame
self.params = params
self.gdbserver = gdbserver
self.home = "%f,%f,%u,%u" % (HOME.lat,
HOME.lng,
HOME.alt,
HOME.heading)
self.homeloc = None
self.speedup = speedup
self.speedup_default = 10
self.sitl = None
self.hasInit = False
self.log_name = "APMrover2"
def init(self):
if self.frame is None:
self.frame = 'rover'
self.apply_parameters_using_sitl()
self.sitl = util.start_SITL(self.binary,
model=self.frame,
home=self.home,
speedup=self.speedup,
valgrind=self.valgrind,
gdb=self.gdb,
gdbserver=self.gdbserver)
self.mavproxy = util.start_MAVProxy_SITL(
'APMrover2', options=self.mavproxy_options())
self.mavproxy.expect('Telemetry log: (\S+)\r\n')
logfile = self.mavproxy.match.group(1)
self.progress("LOGFILE %s" % logfile)
buildlog = self.buildlogs_path("APMrover2-test.tlog")
self.progress("buildlog=%s" % buildlog)
if os.path.exists(buildlog):
os.unlink(buildlog)
try:
os.link(logfile, buildlog)
except Exception:
pass
self.mavproxy.expect('Received [0-9]+ parameters')
util.expect_setup_callback(self.mavproxy, self.expect_callback)
self.expect_list_clear()
self.expect_list_extend([self.sitl, self.mavproxy])
self.progress("Started simulator")
# get a mavlink connection going
connection_string = '127.0.0.1:19550'
try:
self.mav = mavutil.mavlink_connection(connection_string,
robust_parsing=True)
except Exception as msg:
self.progress("Failed to start mavlink connection on %s" %
connection_string)
raise
self.mav.message_hooks.append(self.message_hook)
self.mav.idle_hooks.append(self.idle_hook)
self.hasInit = True
self.progress("Ready to start testing!")
# def reset_and_arm(self):
# """Reset RC, set to MANUAL and arm."""
# self.mav.wait_heartbeat()
# # ensure all sticks in the middle
# self.set_rc_default()
# self.mavproxy.send('switch 1\n')
# self.mav.wait_heartbeat()
# self.disarm_vehicle()
# self.mav.wait_heartbeat()
# self.arm_vehicle()
#
# # TEST ARM RADIO
# def test_arm_motors_radio(self):
# """Test Arming motors with radio."""
# self.progress("Test arming motors with radio")
# self.mavproxy.send('switch 6\n') # stabilize/manual mode
# self.wait_mode('MANUAL')
# self.mavproxy.send('rc 3 1500\n') # throttle at zero
# self.mavproxy.send('rc 1 2000\n') # steer full right
# self.mavproxy.expect('APM: Throttle armed')
# self.mavproxy.send('rc 1 1500\n')
#
# self.mav.motors_armed_wait()
# self.progress("MOTORS ARMED OK")
# return True
#
# # TEST DISARM RADIO
# def test_disarm_motors_radio(self):
# """Test Disarm motors with radio."""
# self.progress("Test disarming motors with radio")
# self.mavproxy.send('switch 6\n') # stabilize/manual mode
# self.wait_mode('MANUAL')
# self.mavproxy.send('rc 3 1500\n') # throttle at zero
# self.mavproxy.send('rc 1 1000\n') # steer full right
# tstart = self.get_sim_time()
# self.mav.wait_heartbeat()
# timeout = 15
# while self.get_sim_time() < tstart + timeout:
# self.mav.wait_heartbeat()
# if not self.mav.motors_armed():
# disarm_delay = self.get_sim_time() - tstart
# self.progress("MOTORS DISARMED OK WITH RADIO")
# self.mavproxy.send('rc 1 1500\n') # steer full right
# self.mavproxy.send('rc 4 1500\n') # yaw full right
# self.progress("Disarm in %ss" % disarm_delay)
# return True
# self.progress("FAILED TO DISARM WITH RADIO")
# return False
#
# # TEST AUTO DISARM
# def test_autodisarm_motors(self):
# """Test Autodisarm motors."""
# self.progress("Test Autodisarming motors")
# self.mavproxy.send('switch 6\n') # stabilize/manual mode
# # NOT IMPLEMENTED ON ROVER
# self.progress("MOTORS AUTODISARMED OK")
# return True
#
# # TEST RC OVERRIDE
# # TEST RC OVERRIDE TIMEOUT
# def test_rtl(self, home, distance_min=5, timeout=250):
# """Return, land."""
# super(AutotestRover, self).test_rtl(home, distance_min, timeout)
#
# def test_mission(self, filename):
# """Test a mission from a file."""
# self.progress("Test mission %s" % filename)
# num_wp = self.load_mission_from_file(filename)
# self.mavproxy.send('wp set 1\n')
# self.mav.wait_heartbeat()
# self.mavproxy.send('switch 4\n') # auto mode
# self.wait_mode('AUTO')
# ret = self.wait_waypoint(0, num_wp-1, max_dist=5, timeout=500)
#
# if ret:
# self.mavproxy.expect("Mission Complete")
# self.mav.wait_heartbeat()
# self.wait_mode('HOLD')
# self.progress("test: MISSION COMPLETE: passed=%s" % ret)
# return ret
##########################################################
# TESTS DRIVE
##########################################################
# Drive a square in manual mode
def drive_square(self, side=50):
"""Drive a square, driving north then east."""
self.progress("TEST SQUARE")
success = True
# use LEARNING Mode
self.mavproxy.send('switch 5\n')
self.wait_mode('MANUAL')
# first aim north
self.progress("\nTurn right towards north")
if not self.reach_heading_manual(10):
success = False
# save bottom left corner of box as waypoint
self.progress("Save WP 1 & 2")
self.save_wp()
# pitch forward to fly north
self.progress("\nGoing north %u meters" % side)
if not self.reach_distance_manual(side):
success = False
# save top left corner of square as waypoint
self.progress("Save WP 3")
self.save_wp()
# roll right to fly east
self.progress("\nGoing east %u meters" % side)
if not self.reach_heading_manual(100):
success = False
if not self.reach_distance_manual(side):
success = False
# save top right corner of square as waypoint
self.progress("Save WP 4")
self.save_wp()
# pitch back to fly south
self.progress("\nGoing south %u meters" % side)
if not self.reach_heading_manual(190):
success = False
if not self.reach_distance_manual(side):
success = False
# save bottom right corner of square as waypoint
self.progress("Save WP 5")
self.save_wp()
# roll left to fly west
self.progress("\nGoing west %u meters" % side)
if not self.reach_heading_manual(280):
success = False
if not self.reach_distance_manual(side):
success = False
# save bottom left corner of square (should be near home) as waypoint
self.progress("Save WP 6")
self.save_wp()
return success
def drive_left_circuit(self):
"""Drive a left circuit, 50m on a side."""
self.mavproxy.send('switch 6\n')
self.wait_mode('MANUAL')
self.set_rc(3, 2000)
self.progress("Driving left circuit")
# do 4 turns
for i in range(0, 4):
# hard left
self.progress("Starting turn %u" % i)
self.set_rc(1, 1000)
if not self.wait_heading(270 - (90*i), accuracy=10):
return False
self.set_rc(1, 1500)
self.progress("Starting leg %u" % i)
if not self.wait_distance(50, accuracy=7):
return False
self.set_rc(3, 1500)
self.progress("Circuit complete")
return True
# def test_throttle_failsafe(self, home, distance_min=10, side=60,
# timeout=300):
# """Fly east, Failsafe, return, land."""
#
# self.mavproxy.send('switch 6\n') # manual mode
# self.wait_mode('MANUAL')
# self.mavproxy.send("param set FS_ACTION 1\n")
#
# # first aim east
# self.progress("turn east")
# if not self.reach_heading_manual(135):
# return False
#
# # fly east 60 meters
# self.progress("# Going forward %u meters" % side)
# if not self.reach_distance_manual(side):
# return False
#
# # pull throttle low
# self.progress("# Enter Failsafe")
# self.mavproxy.send('rc 3 900\n')
#
# tstart = self.get_sim_time()
# success = False
# while self.get_sim_time() < tstart + timeout and not success:
# m = self.mav.recv_match(type='VFR_HUD', blocking=True)
# pos = self.mav.location()
# home_distance = self.get_distance(home, pos)
# self.progress("Alt: %u HomeDistance: %.0f" %
# (m.alt, home_distance))
# # check if we've reached home
# if home_distance <= distance_min:
# self.progress("RTL Complete")
# success = True
#
# # reduce throttle
# self.mavproxy.send('rc 3 1500\n')
# self.mavproxy.expect('APM: Failsafe ended')
# self.mavproxy.send('switch 2\n') # manual mode
# self.mav.wait_heartbeat()
# self.wait_mode('MANUAL')
#
# if success:
# self.progress("Reached failsafe home OK")
# return True
# else:
# self.progress("Failed to reach Home on failsafe RTL - "
# "timed out after %u seconds" % timeout)
# return False
#################################################
# AUTOTEST ALL
#################################################
def drive_mission(self, filename):
"""Drive a mission from a file."""
self.progress("Driving mission %s" % filename)
self.mavproxy.send('wp load %s\n' % filename)
self.mavproxy.expect('Flight plan received')
self.mavproxy.send('wp list\n')
self.mavproxy.expect('Requesting [0-9]+ waypoints')
self.mavproxy.send('switch 4\n') # auto mode
self.set_rc(3, 1500)
self.wait_mode('AUTO')
if not self.wait_waypoint(1, 4, max_dist=5):
return False
self.wait_mode('HOLD')
self.progress("Mission OK")
return True
def do_get_banner(self):
self.mavproxy.send("long DO_SEND_BANNER 1\n")
start = time.time()
while True:
m = self.mav.recv_match(type='STATUSTEXT',
blocking=True,
timeout=1)
if m is not None and "ArduRover" in m.text:
self.progress("banner received: %s" % m.text)
return True
if time.time() - start > 10:
break
self.progress("banner not received")
return False
def drive_brake_get_stopping_distance(self, speed):
# measure our stopping distance:
old_cruise_speed = self.get_parameter('CRUISE_SPEED')
old_accel_max = self.get_parameter('ATC_ACCEL_MAX')
# controller tends not to meet cruise speed (max of ~14 when 15
# set), thus *1.2
self.set_parameter('CRUISE_SPEED', speed*1.2)
# at time of writing, the vehicle is only capable of 10m/s/s accel
self.set_parameter('ATC_ACCEL_MAX', 15)
self.mavproxy.send("mode STEERING\n")
self.wait_mode('STEERING')
self.set_rc(3, 2000)
self.wait_groundspeed(15, 100)
initial = self.mav.location()
initial_time = time.time()
while time.time() - initial_time < 2:
# wait for a position update from the autopilot
start = self.mav.location()
if start != initial:
break
self.set_rc(3, 1500)
self.wait_groundspeed(0, 0.2) # why do we not stop?!
initial = self.mav.location()
initial_time = time.time()
while time.time() - initial_time < 2:
# wait for a position update from the autopilot
stop = self.mav.location()
if stop != initial:
break
delta = self.get_distance(start, stop)
self.set_parameter('CRUISE_SPEED', old_cruise_speed)
self.set_parameter('ATC_ACCEL_MAX', old_accel_max)
return delta
def drive_brake(self):
old_using_brake = self.get_parameter('ATC_BRAKE')
old_cruise_speed = self.get_parameter('CRUISE_SPEED')
self.set_parameter('CRUISE_SPEED', 15)
self.set_parameter('ATC_BRAKE', 0)
distance_without_brakes = self.drive_brake_get_stopping_distance(15)
# brakes on:
self.set_parameter('ATC_BRAKE', 1)
distance_with_brakes = self.drive_brake_get_stopping_distance(15)
# revert state:
self.set_parameter('ATC_BRAKE', old_using_brake)
self.set_parameter('CRUISE_SPEED', old_cruise_speed)
delta = distance_without_brakes - distance_with_brakes
if delta < distance_without_brakes * 0.05: # 5% isn't asking for much
self.progress("Brakes have negligible effect "
"(with=%0.2fm without=%0.2fm delta=%0.2fm)" %
(distance_with_brakes,
distance_without_brakes,
delta))
return False
else:
self.progress(
"Brakes work (with=%0.2fm without=%0.2fm delta=%0.2fm)" %
(distance_with_brakes, distance_without_brakes, delta))
return True
def drive_rtl_mission(self):
mission_filepath = os.path.join(testdir,
"ArduRover-Missions",
"rtl.txt")
self.mavproxy.send('wp load %s\n' % mission_filepath)
self.mavproxy.expect('Flight plan received')
self.mavproxy.send('switch 4\n') # auto mode
self.set_rc(3, 1500)
self.wait_mode('AUTO')
self.mavproxy.expect('Executing RTL')
m = self.mav.recv_match(type='NAV_CONTROLLER_OUTPUT',
blocking=True,
timeout=0.1)
if m is None:
self.progress("Did not receive NAV_CONTROLLER_OUTPUT message")
return False
wp_dist_min = 5
if m.wp_dist < wp_dist_min:
self.progress("Did not start at least 5 metres from destination")
return False
self.progress("NAV_CONTROLLER_OUTPUT.wp_dist looks good (%u >= %u)" %
(m.wp_dist, wp_dist_min,))
self.wait_mode('HOLD')
pos = self.mav.location()
home_distance = self.get_distance(HOME, pos)
home_distance_max = 5
if home_distance > home_distance_max:
self.progress("Did not get home (%u metres distant > %u)" %
(home_distance, home_distance_max))
return False
self.mavproxy.send('switch 6\n')
self.wait_mode('MANUAL')
self.progress("RTL Mission OK")
return True
def test_servorelayevents(self):
self.mavproxy.send("relay set 0 0\n")
off = self.get_parameter("SIM_PIN_MASK")
self.mavproxy.send("relay set 0 1\n")
on = self.get_parameter("SIM_PIN_MASK")
if on == off:
self.progress("Pin mask unchanged after relay command")
return False
self.progress("Pin mask changed after relay command")
return True
def autotest(self):
"""Autotest APMrover2 in SITL."""
if not self.hasInit:
self.init()
self.progress("Started simulator")
failed = False
e = 'None'
try:
self.progress("Waiting for a heartbeat with mavlink protocol %s" %
self.mav.WIRE_PROTOCOL_VERSION)
self.mav.wait_heartbeat()
self.progress("Setting up RC parameters")
self.set_rc_default()
self.set_rc(8, 1800)
self.progress("Waiting for GPS fix")
self.mav.wait_gps_fix()
self.homeloc = self.mav.location()
self.progress("Home location: %s" % self.homeloc)
self.mavproxy.send('switch 6\n') # Manual mode
self.wait_mode('MANUAL')
self.progress("Waiting until ready to arm")
self.wait_ready_to_arm()
if not self.arm_vehicle():
self.progress("Failed to ARM")
failed = True
self.progress("#")
self.progress("########## Drive an RTL mission ##########")
self.progress("#")
# Drive a square in learning mode
# self.reset_and_arm()
if not self.drive_rtl_mission():
self.progress("Failed RTL mission")
failed = True
self.progress("#")
self.progress("########## Drive a square and save WPs with CH7"
"switch ##########")
self.progress("#")
# Drive a square in learning mode
# self.reset_and_arm()
if not self.drive_square():
self.progress("Failed drive square")
failed = True
if not self.drive_mission(os.path.join(testdir, "rover1.txt")):
self.progress("Failed mission")
failed = True
if not self.drive_brake():
self.progress("Failed brake")
failed = True
if not self.disarm_vehicle():
self.progress("Failed to DISARM")
failed = True
# do not move this to be the first test. MAVProxy's dedupe
# function may bite you.
self.progress("Getting banner")
if not self.do_get_banner():
self.progress("FAILED: get banner")
failed = True
self.progress("Getting autopilot capabilities")
if not self.do_get_autopilot_capabilities():
self.progress("FAILED: get capabilities")
failed = True
self.progress("Setting mode via MAV_COMMAND_DO_SET_MODE")
if not self.do_set_mode_via_command_long():
failed = True
# test ServoRelayEvents:
self.progress("########## Test ServoRelayEvents ##########")
if not self.test_servorelayevents():
self.progress("Failed servo relay events")
failed = True
# Throttle Failsafe
self.progress("#")
self.progress("########## Test Failsafe ##########")
self.progress("#")
# self.reset_and_arm()
# if not self.test_throttle_failsafe(HOME, distance_min=4):
# self.progress("Throttle failsafe failed")
# success = False
if not self.log_download(self.buildlogs_path("APMrover2-log.bin")):
self.progress("Failed log download")
failed = True
# if not drive_left_circuit(self):
# self.progress("Failed left circuit")
# failed = True
# if not drive_RTL(self):
# self.progress("Failed RTL")
# failed = True
except pexpect.TIMEOUT as e:
self.progress("Failed with timeout")
failed = True
self.close()
if failed:
self.progress("FAILED: %s" % e)
return False
return True
|
gpl-3.0
|
ramccor/lector
|
lector/utils/settings.py
|
6
|
1767
|
# -*- coding: utf-8 -*-
""" Functions to access settings
"""
#pylint: disable-msg=C0103
from PyQt4.QtCore import QSettings, QVariant, QDir
from PyQt4.QtGui import QFont
from PyQt4.QtGui import QDesktopServices
def set(name, value):
""" Set setting
"""
settings = QSettings("Davide Setti", "Lector")
settings.setValue(name, QVariant(value))
def get(name):
""" Retrieve setting and convert result
"""
home_dir = QDesktopServices.storageLocation(QDesktopServices.HomeLocation)
stdPwlDict = unicode(home_dir + QDir.separator() + "my-dict.txt")
settings = QSettings("Davide Setti", "Lector")
if name == 'scanner:height':
return settings.value(name, QVariant(297)).toInt()[0]
elif name == 'scanner:width':
return settings.value(name, QVariant(210)).toInt()[0]
elif name == 'scanner:resolution':
return settings.value(name, QVariant(300)).toInt()[0]
elif name == 'scanner:mode':
return str(settings.value(name, QVariant("Color")).toString())
elif name == 'scanner:device':
return str(settings.value(name).toString())
elif name == 'editor:font':
return settings.value(name, QFont(QFont("Courier New", 10)))
elif name == 'editor:symbols':
return settings.value(name).toString()
elif name in ('editor:clear', 'editor:spell', 'editor:whiteSpace',
'spellchecker:pwlLang',):
return str(settings.value(name, "true").toString()).lower() == "true"
elif name in ('log:errors',):
return str(settings.value(name, "false").toString()).lower() == "true"
elif name == 'spellchecker:pwlDict':
return str(settings.value(name, stdPwlDict).toString())
else:
return str(settings.value(name).toString())
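# Illustrative usage (added for clarity; not part of the original module):
#   set('scanner:resolution', 600)
#   dpi = get('scanner:resolution')   # -> 600 as an int
#   mode = get('scanner:mode')        # -> "Color" unless overridden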
|
gpl-2.0
|
PinguinoIDE/pinguino-ide
|
pinguino/qtgui/ide/tools/paths.py
|
1
|
5277
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#import os
from PySide2 import QtCore
## Python3 compatibility
#if os.getenv("PINGUINO_PYTHON") is "3":
##Python3
#from configparser import RawConfigParser
#else:
##Python2
#from ConfigParser import RawConfigParser
from ..methods.dialogs import Dialogs
########################################################################
class Paths(object):
def __init__(self):
""""""
self.load_paths()
self.connect(self.main.lineEdit_path_sdcc_bin, QtCore.SIGNAL("editingFinished()"), self.save_paths)
self.connect(self.main.lineEdit_path_gcc_bin, QtCore.SIGNAL("editingFinished()"), self.save_paths)
self.connect(self.main.lineEdit_path_xc8_bin, QtCore.SIGNAL("editingFinished()"), self.save_paths)
self.connect(self.main.lineEdit_path_8_libs, QtCore.SIGNAL("editingFinished()"), self.save_paths)
self.connect(self.main.lineEdit_path_32_libs, QtCore.SIGNAL("editingFinished()"), self.save_paths)
self.connect(self.main.pushButton_dir_sdcc, QtCore.SIGNAL("clicked()"), lambda :self.open_path_for(self.main.lineEdit_path_sdcc_bin))
self.connect(self.main.pushButton_dir_gcc, QtCore.SIGNAL("clicked()"), lambda :self.open_path_for(self.main.lineEdit_path_gcc_bin))
self.connect(self.main.pushButton_dir_xc8, QtCore.SIGNAL("clicked()"), lambda :self.open_path_for(self.main.lineEdit_path_xc8_bin))
self.connect(self.main.pushButton_dir_8bit, QtCore.SIGNAL("clicked()"), lambda :self.open_path_for(self.main.lineEdit_path_8_libs))
self.connect(self.main.pushButton_dir_32bit, QtCore.SIGNAL("clicked()"), lambda :self.open_path_for(self.main.lineEdit_path_32_libs))
self.connect(self.main.pushButton_dir_libs, QtCore.SIGNAL("clicked()"), lambda :self.open_path_for(self.main.lineEdit_path_custom_libs))
self.connect(self.main.pushButton_dir_mplab, QtCore.SIGNAL("clicked()"), lambda :self.open_path_for(self.main.lineEdit_path_mplab))
self.connect(self.main.checkBox_paths_default, QtCore.SIGNAL("toggled(bool)"), self.set_default_values)
#----------------------------------------------------------------------
def open_path_for(self, lineedit):
""""""
dir_path = Dialogs.set_open_dir(self)
if dir_path:
lineedit.setText(dir_path)
self.save_paths()
#----------------------------------------------------------------------
def save_paths(self):
""""""
sdcc_bin = self.main.lineEdit_path_sdcc_bin.text()
gcc_bin = self.main.lineEdit_path_gcc_bin.text()
xc8_bin = self.main.lineEdit_path_xc8_bin.text()
p8libs = self.main.lineEdit_path_8_libs.text()
p32libs = self.main.lineEdit_path_32_libs.text()
#self.configIDE.set("Paths", "sdcc_bin", sdcc_bin)
#self.configIDE.set("Paths", "gcc_bin", gcc_bin)
#self.configIDE.set("Paths", "xc8_bin", xc8_bin)
#self.configIDE.set("Paths", "pinguino_8_libs", p8libs)
#self.configIDE.set("Paths", "pinguino_32_libs", p32libs)
self.configIDE.set("PathsCustom", "sdcc_bin", sdcc_bin)
self.configIDE.set("PathsCustom", "gcc_bin", gcc_bin)
self.configIDE.set("PathsCustom", "xc8_bin", xc8_bin)
self.configIDE.set("PathsCustom", "pinguino_8_libs", p8libs)
self.configIDE.set("PathsCustom", "pinguino_32_libs", p32libs)
self.configIDE.save_config()
#----------------------------------------------------------------------
def load_paths(self, section=None):
""""""
self.configIDE.load_config()
if section is None:
section = self.configIDE.config("Features", "pathstouse", "Paths")
self.main.checkBox_paths_default.setChecked(section=="Paths")
def short(section, option):
if section == "Paths":
return self.configIDE.get(section, option)
elif section == "PathsCustom":
return self.configIDE.config(section, option, self.configIDE.get("Paths", option))
sdcc_bin = short(section, "sdcc_bin")
gcc_bin = short(section, "gcc_bin")
xc8_bin = short(section, "xc8_bin")
p8libs = short(section, "pinguino_8_libs")
p32libs = short(section, "pinguino_32_libs")
self.main.lineEdit_path_sdcc_bin.setText(sdcc_bin)
self.main.lineEdit_path_gcc_bin.setText(gcc_bin)
self.main.lineEdit_path_xc8_bin.setText(xc8_bin)
self.main.lineEdit_path_8_libs.setText(p8libs)
self.main.lineEdit_path_32_libs.setText(p32libs)
#----------------------------------------------------------------------
def set_default_values(self, default):
""""""
self.configIDE.load_config()
if default:
self.load_paths(section="Paths")
self.configIDE.set("Features", "pathstouse", "Paths")
else:
self.load_paths(section="PathsCustom")
self.configIDE.set("Features", "pathstouse", "PathsCustom")
self.main.groupBox_compilers.setEnabled(not default)
self.main.groupBox_libraries.setEnabled(not default)
self.main.groupBox_icsp.setEnabled(not default)
self.configIDE.save_config()
|
gpl-2.0
|
lake-lerna/hydra
|
src/main/python/hydra/zmqtest/zmq_sub.py
|
4
|
5975
|
__author__ = 'sushil, abdullahS'
import zmq
import logging
import os
import time
import psutil
import json
from hydra.lib import util
from hydra.lib.hdaemon import HDaemonRepSrv
from hydra.lib.childmgr import ChildManager
from pprint import pformat
l = util.createlogger('HSub', logging.INFO)
class HDZmqsRepSrv(HDaemonRepSrv):
def __init__(self, port):
self.msg_cnt = 0 # message count (kept on the instance rather than in a module-level global)
self.recv_rate = 0
self.reconnect_cnt = 0
self.reconnect_rate = 0
HDaemonRepSrv.__init__(self, port)
self.register_fn('getstats', self.get_stats)
self.register_fn('resetstats', self.reset_stats)
self.register_fn('updateconfig', self.update_config)
self.reset_stats()
def get_stats(self):
process = psutil.Process()
self.run_data['stats']['msg_cnt'] = self.msg_cnt
self.run_data['stats']['net:end'] = json.dumps(psutil.net_io_counters())
self.run_data['stats']['cpu:end'] = json.dumps(process.cpu_times())
self.run_data['stats']['mem:end'] = json.dumps(process.memory_info())
self.run_data['stats']['reconnect_cnt'] = self.reconnect_cnt
self.run_data['stats']['rate'] = self.run_data['stats']['msg_cnt'] / (
self.run_data['last_msg_time_r'] - self.run_data['first_msg_time_r'])
return ('ok', self.run_data['stats'])
def reset_stats(self):
l.info("RESETTING SUB STATS")
process = psutil.Process()
self.run_data = {'stats': {}}
self.run_data['stats'] = {'msg_cnt': 0, 'first_msg_time': 0, 'last_msg_time': 0}
self.run_data['stats']['net:start'] = json.dumps(psutil.net_io_counters())
self.run_data['stats']['cpu:start'] = json.dumps(process.cpu_times())
self.run_data['stats']['mem:start'] = json.dumps(process.memory_info())
self.run_data['first_msg_time_r'] = 0
self.run_data['last_msg_time_r'] = 1
self.msg_cnt = 0
self.reconnect_cnt = 0
return ('ok', 'stats reset')
def update_config(self, recv_rate, reconnect_rate):
self.recv_rate = float(recv_rate)
self.reconnect_rate = reconnect_rate
l.info("Updating SUB Metrics recv_rate = " + pformat(self.recv_rate) +
" reconnect_rate = " + pformat(self.reconnect_rate))
return ('ok', None)
def run10_inst(cmd):
pwd = os.getcwd()
l.info("CWD = " + pformat(pwd))
cmgr = ChildManager()
cwd = None
for idx in range(0, 10):
myenv = os.environ.copy()
myenv["PORT0"] = myenv["PORT" + str(idx)]
l.info("Launch%d:" % idx + " cwd=" + " CMD=" + pformat(cmd) + " PORT0=" + str(myenv["PORT0"]))
cmgr.add_child('p' + str(idx), cmd, cwd, myenv)
cmgr.launch_children()
cmgr.wait()
def run10cpp(argv):
cmd = './zmq_sub'.split(' ') + argv[1:]
run10_inst(cmd)
def run10(argv):
cmd = './hydra hydra.zmqtest.zmq_sub.run'.split(' ') + argv[1:]
run10_inst(cmd)
def run(argv):
pub_port = ""
pub_ip = ""
l.info("JOB RUN : " + pformat(argv))
if len(argv) > 2:
pub_ip = argv[1]
pub_port = argv[2]
int(pub_port)
if (not pub_ip or (not pub_port)):
raise Exception("zmq-sub needs a pub server to subscribe to, pub_ip/pub_port"
" can not be empty pub_ip[%s], pub_port[%s]" % (pub_ip, pub_port))
# Initalize HDaemonRepSrv
sub_rep_port = os.environ.get('PORT0')
hd = HDZmqsRepSrv(sub_rep_port)
hd.reset_stats()
hd.run()
# Socket to SUB to PUB server
context = zmq.Context()
hd.msg_cnt = 0
start_idx = 0
while True:
socket = context.socket(zmq.SUB)
topicfilter = ""
l.info("SUB client connecting to PUB server at [%s:%s]" % (pub_ip, pub_port))
socket.connect("tcp://%s:%s" % (pub_ip, pub_port))
l.info("SUB client successfully connected to PUB server at [%s:%s]" % (pub_ip, pub_port))
socket.setsockopt(zmq.SUBSCRIBE, topicfilter)
while True:
string = socket.recv()
if hd.msg_cnt == 0:
hd.run_data['first_msg_time_r'] = time.time()
hd.run_data['stats']['first_msg_time'] = json.dumps(hd.run_data['first_msg_time_r'])
l.info("Setting the first_msg_time to = " + pformat(hd.run_data['stats']['first_msg_time']))
hd.msg_cnt = hd.msg_cnt + 1
index, messagedata = string.split()
iidx = int(index)
if (start_idx != iidx):
l.info("Missing FROM IDX = %d :: GOT MESSAGE %s %d", start_idx, index, iidx)
start_idx = iidx
start_idx += 1
hd.run_data['last_msg_time_r'] = time.time()
hd.run_data['stats']['last_msg_time'] = json.dumps(hd.run_data['last_msg_time_r'])
# rate limit the receive
if hd.recv_rate != 0:
# Check the rate from beginning of first message to now
duration = float(hd.msg_cnt) / hd.recv_rate
current_duration = time.time() - hd.run_data['first_msg_time_r']
if current_duration < duration:
sleep_time = duration - current_duration
if sleep_time > 1:
sleep_time = 1
time.sleep(sleep_time)
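# Worked example (comment added for clarity; not in the original source):
# with recv_rate=1000 msg/s and msg_cnt=500, duration is 0.5 s; if only
# 0.3 s have elapsed since the first message, the loop sleeps for 0.2 s,
# and any computed sleep longer than 1 s is capped at 1 s.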
# re-connect
if hd.reconnect_rate != 0:
current_duration = time.time() - hd.run_data['first_msg_time_r']
num_reconnects = int(hd.reconnect_rate * current_duration)
if (num_reconnects > hd.reconnect_cnt):
l.info("expected Reconnect = " + pformat(num_reconnects) + " reconnect_cnt = " +
pformat(hd.reconnect_cnt))
break
hd.reconnect_cnt += 1
l.info('Closing Socket. Will try to reconnect. Current msg cnt=' + pformat(hd.msg_cnt))
socket.close()
|
apache-2.0
|
Peekmo/three.js
|
utils/exporters/blender/addons/io_three/exporter/api/light.py
|
195
|
1099
|
from bpy import data, types
from .. import utilities, logger
def _lamp(func):
"""
:param func:
"""
def inner(name, *args, **kwargs):
"""
:param name:
:param *args:
:param **kwargs:
"""
if isinstance(name, types.Lamp):
lamp = name
else:
lamp = data.lamps[name]
return func(lamp, *args, **kwargs)
return inner
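# Illustrative note (added; not part of the original exporter): the decorator
# lets callers pass either a lamp name or a Lamp datablock, e.g. (assuming a
# lamp named "Lamp" exists in the scene):
#   color("Lamp")              # resolved via data.lamps["Lamp"]
#   color(data.lamps["Lamp"])  # used directly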
@_lamp
def angle(lamp):
"""
:param lamp:
:rtype: float
"""
logger.debug("light.angle(%s)", lamp)
return lamp.spot_size
@_lamp
def color(lamp):
"""
:param lamp:
:rtype: int
"""
logger.debug("light.color(%s)", lamp)
colour = (lamp.color.r, lamp.color.g, lamp.color.b)
return utilities.rgb2int(colour)
@_lamp
def distance(lamp):
"""
:param lamp:
:rtype: float
"""
logger.debug("light.distance(%s)", lamp)
return lamp.distance
@_lamp
def intensity(lamp):
"""
:param lamp:
:rtype: float
"""
logger.debug("light.intensity(%s)", lamp)
return round(lamp.energy, 2)
|
mit
|
motine/Ohouse
|
src/vendor/worker/workerdb.py
|
4
|
2398
|
import os.path
from datetime import datetime
from sqlalchemy import Table, Column, MetaData, ForeignKey, PickleType, DateTime, String, Integer, Text, create_engine, select, and_, or_, not_, event
from sqlalchemy.orm import scoped_session, sessionmaker, mapper
from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound
from sqlalchemy.ext.declarative import declarative_base
import amsoil.core.pluginmanager as pm
import amsoil.core.log
logger=amsoil.core.log.getLogger('worker')
from amsoil.config import expand_amsoil_path
WORKERDB_PATH = expand_amsoil_path(pm.getService('config').get('worker.dbpath'))
WORKERDB_ENGINE = "sqlite:///%s" % (WORKERDB_PATH,)
# initialize sqlalchemy
db_engine = create_engine(WORKERDB_ENGINE, pool_recycle=6000) # please see the wiki for more info
db_session_factory = sessionmaker(autoflush=True, bind=db_engine, expire_on_commit=False) # the class which can create sessions (factory pattern)
db_session = scoped_session(db_session_factory) # still a session creator, but it will create _one_ session per thread and delegate all method calls to it
# we could limit the session's scope (lifetime) to one request, but for this plugin it is not necessary
Base = declarative_base() # get the base class for the ORM, which includes the metadata object (collection of table descriptions)
class JobDBEntry(Base):
__tablename__ = 'worker_jobs'
id = Column(Integer, primary_key=True)
service_name = Column(String)
callable_attr_str = Column(String)
params = Column(PickleType)
recurring_interval = Column(Integer)
next_execution = Column(DateTime)
Base.metadata.create_all(db_engine) # create the tables if they are not there yet
def getAllJobs():
"""Do not change the values of the records retrieved with this function; you might accidentally change them in the database too, unless you call updateJob."""
records = db_session.query(JobDBEntry).all()
return records
def addJob(job_db_entry):
"""Adds the given job entry as a new record; the id is cleared so the database always assigns a fresh one."""
job_db_entry.id = None
db_session.add(job_db_entry)
db_session.commit()
def commit():
"""Commits the changes to objects in the session (e.g. a changed attribute in an object)."""
db_session.commit()
def delJob(job_db_entry):
db_session.delete(job_db_entry)
db_session.commit()
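# Illustrative usage sketch (added for clarity; not part of the original module):
#   entry = JobDBEntry()
#   entry.service_name = 'worker'           # hypothetical service name
#   entry.callable_attr_str = 'do_work'     # hypothetical callable attribute
#   entry.params = {'x': 1}
#   entry.recurring_interval = 60           # assumed to be seconds between runs
#   entry.next_execution = datetime.utcnow()
#   addJob(entry)                           # commits a new row; the id is assigned by the DB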
|
bsd-3-clause
|
TissueMAPS/TmDeploy
|
tmdeploy/config.py
|
2
|
25162
|
# TmDeploy - Automated deployment of TissueMAPS in the cloud.
# Copyright (C) 2016 Markus D. Herrmann, University of Zurich
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import logging
from abc import ABCMeta
from abc import abstractproperty
try:
from ConfigParser import SafeConfigParser
except ImportError:
import configparser
SafeConfigParser = configparser.ConfigParser
from Crypto.PublicKey import RSA
from tmdeploy.errors import SetupDescriptionError, SetupEnvironmentError
from tmdeploy.utils import read_yaml_file, to_json
CONFIG_DIR = os.path.expanduser('~/.tmaps/setup')
logger = logging.getLogger(__name__)
class _SetupSection(object):
'''Abstract base class for a section of the `TissueMAPS` setup description.
'''
__meta__ = ABCMeta
def __init__(self, description):
if not isinstance(description, dict):
raise SetupDescriptionError(
'Section "{0}" of setup description must be a mapping.'.format(
self._section_name
)
)
possible_attrs = set([
attr for attr in dir(self)
if not attr.startswith('_') and
isinstance(getattr(self.__class__, attr), property)
])
if hasattr(self.__class__, '_OPTIONAL_ATTRS'):
required_attrs = possible_attrs - self.__class__._OPTIONAL_ATTRS
else:
required_attrs = possible_attrs
for k, v in description.items():
if k not in possible_attrs:
raise SetupDescriptionError(
'Key "{0}" is not supported for section "{1}".'.format(
k, self._section_name
)
)
setattr(self, k, v)
for k in required_attrs:
if k not in description:
raise SetupDescriptionError(
'Key "{0}" is required for section "{1}".'.format(
k, self._section_name
)
)
if not os.path.exists(CONFIG_DIR):
os.makedirs(CONFIG_DIR)
@abstractproperty
def _section_name(self):
pass
def _check_value_type(self, value, name, required_type):
type_translation = {
int: 'a number', str: 'a string',
dict: 'a mapping', list: 'an array'
}
if not isinstance(value, required_type):
raise SetupDescriptionError(
'Value of "{0}" in section "{1}" must be {2}.'.format(
name, self._section_name, type_translation[required_type]
)
)
def _check_subsection_type(self, value, name, required_type, index=None):
if index is None:
message = 'Subsection "{0}" in setup'.format(name)
mapping = value
else:
message = 'Item #{0} of subsection "{1}" in setup'.format(
index, name
)
mapping = value[index]
type_translation = {dict: 'a mapping', list: 'an array'}
if not isinstance(mapping, required_type):
raise SetupDescriptionError(
'{0} configuration must be {1}.'.format(
message, type_translation[required_type]
)
)
def to_dict(self):
'''Represents the setup section in form of key-value pairs.
Returns
-------
dict
'''
mapping = dict()
for attr in dir(self):
if attr.startswith('_'):
continue
if not isinstance(getattr(self.__class__, attr), property):
continue
try:
value = getattr(self, attr)
except AttributeError:
if attr in self._OPTIONAL_ATTRS:
continue
else:
raise AttributeError(
'Required attribute "{0}" does not exist on '
'instance of type "{1}".'.format(
attr, self.__class__.__name__
)
)
mapping[attr] = value
return mapping
def __repr__(self):
return '{0} setup section:\n{1}'.format(
self._section_name, to_json(self.to_dict())
)
class CloudSection(_SetupSection):
'''Class for the section of the `TissueMAPS` setup description that provides
information about the cloud infrastructure where the application should be
deployed.
'''
_OPTIONAL_ATTRS = {
'ip_range', 'network', 'subnetwork', 'proxy',
'key_name', 'key_file_public', 'key_file_private'
}
def __init__(self, description):
self.ip_range = '10.65.4.0/24'
self.network = 'tmaps'
self.key_name = 'tmaps'
self.proxy = ''
super(CloudSection, self).__init__(description)
@property
def _section_name(self):
return 'cloud'
@property
def provider(self):
'''str: name of the cloud provider (options: ``{"os", "ec2", "gce"}``)
'''
return self._provider
@provider.setter
def provider(self, value):
self._check_value_type(value, 'provider', str)
options = {'os', 'gce', 'ec2'}
if value not in options:
raise SetupDescriptionError(
'Cloud provider must be one of the following: "{0}"'.format(
'", "'.join(options)
)
)
if value == 'os':
required_env_vars = {
'OS_AUTH_URL',
'OS_USERNAME',
'OS_PASSWORD',
'OS_PROJECT_NAME'
}
elif value == 'gce':
required_env_vars = {
'GCE_EMAIL',
'GCE_PROJECT',
'GCE_CREDENTIALS_FILE_PATH'
}
elif value == 'ec2':
required_env_vars = {
'AWS_ACCESS_KEY_ID',
'AWS_SECRET_ACCESS_KEY'
}
for var in required_env_vars:
if var not in os.environ:
raise SetupEnvironmentError(
'Environment variable "{0}" must be set '
'for "{1}" provider.'.format(var, value)
)
self._provider = value
@property
def network(self):
'''str: name of the network that should be used (default: ``"tmaps"``)
'''
return self._network
@network.setter
def network(self, value):
self._check_value_type(value, 'network', str)
self._network = value
@property
def subnetwork(self):
        '''str: name of the subnetwork that should be used
(defaults to ``"{network}-subnet"``)
'''
return getattr(
self, '_subnetwork', '{network}-subnet'.format(network=self.network)
)
@subnetwork.setter
def subnetwork(self, value):
self._check_value_type(value, 'subnetwork', str)
self._subnetwork = value
@property
def ip_range(self):
'''str: range of allowed IPv4 addresses for the private network in
`Classless Inter-Domain Routing (CIDR) <https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing>`_
        notation (default: ``"10.65.4.0/24"``)
'''
return self._ip_range
@ip_range.setter
def ip_range(self, value):
self._check_value_type(value, 'ip_range', str)
r = re.compile(r'^\d+\.\d+\.\d+\.\d+\/\d+$')
if not r.search(value):
raise ValueError(
'Argument "ip_range" must be provided in CIDR notation.'
)
self._ip_range = value
@property
def key_name(self):
'''str: name of the key-pair used to connect to virtual machines
(default: ``"tmaps"``)'''
return self._key_name
@key_name.setter
def key_name(self, value):
self._check_value_type(value, 'key_name', str)
self._key_name = value
@property
def proxy(self):
        '''str: URI and port of an HTTP(S) proxy'''
return self._proxy
@proxy.setter
def proxy(self, value):
self._check_value_type(value, 'proxy', str)
self._proxy = value
@property
def key_file_private(self):
'''str: path to the private key used by Ansible to connect to virtual
machines (by default looks for a file with name
:attr:`key_name <tmdeploy.config.CloudSection.key_name>` in ``~/.ssh``
directory)
'''
if not hasattr(self, '_key_file_private'):
self.key_file_private = '~/.ssh/{key}'.format(key=self.key_name)
return self._key_file_private
@key_file_private.setter
def key_file_private(self, value):
self._check_value_type(value, 'key_file_private', str)
value = os.path.expandvars(os.path.expanduser(value))
if value.endswith('.pub'):
raise SetupDescriptionError(
'Value of "key_file_private" must point to a '
'private key: {0}'.format(value)
)
if not os.path.exists(value):
logger.warn('private key file "%s" does not exist', value)
key_file_public = self.key_file_public
logger.info('create SSH key pair')
logger.warn('SSH key is not protected by a passphrase')
key = RSA.generate(2048)
with open(value, 'w') as f:
os.chmod(value, 0o400)
f.write(key.exportKey('PEM'))
pubkey = key.publickey()
with open(key_file_public, 'w') as f:
f.write(pubkey.exportKey('OpenSSH'))
self._key_file_private = value
@property
def key_file_public(self):
'''str: path to the public key that will be uploaded to the cloud
provider (by default looks for a ``.pub`` file with name
:attr:`key_name <tmdeploy.config.CloudSection.key_name>` in ``~/.ssh``
directory)
'''
if not hasattr(self, '_key_file_public'):
self.key_file_public = '~/.ssh/{key}.pub'.format(key=self.key_name)
return self._key_file_public
@key_file_public.setter
def key_file_public(self, value):
self._check_value_type(value, 'key_file_public', str)
value = os.path.expandvars(os.path.expanduser(value))
if not value.endswith('.pub'):
raise SetupDescriptionError(
'Value of "key_file_public" must point to a '
'public key: {0}'.format(value)
)
if not os.path.exists(value):
logger.warn('public key file "%s" does not exist', value)
self._key_file_public = value
@property
def region(self):
'''str: cloud region (zone)'''
return self._region
@region.setter
def region(self, value):
self._check_value_type(value, 'region', str)
self._region = value
class ArchitectureSection(_SetupSection):
'''Class for the section of the `TissueMAPS` setup description that provides
information about the cluster architecture, i.e. the layout of computational
resources.
'''
def __init__(self, description):
super(ArchitectureSection, self).__init__(description)
@property
def _section_name(self):
return 'grid'
@property
def name(self):
'''str: name of the grid'''
return self._name
@name.setter
def name(self, value):
self._check_value_type(value, 'name', str)
self._name = value
@property
def clusters(self):
'''List[tmdeploy.config.ClusterSection]: clusters that should be set up
'''
return self._clusters
@clusters.setter
def clusters(self, value):
self._clusters = list()
self._check_subsection_type(value, 'clusters', list)
for i, item in enumerate(value):
self._check_subsection_type(value, 'clusters', dict, index=i)
self._clusters.append(ClusterSection(item))
class ClusterSection(_SetupSection):
'''Class for the section of the `TissueMAPS` setup description that provides
information about an individual cluster of virtual machine instances.
'''
def __init__(self, description):
super(ClusterSection, self).__init__(description)
@property
def _section_name(self):
return 'cluster'
@property
def name(self):
'''str: name of the cluster'''
return self._name
@name.setter
def name(self, value):
self._check_value_type(value, 'name', str)
self._name = value
@property
def node_types(self):
'''List[tmdeploy.config.ClusterNodeTypeSection]: different types of
virtual machines the cluster is comprised of
'''
return self._node_types
@node_types.setter
def node_types(self, value):
self._node_types = list()
self._check_subsection_type(value, 'node_types', list)
for i, item in enumerate(value):
self._check_subsection_type(value, 'node_types', dict, index=i)
self._node_types.append(ClusterNodeTypeSection(item))
class ClusterNodeTypeSection(_SetupSection):
_OPTIONAL_ATTRS = {'vars'}
'''Class for the section of the `TissueMAPS` setup description that provides
information about a particular set of virtual machine instances belonging
to the same cluster (e.g. master or worker nodes).
'''
def __init__(self, description):
super(ClusterNodeTypeSection, self).__init__(description)
@property
def _section_name(self):
return 'node_types'
@property
def name(self):
'''str: name of the cluster node type'''
return self._name
@name.setter
def name(self, value):
self._check_value_type(value, 'name', str)
self._name = value
@property
def count(self):
'''int: number of virtual machines'''
return self._count
@count.setter
def count(self, value):
self._check_value_type(value, 'count', int)
self._count = value
@property
def instance(self):
'''AnsibleHostVariableSection: variables required for managing the
virtual machine instances via Ansible (optional)
'''
return self._instance
@instance.setter
def instance(self, value):
self._check_value_type(value, 'instance', dict)
self._instance = AnsibleHostVariableSection(value)
@property
def groups(self):
'''List[tmdeploy.config.AnsibleGroupSection]: Ansible host groups
        that should be used for deployment of virtual machines belonging
to the cluster node types
'''
return self._groups
@groups.setter
def groups(self, value):
self._groups = list()
self._check_subsection_type(value, 'groups', list)
for i, item in enumerate(value):
self._check_subsection_type(value, 'groups', dict, index=i)
self._groups.append(AnsibleGroupSection(item))
@property
def vars(self):
'''dict: mapping of Ansible variable key-value pairs that should be set
for all :attr:`groups <tmdeploy.config.ClusterNodeTypeSection.groups>`
of the cluster node type
'''
return getattr(self, '_vars', None)
@vars.setter
def vars(self, value):
if value is None:
self._vars = value
else:
self._check_value_type(value, 'vars', dict)
self._vars = value
class AnsibleGroupSection(_SetupSection):
_OPTIONAL_ATTRS = {'vars'}
'''Class for the section of the `TissueMAPS` setup description that provides
information about an Ansible host group, corresponding to a set of
virtual machine instances that get configured the same way.
'''
def __init__(self, description):
super(AnsibleGroupSection, self).__init__(description)
@property
def _section_name(self):
return 'groups'
@property
def name(self):
'''str: name of the Ansible group'''
return self._name
@name.setter
def name(self, value):
self._check_value_type(value, 'name', str)
self._name = value
@property
def vars(self):
'''dict: mapping of Ansible variable key-value pairs that should be
only set for the group
'''
return getattr(self, '_vars', None)
@vars.setter
def vars(self, value):
if value is None:
self._vars = value
else:
self._check_value_type(value, 'vars', dict)
self._vars = value
class AnsibleHostVariableSection(_SetupSection):
'''Class for the section of the `TissueMAPS` setup description that provides
variables that determine how virtual machine instances belonging to the
given cluster node type are created.
'''
_OPTIONAL_ATTRS = {
'disk_size', 'volume_size', 'volume_mountpoint',
'assign_public_ip', 'tags', 'ssh_user', 'tm_user', 'tm_group',
'db_user', 'db_group', 'web_user', 'web_group'
}
def __init__(self, description):
self.volume_mountpoint = '/storage'
self.ssh_user = 'ubuntu'
self.tm_user = 'tissuemaps'
self._tm_group = None
self.db_user = 'postgres'
self._db_group = None
self.web_user = 'nginx'
self._web_group = None
super(AnsibleHostVariableSection, self).__init__(description)
@property
def _section_name(self):
return 'vars'
@property
def disk_size(self):
'''int: size of the boot disk of the virtual machine in GB (optional)
'''
return self._disk_size
@disk_size.setter
def disk_size(self, value):
self._check_value_type(value, 'disk_size', int)
self._disk_size = value
@property
def volume_size(self):
'''int: size of an additional storage volume in GB (optional)'''
return self._volume_size
@volume_size.setter
def volume_size(self, value):
self._check_value_type(value, 'volume_size', int)
self._volume_size = value
@property
def volume_mountpoint(self):
'''str: mountpoint of an additional storage volume
(default: ``"storage"``)
'''
return self._volume_mountpoint
@volume_mountpoint.setter
def volume_mountpoint(self, value):
self._check_value_type(value, 'volume_mountpoint', str)
self._volume_mountpoint = str(value)
@property
def ssh_user(self):
'''str: user for establishing SSH connection to remote host
(default: ``"ubuntu"``)
'''
return self._ssh_user
@ssh_user.setter
def ssh_user(self, value):
self._check_value_type(value, 'ssh_user', str)
self._ssh_user = str(value)
@property
def tm_user(self):
'''str: TissueMAPS system user (default: ``"tissuemaps"``)
'''
return self._tm_user
@tm_user.setter
def tm_user(self, value):
self._check_value_type(value, 'tm_user', str)
self._tm_user = str(value)
@property
def tm_group(self):
'''str: TissueMAPS system group (defaults to
:attr:`tm_user <tmdeploy.config.AnsibleHostVariableSection.tm_user>`)
'''
if self._tm_group is None:
self._tm_group = self.tm_user
return self._tm_group
@tm_group.setter
def tm_group(self, value):
self._check_value_type(value, 'tm_group', str)
self._tm_group = str(value)
@property
def db_user(self):
'''str: database system user (default: ``"postgres"``)
'''
return self._db_user
@db_user.setter
def db_user(self, value):
self._check_value_type(value, 'db_user', str)
self._db_user = str(value)
@property
def db_group(self):
'''str: database system group (defaults to
:attr:`db_user <tmdeploy.config.AnsibleHostVariableSection.db_user>`)
'''
if self._db_group is None:
self._db_group = self.db_user
return self._db_group
@db_group.setter
def db_group(self, value):
self._check_value_type(value, 'db_group', str)
self._db_group = str(value)
@property
def web_user(self):
        '''str: web system user (default: ``"nginx"``)
'''
return self._web_user
@web_user.setter
def web_user(self, value):
self._check_value_type(value, 'web_user', str)
self._web_user = str(value)
@property
def web_group(self):
'''str: web system group (defaults to
:attr:`web_user <tmdeploy.config.AnsibleHostVariableSection.web_user>`)
'''
if self._web_group is None:
self._web_group = self.web_user
return self._web_group
@web_group.setter
def web_group(self, value):
self._check_value_type(value, 'web_group', str)
self._web_group = str(value)
@property
def image(self):
'''str: name or ID of the image from which the virtual machine should
be booted
Note
----
The image must have the Ubuntu (14.04) operating system installed.
'''
return self._image
@image.setter
def image(self, value):
self._check_value_type(value, 'image', str)
self._image = value
@property
def assign_public_ip(self):
'''bool: whether a public IP address should be assigned to the virtual
machine (default: ``True``)
'''
return self._assign_public_ip
@assign_public_ip.setter
def assign_public_ip(self, value):
self._check_value_type(value, 'assign_public_ip', bool)
self._assign_public_ip = value
@property
def flavor(self):
'''str: name or ID of the flavor (machine type) which the virtual
machine should have
'''
return self._flavor
@flavor.setter
def flavor(self, value):
self._check_value_type(value, 'flavor', str)
self._flavor = value
@property
def tags(self):
'''List[str]: tags that should be added to instances
(options: ``{"web", "compute", "storage"}``)
Note
----
Will only be used for assigning security groups (firewall rules) to
tagged instances.
'''
return self._tags
@tags.setter
def tags(self, value):
self._check_value_type(value, 'tags', list)
supported_tags = {'web', 'compute', 'storage'}
for t in value:
if t not in supported_tags:
raise ValueError(
'Tag "{0}" is not supported! Supported are: "{1}"'.format(
t, '", "'.join(supported_tags)
)
)
self._tags = value
class Setup(object):
'''Description of the `TissueMAPS` setup.'''
def __init__(self, setup_file):
setup_file = os.path.expanduser(os.path.expandvars(setup_file))
description = self._load_description(setup_file)
for k, v in description.items():
if k not in dir(self):
raise SetupDescriptionError(
'Key "{0}" is not supported for setup description.'.format(k)
)
setattr(self, k, v)
for k in {'cloud', 'architecture'}:
if k not in description:
raise SetupDescriptionError(
'Setup description requires key "{0}"'.format(k)
)
def _load_description(self, description_file):
if not os.path.exists(description_file):
raise OSError(
'Setup file "{0}" does not exist!'.format(description_file)
)
description = read_yaml_file(description_file)
if not isinstance(description, dict):
raise SetupDescriptionError(
'Setup description must be a mapping.'
)
return description
@property
def cloud(self):
'''tmdeploy.config.CloudSection: cloud configuration'''
return self._cloud
@cloud.setter
def cloud(self, value):
self._cloud = CloudSection(value)
@property
def architecture(self):
'''tmdeploy.config.ArchitectureSection: cluster architecture'''
return self._architecture
@architecture.setter
def architecture(self, value):
self._architecture = ArchitectureSection(value)
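# Illustrative example (not part of the original file): a minimal setup
# description that satisfies the required "cloud" and "architecture" keys of
# the Setup class above. All names and values are hypothetical.
#
#   cloud:
#     provider: os
#     region: RegionOne
#   architecture:
#     name: tissuemaps
#     clusters:
#       - name: main
#         node_types:
#           - name: server
#             count: 1
#             instance:
#               image: ubuntu-14.04-server
#               flavor: m1.medium
#             groups:
#               - name: tissuemaps_server
#
# Loading it (assuming the file exists and the provider's environment
# variables are set):
#
#   setup = Setup('~/.tmaps/setup.yml')
#   print(setup.cloud.provider)                                  # -> 'os'
#   print(setup.architecture.clusters[0].node_types[0].count)    # -> 1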
|
gpl-3.0
|
xindus40223115/w17_test
|
man4.py
|
16
|
11453
|
import cherrypy
# This is the definition of the MAN class
'''
# Import the submodule in the application
import programs.cdag30.man as cdag30_man
# Add man.py under the cdag30 module and map the submodule man to its MAN() class
root.cdag30.man = cdag30_man.MAN()
# Once this is configured, the URL
/cdag30/man/assembly
# calls the assembly method of the MAN class in man.py
'''
class MAN(object):
    # Each group uses index to direct the subsequent program flow
@cherrypy.expose
def index(self, *args, **kwargs):
outstring = '''
This is the MAN class in the cdag30 module of the 2014CDA collaborative project.<br /><br />
<!-- Relative links are used here rather than absolute URLs (this is an html comment) -->
<a href="assembly">Run the assembly method of the MAN class</a><br /><br />
Make sure the parts listed below are in the V:/home/lego/man directory and that a blank Creo assembly file is open.<br />
<a href="/static/lego_man.7z">lego_man.7z</a> (right-click to save as a .7z file)<br />
'''
return outstring
@cherrypy.expose
def assembly(self, *args, **kwargs):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<script type="text/javascript" src="/static/weblink/pfcUtils.js"></script>
<script type="text/javascript" src="/static/weblink/wl_header.js"></script>
</head>
<body>
</script><script language="JavaScript">
/* man2.py performs the assembly entirely through function calls */
/* Define a part-assembly function */
// featID is the ID of the first assembled part in the assembly file
// inc is the assembly-order increment for part1; the first part added to the assembly has ID featID+0
// part2 is the file name of the additional part
////////////////////////////////////////////////
// axis_plane_assembly assembly function
////////////////////////////////////////////////
function axis_plane_assembly(session, assembly, transf, featID, inc, part2, axis1, plane1, axis2, plane2){
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/"+part2);
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
var ids = pfcCreate("intseq");
ids.Append(featID+inc);
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
var asmDatums = new Array(axis1, plane1);
var compDatums = new Array(axis2, plane2);
var relation = new Array (pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN, pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
var relationItem = new Array(pfcCreate("pfcModelItemType").ITEM_AXIS, pfcCreate("pfcModelItemType").ITEM_SURFACE);
var constrs = pfcCreate("pfcComponentConstraints");
for (var i = 0; i < 2; i++)
{
var asmItem = subassembly.GetItemByName (relationItem[i], asmDatums [i]);
if (asmItem == void null)
{
interactFlag = true;
continue;
}
var compItem = componentModel.GetItemByName (relationItem[i], compDatums [i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
var MpfcSelect = pfcCreate ("MpfcSelect");
var asmSel = MpfcSelect.CreateModelItemSelection (asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection (compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create (relation[i]);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (true, false);
constrs.Append(constr);
}
asmcomp.SetConstraints(constrs, void null);
}
// End of the axis_plane_assembly() function
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// three_plane_assembly assembles with ALIGN constraints; a featID of 0 indicates an empty assembly file
///////////////////////////////////////////////////////////////////////////////////////////////////////////
function three_plane_assembly(session, assembly, transf, featID, inc, part2, plane1, plane2, plane3, plane4, plane5, plane6){
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/"+part2);
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
var ids = pfcCreate("intseq");
// a featID of 0 indicates an empty assembly file
if (featID != 0){
ids.Append(featID+inc);
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
}else{
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = assembly;
// obtain the first assembled part's first_featID
// get the component ids under the assembly; since there is only one part, take its featID at index 0
var components = assembly.ListFeaturesByType(true, pfcCreate ("pfcFeatureType").FEATTYPE_COMPONENT);
// this featID is the ID of the first part in the assembly, i.e. the lego figure's body
var first_featID = components.Item(0).Id;
}
var constrs = pfcCreate("pfcComponentConstraints");
var asmDatums = new Array(plane1, plane2, plane3);
var compDatums = new Array(plane4, plane5, plane6);
var MpfcSelect = pfcCreate("MpfcSelect");
for (var i = 0; i < 3; i++)
{
var asmItem = subassembly.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, asmDatums[i]);
if (asmItem == void null)
{
interactFlag = true;
continue;
}
var compItem = componentModel.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, compDatums[i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_ALIGN);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (false, false);
constrs.Append(constr);
}
asmcomp.SetConstraints(constrs, void null);
// if featID = 0, return first_featID
if (featID == 0)
return first_featID;
}
// End of the three_plane_assembly() function
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// three_plane_assembly2 assembles with MATE constraints; a featID of 0 indicates an empty assembly file
///////////////////////////////////////////////////////////////////////////////////////////////////////////
function three_plane_assembly2(session, assembly, transf, featID, inc, part2, plane1, plane2, plane3, plane4, plane5, plane6){
var descr = pfcCreate("pfcModelDescriptor").CreateFromFileName ("v:/home/lego/man/"+part2);
var componentModel = session.GetModelFromDescr(descr);
var componentModel = session.RetrieveModel(descr);
if (componentModel != void null)
{
var asmcomp = assembly.AssembleComponent (componentModel, transf);
}
var ids = pfcCreate("intseq");
// a featID of 0 indicates an empty assembly file
if (featID != 0){
ids.Append(featID+inc);
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = subPath.Leaf;
}else{
var subPath = pfcCreate("MpfcAssembly").CreateComponentPath(assembly, ids);
subassembly = assembly;
// obtain the first assembled part's first_featID
// get the component ids under the assembly; since there is only one part, take its featID at index 0
var components = assembly.ListFeaturesByType(true, pfcCreate ("pfcFeatureType").FEATTYPE_COMPONENT);
// this featID is the ID of the first part in the assembly, i.e. the lego figure's body
var first_featID = components.Item(0).Id;
}
var constrs = pfcCreate("pfcComponentConstraints");
var asmDatums = new Array(plane1, plane2, plane3);
var compDatums = new Array(plane4, plane5, plane6);
var MpfcSelect = pfcCreate("MpfcSelect");
for (var i = 0; i < 3; i++)
{
var asmItem = subassembly.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, asmDatums[i]);
if (asmItem == void null)
{
interactFlag = true;
continue;
}
var compItem = componentModel.GetItemByName(pfcCreate("pfcModelItemType").ITEM_SURFACE, compDatums[i]);
if (compItem == void null)
{
interactFlag = true;
continue;
}
var asmSel = MpfcSelect.CreateModelItemSelection(asmItem, subPath);
var compSel = MpfcSelect.CreateModelItemSelection(compItem, void null);
var constr = pfcCreate("pfcComponentConstraint").Create(pfcCreate("pfcComponentConstraintType").ASM_CONSTRAINT_MATE);
constr.AssemblyReference = asmSel;
constr.ComponentReference = compSel;
constr.Attributes = pfcCreate("pfcConstraintAttributes").Create (false, false);
constrs.Append(constr);
}
asmcomp.SetConstraints(constrs, void null);
// if featID = 0, return first_featID
if (featID == 0)
return first_featID;
}
// End of the three_plane_assembly2() function, which assembles mainly with three-plane MATE constraints
//
// if the operating system hosting Creo is not a Windows environment
if (!pfcIsWindows())
// enable the corresponding UniversalXPConnect privilege (the equivalent of ActiveX on Windows)
netscape.security.PrivilegeManager.enablePrivilege("UniversalXPConnect");
// pfcGetProESession() is a function defined in pfcUtils.js; it ensures this JavaScript runs in the embedded browser
var session = pfcGetProESession();
// set the config option so that the built-in placement assumptions of the component assembly workflow are not used
session.SetConfigOption("comp_placement_assumptions","no");
// create the placement matrix for the part; Pro/Web.Link variables cannot be created directly and must be created via pfcCreate()
var identityMatrix = pfcCreate("pfcMatrix3D");
// build the identity placement matrix
for (var x = 0; x < 4; x++)
for (var y = 0; y < 4; y++)
{
if (x == y)
identityMatrix.Set(x, y, 1.0);
else
identityMatrix.Set(x, y, 0.0);
}
// use identityMatrix to create the transf coordinate transformation matrix
var transf = pfcCreate("pfcTransform3D").Create(identityMatrix);
// get the current working directory
var currentDir = session.getCurrentDirectory();
// use the currently opened blank assembly file as the model
var model = session.CurrentModel;
// check that a model exists and that it is an assembly; otherwise throw an error
if (model == void null || model.Type != pfcCreate("pfcModelType").MDL_ASSEMBLY)
throw new Error (0, "Current model is not an assembly.");
// treat this model as the assembly object
var assembly = model;
/////////////////////////////////////////////////////////////////
// start the assembly, performed entirely through function calls
/////////////////////////////////////////////////////////////////
// assemble the figure's HEAD via a function call; the assembly increment order is 5
// the BODY has id featID+0 and is constrained with A_2 and DTM3
// the HEAD is referenced directly by file name and constrained with A_2 and DTM2
axis_plane_assembly(session, assembly, transf, 40, 0,
"LEGO_HEAD.prt", "A_2", "DTM3", "A_2", "DTM2");
// regenerate and repaint the assembly file
assembly.Regenerate (void null);
session.GetModelWindow (assembly).Repaint();
</script>
</body>
</html>
'''
return outstring
|
gpl-3.0
|
maciekcc/tensorflow
|
tensorflow/python/ops/metrics.py
|
72
|
1589
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluation-related metrics.
@@accuracy
@@auc
@@false_negatives
@@false_positives
@@mean
@@mean_absolute_error
@@mean_cosine_distance
@@mean_iou
@@mean_per_class_accuracy
@@mean_relative_error
@@mean_squared_error
@@mean_tensor
@@percentage_below
@@precision
@@precision_at_thresholds
@@recall
@@recall_at_k
@@recall_at_thresholds
@@root_mean_squared_error
@@sensitivity_at_specificity
@@sparse_average_precision_at_k
@@sparse_precision_at_k
@@specificity_at_sensitivity
@@true_negatives
@@true_positives
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.metrics_impl import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = []
remove_undocumented(__name__, _allowed_symbols)
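# Illustrative usage sketch (not part of the original file): the streaming
# metrics exported here each return a (value_tensor, update_op) pair; the
# update_op accumulates statistics per batch and the value tensor reads the
# running result:
#
#   import tensorflow as tf
#   labels = tf.constant([1, 0, 1, 1])
#   predictions = tf.constant([1, 0, 0, 1])
#   accuracy, update_op = tf.metrics.accuracy(labels, predictions)
#   with tf.Session() as sess:
#       sess.run(tf.local_variables_initializer())
#       sess.run(update_op)
#       print(sess.run(accuracy))  # -> 0.75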
|
apache-2.0
|
hrishioa/Aviato
|
kartograph/kartograph/geometry/utils.py
|
4
|
2952
|
"""
geometry utils
"""
def is_clockwise(pts):
""" returns true if a given linear ring is in clockwise order """
s = 0
for i in range(len(pts) - 1):
if 'x' in pts[i]:
x1 = pts[i].x
y1 = pts[i].y
x2 = pts[i + 1].x
y2 = pts[i + 1].y
else:
x1, y1 = pts[i]
x2, y2 = pts[i + 1]
s += (x2 - x1) * (y2 + y1)
return s >= 0
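# Illustrative example (not part of the original file): for the closed
# counter-clockwise unit square [(0, 0), (1, 0), (1, 1), (0, 1), (0, 0)] the
# terms (x2 - x1) * (y2 + y1) sum to -2, so is_clockwise() returns False;
# reversing the point order gives +2 and the function returns True.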
def bbox_to_polygon(bbox):
from shapely.geometry import Polygon
s = bbox
poly = Polygon([(s.left, s.bottom), (s.left, s.top), (s.right, s.top), (s.right, s.bottom)])
return poly
def geom_to_bbox(geom, min_area=0):
from kartograph.geometry import BBox
from shapely.geometry import MultiPolygon
if min_area == 0 or not isinstance(geom, MultiPolygon):
# if no minimum area ratio is set or the geometry
# is not a multipart geometry, we simply use the
# full bbox
minx, miny, maxx, maxy = geom.bounds
return BBox(width=maxx - minx, height=maxy - miny, left=minx, top=miny)
else:
# for multipart geometry we use only the bbox of
# the 'biggest' sub-geometries, depending on min_area
bbox = BBox()
areas = []
bb = []
for polygon in geom.geoms:
areas.append(polygon.area)
max_a = max(areas)
for i in range(len(geom.geoms)):
a = areas[i]
if a < max_a * min_area:
# ignore this sub polygon since it is too small
continue
bb.append(geom.geoms[i].bounds)
for b in bb:
            # bounds are (minx, miny, maxx, maxy); update the bbox with all four corners
            bbox.update((b[0], b[1]))
            bbox.update((b[2], b[1]))
            bbox.update((b[0], b[3]))
            bbox.update((b[2], b[3]))
return bbox
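# Illustrative note (not part of the original file): with min_area=0.2, any
# sub-polygon of a MultiPolygon whose area is below 20% of the largest
# sub-polygon is ignored when computing the bounding box, e.g.
#
#   bbox = geom_to_bbox(country_geometry, min_area=0.2)  # hypothetical geometry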
def join_features(features, props, buf=False):
""" joins polygonal features
"""
from feature import MultiPolygonFeature, MultiLineFeature
from shapely.ops import linemerge
if len(features) == 0:
return features
joined = []
polygons = []
lines = []
for feat in features:
if isinstance(feat, MultiPolygonFeature):
polygons.append(feat.geom)
elif isinstance(feat, MultiLineFeature):
lines.append(feat.geom)
else:
joined.append(feat) # cannot join this
polygons = filter(lambda x: x is not None, polygons)
if len(polygons) > 0:
poly = polygons[0]
if buf is not False:
poly = poly.buffer(buf, 4)
for poly2 in polygons[1:]:
if buf is not False:
poly2 = poly2.buffer(buf, 4)
poly = poly.union(poly2)
joined.append(MultiPolygonFeature(poly, props))
if len(lines) > 0:
rings = []
for line in lines:
geoms = hasattr(line, 'geoms') and line.geoms or [line]
rings += geoms
joined.append(MultiLineFeature(linemerge(rings), props))
return joined
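# Illustrative note (not part of the original file): join_features() unions all
# MultiPolygonFeatures into a single polygonal feature (optionally buffered)
# and merges all MultiLineFeatures into one line feature, e.g.
#
#   merged = join_features(country_features, {'name': 'Europe'})  # hypothetical features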
|
gpl-2.0
|
alexanderfefelov/nav
|
python/nav/smidumps/itw_mibv3.py
|
1
|
895755
|
# python version 1.0 DO NOT EDIT
#
# Generated by smidump version 0.4.8:
#
# smidump -f python IT-WATCHDOGS-MIB-V3
FILENAME = "./itw_mibv3.mib"
MIB = {
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"IT-WATCHDOGS-MIB-V3" : {
"nodetype" : "module",
"language" : "SMIv2",
"organization" :
"""I.T. Watchdogs""",
"contact" :
"""support@itwatchdogs.com""",
"description" :
"""The MIB for I.T. Watchdogs Products""",
"revisions" : (
{
"date" : "2010-10-12 00:00",
"description" :
"""[Revision added by libsmi due to a LAST-UPDATED clause.]""",
},
),
"identity node" : "itwatchdogs",
},
"imports" : (
{"module" : "SNMPv2-TC", "name" : "DisplayString"},
{"module" : "SNMPv2-SMI", "name" : "MODULE-IDENTITY"},
{"module" : "SNMPv2-SMI", "name" : "OBJECT-TYPE"},
{"module" : "SNMPv2-SMI", "name" : "enterprises"},
{"module" : "SNMPv2-SMI", "name" : "Gauge32"},
{"module" : "SNMPv2-SMI", "name" : "NOTIFICATION-TYPE"},
),
"nodes" : {
"itwatchdogs" : {
"nodetype" : "node",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373",
"status" : "current",
}, # node
"owl" : {
"nodetype" : "node",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3",
}, # node
"deviceInfo" : {
"nodetype" : "node",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1",
}, # node
"productTitle" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.1",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Product name""",
}, # scalar
"productVersion" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Product version""",
}, # scalar
"productFriendlyName" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""User-assigned name""",
}, # scalar
"productMacAddress" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Product's unique MAC address""",
}, # scalar
"productUrl" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Product's main URL access point""",
}, # scalar
"alarmTripType" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "9"
},
],
"range" : {
"min" : "0",
"max" : "9"
},
},
},
"access" : "readonly",
"description" :
"""Type of alarm trip. 0 = None, 1 = Low, 2 = High, 3 = Unplugged""",
}, # scalar
"productHardware" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Product's hardware type""",
}, # scalar
"sensorCountsBase" : {
"nodetype" : "node",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8",
}, # node
"sensorCounts" : {
"nodetype" : "node",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1",
}, # node
"climateCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.2",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of climate monitors currently plugged in""",
}, # scalar
"powerMonitorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.3",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of power monitors currently plugged in""",
}, # scalar
"tempSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.4",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of temperature sensors currently plugged in""",
}, # scalar
"airflowSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of airflow sensors currently plugged in""",
}, # scalar
"powerOnlyCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of power only monitors currently plugged in""",
}, # scalar
"doorSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of door sensors currently plugged in""",
}, # scalar
"waterSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of water sensors currently plugged in""",
}, # scalar
"currentSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of current sensors currently plugged in""",
}, # scalar
"millivoltSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of millivolt sensors currently plugged in""",
}, # scalar
"power3ChSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of 3 channel power monitors currently plugged in""",
}, # scalar
"outletCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of outlets currently plugged in""",
}, # scalar
"vsfcCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of fan controller monitors currently plugged in""",
}, # scalar
"ctrl3ChCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of 3 channel controllers currently plugged in""",
}, # scalar
"ctrlGrpAmpsCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of amperage controllers currently plugged in""",
}, # scalar
"ctrlOutputCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of output controllers currently plugged in""",
}, # scalar
"dewpointSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.17",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of dewpoint sensors currently plugged in""",
}, # scalar
"digitalSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.18",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of digital sensors currently plugged in""",
}, # scalar
"dstsSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.19",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of DSTS controllers currently plugged in""",
}, # scalar
"cpmSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.20",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of city power sensors currently plugged in""",
}, # scalar
"smokeAlarmSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.21",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of smoke alarm sensors currently plugged in""",
}, # scalar
"neg48VdcSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.22",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of -48Vdc sensors currently plugged in""",
}, # scalar
"pos30VdcSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.23",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of 30Vdc sensors currently plugged in""",
}, # scalar
"analogSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.24",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of remote analog inputs currently plugged in""",
}, # scalar
"ctrl3ChIECCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.25",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of IEC 3 channel controllers currently plugged in""",
}, # scalar
"climateRelayCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.26",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of climate relay monitors currently plugged in""",
}, # scalar
"ctrlRelayCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.27",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of relay controllers currently plugged in""",
}, # scalar
"airSpeedSwitchSensorCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.28",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of air speed switch sensors currently plugged in""",
}, # scalar
"powerDMCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.29",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of DM48 current sensors currently plugged in""",
}, # scalar
"ioExpanderCount" : {
"nodetype" : "scalar",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.1.8.1.30",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of IO expander devices currently plugged in""",
}, # scalar
"climateTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2",
"status" : "current",
"description" :
"""Climate sensors (internal sensors for climate units)""",
}, # table
"climateEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1",
"status" : "current",
"linkage" : [
"climateIndex",
],
"description" :
"""Entry in the climate table: each entry contains
an index (climateIndex) and other details""",
}, # row
"climateIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1"
},
],
"range" : {
"min" : "1",
"max" : "1"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"climateSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"climateName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"climateAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"climateTempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-50",
"max" : "100"
},
],
"range" : {
"min" : "-50",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for Temperature (C)""",
}, # column
"climateTempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-58",
"max" : "212"
},
],
"range" : {
"min" : "-58",
"max" : "212"
},
},
},
"access" : "readonly",
"units" : "Degress Fahrenheit",
"description" :
"""Current reading for Temperature (F)""",
}, # column
"climateHumidity" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Humidity""",
}, # column
"climateLight" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Ambient Light""",
}, # column
"climateAirflow" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Airflow""",
}, # column
"climateSound" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Sound""",
}, # column
"climateIO1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 1""",
}, # column
"climateIO2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 2""",
}, # column
"climateIO3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 3""",
}, # column
"climateDewPointC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-50",
"max" : "100"
},
],
"range" : {
"min" : "-50",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for Dew Point (C)""",
}, # column
"climateDewPointF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.2.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-58",
"max" : "212"
},
],
"range" : {
"min" : "-58",
"max" : "212"
},
},
},
"access" : "readonly",
"units" : "Degress Fahrenheit",
"description" :
"""Current reading for Dew Point (F)""",
}, # column
"powMonTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3",
"status" : "current",
"description" :
"""A table of Power Monitors""",
}, # table
"powMonEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1",
"status" : "current",
"linkage" : [
"powMonIndex",
],
"description" :
"""Entry in the power monitor table: each entry contains
an index (powMonIndex) and other power monitoring details""",
}, # row
"powMonIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"powMonSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"powMonName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"powMonAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"powMonKWattHrs" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "KWh",
"description" :
"""Current reading for KWatt-Hours""",
}, # column
"powMonVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts""",
}, # column
"powMonVoltMax" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Max)""",
}, # column
"powMonVoltMin" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Min)""",
}, # column
"powMonVoltPeak" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Peak)""",
}, # column
"powMonDeciAmps" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps""",
}, # column
"powMonRealPower" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power""",
}, # column
"powMonApparentPower" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power""",
}, # column
"powMonPowerFactor" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor""",
}, # column
"powMonOutlet1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Outlet 1",
"description" :
"""Outlet 1 Trap""",
}, # column
"powMonOutlet2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.3.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Outlet 2",
"description" :
"""Outlet 2 Trap""",
}, # column
"tempSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.4",
"status" : "current",
"description" :
"""A table of temperature sensors""",
}, # table
"tempSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.4.1",
"status" : "current",
"linkage" : [
"tempSensorIndex",
],
"description" :
"""Entry in the temperature sensor table: each entry contains
an index (tempIndex) and other sensor details""",
}, # row
"tempSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.4.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"tempSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.4.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"tempSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.4.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"tempSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.4.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"tempSensorTempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.4.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-50",
"max" : "100"
},
],
"range" : {
"min" : "-50",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Temperature in Celsius""",
}, # column
"tempSensorTempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.4.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-58",
"max" : "212"
},
],
"range" : {
"min" : "-58",
"max" : "212"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Temperature in Fahrenheit""",
}, # column
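        # Note (editor): tempSensorTempC and tempSensorTempF report the same
        # measurement in two scales; the Fahrenheit range above (-58..212) is
        # simply the Celsius range (-50..100) converted. An illustrative
        # cross-check, assuming integer readings as returned by the agent:
        #
        #     def c_to_f(temp_c):
        #         return temp_c * 9 // 5 + 32   # integer Fahrenheit, as reported
        #
        #     assert c_to_f(-50) == -58 and c_to_f(100) == 212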
"airFlowSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5",
"status" : "current",
"description" :
"""A table of airflow sensors""",
}, # table
"airFlowSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1",
"status" : "current",
"linkage" : [
"airFlowSensorIndex",
],
"description" :
"""Entry in the air flow sensor table: each entry contains
an index (airFlowSensorIndex) and other sensor details""",
}, # row
"airFlowSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"airFlowSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"airFlowSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"airFlowSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"airFlowSensorTempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-50",
"max" : "100"
},
],
"range" : {
"min" : "-50",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Temperature reading in C""",
}, # column
"airFlowSensorTempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-58",
"max" : "212"
},
],
"range" : {
"min" : "-58",
"max" : "212"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Temperature reading in F""",
}, # column
"airFlowSensorFlow" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Air flow reading""",
}, # column
"airFlowSensorHumidity" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Humidity reading""",
}, # column
"airFlowSensorDewPointC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-50",
"max" : "100"
},
],
"range" : {
"min" : "-50",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for Dew Point (C)""",
}, # column
"airFlowSensorDewPointF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.5.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-58",
"max" : "212"
},
],
"range" : {
"min" : "-58",
"max" : "212"
},
},
},
"access" : "readonly",
"units" : "Degress Fahrenheit",
"description" :
"""Current reading for Dew Point (F)""",
}, # column
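        # Note (editor): the airFlowSensor* columns above are normally read by
        # walking the table. A minimal sketch using pysnmp's high-level API --
        # pysnmp is not a dependency of this generated file, and the host
        # address and community string below are placeholders:
        #
        #     from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
        #                               ContextData, ObjectType, ObjectIdentity, nextCmd)
        #
        #     for err_ind, err_stat, err_idx, var_binds in nextCmd(
        #             SnmpEngine(), CommunityData('public'),
        #             UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
        #             ObjectType(ObjectIdentity('1.3.6.1.4.1.17373.3.5.1.5')),  # airFlowSensorTempC
        #             lexicographicMode=False):
        #         if err_ind or err_stat:
        #             break
        #         for oid, value in var_binds:
        #             print(oid.prettyPrint(), int(value))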
"powerTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6",
"status" : "current",
"description" :
"""A table of Power-Only devices""",
}, # table
"powerEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1",
"status" : "current",
"linkage" : [
"powerIndex",
],
"description" :
"""Entry in the power-only device table: each entry contains
an index (powerIndex) and other power details""",
}, # row
"powerIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"powerSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"powerName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"powerAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"powerVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts""",
}, # column
"powerDeciAmps" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps""",
}, # column
"powerRealPower" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power""",
}, # column
"powerApparentPower" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power""",
}, # column
"powerPowerFactor" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.6.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor""",
}, # column
"doorSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.7",
"status" : "current",
"description" :
"""A table of door sensors""",
}, # table
"doorSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.7.1",
"status" : "current",
"linkage" : [
"doorSensorIndex",
],
"description" :
"""Entry in the door sensor table: each entry contains
an index (doorSensorIndex) and other sensor details""",
}, # row
"doorSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.7.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"doorSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.7.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"doorSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.7.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"doorSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.7.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"doorSensorStatus" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.7.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Door sensor status""",
}, # column
"waterSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.8",
"status" : "current",
"description" :
"""A table of water sensors""",
}, # table
"waterSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.8.1",
"status" : "current",
"linkage" : [
"waterSensorIndex",
],
"description" :
"""Entry in the water sensor table: each entry contains
an index (waterSensorIndex) and other sensor details""",
}, # row
"waterSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.8.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"waterSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.8.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"waterSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.8.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"waterSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.8.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"waterSensorDampness" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.8.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Dampness of the water sensor""",
}, # column
"currentMonitorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.9",
"status" : "current",
"description" :
"""A table of current monitors""",
}, # table
"currentMonitorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.9.1",
"status" : "current",
"linkage" : [
"currentMonitorIndex",
],
"description" :
"""Entry in the current monitor table: each entry contains
an index (currentMonitorIndex) and other sensor details""",
}, # row
"currentMonitorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.9.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"currentMonitorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.9.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"currentMonitorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.9.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"currentMonitorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.9.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"currentMonitorDeciAmps" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.9.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "30"
},
],
"range" : {
"min" : "0",
"max" : "30"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
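        # Note (editor): columns with units of "0.1 Amps" (DeciAmps) carry the
        # current scaled by ten so it fits an integer Gauge32. A sketch of the
        # conversion back to amps, assuming the raw integer from the agent:
        #
        #     def deciamps_to_amps(raw):
        #         return raw / 10.0   # e.g. a reading of 27 means 2.7 A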
"millivoltMonitorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.10",
"status" : "current",
"description" :
"""A table of millivolt monitors""",
}, # table
"millivoltMonitorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.10.1",
"status" : "current",
"linkage" : [
"millivoltMonitorIndex",
],
"description" :
"""Entry in the millivolt monitor table: each entry contains
an index (millivoltMonitorIndex) and other sensor details""",
}, # row
"millivoltMonitorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.10.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"millivoltMonitorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.10.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"millivoltMonitorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.10.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"millivoltMonitorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.10.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"millivoltMonitorMV" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.10.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "5000"
},
],
"range" : {
"min" : "0",
"max" : "5000"
},
},
},
"access" : "readonly",
"units" : "millivolts",
"description" :
"""millivolts""",
}, # column
"pow3ChTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11",
"status" : "current",
"description" :
"""A table of Power Monitor 3 Channel""",
}, # table
"pow3ChEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1",
"status" : "current",
"linkage" : [
"pow3ChIndex",
],
"description" :
"""Entry in the power monitor 3 channel table: each entry contains
an index (pow3ChIndex) and other power monitoring details""",
}, # row
"pow3ChIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"pow3ChSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"pow3ChName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"pow3ChAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"pow3ChKWattHrsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "KWh",
"description" :
"""Current reading for KWatt-Hours (Phase A)""",
}, # column
"pow3ChVoltsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase A)""",
}, # column
"pow3ChVoltMaxA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Max-Volts (Phase A)""",
}, # column
"pow3ChVoltMinA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Min-Volts (Phase A)""",
}, # column
"pow3ChVoltPeakA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts",
"description" :
"""Current reading for Peak-Volts (Phase A)""",
}, # column
"pow3ChDeciAmpsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase A)""",
}, # column
"pow3ChRealPowerA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase A)""",
}, # column
"pow3ChApparentPowerA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase A)""",
}, # column
"pow3ChPowerFactorA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase A)""",
}, # column
"pow3ChKWattHrsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "KWh",
"description" :
"""Current reading for KWatt-Hours (Phase B)""",
}, # column
"pow3ChVoltsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase B)""",
}, # column
"pow3ChVoltMaxB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Max-Volts (Phase B)""",
}, # column
"pow3ChVoltMinB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Min-Volts (Phase B)""",
}, # column
"pow3ChVoltPeakB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts",
"description" :
"""Current reading for Peak-Volts (Phase B)""",
}, # column
"pow3ChDeciAmpsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase B)""",
}, # column
"pow3ChRealPowerB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase B)""",
}, # column
"pow3ChApparentPowerB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase B)""",
}, # column
"pow3ChPowerFactorB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.22",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase B)""",
}, # column
"pow3ChKWattHrsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "KWh",
"description" :
"""Current reading for KWatt-Hours (Phase C)""",
}, # column
"pow3ChVoltsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase C)""",
}, # column
"pow3ChVoltMaxC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Max-Volts (Phase C)""",
}, # column
"pow3ChVoltMinC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.26",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Min-Volts (Phase C)""",
}, # column
"pow3ChVoltPeakC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.27",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts",
"description" :
"""Current reading for Peak-Volts (Phase C)""",
}, # column
"pow3ChDeciAmpsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.28",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase C)""",
}, # column
"pow3ChRealPowerC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.29",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase C)""",
}, # column
"pow3ChApparentPowerC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.30",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase C)""",
}, # column
"pow3ChPowerFactorC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.11.1.31",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase C)""",
}, # column
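        # Note (editor): for each phase the three power columns are related:
        # the power factor is reported as a percentage, so real power is
        # approximately apparent power scaled by pow3ChPowerFactor<X>/100.
        # Illustrative check on hypothetical readings (example values only,
        # not taken from a device):
        #
        #     apparent_va, power_factor_pct = 2300, 92
        #     real_watts_estimate = apparent_va * power_factor_pct / 100.0  # ~2116 W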
"outletTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.12",
"status" : "current",
"description" :
"""A table of outlets""",
}, # table
"outletEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.12.1",
"status" : "current",
"linkage" : [
"outletIndex",
],
"description" :
"""Entry in the outlet table: each entry contains
an index (outletIndex) and other sensor details""",
}, # row
"outletIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.12.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"outletSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.12.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"outletName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.12.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"outletAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.12.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"outlet1Status" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.12.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Outlet 1 status""",
}, # column
"outlet2Status" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.12.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Outlet 2 status""",
}, # column
"vsfcTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13",
"status" : "current",
"description" :
"""VSFC sensors (internal sensors for VSFC units)""",
}, # table
"vsfcEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1",
"status" : "current",
"linkage" : [
"vsfcIndex",
],
"description" :
"""Entry in the vsfc table: each entry contains
an index (vsfcIndex) and other details""",
}, # row
"vsfcIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1"
},
],
"range" : {
"min" : "1",
"max" : "1"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"vsfcSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"vsfcName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"vsfcAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"vsfcSetPointC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "18",
"max" : "38"
},
],
"range" : {
"min" : "18",
"max" : "38"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current temperature set point in C""",
}, # column
"vsfcSetPointF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "65",
"max" : "100"
},
],
"range" : {
"min" : "65",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Current temperature set point in F""",
}, # column
"vsfcFanSpeed" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Fan Speed""",
}, # column
"vsfcIntTempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "50"
},
],
"range" : {
"min" : "-20",
"max" : "50"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current internal temperature reading in C""",
}, # column
"vsfcIntTempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-4",
"max" : "122"
},
],
"range" : {
"min" : "-4",
"max" : "122"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Current internal temperature reading in F""",
}, # column
"vsfcExt1TempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "50"
},
],
"range" : {
"min" : "-20",
"max" : "50"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for external temp 1 in C""",
}, # column
"vsfcExt1TempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "122"
},
],
"range" : {
"min" : "-20",
"max" : "122"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Current reading for external temp 1 in F""",
}, # column
"vsfcExt2TempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "50"
},
],
"range" : {
"min" : "-20",
"max" : "50"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for external temp 2 in C""",
}, # column
"vsfcExt2TempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "122"
},
],
"range" : {
"min" : "-20",
"max" : "122"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Current reading for external temp 1 in F""",
}, # column
"vsfcExt3TempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "50"
},
],
"range" : {
"min" : "-20",
"max" : "50"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for external temp 3 in C""",
}, # column
"vsfcExt3TempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.15",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "122"
},
],
"range" : {
"min" : "-20",
"max" : "122"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Current reading for external temp 1 in F""",
}, # column
"vsfcExt4TempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.16",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "50"
},
],
"range" : {
"min" : "-20",
"max" : "50"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for external temp 4 in C""",
}, # column
"vsfcExt4TempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.13.1.17",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "122"
},
],
"range" : {
"min" : "-20",
"max" : "122"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Current reading for external temp 1 in F""",
}, # column
"ctrl3ChTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14",
"status" : "current",
"description" :
"""A table of a 3 phase outlet control""",
}, # table
"ctrl3ChEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1",
"status" : "current",
"linkage" : [
"ctrl3ChIndex",
],
"description" :
"""Entry in the 3 phase outlet control table: each entry contains
an index (ctrl3ChIndex) and other outlet control monitoring details""",
}, # row
"ctrl3ChIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1"
},
],
"range" : {
"min" : "1",
"max" : "1"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"ctrl3ChSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"ctrl3ChName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"ctrl3ChAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"ctrl3ChVoltsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase A)""",
}, # column
"ctrl3ChVoltPeakA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Peak-Volts (Phase A)""",
}, # column
"ctrl3ChDeciAmpsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase A)""",
}, # column
"ctrl3ChDeciAmpsPeakA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for Peak-DeciAmps (Phase A)""",
}, # column
"ctrl3ChRealPowerA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase A)""",
}, # column
"ctrl3ChApparentPowerA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase A)""",
}, # column
"ctrl3ChPowerFactorA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase A)""",
}, # column
"ctrl3ChVoltsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase B)""",
}, # column
"ctrl3ChVoltPeakB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Peak-Volts (Phase B)""",
}, # column
"ctrl3ChDeciAmpsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase B)""",
}, # column
"ctrl3ChDeciAmpsPeakB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for Peak-DeciAmps (Phase B)""",
}, # column
"ctrl3ChRealPowerB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase B)""",
}, # column
"ctrl3ChApparentPowerB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase B)""",
}, # column
"ctrl3ChPowerFactorB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.18",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase B)""",
}, # column
"ctrl3ChVoltsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase C)""",
}, # column
"ctrl3ChVoltPeakC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Peak-Volts (Phase C)""",
}, # column
"ctrl3ChDeciAmpsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase C)""",
}, # column
"ctrl3ChDeciAmpsPeakC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for Peak-DeciAmps (Phase C)""",
}, # column
"ctrl3ChRealPowerC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase C)""",
}, # column
"ctrl3ChApparentPowerC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase C)""",
}, # column
"ctrl3ChPowerFactorC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.14.1.25",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase C)""",
}, # column
"ctrlGrpAmpsTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15",
"status" : "current",
"description" :
"""A table of Control Group Amp readings""",
}, # table
"ctrlGrpAmpsEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1",
"status" : "current",
"linkage" : [
"ctrlGrpAmpsIndex",
],
"description" :
"""Entry in the Control Group Amps table: each entry contains
an index (ctrlGrpAmpsIndex) and other sensor details""",
}, # row
"ctrlGrpAmpsIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1"
},
],
"range" : {
"min" : "1",
"max" : "1"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"ctrlGrpAmpsSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"ctrlGrpAmpsName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"ctrlGrpAmpsAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"ctrlGrpAmpsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""DeciAmps Group A""",
}, # column
"ctrlGrpAmpsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""DeciAmps Group B""",
}, # column
"ctrlGrpAmpsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""DeciAmps Group C""",
}, # column
"ctrlGrpAmpsD" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""DeciAmps Group D""",
}, # column
"ctrlGrpAmpsE" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""DeciAmps Group E""",
}, # column
"ctrlGrpAmpsF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""DeciAmps Group F""",
}, # column
"ctrlGrpAmpsG" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""DeciAmps Group G""",
}, # column
"ctrlGrpAmpsH" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""DeciAmps Group H""",
}, # column
"ctrlGrpAmpsAVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Volts Group A""",
}, # column
"ctrlGrpAmpsBVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Volts Group B""",
}, # column
"ctrlGrpAmpsCVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Volts Group C""",
}, # column
"ctrlGrpAmpsDVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Volts Group D""",
}, # column
"ctrlGrpAmpsEVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Volts Group E""",
}, # column
"ctrlGrpAmpsFVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Volts Group F""",
}, # column
"ctrlGrpAmpsGVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Volts Group G""",
}, # column
"ctrlGrpAmpsHVolts" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.15.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Volts Group H""",
}, # column
"ctrlOutletTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16",
"status" : "current",
"description" :
"""A table of outlet information""",
}, # table
"ctrlOutletEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1",
"status" : "current",
"linkage" : [
"ctrlOutletIndex",
],
"description" :
"""Entry in the control outlet table: each entry contains
an index (ctrlOutletIndex) and other sensor details""",
}, # row
"ctrlOutletIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Outlet Number""",
}, # column
"ctrlOutletName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""Outlet Friendly Name""",
}, # column
"ctrlOutletStatus" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Current Outlet Status: 0 = Off, 1 = On | Outlet Action Write: 1 = On, 2 = On Delayed, 3 = Off Immediate, 4 = Off Delayed, 5 = Reboot""",
}, # column
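        # Note (editor): ctrlOutletStatus is read-write; writing one of the
        # action codes listed above switches the outlet. A minimal sketch with
        # pysnmp -- the host, community string and the ".1" instance suffix are
        # assumed placeholders, not values defined in this file:
        #
        #     from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
        #                               ContextData, ObjectType, ObjectIdentity,
        #                               Gauge32, setCmd)
        #
        #     next(setCmd(SnmpEngine(), CommunityData('private'),
        #                 UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
        #                 ObjectType(ObjectIdentity('1.3.6.1.4.1.17373.3.16.1.3.1'),
        #                            Gauge32(1))))   # 1 = On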
"ctrlOutletFeedback" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Outlet Feedback Value, should be equal to status""",
}, # column
"ctrlOutletPending" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Outlet Status Read to change to: 0 = Off, 1 = On | Outlet Action Write: 1 = On, 2 = On Delayed, 3 = Off Immediate, 4 = Off Delayed, 5 = Reboot""",
}, # column
"ctrlOutletDeciAmps" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Outlet DeciAmps reading""",
}, # column
"ctrlOutletGroup" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Outlet Group (A to G)""",
}, # column
"ctrlOutletUpDelay" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"units" : "seconds",
"description" :
"""Outlet Power Up Delay""",
}, # column
"ctrlOutletDwnDelay" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"units" : "seconds",
"description" :
"""Outlet Power Down Delay""",
}, # column
"ctrlOutletRbtDelay" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"units" : "seconds",
"description" :
"""Outlet Reboot Delay""",
}, # column
"ctrlOutletURL" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""Outlet URL""",
}, # column
"ctrlOutletPOAAction" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""POA Action: 1 = Off, 2 = On, 3 = Last, 0 = POA not supported on this unit type""",
}, # column
"ctrlOutletPOADelay" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"units" : "seconds",
"description" :
"""POA Delay""",
}, # column
"ctrlOutletKWattHrs" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "KWh",
"description" :
"""Current Reading for KWatt-Hours""",
}, # column
"ctrlOutletPower" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.16.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Power""",
}, # column
"dewPointSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17",
"status" : "current",
"description" :
"""A table of dew point sensors""",
}, # table
"dewPointSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1",
"status" : "current",
"linkage" : [
"dewPointSensorIndex",
],
"description" :
"""Entry in the dew point sensor table: each entry contains
an index (dewPointSensorIndex) and other sensor details""",
}, # row
"dewPointSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"dewPointSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"dewPointSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"dewPointSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"dewPointSensorTempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-50",
"max" : "100"
},
],
"range" : {
"min" : "-50",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Temperature reading in C""",
}, # column
"dewPointSensorTempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-58",
"max" : "212"
},
],
"range" : {
"min" : "-58",
"max" : "212"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Temperature reading in F""",
}, # column
"dewPointSensorHumidity" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Humidity reading""",
}, # column
"dewPointSensorDewPointC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-50",
"max" : "100"
},
],
"range" : {
"min" : "-50",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Dew point reading in C""",
}, # column
"dewPointSensorDewPointF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.17.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-58",
"max" : "212"
},
],
"range" : {
"min" : "-58",
"max" : "212"
},
},
},
"access" : "readonly",
"units" : "Degrees Fahrenheit",
"description" :
"""Dew point reading in F""",
}, # column
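    # --- Note (comment only) ---
    # The Fahrenheit columns mirror the Celsius ones via F = C * 9/5 + 32,
    # which is why the declared ranges line up exactly: -50..100 C maps to
    # -58..212 F.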
"digitalSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.18",
"status" : "current",
"description" :
"""A table of digital sensors""",
}, # table
"digitalSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.18.1",
"status" : "current",
"linkage" : [
"digitalSensorIndex",
],
"description" :
"""Entry in the digital sensor table: each entry contains
an index (digitalSensorIndex) and other sensor details""",
}, # row
"digitalSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.18.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"digitalSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.18.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"digitalSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.18.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"digitalSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.18.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"digitalSensorDigital" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.18.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Digital sensor status""",
}, # column
"dstsTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19",
"status" : "current",
"description" :
"""Digital Static Transfer Switch status""",
}, # table
"dstsEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1",
"status" : "current",
"linkage" : [
"dstsIndex",
],
"description" :
"""Entry in the DSTS table: each entry contains
an index (dstsIndex) and other details""",
}, # row
"dstsIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1"
},
],
"range" : {
"min" : "1",
"max" : "1"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"dstsSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"dstsName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"dstsAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"dstsVoltsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""RMS Voltage of Side A""",
}, # column
"dstsDeciAmpsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""RMS Current of Side A in deciamps""",
}, # column
"dstsVoltsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""RMS Voltage of Side B""",
}, # column
"dstsDeciAmpsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""RMS Current of Side B in deciamps""",
}, # column
"dstsSourceAActive" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""If 99, source A active""",
}, # column
"dstsSourceBActive" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""If 99, source B active""",
}, # column
"dstsPowerStatusA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Power Quality of source A""",
}, # column
"dstsPowerStatusB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Power Quality of Source B""",
}, # column
"dstsSourceATempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.13",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "50"
},
],
"range" : {
"min" : "-20",
"max" : "50"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for Source A temp in C""",
}, # column
"dstsSourceBTempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.19.1.14",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-20",
"max" : "50"
},
],
"range" : {
"min" : "-20",
"max" : "50"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for Source B temp in C""",
}, # column
"cpmSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.20",
"status" : "current",
"description" :
"""A table of city power sensors""",
}, # table
"cpmSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.20.1",
"status" : "current",
"linkage" : [
"cpmSensorIndex",
],
"description" :
"""Entry in the city power sensor table: each entry contains
an index (cpmSensorIndex) and other sensor details""",
}, # row
"cpmSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.20.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"cpmSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.20.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"cpmSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.20.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"cpmSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.20.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"cpmSensorStatus" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.20.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""City Power sensor status""",
}, # column
"smokeAlarmTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.21",
"status" : "current",
"description" :
"""A table of smoke alarm sensors""",
}, # table
"smokeAlarmEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.21.1",
"status" : "current",
"linkage" : [
"smokeAlarmIndex",
],
"description" :
"""Entry in the smoke alarm sensor table: each entry contains
an index (smokeAlarmIndex) and other sensor details""",
}, # row
"smokeAlarmIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.21.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"smokeAlarmSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.21.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"smokeAlarmName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.21.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"smokeAlarmAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.21.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"smokeAlarmStatus" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.21.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Smoke alarm status""",
}, # column
"neg48VdcSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.22",
"status" : "current",
"description" :
"""A table of -48Vdc sensors""",
}, # table
"neg48VdcSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.22.1",
"status" : "current",
"linkage" : [
"neg48VdcSensorIndex",
],
"description" :
"""Entry in the -48Vdc sensor table: each entry contains
an index (neg48VdcSensorIndex) and other sensor details""",
}, # row
"neg48VdcSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.22.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"neg48VdcSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.22.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"neg48VdcSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.22.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"neg48VdcSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.22.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"neg48VdcSensorVoltage" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.22.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-100",
"max" : "10"
},
],
"range" : {
"min" : "-100",
"max" : "10"
},
},
},
"access" : "readonly",
"units" : "Volts",
"description" :
"""-48Vdc Sensor value""",
}, # column
"pos30VdcSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.23",
"status" : "current",
"description" :
"""A table of 30Vdc sensors""",
}, # table
"pos30VdcSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.23.1",
"status" : "current",
"linkage" : [
"pos30VdcSensorIndex",
],
"description" :
"""Entry in the 30Vdc sensor table: each entry contains
an index (pos30VdcSensorIndex) and other sensor details""",
}, # row
"pos30VdcSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.23.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"pos30VdcSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.23.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"pos30VdcSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.23.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"pos30VdcSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.23.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"pos30VdcSensorVoltage" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.23.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-10",
"max" : "100"
},
],
"range" : {
"min" : "-10",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Volts",
"description" :
"""30Vdc Sensor value""",
}, # column
"analogSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.24",
"status" : "current",
"description" :
"""A table of analog sensors""",
}, # table
"analogSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.24.1",
"status" : "current",
"linkage" : [
"analogSensorIndex",
],
"description" :
"""Entry in the analog input table: each entry contains
an index (analogSensorIndex) and other sensor details""",
}, # row
"analogSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.24.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"analogSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.24.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"analogSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.24.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"analogSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.24.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"analogSensorAnalog" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.24.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Analog Sensor Value""",
}, # column
"ctrl3ChIECTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25",
"status" : "current",
"description" :
"""A table of a 3 phase outlet control (IEC)""",
}, # table
"ctrl3ChIECEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1",
"status" : "current",
"linkage" : [
"ctrl3ChIECIndex",
],
"description" :
"""Entry in the 3 phase outlet control table: each entry contains
an index (ctrl3ChIECIndex) and other outlet control monitoring details""",
}, # row
"ctrl3ChIECIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"ctrl3ChIECSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"ctrl3ChIECName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"ctrl3ChIECAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"ctrl3ChIECKWattHrsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "KWh",
"description" :
"""Current Reading for KWatt-Hours (Phase A)""",
}, # column
"ctrl3ChIECVoltsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase A)""",
}, # column
"ctrl3ChIECVoltPeakA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Peak-Volts (Phase A)""",
}, # column
"ctrl3ChIECDeciAmpsA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase A)""",
}, # column
"ctrl3ChIECDeciAmpsPeakA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for Peak-DeciAmps (Phase A)""",
}, # column
"ctrl3ChIECRealPowerA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase A)""",
}, # column
"ctrl3ChIECApparentPowerA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase A)""",
}, # column
"ctrl3ChIECPowerFactorA" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase A)""",
}, # column
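    # --- Note (comment only) ---
    # Power factor is reported by the agent as a percentage; as a sanity check
    # it should roughly satisfy PF(%) ~= 100 * RealPower(W) / ApparentPower(VA),
    # e.g. 4600 W on 5000 VA -> ~92 %.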
"ctrl3ChIECKWattHrsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "KWh",
"description" :
"""Current Reading for KWatt-Hours (Phase B)""",
}, # column
"ctrl3ChIECVoltsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase B)""",
}, # column
"ctrl3ChIECVoltPeakB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Peak-Volts (Phase B)""",
}, # column
"ctrl3ChIECDeciAmpsB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase B)""",
}, # column
"ctrl3ChIECDeciAmpsPeakB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for Peak-DeciAmps (Phase B)""",
}, # column
"ctrl3ChIECRealPowerB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase B)""",
}, # column
"ctrl3ChIECApparentPowerB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase B)""",
}, # column
"ctrl3ChIECPowerFactorB" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.20",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase B)""",
}, # column
"ctrl3ChIECKWattHrsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "KWh",
"description" :
"""Current Reading for KWatt-Hours (Phase C)""",
}, # column
"ctrl3ChIECVoltsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Volts (Phase C)""",
}, # column
"ctrl3ChIECVoltPeakC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volts (rms)",
"description" :
"""Current reading for Peak-Volts (Phase C)""",
}, # column
"ctrl3ChIECDeciAmpsC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for DeciAmps (Phase C)""",
}, # column
"ctrl3ChIECDeciAmpsPeakC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "0.1 Amps (rms)",
"description" :
"""Current reading for Peak-DeciAmps (Phase C)""",
}, # column
"ctrl3ChIECRealPowerC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.26",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Watts",
"description" :
"""Current reading for Real Power (Phase C)""",
}, # column
"ctrl3ChIECApparentPowerC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.27",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"units" : "Volt-Amps",
"description" :
"""Current reading for Apparent Power (Phase C)""",
}, # column
"ctrl3ChIECPowerFactorC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.25.1.28",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "%",
"description" :
"""Current reading for Power Factor (Phase C)""",
}, # column
"climateRelayTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26",
"status" : "current",
"description" :
"""Climate Relay sensors (internal sensors for climate relay units)""",
}, # table
"climateRelayEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1",
"status" : "current",
"linkage" : [
"climateRelayIndex",
],
"description" :
"""Entry in the climate table: each entry contains
an index (climateRelayIndex) and other details""",
}, # row
"climateRelayIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1"
},
],
"range" : {
"min" : "1",
"max" : "1"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"climateRelaySerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"climateRelayName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"climateRelayAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"climateRelayTempC" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-50",
"max" : "100"
},
],
"range" : {
"min" : "-50",
"max" : "100"
},
},
},
"access" : "readonly",
"units" : "Degrees Celsius",
"description" :
"""Current reading for Temperature (C)""",
}, # column
"climateRelayTempF" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.6",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "-58",
"max" : "212"
},
],
"range" : {
"min" : "-58",
"max" : "212"
},
},
},
"access" : "readonly",
"units" : "Degress Fahrenheit",
"description" :
"""Current reading for Temperature (F)""",
}, # column
"climateRelayIO1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 1""",
}, # column
"climateRelayIO2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 2""",
}, # column
"climateRelayIO3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.9",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 3""",
}, # column
"climateRelayIO4" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.10",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 4""",
}, # column
"climateRelayIO5" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.11",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 5""",
}, # column
"climateRelayIO6" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.26.1.12",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 6""",
}, # column
"ctrlRelayTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.27",
"status" : "current",
"description" :
"""A table of relay information""",
}, # table
"ctrlRelayEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.27.1",
"status" : "current",
"linkage" : [
"ctrlRelayIndex",
],
"description" :
"""Entry in the control relay table: each entry contains
an index (ctrlRelayIndex) and other sensor details""",
}, # row
"ctrlRelayIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.27.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Relay Number""",
}, # column
"ctrlRelayName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.27.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""Relay Friendly Name""",
}, # column
"ctrlRelayState" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.27.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Current Relay Status: 0 = Off, 1 = On""",
}, # column
"ctrlRelayLatchingMode" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.27.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay latching mode: 0 = Non-latching, 1 = Latching""",
}, # column
"ctrlRelayOverride" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.27.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay Override Mode: 0 - None, 1 - On, 2 - Off""",
}, # column
"ctrlRelayAcknowledge" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.27.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Acknowledge write a 1, always reads back 0""",
}, # column
"airSpeedSwitchSensorTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.28",
"status" : "current",
"description" :
"""A table of air speed switch sensors""",
}, # table
"airSpeedSwitchSensorEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.28.1",
"status" : "current",
"linkage" : [
"airSpeedSwitchSensorIndex",
],
"description" :
"""Entry in the air speed switch sensor table: each entry contains
an index (airSpeedSwitchIndex) and other sensor details""",
}, # row
"airSpeedSwitchSensorIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.28.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"airSpeedSwitchSensorSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.28.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"airSpeedSwitchSensorName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.28.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"airSpeedSwitchSensorAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.28.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"airSpeedSwitchSensorAirSpeed" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.28.1.5",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Air Speed Switch Status""",
}, # column
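    # --- Illustrative lookup sketch (comments only) ---
    # Assuming this node dictionary is bound to a name such as MIB_NODES in the
    # enclosing module (the actual binding is not visible here), a column's
    # instance OID for a given table index could be resolved roughly like:
    #
    #   def instance_oid(nodes, column, index):
    #       node = nodes[column]                   # e.g. "airSpeedSwitchSensorAirSpeed"
    #       assert node["nodetype"] == "column"
    #       return "%s.%d" % (node["oid"], index)  # -> '1.3.6.1.4.1.17373.3.28.1.5.1'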
"powerDMTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29",
"status" : "current",
"description" :
"""A table of DM48 current monitors""",
}, # table
"powerDMEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1",
"status" : "current",
"linkage" : [
"powerDMIndex",
],
"description" :
"""Entry in the DM48 current monitor table: each entry contains
an index (powerDMIndex) and other sensor details""",
}, # row
"powerDMIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "100"
},
],
"range" : {
"min" : "1",
"max" : "100"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"powerDMSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"powerDMName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"powerDMAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"powerDMUnitInfoTitle" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Type of Unit""",
}, # column
"powerDMUnitInfoVersion" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Unit Version Number""",
}, # column
"powerDMUnitInfoMainCount" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.7",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "16"
},
],
"range" : {
"min" : "0",
"max" : "16"
},
},
},
"access" : "readonly",
"description" :
"""Number of Main (Total Amps) Channels on the Unit""",
}, # column
"powerDMUnitInfoAuxCount" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.8",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "48"
},
],
"range" : {
"min" : "0",
"max" : "48"
},
},
},
"access" : "readonly",
"description" :
"""Number of Auxiliary (Outlet) Channels on the Unit""",
}, # column
"powerDMChannelName1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 1 Factory Name""",
}, # column
"powerDMChannelName2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 2 Factory Name""",
}, # column
"powerDMChannelName3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 3 Factory Name""",
}, # column
"powerDMChannelName4" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 4 Factory Name""",
}, # column
"powerDMChannelName5" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 5 Factory Name""",
}, # column
"powerDMChannelName6" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 6 Factory Name""",
}, # column
"powerDMChannelName7" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 7 Factory Name""",
}, # column
"powerDMChannelName8" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 8 Factory Name""",
}, # column
"powerDMChannelName9" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 9 Factory Name""",
}, # column
"powerDMChannelName10" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 10 Factory Name""",
}, # column
"powerDMChannelName11" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 11 Factory Name""",
}, # column
"powerDMChannelName12" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 12 Factory Name""",
}, # column
"powerDMChannelName13" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 13 Factory Name""",
}, # column
"powerDMChannelName14" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 14 Factory Name""",
}, # column
"powerDMChannelName15" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 15 Factory Name""",
}, # column
"powerDMChannelName16" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 16 Factory Name""",
}, # column
"powerDMChannelName17" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 17 Factory Name""",
}, # column
"powerDMChannelName18" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.26",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 18 Factory Name""",
}, # column
"powerDMChannelName19" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.27",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 19 Factory Name""",
}, # column
"powerDMChannelName20" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.28",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 20 Factory Name""",
}, # column
"powerDMChannelName21" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.29",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 21 Factory Name""",
}, # column
"powerDMChannelName22" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.30",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 22 Factory Name""",
}, # column
"powerDMChannelName23" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.31",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 23 Factory Name""",
}, # column
"powerDMChannelName24" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.32",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 24 Factory Name""",
}, # column
"powerDMChannelName25" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.33",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 25 Factory Name""",
}, # column
"powerDMChannelName26" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.34",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 26 Factory Name""",
}, # column
"powerDMChannelName27" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.35",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 27 Factory Name""",
}, # column
"powerDMChannelName28" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.36",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 28 Factory Name""",
}, # column
"powerDMChannelName29" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.37",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 29 Factory Name""",
}, # column
"powerDMChannelName30" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.38",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 30 Factory Name""",
}, # column
"powerDMChannelName31" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.39",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 31 Factory Name""",
}, # column
"powerDMChannelName32" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.40",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 32 Factory Name""",
}, # column
"powerDMChannelName33" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.41",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 33 Factory Name""",
}, # column
"powerDMChannelName34" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.42",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 34 Factory Name""",
}, # column
"powerDMChannelName35" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.43",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 35 Factory Name""",
}, # column
"powerDMChannelName36" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.44",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 36 Factory Name""",
}, # column
"powerDMChannelName37" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.45",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 37 Factory Name""",
}, # column
"powerDMChannelName38" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.46",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 38 Factory Name""",
}, # column
"powerDMChannelName39" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.47",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 39 Factory Name""",
}, # column
"powerDMChannelName40" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.48",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 40 Factory Name""",
}, # column
"powerDMChannelName41" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.49",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 41 Factory Name""",
}, # column
"powerDMChannelName42" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.50",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 42 Factory Name""",
}, # column
"powerDMChannelName43" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.51",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 43 Factory Name""",
}, # column
"powerDMChannelName44" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.52",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 44 Factory Name""",
}, # column
"powerDMChannelName45" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.53",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 45 Factory Name""",
}, # column
"powerDMChannelName46" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.54",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 46 Factory Name""",
}, # column
"powerDMChannelName47" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.55",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 47 Factory Name""",
}, # column
"powerDMChannelName48" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.56",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 48 Factory Name""",
}, # column
"powerDMChannelFriendly1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.57",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 1 Friendly Name""",
}, # column
"powerDMChannelFriendly2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.58",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 2 Friendly Name""",
}, # column
"powerDMChannelFriendly3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.59",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 3 Friendly Name""",
}, # column
"powerDMChannelFriendly4" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.60",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 4 Friendly Name""",
}, # column
"powerDMChannelFriendly5" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.61",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 5 Friendly Name""",
}, # column
"powerDMChannelFriendly6" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.62",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 6 Friendly Name""",
}, # column
"powerDMChannelFriendly7" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.63",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 7 Friendly Name""",
}, # column
"powerDMChannelFriendly8" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.64",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 8 Friendly Name""",
}, # column
"powerDMChannelFriendly9" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.65",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 9 Friendly Name""",
}, # column
"powerDMChannelFriendly10" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.66",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 10 Friendly Name""",
}, # column
"powerDMChannelFriendly11" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.67",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 11 Friendly Name""",
}, # column
"powerDMChannelFriendly12" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.68",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 12 Friendly Name""",
}, # column
"powerDMChannelFriendly13" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.69",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 13 Friendly Name""",
}, # column
"powerDMChannelFriendly14" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.70",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 14 Friendly Name""",
}, # column
"powerDMChannelFriendly15" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.71",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 15 Friendly Name""",
}, # column
"powerDMChannelFriendly16" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.72",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 16 Friendly Name""",
}, # column
"powerDMChannelFriendly17" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.73",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 17 Friendly Name""",
}, # column
"powerDMChannelFriendly18" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.74",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 18 Friendly Name""",
}, # column
"powerDMChannelFriendly19" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.75",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 19 Friendly Name""",
}, # column
"powerDMChannelFriendly20" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.76",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 20 Friendly Name""",
}, # column
"powerDMChannelFriendly21" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.77",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 21 Friendly Name""",
}, # column
"powerDMChannelFriendly22" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.78",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 22 Friendly Name""",
}, # column
"powerDMChannelFriendly23" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.79",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 23 Friendly Name""",
}, # column
"powerDMChannelFriendly24" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.80",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 24 Friendly Name""",
}, # column
"powerDMChannelFriendly25" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.81",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 25 Friendly Name""",
}, # column
"powerDMChannelFriendly26" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.82",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 26 Friendly Name""",
}, # column
"powerDMChannelFriendly27" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.83",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 27 Friendly Name""",
}, # column
"powerDMChannelFriendly28" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.84",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 28 Friendly Name""",
}, # column
"powerDMChannelFriendly29" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.85",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 29 Friendly Name""",
}, # column
"powerDMChannelFriendly30" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.86",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 30 Friendly Name""",
}, # column
"powerDMChannelFriendly31" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.87",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 31 Friendly Name""",
}, # column
"powerDMChannelFriendly32" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.88",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 32 Friendly Name""",
}, # column
"powerDMChannelFriendly33" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.89",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 33 Friendly Name""",
}, # column
"powerDMChannelFriendly34" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.90",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 34 Friendly Name""",
}, # column
"powerDMChannelFriendly35" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.91",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 35 Friendly Name""",
}, # column
"powerDMChannelFriendly36" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.92",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 36 Friendly Name""",
}, # column
"powerDMChannelFriendly37" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.93",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 37 Friendly Name""",
}, # column
"powerDMChannelFriendly38" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.94",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 38 Friendly Name""",
}, # column
"powerDMChannelFriendly39" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.95",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 39 Friendly Name""",
}, # column
"powerDMChannelFriendly40" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.96",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 40 Friendly Name""",
}, # column
"powerDMChannelFriendly41" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.97",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 41 Friendly Name""",
}, # column
"powerDMChannelFriendly42" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.98",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 42 Friendly Name""",
}, # column
"powerDMChannelFriendly43" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.99",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 43 Friendly Name""",
}, # column
"powerDMChannelFriendly44" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.100",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 44 Friendly Name""",
}, # column
"powerDMChannelFriendly45" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.101",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 45 Friendly Name""",
}, # column
"powerDMChannelFriendly46" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.102",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 46 Friendly Name""",
}, # column
"powerDMChannelFriendly47" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.103",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 47 Friendly Name""",
}, # column
"powerDMChannelFriendly48" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.104",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 48 Friendly Name""",
}, # column
"powerDMChannelGroup1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.105",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 1 Group""",
}, # column
"powerDMChannelGroup2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.106",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 2 Group""",
}, # column
"powerDMChannelGroup3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.107",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 3 Group""",
}, # column
"powerDMChannelGroup4" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.108",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 4 Group""",
}, # column
"powerDMChannelGroup5" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.109",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 5 Group""",
}, # column
"powerDMChannelGroup6" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.110",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 6 Group""",
}, # column
"powerDMChannelGroup7" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.111",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 7 Group""",
}, # column
"powerDMChannelGroup8" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.112",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 8 Group""",
}, # column
"powerDMChannelGroup9" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.113",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 9 Group""",
}, # column
"powerDMChannelGroup10" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.114",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 10 Group""",
}, # column
"powerDMChannelGroup11" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.115",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 11 Group""",
}, # column
"powerDMChannelGroup12" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.116",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 12 Group""",
}, # column
"powerDMChannelGroup13" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.117",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 13 Group""",
}, # column
"powerDMChannelGroup14" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.118",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 14 Group""",
}, # column
"powerDMChannelGroup15" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.119",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 15 Group""",
}, # column
"powerDMChannelGroup16" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.120",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 16 Group""",
}, # column
"powerDMChannelGroup17" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.121",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 17 Group""",
}, # column
"powerDMChannelGroup18" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.122",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 18 Group""",
}, # column
"powerDMChannelGroup19" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.123",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 19 Group""",
}, # column
"powerDMChannelGroup20" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.124",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 20 Group""",
}, # column
"powerDMChannelGroup21" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.125",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 21 Group""",
}, # column
"powerDMChannelGroup22" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.126",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 22 Group""",
}, # column
"powerDMChannelGroup23" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.127",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 23 Group""",
}, # column
"powerDMChannelGroup24" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.128",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 24 Group""",
}, # column
"powerDMChannelGroup25" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.129",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 25 Group""",
}, # column
"powerDMChannelGroup26" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.130",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 26 Group""",
}, # column
"powerDMChannelGroup27" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.131",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 27 Group""",
}, # column
"powerDMChannelGroup28" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.132",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 28 Group""",
}, # column
"powerDMChannelGroup29" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.133",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 29 Group""",
}, # column
"powerDMChannelGroup30" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.134",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 30 Group""",
}, # column
"powerDMChannelGroup31" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.135",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 31 Group""",
}, # column
"powerDMChannelGroup32" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.136",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 32 Group""",
}, # column
"powerDMChannelGroup33" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.137",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 33 Group""",
}, # column
"powerDMChannelGroup34" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.138",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 34 Group""",
}, # column
"powerDMChannelGroup35" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.139",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 35 Group""",
}, # column
"powerDMChannelGroup36" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.140",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 36 Group""",
}, # column
"powerDMChannelGroup37" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.141",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 37 Group""",
}, # column
"powerDMChannelGroup38" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.142",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 38 Group""",
}, # column
"powerDMChannelGroup39" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.143",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 39 Group""",
}, # column
"powerDMChannelGroup40" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.144",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 40 Group""",
}, # column
"powerDMChannelGroup41" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.145",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 41 Group""",
}, # column
"powerDMChannelGroup42" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.146",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 42 Group""",
}, # column
"powerDMChannelGroup43" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.147",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 43 Group""",
}, # column
"powerDMChannelGroup44" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.148",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 44 Group""",
}, # column
"powerDMChannelGroup45" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.149",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 45 Group""",
}, # column
"powerDMChannelGroup46" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.150",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 46 Group""",
}, # column
"powerDMChannelGroup47" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.151",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 47 Group""",
}, # column
"powerDMChannelGroup48" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.152",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Channel 48 Group""",
}, # column
"powerDMDeciAmps1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.153",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
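# Note (editorial sketch, not part of the generated MIB dump): the powerDMDeciAmps*
# columns that follow all report current in tenths of an ampere ("units" : "0.1 Amps"),
# with a raw Integer32 range of 0..1209. A consumer would typically convert the raw
# value to amps before display, e.g.:
#
#     raw_deciamps = 123          # hypothetical value read via an SNMP GET
#     amps = raw_deciamps / 10.0  # -> 12.3 A
#
# The variable names above are illustrative only; they do not exist in this module.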
"powerDMDeciAmps2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.154",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.155",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps4" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.156",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps5" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.157",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps6" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.158",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps7" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.159",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps8" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.160",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps9" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.161",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps10" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.162",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps11" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.163",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps12" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.164",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps13" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.165",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps14" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.166",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps15" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.167",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps16" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.168",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps17" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.169",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps18" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.170",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps19" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.171",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps20" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.172",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps21" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.173",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps22" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.174",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps23" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.175",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps24" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.176",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps25" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.177",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps26" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.178",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps27" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.179",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps28" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.180",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps29" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.181",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps30" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.182",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps31" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.183",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps32" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.184",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps33" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.185",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps34" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.186",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps35" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.187",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps36" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.188",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps37" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.189",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps38" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.190",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps39" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.191",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps40" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.192",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps41" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.193",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps42" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.194",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps43" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.195",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps44" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.196",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps45" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.197",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps46" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.198",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps47" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.199",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"powerDMDeciAmps48" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.29.1.200",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "1209"
},
],
"range" : {
"min" : "0",
"max" : "1209"
},
},
},
"access" : "readonly",
"units" : "0.1 Amps",
"description" :
"""Current in deciamps""",
}, # column
"ioExpanderTable" : {
"nodetype" : "table",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30",
"status" : "current",
"description" :
"""IO Expander device with relay capability""",
}, # table
"ioExpanderEntry" : {
"nodetype" : "row",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1",
"status" : "current",
"linkage" : [
"ioExpanderIndex",
],
"description" :
"""Entry in the IO Expander table: each entry contains
an index (ioExpanderIndex) and other details""",
}, # row
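# Note (editorial sketch, not part of the generated MIB dump): ioExpanderEntry is an
# SNMP conceptual row whose linkage (index) is ioExpanderIndex, restricted here to 1.
# An instance OID is therefore formed by appending the index to a column OID; for
# example, ioExpanderSerial for the first (and only) expander would be polled at
# 1.3.6.1.4.1.17373.3.30.1.2.1. This is a description of standard SNMP table
# addressing, not an additional MIB object.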
"ioExpanderIndex" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.1",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "1",
"max" : "1"
},
],
"range" : {
"min" : "1",
"max" : "1"
},
},
},
"access" : "noaccess",
"description" :
"""Table entry index value""",
}, # column
"ioExpanderSerial" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.2",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Serial Number""",
}, # column
"ioExpanderName" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.3",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""Friendly Name""",
}, # column
"ioExpanderAvail" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.4",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Is device currently plugged in?""",
}, # column
"ioExpanderFriendlyName1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.5",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 1 Friendly Name""",
}, # column
"ioExpanderFriendlyName2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.6",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 2 Friendly Name""",
}, # column
"ioExpanderFriendlyName3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.7",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 3 Friendly Name""",
}, # column
"ioExpanderFriendlyName4" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.8",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 4 Friendly Name""",
}, # column
"ioExpanderFriendlyName5" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.9",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 5 Friendly Name""",
}, # column
"ioExpanderFriendlyName6" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.10",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 6 Friendly Name""",
}, # column
"ioExpanderFriendlyName7" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.11",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 7 Friendly Name""",
}, # column
"ioExpanderFriendlyName8" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.12",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 8 Friendly Name""",
}, # column
"ioExpanderFriendlyName9" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.13",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 9 Friendly Name""",
}, # column
"ioExpanderFriendlyName10" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.14",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 10 Friendly Name""",
}, # column
"ioExpanderFriendlyName11" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.15",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 11 Friendly Name""",
}, # column
"ioExpanderFriendlyName12" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.16",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 12 Friendly Name""",
}, # column
"ioExpanderFriendlyName13" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.17",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 13 Friendly Name""",
}, # column
"ioExpanderFriendlyName14" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.18",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 14 Friendly Name""",
}, # column
"ioExpanderFriendlyName15" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.19",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 15 Friendly Name""",
}, # column
"ioExpanderFriendlyName16" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.20",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 16 Friendly Name""",
}, # column
"ioExpanderFriendlyName17" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.21",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 17 Friendly Name""",
}, # column
"ioExpanderFriendlyName18" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.22",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 18 Friendly Name""",
}, # column
"ioExpanderFriendlyName19" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.23",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 19 Friendly Name""",
}, # column
"ioExpanderFriendlyName20" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.24",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 20 Friendly Name""",
}, # column
"ioExpanderFriendlyName21" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.25",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 21 Friendly Name""",
}, # column
"ioExpanderFriendlyName22" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.26",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 22 Friendly Name""",
}, # column
"ioExpanderFriendlyName23" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.27",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 23 Friendly Name""",
}, # column
"ioExpanderFriendlyName24" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.28",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 24 Friendly Name""",
}, # column
"ioExpanderFriendlyName25" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.29",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 25 Friendly Name""",
}, # column
"ioExpanderFriendlyName26" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.30",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 26 Friendly Name""",
}, # column
"ioExpanderFriendlyName27" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.31",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 27 Friendly Name""",
}, # column
"ioExpanderFriendlyName28" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.32",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 28 Friendly Name""",
}, # column
"ioExpanderFriendlyName29" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.33",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 29 Friendly Name""",
}, # column
"ioExpanderFriendlyName30" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.34",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 30 Friendly Name""",
}, # column
"ioExpanderFriendlyName31" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.35",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 31 Friendly Name""",
}, # column
"ioExpanderFriendlyName32" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.36",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readonly",
"description" :
"""IO 32 Friendly Name""",
}, # column
"ioExpanderIO1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.37",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 1""",
}, # column
"ioExpanderIO2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.38",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 2""",
}, # column
"ioExpanderIO3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.39",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 3""",
}, # column
"ioExpanderIO4" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.40",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 4""",
}, # column
"ioExpanderIO5" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.41",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 5""",
}, # column
"ioExpanderIO6" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.42",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 6""",
}, # column
"ioExpanderIO7" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.43",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 7""",
}, # column
"ioExpanderIO8" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.44",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 8""",
}, # column
"ioExpanderIO9" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.45",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 9""",
}, # column
"ioExpanderIO10" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.46",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 10""",
}, # column
"ioExpanderIO11" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.47",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 11""",
}, # column
"ioExpanderIO12" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.48",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 12""",
}, # column
"ioExpanderIO13" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.49",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 13""",
}, # column
"ioExpanderIO14" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.50",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 14""",
}, # column
"ioExpanderIO15" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.51",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 15""",
}, # column
"ioExpanderIO16" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.52",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 16""",
}, # column
"ioExpanderIO17" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.53",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 17""",
}, # column
"ioExpanderIO18" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.54",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 18""",
}, # column
"ioExpanderIO19" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.55",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 19""",
}, # column
"ioExpanderIO20" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.56",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 20""",
}, # column
"ioExpanderIO21" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.57",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 21""",
}, # column
"ioExpanderIO22" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.58",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 22""",
}, # column
"ioExpanderIO23" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.59",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 23""",
}, # column
"ioExpanderIO24" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.60",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 24""",
}, # column
"ioExpanderIO25" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.61",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 25""",
}, # column
"ioExpanderIO26" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.62",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 26""",
}, # column
"ioExpanderIO27" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.63",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 27""",
}, # column
"ioExpanderIO28" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.64",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 28""",
}, # column
"ioExpanderIO29" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.65",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 29""",
}, # column
"ioExpanderIO30" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.66",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 30""",
}, # column
"ioExpanderIO31" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.67",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 31""",
}, # column
"ioExpanderIO32" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.68",
"status" : "current",
"syntax" : {
"type" : {
"basetype" : "Integer32",
"ranges" : [
{
"min" : "0",
"max" : "100"
},
],
"range" : {
"min" : "0",
"max" : "100"
},
},
},
"access" : "readonly",
"description" :
"""Current reading for Analog Input 32""",
}, # column
"ioExpanderRelayName1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.69",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""Relay1 Friendly Name""",
}, # column
"ioExpanderRelayState1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.70",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Relay1 Current Status: 0 = Off, 1 = On""",
}, # column
"ioExpanderRelayLatchingMode1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.71",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay1 Latching mode: 0 = Non-latching, 1 = Latching""",
}, # column
"ioExpanderRelayOverride1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.72",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay1 Override Mode: 0 - None, 1 - On, 2 - Off""",
}, # column
"ioExpanderRelayAcknowledge1" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.73",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay1 Acknowledge write a 1, always reads back 0""",
}, # column
"ioExpanderRelayName2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.74",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""Relay2 Friendly Name""",
}, # column
"ioExpanderRelayState2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.75",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Relay2 Current Status: 0 = Off, 1 = On""",
}, # column
"ioExpanderRelayLatchingMode2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.76",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay2 Latching mode: 0 = Non-latching, 1 = Latching""",
}, # column
"ioExpanderRelayOverride2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.77",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay2 Override Mode: 0 - None, 1 - On, 2 - Off""",
}, # column
"ioExpanderRelayAcknowledge2" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.78",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay2 Acknowledge write a 1, always reads back 0""",
}, # column
"ioExpanderRelayName3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.79",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-TC", "name" : "DisplayString"},
},
"access" : "readwrite",
"description" :
"""Relay3 Friendly Name""",
}, # column
"ioExpanderRelayState3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.80",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readonly",
"description" :
"""Relay3 Current Status: 0 = Off, 1 = On""",
}, # column
"ioExpanderRelayLatchingMode3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.81",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay3 Latching mode: 0 = Non-latching, 1 = Latching""",
}, # column
"ioExpanderRelayOverride3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.82",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay3 Override Mode: 0 - None, 1 - On, 2 - Off""",
}, # column
"ioExpanderRelayAcknowledge3" : {
"nodetype" : "column",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.30.1.83",
"status" : "current",
"syntax" : {
"type" : { "module" :"SNMPv2-SMI", "name" : "Gauge32"},
},
"access" : "readwrite",
"description" :
"""Relay3 Acknowledge write a 1, always reads back 0""",
}, # column
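    # The three relay entries above follow a fixed per-relay column pattern:
    # Name (readwrite), State (readonly), LatchingMode, Override and Acknowledge
    # (all readwrite).  A hedged sketch of building the full instance OID for one
    # of these columns -- "MIB" and the helper name are assumptions, and the
    # trailing ".1" reflects the single-row ioExpanderIndex range (1..1) declared
    # earlier in this table:
    #
    #   def relay_instance_oid(mib, column_name, index=1):
    #       # e.g. relay_instance_oid(MIB, "ioExpanderRelayOverride1") ->
    #       #      "1.3.6.1.4.1.17373.3.30.1.72.1"
    #       return mib["nodes"][column_name]["oid"] + "." + str(index)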
"cmTrap" : {
"nodetype" : "node",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767",
}, # node
"cmTrapPrefix" : {
"nodetype" : "node",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0",
}, # node
}, # nodes
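    # The "notifications" section that follows maps each trap name to its OID and
    # the varbind objects it carries.  A hedged sketch of resolving a received
    # trap OID back to its name -- "MIB" is an assumed reference to this dict,
    # not something defined in this generated file:
    #
    #   def trap_name(mib, oid):
    #       for name, notif in mib["notifications"].items():
    #           if notif["oid"] == oid:
    #               return name
    #       return None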
"notifications" : {
"cmTestNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10101",
"status" : "current",
"objects" : {
},
"description" :
"""Test SNMP Trap""",
}, # notification
"cmClimateTempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10205",
"status" : "current",
"objects" : {
"climateTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Temperature Sensor Trap""",
}, # notification
"cmClimateTempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10206",
"status" : "current",
"objects" : {
"climateTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Temperature Sensor Trap""",
}, # notification
"cmClimateHumidityNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10207",
"status" : "current",
"objects" : {
"climateHumidity" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Humidity Sensor Trap""",
}, # notification
"cmClimateLightNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10208",
"status" : "current",
"objects" : {
"climateLight" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Light Sensor Trap""",
}, # notification
"cmClimateAirflowNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10209",
"status" : "current",
"objects" : {
"climateAirflow" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Air Flow Sensor Trap""",
}, # notification
"cmClimateSoundNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10210",
"status" : "current",
"objects" : {
"climateSound" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Sound Sensor Trap""",
}, # notification
"cmClimateIO1NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10211",
"status" : "current",
"objects" : {
"climateIO1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate IO1 Sensor Trap""",
}, # notification
"cmClimateIO2NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10212",
"status" : "current",
"objects" : {
"climateIO2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate IO2 Sensor Trap""",
}, # notification
"cmClimateIO3NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10213",
"status" : "current",
"objects" : {
"climateIO3" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate IO3 Sensor Trap""",
}, # notification
"cmClimateDewPointCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10214",
"status" : "current",
"objects" : {
"climateDewPointC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Dew Point Sensor Trap""",
}, # notification
"cmClimateDewPointFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10215",
"status" : "current",
"objects" : {
"climateDewPointF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Dew Point Sensor Trap""",
}, # notification
"cmPowMonKWattHrsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10305",
"status" : "current",
"objects" : {
"powMonKWattHrs" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours Trap""",
}, # notification
"cmPowMonVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10306",
"status" : "current",
"objects" : {
"powMonVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Trap""",
}, # notification
"cmPowMonVoltMaxNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10307",
"status" : "current",
"objects" : {
"powMonVoltMax" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Max Trap""",
}, # notification
"cmPowMonVoltMinNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10308",
"status" : "current",
"objects" : {
"powMonVoltMin" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Min Trap""",
}, # notification
"cmPowMonVoltPeakNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10309",
"status" : "current",
"objects" : {
"powMonVoltPeak" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volt Peak Trap""",
}, # notification
"cmPowMonDeciAmpsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10310",
"status" : "current",
"objects" : {
"powMonDeciAmps" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DeciAmps Trap""",
}, # notification
"cmPowMonRealPowerNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10311",
"status" : "current",
"objects" : {
"powMonRealPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power Trap""",
}, # notification
"cmPowMonApparentPowerNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10312",
"status" : "current",
"objects" : {
"powMonApparentPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power Trap""",
}, # notification
"cmPowMonPowerFactorNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10313",
"status" : "current",
"objects" : {
"powMonPowerFactor" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor Trap""",
}, # notification
"cmPowMonOutlet1NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10314",
"status" : "current",
"objects" : {
"powMonOutlet1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet 1 Clear Trap""",
}, # notification
"cmPowMonOutlet2NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10315",
"status" : "current",
"objects" : {
"powMonOutlet2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet 2 Clear Trap""",
}, # notification
"cmTempSensorTempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10405",
"status" : "current",
"objects" : {
"tempSensorTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"tempSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Temp Sensor - Temperature Trap""",
}, # notification
"cmTempSensorTempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10406",
"status" : "current",
"objects" : {
"tempSensorTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"tempSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Temp Sensor - Temperature Trap""",
}, # notification
"cmAirFlowSensorTempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10505",
"status" : "current",
"objects" : {
"airFlowSensorTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Temperature Trap""",
}, # notification
"cmAirFlowSensorTempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10506",
"status" : "current",
"objects" : {
"airFlowSensorTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Temperature Trap""",
}, # notification
"cmAirFlowSensorFlowNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10507",
"status" : "current",
"objects" : {
"airFlowSensorFlow" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Air Flow Trap""",
}, # notification
"cmAirFlowSensorHumidityNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10508",
"status" : "current",
"objects" : {
"airFlowSensorHumidity" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Humidity""",
}, # notification
"cmAirFlowSensorDewPointCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10509",
"status" : "current",
"objects" : {
"airFlowSensorDewPointC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Dew Point Trap""",
}, # notification
"cmAirFlowSensorDewPointFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10510",
"status" : "current",
"objects" : {
"airFlowSensorDewPointF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Dew Point Trap""",
}, # notification
"cmPowerVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10605",
"status" : "current",
"objects" : {
"powerVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Volts Trap""",
}, # notification
"cmPowerDeciAmpsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10606",
"status" : "current",
"objects" : {
"powerDeciAmps" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Amps Trap""",
}, # notification
"cmPowerRealPowerNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10607",
"status" : "current",
"objects" : {
"powerRealPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Watts Trap""",
}, # notification
"cmPowerApparentPowerNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10608",
"status" : "current",
"objects" : {
"powerApparentPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Volt Amps Trap""",
}, # notification
"cmPowerPowerFactorNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10609",
"status" : "current",
"objects" : {
"powerPowerFactor" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Power Factor Trap""",
}, # notification
"cmDoorSensorStatusNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10705",
"status" : "current",
"objects" : {
"doorSensorStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"doorSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Door sensor Trap""",
}, # notification
"cmWaterSensorDampnessNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10805",
"status" : "current",
"objects" : {
"waterSensorDampness" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"waterSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Water sensor Trap""",
}, # notification
"cmCurrentMonitorDeciAmpsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.10905",
"status" : "current",
"objects" : {
"currentMonitorDeciAmps" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"currentMonitorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Current Monitor Amps Trap""",
}, # notification
"cmMillivoltMonitorMVNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11005",
"status" : "current",
"objects" : {
"millivoltMonitorMV" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"millivoltMonitorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Millivolt Monitor Trap""",
}, # notification
"cmPow3ChKWattHrsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11105",
"status" : "current",
"objects" : {
"pow3ChKWattHrsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours A Trap""",
}, # notification
"cmPow3ChVoltsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11106",
"status" : "current",
"objects" : {
"pow3ChVoltsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts A Trap""",
}, # notification
"cmPow3ChVoltMaxANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11107",
"status" : "current",
"objects" : {
"pow3ChVoltMaxA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Max A Trap""",
}, # notification
"cmPow3ChVoltMinANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11108",
"status" : "current",
"objects" : {
"pow3ChVoltMinA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Min A Trap""",
}, # notification
"cmPow3ChVoltPeakANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11109",
"status" : "current",
"objects" : {
"pow3ChVoltPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volt Peak A Trap""",
}, # notification
"cmPow3ChDeciAmpsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11110",
"status" : "current",
"objects" : {
"pow3ChDeciAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps A Trap""",
}, # notification
"cmPow3ChRealPowerANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11111",
"status" : "current",
"objects" : {
"pow3ChRealPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power A Trap""",
}, # notification
"cmPow3ChApparentPowerANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11112",
"status" : "current",
"objects" : {
"pow3ChApparentPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power A Trap""",
}, # notification
"cmPow3ChPowerFactorANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11113",
"status" : "current",
"objects" : {
"pow3ChPowerFactorA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor A Trap""",
}, # notification
"cmPow3ChKWattHrsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11114",
"status" : "current",
"objects" : {
"pow3ChKWattHrsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours B Trap""",
}, # notification
"cmPow3ChVoltsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11115",
"status" : "current",
"objects" : {
"pow3ChVoltsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts B Trap""",
}, # notification
"cmPow3ChVoltMaxBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11116",
"status" : "current",
"objects" : {
"pow3ChVoltMaxB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Max B Trap""",
}, # notification
"cmPow3ChVoltMinBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11117",
"status" : "current",
"objects" : {
"pow3ChVoltMinB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Min B Trap""",
}, # notification
"cmPow3ChVoltPeakBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11118",
"status" : "current",
"objects" : {
"pow3ChVoltPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volt Peak B Trap""",
}, # notification
"cmPow3ChDeciAmpsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11119",
"status" : "current",
"objects" : {
"pow3ChDeciAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps B Trap""",
}, # notification
"cmPow3ChRealPowerBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11120",
"status" : "current",
"objects" : {
"pow3ChRealPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power B Trap""",
}, # notification
"cmPow3ChApparentPowerBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11121",
"status" : "current",
"objects" : {
"pow3ChApparentPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power B Trap""",
}, # notification
"cmPow3ChPowerFactorBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11122",
"status" : "current",
"objects" : {
"pow3ChPowerFactorB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor B Trap""",
}, # notification
"cmPow3ChKWattHrsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11123",
"status" : "current",
"objects" : {
"pow3ChKWattHrsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours C Trap""",
}, # notification
"cmPow3ChVoltsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11124",
"status" : "current",
"objects" : {
"pow3ChVoltsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts C Trap""",
}, # notification
"cmPow3ChVoltMaxCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11125",
"status" : "current",
"objects" : {
"pow3ChVoltMaxC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Max C Trap""",
}, # notification
"cmPow3ChVoltMinCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11126",
"status" : "current",
"objects" : {
"pow3ChVoltMinC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Min C Trap""",
}, # notification
"cmPow3ChVoltPeakCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11127",
"status" : "current",
"objects" : {
"pow3ChVoltPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volt Peak C Trap""",
}, # notification
"cmPow3ChDeciAmpsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11128",
"status" : "current",
"objects" : {
"pow3ChDeciAmpsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps C Trap""",
}, # notification
"cmPow3ChRealPowerCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11129",
"status" : "current",
"objects" : {
"pow3ChRealPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power C Trap""",
}, # notification
"cmPow3ChApparentPowerCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11130",
"status" : "current",
"objects" : {
"pow3ChApparentPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power C Trap""",
}, # notification
"cmPow3ChPowerFactorCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11131",
"status" : "current",
"objects" : {
"pow3ChPowerFactorC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor C Trap""",
}, # notification
"cmOutlet1StatusNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11205",
"status" : "current",
"objects" : {
"outlet1Status" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"outletName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet 1 Status Trap""",
}, # notification
"cmOutlet2StatusNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11206",
"status" : "current",
"objects" : {
"outlet2Status" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"outletName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet 2 Status Trap""",
}, # notification
"cmVsfcSetPointCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11305",
"status" : "current",
"objects" : {
"vsfcSetPointC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Temp Set Point Sensor Trap""",
}, # notification
"cmVsfcSetPointFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11306",
"status" : "current",
"objects" : {
"vsfcSetPointF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Temp Set Point Sensor Trap""",
}, # notification
"cmVsfcFanSpeedNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11307",
"status" : "current",
"objects" : {
"vsfcFanSpeed" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Fan Speed Sensor Trap""",
}, # notification
"cmVsfcIntTempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11308",
"status" : "current",
"objects" : {
"vsfcIntTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Internal Temp Sensor Trap""",
}, # notification
"cmVsfcIntTempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11309",
"status" : "current",
"objects" : {
"vsfcIntTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Internal Temp Sensor Trap""",
}, # notification
"cmVsfcExt1TempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11310",
"status" : "current",
"objects" : {
"vsfcExt1TempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Trap""",
}, # notification
"cmVsfcExt1TempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11311",
"status" : "current",
"objects" : {
"vsfcExt1TempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Trap""",
}, # notification
"cmVsfcExt2TempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11312",
"status" : "current",
"objects" : {
"vsfcExt2TempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 2 Sensor Trap""",
}, # notification
"cmVsfcExt2TempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11313",
"status" : "current",
"objects" : {
"vsfcExt2TempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Trap""",
}, # notification
"cmVsfcExt3TempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11314",
"status" : "current",
"objects" : {
"vsfcExt3TempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 3 Sensor Trap""",
}, # notification
"cmVsfcExt3TempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11315",
"status" : "current",
"objects" : {
"vsfcExt3TempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Trap""",
}, # notification
"cmVsfcExt4TempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11316",
"status" : "current",
"objects" : {
"vsfcExt4TempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 4 Sensor Trap""",
}, # notification
"cmVsfcExt4TempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11317",
"status" : "current",
"objects" : {
"vsfcExt4TempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Trap""",
}, # notification
"cmCtrl3ChVoltsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11405",
"status" : "current",
"objects" : {
"ctrl3ChVoltsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts A Trap""",
}, # notification
"cmCtrl3ChVoltPeakANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11406",
"status" : "current",
"objects" : {
"ctrl3ChVoltPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak A Trap""",
}, # notification
"cmCtrl3ChDeciAmpsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11407",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps A Trap""",
}, # notification
"cmCtrl3ChDeciAmpsPeakANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11408",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak A Trap""",
}, # notification
"cmCtrl3ChRealPowerANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11409",
"status" : "current",
"objects" : {
"ctrl3ChRealPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power A Trap""",
}, # notification
"cmCtrl3ChApparentPowerANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11410",
"status" : "current",
"objects" : {
"ctrl3ChApparentPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power A Trap""",
}, # notification
"cmCtrl3ChPowerFactorANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11411",
"status" : "current",
"objects" : {
"ctrl3ChPowerFactorA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor A Trap""",
}, # notification
"cmCtrl3ChVoltsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11412",
"status" : "current",
"objects" : {
"ctrl3ChVoltsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts B Trap""",
}, # notification
"cmCtrl3ChVoltPeakBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11413",
"status" : "current",
"objects" : {
"ctrl3ChVoltPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak B Trap""",
}, # notification
"cmCtrl3ChDeciAmpsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11414",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps B Trap""",
}, # notification
"cmCtrl3ChDeciAmpsPeakBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11415",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak B Trap""",
}, # notification
"cmCtrl3ChRealPowerBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11416",
"status" : "current",
"objects" : {
"ctrl3ChRealPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power B Trap""",
}, # notification
"cmCtrl3ChApparentPowerBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11417",
"status" : "current",
"objects" : {
"ctrl3ChApparentPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power B Trap""",
}, # notification
"cmCtrl3ChPowerFactorBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11418",
"status" : "current",
"objects" : {
"ctrl3ChPowerFactorB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor B Trap""",
}, # notification
"cmCtrl3ChVoltsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11419",
"status" : "current",
"objects" : {
"ctrl3ChVoltsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts C Trap""",
}, # notification
"cmCtrl3ChVoltPeakCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11420",
"status" : "current",
"objects" : {
"ctrl3ChVoltPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak C Trap""",
}, # notification
"cmCtrl3ChDeciAmpsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11421",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps C Trap""",
}, # notification
"cmCtrl3ChDeciAmpsPeakCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11422",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak C Trap""",
}, # notification
"cmCtrl3ChRealPowerCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11423",
"status" : "current",
"objects" : {
"ctrl3ChRealPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power C Trap""",
}, # notification
"cmCtrl3ChApparentPowerCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11424",
"status" : "current",
"objects" : {
"ctrl3ChApparentPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power C Trap""",
}, # notification
"cmCtrl3ChPowerFactorCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11425",
"status" : "current",
"objects" : {
"ctrl3ChPowerFactorC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor C Trap""",
}, # notification
"cmCtrlGrpAmpsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11505",
"status" : "current",
"objects" : {
"ctrlGrpAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group A DeciAmps Trap""",
}, # notification
"cmCtrlGrpAmpsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11506",
"status" : "current",
"objects" : {
"ctrlGrpAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group B DeciAmps Trap""",
}, # notification
"cmCtrlGrpAmpsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11507",
"status" : "current",
"objects" : {
"ctrlGrpAmpsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group C DeciAmps Trap""",
}, # notification
"cmCtrlGrpAmpsDNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11508",
"status" : "current",
"objects" : {
"ctrlGrpAmpsD" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group D DeciAmps Trap""",
}, # notification
"cmCtrlGrpAmpsENOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11509",
"status" : "current",
"objects" : {
"ctrlGrpAmpsE" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group E DeciAmps Trap""",
}, # notification
"cmCtrlGrpAmpsFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11510",
"status" : "current",
"objects" : {
"ctrlGrpAmpsF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group F DeciAmps Trap""",
}, # notification
"cmCtrlGrpAmpsGNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11511",
"status" : "current",
"objects" : {
"ctrlGrpAmpsG" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group G DeciAmps Trap""",
}, # notification
"cmCtrlGrpAmpsHNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11512",
"status" : "current",
"objects" : {
"ctrlGrpAmpsH" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group H DeciAmps Trap""",
}, # notification
"cmCtrlGrpAmpsAVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11513",
"status" : "current",
"objects" : {
"ctrlGrpAmpsAVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""AVolts Trip Trap""",
}, # notification
"cmCtrlGrpAmpsBVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11514",
"status" : "current",
"objects" : {
"ctrlGrpAmpsBVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""BVolts Trip Trap""",
}, # notification
"cmCtrlGrpAmpsCVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11515",
"status" : "current",
"objects" : {
"ctrlGrpAmpsCVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""CVolts Trip Trap""",
}, # notification
"cmCtrlGrpAmpsDVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11516",
"status" : "current",
"objects" : {
"ctrlGrpAmpsDVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DVolts Trip Trap""",
}, # notification
"cmCtrlGrpAmpsEVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11517",
"status" : "current",
"objects" : {
"ctrlGrpAmpsEVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""EVolts Trip Trap""",
}, # notification
"cmCtrlGrpAmpsFVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11518",
"status" : "current",
"objects" : {
"ctrlGrpAmpsFVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""FVolts Trip Trap""",
}, # notification
"cmCtrlGrpAmpsGVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11519",
"status" : "current",
"objects" : {
"ctrlGrpAmpsGVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""GVolts Trip Trap""",
}, # notification
"cmCtrlGrpAmpsHVoltsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11520",
"status" : "current",
"objects" : {
"ctrlGrpAmpsHVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""HVolts Trip Trap""",
}, # notification
"cmCtrlOutletPendingNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11605",
"status" : "current",
"objects" : {
"ctrlOutletPending" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Pending Trip Trap""",
}, # notification
"cmCtrlOutletDeciAmpsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11606",
"status" : "current",
"objects" : {
"ctrlOutletDeciAmps" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet DeciAmps Trap""",
}, # notification
"cmCtrlOutletGroupNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11607",
"status" : "current",
"objects" : {
"ctrlOutletGroup" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group Trip Trap""",
}, # notification
"cmCtrlOutletUpDelayNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11608",
"status" : "current",
"objects" : {
"ctrlOutletUpDelay" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""UpDelay Trip Trap""",
}, # notification
"cmCtrlOutletDwnDelayNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11609",
"status" : "current",
"objects" : {
"ctrlOutletDwnDelay" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DwnDelay Trip Trap""",
}, # notification
"cmCtrlOutletRbtDelayNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11610",
"status" : "current",
"objects" : {
"ctrlOutletRbtDelay" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RbtDelay Trip Trap""",
}, # notification
"cmCtrlOutletURLNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11611",
"status" : "current",
"objects" : {
"ctrlOutletURL" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""URL Trip Trap""",
}, # notification
"cmCtrlOutletPOAActionNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11612",
"status" : "current",
"objects" : {
"ctrlOutletPOAAction" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""POAAction Trip Trap""",
}, # notification
"cmCtrlOutletPOADelayNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11613",
"status" : "current",
"objects" : {
"ctrlOutletPOADelay" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""POADelay Trip Trap""",
}, # notification
"cmCtrlOutletKWattHrsNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11614",
"status" : "current",
"objects" : {
"ctrlOutletKWattHrs" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""KWattHrs Trip Trap""",
}, # notification
"cmCtrlOutletPowerNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11615",
"status" : "current",
"objects" : {
"ctrlOutletPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Trip Trap""",
}, # notification
"cmDewPointSensorTempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11705",
"status" : "current",
"objects" : {
"dewPointSensorTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Temperature Trap""",
}, # notification
"cmDewPointSensorTempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11706",
"status" : "current",
"objects" : {
"dewPointSensorTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Temperature Trap""",
}, # notification
"cmDewPointSensorHumidityNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11707",
"status" : "current",
"objects" : {
"dewPointSensorHumidity" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Humidity""",
}, # notification
"cmDewPointSensorDewPointCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11708",
"status" : "current",
"objects" : {
"dewPointSensorDewPointC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Dew Point Trap""",
}, # notification
"cmDewPointSensorDewPointFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11709",
"status" : "current",
"objects" : {
"dewPointSensorDewPointF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Dew Point Trap""",
}, # notification
"cmDigitalSensorDigitalNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11805",
"status" : "current",
"objects" : {
"digitalSensorDigital" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"digitalSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Digital sensor Trap""",
}, # notification
"cmDstsVoltsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11905",
"status" : "current",
"objects" : {
"dstsVoltsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RMS Voltage of Side A Set Point Sensor Trap""",
}, # notification
"cmDstsDeciAmpsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11906",
"status" : "current",
"objects" : {
"dstsDeciAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RMS Current of Side A Set Point Sensor Trap""",
}, # notification
"cmDstsVoltsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11907",
"status" : "current",
"objects" : {
"dstsVoltsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RMS Voltage of Side B Set Point Sensor Trap""",
}, # notification
"cmDstsDeciAmpsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11908",
"status" : "current",
"objects" : {
"dstsDeciAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RMS Current of Side B Set Point Sensor Trap""",
}, # notification
"cmDstsSourceAActiveNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11909",
"status" : "current",
"objects" : {
"dstsSourceAActive" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source A Active Set Point Sensor Trap""",
}, # notification
"cmDstsSourceBActiveNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11910",
"status" : "current",
"objects" : {
"dstsSourceBActive" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source B Active Set Point Sensor Trap""",
}, # notification
"cmDstsPowerStatusANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11911",
"status" : "current",
"objects" : {
"dstsPowerStatusA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source A Power Qualilty Active Set Point Sensor Trap""",
}, # notification
"cmDstsPowerStatusBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11912",
"status" : "current",
"objects" : {
"dstsPowerStatusB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source B Power Qualilty Active Set Point Sensor Trap""",
}, # notification
"cmDstsSourceATempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11913",
"status" : "current",
"objects" : {
"dstsSourceATempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source A Temp Sensor Trap""",
}, # notification
"cmDstsSourceBTempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.11914",
"status" : "current",
"objects" : {
"dstsSourceBTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source B Temp Sensor Trap""",
}, # notification
"cmCpmSensorStatusNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12005",
"status" : "current",
"objects" : {
"cpmSensorStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"cpmSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""City Power sensor Trap""",
}, # notification
"cmSmokeAlarmStatusNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12105",
"status" : "current",
"objects" : {
"smokeAlarmStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"smokeAlarmName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Smoke alarm Trap""",
}, # notification
"cmNeg48VdcSensorVoltageNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12205",
"status" : "current",
"objects" : {
"neg48VdcSensorVoltage" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"neg48VdcSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""-48Vdc Sensor Trap""",
}, # notification
"cmPos30VdcSensorVoltageNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12305",
"status" : "current",
"objects" : {
"pos30VdcSensorVoltage" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pos30VdcSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""30Vdc Sensor Trap""",
}, # notification
"cmAnalogSensorAnalogNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12405",
"status" : "current",
"objects" : {
"analogSensorAnalog" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"analogSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Analog Sensor Trap""",
}, # notification
"cmCtrl3ChIECKWattHrsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12505",
"status" : "current",
"objects" : {
"ctrl3ChIECKWattHrsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours A Trap""",
}, # notification
"cmCtrl3ChIECVoltsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12506",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts A Trap""",
}, # notification
"cmCtrl3ChIECVoltPeakANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12507",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak A Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12508",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps A Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsPeakANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12509",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak A Trap""",
}, # notification
"cmCtrl3ChIECRealPowerANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12510",
"status" : "current",
"objects" : {
"ctrl3ChIECRealPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power A Trap""",
}, # notification
"cmCtrl3ChIECApparentPowerANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12511",
"status" : "current",
"objects" : {
"ctrl3ChIECApparentPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power A Trap""",
}, # notification
"cmCtrl3ChIECPowerFactorANOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12512",
"status" : "current",
"objects" : {
"ctrl3ChIECPowerFactorA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor A Trap""",
}, # notification
"cmCtrl3ChIECKWattHrsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12513",
"status" : "current",
"objects" : {
"ctrl3ChIECKWattHrsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours B Trap""",
}, # notification
"cmCtrl3ChIECVoltsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12514",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts B Trap""",
}, # notification
"cmCtrl3ChIECVoltPeakBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12515",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak B Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12516",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps B Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsPeakBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12517",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak B Trap""",
}, # notification
"cmCtrl3ChIECRealPowerBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12518",
"status" : "current",
"objects" : {
"ctrl3ChIECRealPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power B Trap""",
}, # notification
"cmCtrl3ChIECApparentPowerBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12519",
"status" : "current",
"objects" : {
"ctrl3ChIECApparentPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power B Trap""",
}, # notification
"cmCtrl3ChIECPowerFactorBNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12520",
"status" : "current",
"objects" : {
"ctrl3ChIECPowerFactorB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor B Trap""",
}, # notification
"cmCtrl3ChIECKWattHrsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12521",
"status" : "current",
"objects" : {
"ctrl3ChIECKWattHrsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours C Trap""",
}, # notification
"cmCtrl3ChIECVoltsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12522",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts C Trap""",
}, # notification
"cmCtrl3ChIECVoltPeakCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12523",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak C Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12524",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps C Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsPeakCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12525",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak C Trap""",
}, # notification
"cmCtrl3ChIECRealPowerCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12526",
"status" : "current",
"objects" : {
"ctrl3ChIECRealPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power C Trap""",
}, # notification
"cmCtrl3ChIECApparentPowerCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12527",
"status" : "current",
"objects" : {
"ctrl3ChIECApparentPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power C Trap""",
}, # notification
"cmCtrl3ChIECPowerFactorCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12528",
"status" : "current",
"objects" : {
"ctrl3ChIECPowerFactorC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor C Trap""",
}, # notification
"cmClimateRelayTempCNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12605",
"status" : "current",
"objects" : {
"climateRelayTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay Temperature Sensor Trap""",
}, # notification
"cmClimateRelayTempFNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12606",
"status" : "current",
"objects" : {
"climateRelayTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay Temperature Sensor Trap""",
}, # notification
"cmClimateRelayIO1NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12607",
"status" : "current",
"objects" : {
"climateRelayIO1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO1 Sensor Trap""",
}, # notification
"cmClimateRelayIO2NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12608",
"status" : "current",
"objects" : {
"climateRelayIO2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO2 Sensor Trap""",
}, # notification
"cmClimateRelayIO3NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12609",
"status" : "current",
"objects" : {
"climateRelayIO3" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO3 Sensor Trap""",
}, # notification
"cmClimateRelayIO4NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12610",
"status" : "current",
"objects" : {
"climateRelayIO4" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO4 Sensor Trap""",
}, # notification
"cmClimateRelayIO5NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12611",
"status" : "current",
"objects" : {
"climateRelayIO5" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO5 Sensor Trap""",
}, # notification
"cmClimateRelayIO6NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12612",
"status" : "current",
"objects" : {
"climateRelayIO6" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO6 Sensor Trap""",
}, # notification
"cmAirSpeedSwitchSensorAirSpeedNOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.12805",
"status" : "current",
"objects" : {
"airSpeedSwitchSensorAirSpeed" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airSpeedSwitchSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Air Speed Switch Trap""",
}, # notification
"cmIoExpanderIO1NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13037",
"status" : "current",
"objects" : {
"ioExpanderIO1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO1 Sensor Trap""",
}, # notification
"cmIoExpanderIO2NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13038",
"status" : "current",
"objects" : {
"ioExpanderIO2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO2 Sensor Trap""",
}, # notification
"cmIoExpanderIO3NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13039",
"status" : "current",
"objects" : {
"ioExpanderIO3" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO3 Sensor Trap""",
}, # notification
"cmIoExpanderIO4NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13040",
"status" : "current",
"objects" : {
"ioExpanderIO4" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO4 Sensor Trap""",
}, # notification
"cmIoExpanderIO5NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13041",
"status" : "current",
"objects" : {
"ioExpanderIO5" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO5 Sensor Trap""",
}, # notification
"cmIoExpanderIO6NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13042",
"status" : "current",
"objects" : {
"ioExpanderIO6" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO6 Sensor Trap""",
}, # notification
"cmIoExpanderIO7NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13043",
"status" : "current",
"objects" : {
"ioExpanderIO7" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO7 Sensor Trap""",
}, # notification
"cmIoExpanderIO8NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13044",
"status" : "current",
"objects" : {
"ioExpanderIO8" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO8 Sensor Trap""",
}, # notification
"cmIoExpanderIO9NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13045",
"status" : "current",
"objects" : {
"ioExpanderIO9" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO9 Sensor Trap""",
}, # notification
"cmIoExpanderIO10NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13046",
"status" : "current",
"objects" : {
"ioExpanderIO10" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO10 Sensor Trap""",
}, # notification
"cmIoExpanderIO11NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13047",
"status" : "current",
"objects" : {
"ioExpanderIO11" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO11 Sensor Trap""",
}, # notification
"cmIoExpanderIO12NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13048",
"status" : "current",
"objects" : {
"ioExpanderIO12" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO12 Sensor Trap""",
}, # notification
"cmIoExpanderIO13NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13049",
"status" : "current",
"objects" : {
"ioExpanderIO13" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO13 Sensor Trap""",
}, # notification
"cmIoExpanderIO14NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13050",
"status" : "current",
"objects" : {
"ioExpanderIO14" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO14 Sensor Trap""",
}, # notification
"cmIoExpanderIO15NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13051",
"status" : "current",
"objects" : {
"ioExpanderIO15" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO15 Sensor Trap""",
}, # notification
"cmIoExpanderIO16NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13052",
"status" : "current",
"objects" : {
"ioExpanderIO16" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO16 Sensor Trap""",
}, # notification
"cmIoExpanderIO17NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13053",
"status" : "current",
"objects" : {
"ioExpanderIO17" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO17 Sensor Trap""",
}, # notification
"cmIoExpanderIO18NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13054",
"status" : "current",
"objects" : {
"ioExpanderIO18" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO18 Sensor Trap""",
}, # notification
"cmIoExpanderIO19NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13055",
"status" : "current",
"objects" : {
"ioExpanderIO19" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO19 Sensor Trap""",
}, # notification
"cmIoExpanderIO20NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13056",
"status" : "current",
"objects" : {
"ioExpanderIO20" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO20 Sensor Trap""",
}, # notification
"cmIoExpanderIO21NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13057",
"status" : "current",
"objects" : {
"ioExpanderIO21" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO21 Sensor Trap""",
}, # notification
"cmIoExpanderIO22NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13058",
"status" : "current",
"objects" : {
"ioExpanderIO22" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO22 Sensor Trap""",
}, # notification
"cmIoExpanderIO23NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13059",
"status" : "current",
"objects" : {
"ioExpanderIO23" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO23 Sensor Trap""",
}, # notification
"cmIoExpanderIO24NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13060",
"status" : "current",
"objects" : {
"ioExpanderIO24" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO24 Sensor Trap""",
}, # notification
"cmIoExpanderIO25NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13061",
"status" : "current",
"objects" : {
"ioExpanderIO25" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO25 Sensor Trap""",
}, # notification
"cmIoExpanderIO26NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13062",
"status" : "current",
"objects" : {
"ioExpanderIO26" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO26 Sensor Trap""",
}, # notification
"cmIoExpanderIO27NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13063",
"status" : "current",
"objects" : {
"ioExpanderIO27" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO27 Sensor Trap""",
}, # notification
"cmIoExpanderIO28NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13064",
"status" : "current",
"objects" : {
"ioExpanderIO28" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO28 Sensor Trap""",
}, # notification
"cmIoExpanderIO29NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13065",
"status" : "current",
"objects" : {
"ioExpanderIO29" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO29 Sensor Trap""",
}, # notification
"cmIoExpanderIO30NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13066",
"status" : "current",
"objects" : {
"ioExpanderIO30" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO30 Sensor Trap""",
}, # notification
"cmIoExpanderIO31NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13067",
"status" : "current",
"objects" : {
"ioExpanderIO31" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO31 Sensor Trap""",
}, # notification
"cmIoExpanderIO32NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.13068",
"status" : "current",
"objects" : {
"ioExpanderIO32" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO32 Sensor Trap""",
}, # notification
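        # The entries that follow are the CLEAR traps.  Within this table each
        # one appears to mirror a *NOTIFY entry, with the "NOTIFY" suffix
        # replaced by "CLEAR" and the trap OID moved from the
        # 1.3.6.1.4.1.17373.3.32767.0.12xxx / 0.13xxx range into 0.2xxxx.
        # A hedged pairing sketch is kept as comments; the helper name and the
        # assumption that the matching NOTIFY entry exists elsewhere in this
        # table are illustrative only.
        #
        #   def clear_counterpart(mib, notify_name):
        #       # Map e.g. "cmClimateTempCNOTIFY" -> the "cmClimateTempCCLEAR" entry.
        #       return mib.get(notify_name.replace("NOTIFY", "CLEAR"))
        #
        #   # e.g. clear_counterpart(mib, "cmClimateTempCNOTIFY")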
"cmClimateTempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20205",
"status" : "current",
"objects" : {
"climateTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Temperature Sensor Clear Trap""",
}, # notification
"cmClimateTempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20206",
"status" : "current",
"objects" : {
"climateTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Temperature Sensor Clear Trap""",
}, # notification
"cmClimateHumidityCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20207",
"status" : "current",
"objects" : {
"climateHumidity" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Humidity Sensor Clear Trap""",
}, # notification
"cmClimateLightCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20208",
"status" : "current",
"objects" : {
"climateLight" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Light Sensor Clear Trap""",
}, # notification
"cmClimateAirflowCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20209",
"status" : "current",
"objects" : {
"climateAirflow" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Air Flow Sensor Clear Trap""",
}, # notification
"cmClimateSoundCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20210",
"status" : "current",
"objects" : {
"climateSound" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Sound Sensor Clear Trap""",
}, # notification
"cmClimateIO1CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20211",
"status" : "current",
"objects" : {
"climateIO1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate IO1 Sensor Clear Trap""",
}, # notification
"cmClimateIO2CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20212",
"status" : "current",
"objects" : {
"climateIO2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate IO2 Sensor Clear Trap""",
}, # notification
"cmClimateIO3CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20213",
"status" : "current",
"objects" : {
"climateIO3" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate IO3 Sensor Clear Trap""",
}, # notification
"cmClimateDewPointCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20214",
"status" : "current",
"objects" : {
"climateDewPointC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Dew Point Sensor Clear Trap""",
}, # notification
"cmClimateDewPointFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20215",
"status" : "current",
"objects" : {
"climateDewPointF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Dew Point Sensor Clear Trap""",
}, # notification
"cmPowMonKWattHrsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20305",
"status" : "current",
"objects" : {
"powMonKWattHrs" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours Clear Trap""",
}, # notification
"cmPowMonVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20306",
"status" : "current",
"objects" : {
"powMonVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Clear Trap""",
}, # notification
"cmPowMonVoltMaxCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20307",
"status" : "current",
"objects" : {
"powMonVoltMax" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Max Clear Trap""",
}, # notification
"cmPowMonVoltMinCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20308",
"status" : "current",
"objects" : {
"powMonVoltMin" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Min Clear Trap""",
}, # notification
"cmPowMonVoltPeakCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20309",
"status" : "current",
"objects" : {
"powMonVoltPeak" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volt Peak Clear Trap""",
}, # notification
"cmPowMonDeciAmpsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20310",
"status" : "current",
"objects" : {
"powMonDeciAmps" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DeciAmps Clear Trap""",
}, # notification
"cmPowMonRealPowerCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20311",
"status" : "current",
"objects" : {
"powMonRealPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power Clear Trap""",
}, # notification
"cmPowMonApparentPowerCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20312",
"status" : "current",
"objects" : {
"powMonApparentPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power Clear Trap""",
}, # notification
"cmPowMonPowerFactorCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20313",
"status" : "current",
"objects" : {
"powMonPowerFactor" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor Clear Trap""",
}, # notification
"cmPowMonOutlet1CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20314",
"status" : "current",
"objects" : {
"powMonOutlet1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet1 Clear Trap""",
}, # notification
"cmPowMonOutlet2CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20315",
"status" : "current",
"objects" : {
"powMonOutlet2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powMonName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet2 Clear Trap""",
}, # notification
"cmTempSensorTempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20405",
"status" : "current",
"objects" : {
"tempSensorTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"tempSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Temp Sensor - Temperature Clear Trap""",
}, # notification
"cmTempSensorTempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20406",
"status" : "current",
"objects" : {
"tempSensorTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"tempSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Temp Sensor - Temperature Clear Trap""",
}, # notification
"cmAirFlowSensorTempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20505",
"status" : "current",
"objects" : {
"airFlowSensorTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Temperature Clear Trap""",
}, # notification
"cmAirFlowSensorTempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20506",
"status" : "current",
"objects" : {
"airFlowSensorTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Temperature Clear Trap""",
}, # notification
"cmAirFlowSensorFlowCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20507",
"status" : "current",
"objects" : {
"airFlowSensorFlow" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Air Flow Clear Trap""",
}, # notification
"cmAirFlowSensorHumidityCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20508",
"status" : "current",
"objects" : {
"airFlowSensorHumidity" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Humidity Clear Trap""",
}, # notification
"cmAirFlowSensorDewPointCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20509",
"status" : "current",
"objects" : {
"airFlowSensorDewPointC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Dew Point Clear Trap""",
}, # notification
"cmAirFlowSensorDewPointFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20510",
"status" : "current",
"objects" : {
"airFlowSensorDewPointF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airFlowSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Air Flow Sensor - Dew Point Clear Trap""",
}, # notification
"cmPowerVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20605",
"status" : "current",
"objects" : {
"powerVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Volts Clear Trap""",
}, # notification
"cmPowerDeciAmpsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20606",
"status" : "current",
"objects" : {
"powerDeciAmps" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Amps Clear Trap""",
}, # notification
"cmPowerRealPowerCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20607",
"status" : "current",
"objects" : {
"powerRealPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Watts Clear Trap""",
}, # notification
"cmPowerApparentPowerCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20608",
"status" : "current",
"objects" : {
"powerApparentPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Volt Amps Clear Trap""",
}, # notification
"cmPowerPowerFactorCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20609",
"status" : "current",
"objects" : {
"powerPowerFactor" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power-Only Power Factor Clear Trap""",
}, # notification
"cmDoorSensorStatusCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20705",
"status" : "current",
"objects" : {
"doorSensorStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"doorSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Door sensor Clear Trap""",
}, # notification
"cmWaterSensorDampnessCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20805",
"status" : "current",
"objects" : {
"waterSensorDampness" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"waterSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Water sensor Clear Trap""",
}, # notification
"cmCurrentMonitorDeciAmpsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.20905",
"status" : "current",
"objects" : {
"currentMonitorDeciAmps" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"currentMonitorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Current Monitor Amps Clear Trap""",
}, # notification
"cmMillivoltMonitorMVCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21005",
"status" : "current",
"objects" : {
"millivoltMonitorMV" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"millivoltMonitorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Millivolt Monitor Clear Trap""",
}, # notification
"cmPow3ChKWattHrsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21105",
"status" : "current",
"objects" : {
"pow3ChKWattHrsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours A Clear Trap""",
}, # notification
"cmPow3ChVoltsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21106",
"status" : "current",
"objects" : {
"pow3ChVoltsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts A Clear Trap""",
}, # notification
"cmPow3ChVoltMaxACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21107",
"status" : "current",
"objects" : {
"pow3ChVoltMaxA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Max A Clear Trap""",
}, # notification
"cmPow3ChVoltMinACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21108",
"status" : "current",
"objects" : {
"pow3ChVoltMinA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Min A Clear Trap""",
}, # notification
"cmPow3ChVoltPeakACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21109",
"status" : "current",
"objects" : {
"pow3ChVoltPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volt Peak A Clear Trap""",
}, # notification
"cmPow3ChDeciAmpsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21110",
"status" : "current",
"objects" : {
"pow3ChDeciAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps A Clear Trap""",
}, # notification
"cmPow3ChRealPowerACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21111",
"status" : "current",
"objects" : {
"pow3ChRealPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power A Clear Trap""",
}, # notification
"cmPow3ChApparentPowerACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21112",
"status" : "current",
"objects" : {
"pow3ChApparentPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power A Clear Trap""",
}, # notification
"cmPow3ChPowerFactorACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21113",
"status" : "current",
"objects" : {
"pow3ChPowerFactorA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor A Clear Trap""",
}, # notification
"cmPow3ChKWattHrsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21114",
"status" : "current",
"objects" : {
"pow3ChKWattHrsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours B Clear Trap""",
}, # notification
"cmPow3ChVoltsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21115",
"status" : "current",
"objects" : {
"pow3ChVoltsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts B Clear Trap""",
}, # notification
"cmPow3ChVoltMaxBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21116",
"status" : "current",
"objects" : {
"pow3ChVoltMaxB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Max B Clear Trap""",
}, # notification
"cmPow3ChVoltMinBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21117",
"status" : "current",
"objects" : {
"pow3ChVoltMinB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Min B Clear Trap""",
}, # notification
"cmPow3ChVoltPeakBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21118",
"status" : "current",
"objects" : {
"pow3ChVoltPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volt Peak B Clear Trap""",
}, # notification
"cmPow3ChDeciAmpsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21119",
"status" : "current",
"objects" : {
"pow3ChDeciAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps B Clear Trap""",
}, # notification
"cmPow3ChRealPowerBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21120",
"status" : "current",
"objects" : {
"pow3ChRealPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power B Clear Trap""",
}, # notification
"cmPow3ChApparentPowerBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21121",
"status" : "current",
"objects" : {
"pow3ChApparentPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power B Clear Trap""",
}, # notification
"cmPow3ChPowerFactorBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21122",
"status" : "current",
"objects" : {
"pow3ChPowerFactorB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor B Clear Trap""",
}, # notification
"cmPow3ChKWattHrsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21123",
"status" : "current",
"objects" : {
"pow3ChKWattHrsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours C Clear Trap""",
}, # notification
"cmPow3ChVoltsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21124",
"status" : "current",
"objects" : {
"pow3ChVoltsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts C Clear Trap""",
}, # notification
"cmPow3ChVoltMaxCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21125",
"status" : "current",
"objects" : {
"pow3ChVoltMaxC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Max C Clear Trap""",
}, # notification
"cmPow3ChVoltMinCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21126",
"status" : "current",
"objects" : {
"pow3ChVoltMinC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Min C Clear Trap""",
}, # notification
"cmPow3ChVoltPeakCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21127",
"status" : "current",
"objects" : {
"pow3ChVoltPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volt Peak C Clear Trap""",
}, # notification
"cmPow3ChDeciAmpsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21128",
"status" : "current",
"objects" : {
"pow3ChDeciAmpsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps C Clear Trap""",
}, # notification
"cmPow3ChRealPowerCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21129",
"status" : "current",
"objects" : {
"pow3ChRealPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power C Clear Trap""",
}, # notification
"cmPow3ChApparentPowerCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21130",
"status" : "current",
"objects" : {
"pow3ChApparentPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power C Clear Trap""",
}, # notification
"cmPow3ChPowerFactorCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21131",
"status" : "current",
"objects" : {
"pow3ChPowerFactorC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pow3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor C Clear Trap""",
}, # notification
"cmOutlet1StatusCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21205",
"status" : "current",
"objects" : {
"outlet1Status" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"outletName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet 1 Status Clear Trap""",
}, # notification
"cmOutlet2StatusCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21206",
"status" : "current",
"objects" : {
"outlet2Status" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"outletName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet 2 Status Clear Trap""",
}, # notification
"cmVsfcSetPointCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21305",
"status" : "current",
"objects" : {
"vsfcSetPointC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Temp Set Point Sensor Clear""",
}, # notification
"cmVsfcSetPointFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21306",
"status" : "current",
"objects" : {
"vsfcSetPointF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Temp Set Point Sensor Clear""",
}, # notification
"cmVsfcFanSpeedCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21307",
"status" : "current",
"objects" : {
"vsfcFanSpeed" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Fan Speed Sensor Clear""",
}, # notification
"cmVsfcIntTempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21308",
"status" : "current",
"objects" : {
"vsfcIntTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Internal Temp Sensor Clear""",
}, # notification
"cmVsfcIntTempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21309",
"status" : "current",
"objects" : {
"vsfcIntTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc Internal Temp Sensor Clear""",
}, # notification
"cmVsfcExt1TempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21310",
"status" : "current",
"objects" : {
"vsfcExt1TempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Clear""",
}, # notification
"cmVsfcExt1TempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21311",
"status" : "current",
"objects" : {
"vsfcExt1TempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Clear""",
}, # notification
"cmVsfcExt2TempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21312",
"status" : "current",
"objects" : {
"vsfcExt2TempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 2 Sensor Clear""",
}, # notification
"cmVsfcExt2TempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21313",
"status" : "current",
"objects" : {
"vsfcExt2TempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Clear""",
}, # notification
"cmVsfcExt3TempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21314",
"status" : "current",
"objects" : {
"vsfcExt3TempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 3 Sensor Clear""",
}, # notification
"cmVsfcExt3TempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21315",
"status" : "current",
"objects" : {
"vsfcExt3TempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Clear""",
}, # notification
"cmVsfcExt4TempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21316",
"status" : "current",
"objects" : {
"vsfcExt4TempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 4 Sensor Clear""",
}, # notification
"cmVsfcExt4TempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21317",
"status" : "current",
"objects" : {
"vsfcExt4TempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"vsfcName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Vsfc External Temp 1 Sensor Clear""",
}, # notification
"cmCtrl3ChVoltsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21405",
"status" : "current",
"objects" : {
"ctrl3ChVoltsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts A Clear Trap""",
}, # notification
"cmCtrl3ChVoltPeakACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21406",
"status" : "current",
"objects" : {
"ctrl3ChVoltPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak A Clear Trap""",
}, # notification
"cmCtrl3ChDeciAmpsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21407",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps A Clear Trap""",
}, # notification
"cmCtrl3ChDeciAmpsPeakACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21408",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak A Clear Trap""",
}, # notification
"cmCtrl3ChRealPowerACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21409",
"status" : "current",
"objects" : {
"ctrl3ChRealPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power A Clear Trap""",
}, # notification
"cmCtrl3ChApparentPowerACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21410",
"status" : "current",
"objects" : {
"ctrl3ChApparentPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power A Clear Trap""",
}, # notification
"cmCtrl3ChPowerFactorACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21411",
"status" : "current",
"objects" : {
"ctrl3ChPowerFactorA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor A Clear Trap""",
}, # notification
"cmCtrl3ChVoltsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21412",
"status" : "current",
"objects" : {
"ctrl3ChVoltsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts B Clear Trap""",
}, # notification
"cmCtrl3ChVoltPeakBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21413",
"status" : "current",
"objects" : {
"ctrl3ChVoltPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak B Clear Trap""",
}, # notification
"cmCtrl3ChDeciAmpsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21414",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps B Clear Trap""",
}, # notification
"cmCtrl3ChDeciAmpsPeakBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21415",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak B Clear Trap""",
}, # notification
"cmCtrl3ChRealPowerBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21416",
"status" : "current",
"objects" : {
"ctrl3ChRealPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power B Clear Trap""",
}, # notification
"cmCtrl3ChApparentPowerBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21417",
"status" : "current",
"objects" : {
"ctrl3ChApparentPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power B Clear Trap""",
}, # notification
"cmCtrl3ChPowerFactorBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21418",
"status" : "current",
"objects" : {
"ctrl3ChPowerFactorB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor B Clear Trap""",
}, # notification
"cmCtrl3ChVoltsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21419",
"status" : "current",
"objects" : {
"ctrl3ChVoltsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts C Clear Trap""",
}, # notification
"cmCtrl3ChVoltPeakCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21420",
"status" : "current",
"objects" : {
"ctrl3ChVoltPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak C Clear Trap""",
}, # notification
"cmCtrl3ChDeciAmpsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21421",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps C Clear Trap""",
}, # notification
"cmCtrl3ChDeciAmpsPeakCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21422",
"status" : "current",
"objects" : {
"ctrl3ChDeciAmpsPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak C Clear Trap""",
}, # notification
"cmCtrl3ChRealPowerCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21423",
"status" : "current",
"objects" : {
"ctrl3ChRealPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power C Clear Trap""",
}, # notification
"cmCtrl3ChApparentPowerCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21424",
"status" : "current",
"objects" : {
"ctrl3ChApparentPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power C Clear Trap""",
}, # notification
"cmCtrl3ChPowerFactorCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21425",
"status" : "current",
"objects" : {
"ctrl3ChPowerFactorC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor C Clear Trap""",
}, # notification
"cmCtrlGrpAmpsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21505",
"status" : "current",
"objects" : {
"ctrlGrpAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group A DeciAmps Clear Trap""",
}, # notification
"cmCtrlGrpAmpsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21506",
"status" : "current",
"objects" : {
"ctrlGrpAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group B DeciAmps Clear Trap""",
}, # notification
"cmCtrlGrpAmpsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21507",
"status" : "current",
"objects" : {
"ctrlGrpAmpsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group C DeciAmps Clear Trap""",
}, # notification
"cmCtrlGrpAmpsDCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21508",
"status" : "current",
"objects" : {
"ctrlGrpAmpsD" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group D DeciAmps Clear Trap""",
}, # notification
"cmCtrlGrpAmpsECLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21509",
"status" : "current",
"objects" : {
"ctrlGrpAmpsE" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group E DeciAmps Clear Trap""",
}, # notification
"cmCtrlGrpAmpsFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21510",
"status" : "current",
"objects" : {
"ctrlGrpAmpsF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group F DeciAmps Clear Trap""",
}, # notification
"cmCtrlGrpAmpsGCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21511",
"status" : "current",
"objects" : {
"ctrlGrpAmpsG" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group G DeciAmps Clear Trap""",
}, # notification
"cmCtrlGrpAmpsHCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21512",
"status" : "current",
"objects" : {
"ctrlGrpAmpsH" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group H DeciAmps Clear Trap""",
}, # notification
"cmCtrlGrpAmpsAVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21513",
"status" : "current",
"objects" : {
"ctrlGrpAmpsAVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""AVolts Clear Trap""",
}, # notification
"cmCtrlGrpAmpsBVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21514",
"status" : "current",
"objects" : {
"ctrlGrpAmpsBVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""BVolts Clear Trap""",
}, # notification
"cmCtrlGrpAmpsCVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21515",
"status" : "current",
"objects" : {
"ctrlGrpAmpsCVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""CVolts Clear Trap""",
}, # notification
"cmCtrlGrpAmpsDVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21516",
"status" : "current",
"objects" : {
"ctrlGrpAmpsDVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DVolts Clear Trap""",
}, # notification
"cmCtrlGrpAmpsEVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21517",
"status" : "current",
"objects" : {
"ctrlGrpAmpsEVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""EVolts Clear Trap""",
}, # notification
"cmCtrlGrpAmpsFVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21518",
"status" : "current",
"objects" : {
"ctrlGrpAmpsFVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""FVolts Clear Trap""",
}, # notification
"cmCtrlGrpAmpsGVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21519",
"status" : "current",
"objects" : {
"ctrlGrpAmpsGVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""GVolts Clear Trap""",
}, # notification
"cmCtrlGrpAmpsHVoltsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21520",
"status" : "current",
"objects" : {
"ctrlGrpAmpsHVolts" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlGrpAmpsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""HVolts Clear Trap""",
}, # notification
"cmCtrlOutletPendingCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21605",
"status" : "current",
"objects" : {
"ctrlOutletPending" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Pending Clear Trap""",
}, # notification
"cmCtrlOutletDeciAmpsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21606",
"status" : "current",
"objects" : {
"ctrlOutletDeciAmps" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Outlet DeciAmps Clear Trap""",
}, # notification
"cmCtrlOutletGroupCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21607",
"status" : "current",
"objects" : {
"ctrlOutletGroup" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Group Clear Trap""",
}, # notification
"cmCtrlOutletUpDelayCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21608",
"status" : "current",
"objects" : {
"ctrlOutletUpDelay" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""UpDelay Clear Trap""",
}, # notification
"cmCtrlOutletDwnDelayCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21609",
"status" : "current",
"objects" : {
"ctrlOutletDwnDelay" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DwnDelay Clear Trap""",
}, # notification
"cmCtrlOutletRbtDelayCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21610",
"status" : "current",
"objects" : {
"ctrlOutletRbtDelay" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RbtDelay Clear Trap""",
}, # notification
"cmCtrlOutletURLCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21611",
"status" : "current",
"objects" : {
"ctrlOutletURL" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""URL Clear Trap""",
}, # notification
"cmCtrlOutletPOAActionCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21612",
"status" : "current",
"objects" : {
"ctrlOutletPOAAction" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""POAAction Clear Trap""",
}, # notification
"cmCtrlOutletPOADelayCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21613",
"status" : "current",
"objects" : {
"ctrlOutletPOADelay" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""POADelay Clear Trap""",
}, # notification
"cmCtrlOutletKWattHrsCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21614",
"status" : "current",
"objects" : {
"ctrlOutletKWattHrs" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""KWattHrs Clear Trap""",
}, # notification
"cmCtrlOutletPowerCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21615",
"status" : "current",
"objects" : {
"ctrlOutletPower" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrlOutletStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Clear Trap""",
}, # notification
"cmDewPointSensorTempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21705",
"status" : "current",
"objects" : {
"dewPointSensorTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Temperature Clear Trap""",
}, # notification
"cmDewPointSensorTempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21706",
"status" : "current",
"objects" : {
"dewPointSensorTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Temperature Clear Trap""",
}, # notification
"cmDewPointSensorHumidityCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21707",
"status" : "current",
"objects" : {
"dewPointSensorHumidity" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Humidity Clear Trap""",
}, # notification
"cmDewPointSensorDewPointCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21708",
"status" : "current",
"objects" : {
"dewPointSensorDewPointC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Dew Point Clear Trap""",
}, # notification
"cmDewPointSensorDewPointFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21709",
"status" : "current",
"objects" : {
"dewPointSensorDewPointF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dewPointSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Remote Dew Point Sensor - Dew Point Clear Trap""",
}, # notification
"cmDigitalSensorDigitalCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21805",
"status" : "current",
"objects" : {
"digitalSensorDigital" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"digitalSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Digital sensor Clear Trap""",
}, # notification
"cmDstsVoltsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21905",
"status" : "current",
"objects" : {
"dstsVoltsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RMS Voltage of Side A Set Point Sensor Clear""",
}, # notification
"cmDstsDeciAmpsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21906",
"status" : "current",
"objects" : {
"dstsDeciAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RMS Current of Side A Set Point Sensor Clear""",
}, # notification
"cmDstsVoltsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21907",
"status" : "current",
"objects" : {
"dstsVoltsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RMS Voltage of Side B Set Point Sensor Clear""",
}, # notification
"cmDstsDeciAmpsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21908",
"status" : "current",
"objects" : {
"dstsDeciAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""RMS Current of Side B Set Point Sensor Clear""",
}, # notification
"cmDstsSourceAActiveCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21909",
"status" : "current",
"objects" : {
"dstsSourceAActive" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source A Active Set Point Sensor Clear""",
}, # notification
"cmDstsSourceBActiveCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21910",
"status" : "current",
"objects" : {
"dstsSourceBActive" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source B Active Set Point Sensor Clear""",
}, # notification
"cmDstsPowerStatusACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21911",
"status" : "current",
"objects" : {
"dstsPowerStatusA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source A Power Qualilty Active Set Point Sensor Clear""",
}, # notification
"cmDstsPowerStatusBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21912",
"status" : "current",
"objects" : {
"dstsPowerStatusB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source B Power Qualilty Active Set Point Sensor Clear""",
}, # notification
"cmDstsSourceATempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21913",
"status" : "current",
"objects" : {
"dstsSourceATempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source A Temp Sensor Clear""",
}, # notification
"cmDstsSourceBTempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.21914",
"status" : "current",
"objects" : {
"dstsSourceBTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"dstsName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Source B Temp Sensor Clear""",
}, # notification
"cmCpmSensorStatusCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22005",
"status" : "current",
"objects" : {
"cpmSensorStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"cpmSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""City Power sensor Clear Trap""",
}, # notification
"cmSmokeAlarmStatusCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22105",
"status" : "current",
"objects" : {
"smokeAlarmStatus" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"smokeAlarmName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Smoke alarm Clear Trap""",
}, # notification
"cmNeg48VdcSensorVoltageCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22205",
"status" : "current",
"objects" : {
"neg48VdcSensorVoltage" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"neg48VdcSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""-48Vdc Sensor Clear Trap""",
}, # notification
"cmPos30VdcSensorVoltageCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22305",
"status" : "current",
"objects" : {
"pos30VdcSensorVoltage" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"pos30VdcSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""30Vdc Sensor Clear Trap""",
}, # notification
"cmAnalogSensorAnalogCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22405",
"status" : "current",
"objects" : {
"analogSensorAnalog" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"analogSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Analog Sensor Clear Trap""",
}, # notification
"cmCtrl3ChIECKWattHrsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22505",
"status" : "current",
"objects" : {
"ctrl3ChIECKWattHrsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours A Clear Trap""",
}, # notification
"cmCtrl3ChIECVoltsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22506",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts A Clear Trap""",
}, # notification
"cmCtrl3ChIECVoltPeakACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22507",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak A Clear Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22508",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps A Clear Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsPeakACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22509",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsPeakA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak A Clear Trap""",
}, # notification
"cmCtrl3ChIECRealPowerACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22510",
"status" : "current",
"objects" : {
"ctrl3ChIECRealPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power A Clear Trap""",
}, # notification
"cmCtrl3ChIECApparentPowerACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22511",
"status" : "current",
"objects" : {
"ctrl3ChIECApparentPowerA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power A Clear Trap""",
}, # notification
"cmCtrl3ChIECPowerFactorACLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22512",
"status" : "current",
"objects" : {
"ctrl3ChIECPowerFactorA" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor A Clear Trap""",
}, # notification
"cmCtrl3ChIECKWattHrsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22513",
"status" : "current",
"objects" : {
"ctrl3ChIECKWattHrsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours B Clear Trap""",
}, # notification
"cmCtrl3ChIECVoltsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22514",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts B Clear Trap""",
}, # notification
"cmCtrl3ChIECVoltPeakBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22515",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak B Clear Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22516",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps B Clear Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsPeakBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22517",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsPeakB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak B Clear Trap""",
}, # notification
"cmCtrl3ChIECRealPowerBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22518",
"status" : "current",
"objects" : {
"ctrl3ChIECRealPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power B Clear Trap""",
}, # notification
"cmCtrl3ChIECApparentPowerBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22519",
"status" : "current",
"objects" : {
"ctrl3ChIECApparentPowerB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power B Clear Trap""",
}, # notification
"cmCtrl3ChIECPowerFactorBCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22520",
"status" : "current",
"objects" : {
"ctrl3ChIECPowerFactorB" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor B Clear Trap""",
}, # notification
"cmCtrl3ChIECKWattHrsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22521",
"status" : "current",
"objects" : {
"ctrl3ChIECKWattHrsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Kilo Watt Hours C Clear Trap""",
}, # notification
"cmCtrl3ChIECVoltsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22522",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts C Clear Trap""",
}, # notification
"cmCtrl3ChIECVoltPeakCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22523",
"status" : "current",
"objects" : {
"ctrl3ChIECVoltPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Volts Peak C Clear Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22524",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps C Clear Trap""",
}, # notification
"cmCtrl3ChIECDeciAmpsPeakCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22525",
"status" : "current",
"objects" : {
"ctrl3ChIECDeciAmpsPeakC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Deciamps Peak C Clear Trap""",
}, # notification
"cmCtrl3ChIECRealPowerCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22526",
"status" : "current",
"objects" : {
"ctrl3ChIECRealPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Real Power C Clear Trap""",
}, # notification
"cmCtrl3ChIECApparentPowerCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22527",
"status" : "current",
"objects" : {
"ctrl3ChIECApparentPowerC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Apparent Power C Clear Trap""",
}, # notification
"cmCtrl3ChIECPowerFactorCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22528",
"status" : "current",
"objects" : {
"ctrl3ChIECPowerFactorC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ctrl3ChIECName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Power Factor C Clear Trap""",
}, # notification
"cmClimateRelayTempCCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22605",
"status" : "current",
"objects" : {
"climateRelayTempC" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay Temperature Sensor Clear Trap""",
}, # notification
"cmClimateRelayTempFCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22606",
"status" : "current",
"objects" : {
"climateRelayTempF" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay Temperature Sensor Clear Trap""",
}, # notification
"cmClimateRelayIO1CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22607",
"status" : "current",
"objects" : {
"climateRelayIO1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO1 Sensor Clear Trap""",
}, # notification
"cmClimateRelayIO2CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22608",
"status" : "current",
"objects" : {
"climateRelayIO2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO2 Sensor Clear Trap""",
}, # notification
"cmClimateRelayIO3CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22609",
"status" : "current",
"objects" : {
"climateRelayIO3" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO3 Sensor Clear Trap""",
}, # notification
"cmClimateRelayIO4CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22610",
"status" : "current",
"objects" : {
"climateRelayIO4" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO4 Sensor Clear Trap""",
}, # notification
"cmClimateRelayIO5CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22611",
"status" : "current",
"objects" : {
"climateRelayIO5" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO5 Sensor Clear Trap""",
}, # notification
"cmClimateRelayIO6CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22612",
"status" : "current",
"objects" : {
"climateRelayIO6" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"climateRelayName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO6 Sensor Clear Trap""",
}, # notification
"cmAirSpeedSwitchSensorAirSpeedCLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.22805",
"status" : "current",
"objects" : {
"airSpeedSwitchSensorAirSpeed" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"airSpeedSwitchSensorName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Air Speed Switch Clear Trap""",
}, # notification
"cmIoExpanderIO1CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23037",
"status" : "current",
"objects" : {
"ioExpanderIO1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO1 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO2CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23038",
"status" : "current",
"objects" : {
"ioExpanderIO2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO2 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO3CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23039",
"status" : "current",
"objects" : {
"ioExpanderIO3" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO3 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO4CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23040",
"status" : "current",
"objects" : {
"ioExpanderIO4" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO4 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO5CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23041",
"status" : "current",
"objects" : {
"ioExpanderIO5" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO5 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO6CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23042",
"status" : "current",
"objects" : {
"ioExpanderIO6" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO6 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO7CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23043",
"status" : "current",
"objects" : {
"ioExpanderIO7" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO7 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO8CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23044",
"status" : "current",
"objects" : {
"ioExpanderIO8" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO8 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO9CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23045",
"status" : "current",
"objects" : {
"ioExpanderIO9" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO9 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO10CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23046",
"status" : "current",
"objects" : {
"ioExpanderIO10" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO10 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO11CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23047",
"status" : "current",
"objects" : {
"ioExpanderIO11" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO11 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO12CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23048",
"status" : "current",
"objects" : {
"ioExpanderIO12" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO12 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO13CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23049",
"status" : "current",
"objects" : {
"ioExpanderIO13" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO13 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO14CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23050",
"status" : "current",
"objects" : {
"ioExpanderIO14" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO14 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO15CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23051",
"status" : "current",
"objects" : {
"ioExpanderIO15" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO15 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO16CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23052",
"status" : "current",
"objects" : {
"ioExpanderIO16" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO16 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO17CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23053",
"status" : "current",
"objects" : {
"ioExpanderIO17" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO17 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO18CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23054",
"status" : "current",
"objects" : {
"ioExpanderIO18" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO18 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO19CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23055",
"status" : "current",
"objects" : {
"ioExpanderIO19" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO19 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO20CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23056",
"status" : "current",
"objects" : {
"ioExpanderIO20" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO20 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO21CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23057",
"status" : "current",
"objects" : {
"ioExpanderIO21" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO21 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO22CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23058",
"status" : "current",
"objects" : {
"ioExpanderIO22" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO22 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO23CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23059",
"status" : "current",
"objects" : {
"ioExpanderIO23" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO23 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO24CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23060",
"status" : "current",
"objects" : {
"ioExpanderIO24" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO24 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO25CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23061",
"status" : "current",
"objects" : {
"ioExpanderIO25" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO25 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO26CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23062",
"status" : "current",
"objects" : {
"ioExpanderIO26" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO26 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO27CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23063",
"status" : "current",
"objects" : {
"ioExpanderIO27" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO27 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO28CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23064",
"status" : "current",
"objects" : {
"ioExpanderIO28" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO28 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO29CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23065",
"status" : "current",
"objects" : {
"ioExpanderIO29" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO29 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO30CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23066",
"status" : "current",
"objects" : {
"ioExpanderIO30" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO30 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO31CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23067",
"status" : "current",
"objects" : {
"ioExpanderIO31" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO31 Sensor Clear Trap""",
}, # notification
"cmIoExpanderIO32CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.23068",
"status" : "current",
"objects" : {
"ioExpanderIO32" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"ioExpanderName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""Climate Relay IO32 Sensor Clear Trap""",
}, # notification
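        # The cmPowerDMDeciAmps*NOTIFY entries that follow are per-channel
        # notification traps for the DM48 current monitor, as their
        # descriptions indicate, in contrast to the sensor CLEAR traps above.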
"cmPowerDMDeciAmps1NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129153",
"status" : "current",
"objects" : {
"powerDMDeciAmps1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps2NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129154",
"status" : "current",
"objects" : {
"powerDMDeciAmps2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps3NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129155",
"status" : "current",
"objects" : {
"powerDMDeciAmps3" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps4NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129156",
"status" : "current",
"objects" : {
"powerDMDeciAmps4" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps5NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129157",
"status" : "current",
"objects" : {
"powerDMDeciAmps5" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps6NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129158",
"status" : "current",
"objects" : {
"powerDMDeciAmps6" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps7NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129159",
"status" : "current",
"objects" : {
"powerDMDeciAmps7" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps8NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129160",
"status" : "current",
"objects" : {
"powerDMDeciAmps8" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps9NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129161",
"status" : "current",
"objects" : {
"powerDMDeciAmps9" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps10NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129162",
"status" : "current",
"objects" : {
"powerDMDeciAmps10" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps11NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129163",
"status" : "current",
"objects" : {
"powerDMDeciAmps11" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps12NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129164",
"status" : "current",
"objects" : {
"powerDMDeciAmps12" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps13NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129165",
"status" : "current",
"objects" : {
"powerDMDeciAmps13" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps14NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129166",
"status" : "current",
"objects" : {
"powerDMDeciAmps14" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps15NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129167",
"status" : "current",
"objects" : {
"powerDMDeciAmps15" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps16NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129168",
"status" : "current",
"objects" : {
"powerDMDeciAmps16" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps17NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129169",
"status" : "current",
"objects" : {
"powerDMDeciAmps17" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps18NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129170",
"status" : "current",
"objects" : {
"powerDMDeciAmps18" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps19NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129171",
"status" : "current",
"objects" : {
"powerDMDeciAmps19" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps20NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129172",
"status" : "current",
"objects" : {
"powerDMDeciAmps20" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps21NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129173",
"status" : "current",
"objects" : {
"powerDMDeciAmps21" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps22NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129174",
"status" : "current",
"objects" : {
"powerDMDeciAmps22" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps23NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129175",
"status" : "current",
"objects" : {
"powerDMDeciAmps23" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps24NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129176",
"status" : "current",
"objects" : {
"powerDMDeciAmps24" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps25NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129177",
"status" : "current",
"objects" : {
"powerDMDeciAmps25" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps26NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129178",
"status" : "current",
"objects" : {
"powerDMDeciAmps26" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps27NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129179",
"status" : "current",
"objects" : {
"powerDMDeciAmps27" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps28NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129180",
"status" : "current",
"objects" : {
"powerDMDeciAmps28" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps29NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129181",
"status" : "current",
"objects" : {
"powerDMDeciAmps29" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps30NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129182",
"status" : "current",
"objects" : {
"powerDMDeciAmps30" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps31NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129183",
"status" : "current",
"objects" : {
"powerDMDeciAmps31" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps32NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129184",
"status" : "current",
"objects" : {
"powerDMDeciAmps32" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps33NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129185",
"status" : "current",
"objects" : {
"powerDMDeciAmps33" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps34NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129186",
"status" : "current",
"objects" : {
"powerDMDeciAmps34" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps35NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129187",
"status" : "current",
"objects" : {
"powerDMDeciAmps35" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps36NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129188",
"status" : "current",
"objects" : {
"powerDMDeciAmps36" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps37NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129189",
"status" : "current",
"objects" : {
"powerDMDeciAmps37" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps38NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129190",
"status" : "current",
"objects" : {
"powerDMDeciAmps38" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps39NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129191",
"status" : "current",
"objects" : {
"powerDMDeciAmps39" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps40NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129192",
"status" : "current",
"objects" : {
"powerDMDeciAmps40" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps41NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129193",
"status" : "current",
"objects" : {
"powerDMDeciAmps41" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps42NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129194",
"status" : "current",
"objects" : {
"powerDMDeciAmps42" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps43NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129195",
"status" : "current",
"objects" : {
"powerDMDeciAmps43" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps44NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129196",
"status" : "current",
"objects" : {
"powerDMDeciAmps44" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps45NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129197",
"status" : "current",
"objects" : {
"powerDMDeciAmps45" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps46NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129198",
"status" : "current",
"objects" : {
"powerDMDeciAmps46" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps47NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129199",
"status" : "current",
"objects" : {
"powerDMDeciAmps47" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps48NOTIFY" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.129200",
"status" : "current",
"objects" : {
"powerDMDeciAmps48" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Trap""",
}, # notification
"cmPowerDMDeciAmps1CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229153",
"status" : "current",
"objects" : {
"powerDMDeciAmps1" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps2CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229154",
"status" : "current",
"objects" : {
"powerDMDeciAmps2" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps3CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229155",
"status" : "current",
"objects" : {
"powerDMDeciAmps3" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps4CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229156",
"status" : "current",
"objects" : {
"powerDMDeciAmps4" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps5CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229157",
"status" : "current",
"objects" : {
"powerDMDeciAmps5" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps6CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229158",
"status" : "current",
"objects" : {
"powerDMDeciAmps6" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps7CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229159",
"status" : "current",
"objects" : {
"powerDMDeciAmps7" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps8CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229160",
"status" : "current",
"objects" : {
"powerDMDeciAmps8" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps9CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229161",
"status" : "current",
"objects" : {
"powerDMDeciAmps9" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps10CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229162",
"status" : "current",
"objects" : {
"powerDMDeciAmps10" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps11CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229163",
"status" : "current",
"objects" : {
"powerDMDeciAmps11" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps12CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229164",
"status" : "current",
"objects" : {
"powerDMDeciAmps12" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps13CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229165",
"status" : "current",
"objects" : {
"powerDMDeciAmps13" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps14CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229166",
"status" : "current",
"objects" : {
"powerDMDeciAmps14" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps15CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229167",
"status" : "current",
"objects" : {
"powerDMDeciAmps15" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps16CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229168",
"status" : "current",
"objects" : {
"powerDMDeciAmps16" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps17CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229169",
"status" : "current",
"objects" : {
"powerDMDeciAmps17" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps18CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229170",
"status" : "current",
"objects" : {
"powerDMDeciAmps18" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps19CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229171",
"status" : "current",
"objects" : {
"powerDMDeciAmps19" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps20CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229172",
"status" : "current",
"objects" : {
"powerDMDeciAmps20" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps21CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229173",
"status" : "current",
"objects" : {
"powerDMDeciAmps21" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps22CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229174",
"status" : "current",
"objects" : {
"powerDMDeciAmps22" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps23CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229175",
"status" : "current",
"objects" : {
"powerDMDeciAmps23" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps24CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229176",
"status" : "current",
"objects" : {
"powerDMDeciAmps24" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps25CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229177",
"status" : "current",
"objects" : {
"powerDMDeciAmps25" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps26CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229178",
"status" : "current",
"objects" : {
"powerDMDeciAmps26" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps27CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229179",
"status" : "current",
"objects" : {
"powerDMDeciAmps27" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps28CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229180",
"status" : "current",
"objects" : {
"powerDMDeciAmps28" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps29CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229181",
"status" : "current",
"objects" : {
"powerDMDeciAmps29" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps30CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229182",
"status" : "current",
"objects" : {
"powerDMDeciAmps30" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps31CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229183",
"status" : "current",
"objects" : {
"powerDMDeciAmps31" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps32CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229184",
"status" : "current",
"objects" : {
"powerDMDeciAmps32" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps33CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229185",
"status" : "current",
"objects" : {
"powerDMDeciAmps33" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps34CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229186",
"status" : "current",
"objects" : {
"powerDMDeciAmps34" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps35CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229187",
"status" : "current",
"objects" : {
"powerDMDeciAmps35" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps36CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229188",
"status" : "current",
"objects" : {
"powerDMDeciAmps36" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps37CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229189",
"status" : "current",
"objects" : {
"powerDMDeciAmps37" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps38CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229190",
"status" : "current",
"objects" : {
"powerDMDeciAmps38" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps39CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229191",
"status" : "current",
"objects" : {
"powerDMDeciAmps39" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps40CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229192",
"status" : "current",
"objects" : {
"powerDMDeciAmps40" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps41CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229193",
"status" : "current",
"objects" : {
"powerDMDeciAmps41" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps42CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229194",
"status" : "current",
"objects" : {
"powerDMDeciAmps42" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps43CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229195",
"status" : "current",
"objects" : {
"powerDMDeciAmps43" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps44CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229196",
"status" : "current",
"objects" : {
"powerDMDeciAmps44" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps45CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229197",
"status" : "current",
"objects" : {
"powerDMDeciAmps45" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps46CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229198",
"status" : "current",
"objects" : {
"powerDMDeciAmps46" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps47CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229199",
"status" : "current",
"objects" : {
"powerDMDeciAmps47" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
"cmPowerDMDeciAmps48CLEAR" : {
"nodetype" : "notification",
"moduleName" : "IT-WATCHDOGS-MIB-V3",
"oid" : "1.3.6.1.4.1.17373.3.32767.0.229200",
"status" : "current",
"objects" : {
"powerDMDeciAmps48" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"powerDMName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"productFriendlyName" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
"alarmTripType" : {
"nodetype" : "object",
"module" : "IT-WATCHDOGS-MIB-V3"
},
},
"description" :
"""DM48 Current Monitor Amps Clear Trap""",
}, # notification
}, # notifications
}
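# Illustrative helper (not part of the original MIB dump): a minimal sketch of
# how the "notifications" section above could be searched by OID.  The `mib`
# argument stands for whatever name this module binds the top-level dictionary
# to; that name is not shown here, so treat it as a hypothetical placeholder.
def find_notification_by_oid(mib, oid):
    """Return (name, entry) for the notification matching `oid`, else None."""
    for name, entry in mib.get("notifications", {}).items():
        if entry.get("oid") == oid:
            return name, entry
    return None

# Example (hypothetical): locate the clear trap for DM48 current channel 48.
# find_notification_by_oid(MIB, "1.3.6.1.4.1.17373.3.32767.0.229200")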
|
gpl-2.0
|
InQuest/ThreatKB
|
migrations/versions/bc0fab3363f7_create_cfg_category_range_mapping_table.py
|
1
|
1736
|
"""create cfg_category_range_mapping table
Revision ID: bc0fab3363f7
Revises: 960676c435b2
Create Date: 2017-08-12 23:11:42.385100
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'bc0fab3363f7'
down_revision = '960676c435b2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('cfg_category_range_mapping',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('category', sa.String(length=255), nullable=False),
    sa.Column('range_min', mysql.INTEGER(unsigned=True), nullable=False),
    sa.Column('range_max', mysql.INTEGER(unsigned=True), nullable=False),
    sa.Column('current', mysql.INTEGER(unsigned=True), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('category')
)
op.create_index(u'ix_cfg_category_range_mapping_current', 'cfg_category_range_mapping', ['current'], unique=False)
op.create_index(u'ix_cfg_category_range_mapping_range_max', 'cfg_category_range_mapping', ['range_max'], unique=False)
op.create_index(u'ix_cfg_category_range_mapping_range_min', 'cfg_category_range_mapping', ['range_min'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(u'ix_cfg_category_range_mapping_range_min', table_name='cfg_category_range_mapping')
op.drop_index(u'ix_cfg_category_range_mapping_range_max', table_name='cfg_category_range_mapping')
op.drop_index(u'ix_cfg_category_range_mapping_current', table_name='cfg_category_range_mapping')
op.drop_table('cfg_category_range_mapping')
# ### end Alembic commands ###
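# Illustrative sketch only (not part of the migration): one plausible consumer
# of this table hands out the next identifier inside a category's reserved
# range.  The lightweight table object and helper below are hypothetical;
# ThreatKB's actual models and session handling may differ.
_cfg_range = sa.table(
    'cfg_category_range_mapping',
    sa.column('category', sa.String),
    sa.column('range_min', sa.Integer),
    sa.column('range_max', sa.Integer),
    sa.column('current', sa.Integer),
)

def _next_id_for_category(connection, category):
    """Return the next unused id in the category's range, or None if exhausted."""
    row = connection.execute(
        sa.select([_cfg_range.c.current, _cfg_range.c.range_max])
        .where(_cfg_range.c.category == category)
    ).fetchone()
    if row is None or row.current >= row.range_max:
        return None
    connection.execute(
        _cfg_range.update()
        .where(_cfg_range.c.category == category)
        .values(current=row.current + 1)
    )
    return row.current + 1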
|
gpl-2.0
|
tndatacommons/tndata_backend
|
tndata_backend/rewards/management/commands/load_reward_content.py
|
2
|
4893
|
import csv
from django.core.management.base import BaseCommand, CommandError
from rewards.models import FunContent
class Command(BaseCommand):
"""A command to import data from a CSV file into the `FunContent` model.
This command is fairly flexible, letting you read content from one or more
    columns and specify which columns should be used for which fields.
Run manage.py help load_reward_content for more info.
Examples of input for the various flags:
--message 2:4
will include columns 2, 3, and 4 in the message column
--message 2 --author 3
will load column 2 into message, column 3 into author
    --keywords 2
will load all of the text from column 2 into the keywords field
The type and message options are required, the rest are optional.
"""
help = 'Import data from a CSV file for the Fun Content rewards model.'
def add_arguments(self, parser):
parser.add_argument('csv_path', nargs=1, type=str)
parser.add_argument(
'-t',
'--type',
action='store',
type=str,
dest='message_type',
default='',
required=True,
help='Type of content: quote|fortune|fact|joke'
)
parser.add_argument(
'-m',
'--message',
action='store',
type=str,
dest='message_columns',
default=None,
required=True,
help='Column number or range (eg "2:4") for the message'
)
parser.add_argument(
'-a',
'--author',
action='store',
type=int,
dest='author_column',
default=None,
help='Column number for the author or attribution.'
)
parser.add_argument(
'-k',
'--keywords',
action='store',
type=int,
dest='keyword_column',
default=None,
help='Column number for the keywords'
)
parser.add_argument(
'-d',
'--delimiter',
action='store',
type=str,
dest='keyword_delimiter',
default=',',
help='A keyword delimiter for columns with multiple keywords. '
'The default is a comma.'
)
parser.add_argument(
'-s',
'--separator',
action='store',
type=str,
dest='message_separator',
default='\n',
help='Separator used between message text in multiple columns. '
                 'The default is a newline.'
)
def handle(self, **options):
# Check the options input
csv_path = options['csv_path'][0]
kw_delimiter = options['keyword_delimiter']
kw_column = options['keyword_column']
author_column = options['author_column']
message_type = options['message_type'].lower()
message_separator = options['message_separator']
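        # A --message spec like "2:4" selects columns 2 through 4 inclusive;
        # a single number selects just that one column.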
try:
message_columns = [int(c) for c in options['message_columns'].split(':')]
if len(message_columns) == 1:
message_start = message_columns[0]
message_end = message_columns[0] + 1
else:
message_start, message_end = message_columns
message_end = message_end + 1 # be inclusive
except ValueError:
err = "Invalid input for message column(s): '{}'".format(
options['message_columns']
)
raise CommandError(err)
if message_type not in [t[0] for t in FunContent.MESSAGE_TYPE_CHOICES]:
err = "{} is not a valid message type".format(message_type)
raise CommandError(err)
created = 0
with open(csv_path, newline='') as csvfile:
for row in csv.reader(csvfile):
message = row[message_start:message_end]
message = message_separator.join(message)
            if message:  # skip rows where the message columns are empty
keywords = []
if kw_column:
keywords = row[kw_column]
if kw_delimiter:
keywords = keywords.split(kw_delimiter)
else:
keywords = [keywords]
author = ''
if author_column:
author = row[author_column]
FunContent.objects.create(
message_type=message_type,
message=message,
author=author,
keywords=keywords
)
created += 1
self.stdout.write("Created {} items.".format(created))
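
# Example invocation (illustrative only; the CSV path and column layout below
# are made up):
#
#   python manage.py load_reward_content rewards.csv \
#       --type quote --message 1:2 --author 3 --keywords 4 --delimiter ";"
#
# This joins columns 1 and 2 into the message, reads the author from column 3,
# and splits column 4 on ";" to build the keyword list.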
|
mit
|
matbu/ansible-modules-extras
|
cloud/cloudstack/cs_network.py
|
48
|
16787
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (c) 2015, René Moser <mail@renemoser.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: cs_network
short_description: Manages networks on Apache CloudStack based clouds.
description:
- Create, update, restart and delete networks.
version_added: '2.0'
author: "René Moser (@resmo)"
options:
name:
description:
- Name (case sensitive) of the network.
required: true
display_text:
description:
- Display text of the network.
- If not specified, C(name) will be used as C(display_text).
required: false
default: null
network_offering:
description:
- Name of the offering for the network.
- Required if C(state=present).
required: false
default: null
start_ip:
description:
- The beginning IPv4 address of the network belongs to.
- Only considered on create.
required: false
default: null
end_ip:
description:
- The ending IPv4 address of the network belongs to.
- If not specified, value of C(start_ip) is used.
- Only considered on create.
required: false
default: null
gateway:
description:
- The gateway of the network.
- Required for shared networks and isolated networks when it belongs to a VPC.
- Only considered on create.
required: false
default: null
netmask:
description:
- The netmask of the network.
- Required for shared networks and isolated networks when it belongs to a VPC.
- Only considered on create.
required: false
default: null
start_ipv6:
description:
- The beginning IPv6 address of the network belongs to.
- Only considered on create.
required: false
default: null
end_ipv6:
description:
- The ending IPv6 address of the network belongs to.
- If not specified, value of C(start_ipv6) is used.
- Only considered on create.
required: false
default: null
cidr_ipv6:
description:
- CIDR of IPv6 network, must be at least /64.
- Only considered on create.
required: false
default: null
gateway_ipv6:
description:
- The gateway of the IPv6 network.
- Required for shared networks.
- Only considered on create.
required: false
default: null
vlan:
description:
- The ID or VID of the network.
required: false
default: null
vpc:
description:
- Name of the VPC of the network.
required: false
default: null
isolated_pvlan:
description:
- The isolated private VLAN for this network.
required: false
default: null
clean_up:
description:
- Cleanup old network elements.
- Only considered on C(state=restarted).
required: false
default: false
acl_type:
description:
- Access control type.
- Only considered on create.
required: false
default: account
choices: [ 'account', 'domain' ]
network_domain:
description:
- The network domain.
required: false
default: null
state:
description:
- State of the network.
required: false
default: present
choices: [ 'present', 'absent', 'restarted' ]
zone:
description:
- Name of the zone in which the network should be deployed.
- If not set, default zone is used.
required: false
default: null
project:
description:
- Name of the project the network to be deployed in.
required: false
default: null
domain:
description:
- Domain the network is related to.
required: false
default: null
account:
description:
- Account the network is related to.
required: false
default: null
poll_async:
description:
- Poll async jobs until job has finished.
required: false
default: true
extends_documentation_fragment: cloudstack
'''
EXAMPLES = '''
# create a network
- local_action:
module: cs_network
name: my network
zone: gva-01
network_offering: DefaultIsolatedNetworkOfferingWithSourceNatService
network_domain: example.com
# update a network
- local_action:
module: cs_network
name: my network
display_text: network of domain example.local
network_domain: example.local
# restart a network with clean up
- local_action:
module: cs_network
name: my network
clean_up: yes
    state: restarted
# remove a network
- local_action:
module: cs_network
name: my network
state: absent
'''
RETURN = '''
---
id:
description: UUID of the network.
returned: success
type: string
sample: 04589590-ac63-4ffc-93f5-b698b8ac38b6
name:
description: Name of the network.
returned: success
type: string
sample: web project
display_text:
description: Display text of the network.
returned: success
type: string
sample: web project
dns1:
description: IP address of the 1st nameserver.
returned: success
type: string
sample: 1.2.3.4
dns2:
description: IP address of the 2nd nameserver.
returned: success
type: string
sample: 1.2.3.4
cidr:
description: IPv4 network CIDR.
returned: success
type: string
sample: 10.101.64.0/24
gateway:
description: IPv4 gateway.
returned: success
type: string
sample: 10.101.64.1
netmask:
description: IPv4 netmask.
returned: success
type: string
sample: 255.255.255.0
cidr_ipv6:
description: IPv6 network CIDR.
returned: success
type: string
sample: 2001:db8::/64
gateway_ipv6:
description: IPv6 gateway.
returned: success
type: string
sample: 2001:db8::1
state:
description: State of the network.
returned: success
type: string
sample: Implemented
zone:
description: Name of zone.
returned: success
type: string
sample: ch-gva-2
domain:
description: Domain the network is related to.
returned: success
type: string
sample: ROOT
account:
description: Account the network is related to.
returned: success
type: string
sample: example account
project:
description: Name of project.
returned: success
type: string
sample: Production
tags:
description: List of resource tags associated with the network.
returned: success
type: dict
sample: '[ { "key": "foo", "value": "bar" } ]'
acl_type:
description: Access type of the network (Domain, Account).
returned: success
type: string
sample: Account
broadcast_domain_type:
description: Broadcast domain type of the network.
returned: success
type: string
sample: Vlan
type:
description: Type of the network.
returned: success
type: string
sample: Isolated
traffic_type:
description: Traffic type of the network.
returned: success
type: string
sample: Guest
state:
description: State of the network (Allocated, Implemented, Setup).
returned: success
type: string
sample: Allocated
is_persistent:
description: Whether the network is persistent or not.
returned: success
type: boolean
sample: false
network_domain:
description: The network domain
returned: success
type: string
sample: example.local
network_offering:
description: The network offering name.
returned: success
type: string
sample: DefaultIsolatedNetworkOfferingWithSourceNatService
'''
# import cloudstack common
from ansible.module_utils.cloudstack import *
class AnsibleCloudStackNetwork(AnsibleCloudStack):
def __init__(self, module):
super(AnsibleCloudStackNetwork, self).__init__(module)
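        # Map CloudStack API result keys to the snake_case keys this module returns.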
self.returns = {
            'networkdomain': 'network_domain',
'networkofferingname': 'network_offering',
'ispersistent': 'is_persistent',
'acltype': 'acl_type',
'type': 'type',
'traffictype': 'traffic_type',
'ip6gateway': 'gateway_ipv6',
'ip6cidr': 'cidr_ipv6',
'gateway': 'gateway',
'cidr': 'cidr',
'netmask': 'netmask',
'broadcastdomaintype': 'broadcast_domain_type',
'dns1': 'dns1',
'dns2': 'dns2',
}
self.network = None
def get_network_offering(self, key=None):
network_offering = self.module.params.get('network_offering')
if not network_offering:
self.module.fail_json(msg="missing required arguments: network_offering")
args = {}
args['zoneid'] = self.get_zone(key='id')
network_offerings = self.cs.listNetworkOfferings(**args)
if network_offerings:
for no in network_offerings['networkoffering']:
if network_offering in [ no['name'], no['displaytext'], no['id'] ]:
return self._get_by_key(key, no)
self.module.fail_json(msg="Network offering '%s' not found" % network_offering)
def _get_args(self):
args = {}
args['name'] = self.module.params.get('name')
args['displaytext'] = self.get_or_fallback('display_text', 'name')
args['networkdomain'] = self.module.params.get('network_domain')
args['networkofferingid'] = self.get_network_offering(key='id')
return args
def get_network(self):
if not self.network:
network = self.module.params.get('name')
args = {}
args['zoneid'] = self.get_zone(key='id')
args['projectid'] = self.get_project(key='id')
args['account'] = self.get_account(key='name')
args['domainid'] = self.get_domain(key='id')
networks = self.cs.listNetworks(**args)
if networks:
for n in networks['network']:
if network in [ n['name'], n['displaytext'], n['id']]:
self.network = n
break
return self.network
def present_network(self):
network = self.get_network()
if not network:
network = self.create_network(network)
else:
network = self.update_network(network)
return network
def update_network(self, network):
args = self._get_args()
args['id'] = network['id']
if self.has_changed(args, network):
self.result['changed'] = True
if not self.module.check_mode:
network = self.cs.updateNetwork(**args)
if 'errortext' in network:
self.module.fail_json(msg="Failed: '%s'" % network['errortext'])
poll_async = self.module.params.get('poll_async')
if network and poll_async:
network = self.poll_job(network, 'network')
return network
def create_network(self, network):
self.result['changed'] = True
args = self._get_args()
args['acltype'] = self.module.params.get('acl_type')
args['zoneid'] = self.get_zone(key='id')
args['projectid'] = self.get_project(key='id')
args['account'] = self.get_account(key='name')
args['domainid'] = self.get_domain(key='id')
args['startip'] = self.module.params.get('start_ip')
args['endip'] = self.get_or_fallback('end_ip', 'start_ip')
args['netmask'] = self.module.params.get('netmask')
args['gateway'] = self.module.params.get('gateway')
args['startipv6'] = self.module.params.get('start_ipv6')
args['endipv6'] = self.get_or_fallback('end_ipv6', 'start_ipv6')
args['ip6cidr'] = self.module.params.get('cidr_ipv6')
args['ip6gateway'] = self.module.params.get('gateway_ipv6')
args['vlan'] = self.module.params.get('vlan')
args['isolatedpvlan'] = self.module.params.get('isolated_pvlan')
args['subdomainaccess'] = self.module.params.get('subdomain_access')
args['vpcid'] = self.get_vpc(key='id')
if not self.module.check_mode:
res = self.cs.createNetwork(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
network = res['network']
return network
def restart_network(self):
network = self.get_network()
if not network:
            self.module.fail_json(msg="No network named '%s' found." % self.module.params.get('name'))
# Restarting only available for these states
if network['state'].lower() in [ 'implemented', 'setup' ]:
self.result['changed'] = True
args = {}
args['id'] = network['id']
args['cleanup'] = self.module.params.get('clean_up')
if not self.module.check_mode:
network = self.cs.restartNetwork(**args)
if 'errortext' in network:
self.module.fail_json(msg="Failed: '%s'" % network['errortext'])
poll_async = self.module.params.get('poll_async')
if network and poll_async:
network = self.poll_job(network, 'network')
return network
def absent_network(self):
network = self.get_network()
if network:
self.result['changed'] = True
args = {}
args['id'] = network['id']
if not self.module.check_mode:
res = self.cs.deleteNetwork(**args)
if 'errortext' in res:
self.module.fail_json(msg="Failed: '%s'" % res['errortext'])
poll_async = self.module.params.get('poll_async')
if res and poll_async:
res = self.poll_job(res, 'network')
return network
def main():
argument_spec = cs_argument_spec()
argument_spec.update(dict(
name = dict(required=True),
display_text = dict(default=None),
network_offering = dict(default=None),
zone = dict(default=None),
start_ip = dict(default=None),
end_ip = dict(default=None),
gateway = dict(default=None),
netmask = dict(default=None),
start_ipv6 = dict(default=None),
end_ipv6 = dict(default=None),
cidr_ipv6 = dict(default=None),
gateway_ipv6 = dict(default=None),
vlan = dict(default=None),
vpc = dict(default=None),
isolated_pvlan = dict(default=None),
clean_up = dict(type='bool', default=False),
network_domain = dict(default=None),
state = dict(choices=['present', 'absent', 'restarted' ], default='present'),
acl_type = dict(choices=['account', 'domain'], default='account'),
project = dict(default=None),
domain = dict(default=None),
account = dict(default=None),
poll_async = dict(type='bool', default=True),
))
required_together = cs_required_together()
required_together.extend([
['start_ip', 'netmask', 'gateway'],
['start_ipv6', 'cidr_ipv6', 'gateway_ipv6'],
])
module = AnsibleModule(
argument_spec=argument_spec,
required_together=required_together,
supports_check_mode=True
)
try:
acs_network = AnsibleCloudStackNetwork(module)
state = module.params.get('state')
if state in ['absent']:
network = acs_network.absent_network()
elif state in ['restarted']:
network = acs_network.restart_network()
else:
network = acs_network.present_network()
result = acs_network.get_result(network)
except CloudStackException as e:
module.fail_json(msg='CloudStackException: %s' % str(e))
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
jbowes/ansible-modules-extras
|
cloud/centurylink/clc_group.py
|
60
|
13838
|
#!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
#
DOCUMENTATION = '''
module: clc_group
short_description: Create/delete Server Groups at Centurylink Cloud
description:
    - Create or delete Server Groups at Centurylink Cloud
version_added: "2.0"
options:
name:
description:
- The name of the Server Group
required: True
description:
description:
- A description of the Server Group
required: False
parent:
description:
- The parent group of the server group. If parent is not provided, it creates the group at top level.
required: False
location:
description:
- Datacenter to create the group in. If location is not provided, the group gets created in the default datacenter
associated with the account
required: False
state:
description:
- Whether to create or delete the group
default: present
choices: ['present', 'absent']
wait:
description:
- Whether to wait for the tasks to finish before returning.
choices: [ True, False ]
default: True
required: False
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
notes:
- To use this module, it is required to set the below environment variables which enables access to the
Centurylink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
    - CLC_V2_API_PASSWORD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
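# Illustration only (hypothetical values): the credentials described in the notes
# above are read from the environment by _set_clc_credentials_from_env() below,
# e.g. before running the playbook:
#
#   export CLC_V2_API_USERNAME='my_clc_login'
#   export CLC_V2_API_PASSWD='my_clc_password'
#
# or, alternatively:
#
#   export CLC_V2_API_TOKEN='<token from https://api.ctl.io/v2/authentication/login>'
#   export CLC_ACCT_ALIAS='MYALIAS'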
EXAMPLES = '''
# Create a Server Group
---
- name: Create Server Group
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Create / Verify a Server Group at CenturyLink Cloud
clc_group:
name: 'My Cool Server Group'
parent: 'Default Group'
state: present
register: clc
- name: debug
debug: var=clc
# Delete a Server Group
---
- name: Delete Server Group
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Delete / Verify Absent a Server Group at CenturyLink Cloud
clc_group:
name: 'My Cool Server Group'
parent: 'Default Group'
state: absent
register: clc
- name: debug
debug: var=clc
'''
__version__ = '${version}'
from distutils.version import LooseVersion
try:
import requests
except ImportError:
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
try:
import clc as clc_sdk
from clc import CLCException
except ImportError:
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
class ClcGroup(object):
clc = None
root_group = None
def __init__(self, module):
"""
Construct module
"""
self.clc = clc_sdk
self.module = module
self.group_dict = {}
if not CLC_FOUND:
self.module.fail_json(
msg='clc-python-sdk required for this module')
if not REQUESTS_FOUND:
self.module.fail_json(
msg='requests library is required for this module')
if requests.__version__ and LooseVersion(requests.__version__) < LooseVersion('2.5.0'):
self.module.fail_json(
msg='requests library version should be >= 2.5.0')
self._set_user_agent(self.clc)
def process_request(self):
"""
Execute the main code path, and handle the request
:return: none
"""
location = self.module.params.get('location')
group_name = self.module.params.get('name')
parent_name = self.module.params.get('parent')
group_description = self.module.params.get('description')
state = self.module.params.get('state')
self._set_clc_credentials_from_env()
self.group_dict = self._get_group_tree_for_datacenter(
datacenter=location)
if state == "absent":
changed, group, requests = self._ensure_group_is_absent(
group_name=group_name, parent_name=parent_name)
else:
changed, group, requests = self._ensure_group_is_present(
group_name=group_name, parent_name=parent_name, group_description=group_description)
if requests:
self._wait_for_requests_to_complete(requests)
self.module.exit_json(changed=changed, group=group_name)
@staticmethod
def _define_module_argument_spec():
"""
Define the argument spec for the ansible module
:return: argument spec dictionary
"""
argument_spec = dict(
name=dict(required=True),
description=dict(default=None),
parent=dict(default=None),
location=dict(default=None),
state=dict(default='present', choices=['present', 'absent']),
wait=dict(type='bool', default=True))
return argument_spec
def _set_clc_credentials_from_env(self):
"""
Set the CLC Credentials on the sdk by reading environment variables
:return: none
"""
env = os.environ
v2_api_token = env.get('CLC_V2_API_TOKEN', False)
v2_api_username = env.get('CLC_V2_API_USERNAME', False)
v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
clc_alias = env.get('CLC_ACCT_ALIAS', False)
api_url = env.get('CLC_V2_API_URL', False)
if api_url:
self.clc.defaults.ENDPOINT_URL_V2 = api_url
if v2_api_token and clc_alias:
self.clc._LOGIN_TOKEN_V2 = v2_api_token
self.clc._V2_ENABLED = True
self.clc.ALIAS = clc_alias
elif v2_api_username and v2_api_passwd:
self.clc.v2.SetCredentials(
api_username=v2_api_username,
api_passwd=v2_api_passwd)
else:
return self.module.fail_json(
msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
"environment variables")
def _ensure_group_is_absent(self, group_name, parent_name):
"""
Ensure that group_name is absent by deleting it if necessary
:param group_name: string - the name of the clc server group to delete
:param parent_name: string - the name of the parent group for group_name
        :return: (changed, group_name, requests) - whether a change was made, the group name, and a list of pending delete requests
"""
changed = False
requests = []
if self._group_exists(group_name=group_name, parent_name=parent_name):
if not self.module.check_mode:
request = self._delete_group(group_name)
requests.append(request)
changed = True
return changed, group_name, requests
def _delete_group(self, group_name):
"""
Delete the provided server group
:param group_name: string - the server group to delete
:return: none
"""
response = None
group, parent = self.group_dict.get(group_name)
try:
response = group.Delete()
except CLCException, ex:
self.module.fail_json(msg='Failed to delete group :{0}. {1}'.format(
group_name, ex.response_text
))
return response
def _ensure_group_is_present(
self,
group_name,
parent_name,
group_description):
"""
Checks to see if a server group exists, creates it if it doesn't.
:param group_name: the name of the group to validate/create
:param parent_name: the name of the parent group for group_name
:param group_description: a short description of the server group (used when creating)
        :return: (changed, group_name, requests) -
                 changed: Boolean - whether a change was made,
                 group_name: string - the name of the group,
                 requests: a list of pending requests to wait on, or None
"""
assert self.root_group, "Implementation Error: Root Group not set"
parent = parent_name if parent_name is not None else self.root_group.name
description = group_description
changed = False
parent_exists = self._group_exists(group_name=parent, parent_name=None)
child_exists = self._group_exists(
group_name=group_name,
parent_name=parent)
if parent_exists and child_exists:
group, parent = self.group_dict[group_name]
changed = False
elif parent_exists and not child_exists:
if not self.module.check_mode:
self._create_group(
group=group_name,
parent=parent,
description=description)
changed = True
else:
self.module.fail_json(
msg="parent group: " +
parent +
" does not exist")
return changed, group_name, None
def _create_group(self, group, parent, description):
"""
Create the provided server group
:param group: clc_sdk.Group - the group to create
:param parent: clc_sdk.Parent - the parent group for {group}
:param description: string - a text description of the group
:return: clc_sdk.Group - the created group
"""
response = None
(parent, grandparent) = self.group_dict[parent]
try:
response = parent.Create(name=group, description=description)
except CLCException, ex:
self.module.fail_json(msg='Failed to create group :{0}. {1}'.format(
group, ex.response_text
))
return response
def _group_exists(self, group_name, parent_name):
"""
Check to see if a group exists
:param group_name: string - the group to check
:param parent_name: string - the parent of group_name
:return: boolean - whether the group exists
"""
result = False
if group_name in self.group_dict:
(group, parent) = self.group_dict[group_name]
if parent_name is None or parent_name == parent.name:
result = True
return result
def _get_group_tree_for_datacenter(self, datacenter=None):
"""
Walk the tree of groups for a datacenter
:param datacenter: string - the datacenter to walk (ex: 'UC1')
:return: a dictionary of groups and parents
"""
self.root_group = self.clc.v2.Datacenter(
location=datacenter).RootGroup()
return self._walk_groups_recursive(
parent_group=None,
child_group=self.root_group)
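    # The walk below returns a mapping like (hypothetical group names for illustration):
    #   {'Root': (<Root group>, None),
    #    'Web':  (<Web group>, <Root group>),
    #    'App':  (<App group>, <Web group>)}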
def _walk_groups_recursive(self, parent_group, child_group):
"""
Walk a parent-child tree of groups, starting with the provided child group
:param parent_group: clc_sdk.Group - the parent group to start the walk
:param child_group: clc_sdk.Group - the child group to start the walk
:return: a dictionary of groups and parents
"""
result = {str(child_group): (child_group, parent_group)}
groups = child_group.Subgroups().groups
if len(groups) > 0:
for group in groups:
if group.type != 'default':
continue
result.update(self._walk_groups_recursive(child_group, group))
return result
def _wait_for_requests_to_complete(self, requests_lst):
"""
Waits until the CLC requests are complete if the wait argument is True
:param requests_lst: The list of CLC request objects
:return: none
"""
if not self.module.params['wait']:
return
for request in requests_lst:
request.WaitUntilComplete()
for request_details in request.requests:
if request_details.Status() != 'succeeded':
self.module.fail_json(
msg='Unable to process group request')
@staticmethod
def _set_user_agent(clc):
if hasattr(clc, 'SetRequestsSession'):
agent_string = "ClcAnsibleModule/" + __version__
ses = requests.Session()
ses.headers.update({"Api-Client": agent_string})
ses.headers['User-Agent'] += " " + agent_string
clc.SetRequestsSession(ses)
def main():
"""
The main function. Instantiates the module and calls process_request.
:return: none
"""
module = AnsibleModule(
argument_spec=ClcGroup._define_module_argument_spec(),
supports_check_mode=True)
clc_group = ClcGroup(module)
clc_group.process_request()
from ansible.module_utils.basic import * # pylint: disable=W0614
if __name__ == '__main__':
main()
|
gpl-3.0
|
scalable-networks/gnuradio-3.7.0.1
|
gr-trellis/examples/python/test_tcm_bit.py
|
13
|
5140
|
#!/usr/bin/env python
from gnuradio import gr
from gnuradio import trellis, digital, blocks
from gnuradio import eng_notation
import math
import sys
import random
import fsm_utils
from gnuradio.eng_option import eng_option
from optparse import OptionParser
try:
from gnuradio import analog
except ImportError:
sys.stderr.write("Error: Program requires gr-analog.\n")
sys.exit(1)
def run_test (f,Kb,bitspersymbol,K,dimensionality,constellation,N0,seed):
tb = gr.top_block ()
# TX
packet = [0]*Kb
# this for loop is TOO slow!!!
for i in range(Kb-1*16): # last 16 bits = 0 to drive the final state to 0
packet[i] = random.randint(0, 1) # random 0s and 1s
src = blocks.vector_source_s(packet,False)
#src = blocks.lfsr_32k_source_s()
#src_head = blocks.head (gr.sizeof_short,Kb/16) # packet size in shorts
b2s = blocks.unpacked_to_packed_ss(1,gr.GR_MSB_FIRST) # pack bits in shorts
s2fsmi = blocks.packed_to_unpacked_ss(bitspersymbol,gr.GR_MSB_FIRST) # unpack shorts to symbols compatible with the FSM input cardinality
enc = trellis.encoder_ss(f,0) # initial state = 0
mod = digital.chunks_to_symbols_sf(constellation,dimensionality)
# CHANNEL
add = blocks.add_ff()
noise = analog.noise_source_f(analog.GR_GAUSSIAN,math.sqrt(N0/2),seed)
# RX
metrics = trellis.metrics_f(f.O(),dimensionality,constellation,digital.TRELLIS_EUCLIDEAN) # data preprocessing to generate metrics for Viterbi
va = trellis.viterbi_s(f,K,0,-1) # Put -1 if the Initial/Final states are not set.
fsmi2s = blocks.unpacked_to_packed_ss(bitspersymbol,gr.GR_MSB_FIRST) # pack FSM input symbols to shorts
s2b = blocks.packed_to_unpacked_ss(1,gr.GR_MSB_FIRST) # unpack shorts to bits
dst = blocks.vector_sink_s();
#dst = blocks.check_lfsr_32k_s();
#tb.connect (src,src_head,s2fsmi,enc,mod)
tb.connect (src,b2s,s2fsmi,enc,mod)
tb.connect (mod,(add,0))
tb.connect (noise,(add,1))
tb.connect (add,metrics)
#tb.connect (metrics,va,fsmi2s,dst)
tb.connect (metrics,va,fsmi2s,s2b,dst)
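    # Signal flow as wired above: random bits -> pack to shorts -> FSM input symbols
    # -> trellis encoder -> constellation mapper -> AWGN channel -> Euclidean metrics
    # -> Viterbi decoder -> repack -> unpack back to bits -> vector sink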
tb.run()
# A bit of cheating: run the program once and print the
# final encoder state..
# Then put it as the last argument in the viterbi block
#print "final state = " , enc.ST()
#ntotal = dst.ntotal ()
#nright = dst.nright ()
#runlength = dst.runlength ()
ntotal = len(packet)
if len(dst.data()) != ntotal:
print "Error: not enough data\n"
nright = 0;
# this for loop is TOO slow!!!
for i in range(ntotal):
if packet[i]==dst.data()[i]:
nright=nright+1
#else:
#print "Error in ", i
return (ntotal,ntotal-nright)
def main():
parser = OptionParser(option_class=eng_option)
parser.add_option("-f", "--fsm_file", type="string", default="fsm_files/awgn1o2_4.fsm", help="Filename containing the fsm specification, e.g. -f fsm_files/awgn1o2_4.fsm (default=fsm_files/awgn1o2_4.fsm)")
parser.add_option("-e", "--esn0", type="eng_float", default=10.0, help="Symbol energy to noise PSD level ratio in dB, e.g., -e 10.0 (default=10.0)")
parser.add_option("-r", "--repetitions", type="int", default=100, help="Number of packets to be generated for the simulation, e.g., -r 100 (default=100)")
(options, args) = parser.parse_args ()
if len(args) != 0:
parser.print_help()
raise SystemExit, 1
fname=options.fsm_file
esn0_db=float(options.esn0)
rep=int(options.repetitions)
# system parameters
f=trellis.fsm(fname) # get the FSM specification from a file
Kb=1024*16 # packet size in bits (make it multiple of 16 so it can be packed in a short)
bitspersymbol = int(round(math.log(f.I())/math.log(2))) # bits per FSM input symbol
K=Kb/bitspersymbol # packet size in trellis steps
modulation = fsm_utils.psk4 # see fsm_utlis.py for available predefined modulations
dimensionality = modulation[0]
constellation = modulation[1]
if len(constellation)/dimensionality != f.O():
sys.stderr.write ('Incompatible FSM output cardinality and modulation size.\n')
sys.exit (1)
# calculate average symbol energy
Es = 0
for i in range(len(constellation)):
Es = Es + constellation[i]**2
Es = Es / (len(constellation)/dimensionality)
N0=Es/pow(10.0,esn0_db/10.0); # calculate noise variance
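    # i.e. N0 = Es / 10**(esn0_db/10); e.g. Es = 1.0 at 10 dB gives N0 = 0.1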
tot_s=0 # total number of transmitted shorts
terr_s=0 # total number of shorts in error
terr_p=0 # total number of packets in error
for i in range(rep):
(s,e)=run_test(f,Kb,bitspersymbol,K,dimensionality,constellation,N0,-long(666+i)) # run experiment with different seed to get different noise realizations
tot_s=tot_s+s
terr_s=terr_s+e
terr_p=terr_p+(terr_s!=0)
if ((i+1)%1==0) : # display progress
print i+1,terr_p, '%.2e' % ((1.0*terr_p)/(i+1)),tot_s,terr_s, '%.2e' % ((1.0*terr_s)/tot_s)
# estimate of the (short or bit) error rate
print rep,terr_p, '%.2e' % ((1.0*terr_p)/(i+1)),tot_s,terr_s, '%.2e' % ((1.0*terr_s)/tot_s)
if __name__ == '__main__':
main()
|
gpl-3.0
|
apple/llvm-project
|
lldb/test/API/functionalities/breakpoint/breakpoint_names/TestBreakpointNames.py
|
4
|
18396
|
"""
Test breakpoint names.
"""
import os
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class BreakpointNames(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
@add_test_categories(['pyapi'])
def test_setting_names(self):
"""Use Python APIs to test that we can set breakpoint names."""
self.build()
self.setup_target()
self.do_check_names()
def test_illegal_names(self):
"""Use Python APIs to test that we don't allow illegal names."""
self.build()
self.setup_target()
self.do_check_illegal_names()
def test_using_names(self):
"""Use Python APIs to test that operations on names works correctly."""
self.build()
self.setup_target()
self.do_check_using_names()
def test_configuring_names(self):
"""Use Python APIs to test that configuring options on breakpoint names works correctly."""
self.build()
self.make_a_dummy_name()
self.setup_target()
self.do_check_configuring_names()
def test_configuring_permissions_sb(self):
"""Use Python APIs to test that configuring permissions on names works correctly."""
self.build()
self.setup_target()
self.do_check_configuring_permissions_sb()
def test_configuring_permissions_cli(self):
"""Use Python APIs to test that configuring permissions on names works correctly."""
self.build()
self.setup_target()
self.do_check_configuring_permissions_cli()
def setup_target(self):
exe = self.getBuildArtifact("a.out")
        # Create the target we are making breakpoints in and copying names to:
self.target = self.dbg.CreateTarget(exe)
self.assertTrue(self.target, VALID_TARGET)
self.main_file_spec = lldb.SBFileSpec(os.path.join(self.getSourceDir(), "main.c"))
def check_name_in_target(self, bkpt_name):
name_list = lldb.SBStringList()
self.target.GetBreakpointNames(name_list)
found_it = False
for name in name_list:
if name == bkpt_name:
found_it = True
break
self.assertTrue(found_it, "Didn't find the name %s in the target's name list:"%(bkpt_name))
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# These are the settings we're going to be putting into names & breakpoints:
self.bp_name_string = "ABreakpoint"
self.is_one_shot = True
self.ignore_count = 1000
self.condition = "1 == 2"
self.auto_continue = True
self.tid = 0xaaaa
self.tidx = 10
self.thread_name = "Fooey"
self.queue_name = "Blooey"
self.cmd_list = lldb.SBStringList()
self.cmd_list.AppendString("frame var")
self.cmd_list.AppendString("bt")
self.help_string = "I do something interesting"
def do_check_names(self):
"""Use Python APIs to check that we can set & retrieve breakpoint names"""
bkpt = self.target.BreakpointCreateByLocation(self.main_file_spec, 10)
bkpt_name = "ABreakpoint"
other_bkpt_name = "_AnotherBreakpoint"
# Add a name and make sure we match it:
success = bkpt.AddNameWithErrorHandling(bkpt_name)
self.assertSuccess(success, "We couldn't add a legal name to a breakpoint.")
matches = bkpt.MatchesName(bkpt_name)
self.assertTrue(matches, "We didn't match the name we just set")
# Make sure we don't match irrelevant names:
matches = bkpt.MatchesName("NotABreakpoint")
self.assertTrue(not matches, "We matched a name we didn't set.")
# Make sure the name is also in the target:
self.check_name_in_target(bkpt_name)
# Add another name, make sure that works too:
bkpt.AddNameWithErrorHandling(other_bkpt_name)
matches = bkpt.MatchesName(bkpt_name)
        self.assertTrue(matches, "Adding another name stopped the original name from matching")
self.check_name_in_target(other_bkpt_name)
# Remove the name and make sure we no longer match it:
bkpt.RemoveName(bkpt_name)
matches = bkpt.MatchesName(bkpt_name)
self.assertTrue(not matches,"We still match a name after removing it.")
# Make sure the name list has the remaining name:
name_list = lldb.SBStringList()
bkpt.GetNames(name_list)
num_names = name_list.GetSize()
self.assertEquals(num_names, 1, "Name list has %d items, expected 1."%(num_names))
name = name_list.GetStringAtIndex(0)
self.assertEquals(name, other_bkpt_name, "Remaining name was: %s expected %s."%(name, other_bkpt_name))
def do_check_illegal_names(self):
"""Use Python APIs to check that we reject illegal names."""
bkpt = self.target.BreakpointCreateByLocation(self.main_file_spec, 10)
bad_names = ["-CantStartWithADash",
"1CantStartWithANumber",
"^CantStartWithNonAlpha",
"CantHave-ADash",
"Cant Have Spaces"]
for bad_name in bad_names:
success = bkpt.AddNameWithErrorHandling(bad_name)
self.assertTrue(success.Fail(), "We allowed an illegal name: %s"%(bad_name))
bp_name = lldb.SBBreakpointName(self.target, bad_name)
self.assertFalse(bp_name.IsValid(), "We made a breakpoint name with an illegal name: %s"%(bad_name));
            retval = lldb.SBCommandReturnObject()
self.dbg.GetCommandInterpreter().HandleCommand("break set -n whatever -N '%s'"%(bad_name), retval)
self.assertTrue(not retval.Succeeded(), "break set succeeded with: illegal name: %s"%(bad_name))
def do_check_using_names(self):
"""Use Python APIs to check names work in place of breakpoint ID's."""
# Create a dummy breakpoint to use up ID 1
_ = self.target.BreakpointCreateByLocation(self.main_file_spec, 30)
# Create a breakpoint to test with
bkpt = self.target.BreakpointCreateByLocation(self.main_file_spec, 10)
bkpt_name = "ABreakpoint"
bkpt_id = bkpt.GetID()
other_bkpt_name= "_AnotherBreakpoint"
# Add a name and make sure we match it:
success = bkpt.AddNameWithErrorHandling(bkpt_name)
self.assertSuccess(success, "We couldn't add a legal name to a breakpoint.")
bkpts = lldb.SBBreakpointList(self.target)
self.target.FindBreakpointsByName(bkpt_name, bkpts)
self.assertEquals(bkpts.GetSize(), 1, "One breakpoint matched.")
found_bkpt = bkpts.GetBreakpointAtIndex(0)
self.assertEquals(bkpt.GetID(), found_bkpt.GetID(),"The right breakpoint.")
self.assertEquals(bkpt.GetID(), bkpt_id,"With the same ID as before.")
retval = lldb.SBCommandReturnObject()
self.dbg.GetCommandInterpreter().HandleCommand("break disable %s"%(bkpt_name), retval)
self.assertTrue(retval.Succeeded(), "break disable failed with: %s."%(retval.GetError()))
self.assertTrue(not bkpt.IsEnabled(), "We didn't disable the breakpoint.")
# Also make sure we don't apply commands to non-matching names:
self.dbg.GetCommandInterpreter().HandleCommand("break modify --one-shot 1 %s"%(other_bkpt_name), retval)
self.assertTrue(retval.Succeeded(), "break modify failed with: %s."%(retval.GetError()))
self.assertTrue(not bkpt.IsOneShot(), "We applied one-shot to the wrong breakpoint.")
def check_option_values(self, bp_object):
self.assertEqual(bp_object.IsOneShot(), self.is_one_shot, "IsOneShot")
self.assertEqual(bp_object.GetIgnoreCount(), self.ignore_count, "IgnoreCount")
self.assertEqual(bp_object.GetCondition(), self.condition, "Condition")
self.assertEqual(bp_object.GetAutoContinue(), self.auto_continue, "AutoContinue")
self.assertEqual(bp_object.GetThreadID(), self.tid, "Thread ID")
self.assertEqual(bp_object.GetThreadIndex(), self.tidx, "Thread Index")
self.assertEqual(bp_object.GetThreadName(), self.thread_name, "Thread Name")
self.assertEqual(bp_object.GetQueueName(), self.queue_name, "Queue Name")
set_cmds = lldb.SBStringList()
bp_object.GetCommandLineCommands(set_cmds)
self.assertEqual(set_cmds.GetSize(), self.cmd_list.GetSize(), "Size of command line commands")
for idx in range(0, set_cmds.GetSize()):
self.assertEqual(self.cmd_list.GetStringAtIndex(idx), set_cmds.GetStringAtIndex(idx), "Command %d"%(idx))
def make_a_dummy_name(self):
"This makes a breakpoint name in the dummy target to make sure it gets copied over"
dummy_target = self.dbg.GetDummyTarget()
self.assertTrue(dummy_target.IsValid(), "Dummy target was not valid.")
def cleanup ():
self.dbg.GetDummyTarget().DeleteBreakpointName(self.bp_name_string)
# Execute the cleanup function during test case tear down.
self.addTearDownHook(cleanup)
# Now find it in the dummy target, and make sure these settings took:
bp_name = lldb.SBBreakpointName(dummy_target, self.bp_name_string)
# Make sure the name is right:
self.assertEqual(bp_name.GetName(), self.bp_name_string, "Wrong bp_name: %s"%(bp_name.GetName()))
bp_name.SetOneShot(self.is_one_shot)
bp_name.SetIgnoreCount(self.ignore_count)
bp_name.SetCondition(self.condition)
bp_name.SetAutoContinue(self.auto_continue)
bp_name.SetThreadID(self.tid)
bp_name.SetThreadIndex(self.tidx)
bp_name.SetThreadName(self.thread_name)
bp_name.SetQueueName(self.queue_name)
bp_name.SetCommandLineCommands(self.cmd_list)
# Now look it up again, and make sure it got set correctly.
bp_name = lldb.SBBreakpointName(dummy_target, self.bp_name_string)
self.assertTrue(bp_name.IsValid(), "Failed to make breakpoint name.")
self.check_option_values(bp_name)
def do_check_configuring_names(self):
"""Use Python APIs to check that configuring breakpoint names works correctly."""
other_bp_name_string = "AnotherBreakpointName"
cl_bp_name_string = "CLBreakpointName"
# Now find the version copied in from the dummy target, and make sure these settings took:
bp_name = lldb.SBBreakpointName(self.target, self.bp_name_string)
self.assertTrue(bp_name.IsValid(), "Failed to make breakpoint name.")
self.check_option_values(bp_name)
# Now add this name to a breakpoint, and make sure it gets configured properly
bkpt = self.target.BreakpointCreateByLocation(self.main_file_spec, 10)
success = bkpt.AddNameWithErrorHandling(self.bp_name_string)
self.assertSuccess(success, "Couldn't add this name to the breakpoint")
self.check_option_values(bkpt)
# Now make a name from this breakpoint, and make sure the new name is properly configured:
new_name = lldb.SBBreakpointName(bkpt, other_bp_name_string)
self.assertTrue(new_name.IsValid(), "Couldn't make a valid bp_name from a breakpoint.")
self.check_option_values(bkpt)
# Now change the name's option and make sure it gets propagated to
# the breakpoint:
new_auto_continue = not self.auto_continue
bp_name.SetAutoContinue(new_auto_continue)
self.assertEqual(bp_name.GetAutoContinue(), new_auto_continue, "Couldn't change auto-continue on the name")
self.assertEqual(bkpt.GetAutoContinue(), new_auto_continue, "Option didn't propagate to the breakpoint.")
# Now make this same breakpoint name - but from the command line
cmd_str = "breakpoint name configure %s -o %d -i %d -c '%s' -G %d -t %d -x %d -T '%s' -q '%s' -H '%s'"%(cl_bp_name_string,
self.is_one_shot,
self.ignore_count,
self.condition,
self.auto_continue,
self.tid,
self.tidx,
self.thread_name,
self.queue_name,
self.help_string)
for cmd in self.cmd_list:
cmd_str += " -C '%s'"%(cmd)
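        # With the values from setUp() the resulting command is roughly:
        #   breakpoint name configure CLBreakpointName -o 1 -i 1000 -c '1 == 2' -G 1
        #   -t 43690 -x 10 -T 'Fooey' -q 'Blooey' -H 'I do something interesting'
        #   -C 'frame var' -C 'bt'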
self.runCmd(cmd_str, check=True)
# Now look up this name again and check its options:
cl_name = lldb.SBBreakpointName(self.target, cl_bp_name_string)
self.check_option_values(cl_name)
# Also check the help string:
self.assertEqual(self.help_string, cl_name.GetHelpString(), "Help string didn't match")
# Change the name and make sure that works:
new_help = "I do something even more interesting"
cl_name.SetHelpString(new_help)
        self.assertEqual(new_help, cl_name.GetHelpString(), "SetHelpString didn't change the help string")
# We should have three names now, make sure the target can list them:
name_list = lldb.SBStringList()
self.target.GetBreakpointNames(name_list)
for name_string in [self.bp_name_string, other_bp_name_string, cl_bp_name_string]:
self.assertIn(name_string, name_list, "Didn't find %s in names"%(name_string))
# Delete the name from the current target. Make sure that works and deletes the
# name from the breakpoint as well:
self.target.DeleteBreakpointName(self.bp_name_string)
name_list.Clear()
self.target.GetBreakpointNames(name_list)
self.assertNotIn(self.bp_name_string, name_list, "Didn't delete %s from a real target"%(self.bp_name_string))
# Also make sure the name got removed from breakpoints holding it:
self.assertFalse(bkpt.MatchesName(self.bp_name_string), "Didn't remove the name from the breakpoint.")
# Test that deleting the name we injected into the dummy target works (there's also a
        # cleanup that will do this, but that won't test the result...)
dummy_target = self.dbg.GetDummyTarget()
dummy_target.DeleteBreakpointName(self.bp_name_string)
name_list.Clear()
dummy_target.GetBreakpointNames(name_list)
self.assertNotIn(self.bp_name_string, name_list, "Didn't delete %s from the dummy target"%(self.bp_name_string))
# Also make sure the name got removed from breakpoints holding it:
self.assertFalse(bkpt.MatchesName(self.bp_name_string), "Didn't remove the name from the breakpoint.")
def check_permission_results(self, bp_name):
self.assertEqual(bp_name.GetAllowDelete(), False, "Didn't set allow delete.")
protected_bkpt = self.target.BreakpointCreateByLocation(self.main_file_spec, 10)
protected_id = protected_bkpt.GetID()
unprotected_bkpt = self.target.BreakpointCreateByLocation(self.main_file_spec, 10)
unprotected_id = unprotected_bkpt.GetID()
success = protected_bkpt.AddNameWithErrorHandling(self.bp_name_string)
self.assertSuccess(success, "Couldn't add this name to the breakpoint")
self.target.DisableAllBreakpoints()
        self.assertEqual(protected_bkpt.IsEnabled(), True, "Didn't keep the breakpoint from being disabled")
self.assertEqual(unprotected_bkpt.IsEnabled(), False, "Protected too many breakpoints from disabling.")
# Try from the command line too:
unprotected_bkpt.SetEnabled(True)
result = lldb.SBCommandReturnObject()
self.dbg.GetCommandInterpreter().HandleCommand("break disable", result)
self.assertTrue(result.Succeeded())
        self.assertEqual(protected_bkpt.IsEnabled(), True, "Didn't keep the breakpoint from being disabled")
self.assertEqual(unprotected_bkpt.IsEnabled(), False, "Protected too many breakpoints from disabling.")
self.target.DeleteAllBreakpoints()
bkpt = self.target.FindBreakpointByID(protected_id)
self.assertTrue(bkpt.IsValid(), "Didn't keep the breakpoint from being deleted.")
bkpt = self.target.FindBreakpointByID(unprotected_id)
self.assertFalse(bkpt.IsValid(), "Protected too many breakpoints from deletion.")
# Remake the unprotected breakpoint and try again from the command line:
unprotected_bkpt = self.target.BreakpointCreateByLocation(self.main_file_spec, 10)
unprotected_id = unprotected_bkpt.GetID()
self.dbg.GetCommandInterpreter().HandleCommand("break delete -f", result)
self.assertTrue(result.Succeeded())
bkpt = self.target.FindBreakpointByID(protected_id)
self.assertTrue(bkpt.IsValid(), "Didn't keep the breakpoint from being deleted.")
bkpt = self.target.FindBreakpointByID(unprotected_id)
self.assertFalse(bkpt.IsValid(), "Protected too many breakpoints from deletion.")
def do_check_configuring_permissions_sb(self):
bp_name = lldb.SBBreakpointName(self.target, self.bp_name_string)
# Make a breakpoint name with delete disallowed:
bp_name = lldb.SBBreakpointName(self.target, self.bp_name_string)
self.assertTrue(bp_name.IsValid(), "Failed to make breakpoint name for valid name.")
bp_name.SetAllowDelete(False)
bp_name.SetAllowDisable(False)
bp_name.SetAllowList(False)
self.check_permission_results(bp_name)
def do_check_configuring_permissions_cli(self):
# Make the name with the right options using the command line:
self.runCmd("breakpoint name configure -L 0 -D 0 -A 0 %s"%(self.bp_name_string), check=True)
# Now look up the breakpoint we made, and check that it works.
bp_name = lldb.SBBreakpointName(self.target, self.bp_name_string)
self.assertTrue(bp_name.IsValid(), "Didn't make a breakpoint name we could find.")
self.check_permission_results(bp_name)
|
apache-2.0
|
kaflesudip/grabfeed
|
docs/source/conf.py
|
1
|
11341
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Grabfeed documentation build configuration file, created by
# sphinx-quickstart on Tue Jan 19 09:26:38 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
import sphinx_rtd_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Grabfeed'
copyright = '2016, Sudip Kafle'
author = 'Sudip Kafle'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Grabfeeddoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Grabfeed.tex', 'Grabfeed Documentation',
'Sudip Kafle', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'grabfeed', 'Grabfeed Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Grabfeed', 'Grabfeed Documentation',
author, 'Grabfeed', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
|
apache-2.0
|
caseyrygt/osf.io
|
tests/test_metadata.py
|
27
|
1206
|
# -*- coding: utf-8 -*-
'''Unit tests for models and their factories.'''
import unittest
from nose.tools import * # PEP8 asserts
from framework.forms.utils import process_payload
from website.project.model import MetaSchema
from website.project.model import ensure_schemas
from website.project.metadata.schemas import OSF_META_SCHEMAS
from tests.base import OsfTestCase
class TestMetaData(OsfTestCase):
def test_ensure_schemas(self):
# Should be zero MetaSchema records to begin with
assert_equal(
MetaSchema.find().count(),
0
)
ensure_schemas()
assert_equal(
MetaSchema.find().count(),
len(OSF_META_SCHEMAS)
)
def test_process(self):
processed = process_payload({'foo': 'bar&baz'})
assert_equal(processed['foo'], 'bar%26baz')
def test_process_list(self):
processed = process_payload({'foo': ['bar', 'baz&bob']})
assert_equal(processed['foo'][1], 'baz%26bob')
def test_process_whitespace(self):
processed = process_payload({'foo': 'bar baz'})
assert_equal(processed['foo'], 'bar baz')
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
DONIKAN/django
|
django/core/management/commands/loaddata.py
|
294
|
12977
|
from __future__ import unicode_literals
import glob
import gzip
import os
import warnings
import zipfile
from itertools import product
from django.apps import apps
from django.conf import settings
from django.core import serializers
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.db import (
DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections, router,
transaction,
)
from django.utils import lru_cache
from django.utils._os import upath
from django.utils.encoding import force_text
from django.utils.functional import cached_property
from django.utils.glob import glob_escape
try:
import bz2
has_bz2 = True
except ImportError:
has_bz2 = False
class Command(BaseCommand):
help = 'Installs the named fixture(s) in the database.'
missing_args_message = ("No database fixture specified. Please provide the "
"path of at least one fixture in the command line.")
def add_arguments(self, parser):
parser.add_argument('args', metavar='fixture', nargs='+',
help='Fixture labels.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load '
'fixtures into. Defaults to the "default" database.')
parser.add_argument('--app', action='store', dest='app_label',
default=None, help='Only look for fixtures in the specified app.')
parser.add_argument('--ignorenonexistent', '-i', action='store_true',
dest='ignore', default=False,
help='Ignores entries in the serialized data for fields that do not '
'currently exist on the model.')
def handle(self, *fixture_labels, **options):
self.ignore = options.get('ignore')
self.using = options.get('database')
self.app_label = options.get('app_label')
self.hide_empty = options.get('hide_empty', False)
self.verbosity = options.get('verbosity')
with transaction.atomic(using=self.using):
self.loaddata(fixture_labels)
# Close the DB connection -- unless we're still in a transaction. This
# is required as a workaround for an edge case in MySQL: if the same
# connection is used to create tables, load data, and query, the query
# can return incorrect results. See Django #7572, MySQL #37735.
if transaction.get_autocommit(self.using):
connections[self.using].close()
def loaddata(self, fixture_labels):
connection = connections[self.using]
# Keep a count of the installed objects and fixtures
self.fixture_count = 0
self.loaded_object_count = 0
self.fixture_object_count = 0
self.models = set()
self.serialization_formats = serializers.get_public_serializer_formats()
# Forcing binary mode may be revisited after dropping Python 2 support (see #22399)
self.compression_formats = {
None: (open, 'rb'),
'gz': (gzip.GzipFile, 'rb'),
'zip': (SingleZipReader, 'r'),
}
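        # e.g. a fixture ending in '.gz' is opened with gzip.GzipFile in binary mode,
        # and one ending in '.zip' with the SingleZipReader defined further below.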
if has_bz2:
self.compression_formats['bz2'] = (bz2.BZ2File, 'r')
with connection.constraint_checks_disabled():
for fixture_label in fixture_labels:
self.load_label(fixture_label)
# Since we disabled constraint checks, we must manually check for
# any invalid keys that might have been added
table_names = [model._meta.db_table for model in self.models]
try:
connection.check_constraints(table_names=table_names)
except Exception as e:
e.args = ("Problem installing fixtures: %s" % e,)
raise
# If we found even one object in a fixture, we need to reset the
# database sequences.
if self.loaded_object_count > 0:
sequence_sql = connection.ops.sequence_reset_sql(no_style(), self.models)
if sequence_sql:
if self.verbosity >= 2:
self.stdout.write("Resetting sequences\n")
with connection.cursor() as cursor:
for line in sequence_sql:
cursor.execute(line)
if self.verbosity >= 1:
if self.fixture_count == 0 and self.hide_empty:
pass
elif self.fixture_object_count == self.loaded_object_count:
self.stdout.write("Installed %d object(s) from %d fixture(s)" %
(self.loaded_object_count, self.fixture_count))
else:
self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" %
(self.loaded_object_count, self.fixture_object_count, self.fixture_count))
def load_label(self, fixture_label):
"""
        Loads fixture files for a given label.
"""
show_progress = self.verbosity >= 3
for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label):
_, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
open_method, mode = self.compression_formats[cmp_fmt]
fixture = open_method(fixture_file, mode)
try:
self.fixture_count += 1
objects_in_fixture = 0
loaded_objects_in_fixture = 0
if self.verbosity >= 2:
self.stdout.write("Installing %s fixture '%s' from %s." %
(ser_fmt, fixture_name, humanize(fixture_dir)))
objects = serializers.deserialize(ser_fmt, fixture,
using=self.using, ignorenonexistent=self.ignore)
for obj in objects:
objects_in_fixture += 1
if router.allow_migrate_model(self.using, obj.object.__class__):
loaded_objects_in_fixture += 1
self.models.add(obj.object.__class__)
try:
obj.save(using=self.using)
if show_progress:
self.stdout.write(
'\rProcessed %i object(s).' % loaded_objects_in_fixture,
ending=''
)
except (DatabaseError, IntegrityError) as e:
e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
'app_label': obj.object._meta.app_label,
'object_name': obj.object._meta.object_name,
'pk': obj.object.pk,
'error_msg': force_text(e)
},)
raise
if objects and show_progress:
self.stdout.write('') # add a newline after progress indicator
self.loaded_object_count += loaded_objects_in_fixture
self.fixture_object_count += objects_in_fixture
except Exception as e:
if not isinstance(e, CommandError):
e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),)
raise
finally:
fixture.close()
# Warn if the fixture we loaded contains 0 objects.
if objects_in_fixture == 0:
warnings.warn(
"No fixture data found for '%s'. (File format may be "
"invalid.)" % fixture_name,
RuntimeWarning
)
@lru_cache.lru_cache(maxsize=None)
def find_fixtures(self, fixture_label):
"""
Finds fixture files for a given label.
"""
fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label)
databases = [self.using, None]
cmp_fmts = list(self.compression_formats.keys()) if cmp_fmt is None else [cmp_fmt]
ser_fmts = serializers.get_public_serializer_formats() if ser_fmt is None else [ser_fmt]
if self.verbosity >= 2:
self.stdout.write("Loading '%s' fixtures..." % fixture_name)
if os.path.isabs(fixture_name):
fixture_dirs = [os.path.dirname(fixture_name)]
fixture_name = os.path.basename(fixture_name)
else:
fixture_dirs = self.fixture_dirs
if os.path.sep in os.path.normpath(fixture_name):
fixture_dirs = [os.path.join(dir_, os.path.dirname(fixture_name))
for dir_ in fixture_dirs]
fixture_name = os.path.basename(fixture_name)
suffixes = ('.'.join(ext for ext in combo if ext)
for combo in product(databases, ser_fmts, cmp_fmts))
targets = set('.'.join((fixture_name, suffix)) for suffix in suffixes)
fixture_files = []
for fixture_dir in fixture_dirs:
if self.verbosity >= 2:
self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))
fixture_files_in_dir = []
path = os.path.join(fixture_dir, fixture_name)
for candidate in glob.iglob(glob_escape(path) + '*'):
if os.path.basename(candidate) in targets:
# Save the fixture_dir and fixture_name for future error messages.
fixture_files_in_dir.append((candidate, fixture_dir, fixture_name))
if self.verbosity >= 2 and not fixture_files_in_dir:
self.stdout.write("No fixture '%s' in %s." %
(fixture_name, humanize(fixture_dir)))
# Check kept for backwards-compatibility; it isn't clear why
# duplicates are only allowed in different directories.
if len(fixture_files_in_dir) > 1:
raise CommandError(
"Multiple fixtures named '%s' in %s. Aborting." %
(fixture_name, humanize(fixture_dir)))
fixture_files.extend(fixture_files_in_dir)
if not fixture_files:
# Warning kept for backwards-compatibility; why not an exception?
warnings.warn("No fixture named '%s' found." % fixture_name)
return fixture_files
@cached_property
def fixture_dirs(self):
"""
Return a list of fixture directories.
The list contains the 'fixtures' subdirectory of each installed
application, if it exists, the directories in FIXTURE_DIRS, and the
current directory.
"""
dirs = []
fixture_dirs = settings.FIXTURE_DIRS
if len(fixture_dirs) != len(set(fixture_dirs)):
raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.")
for app_config in apps.get_app_configs():
app_label = app_config.label
app_dir = os.path.join(app_config.path, 'fixtures')
if app_dir in fixture_dirs:
raise ImproperlyConfigured(
"'%s' is a default fixture directory for the '%s' app "
"and cannot be listed in settings.FIXTURE_DIRS." % (app_dir, app_label)
)
if self.app_label and app_label != self.app_label:
continue
if os.path.isdir(app_dir):
dirs.append(app_dir)
dirs.extend(list(fixture_dirs))
dirs.append('')
dirs = [upath(os.path.abspath(os.path.realpath(d))) for d in dirs]
return dirs
def parse_name(self, fixture_name):
"""
        Splits the fixture name into name, serialization format, and compression format.
"""
parts = fixture_name.rsplit('.', 2)
if len(parts) > 1 and parts[-1] in self.compression_formats:
cmp_fmt = parts[-1]
parts = parts[:-1]
else:
cmp_fmt = None
if len(parts) > 1:
if parts[-1] in self.serialization_formats:
ser_fmt = parts[-1]
parts = parts[:-1]
else:
raise CommandError(
"Problem installing fixture '%s': %s is not a known "
"serialization format." % (''.join(parts[:-1]), parts[-1]))
else:
ser_fmt = None
name = '.'.join(parts)
return name, ser_fmt, cmp_fmt
class SingleZipReader(zipfile.ZipFile):
def __init__(self, *args, **kwargs):
zipfile.ZipFile.__init__(self, *args, **kwargs)
if len(self.namelist()) != 1:
raise ValueError("Zip-compressed fixtures must contain one file.")
def read(self):
return zipfile.ZipFile.read(self, self.namelist()[0])
def humanize(dirname):
return "'%s'" % dirname if dirname else 'absolute path'
|
bsd-3-clause
|
kperun/nestml
|
pynestml/visitors/ast_line_operation_visitor.py
|
1
|
1699
|
#
# ASTLineOperatorVisitor.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
rhs : left=rhs (plusOp='+' | minusOp='-') right=rhs
"""
from pynestml.meta_model.ast_expression import ASTExpression
from pynestml.visitors.ast_visitor import ASTVisitor
class ASTLineOperatorVisitor(ASTVisitor):
"""
Visits a single binary operation consisting of + or - and updates the type accordingly.
"""
def visit_expression(self, node):
"""
Visits a single expression containing a plus or minus operator and updates its type.
:param node: a single expression
:type node: ASTExpression
"""
lhs_type = node.get_lhs().type
rhs_type = node.get_rhs().type
arith_op = node.get_binary_operator()
lhs_type.referenced_object = node.get_lhs()
rhs_type.referenced_object = node.get_rhs()
if arith_op.is_plus_op:
node.type = lhs_type + rhs_type
return
elif arith_op.is_minus_op:
node.type = lhs_type - rhs_type
return
|
gpl-2.0
|
ammarkhann/FinalSeniorCode
|
lib/python2.7/site-packages/mpmath/tests/test_linalg.py
|
2
|
10275
|
# TODO: don't use round
from __future__ import division
from mpmath import *
xrange = libmp.backend.xrange
# XXX: these shouldn't be visible(?)
LU_decomp = mp.LU_decomp
L_solve = mp.L_solve
U_solve = mp.U_solve
householder = mp.householder
improve_solution = mp.improve_solution
A1 = matrix([[3, 1, 6],
[2, 1, 3],
[1, 1, 1]])
b1 = [2, 7, 4]
A2 = matrix([[ 2, -1, -1, 2],
[ 6, -2, 3, -1],
[-4, 2, 3, -2],
[ 2, 0, 4, -3]])
b2 = [3, -3, -2, -1]
A3 = matrix([[ 1, 0, -1, -1, 0],
[ 0, 1, 1, 0, -1],
[ 4, -5, 2, 0, 0],
[ 0, 0, -2, 9,-12],
[ 0, 5, 0, 0, 12]])
b3 = [0, 0, 0, 0, 50]
A4 = matrix([[10.235, -4.56, 0., -0.035, 5.67],
[-2.463, 1.27, 3.97, -8.63, 1.08],
[-6.58, 0.86, -0.257, 9.32, -43.6 ],
[ 9.83, 7.39, -17.25, 0.036, 24.86],
[-9.31, 34.9, 78.56, 1.07, 65.8 ]])
b4 = [8.95, 20.54, 7.42, 5.60, 58.43]
A5 = matrix([[ 1, 2, -4],
[-2, -3, 5],
[ 3, 5, -8]])
A6 = matrix([[ 1.377360, 2.481400, 5.359190],
[ 2.679280, -1.229560, 25.560210],
[-1.225280+1.e6, 9.910180, -35.049900-1.e6]])
b6 = [23.500000, -15.760000, 2.340000]
A7 = matrix([[1, -0.5],
[2, 1],
[-2, 6]])
b7 = [3, 2, -4]
A8 = matrix([[1, 2, 3],
[-1, 0, 1],
[-1, -2, -1],
[1, 0, -1]])
b8 = [1, 2, 3, 4]
A9 = matrix([[ 4, 2, -2],
[ 2, 5, -4],
[-2, -4, 5.5]])
b9 = [10, 16, -15.5]
A10 = matrix([[1.0 + 1.0j, 2.0, 2.0],
[4.0, 5.0, 6.0],
[7.0, 8.0, 9.0]])
b10 = [1.0, 1.0 + 1.0j, 1.0]
def test_LU_decomp():
A = A3.copy()
b = b3
A, p = LU_decomp(A)
y = L_solve(A, b, p)
x = U_solve(A, y)
assert p == [2, 1, 2, 3]
assert [round(i, 14) for i in x] == [3.78953107960742, 2.9989094874591098,
-0.081788440567070006, 3.8713195201744801, 2.9171210468920399]
A = A4.copy()
b = b4
A, p = LU_decomp(A)
y = L_solve(A, b, p)
x = U_solve(A, y)
assert p == [0, 3, 4, 3]
assert [round(i, 14) for i in x] == [2.6383625899619201, 2.6643834462368399,
0.79208015947958998, -2.5088376454101899, -1.0567657691375001]
A = randmatrix(3)
bak = A.copy()
LU_decomp(A, overwrite=1)
assert A != bak
def test_inverse():
for A in [A1, A2, A5]:
inv = inverse(A)
assert mnorm(A*inv - eye(A.rows), 1) < 1.e-14
def test_householder():
mp.dps = 15
A, b = A8, b8
H, p, x, r = householder(extend(A, b))
assert H == matrix(
[[mpf('3.0'), mpf('-2.0'), mpf('-1.0'), 0],
[-1.0,mpf('3.333333333333333'),mpf('-2.9999999999999991'),mpf('2.0')],
[-1.0, mpf('-0.66666666666666674'),mpf('2.8142135623730948'),
mpf('-2.8284271247461898')],
[1.0, mpf('-1.3333333333333333'),mpf('-0.20000000000000018'),
mpf('4.2426406871192857')]])
assert p == [-2, -2, mpf('-1.4142135623730949')]
assert round(norm(r, 2), 10) == 4.2426406870999998
y = [102.102, 58.344, 36.463, 24.310, 17.017, 12.376, 9.282, 7.140, 5.610,
4.488, 3.6465, 3.003]
def coeff(n):
        # similar to a Hilbert matrix
A = []
for i in range(1, 13):
A.append([1. / (i + j - 1) for j in range(1, n + 1)])
return matrix(A)
residuals = []
refres = []
for n in range(2, 7):
A = coeff(n)
H, p, x, r = householder(extend(A, y))
x = matrix(x)
y = matrix(y)
residuals.append(norm(r, 2))
refres.append(norm(residual(A, x, y), 2))
assert [round(res, 10) for res in residuals] == [15.1733888877,
0.82378073210000002, 0.302645887, 0.0260109244,
0.00058653999999999998]
assert norm(matrix(residuals) - matrix(refres), inf) < 1.e-13
def test_factorization():
A = randmatrix(5)
P, L, U = lu(A)
assert mnorm(P*A - L*U, 1) < 1.e-15
def test_solve():
assert norm(residual(A6, lu_solve(A6, b6), b6), inf) < 1.e-10
assert norm(residual(A7, lu_solve(A7, b7), b7), inf) < 1.5
assert norm(residual(A8, lu_solve(A8, b8), b8), inf) <= 3 + 1.e-10
assert norm(residual(A6, qr_solve(A6, b6)[0], b6), inf) < 1.e-10
assert norm(residual(A7, qr_solve(A7, b7)[0], b7), inf) < 1.5
assert norm(residual(A8, qr_solve(A8, b8)[0], b8), 2) <= 4.3
assert norm(residual(A10, lu_solve(A10, b10), b10), 2) < 1.e-10
assert norm(residual(A10, qr_solve(A10, b10)[0], b10), 2) < 1.e-10
def test_solve_overdet_complex():
A = matrix([[1, 2j], [3, 4j], [5, 6]])
b = matrix([1 + j, 2, -j])
assert norm(residual(A, lu_solve(A, b), b)) < 1.0208
def test_singular():
mp.dps = 15
A = [[5.6, 1.2], [7./15, .1]]
B = repr(zeros(2))
b = [1, 2]
def _assert_ZeroDivisionError(statement):
try:
eval(statement)
assert False
except (ZeroDivisionError, ValueError):
pass
for i in ['lu_solve(%s, %s)' % (A, b), 'lu_solve(%s, %s)' % (B, b),
'qr_solve(%s, %s)' % (A, b), 'qr_solve(%s, %s)' % (B, b)]:
_assert_ZeroDivisionError(i)
def test_cholesky():
assert fp.cholesky(fp.matrix(A9)) == fp.matrix([[2, 0, 0], [1, 2, 0], [-1, -3/2, 3/2]])
x = fp.cholesky_solve(A9, b9)
assert fp.norm(fp.residual(A9, x, b9), fp.inf) == 0
def test_det():
assert det(A1) == 1
assert round(det(A2), 14) == 8
assert round(det(A3)) == 1834
assert round(det(A4)) == 4443376
assert det(A5) == 1
assert round(det(A6)) == 78356463
assert det(zeros(3)) == 0
def test_cond():
mp.dps = 15
A = matrix([[1.2969, 0.8648], [0.2161, 0.1441]])
assert cond(A, lambda x: mnorm(x,1)) == mpf('327065209.73817754')
assert cond(A, lambda x: mnorm(x,inf)) == mpf('327065209.73817754')
assert cond(A, lambda x: mnorm(x,'F')) == mpf('249729266.80008656')
@extradps(50)
def test_precision():
A = randmatrix(10, 10)
assert mnorm(inverse(inverse(A)) - A, 1) < 1.e-45
def test_interval_matrix():
mp.dps = 15
iv.dps = 15
a = iv.matrix([['0.1','0.3','1.0'],['7.1','5.5','4.8'],['3.2','4.4','5.6']])
b = iv.matrix(['4','0.6','0.5'])
c = iv.lu_solve(a, b)
assert c[0].delta < 1e-13
assert c[1].delta < 1e-13
assert c[2].delta < 1e-13
assert 5.25823271130625686059275 in c[0]
assert -13.155049396267837541163 in c[1]
assert 7.42069154774972557628979 in c[2]
def test_LU_cache():
A = randmatrix(3)
LU = LU_decomp(A)
assert A._LU == LU_decomp(A)
A[0,0] = -1000
assert A._LU is None
def test_improve_solution():
A = randmatrix(5, min=1e-20, max=1e20)
b = randmatrix(5, 1, min=-1000, max=1000)
x1 = lu_solve(A, b) + randmatrix(5, 1, min=-1e-5, max=1.e-5)
x2 = improve_solution(A, x1, b)
assert norm(residual(A, x2, b), 2) < norm(residual(A, x1, b), 2)
def test_exp_pade():
for i in range(3):
dps = 15
extra = 15
mp.dps = dps + extra
dm = 0
N = 3
dg = range(1,N+1)
a = diag(dg)
expa = diag([exp(x) for x in dg])
        # choose a random matrix that is not close to singular,
        # to avoid adding too much extra precision when computing
        # m**-1 * M * m
while abs(dm) < 0.01:
m = randmatrix(N)
dm = det(m)
m = m/dm
a1 = m**-1 * a * m
e2 = m**-1 * expa * m
mp.dps = dps
e1 = expm(a1, method='pade')
mp.dps = dps + extra
d = e2 - e1
#print d
mp.dps = dps
assert norm(d, inf).ae(0)
mp.dps = 15
def test_qr():
    mp.dps = 15 # use the default value for dps
    lowlimit = -9 # lower limit of matrix element value
    uplimit = 9 # upper limit of matrix element value
maxm = 4 # max matrix size
flg = False # toggle to create real vs complex matrix
zero = mpf('0.0')
for k in xrange(0,10):
exdps = 0
mode = 'full'
flg = bool(k % 2)
# generate arbitrary matrix size (2 to maxm)
num1 = nint(2 + (maxm-2)*rand())
num2 = nint(2 + (maxm-2)*rand())
m = int(max(num1, num2))
n = int(min(num1, num2))
# create matrix
A = mp.matrix(m,n)
# populate matrix values with arbitrary integers
if flg:
flg = False
dtype = 'complex'
for j in xrange(0,n):
for i in xrange(0,m):
val = nint(lowlimit + (uplimit-lowlimit)*rand())
val2 = nint(lowlimit + (uplimit-lowlimit)*rand())
A[i,j] = mpc(val, val2)
else:
flg = True
dtype = 'real'
for j in xrange(0,n):
for i in xrange(0,m):
val = nint(lowlimit + (uplimit-lowlimit)*rand())
A[i,j] = mpf(val)
# perform A -> QR decomposition
Q, R = qr(A, mode, edps = exdps)
#print('\n\n A = \n', nstr(A, 4))
#print('\n Q = \n', nstr(Q, 4))
#print('\n R = \n', nstr(R, 4))
#print('\n Q*R = \n', nstr(Q*R, 4))
maxnorm = mpf('1.0E-11')
n1 = norm(A - Q * R)
#print '\n Norm of A - Q * R = ', n1
if n1 > maxnorm:
raise ValueError('Excessive norm value')
if dtype == 'real':
n1 = norm(eye(m) - Q.T * Q)
#print ' Norm of I - Q.T * Q = ', n1
if n1 > maxnorm:
raise ValueError('Excessive norm value')
n1 = norm(eye(m) - Q * Q.T)
#print ' Norm of I - Q * Q.T = ', n1
if n1 > maxnorm:
raise ValueError('Excessive norm value')
if dtype == 'complex':
n1 = norm(eye(m) - Q.T * Q.conjugate())
#print ' Norm of I - Q.T * Q.conjugate() = ', n1
if n1 > maxnorm:
raise ValueError('Excessive norm value')
n1 = norm(eye(m) - Q.conjugate() * Q.T)
#print ' Norm of I - Q.conjugate() * Q.T = ', n1
if n1 > maxnorm:
raise ValueError('Excessive norm value')
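def test_lu_solve_simple_sketch():
    # Illustrative sketch (not part of the original test suite): solve one of
    # the small systems defined above with lu_solve and check the residual,
    # using only the mpmath helpers already exercised elsewhere in this file.
    x = lu_solve(A1, b1)
    assert norm(residual(A1, x, b1), inf) < 1.e-10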
|
mit
|
bodhiconnolly/python-day-one-client
|
location.py
|
1
|
7437
|
#-------------------------------------------------------------------------------
# Name: location v1.0
# Purpose: get location input from user and find relevant weather
#
# Author: Bodhi Connolly
#
# Created: 24/05/2014
# Copyright: (c) Bodhi Connolly 2014
# Licence: GNU General Public License, version 3 (GPL-3.0)
#-------------------------------------------------------------------------------
from pygeocoder import Geocoder,GeocoderError
import urllib2
import json
import wx
from cStringIO import StringIO
class Location ( wx.Dialog ):
"""A dialog to get user location and local weather via Google Maps and
OpenWeatherMap"""
def __init__( self, parent ):
"""Initialises the items in the dialog
Location.__init__(Parent) -> None
"""
wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY,
title = 'Entry Location',
pos = wx.DefaultPosition, size = wx.Size( -1,-1),
style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )
self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
bSizer1 = wx.BoxSizer( wx.VERTICAL )
bSizer2 = wx.BoxSizer( wx.HORIZONTAL )
self.input_location = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString,
wx.DefaultPosition,
wx.Size( 200,-1 ),
wx.TE_PROCESS_ENTER)
self.Bind(wx.EVT_TEXT_ENTER,self.button_click,self.input_location)
bSizer2.Add( self.input_location, 0, wx.ALL, 5 )
self.button_search = wx.Button( self, wx.ID_ANY, u"Search",
wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer2.Add( self.button_search, 0, wx.ALL, 5 )
self.button_search.Bind(wx.EVT_BUTTON,self.button_click)
self.button_submit = wx.Button( self, wx.ID_OK, u"OK",
wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer2.Add( self.button_submit, 0, wx.ALL, 5 )
self.button_submit.Bind(wx.EVT_BUTTON,self.submit)
self.cancel = wx.Button(self, wx.ID_CANCEL,size=(1,1))
bSizer1.Add( bSizer2, 1, wx.EXPAND, 5 )
self.bitmap = wx.StaticBitmap( self, wx.ID_ANY, wx.NullBitmap,
wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer1.Add( self.bitmap, 1, wx.ALL|wx.EXPAND, 5 )
self.location_text = wx.StaticText( self, wx.ID_ANY,
u"Location:", wx.DefaultPosition,
wx.DefaultSize, 0 )
self.location_text.Wrap( -1 )
bSizer1.Add( self.location_text, 0, wx.ALL, 5 )
self.weather_text = wx.StaticText( self, wx.ID_ANY,
u"Weather:", wx.DefaultPosition,
wx.DefaultSize, 0 )
self.weather_text.Wrap( -1 )
bSizer1.Add( self.weather_text, 0, wx.ALL, 5 )
self.weathernames={'Clear':'Clear','Clouds':'cloudy'}
self.SetSizer( bSizer1 )
self.Layout()
self.Centre( wx.BOTH )
self.searched=False
def button_click(self,evt=None):
"""Finds the coordinates from the user entry text and gets the weather
from these coordinates
Location.button_click(event) -> None
"""
self.get_weather(self.get_coordinates())
self.searched=True
def get_coordinates(self):
"""Searches Google Maps for the location entered and returns the results
        Note: returns None if the location cannot be found
Location.get_coordinates() -> pygeolib.GeocoderResult
"""
try:
self.results=Geocoder.geocode(self.input_location.GetRange(0,
self.input_location.GetLastPosition()))
except GeocoderError:
return None
try:
self.location_text.SetLabel(str(self.results))
except UnicodeDecodeError:
return None
return self.results
def get_weather(self,coordinates):
"""Searches OpenWeatherMap for the weather at specified coordinates
and sets variables based on this result for adding to entry. Also loads
image of coordinates from Google Maps Static Image API.
Location.get_weather() -> None
"""
        if coordinates is None:
self.location_text.SetLabel('Invalid Location')
self.weather_text.SetLabel('No Weather')
else:
coordinates=coordinates.coordinates
request = urllib2.urlopen(
'http://api.openweathermap.org/data/2.5/weather?lat='
+str(coordinates[0])+'&lon='
+str(coordinates[1])+'&units=metric')
response = request.read()
self.weather_json = json.loads(response)
self.weather_text.SetLabel("Weather is %s with a temperature of %d"
% (self.weather_json['weather'][0]['main'].lower(),
self.weather_json['main']['temp']))
request.close()
img_source = urllib2.urlopen(
'http://maps.googleapis.com/maps/api/staticmap?'+
'&zoom=11&size=600x200&sensor=false&markers='
+str(coordinates[0])+','+str(coordinates[1]))
data = img_source.read()
img_source.close()
img = wx.ImageFromStream(StringIO(data))
bm = wx.BitmapFromImage((img))
self.bitmap.SetBitmap(bm)
w, h = self.GetClientSize()
self.SetSize((w+50,h+50))
try:
self.celcius=int(self.weather_json['main']['temp'])
except KeyError:
pass
try:
self.icon=(self.weathernames[self.weather_json
['weather'][0]['main']])
except KeyError:
self.icon='Clear'
try:
self.description=self.weather_json['weather'][0]['main']
except KeyError:
pass
try:
self.humidity=self.weather_json['main']['humidity']
except KeyError:
pass
try:
self.country=self.results.country
except KeyError:
pass
try:
self.placename=(str(self.results.street_number)
+' '+self.results.route)
except (TypeError,KeyError):
self.placename=''
try:
self.adminarea=self.results.administrative_area_level_1
except KeyError:
pass
try:
self.locality=self.results.locality
except KeyError:
pass
def submit(self,evt=None):
"""Closes the dialog if user has already searched, else search and then
close the dialog.
Location.submit() -> None
"""
if self.searched:
self.Close()
else:
self.button_click()
self.Close()
def main():
a = wx.App(0)
f = Location(None)
f.Show()
a.MainLoop()
if __name__ == '__main__':
main()
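# Illustrative sketch (hypothetical coordinates): get_weather() above queries
# OpenWeatherMap with a URL of the form
#   http://api.openweathermap.org/data/2.5/weather?lat=<lat>&lon=<lon>&units=metric
# so for coordinates (-27.47, 153.03) the request would be
#   http://api.openweathermap.org/data/2.5/weather?lat=-27.47&lon=153.03&units=metric
# and the temperature and conditions are read from the 'main' and 'weather'
# fields of the JSON response.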
|
gpl-3.0
|
IllusionRom-deprecated/android_platform_external_ceres-solver
|
docs/source/conf.py
|
8
|
7844
|
# -*- coding: utf-8 -*-
#
# Ceres Solver documentation build configuration file, created by
# sphinx-quickstart on Sun Jan 20 20:34:07 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Ceres Solver'
copyright = u'2013, Google Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.7'
# The full version, including alpha/beta/rc tags.
release = '1.7.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'armstrong'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_themes",]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Ceres Solver"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_domain_indices = True
# If false, no index is generated.
html_use_index = True
# If true, the index is split into individual pages for each letter.
html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'CeresSolverdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CeresSolver.tex', u'Ceres Solver',
u'Sameer Agarwal \\& Keir Mierle', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ceressolver', u'Ceres Solver',
[u'Sameer Agarwal & Keir Mierle'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'CeresSolver', u'Ceres Solver',
u'Sameer Agarwal & Keir Mierle', 'CeresSolver', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
bsd-3-clause
|
Jaxkr/TruthBot.org
|
Truthbot/news/migrations/0001_initial.py
|
1
|
3654
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-06 00:21
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.TextField()),
('score', models.IntegerField(default=0)),
('timestamp', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='CommentReply',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.TextField()),
('timestamp', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
('comment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='news.Comment')),
],
options={
'ordering': ['timestamp'],
},
),
migrations.CreateModel(
name='CommentVote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='news.Comment')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('link', models.CharField(max_length=2083)),
('title', models.CharField(max_length=350)),
('timestamp', models.DateTimeField(default=django.utils.timezone.now)),
('score', models.IntegerField(default=0)),
('slug', models.SlugField(blank=True, unique=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='PostVote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='news.Post')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='commentreply',
name='post',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='news.Post'),
),
migrations.AddField(
model_name='comment',
name='post',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='news.Post'),
),
]
|
gpl-2.0
|
bdacode/hoster
|
hoster/mediafire_com.py
|
1
|
5195
|
# -*- coding: utf-8 -*-
"""Copyright (C) 2013 COLDWELL AG
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import time
import base64
import hashlib
from bs4 import BeautifulSoup
from ... import hoster
# fix for HTTPS TLSv1 connection
import ssl
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.poolmanager import PoolManager
@hoster.host
class this:
model = hoster.HttpPremiumHoster
name = 'mediafire.com'
patterns = [
hoster.Matcher('https?', '*.mediafire.com', "!/download/<id>/<name>"),
hoster.Matcher('https?', '*.mediafire.com', "!/download/<id>"),
hoster.Matcher('https?', '*.mediafire.com', r'/(file/|(view/?|download\.php)?\?)(?P<id>\w{11}|\w{15})($|/)'),
hoster.Matcher('https?', '*.mediafire.com', _query_string=r'^(?P<id>(\w{11}|\w{15}))$'),
]
url_template = 'http://www.mediafire.com/file/{id}'
def on_check(file):
name, size = get_file_infos(file)
print name, size
file.set_infos(name=name, size=size)
def get_file_infos(file):
id = file.pmatch.id
resp = file.account.get("http://www.mediafire.com/api/file/get_info.php", params={"quick_key": id})
name = re.search(r"<filename>(.*?)</filename>", resp.text).group(1)
size = re.search(r"<size>(.*?)</size>", resp.text).group(1)
return name, int(size)
def on_download_premium(chunk):
id = chunk.file.pmatch.id
resp = chunk.account.get("http://www.mediafire.com/?{}".format(id), allow_redirects=False)
if "Enter Password" in resp.text and 'display:block;">This file is' in resp.text:
raise NotImplementedError()
password = input.password(file=chunk.file)
if not password:
chunk.password_aborted()
password = password['password']
url = re.search(r'kNO = "(http://.*?)"', resp.text)
if url:
url = url.group(1)
if not url:
if resp.status_code == 302 and resp.headers['Location']:
url = resp.headers['location']
if not url:
resp = chunk.account.get("http://www.mediafire.com/dynamic/dlget.php", params={"qk": id})
url = re.search('dllink":"(http:.*?)"', resp.text)
if url:
url = url.group(1)
if not url:
chunk.no_download_link()
return url
def on_download_free(chunk):
resp = chunk.account.get(chunk.file.url, allow_redirects=False)
if resp.status_code == 302 and resp.headers['Location']:
return resp.headers['Location']
raise NotImplementedError()
class MyHTTPSAdapter(HTTPAdapter):
def init_poolmanager(self, connections, maxsize, block):
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, ssl_version=ssl.PROTOCOL_TLSv1, block=block)
def on_initialize_account(self):
self.APP_ID = 27112
self.APP_KEY = "czQ1cDd5NWE3OTl2ZGNsZmpkd3Q1eXZhNHcxdzE4c2Zlbmt2djdudw=="
self.token = None
self.browser.mount('https://', MyHTTPSAdapter())
resp = self.get("https://www.mediafire.com/")
if self.username is None:
return
s = BeautifulSoup(resp.text)
form = s.select('#form_login1')
url, form = hoster.serialize_html_form(form[0])
url = hoster.urljoin("https://www.mediafire.com/", url)
form['login_email'] = self.username
form['login_pass'] = self.password
form['login_remember'] = "on"
resp = self.post(url, data=form, referer="https://www.mediafire.com/")
if not self.browser.cookies['user']:
self.login_failed()
sig = hashlib.sha1()
sig.update(self.username)
sig.update(self.password)
sig.update(str(self.APP_ID))
sig.update(base64.b64decode(self.APP_KEY))
sig = sig.hexdigest()
params = {
"email": self.username,
"password": self.password,
"application_id": self.APP_ID,
"signature": sig,
"version": 1}
resp = self.get("https://www.mediafire.com/api/user/get_session_token.php", params=params)
m = re.search(r"<session_token>(.*?)</session_token>", resp.text)
if not m:
self.fatal('error getting session token')
self.token = m.group(1)
resp = self.get("https://www.mediafire.com/myaccount/billinghistory.php")
m = re.search(r'<div class="lg-txt">(\d+/\d+/\d+)</div> <div>', resp.text)
if m:
self.expires = m.group(1)
self.premium = self.expires > time.time() and True or False
if self.premium:
resp = self.get("https://www.mediafire.com/myaccount.php")
m = re.search(r'View Statistics.*?class="lg-txt">(.*?)</div', resp.text)
if m:
self.traffic = m.group(1)
else:
self.traffic = None
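def _signature_sketch(email, password, app_id, app_key_b64):
    # Illustrative sketch (hypothetical helper, not part of the plugin): the
    # session-token signature built in on_initialize_account() above is the
    # SHA1 of email + password + application id + the base64-decoded app key.
    sig = hashlib.sha1()
    sig.update(email)
    sig.update(password)
    sig.update(str(app_id))
    sig.update(base64.b64decode(app_key_b64))
    return sig.hexdigest()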
|
gpl-3.0
|
conejoninja/xbmc-seriesly
|
servers/rapidshare.py
|
1
|
1655
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# seriesly - XBMC Plugin
# Conector para rapidshare
# http://blog.tvalacarta.info/plugin-xbmc/seriesly/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[rapidshare.py] get_video_url(page_url='%s')" % page_url)
video_urls = []
return video_urls
# Finds videos for this server in the given text
def find_videos(data):
encontrados = set()
devuelve = []
# https://rapidshare.com/files/3346009389/_BiW__Last_Exile_Ginyoku_no_Fam_-_Episodio_09__A68583B1_.mkv
# "https://rapidshare.com/files/3346009389/_BiW__Last_Exile_Ginyoku_no_Fam_-_Episodio_09__A68583B1_.mkv"
# http://rapidshare.com/files/2327495081/Camino.Sangriento.4.HDR.Proper.200Ro.dri.part5.rar
# https://rapidshare.com/files/715435909/Salmon.Fishing.in.the.Yemen.2012.720p.UNSOLOCLIC.INFO.mkv
patronvideos = '(rapidshare.com/files/[0-9]+/.*?)["|<]'
logger.info("[rapidshare.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data+'"')
for match in matches:
titulo = "[rapidshare]"
url = "http://"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'rapidshare' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
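# Illustrative sketch (hypothetical input, not part of the connector): given a
# page containing
#   <a href="https://rapidshare.com/files/123456789/example.part1.rar">...</a>
# find_videos() above would return
#   [['[rapidshare]', 'http://rapidshare.com/files/123456789/example.part1.rar', 'rapidshare']]
# and any repeated occurrence of the same link is reported only once.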
|
gpl-3.0
|
2013Commons/HUE-SHARK
|
build/env/lib/python2.7/site-packages/Markdown-2.0.3-py2.7.egg/markdown/extensions/footnotes.py
|
107
|
10628
|
"""
========================= FOOTNOTES =================================
This section adds footnote handling to markdown. It can be used as
an example for extending python-markdown with relatively complex
functionality. While in this case the extension is included inside
the module itself, it could just as easily be added from outside the
module. Note that all markdown classes above are ignorant about
footnotes. All footnote functionality is provided separately and
then added to the markdown instance at the run time.
Footnote functionality is attached by calling extendMarkdown()
method of FootnoteExtension. The method also registers the
extension to allow its state to be reset by a call to the reset()
method.
Example:
Footnotes[^1] have a label[^label] and a definition[^!DEF].
[^1]: This is a footnote
[^label]: A footnote on "label"
[^!DEF]: The footnote for definition
"""
import re, markdown
from markdown import etree
FN_BACKLINK_TEXT = "zz1337820767766393qq"
NBSP_PLACEHOLDER = "qq3936677670287331zz"
DEF_RE = re.compile(r'(\ ?\ ?\ ?)\[\^([^\]]*)\]:\s*(.*)')
TABBED_RE = re.compile(r'((\t)|( ))(.*)')
class FootnoteExtension(markdown.Extension):
""" Footnote Extension. """
def __init__ (self, configs):
""" Setup configs. """
self.config = {'PLACE_MARKER':
["///Footnotes Go Here///",
"The text string that marks where the footnotes go"],
'UNIQUE_IDS':
[False,
"Avoid name collisions across "
"multiple calls to reset()."]}
for key, value in configs:
self.config[key][0] = value
# In multiple invocations, emit links that don't get tangled.
self.unique_prefix = 0
self.reset()
def extendMarkdown(self, md, md_globals):
""" Add pieces to Markdown. """
md.registerExtension(self)
self.parser = md.parser
# Insert a preprocessor before ReferencePreprocessor
md.preprocessors.add("footnote", FootnotePreprocessor(self),
"<reference")
# Insert an inline pattern before ImageReferencePattern
FOOTNOTE_RE = r'\[\^([^\]]*)\]' # blah blah [^1] blah
md.inlinePatterns.add("footnote", FootnotePattern(FOOTNOTE_RE, self),
"<reference")
# Insert a tree-processor that would actually add the footnote div
# This must be before the inline treeprocessor so inline patterns
# run on the contents of the div.
md.treeprocessors.add("footnote", FootnoteTreeprocessor(self),
"<inline")
        # Insert a postprocessor after the amp_substitute postprocessor
md.postprocessors.add("footnote", FootnotePostprocessor(self),
">amp_substitute")
def reset(self):
""" Clear the footnotes on reset, and prepare for a distinct document. """
self.footnotes = markdown.odict.OrderedDict()
self.unique_prefix += 1
def findFootnotesPlaceholder(self, root):
""" Return ElementTree Element that contains Footnote placeholder. """
def finder(element):
for child in element:
if child.text:
if child.text.find(self.getConfig("PLACE_MARKER")) > -1:
return child, True
if child.tail:
if child.tail.find(self.getConfig("PLACE_MARKER")) > -1:
return (child, element), False
finder(child)
return None
res = finder(root)
return res
def setFootnote(self, id, text):
""" Store a footnote for later retrieval. """
self.footnotes[id] = text
def makeFootnoteId(self, id):
""" Return footnote link id. """
if self.getConfig("UNIQUE_IDS"):
return 'fn:%d-%s' % (self.unique_prefix, id)
else:
return 'fn:%s' % id
def makeFootnoteRefId(self, id):
""" Return footnote back-link id. """
if self.getConfig("UNIQUE_IDS"):
return 'fnref:%d-%s' % (self.unique_prefix, id)
else:
return 'fnref:%s' % id
def makeFootnotesDiv(self, root):
""" Return div of footnotes as et Element. """
if not self.footnotes.keys():
return None
div = etree.Element("div")
div.set('class', 'footnote')
hr = etree.SubElement(div, "hr")
ol = etree.SubElement(div, "ol")
for id in self.footnotes.keys():
li = etree.SubElement(ol, "li")
li.set("id", self.makeFootnoteId(id))
self.parser.parseChunk(li, self.footnotes[id])
backlink = etree.Element("a")
backlink.set("href", "#" + self.makeFootnoteRefId(id))
backlink.set("rev", "footnote")
backlink.set("title", "Jump back to footnote %d in the text" % \
(self.footnotes.index(id)+1))
backlink.text = FN_BACKLINK_TEXT
if li.getchildren():
node = li[-1]
if node.tag == "p":
node.text = node.text + NBSP_PLACEHOLDER
node.append(backlink)
else:
p = etree.SubElement(li, "p")
p.append(backlink)
return div
class FootnotePreprocessor(markdown.preprocessors.Preprocessor):
""" Find all footnote references and store for later use. """
def __init__ (self, footnotes):
self.footnotes = footnotes
def run(self, lines):
lines = self._handleFootnoteDefinitions(lines)
text = "\n".join(lines)
return text.split("\n")
def _handleFootnoteDefinitions(self, lines):
"""
Recursively find all footnote definitions in lines.
Keywords:
* lines: A list of lines of text
Return: A list of lines with footnote definitions removed.
"""
i, id, footnote = self._findFootnoteDefinition(lines)
if id :
plain = lines[:i]
detabbed, theRest = self.detectTabbed(lines[i+1:])
self.footnotes.setFootnote(id,
footnote + "\n"
+ "\n".join(detabbed))
more_plain = self._handleFootnoteDefinitions(theRest)
return plain + [""] + more_plain
else :
return lines
def _findFootnoteDefinition(self, lines):
"""
Find the parts of a footnote definition.
Keywords:
* lines: A list of lines of text.
Return: A three item tuple containing the index of the first line of a
footnote definition, the id of the definition and the body of the
definition.
"""
counter = 0
for line in lines:
m = DEF_RE.match(line)
if m:
return counter, m.group(2), m.group(3)
counter += 1
return counter, None, None
def detectTabbed(self, lines):
""" Find indented text and remove indent before further proccesing.
Keyword arguments:
* lines: an array of strings
Returns: a list of post processed items and the unused
remainder of the original list
"""
items = []
item = -1
i = 0 # to keep track of where we are
def detab(line):
match = TABBED_RE.match(line)
if match:
return match.group(4)
for line in lines:
if line.strip(): # Non-blank line
line = detab(line)
if line:
items.append(line)
i += 1
continue
else:
return items, lines[i:]
else: # Blank line: _maybe_ we are done.
i += 1 # advance
# Find the next non-blank line
for j in range(i, len(lines)):
if lines[j].strip():
next_line = lines[j]; break
else:
break # There is no more text; we are done.
# Check if the next non-blank line is tabbed
if detab(next_line): # Yes, more work to do.
items.append("")
continue
else:
break # No, we are done.
else:
i += 1
return items, lines[i:]
class FootnotePattern(markdown.inlinepatterns.Pattern):
""" InlinePattern for footnote markers in a document's body text. """
def __init__(self, pattern, footnotes):
markdown.inlinepatterns.Pattern.__init__(self, pattern)
self.footnotes = footnotes
def handleMatch(self, m):
sup = etree.Element("sup")
a = etree.SubElement(sup, "a")
id = m.group(2)
sup.set('id', self.footnotes.makeFootnoteRefId(id))
a.set('href', '#' + self.footnotes.makeFootnoteId(id))
a.set('rel', 'footnote')
a.text = str(self.footnotes.footnotes.index(id) + 1)
return sup
class FootnoteTreeprocessor(markdown.treeprocessors.Treeprocessor):
""" Build and append footnote div to end of document. """
def __init__ (self, footnotes):
self.footnotes = footnotes
def run(self, root):
footnotesDiv = self.footnotes.makeFootnotesDiv(root)
if footnotesDiv:
result = self.footnotes.findFootnotesPlaceholder(root)
if result:
node, isText = result
if isText:
node.text = None
node.getchildren().insert(0, footnotesDiv)
else:
child, element = node
                    ind = element.getchildren().index(child)
element.getchildren().insert(ind + 1, footnotesDiv)
child.tail = None
else:
root.append(footnotesDiv)
class FootnotePostprocessor(markdown.postprocessors.Postprocessor):
""" Replace placeholders with html entities. """
def run(self, text):
text = text.replace(FN_BACKLINK_TEXT, "↩")
return text.replace(NBSP_PLACEHOLDER, " ")
def makeExtension(configs=[]):
""" Return an instance of the FootnoteExtension """
return FootnoteExtension(configs=configs)
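# Illustrative usage sketch (not part of the extension itself): the footnote
# syntax shown in the module docstring can be exercised roughly like this,
# assuming the standard markdown entry point of this release:
#
#   import markdown
#   md = markdown.Markdown(extensions=['footnotes'])
#   html = md.convert('Footnotes[^1] work.\n\n[^1]: This is a footnote')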
|
apache-2.0
|
rickerc/nova_audit
|
nova/virt/xenapi/vm_utils.py
|
6
|
92118
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2011 Piston Cloud Computing, Inc.
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper methods for operations related to the management of VM records and
their attributes like VDIs, VIFs, as well as their lookup functions.
"""
import contextlib
import os
import re
import time
import urllib
import urlparse
import uuid
from xml.parsers import expat
from eventlet import greenthread
from oslo.config import cfg
from nova.api.metadata import base as instance_metadata
from nova import block_device
from nova.compute import flavors
from nova.compute import power_state
from nova.compute import task_states
from nova import exception
from nova.network import model as network_model
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
from nova.openstack.common import processutils
from nova.openstack.common import strutils
from nova.openstack.common import timeutils
from nova.openstack.common import xmlutils
from nova import utils
from nova.virt import configdrive
from nova.virt.disk import api as disk
from nova.virt.disk.vfs import localfs as vfsimpl
from nova.virt.xenapi import agent
from nova.virt.xenapi.image import utils as image_utils
from nova.virt.xenapi import volume_utils
LOG = logging.getLogger(__name__)
xenapi_vm_utils_opts = [
cfg.StrOpt('cache_images',
default='all',
help='Cache glance images locally. `all` will cache all'
' images, `some` will only cache images that have the'
' image_property `cache_in_nova=True`, and `none` turns'
' off caching entirely'),
cfg.IntOpt('xenapi_image_compression_level',
help='Compression level for images, e.g., 9 for gzip -9.'
' Range is 1-9, 9 being most compressed but most CPU'
' intensive on dom0.'),
cfg.StrOpt('default_os_type',
default='linux',
help='Default OS type'),
cfg.IntOpt('block_device_creation_timeout',
default=10,
help='Time to wait for a block device to be created'),
cfg.IntOpt('max_kernel_ramdisk_size',
default=16 * 1024 * 1024,
help='Maximum size in bytes of kernel or ramdisk images'),
cfg.StrOpt('sr_matching_filter',
default='default-sr:true',
help='Filter for finding the SR to be used to install guest '
'instances on. To use the Local Storage in default '
'XenServer/XCP installations set this flag to '
'other-config:i18n-key=local-storage. To select an SR '
'with a different matching criteria, you could set it to '
'other-config:my_favorite_sr=true. On the other hand, to '
'fall back on the Default SR, as displayed by XenCenter, '
'set this flag to: default-sr:true'),
cfg.BoolOpt('xenapi_sparse_copy',
default=True,
help='Whether to use sparse_copy for copying data on a '
'resize down (False will use standard dd). This speeds '
'up resizes down considerably since large runs of zeros '
'won\'t have to be rsynced'),
cfg.IntOpt('xenapi_num_vbd_unplug_retries',
default=10,
help='Maximum number of retries to unplug VBD'),
cfg.StrOpt('xenapi_torrent_images',
default='none',
help='Whether or not to download images via Bit Torrent '
'(all|some|none).'),
cfg.StrOpt('xenapi_ipxe_network_name',
help='Name of network to use for booting iPXE ISOs'),
cfg.StrOpt('xenapi_ipxe_boot_menu_url',
help='URL to the iPXE boot menu'),
cfg.StrOpt('xenapi_ipxe_mkisofs_cmd',
default='mkisofs',
help='Name and optionally path of the tool used for '
'ISO image creation'),
]
CONF = cfg.CONF
CONF.register_opts(xenapi_vm_utils_opts)
CONF.import_opt('default_ephemeral_format', 'nova.virt.driver')
CONF.import_opt('use_cow_images', 'nova.virt.driver')
CONF.import_opt('glance_num_retries', 'nova.image.glance')
CONF.import_opt('use_ipv6', 'nova.netconf')
XENAPI_POWER_STATE = {
'Halted': power_state.SHUTDOWN,
'Running': power_state.RUNNING,
'Paused': power_state.PAUSED,
'Suspended': power_state.SUSPENDED,
'Crashed': power_state.CRASHED}
SECTOR_SIZE = 512
MBR_SIZE_SECTORS = 63
MBR_SIZE_BYTES = MBR_SIZE_SECTORS * SECTOR_SIZE
KERNEL_DIR = '/boot/guest'
MAX_VDI_CHAIN_SIZE = 16
PROGRESS_INTERVAL_SECONDS = 300
# Fudge factor to allow for the VHD chain to be slightly larger than
# the partitioned space. Otherwise, legitimate images near their
# maximum allowed size can fail on build with InstanceDiskTypeTooSmall.
VHD_SIZE_CHECK_FUDGE_FACTOR_GB = 10
class ImageType(object):
"""Enumeration class for distinguishing different image types
| 0 - kernel image (goes on dom0's filesystem)
| 1 - ramdisk image (goes on dom0's filesystem)
| 2 - disk image (local SR, partitioned by objectstore plugin)
| 3 - raw disk image (local SR, NOT partitioned by plugin)
| 4 - vhd disk image (local SR, NOT inspected by XS, PV assumed for
| linux, HVM assumed for Windows)
| 5 - ISO disk image (local SR, NOT partitioned by plugin)
| 6 - config drive
"""
KERNEL = 0
RAMDISK = 1
DISK = 2
DISK_RAW = 3
DISK_VHD = 4
DISK_ISO = 5
DISK_CONFIGDRIVE = 6
_ids = (KERNEL, RAMDISK, DISK, DISK_RAW, DISK_VHD, DISK_ISO,
DISK_CONFIGDRIVE)
KERNEL_STR = "kernel"
RAMDISK_STR = "ramdisk"
DISK_STR = "root"
DISK_RAW_STR = "os_raw"
DISK_VHD_STR = "vhd"
DISK_ISO_STR = "iso"
DISK_CONFIGDRIVE_STR = "configdrive"
_strs = (KERNEL_STR, RAMDISK_STR, DISK_STR, DISK_RAW_STR, DISK_VHD_STR,
DISK_ISO_STR, DISK_CONFIGDRIVE_STR)
@classmethod
def to_string(cls, image_type):
return dict(zip(cls._ids, ImageType._strs)).get(image_type)
@classmethod
def get_role(cls, image_type_id):
"""Get the role played by the image, based on its type."""
return {
cls.KERNEL: 'kernel',
cls.RAMDISK: 'ramdisk',
cls.DISK: 'root',
cls.DISK_RAW: 'root',
cls.DISK_VHD: 'root',
cls.DISK_ISO: 'iso',
cls.DISK_CONFIGDRIVE: 'configdrive'
}.get(image_type_id)
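# Illustrative sketch (not part of the original module): the two classmethods
# above map numeric image type ids to their string form and role, e.g.
#   ImageType.to_string(ImageType.DISK_VHD) -> 'vhd'
#   ImageType.get_role(ImageType.DISK_ISO)  -> 'iso'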
def create_vm(session, instance, name_label, kernel, ramdisk,
use_pv_kernel=False):
"""Create a VM record. Returns new VM reference.
the use_pv_kernel flag indicates whether the guest is HVM or PV
There are 3 scenarios:
1. Using paravirtualization, kernel passed in
2. Using paravirtualization, kernel within the image
3. Using hardware virtualization
"""
instance_type = flavors.extract_flavor(instance)
mem = str(long(instance_type['memory_mb']) * 1024 * 1024)
vcpus = str(instance_type['vcpus'])
vcpu_weight = instance_type['vcpu_weight']
vcpu_params = {}
if vcpu_weight is not None:
# NOTE(johngarbutt) bug in XenServer 6.1 and 6.2 means
# we need to specify both weight and cap for either to apply
vcpu_params = {"weight": str(vcpu_weight), "cap": "0"}
rec = {
'actions_after_crash': 'destroy',
'actions_after_reboot': 'restart',
'actions_after_shutdown': 'destroy',
'affinity': '',
'blocked_operations': {},
'ha_always_run': False,
'ha_restart_priority': '',
'HVM_boot_params': {},
'HVM_boot_policy': '',
'is_a_template': False,
'memory_dynamic_min': mem,
'memory_dynamic_max': mem,
'memory_static_min': '0',
'memory_static_max': mem,
'memory_target': mem,
'name_description': '',
'name_label': name_label,
'other_config': {'nova_uuid': str(instance['uuid'])},
'PCI_bus': '',
'platform': {'acpi': 'true', 'apic': 'true', 'pae': 'true',
'viridian': 'true', 'timeoffset': '0'},
'PV_args': '',
'PV_bootloader': '',
'PV_bootloader_args': '',
'PV_kernel': '',
'PV_legacy_args': '',
'PV_ramdisk': '',
'recommendations': '',
'tags': [],
'user_version': '0',
'VCPUs_at_startup': vcpus,
'VCPUs_max': vcpus,
'VCPUs_params': vcpu_params,
'xenstore_data': {'allowvssprovider': 'false'}}
# Complete VM configuration record according to the image type
# non-raw/raw with PV kernel/raw in HVM mode
if use_pv_kernel:
rec['platform']['nx'] = 'false'
if instance['kernel_id']:
# 1. Kernel explicitly passed in, use that
rec['PV_args'] = 'root=/dev/xvda1'
rec['PV_kernel'] = kernel
rec['PV_ramdisk'] = ramdisk
else:
# 2. Use kernel within the image
rec['PV_bootloader'] = 'pygrub'
else:
# 3. Using hardware virtualization
rec['platform']['nx'] = 'true'
rec['HVM_boot_params'] = {'order': 'dc'}
rec['HVM_boot_policy'] = 'BIOS order'
vm_ref = session.call_xenapi('VM.create', rec)
LOG.debug(_('Created VM'), instance=instance)
return vm_ref
def destroy_vm(session, instance, vm_ref):
"""Destroys a VM record."""
try:
session.call_xenapi('VM.destroy', vm_ref)
except session.XenAPI.Failure as exc:
LOG.exception(exc)
return
LOG.debug(_("VM destroyed"), instance=instance)
def clean_shutdown_vm(session, instance, vm_ref):
if is_vm_shutdown(session, vm_ref):
LOG.warn(_("VM already halted, skipping shutdown..."),
instance=instance)
return True
LOG.debug(_("Shutting down VM (cleanly)"), instance=instance)
try:
session.call_xenapi('VM.clean_shutdown', vm_ref)
except session.XenAPI.Failure as exc:
LOG.exception(exc)
return False
return True
def hard_shutdown_vm(session, instance, vm_ref):
if is_vm_shutdown(session, vm_ref):
LOG.warn(_("VM already halted, skipping shutdown..."),
instance=instance)
return True
LOG.debug(_("Shutting down VM (hard)"), instance=instance)
try:
session.call_xenapi('VM.hard_shutdown', vm_ref)
except session.XenAPI.Failure as exc:
LOG.exception(exc)
return False
return True
def is_vm_shutdown(session, vm_ref):
vm_rec = session.call_xenapi("VM.get_record", vm_ref)
state = compile_info(vm_rec)['state']
if state == power_state.SHUTDOWN:
return True
return False
def is_enough_free_mem(session, instance):
instance_type = flavors.extract_flavor(instance)
mem = long(instance_type['memory_mb']) * 1024 * 1024
host = session.get_xenapi_host()
host_free_mem = long(session.call_xenapi("host.compute_free_memory",
host))
return host_free_mem >= mem
def find_vbd_by_number(session, vm_ref, number):
"""Get the VBD reference from the device number."""
vbd_refs = session.call_xenapi("VM.get_VBDs", vm_ref)
if vbd_refs:
for vbd_ref in vbd_refs:
try:
vbd_rec = session.call_xenapi("VBD.get_record", vbd_ref)
if vbd_rec['userdevice'] == str(number):
return vbd_ref
except session.XenAPI.Failure as exc:
LOG.exception(exc)
raise volume_utils.StorageError(
_('VBD not found in instance %s') % vm_ref)
def unplug_vbd(session, vbd_ref):
"""Unplug VBD from VM."""
# Call VBD.unplug on the given VBD, with a retry if we get
# DEVICE_DETACH_REJECTED. For reasons which we don't understand,
# we're seeing the device still in use, even when all processes
# using the device should be dead.
max_attempts = CONF.xenapi_num_vbd_unplug_retries + 1
for num_attempt in xrange(1, max_attempts + 1):
try:
session.call_xenapi('VBD.unplug', vbd_ref)
return
except session.XenAPI.Failure as exc:
err = len(exc.details) > 0 and exc.details[0]
if err == 'DEVICE_ALREADY_DETACHED':
LOG.info(_('VBD %s already detached'), vbd_ref)
return
elif err == 'DEVICE_DETACH_REJECTED':
LOG.info(_('VBD %(vbd_ref)s detach rejected, attempt'
' %(num_attempt)d/%(max_attempts)d'),
{'vbd_ref': vbd_ref, 'num_attempt': num_attempt,
'max_attempts': max_attempts})
else:
LOG.exception(exc)
raise volume_utils.StorageError(
_('Unable to unplug VBD %s') % vbd_ref)
greenthread.sleep(1)
raise volume_utils.StorageError(
_('Reached maximum number of retries trying to unplug VBD %s')
% vbd_ref)
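# Illustrative note (not part of the original module): with the default
# xenapi_num_vbd_unplug_retries of 10, the loop in unplug_vbd() above makes up
# to 11 VBD.unplug attempts, sleeping one second after each
# DEVICE_DETACH_REJECTED failure before retrying.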
def destroy_vbd(session, vbd_ref):
"""Destroy VBD from host database."""
try:
session.call_xenapi('VBD.destroy', vbd_ref)
except session.XenAPI.Failure as exc:
LOG.exception(exc)
raise volume_utils.StorageError(
_('Unable to destroy VBD %s') % vbd_ref)
def create_vbd(session, vm_ref, vdi_ref, userdevice, vbd_type='disk',
read_only=False, bootable=False, osvol=False,
empty=False, unpluggable=True):
"""Create a VBD record and returns its reference."""
vbd_rec = {}
vbd_rec['VM'] = vm_ref
    if vdi_ref is None:
vdi_ref = 'OpaqueRef:NULL'
vbd_rec['VDI'] = vdi_ref
vbd_rec['userdevice'] = str(userdevice)
vbd_rec['bootable'] = bootable
vbd_rec['mode'] = read_only and 'RO' or 'RW'
vbd_rec['type'] = vbd_type
vbd_rec['unpluggable'] = unpluggable
vbd_rec['empty'] = empty
vbd_rec['other_config'] = {}
vbd_rec['qos_algorithm_type'] = ''
vbd_rec['qos_algorithm_params'] = {}
vbd_rec['qos_supported_algorithms'] = []
LOG.debug(_('Creating %(vbd_type)s-type VBD for VM %(vm_ref)s,'
' VDI %(vdi_ref)s ... '),
{'vbd_type': vbd_type, 'vm_ref': vm_ref, 'vdi_ref': vdi_ref})
vbd_ref = session.call_xenapi('VBD.create', vbd_rec)
LOG.debug(_('Created VBD %(vbd_ref)s for VM %(vm_ref)s,'
' VDI %(vdi_ref)s.'),
{'vbd_ref': vbd_ref, 'vm_ref': vm_ref, 'vdi_ref': vdi_ref})
if osvol:
# set osvol=True in other-config to indicate this is an
# attached nova (or cinder) volume
session.call_xenapi('VBD.add_to_other_config',
vbd_ref, 'osvol', 'True')
return vbd_ref
def attach_cd(session, vm_ref, vdi_ref, userdevice):
"""Create an empty VBD, then insert the CD."""
vbd_ref = create_vbd(session, vm_ref, None, userdevice,
vbd_type='cd', read_only=True,
bootable=True, empty=True,
unpluggable=False)
session.call_xenapi('VBD.insert', vbd_ref, vdi_ref)
return vbd_ref
def destroy_vdi(session, vdi_ref):
try:
session.call_xenapi('VDI.destroy', vdi_ref)
except session.XenAPI.Failure as exc:
LOG.exception(exc)
raise volume_utils.StorageError(
_('Unable to destroy VDI %s') % vdi_ref)
def safe_destroy_vdis(session, vdi_refs):
"""Destroys the requested VDIs, logging any StorageError exceptions."""
for vdi_ref in vdi_refs:
try:
destroy_vdi(session, vdi_ref)
except volume_utils.StorageError as exc:
LOG.error(exc)
def create_vdi(session, sr_ref, instance, name_label, disk_type, virtual_size,
read_only=False):
"""Create a VDI record and returns its reference."""
vdi_ref = session.call_xenapi("VDI.create",
{'name_label': name_label,
'name_description': disk_type,
'SR': sr_ref,
'virtual_size': str(virtual_size),
'type': 'User',
'sharable': False,
'read_only': read_only,
'xenstore_data': {},
'other_config': _get_vdi_other_config(disk_type, instance=instance),
'sm_config': {},
'tags': []})
LOG.debug(_('Created VDI %(vdi_ref)s (%(name_label)s,'
' %(virtual_size)s, %(read_only)s) on %(sr_ref)s.'),
{'vdi_ref': vdi_ref, 'name_label': name_label,
'virtual_size': virtual_size, 'read_only': read_only,
'sr_ref': sr_ref})
return vdi_ref
def get_vdi_uuid_for_volume(session, connection_data):
sr_uuid, label, sr_params = volume_utils.parse_sr_info(connection_data)
sr_ref = volume_utils.find_sr_by_uuid(session, sr_uuid)
if not sr_ref:
sr_ref = volume_utils.introduce_sr(session, sr_uuid, label, sr_params)
if sr_ref is None:
raise exception.NovaException(_('SR not present and could not be '
'introduced'))
vdi_uuid = None
if 'vdi_uuid' in connection_data:
_scan_sr(session, sr_ref)
vdi_uuid = connection_data['vdi_uuid']
else:
try:
vdi_ref = volume_utils.introduce_vdi(session, sr_ref)
vdi_rec = session.call_xenapi("VDI.get_record", vdi_ref)
vdi_uuid = vdi_rec['uuid']
except volume_utils.StorageError as exc:
LOG.exception(exc)
volume_utils.forget_sr(session, sr_ref)
return vdi_uuid
def get_vdis_for_instance(context, session, instance, name_label, image,
image_type, block_device_info=None):
vdis = {}
if block_device_info:
LOG.debug(_("block device info: %s"), block_device_info)
root_device_name = block_device_info['root_device_name']
for bdm in block_device_info['block_device_mapping']:
if (block_device.strip_prefix(bdm['mount_device']) ==
block_device.strip_prefix(root_device_name)):
# If we're a root-device, record that fact so we don't download
# a root image via Glance
type_ = 'root'
else:
# Otherwise, use mount_device as `type_` so that we have easy
# access to it in _attach_disks to create the VBD
type_ = bdm['mount_device']
connection_data = bdm['connection_info']['data']
vdi_uuid = get_vdi_uuid_for_volume(session, connection_data)
if vdi_uuid:
vdis[type_] = dict(uuid=vdi_uuid, file=None, osvol=True)
# If we didn't get a root VDI from volumes, then use the Glance image as
# the root device
if 'root' not in vdis:
create_image_vdis = _create_image(
context, session, instance, name_label, image, image_type)
vdis.update(create_image_vdis)
# Just get the VDI ref once
for vdi in vdis.itervalues():
vdi['ref'] = session.call_xenapi('VDI.get_by_uuid', vdi['uuid'])
return vdis
@contextlib.contextmanager
def _dummy_vm(session, instance, vdi_ref):
"""This creates a temporary VM so that we can snapshot a VDI.
    VDIs can't be snapshotted directly since the API expects a `vm_ref`. To
work around this, we need to create a temporary VM and then map the VDI to
the VM using a temporary VBD.
"""
name_label = "dummy"
vm_ref = create_vm(session, instance, name_label, None, None)
try:
vbd_ref = create_vbd(session, vm_ref, vdi_ref, 'autodetect',
read_only=True)
try:
yield vm_ref
finally:
try:
destroy_vbd(session, vbd_ref)
except volume_utils.StorageError:
# destroy_vbd() will log error
pass
finally:
destroy_vm(session, instance, vm_ref)
def _safe_copy_vdi(session, sr_ref, instance, vdi_to_copy_ref):
"""Copy a VDI and return the new VDIs reference.
This function differs from the XenAPI `VDI.copy` call in that the copy is
atomic and isolated, meaning we don't see half-downloaded images. It
    accomplishes this by copying the VDIs into a temporary directory and then
atomically renaming them into the SR when the copy is completed.
The correct long term solution is to fix `VDI.copy` so that it is atomic
and isolated.
"""
with _dummy_vm(session, instance, vdi_to_copy_ref) as vm_ref:
label = "snapshot"
with snapshot_attached_here(
session, instance, vm_ref, label) as vdi_uuids:
imported_vhds = session.call_plugin_serialized(
'workarounds', 'safe_copy_vdis',
sr_path=get_sr_path(session, sr_ref=sr_ref),
vdi_uuids=vdi_uuids, uuid_stack=_make_uuid_stack())
root_uuid = imported_vhds['root']['uuid']
# rescan to discover new VHDs
scan_default_sr(session)
vdi_ref = session.call_xenapi('VDI.get_by_uuid', root_uuid)
return vdi_ref
def _clone_vdi(session, vdi_to_clone_ref):
"""Clones a VDI and return the new VDIs reference."""
vdi_ref = session.call_xenapi('VDI.clone', vdi_to_clone_ref)
LOG.debug(_('Cloned VDI %(vdi_ref)s from VDI '
'%(vdi_to_clone_ref)s'),
{'vdi_ref': vdi_ref, 'vdi_to_clone_ref': vdi_to_clone_ref})
return vdi_ref
def _get_vdi_other_config(disk_type, instance=None):
"""Return metadata to store in VDI's other_config attribute.
`nova_instance_uuid` is used to associate a VDI with a particular instance
so that, if it becomes orphaned from an unclean shutdown of a
compute-worker, we can safely detach it.
"""
other_config = {'nova_disk_type': disk_type}
# create_vdi may be called simply while creating a volume
# hence information about instance may or may not be present
if instance:
other_config['nova_instance_uuid'] = instance['uuid']
return other_config
def _set_vdi_info(session, vdi_ref, vdi_type, name_label, description,
instance):
vdi_rec = session.call_xenapi('VDI.get_record', vdi_ref)
session.call_xenapi('VDI.set_name_label', vdi_ref, name_label)
session.call_xenapi('VDI.set_name_description', vdi_ref, description)
other_config = _get_vdi_other_config(vdi_type, instance=instance)
for key, value in other_config.iteritems():
if key not in vdi_rec['other_config']:
session.call_xenapi(
"VDI.add_to_other_config", vdi_ref, key, value)
def get_vdi_for_vm_safely(session, vm_ref):
"""Retrieves the primary VDI for a VM."""
vbd_refs = session.call_xenapi("VM.get_VBDs", vm_ref)
for vbd in vbd_refs:
vbd_rec = session.call_xenapi("VBD.get_record", vbd)
# Convention dictates the primary VDI will be userdevice 0
if vbd_rec['userdevice'] == '0':
vdi_rec = session.call_xenapi("VDI.get_record", vbd_rec['VDI'])
return vbd_rec['VDI'], vdi_rec
raise exception.NovaException(_("No primary VDI found for %s") % vm_ref)
@contextlib.contextmanager
def snapshot_attached_here(session, instance, vm_ref, label, *args):
    """Snapshot the root disk only. Return a list of uuids for the vhds
    in the chain.
    """
    update_task_state = None
    if len(args) == 1:
        update_task_state = args[0]
LOG.debug(_("Starting snapshot for VM"), instance=instance)
# Memorize the original_parent_uuid so we can poll for coalesce
vm_vdi_ref, vm_vdi_rec = get_vdi_for_vm_safely(session, vm_ref)
original_parent_uuid = _get_vhd_parent_uuid(session, vm_vdi_ref)
sr_ref = vm_vdi_rec["SR"]
snapshot_ref = session.call_xenapi("VDI.snapshot", vm_vdi_ref, {})
if update_task_state is not None:
update_task_state(task_state=task_states.IMAGE_PENDING_UPLOAD)
try:
snapshot_rec = session.call_xenapi("VDI.get_record", snapshot_ref)
_wait_for_vhd_coalesce(session, instance, sr_ref, vm_vdi_ref,
original_parent_uuid)
vdi_uuids = [vdi_rec['uuid'] for vdi_rec in
_walk_vdi_chain(session, snapshot_rec['uuid'])]
yield vdi_uuids
finally:
safe_destroy_vdis(session, [snapshot_ref])
def get_sr_path(session, sr_ref=None):
"""Return the path to our storage repository
This is used when we're dealing with VHDs directly, either by taking
snapshots or by restoring an image in the DISK_VHD format.
"""
if sr_ref is None:
sr_ref = safe_find_sr(session)
host_ref = session.get_xenapi_host()
pbd_rec = session.call_xenapi("PBD.get_all_records_where",
'field "host"="%s" and '
'field "SR"="%s"' % (host_ref, sr_ref))
# NOTE(bobball): There can only be one PBD for a host/SR pair, but path is
# not always present - older versions of XS do not set it.
pbd_ref = pbd_rec.keys()[0]
device_config = pbd_rec[pbd_ref]['device_config']
if 'path' in device_config:
return device_config['path']
sr_rec = session.call_xenapi("SR.get_record", sr_ref)
sr_uuid = sr_rec["uuid"]
if sr_rec["type"] not in ["ext", "nfs"]:
raise exception.NovaException(
_("Only file-based SRs (ext/NFS) are supported by this feature."
" SR %(uuid)s is of type %(type)s") %
{"uuid": sr_uuid, "type": sr_rec["type"]})
return os.path.join(CONF.xenapi_sr_base_path, sr_uuid)
def destroy_cached_images(session, sr_ref, all_cached=False, dry_run=False):
"""Destroy used or unused cached images.
A cached image that is being used by at least one VM is said to be 'used'.
In the case of an 'unused' image, the cached image will be the only
    descendant of the base-copy. So when we delete the cached-image, the
refcount will drop to zero and XenServer will automatically destroy the
base-copy for us.
The default behavior of this function is to destroy only 'unused' cached
images. To destroy all cached images, use the `all_cached=True` kwarg.
"""
cached_images = _find_cached_images(session, sr_ref)
destroyed = set()
def destroy_cached_vdi(vdi_uuid, vdi_ref):
LOG.debug(_("Destroying cached VDI '%(vdi_uuid)s'"))
if not dry_run:
destroy_vdi(session, vdi_ref)
destroyed.add(vdi_uuid)
for vdi_ref in cached_images.values():
vdi_uuid = session.call_xenapi('VDI.get_uuid', vdi_ref)
if all_cached:
destroy_cached_vdi(vdi_uuid, vdi_ref)
continue
# Unused-Only: Search for siblings
# Chain length greater than two implies a VM must be holding a ref to
# the base-copy (otherwise it would have coalesced), so consider this
# cached image used.
chain = list(_walk_vdi_chain(session, vdi_uuid))
if len(chain) > 2:
continue
elif len(chain) == 2:
# Siblings imply cached image is used
root_vdi_rec = chain[-1]
children = _child_vhds(session, sr_ref, root_vdi_rec['uuid'])
if len(children) > 1:
continue
destroy_cached_vdi(vdi_uuid, vdi_ref)
return destroyed
def _find_cached_images(session, sr_ref):
"""Return a dict(uuid=vdi_ref) representing all cached images."""
cached_images = {}
for vdi_ref, vdi_rec in _get_all_vdis_in_sr(session, sr_ref):
try:
image_id = vdi_rec['other_config']['image-id']
except KeyError:
continue
cached_images[image_id] = vdi_ref
return cached_images
def _find_cached_image(session, image_id, sr_ref):
"""Returns the vdi-ref of the cached image."""
cached_images = _find_cached_images(session, sr_ref)
return cached_images.get(image_id)
def resize_disk(session, instance, vdi_ref, instance_type):
size_gb = instance_type['root_gb']
if size_gb == 0:
reason = _("Can't resize a disk to 0 GB.")
raise exception.ResizeError(reason=reason)
# Copy VDI over to something we can resize
# NOTE(jerdfelt): Would be nice to just set vdi_ref to read/write
sr_ref = safe_find_sr(session)
copy_ref = session.call_xenapi('VDI.copy', vdi_ref, sr_ref)
try:
# Resize partition and filesystem down
_auto_configure_disk(session, copy_ref, size_gb)
# Create new VDI
vdi_size = size_gb * 1024 * 1024 * 1024
# NOTE(johannes): No resizing allowed for rescue instances, so
# using instance['name'] is safe here
new_ref = create_vdi(session, sr_ref, instance, instance['name'],
'root', vdi_size)
new_uuid = session.call_xenapi('VDI.get_uuid', new_ref)
# Manually copy contents over
virtual_size = size_gb * 1024 * 1024 * 1024
_copy_partition(session, copy_ref, new_ref, 1, virtual_size)
return new_ref, new_uuid
finally:
destroy_vdi(session, copy_ref)
def _auto_configure_disk(session, vdi_ref, new_gb):
"""Partition and resize FS to match the size specified by
flavors.root_gb.
This is a fail-safe to prevent accidentally destroying data on a disk
erroneously marked as auto_disk_config=True.
The criteria for allowing resize are:
1. 'auto_disk_config' must be true for the instance (and image).
(If we've made it here, then auto_disk_config=True.)
2. The disk must have only one partition.
3. The file-system on the one partition must be ext3 or ext4.
"""
if new_gb == 0:
LOG.debug(_("Skipping auto_config_disk as destination size is 0GB"))
return
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
partitions = _get_partitions(dev)
if len(partitions) != 1:
reason = _('Disk must have only one partition.')
raise exception.CannotResizeDisk(reason=reason)
_num, start, old_sectors, ptype = partitions[0]
if ptype in ('ext3', 'ext4'):
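            # Convert the requested size from GB to sectors of SECTOR_SIZE bytes.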
new_sectors = new_gb * 1024 * 1024 * 1024 / SECTOR_SIZE
_resize_part_and_fs(dev, start, old_sectors, new_sectors)
else:
reason = _('Disk contains a filesystem '
'we are unable to resize: %s')
raise exception.CannotResizeDisk(reason=(reason % ptype))
def try_auto_configure_disk(session, vdi_ref, new_gb):
try:
_auto_configure_disk(session, vdi_ref, new_gb)
except exception.CannotResizeDisk as e:
msg = _('Attempted auto_configure_disk failed because: %s')
LOG.warn(msg % e)
def _make_partition(session, dev, partition_start, partition_end):
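    """Write an msdos label and a single primary partition to dev,
    returning the partition's device path.
    """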
dev_path = utils.make_dev_path(dev)
# NOTE(bobball) If this runs in Dom0, parted will error trying
# to re-read the partition table and return a generic error
utils.execute('parted', '--script', dev_path,
'mklabel', 'msdos', run_as_root=True,
check_exit_code=not session.is_local_connection)
utils.execute('parted', '--script', dev_path,
'mkpart', 'primary',
partition_start,
partition_end,
run_as_root=True,
check_exit_code=not session.is_local_connection)
partition_path = utils.make_dev_path(dev, partition=1)
if session.is_local_connection:
# Need to refresh the partitions
utils.trycmd('kpartx', '-a', dev_path,
run_as_root=True,
discard_warnings=True)
# Sometimes the partition gets created under /dev/mapper, depending
# on the setup in dom0.
mapper_path = '/dev/mapper/%s' % os.path.basename(partition_path)
if os.path.exists(mapper_path):
return mapper_path
return partition_path
def _generate_disk(session, instance, vm_ref, userdevice, name_label,
disk_type, size_mb, fs_type):
"""
Steps to programmatically generate a disk:
1. Create VDI of desired size
2. Attach VDI to compute worker
3. Create partition
4. Create VBD between instance VM and VDI
"""
# 1. Create VDI
sr_ref = safe_find_sr(session)
ONE_MEG = 1024 * 1024
virtual_size = size_mb * ONE_MEG
vdi_ref = create_vdi(session, sr_ref, instance, name_label, disk_type,
virtual_size)
try:
# 2. Attach VDI to compute worker (VBD hotplug)
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
# 3. Create partition
partition_start = 0
partition_end = size_mb
partition_path = _make_partition(session, dev,
"%d" % partition_start,
"%d" % partition_end)
if fs_type == 'linux-swap':
utils.execute('mkswap', partition_path, run_as_root=True)
elif fs_type is not None:
utils.execute('mkfs', '-t', fs_type, partition_path,
run_as_root=True)
# 4. Create VBD between instance VM and VDI
create_vbd(session, vm_ref, vdi_ref, userdevice, bootable=False)
except Exception:
with excutils.save_and_reraise_exception():
destroy_vdi(session, vdi_ref)
return vdi_ref
def generate_swap(session, instance, vm_ref, userdevice, name_label, swap_mb):
# NOTE(jk0): We use a FAT32 filesystem for the Windows swap
# partition because that is what parted supports.
is_windows = instance['os_type'] == "windows"
fs_type = "vfat" if is_windows else "linux-swap"
_generate_disk(session, instance, vm_ref, userdevice, name_label,
'swap', swap_mb, fs_type)
def generate_ephemeral(session, instance, vm_ref, first_userdevice,
initial_name_label, total_size_gb):
# NOTE(johngarbutt): max possible size of a VHD disk is 2043GB
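    # Split the request into chunks below that limit: 1024GB pieces when the
    # total divides evenly, otherwise 2000GB pieces.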
if total_size_gb % 1024 == 0:
max_size_gb = 1024
else:
max_size_gb = 2000
left_to_allocate = total_size_gb
first_userdevice = int(first_userdevice)
userdevice = first_userdevice
name_label = initial_name_label
vdi_refs = []
try:
while left_to_allocate > 0:
size_gb = min(max_size_gb, left_to_allocate)
ref = _generate_disk(session, instance, vm_ref, str(userdevice),
name_label, 'ephemeral', size_gb * 1024,
CONF.default_ephemeral_format)
vdi_refs.append(ref)
left_to_allocate -= size_gb
userdevice += 1
label_number = userdevice - first_userdevice
name_label = "%s (%d)" % (initial_name_label, label_number)
except Exception as exc:
with excutils.save_and_reraise_exception():
LOG.debug(_("Error when generating ephemeral disk. "
"Device: %(userdevice)s Size GB: %(size_gb)s "
"Error: %(exc)s"), {
                            'userdevice': userdevice,
                            'size_gb': size_gb,
                            'exc': exc})
safe_destroy_vdis(session, vdi_refs)
def generate_iso_blank_root_disk(session, instance, vm_ref, userdevice,
name_label, size_gb):
_generate_disk(session, instance, vm_ref, userdevice, name_label,
'user', size_gb * 1024, CONF.default_ephemeral_format)
def generate_configdrive(session, instance, vm_ref, userdevice,
admin_password=None, files=None):
sr_ref = safe_find_sr(session)
vdi_ref = create_vdi(session, sr_ref, instance, 'config-2',
'configdrive', configdrive.CONFIGDRIVESIZE_BYTES)
try:
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
extra_md = {}
if admin_password:
extra_md['admin_pass'] = admin_password
inst_md = instance_metadata.InstanceMetadata(instance,
content=files,
extra_md=extra_md)
with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
with utils.tempdir() as tmp_path:
tmp_file = os.path.join(tmp_path, 'configdrive')
cdb.make_drive(tmp_file)
dev_path = utils.make_dev_path(dev)
utils.execute('dd',
'if=%s' % tmp_file,
'of=%s' % dev_path,
run_as_root=True)
create_vbd(session, vm_ref, vdi_ref, userdevice, bootable=False,
read_only=True)
except Exception:
with excutils.save_and_reraise_exception():
destroy_vdi(session, vdi_ref)
def _create_kernel_image(context, session, instance, name_label, image_id,
image_type):
"""Creates kernel/ramdisk file from the image stored in the cache.
If the image is not present in the cache, it streams it from glance.
Returns: A list of dictionaries that describe VDIs
"""
filename = ""
if CONF.cache_images:
args = {}
args['cached-image'] = image_id
args['new-image-uuid'] = str(uuid.uuid4())
filename = session.call_plugin('kernel', 'create_kernel_ramdisk', args)
if filename == "":
return _fetch_disk_image(context, session, instance, name_label,
image_id, image_type)
else:
vdi_type = ImageType.to_string(image_type)
return {vdi_type: dict(uuid=None, file=filename)}
def create_kernel_and_ramdisk(context, session, instance, name_label):
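    """Fetch (or reuse cached) kernel and ramdisk images for the instance
    and return their dom0 file paths as (kernel_file, ramdisk_file);
    either may be None.
    """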
kernel_file = None
ramdisk_file = None
if instance['kernel_id']:
vdis = _create_kernel_image(context, session,
instance, name_label, instance['kernel_id'],
ImageType.KERNEL)
kernel_file = vdis['kernel'].get('file')
if instance['ramdisk_id']:
vdis = _create_kernel_image(context, session,
instance, name_label, instance['ramdisk_id'],
ImageType.RAMDISK)
ramdisk_file = vdis['ramdisk'].get('file')
return kernel_file, ramdisk_file
def destroy_kernel_ramdisk(session, instance, kernel, ramdisk):
args = {}
if kernel:
args['kernel-file'] = kernel
if ramdisk:
args['ramdisk-file'] = ramdisk
if args:
LOG.debug(_("Removing kernel/ramdisk files from dom0"),
instance=instance)
session.call_plugin('kernel', 'remove_kernel_ramdisk', args)
def _create_cached_image(context, session, instance, name_label,
image_id, image_type):
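    """Fetch the image into the SR cache if it is not already there, then
    clone or copy the cached VDI for this instance's use.
    """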
sr_ref = safe_find_sr(session)
sr_type = session.call_xenapi('SR.get_record', sr_ref)["type"]
vdis = {}
if CONF.use_cow_images and sr_type != "ext":
LOG.warning(_("Fast cloning is only supported on default local SR "
"of type ext. SR on this system was found to be of "
"type %s. Ignoring the cow flag."), sr_type)
cache_vdi_ref = _find_cached_image(session, image_id, sr_ref)
if cache_vdi_ref is None:
vdis = _fetch_image(context, session, instance, name_label,
image_id, image_type)
cache_vdi_ref = session.call_xenapi(
'VDI.get_by_uuid', vdis['root']['uuid'])
session.call_xenapi('VDI.set_name_label', cache_vdi_ref,
'Glance Image %s' % image_id)
session.call_xenapi('VDI.set_name_description', cache_vdi_ref, 'root')
session.call_xenapi('VDI.add_to_other_config',
cache_vdi_ref, 'image-id', str(image_id))
if CONF.use_cow_images and sr_type == 'ext':
new_vdi_ref = _clone_vdi(session, cache_vdi_ref)
elif sr_type == 'ext':
new_vdi_ref = _safe_copy_vdi(session, sr_ref, instance, cache_vdi_ref)
else:
new_vdi_ref = session.call_xenapi("VDI.copy", cache_vdi_ref, sr_ref)
session.call_xenapi('VDI.remove_from_other_config',
new_vdi_ref, 'image-id')
vdi_type = ImageType.get_role(image_type)
vdi_uuid = session.call_xenapi('VDI.get_uuid', new_vdi_ref)
vdis[vdi_type] = dict(uuid=vdi_uuid, file=None)
return vdis
def _create_image(context, session, instance, name_label, image_id,
image_type):
"""Creates VDI from the image stored in the local cache. If the image
is not present in the cache, it streams it from glance.
Returns: A list of dictionaries that describe VDIs
"""
cache_images = CONF.cache_images.lower()
    # Determine if the image is cacheable
if image_type == ImageType.DISK_ISO:
cache = False
elif cache_images == 'all':
cache = True
elif cache_images == 'some':
sys_meta = utils.instance_sys_meta(instance)
try:
cache = strutils.bool_from_string(sys_meta['image_cache_in_nova'])
except KeyError:
cache = False
elif cache_images == 'none':
cache = False
else:
LOG.warning(_("Unrecognized cache_images value '%s', defaulting to"
" True"), CONF.cache_images)
cache = True
# Fetch (and cache) the image
if cache:
vdis = _create_cached_image(context, session, instance, name_label,
image_id, image_type)
else:
vdis = _fetch_image(context, session, instance, name_label,
image_id, image_type)
for vdi_type, vdi in vdis.iteritems():
vdi_ref = session.call_xenapi('VDI.get_by_uuid', vdi['uuid'])
_set_vdi_info(session, vdi_ref, vdi_type, name_label, vdi_type,
instance)
return vdis
def _fetch_image(context, session, instance, name_label, image_id, image_type):
"""Fetch image from glance based on image type.
Returns: A single filename if image_type is KERNEL or RAMDISK
A list of dictionaries that describe VDIs, otherwise
"""
if image_type == ImageType.DISK_VHD:
vdis = _fetch_vhd_image(context, session, instance, image_id)
else:
vdis = _fetch_disk_image(context, session, instance, name_label,
image_id, image_type)
for vdi_type, vdi in vdis.iteritems():
vdi_uuid = vdi['uuid']
LOG.debug(_("Fetched VDIs of type '%(vdi_type)s' with UUID"
" '%(vdi_uuid)s'"),
{'vdi_type': vdi_type, 'vdi_uuid': vdi_uuid},
instance=instance)
return vdis
def _make_uuid_stack():
# NOTE(sirp): The XenAPI plugins run under Python 2.4
# which does not have the `uuid` module. To work around this,
# we generate the uuids here (under Python 2.6+) and
# pass them as arguments
return [str(uuid.uuid4()) for i in xrange(MAX_VDI_CHAIN_SIZE)]
def _image_uses_bittorrent(context, instance):
bittorrent = False
xenapi_torrent_images = CONF.xenapi_torrent_images.lower()
if xenapi_torrent_images == 'all':
bittorrent = True
elif xenapi_torrent_images == 'some':
sys_meta = utils.instance_sys_meta(instance)
try:
bittorrent = strutils.bool_from_string(
sys_meta['image_bittorrent'])
except KeyError:
pass
elif xenapi_torrent_images == 'none':
pass
else:
LOG.warning(_("Invalid value '%s' for xenapi_torrent_images"),
xenapi_torrent_images)
return bittorrent
def _default_download_handler():
# TODO(sirp): This should be configurable like upload_handler
return importutils.import_object(
'nova.virt.xenapi.image.glance.GlanceStore')
def _choose_download_handler(context, instance):
if _image_uses_bittorrent(context, instance):
return importutils.import_object(
'nova.virt.xenapi.image.bittorrent.BittorrentStore')
else:
return _default_download_handler()
def get_compression_level():
level = CONF.xenapi_image_compression_level
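    # Only levels 1 (fastest) through 9 (best compression) are treated as
    # valid; anything else is ignored.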
if level is not None and (level < 1 or level > 9):
LOG.warn(_("Invalid value '%d' for xenapi_image_compression_level"),
level)
return None
return level
def _fetch_vhd_image(context, session, instance, image_id):
"""Tell glance to download an image and put the VHDs into the SR
Returns: A list of dictionaries that describe VDIs
"""
LOG.debug(_("Asking xapi to fetch vhd image %s"), image_id,
instance=instance)
handler = _choose_download_handler(context, instance)
try:
vdis = handler.download_image(context, session, instance, image_id)
except Exception as e:
default_handler = _default_download_handler()
# Using type() instead of isinstance() so instance of subclass doesn't
# test as equivalent
if type(handler) == type(default_handler):
raise
LOG.exception(_("Download handler '%(handler)s' raised an"
" exception, falling back to default handler"
" '%(default_handler)s'") %
{'handler': handler,
'default_handler': default_handler})
vdis = default_handler.download_image(
context, session, instance, image_id)
# Ensure we can see the import VHDs as VDIs
scan_default_sr(session)
try:
_check_vdi_size(context, session, instance, vdis['root']['uuid'])
except Exception:
with excutils.save_and_reraise_exception():
for key in vdis:
vdi = vdis[key]
vdi_uuid = vdi['uuid']
vdi_ref = session.call_xenapi('VDI.get_by_uuid', vdi_uuid)
destroy_vdi(session, vdi_ref)
return vdis
def _get_vdi_chain_size(session, vdi_uuid):
"""Compute the total size of a VDI chain, starting with the specified
VDI UUID.
    This will walk the VDI chain to the root, adding the size of each VDI
    into the total.
"""
size_bytes = 0
for vdi_rec in _walk_vdi_chain(session, vdi_uuid):
cur_vdi_uuid = vdi_rec['uuid']
vdi_size_bytes = int(vdi_rec['physical_utilisation'])
LOG.debug(_('vdi_uuid=%(cur_vdi_uuid)s vdi_size_bytes='
'%(vdi_size_bytes)d'),
{'cur_vdi_uuid': cur_vdi_uuid,
'vdi_size_bytes': vdi_size_bytes})
size_bytes += vdi_size_bytes
return size_bytes
def _check_vdi_size(context, session, instance, vdi_uuid):
instance_type = flavors.extract_flavor(instance)
allowed_size = (instance_type['root_gb'] +
VHD_SIZE_CHECK_FUDGE_FACTOR_GB) * (1024 ** 3)
if not instance_type['root_gb']:
# root_gb=0 indicates that we're disabling size checks
return
size = _get_vdi_chain_size(session, vdi_uuid)
if size > allowed_size:
LOG.error(_("Image size %(size)d exceeded instance_type "
"allowed size %(allowed_size)d"),
{'size': size, 'allowed_size': allowed_size},
instance=instance)
raise exception.InstanceTypeDiskTooSmall()
def _fetch_disk_image(context, session, instance, name_label, image_id,
image_type):
"""Fetch the image from Glance
NOTE:
Unlike _fetch_vhd_image, this method does not use the Glance
plugin; instead, it streams the disks through domU to the VDI
directly.
    Returns: A single filename if image_type is KERNEL or RAMDISK
A list of dictionaries that describe VDIs, otherwise
"""
# FIXME(sirp): Since the Glance plugin seems to be required for the
# VHD disk, it may be worth using the plugin for both VHD and RAW and
# DISK restores
image_type_str = ImageType.to_string(image_type)
LOG.debug(_("Fetching image %(image_id)s, type %(image_type_str)s"),
{'image_id': image_id, 'image_type_str': image_type_str},
instance=instance)
if image_type == ImageType.DISK_ISO:
sr_ref = _safe_find_iso_sr(session)
else:
sr_ref = safe_find_sr(session)
glance_image = image_utils.GlanceImage(context, image_id)
if glance_image.is_raw_tgz():
image = image_utils.RawTGZImage(glance_image)
else:
image = image_utils.RawImage(glance_image)
virtual_size = image.get_size()
vdi_size = virtual_size
LOG.debug(_("Size for image %(image_id)s: %(virtual_size)d"),
{'image_id': image_id, 'virtual_size': virtual_size},
instance=instance)
if image_type == ImageType.DISK:
# Make room for MBR.
vdi_size += MBR_SIZE_BYTES
elif (image_type in (ImageType.KERNEL, ImageType.RAMDISK) and
vdi_size > CONF.max_kernel_ramdisk_size):
max_size = CONF.max_kernel_ramdisk_size
raise exception.NovaException(
_("Kernel/Ramdisk image is too large: %(vdi_size)d bytes, "
"max %(max_size)d bytes") %
{'vdi_size': vdi_size, 'max_size': max_size})
vdi_ref = create_vdi(session, sr_ref, instance, name_label,
image_type_str, vdi_size)
# From this point we have a VDI on Xen host;
# If anything goes wrong, we need to remember its uuid.
try:
filename = None
vdi_uuid = session.call_xenapi("VDI.get_uuid", vdi_ref)
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
_stream_disk(
session, image.stream_to, image_type, virtual_size, dev)
if image_type in (ImageType.KERNEL, ImageType.RAMDISK):
# We need to invoke a plugin for copying the
# content of the VDI into the proper path.
LOG.debug(_("Copying VDI %s to /boot/guest on dom0"),
vdi_ref, instance=instance)
args = {}
args['vdi-ref'] = vdi_ref
# Let the plugin copy the correct number of bytes.
args['image-size'] = str(vdi_size)
if CONF.cache_images:
args['cached-image'] = image_id
filename = session.call_plugin('kernel', 'copy_vdi', args)
# Remove the VDI as it is not needed anymore.
destroy_vdi(session, vdi_ref)
LOG.debug(_("Kernel/Ramdisk VDI %s destroyed"), vdi_ref,
instance=instance)
vdi_role = ImageType.get_role(image_type)
return {vdi_role: dict(uuid=None, file=filename)}
else:
vdi_role = ImageType.get_role(image_type)
return {vdi_role: dict(uuid=vdi_uuid, file=None)}
except (session.XenAPI.Failure, IOError, OSError) as e:
# We look for XenAPI and OS failures.
LOG.exception(_("Failed to fetch glance image"),
instance=instance)
e.args = e.args + ([dict(type=ImageType.to_string(image_type),
uuid=vdi_uuid,
file=filename)],)
raise
def determine_disk_image_type(image_meta):
"""Disk Image Types are used to determine where the kernel will reside
within an image. To figure out which type we're dealing with, we use
the following rules:
    1. If we're using Glance, we can use the disk_format field to
       determine the image_type
2. If we're not using Glance, then we need to deduce this based on
whether a kernel_id is specified.
"""
if not image_meta:
return None
disk_format = image_meta['disk_format']
disk_format_map = {
'ami': ImageType.DISK,
'aki': ImageType.KERNEL,
'ari': ImageType.RAMDISK,
'raw': ImageType.DISK_RAW,
'vhd': ImageType.DISK_VHD,
'iso': ImageType.DISK_ISO,
}
try:
image_type = disk_format_map[disk_format]
except KeyError:
raise exception.InvalidDiskFormat(disk_format=disk_format)
image_ref = image_meta.get('id')
params = {
'image_type_str': ImageType.to_string(image_type),
'image_ref': image_ref
}
LOG.debug(_("Detected %(image_type_str)s format for image %(image_ref)s"),
params)
return image_type
def determine_is_pv(session, vdi_ref, disk_image_type, os_type):
"""
Determine whether the VM will use a paravirtualized kernel or if it
will use hardware virtualization.
1. Glance (VHD): if `os_type` is windows, HVM, otherwise PV
2. Glance (DISK_RAW): HVM
3. Glance (DISK): PV
4. Glance (DISK_ISO): HVM
5. Boot From Volume - without image metadata (None): use HVM
NOTE: if disk_image_type is not specified, instances launched
from remote volumes will have to include kernel and ramdisk
because external kernel and ramdisk will not be fetched.
"""
LOG.debug(_("Looking up vdi %s for PV kernel"), vdi_ref)
if disk_image_type == ImageType.DISK_VHD:
# 1. VHD
if os_type == 'windows':
is_pv = False
else:
is_pv = True
elif disk_image_type == ImageType.DISK_RAW:
# 2. RAW
is_pv = False
elif disk_image_type == ImageType.DISK:
# 3. Disk
is_pv = True
elif disk_image_type == ImageType.DISK_ISO:
# 4. ISO
is_pv = False
elif not disk_image_type:
is_pv = False
else:
raise exception.NovaException(_("Unknown image format %s") %
disk_image_type)
return is_pv
def set_vm_name_label(session, vm_ref, name_label):
session.call_xenapi("VM.set_name_label", vm_ref, name_label)
def list_vms(session):
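    """Yield (vm_ref, vm_rec) for guest VMs resident on this host,
    skipping templates and the control domain.
    """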
for vm_ref, vm_rec in session.get_all_refs_and_recs('VM'):
if (vm_rec["resident_on"] != session.get_xenapi_host() or
vm_rec["is_a_template"] or vm_rec["is_control_domain"]):
continue
else:
yield vm_ref, vm_rec
def lookup_vm_vdis(session, vm_ref):
"""Look for the VDIs that are attached to the VM."""
# Firstly we get the VBDs, then the VDIs.
# TODO(Armando): do we leave the read-only devices?
vbd_refs = session.call_xenapi("VM.get_VBDs", vm_ref)
vdi_refs = []
if vbd_refs:
for vbd_ref in vbd_refs:
try:
vdi_ref = session.call_xenapi("VBD.get_VDI", vbd_ref)
# Test valid VDI
record = session.call_xenapi("VDI.get_record", vdi_ref)
LOG.debug(_('VDI %s is still available'), record['uuid'])
vbd_other_config = session.call_xenapi("VBD.get_other_config",
vbd_ref)
if not vbd_other_config.get('osvol'):
# This is not an attached volume
vdi_refs.append(vdi_ref)
except session.XenAPI.Failure as exc:
LOG.exception(exc)
return vdi_refs
def lookup(session, name_label, check_rescue=False):
"""Look the instance up and return it if available.
:param check_rescue: if True will return the 'name'-rescue vm if it
exists, instead of just 'name'
"""
if check_rescue:
result = lookup(session, name_label + '-rescue', False)
if result:
return result
vm_refs = session.call_xenapi("VM.get_by_name_label", name_label)
n = len(vm_refs)
if n == 0:
return None
elif n > 1:
raise exception.InstanceExists(name=name_label)
else:
return vm_refs[0]
def preconfigure_instance(session, instance, vdi_ref, network_info):
"""Makes alterations to the image before launching as part of spawn.
"""
    # As mounting the image VDI is expensive, we only want to do it once,
# if at all, so determine whether it's required first, and then do
# everything
mount_required = False
key, net, metadata = _prepare_injectables(instance, network_info)
mount_required = key or net or metadata
if not mount_required:
return
with vdi_attached_here(session, vdi_ref, read_only=False) as dev:
_mounted_processing(dev, key, net, metadata)
def lookup_kernel_ramdisk(session, vm):
vm_rec = session.call_xenapi("VM.get_record", vm)
if 'PV_kernel' in vm_rec and 'PV_ramdisk' in vm_rec:
return (vm_rec['PV_kernel'], vm_rec['PV_ramdisk'])
else:
return (None, None)
def is_snapshot(session, vm):
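    """Return True if the VM record is both a template and a snapshot."""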
vm_rec = session.call_xenapi("VM.get_record", vm)
if 'is_a_template' in vm_rec and 'is_a_snapshot' in vm_rec:
return vm_rec['is_a_template'] and vm_rec['is_a_snapshot']
else:
return False
def compile_info(record):
"""Fill record with VM status information."""
return {'state': XENAPI_POWER_STATE[record['power_state']],
'max_mem': long(record['memory_static_max']) >> 10,
'mem': long(record['memory_dynamic_max']) >> 10,
'num_cpu': record['VCPUs_max'],
'cpu_time': 0}
def compile_diagnostics(record):
"""Compile VM diagnostics data."""
try:
keys = []
diags = {}
vm_uuid = record["uuid"]
xml = _get_rrd(_get_rrd_server(), vm_uuid)
if xml:
rrd = xmlutils.safe_minidom_parse_string(xml)
for i, node in enumerate(rrd.firstChild.childNodes):
# Provide the last update of the information
if node.localName == 'lastupdate':
diags['last_update'] = node.firstChild.data
# Create a list of the diagnostic keys (in their order)
if node.localName == 'ds':
ref = node.childNodes
# Name and Value
if len(ref) > 6:
keys.append(ref[0].firstChild.data)
# Read the last row of the first RRA to get the latest info
if node.localName == 'rra':
rows = node.childNodes[4].childNodes
last_row = rows[rows.length - 1].childNodes
for j, value in enumerate(last_row):
diags[keys[j]] = value.firstChild.data
break
return diags
except expat.ExpatError as e:
        LOG.exception(_('Unable to parse rrd: %s'), e)
return {"Unable to retrieve diagnostics": e}
def fetch_bandwidth(session):
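    """Fetch bandwidth usage data via the 'bandwidth' dom0 plugin."""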
bw = session.call_plugin_serialized('bandwidth', 'fetch_all_bandwidth')
return bw
def _scan_sr(session, sr_ref=None, max_attempts=4):
if sr_ref:
# NOTE(johngarbutt) xenapi will collapse any duplicate requests
# for SR.scan if there is already a scan in progress.
# However, we don't want that, because the scan may have started
# before we modified the underlying VHDs on disk through a plugin.
# Using our own mutex will reduce cases where our periodic SR scan
# in host.update_status starts racing the sr.scan after a plugin call.
@utils.synchronized('sr-scan-' + sr_ref)
def do_scan(sr_ref):
LOG.debug(_("Scanning SR %s"), sr_ref)
attempt = 1
while True:
try:
return session.call_xenapi('SR.scan', sr_ref)
except session.XenAPI.Failure as exc:
with excutils.save_and_reraise_exception() as ctxt:
if exc.details[0] == 'SR_BACKEND_FAILURE_40':
if attempt < max_attempts:
ctxt.reraise = False
LOG.warn(_("Retry SR scan due to error: %s")
% exc)
greenthread.sleep(2 ** attempt)
attempt += 1
do_scan(sr_ref)
def scan_default_sr(session):
"""Looks for the system default SR and triggers a re-scan."""
sr_ref = safe_find_sr(session)
_scan_sr(session, sr_ref)
return sr_ref
def safe_find_sr(session):
"""Same as _find_sr except raises a NotFound exception if SR cannot be
determined
"""
sr_ref = _find_sr(session)
if sr_ref is None:
raise exception.StorageRepositoryNotFound()
return sr_ref
def _find_sr(session):
"""Return the storage repository to hold VM images."""
host = session.get_xenapi_host()
try:
tokens = CONF.sr_matching_filter.split(':')
filter_criteria = tokens[0]
filter_pattern = tokens[1]
except IndexError:
# oops, flag is invalid
LOG.warning(_("Flag sr_matching_filter '%s' does not respect "
"formatting convention"), CONF.sr_matching_filter)
return None
if filter_criteria == 'other-config':
key, value = filter_pattern.split('=', 1)
for sr_ref, sr_rec in session.get_all_refs_and_recs('SR'):
if not (key in sr_rec['other_config'] and
sr_rec['other_config'][key] == value):
continue
for pbd_ref in sr_rec['PBDs']:
pbd_rec = session.get_rec('PBD', pbd_ref)
if pbd_rec and pbd_rec['host'] == host:
return sr_ref
elif filter_criteria == 'default-sr' and filter_pattern == 'true':
pool_ref = session.call_xenapi('pool.get_all')[0]
sr_ref = session.call_xenapi('pool.get_default_SR', pool_ref)
if sr_ref:
return sr_ref
# No SR found!
LOG.error(_("XenAPI is unable to find a Storage Repository to "
"install guest instances on. Please check your "
"configuration (e.g. set a default SR for the pool) "
"and/or configure the flag 'sr_matching_filter'."))
return None
def _safe_find_iso_sr(session):
"""Same as _find_iso_sr except raises a NotFound exception if SR
cannot be determined
"""
sr_ref = _find_iso_sr(session)
if sr_ref is None:
raise exception.NotFound(_('Cannot find SR of content-type ISO'))
return sr_ref
def _find_iso_sr(session):
"""Return the storage repository to hold ISO images."""
host = session.get_xenapi_host()
for sr_ref, sr_rec in session.get_all_refs_and_recs('SR'):
LOG.debug(_("ISO: looking at SR %s"), sr_rec)
if not sr_rec['content_type'] == 'iso':
LOG.debug(_("ISO: not iso content"))
continue
if 'i18n-key' not in sr_rec['other_config']:
LOG.debug(_("ISO: iso content_type, no 'i18n-key' key"))
continue
if not sr_rec['other_config']['i18n-key'] == 'local-storage-iso':
LOG.debug(_("ISO: iso content_type, i18n-key value not "
"'local-storage-iso'"))
continue
LOG.debug(_("ISO: SR MATCHing our criteria"))
for pbd_ref in sr_rec['PBDs']:
LOG.debug(_("ISO: ISO, looking to see if it is host local"))
pbd_rec = session.get_rec('PBD', pbd_ref)
if not pbd_rec:
LOG.debug(_("ISO: PBD %s disappeared"), pbd_ref)
continue
pbd_rec_host = pbd_rec['host']
LOG.debug(_("ISO: PBD matching, want %(pbd_rec)s, have %(host)s"),
{'pbd_rec': pbd_rec, 'host': host})
if pbd_rec_host == host:
LOG.debug(_("ISO: SR with local PBD"))
return sr_ref
return None
def _get_rrd_server():
"""Return server's scheme and address to use for retrieving RRD XMLs."""
xs_url = urlparse.urlparse(CONF.xenapi_connection_url)
return [xs_url.scheme, xs_url.netloc]
def _get_rrd(server, vm_uuid):
"""Return the VM RRD XML as a string."""
try:
xml = urllib.urlopen("%s://%s:%s@%s/vm_rrd?uuid=%s" % (
server[0],
CONF.xenapi_connection_username,
CONF.xenapi_connection_password,
server[1],
vm_uuid))
return xml.read()
except IOError:
LOG.exception(_('Unable to obtain RRD XML for VM %(vm_uuid)s with '
'server details: %(server)s.'),
{'vm_uuid': vm_uuid, 'server': server})
return None
def _get_all_vdis_in_sr(session, sr_ref):
for vdi_ref in session.call_xenapi('SR.get_VDIs', sr_ref):
try:
vdi_rec = session.call_xenapi('VDI.get_record', vdi_ref)
yield vdi_ref, vdi_rec
except session.XenAPI.Failure:
continue
def get_instance_vdis_for_sr(session, vm_ref, sr_ref):
"""Return opaqueRef for all the vdis which live on sr."""
for vbd_ref in session.call_xenapi('VM.get_VBDs', vm_ref):
try:
vdi_ref = session.call_xenapi('VBD.get_VDI', vbd_ref)
if sr_ref == session.call_xenapi('VDI.get_SR', vdi_ref):
yield vdi_ref
except session.XenAPI.Failure:
continue
def _get_vhd_parent_uuid(session, vdi_ref):
vdi_rec = session.call_xenapi("VDI.get_record", vdi_ref)
if 'vhd-parent' not in vdi_rec['sm_config']:
return None
parent_uuid = vdi_rec['sm_config']['vhd-parent']
vdi_uuid = vdi_rec['uuid']
LOG.debug(_('VHD %(vdi_uuid)s has parent %(parent_uuid)s'),
{'vdi_uuid': vdi_uuid, 'parent_uuid': parent_uuid})
return parent_uuid
def _walk_vdi_chain(session, vdi_uuid):
"""Yield vdi_recs for each element in a VDI chain."""
scan_default_sr(session)
while True:
vdi_ref = session.call_xenapi("VDI.get_by_uuid", vdi_uuid)
vdi_rec = session.call_xenapi("VDI.get_record", vdi_ref)
yield vdi_rec
parent_uuid = _get_vhd_parent_uuid(session, vdi_ref)
if not parent_uuid:
break
vdi_uuid = parent_uuid
def _child_vhds(session, sr_ref, vdi_uuid):
"""Return the immediate children of a given VHD.
This is not recursive, only the immediate children are returned.
"""
children = set()
for ref, rec in _get_all_vdis_in_sr(session, sr_ref):
rec_uuid = rec['uuid']
if rec_uuid == vdi_uuid:
continue
parent_uuid = _get_vhd_parent_uuid(session, ref)
if parent_uuid != vdi_uuid:
continue
children.add(rec_uuid)
return children
def _wait_for_vhd_coalesce(session, instance, sr_ref, vdi_ref,
original_parent_uuid):
"""Spin until the parent VHD is coalesced into its parent VHD
Before coalesce:
* original_parent_vhd
* parent_vhd
snapshot
After coalesce:
* parent_vhd
snapshot
"""
# NOTE(sirp): If we don't have an original_parent_uuid, then the snapshot
# doesn't have a grandparent to coalesce into, so we can skip waiting
if not original_parent_uuid:
return
def _another_child_vhd():
# Search for any other vdi which parents to original parent and is not
# in the active vm/instance vdi chain.
vdi_uuid = session.call_xenapi('VDI.get_record', vdi_ref)['uuid']
parent_vdi_uuid = _get_vhd_parent_uuid(session, vdi_ref)
for _ref, rec in _get_all_vdis_in_sr(session, sr_ref):
if ((rec['uuid'] != vdi_uuid) and
(rec['uuid'] != parent_vdi_uuid) and
(rec['sm_config'].get('vhd-parent') == original_parent_uuid)):
# Found another vhd which too parents to original parent.
return True
# Found no other vdi with the same parent.
return False
# Check if original parent has any other child. If so, coalesce will
# not take place.
if _another_child_vhd():
parent_uuid = _get_vhd_parent_uuid(session, vdi_ref)
parent_ref = session.call_xenapi("VDI.get_by_uuid", parent_uuid)
base_uuid = _get_vhd_parent_uuid(session, parent_ref)
return parent_uuid, base_uuid
max_attempts = CONF.xenapi_vhd_coalesce_max_attempts
for i in xrange(max_attempts):
# NOTE(sirp): This rescan is necessary to ensure the VM's `sm_config`
# matches the underlying VHDs.
_scan_sr(session, sr_ref)
parent_uuid = _get_vhd_parent_uuid(session, vdi_ref)
if parent_uuid and (parent_uuid != original_parent_uuid):
LOG.debug(_("Parent %(parent_uuid)s doesn't match original parent"
" %(original_parent_uuid)s, waiting for coalesce..."),
{'parent_uuid': parent_uuid,
'original_parent_uuid': original_parent_uuid},
instance=instance)
else:
parent_ref = session.call_xenapi("VDI.get_by_uuid", parent_uuid)
base_uuid = _get_vhd_parent_uuid(session, parent_ref)
return parent_uuid, base_uuid
greenthread.sleep(CONF.xenapi_vhd_coalesce_poll_interval)
msg = (_("VHD coalesce attempts exceeded (%d)"
", giving up...") % max_attempts)
raise exception.NovaException(msg)
def _remap_vbd_dev(dev):
"""Return the appropriate location for a plugged-in VBD device
Ubuntu Maverick moved xvd? -> sd?. This is considered a bug and will be
fixed in future versions:
https://bugs.launchpad.net/ubuntu/+source/linux/+bug/684875
For now, we work around it by just doing a string replace.
"""
# NOTE(sirp): This hack can go away when we pull support for Maverick
should_remap = CONF.xenapi_remap_vbd_dev
if not should_remap:
return dev
old_prefix = 'xvd'
new_prefix = CONF.xenapi_remap_vbd_dev_prefix
remapped_dev = dev.replace(old_prefix, new_prefix)
return remapped_dev
def _wait_for_device(dev):
"""Wait for device node to appear."""
for i in xrange(0, CONF.block_device_creation_timeout):
dev_path = utils.make_dev_path(dev)
if os.path.exists(dev_path):
return
time.sleep(1)
raise volume_utils.StorageError(
_('Timeout waiting for device %s to be created') % dev)
def cleanup_attached_vdis(session):
"""Unplug any instance VDIs left after an unclean restart."""
this_vm_ref = _get_this_vm_ref(session)
vbd_refs = session.call_xenapi('VM.get_VBDs', this_vm_ref)
for vbd_ref in vbd_refs:
try:
vbd_rec = session.call_xenapi('VBD.get_record', vbd_ref)
vdi_rec = session.call_xenapi('VDI.get_record', vbd_rec['VDI'])
except session.XenAPI.Failure as e:
if e.details[0] != 'HANDLE_INVALID':
raise
continue
if 'nova_instance_uuid' in vdi_rec['other_config']:
# Belongs to an instance and probably left over after an
# unclean restart
LOG.info(_('Disconnecting stale VDI %s from compute domU'),
vdi_rec['uuid'])
unplug_vbd(session, vbd_ref)
destroy_vbd(session, vbd_ref)
@contextlib.contextmanager
def vdi_attached_here(session, vdi_ref, read_only=False):
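    """Context manager: plug the VDI into the VM this code runs in and yield
    the resulting block device name, unplugging and destroying the VBD on
    exit.
    """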
this_vm_ref = _get_this_vm_ref(session)
vbd_ref = create_vbd(session, this_vm_ref, vdi_ref, 'autodetect',
read_only=read_only, bootable=False)
try:
LOG.debug(_('Plugging VBD %s ... '), vbd_ref)
session.call_xenapi("VBD.plug", vbd_ref)
try:
LOG.debug(_('Plugging VBD %s done.'), vbd_ref)
orig_dev = session.call_xenapi("VBD.get_device", vbd_ref)
LOG.debug(_('VBD %(vbd_ref)s plugged as %(orig_dev)s'),
{'vbd_ref': vbd_ref, 'orig_dev': orig_dev})
dev = _remap_vbd_dev(orig_dev)
if dev != orig_dev:
LOG.debug(_('VBD %(vbd_ref)s plugged into wrong dev, '
'remapping to %(dev)s'),
{'vbd_ref': vbd_ref, 'dev': dev})
_wait_for_device(dev)
yield dev
finally:
LOG.debug(_('Destroying VBD for VDI %s ... '), vdi_ref)
unplug_vbd(session, vbd_ref)
finally:
try:
destroy_vbd(session, vbd_ref)
except volume_utils.StorageError:
# destroy_vbd() will log error
pass
LOG.debug(_('Destroying VBD for VDI %s done.'), vdi_ref)
def _get_sys_hypervisor_uuid():
with file('/sys/hypervisor/uuid') as f:
return f.readline().strip()
def get_this_vm_uuid(session):
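    """Return the UUID of the VM this code is running in (the control domain
    when connected locally, otherwise this domU).
    """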
if session and session.is_local_connection:
        # The UUID we want is that of the control domain running on this host
host_ref = session.get_xenapi_host()
vms = session.call_xenapi("VM.get_all_records_where",
'field "is_control_domain"="true" and '
'field "resident_on"="%s"' % host_ref)
return vms[vms.keys()[0]]['uuid']
try:
return _get_sys_hypervisor_uuid()
except IOError:
# Some guest kernels (without 5c13f8067745efc15f6ad0158b58d57c44104c25)
# cannot read from uuid after a reboot. Fall back to trying xenstore.
# See https://bugs.launchpad.net/ubuntu/+source/xen-api/+bug/1081182
domid, _ = utils.execute('xenstore-read', 'domid', run_as_root=True)
vm_key, _ = utils.execute('xenstore-read',
'/local/domain/%s/vm' % domid.strip(),
run_as_root=True)
return vm_key.strip()[4:]
def _get_this_vm_ref(session):
return session.call_xenapi("VM.get_by_uuid", get_this_vm_uuid(session))
def _get_partitions(dev):
"""Return partition information (num, size, type) for a device."""
dev_path = utils.make_dev_path(dev)
out, _err = utils.execute('parted', '--script', '--machine',
dev_path, 'unit s', 'print',
run_as_root=True)
lines = [line for line in out.split('\n') if line]
partitions = []
LOG.debug(_("Partitions:"))
for line in lines[2:]:
num, start, end, size, ptype = line.split(':')[:5]
start = int(start.rstrip('s'))
end = int(end.rstrip('s'))
size = int(size.rstrip('s'))
LOG.debug(_(" %(num)s: %(ptype)s %(size)d sectors"),
{'num': num, 'ptype': ptype, 'size': size})
partitions.append((num, start, size, ptype))
return partitions
def _stream_disk(session, image_service_func, image_type, virtual_size, dev):
offset = 0
if image_type == ImageType.DISK:
offset = MBR_SIZE_BYTES
_write_partition(session, virtual_size, dev)
dev_path = utils.make_dev_path(dev)
with utils.temporary_chown(dev_path):
with open(dev_path, 'wb') as f:
f.seek(offset)
image_service_func(f)
def _write_partition(session, virtual_size, dev):
dev_path = utils.make_dev_path(dev)
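    # Reserve the first MBR_SIZE_SECTORS for the MBR; the primary partition
    # spans the remainder of the virtual size.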
primary_first = MBR_SIZE_SECTORS
primary_last = MBR_SIZE_SECTORS + (virtual_size / SECTOR_SIZE) - 1
LOG.debug(_('Writing partition table %(primary_first)d %(primary_last)d'
' to %(dev_path)s...'),
{'primary_first': primary_first, 'primary_last': primary_last,
'dev_path': dev_path})
def execute(*cmd, **kwargs):
return utils.execute(*cmd, **kwargs)
_make_partition(session, dev, "%ds" % primary_first, "%ds" % primary_last)
LOG.debug(_('Writing partition table %s done.'), dev_path)
def _get_min_sectors(partition_path, block_size=4096):
stdout, _err = utils.execute('resize2fs', '-P', partition_path,
run_as_root=True)
min_size_blocks = long(re.sub('[^0-9]', '', stdout))
min_size_bytes = min_size_blocks * block_size
return min_size_bytes / SECTOR_SIZE
def _repair_filesystem(partition_path):
# Exit Code 1 = File system errors corrected
# 2 = File system errors corrected, system needs a reboot
utils.execute('e2fsck', '-f', '-y', partition_path, run_as_root=True,
check_exit_code=[0, 1, 2])
def _resize_part_and_fs(dev, start, old_sectors, new_sectors):
"""Resize partition and fileystem.
This assumes we are dealing with a single primary partition and using
ext3 or ext4.
"""
size = new_sectors - start
end = new_sectors - 1
dev_path = utils.make_dev_path(dev)
partition_path = utils.make_dev_path(dev, partition=1)
# Replay journal if FS wasn't cleanly unmounted
_repair_filesystem(partition_path)
# Remove ext3 journal (making it ext2)
utils.execute('tune2fs', '-O ^has_journal', partition_path,
run_as_root=True)
if new_sectors < old_sectors:
# Resizing down, resize filesystem before partition resize
min_sectors = _get_min_sectors(partition_path)
if min_sectors >= new_sectors:
reason = (_('Resize down not allowed because minimum '
'filesystem sectors %(min_sectors)d is too big '
'for target sectors %(new_sectors)d') %
{'min_sectors': min_sectors, 'new_sectors': new_sectors})
raise exception.ResizeError(reason=reason)
utils.execute('resize2fs', partition_path, '%ds' % size,
run_as_root=True)
utils.execute('parted', '--script', dev_path, 'rm', '1',
run_as_root=True)
utils.execute('parted', '--script', dev_path, 'mkpart',
'primary',
'%ds' % start,
'%ds' % end,
run_as_root=True)
if new_sectors > old_sectors:
# Resizing up, resize filesystem after partition resize
utils.execute('resize2fs', partition_path, run_as_root=True)
# Add back journal
utils.execute('tune2fs', '-j', partition_path, run_as_root=True)
def _log_progress_if_required(left, last_log_time, virtual_size):
if timeutils.is_older_than(last_log_time, PROGRESS_INTERVAL_SECONDS):
last_log_time = timeutils.utcnow()
complete_pct = float(virtual_size - left) / virtual_size * 100
LOG.debug(_("Sparse copy in progress, "
"%(complete_pct).2f%% complete. "
"%(left)s bytes left to copy"),
{"complete_pct": complete_pct, "left": left})
return last_log_time
def _sparse_copy(src_path, dst_path, virtual_size, block_size=4096):
"""Copy data, skipping long runs of zeros to create a sparse file."""
start_time = last_log_time = timeutils.utcnow()
EMPTY_BLOCK = '\0' * block_size
bytes_read = 0
skipped_bytes = 0
left = virtual_size
LOG.debug(_("Starting sparse_copy src=%(src_path)s dst=%(dst_path)s "
"virtual_size=%(virtual_size)d block_size=%(block_size)d"),
{'src_path': src_path, 'dst_path': dst_path,
'virtual_size': virtual_size, 'block_size': block_size})
# NOTE(sirp): we need read/write access to the devices; since we don't have
# the luxury of shelling out to a sudo'd command, we temporarily take
# ownership of the devices.
with utils.temporary_chown(src_path):
with utils.temporary_chown(dst_path):
with open(src_path, "r") as src:
with open(dst_path, "w") as dst:
data = src.read(min(block_size, left))
while data:
if data == EMPTY_BLOCK:
dst.seek(block_size, os.SEEK_CUR)
left -= block_size
bytes_read += block_size
skipped_bytes += block_size
else:
dst.write(data)
data_len = len(data)
left -= data_len
bytes_read += data_len
if left <= 0:
break
data = src.read(min(block_size, left))
greenthread.sleep(0)
last_log_time = _log_progress_if_required(
left, last_log_time, virtual_size)
duration = timeutils.delta_seconds(start_time, timeutils.utcnow())
compression_pct = float(skipped_bytes) / bytes_read * 100
LOG.debug(_("Finished sparse_copy in %(duration).2f secs, "
"%(compression_pct).2f%% reduction in size"),
{'duration': duration, 'compression_pct': compression_pct})
def _copy_partition(session, src_ref, dst_ref, partition, virtual_size):
# Part of disk taken up by MBR
virtual_size -= MBR_SIZE_BYTES
with vdi_attached_here(session, src_ref, read_only=True) as src:
src_path = utils.make_dev_path(src, partition=partition)
with vdi_attached_here(session, dst_ref, read_only=False) as dst:
dst_path = utils.make_dev_path(dst, partition=partition)
_write_partition(session, virtual_size, dst)
if CONF.xenapi_sparse_copy:
_sparse_copy(src_path, dst_path, virtual_size)
else:
num_blocks = virtual_size / SECTOR_SIZE
utils.execute('dd',
'if=%s' % src_path,
'of=%s' % dst_path,
'count=%d' % num_blocks,
run_as_root=True)
def _mount_filesystem(dev_path, dir):
"""mounts the device specified by dev_path in dir."""
try:
_out, err = utils.execute('mount',
'-t', 'ext2,ext3,ext4,reiserfs',
dev_path, dir, run_as_root=True)
except processutils.ProcessExecutionError as e:
err = str(e)
return err
def _mounted_processing(device, key, net, metadata):
"""Callback which runs with the image VDI attached."""
# NB: Partition 1 hardcoded
dev_path = utils.make_dev_path(device, partition=1)
with utils.tempdir() as tmpdir:
# Mount only Linux filesystems, to avoid disturbing NTFS images
err = _mount_filesystem(dev_path, tmpdir)
if not err:
try:
# This try block ensures that the umount occurs
if not agent.find_guest_agent(tmpdir):
vfs = vfsimpl.VFSLocalFS(imgfile=None,
imgfmt=None,
imgdir=tmpdir)
LOG.info(_('Manipulating interface files directly'))
# for xenapi, we don't 'inject' admin_password here,
# it's handled at instance startup time, nor do we
# support injecting arbitrary files here.
disk.inject_data_into_fs(vfs,
key, net, metadata, None, None)
finally:
utils.execute('umount', dev_path, run_as_root=True)
else:
LOG.info(_('Failed to mount filesystem (expected for '
'non-linux instances): %s') % err)
def _prepare_injectables(inst, network_info):
"""
    Prepare the ssh key and the network configuration file to be
    injected into the disk image.
"""
    # Do the import here - Jinja2 will be loaded only if injection is performed
import jinja2
tmpl_path, tmpl_file = os.path.split(CONF.injected_network_template)
env = jinja2.Environment(loader=jinja2.FileSystemLoader(tmpl_path))
template = env.get_template(tmpl_file)
metadata = inst['metadata']
key = str(inst['key_data'])
net = None
if network_info:
ifc_num = -1
interfaces_info = []
for vif in network_info:
ifc_num += 1
try:
if not vif['network'].get_meta('injected'):
                    # network is not marked as injected
continue
except KeyError:
# vif network is None
continue
# NOTE(tr3buchet): using all subnets in case dns is stored in a
# subnet that isn't chosen as first v4 or v6
# subnet in the case where there is more than one
# dns = list of address of each dns entry from each vif subnet
dns = [ip['address'] for subnet in vif['network']['subnets']
for ip in subnet['dns']]
dns = ' '.join(dns).strip()
interface_info = {'name': 'eth%d' % ifc_num,
'address': '',
'netmask': '',
'gateway': '',
'broadcast': '',
'dns': dns or '',
'address_v6': '',
'netmask_v6': '',
'gateway_v6': '',
'use_ipv6': CONF.use_ipv6}
# NOTE(tr3buchet): the original code used the old network_info
# which only supported a single ipv4 subnet
# (and optionally, a single ipv6 subnet).
# I modified it to use the new network info model,
# which adds support for multiple v4 or v6
# subnets. I chose to ignore any additional
# subnets, just as the original code ignored
# additional IP information
# populate v4 info if v4 subnet and ip exist
try:
# grab the first v4 subnet (or it raises)
subnet = [s for s in vif['network']['subnets']
if s['version'] == 4][0]
# get the subnet's first ip (or it raises)
ip = subnet['ips'][0]
# populate interface_info
subnet_netaddr = subnet.as_netaddr()
interface_info['address'] = ip['address']
interface_info['netmask'] = subnet_netaddr.netmask
interface_info['gateway'] = subnet['gateway']['address']
interface_info['broadcast'] = subnet_netaddr.broadcast
except IndexError:
# there isn't a v4 subnet or there are no ips
pass
# populate v6 info if v6 subnet and ip exist
try:
# grab the first v6 subnet (or it raises)
subnet = [s for s in vif['network']['subnets']
if s['version'] == 6][0]
# get the subnet's first ip (or it raises)
ip = subnet['ips'][0]
# populate interface_info
interface_info['address_v6'] = ip['address']
interface_info['netmask_v6'] = subnet.as_netaddr().netmask
interface_info['gateway_v6'] = subnet['gateway']['address']
except IndexError:
# there isn't a v6 subnet or there are no ips
pass
interfaces_info.append(interface_info)
if interfaces_info:
net = template.render({'interfaces': interfaces_info,
'use_ipv6': CONF.use_ipv6})
return key, net, metadata
def ensure_correct_host(session):
"""Ensure we're connected to the host we're running on. This is the
required configuration for anything that uses vdi_attached_here.
"""
this_vm_uuid = get_this_vm_uuid(session)
try:
session.call_xenapi('VM.get_by_uuid', this_vm_uuid)
except session.XenAPI.Failure as exc:
if exc.details[0] != 'UUID_INVALID':
raise
raise Exception(_('This domU must be running on the host '
'specified by xenapi_connection_url'))
def move_disks(session, instance, disk_info):
"""Move and possibly link VHDs via the XAPI plugin."""
imported_vhds = session.call_plugin_serialized(
'migration', 'move_vhds_into_sr', instance_uuid=instance['uuid'],
sr_path=get_sr_path(session), uuid_stack=_make_uuid_stack())
# Now we rescan the SR so we find the VHDs
scan_default_sr(session)
root_uuid = imported_vhds['root']['uuid']
root_vdi_ref = session.call_xenapi('VDI.get_by_uuid', root_uuid)
# Set name-label so we can find if we need to clean up a failed migration
_set_vdi_info(session, root_vdi_ref, 'root', instance['name'], 'root',
instance)
return {'uuid': root_uuid, 'ref': root_vdi_ref}
def vm_ref_or_raise(session, instance_name):
vm_ref = lookup(session, instance_name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance_name)
return vm_ref
def handle_ipxe_iso(session, instance, cd_vdi, network_info):
"""iPXE ISOs are a mechanism to allow the customer to roll their own
image.
To use this feature, a service provider needs to configure the
appropriate Nova flags, roll an iPXE ISO, then distribute that image
to customers via Glance.
NOTE: `mkisofs` is not present by default in the Dom0, so the service
provider can either add that package manually to Dom0 or include the
`mkisofs` binary in the image itself.
"""
boot_menu_url = CONF.xenapi_ipxe_boot_menu_url
if not boot_menu_url:
LOG.warn(_('xenapi_ipxe_boot_menu_url not set, user will have to'
' enter URL manually...'), instance=instance)
return
network_name = CONF.xenapi_ipxe_network_name
if not network_name:
LOG.warn(_('xenapi_ipxe_network_name not set, user will have to'
' enter IP manually...'), instance=instance)
return
network = None
for vif in network_info:
if vif['network']['label'] == network_name:
network = vif['network']
break
if not network:
LOG.warn(_("Unable to find network matching '%(network_name)s', user"
" will have to enter IP manually...") %
{'network_name': network_name}, instance=instance)
return
sr_path = get_sr_path(session)
# Unpack IPv4 network info
subnet = [sn for sn in network['subnets']
if sn['version'] == 4][0]
ip = subnet['ips'][0]
ip_address = ip['address']
netmask = network_model.get_netmask(ip, subnet)
gateway = subnet['gateway']['address']
dns = subnet['dns'][0]['address']
try:
session.call_plugin_serialized("ipxe", "inject", sr_path,
cd_vdi['uuid'], boot_menu_url, ip_address, netmask,
gateway, dns, CONF.xenapi_ipxe_mkisofs_cmd)
except session.XenAPI.Failure as exc:
_type, _method, error = exc.details[:3]
if error == 'CommandNotFound':
LOG.warn(_("ISO creation tool '%s' does not exist.") %
CONF.xenapi_ipxe_mkisofs_cmd, instance=instance)
else:
raise
|
apache-2.0
|
leoliujie/odoo
|
addons/account_bank_statement_extensions/wizard/cancel_statement_line.py
|
381
|
1484
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class cancel_statement_line(osv.osv_memory):
_name = 'cancel.statement.line'
_description = 'Cancel selected statement lines'
def cancel_lines(self, cr, uid, ids, context):
line_ids = context['active_ids']
line_obj = self.pool.get('account.bank.statement.line')
line_obj.write(cr, uid, line_ids, {'state': 'draft'}, context=context)
return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
androidrbox/android_kernel_amazon_bueller
|
Documentation/networking/cxacru-cf.py
|
14668
|
1626
|
#!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
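#
# For example, the 8-byte input b'\x01\x00\x00\x00\x0a\x00\x00\x00' (two
# little-endian u32 values, chosen only for illustration) would be printed
# as "0=1 1=10".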
import sys
import struct
i = 0
while True:
buf = sys.stdin.read(4)
if len(buf) == 0:
break
elif len(buf) != 4:
sys.stdout.write("\n")
sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
sys.exit(1)
if i > 0:
sys.stdout.write(" ")
sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
i += 1
sys.stdout.write("\n")
|
gpl-2.0
|
hyperized/ansible
|
contrib/inventory/serf.py
|
79
|
2967
|
#!/usr/bin/env python
# (c) 2015, Marc Abramowitz <marca@surveymonkey.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Dynamic inventory script which lets you use nodes discovered by Serf
# (https://serfdom.io/).
#
# Requires the `serfclient` Python module from
# https://pypi.org/project/serfclient/
#
# Environment variables
# ---------------------
# - `SERF_RPC_ADDR`
# - `SERF_RPC_AUTH`
#
# These variables are described at https://www.serfdom.io/docs/commands/members.html#_rpc_addr
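#
# Example invocations (a reachable Serf agent is assumed; the address and
# hostname are illustrative):
#   SERF_RPC_ADDR=127.0.0.1:7373 ./serf.py --list
#   ./serf.py --host node1
#   ansible -i contrib/inventory/serf.py all -m ping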
import argparse
import collections
import os
import sys
# https://pypi.org/project/serfclient/
from serfclient import SerfClient, EnvironmentConfig
import json
_key = 'serf'
def _serf_client():
env = EnvironmentConfig()
return SerfClient(host=env.host, port=env.port, rpc_auth=env.auth_key)
def get_serf_members_data():
return _serf_client().members().body['Members']
def get_nodes(data):
return [node['Name'] for node in data]
def get_groups(data):
groups = collections.defaultdict(list)
for node in data:
for key, value in node['Tags'].items():
groups[value].append(node['Name'])
return groups
def get_meta(data):
meta = {'hostvars': {}}
for node in data:
meta['hostvars'][node['Name']] = node['Tags']
return meta
def print_list():
data = get_serf_members_data()
nodes = get_nodes(data)
groups = get_groups(data)
meta = get_meta(data)
inventory_data = {_key: nodes, '_meta': meta}
inventory_data.update(groups)
print(json.dumps(inventory_data))
def print_host(host):
data = get_serf_members_data()
meta = get_meta(data)
print(json.dumps(meta['hostvars'][host]))
def get_args(args_list):
parser = argparse.ArgumentParser(
description='ansible inventory script reading from serf cluster')
mutex_group = parser.add_mutually_exclusive_group(required=True)
help_list = 'list all hosts from serf cluster'
mutex_group.add_argument('--list', action='store_true', help=help_list)
help_host = 'display variables for a host'
mutex_group.add_argument('--host', help=help_host)
return parser.parse_args(args_list)
def main(args_list):
args = get_args(args_list)
if args.list:
print_list()
if args.host:
print_host(args.host)
if __name__ == '__main__':
main(sys.argv[1:])
|
gpl-3.0
|
vup1120/oq-hazardlib
|
openquake/hazardlib/tests/gsim/cauzzi_2014_test.py
|
2
|
4588
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2014-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Implements the test cases for the Cauzzi et al. (2014) GMPE
Test data taken from the Matlab implementation provided as a supplement
to the original manuscript
"""
from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase
from openquake.hazardlib.gsim.cauzzi_2014 import (
CauzziEtAl2014,
CauzziEtAl2014NoSOF,
CauzziEtAl2014FixedVs30,
CauzziEtAl2014FixedVs30NoSOF,
CauzziEtAl2014Eurocode8,
CauzziEtAl2014Eurocode8NoSOF)
# Discrepancy percentages to be applied to all tests
MEAN_DISCREP = 0.1
STDDEV_DISCREP = 0.1
class CauzziEtAl2014TestCase(BaseGSIMTestCase):
"""
Implements the test case for the class with required style of faulting
and period-dependent reference Vs30
"""
GSIM_CLASS = CauzziEtAl2014
# File containing the mean data
MEAN_FILE = "C14/CAUZZI_MEAN.csv"
# File containing the total standard deviation test data
STD_FILE = "C14/CAUZZI_TOTAL_STD.csv"
# File containing the inter-event standard deviation test data
INTER_FILE = "C14/CAUZZI_INTER_STD.csv"
    # File containing the intra-event standard deviation test data
INTRA_FILE = "C14/CAUZZI_INTRA_STD.csv"
def test_mean(self):
self.check(self.MEAN_FILE,
max_discrep_percentage=MEAN_DISCREP)
def test_std_total(self):
self.check(self.STD_FILE,
max_discrep_percentage=STDDEV_DISCREP)
def test_std_inter(self):
self.check(self.INTER_FILE,
max_discrep_percentage=STDDEV_DISCREP)
def test_std_intra(self):
self.check(self.INTRA_FILE,
max_discrep_percentage=STDDEV_DISCREP)
class CauzziEtAl2014NoSOFTestCase(CauzziEtAl2014TestCase):
"""
Implements the test case for the class with unspecified style of faulting
and period-dependent reference Vs30
"""
GSIM_CLASS = CauzziEtAl2014NoSOF
MEAN_FILE = "C14/CAUZZI_NoSOF_MEAN.csv"
STD_FILE = "C14/CAUZZI_NoSOF_TOTAL_STD.csv"
INTER_FILE = "C14/CAUZZI_NoSOF_INTER_STD.csv"
INTRA_FILE = "C14/CAUZZI_NoSOF_INTRA_STD.csv"
class CauzziEtAl2014FixedVs30TestCase(CauzziEtAl2014TestCase):
"""
Implements the test case for the class with required style of faulting
and fixed reference Vs30
"""
GSIM_CLASS = CauzziEtAl2014FixedVs30
MEAN_FILE = "C14/CAUZZI_FIXEDVS_MEAN.csv"
STD_FILE = "C14/CAUZZI_FIXEDVS_TOTAL_STD.csv"
INTER_FILE = "C14/CAUZZI_FIXEDVS_INTER_STD.csv"
INTRA_FILE = "C14/CAUZZI_FIXEDVS_INTRA_STD.csv"
class CauzziEtAl2014FixedVs30NoSOFTestCase(CauzziEtAl2014TestCase):
"""
Implements the test case for the class with unspecified style of faulting
and fixed reference Vs30
"""
GSIM_CLASS = CauzziEtAl2014FixedVs30NoSOF
MEAN_FILE = "C14/CAUZZI_NoSOF_FIXEDVS_MEAN.csv"
STD_FILE = "C14/CAUZZI_NoSOF_FIXEDVS_TOTAL_STD.csv"
INTER_FILE = "C14/CAUZZI_NoSOF_FIXEDVS_INTER_STD.csv"
INTRA_FILE = "C14/CAUZZI_NoSOF_FIXEDVS_INTRA_STD.csv"
class CauzziEtAl2014Eurocode8(CauzziEtAl2014TestCase):
"""
Implements the test case for the class with required style of faulting
and Eurocode 8 site classification
"""
GSIM_CLASS = CauzziEtAl2014Eurocode8
MEAN_FILE = "C14/CAUZZI_EC8_MEAN.csv"
STD_FILE = "C14/CAUZZI_EC8_TOTAL_STD.csv"
INTER_FILE = "C14/CAUZZI_EC8_INTER_STD.csv"
INTRA_FILE = "C14/CAUZZI_EC8_INTRA_STD.csv"
class CauzziEtAl2014Eurocode8NoSOF(CauzziEtAl2014TestCase):
"""
Implements the test case for the class with unspecified style of faulting
and Eurocode 8 site classification
"""
GSIM_CLASS = CauzziEtAl2014Eurocode8NoSOF
MEAN_FILE = "C14/CAUZZI_NoSOF_EC8_MEAN.csv"
STD_FILE = "C14/CAUZZI_NoSOF_EC8_TOTAL_STD.csv"
INTER_FILE = "C14/CAUZZI_NoSOF_EC8_INTER_STD.csv"
INTRA_FILE = "C14/CAUZZI_NoSOF_EC8_INTRA_STD.csv"
|
agpl-3.0
|
sdecoder/CMDS-HDFS
|
common/build/contrib/hod/hodlib/Common/xmlrpc.py
|
182
|
2374
|
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import xmlrpclib, time, random, signal
from hodlib.Common.util import hodInterrupt, HodInterruptException
class hodXRClient(xmlrpclib.ServerProxy):
def __init__(self, uri, transport=None, encoding=None, verbose=0,
allow_none=0, installSignalHandlers=1, retryRequests=True, timeOut=15):
xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, verbose,
allow_none)
self.__retryRequests = retryRequests
self.__timeOut = timeOut
if (installSignalHandlers!=0):
self.__set_alarm()
def __set_alarm(self):
def alarm_handler(sigNum, sigHandler):
raise Exception("XML-RPC socket timeout.")
signal.signal(signal.SIGALRM, alarm_handler)
def __request(self, methodname, params):
response = None
retryWaitTime = 5 + random.randint(0, 5)
for i in range(0, 30):
signal.alarm(self.__timeOut)
try:
response = self._ServerProxy__request(methodname, params)
signal.alarm(0)
break
except Exception:
if self.__retryRequests:
if hodInterrupt.isSet():
raise HodInterruptException()
time.sleep(retryWaitTime)
else:
raise Exception("hodXRClientTimeout")
return response
def __getattr__(self, name):
# magic method dispatcher
return xmlrpclib._Method(self.__request, name)
|
apache-2.0
|
RedHatInsights/insights-core
|
insights/combiners/md5check.py
|
1
|
1060
|
"""
NormalMD5 Combiner for the NormalMD5 Parser
===========================================
Combiner for the :class:`insights.parsers.md5check.NormalMD5` parser.
This parser is multioutput, one parser instance for each file md5sum.
This combiner puts all of them back together and presents them as a dict
where the keys are the filenames and the md5sums are the values.
This class inherits all methods and attributes from the ``dict`` object.
Examples:
>>> type(md5sums)
<class 'insights.combiners.md5check.NormalMD5'>
>>> sorted(md5sums.keys())
['/etc/localtime1', '/etc/localtime2']
>>> md5sums['/etc/localtime2']
'd41d8cd98f00b204e9800998ecf8427e'
"""
from .. import combiner
from insights.parsers.md5check import NormalMD5 as NormalMD5Parser
@combiner(NormalMD5Parser)
class NormalMD5(dict):
"""
Combiner for the NormalMD5 parser.
"""
def __init__(self, md5_checksums):
super(NormalMD5, self).__init__()
for md5info in md5_checksums:
self.update({md5info.filename: md5info.md5sum})
|
apache-2.0
|
devs1991/test_edx_docmode
|
venv/lib/python2.7/site-packages/boto/pyami/copybot.py
|
153
|
4261
|
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
from boto.pyami.scriptbase import ScriptBase
import os, StringIO
class CopyBot(ScriptBase):
def __init__(self):
super(CopyBot, self).__init__()
self.wdir = boto.config.get('Pyami', 'working_dir')
self.log_file = '%s.log' % self.instance_id
self.log_path = os.path.join(self.wdir, self.log_file)
boto.set_file_logger(self.name, self.log_path)
self.src_name = boto.config.get(self.name, 'src_bucket')
self.dst_name = boto.config.get(self.name, 'dst_bucket')
self.replace = boto.config.getbool(self.name, 'replace_dst', True)
s3 = boto.connect_s3()
self.src = s3.lookup(self.src_name)
if not self.src:
boto.log.error('Source bucket does not exist: %s' % self.src_name)
dest_access_key = boto.config.get(self.name, 'dest_aws_access_key_id', None)
if dest_access_key:
dest_secret_key = boto.config.get(self.name, 'dest_aws_secret_access_key', None)
            s3 = boto.connect_s3(dest_access_key, dest_secret_key)
self.dst = s3.lookup(self.dst_name)
if not self.dst:
self.dst = s3.create_bucket(self.dst_name)
def copy_bucket_acl(self):
if boto.config.get(self.name, 'copy_acls', True):
acl = self.src.get_xml_acl()
self.dst.set_xml_acl(acl)
def copy_key_acl(self, src, dst):
if boto.config.get(self.name, 'copy_acls', True):
acl = src.get_xml_acl()
dst.set_xml_acl(acl)
def copy_keys(self):
boto.log.info('src=%s' % self.src.name)
boto.log.info('dst=%s' % self.dst.name)
try:
for key in self.src:
if not self.replace:
exists = self.dst.lookup(key.name)
if exists:
boto.log.info('key=%s already exists in %s, skipping' % (key.name, self.dst.name))
continue
boto.log.info('copying %d bytes from key=%s' % (key.size, key.name))
prefix, base = os.path.split(key.name)
path = os.path.join(self.wdir, base)
key.get_contents_to_filename(path)
new_key = self.dst.new_key(key.name)
new_key.set_contents_from_filename(path)
self.copy_key_acl(key, new_key)
os.unlink(path)
except:
boto.log.exception('Error copying key: %s' % key.name)
def copy_log(self):
key = self.dst.new_key(self.log_file)
key.set_contents_from_filename(self.log_path)
def main(self):
fp = StringIO.StringIO()
boto.config.dump_safe(fp)
self.notify('%s (%s) Starting' % (self.name, self.instance_id), fp.getvalue())
if self.src and self.dst:
self.copy_keys()
if self.dst:
self.copy_log()
self.notify('%s (%s) Stopping' % (self.name, self.instance_id),
'Copy Operation Complete')
if boto.config.getbool(self.name, 'exit_on_completion', True):
ec2 = boto.connect_ec2()
ec2.terminate_instances([self.instance_id])
|
agpl-3.0
|
ifarup/colourlab
|
tests/test_misc.py
|
1
|
1116
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
test_misc: Unittests for all functions in the misc module.
Copyright (C) 2017 Ivar Farup
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at
your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import unittest
import matplotlib
import matplotlib.pyplot as plt
from colourlab import misc, space, data
t = data.g_MacAdam()
ell = t.get_ellipses(space.xyY)
_, ax = plt.subplots()
misc.plot_ellipses(ell, ax)
misc.plot_ellipses(ell)
class TestPlot(unittest.TestCase):
def test_plot(self):
self.assertTrue(isinstance(ax, matplotlib.axes.Axes))
|
gpl-3.0
|
anaran/olympia
|
migrations/578-migrate-remora-admin-events.py
|
7
|
2440
|
from datetime import datetime
from itertools import chain
import amo
from access.models import Group
from devhub.models import ActivityLog
from editors.models import EventLog
from users.models import UserProfile
# Are there other group changes we care about here?
# All of the old group IDs aside from Admins seem to have been deleted.
group_map = {
1: 'Admins',
2: 'Add-on Reviewers'
}
def run():
new_groups = Group.objects.filter(name__in=group_map.values())
new_groups = dict((g.name, g) for g in new_groups)
for id, name in group_map.items():
group_map[id] = new_groups[name]
items = (EventLog.objects.values_list('action', 'user', 'added', 'removed',
'changed_id', 'created')
.filter(type='admin',
action__in=('group_addmember',
'group_removemember'),
changed_id__in=group_map.keys())
.order_by('created'))
user_ids = set(chain(*[(i[1], int(i[2] or i[3]))
for i in items
if (i[2] or i[3] or '').isdigit()]))
users = dict((u.id, u)
for u in UserProfile.objects.filter(id__in=user_ids))
for action, admin, added, removed, group_id, created in items:
if action == 'group_addmember':
user_id, action = added, amo.LOG.GROUP_USER_ADDED
else:
user_id, action = removed, amo.LOG.GROUP_USER_REMOVED
if not user_id.isdigit():
continue
user_id = int(user_id)
kw = {'created': created}
if admin in users:
kw['user'] = users[admin]
if user_id in users:
amo.log(action, group_map[group_id], users[user_id], **kw)
# Fudge logs for editors who were added while logging was broken.
created = datetime(2013, 3, 14, 3, 14, 15, 926535)
user = group_map[1].users.all()[0]
group = group_map[2]
logs = (ActivityLog.objects.for_group(group)
.filter(action=amo.LOG.GROUP_USER_ADDED.id))
editors = (UserProfile.objects.filter(groups=group)
.exclude(id__in=[l.arguments[1].id for l in logs]))
for editor in editors:
amo.log(amo.LOG.GROUP_USER_ADDED, group, editor, user=user,
created=created)
|
bsd-3-clause
|
zstyblik/infernal-twin
|
build/reportlab/src/reportlab/pdfgen/textobject.py
|
25
|
19420
|
#Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/pdfgen/textobject.py
__version__=''' $Id$ '''
__doc__="""
PDFTextObject is an efficient way to add text to a Canvas. Do not
instantiate directly, obtain one from the Canvas instead.
Progress Reports:
8.83, 2000-01-13, gmcm: created from pdfgen.py
"""
import string
from types import *
from reportlab.lib.colors import Color, CMYKColor, CMYKColorSep, toColor, black, white, _CMYK_black, _CMYK_white
from reportlab.lib.utils import isBytes, isStr, asUnicode
from reportlab.lib.rl_accel import fp_str
from reportlab.pdfbase import pdfmetrics
from reportlab.rl_config import rtlSupport
log2vis = None
if rtlSupport:
try:
from pyfribidi2 import log2vis, ON as DIR_ON, LTR as DIR_LTR, RTL as DIR_RTL
directionsMap = dict(LTR=DIR_LTR,RTL=DIR_RTL)
except:
import warnings
warnings.warn('pyfribidi is not installed - RTL not supported')
class _PDFColorSetter:
'''Abstracts the color setting operations; used in Canvas and Textobject
    assumes we have a _code object'''
def _checkSeparation(self,cmyk):
if isinstance(cmyk,CMYKColorSep):
name,sname = self._doc.addColor(cmyk)
if name not in self._colorsUsed:
self._colorsUsed[name] = sname
return name
#if this is set to a callable(color) --> color it can be used to check color setting
#see eg _enforceCMYK/_enforceRGB
_enforceColorSpace = None
def setFillColorCMYK(self, c, m, y, k, alpha=None):
"""set the fill color useing negative color values
(cyan, magenta, yellow and darkness value).
Takes 4 arguments between 0.0 and 1.0"""
self.setFillColor((c,m,y,k),alpha=alpha)
def setStrokeColorCMYK(self, c, m, y, k, alpha=None):
"""set the stroke color useing negative color values
(cyan, magenta, yellow and darkness value).
Takes 4 arguments between 0.0 and 1.0"""
self.setStrokeColor((c,m,y,k),alpha=alpha)
def setFillColorRGB(self, r, g, b, alpha=None):
"""Set the fill color using positive color description
(Red,Green,Blue). Takes 3 arguments between 0.0 and 1.0"""
self.setFillColor((r,g,b),alpha=alpha)
def setStrokeColorRGB(self, r, g, b, alpha=None):
"""Set the stroke color using positive color description
(Red,Green,Blue). Takes 3 arguments between 0.0 and 1.0"""
self.setStrokeColor((r,g,b),alpha=alpha)
def setFillColor(self, aColor, alpha=None):
"""Takes a color object, allowing colors to be referred to by name"""
if self._enforceColorSpace:
aColor = self._enforceColorSpace(aColor)
if isinstance(aColor, CMYKColor):
d = aColor.density
c,m,y,k = (d*aColor.cyan, d*aColor.magenta, d*aColor.yellow, d*aColor.black)
self._fillColorObj = aColor
name = self._checkSeparation(aColor)
if name:
self._code.append('/%s cs %s scn' % (name,fp_str(d)))
else:
self._code.append('%s k' % fp_str(c, m, y, k))
elif isinstance(aColor, Color):
rgb = (aColor.red, aColor.green, aColor.blue)
self._fillColorObj = aColor
self._code.append('%s rg' % fp_str(rgb) )
elif isinstance(aColor,(tuple,list)):
l = len(aColor)
if l==3:
self._fillColorObj = aColor
self._code.append('%s rg' % fp_str(aColor) )
elif l==4:
self._fillColorObj = aColor
self._code.append('%s k' % fp_str(aColor))
else:
raise ValueError('Unknown color %r' % aColor)
elif isStr(aColor):
self.setFillColor(toColor(aColor))
else:
raise ValueError('Unknown color %r' % aColor)
if alpha is not None:
self.setFillAlpha(alpha)
elif getattr(aColor, 'alpha', None) is not None:
self.setFillAlpha(aColor.alpha)
def setStrokeColor(self, aColor, alpha=None):
"""Takes a color object, allowing colors to be referred to by name"""
if self._enforceColorSpace:
aColor = self._enforceColorSpace(aColor)
if isinstance(aColor, CMYKColor):
d = aColor.density
c,m,y,k = (d*aColor.cyan, d*aColor.magenta, d*aColor.yellow, d*aColor.black)
self._strokeColorObj = aColor
name = self._checkSeparation(aColor)
if name:
self._code.append('/%s CS %s SCN' % (name,fp_str(d)))
else:
self._code.append('%s K' % fp_str(c, m, y, k))
elif isinstance(aColor, Color):
rgb = (aColor.red, aColor.green, aColor.blue)
self._strokeColorObj = aColor
self._code.append('%s RG' % fp_str(rgb) )
elif isinstance(aColor,(tuple,list)):
l = len(aColor)
if l==3:
self._strokeColorObj = aColor
self._code.append('%s RG' % fp_str(aColor) )
elif l==4:
self._strokeColorObj = aColor
self._code.append('%s K' % fp_str(aColor))
else:
raise ValueError('Unknown color %r' % aColor)
elif isStr(aColor):
self.setStrokeColor(toColor(aColor))
else:
raise ValueError('Unknown color %r' % aColor)
if alpha is not None:
self.setStrokeAlpha(alpha)
elif getattr(aColor, 'alpha', None) is not None:
self.setStrokeAlpha(aColor.alpha)
def setFillGray(self, gray, alpha=None):
"""Sets the gray level; 0.0=black, 1.0=white"""
self._fillColorObj = (gray, gray, gray)
self._code.append('%s g' % fp_str(gray))
if alpha is not None:
self.setFillAlpha(alpha)
def setStrokeGray(self, gray, alpha=None):
"""Sets the gray level; 0.0=black, 1.0=white"""
self._strokeColorObj = (gray, gray, gray)
self._code.append('%s G' % fp_str(gray))
if alpha is not None:
self.setFillAlpha(alpha)
def setStrokeAlpha(self,a):
if not (isinstance(a,(float,int)) and 0<=a<=1):
raise ValueError('setStrokeAlpha invalid value %r' % a)
getattr(self,'_setStrokeAlpha',lambda x: None)(a)
def setFillAlpha(self,a):
if not (isinstance(a,(float,int)) and 0<=a<=1):
raise ValueError('setFillAlpha invalid value %r' % a)
getattr(self,'_setFillAlpha',lambda x: None)(a)
def setStrokeOverprint(self,a):
getattr(self,'_setStrokeOverprint',lambda x: None)(a)
def setFillOverprint(self,a):
getattr(self,'_setFillOverprint',lambda x: None)(a)
def setOverprintMask(self,a):
getattr(self,'_setOverprintMask',lambda x: None)(a)
class PDFTextObject(_PDFColorSetter):
"""PDF logically separates text and graphics drawing; text
operations need to be bracketed between BT (Begin text) and
ET operators. This class ensures text operations are
    properly encapsulated. Ask the canvas for a text object
with beginText(x, y). Do not construct one directly.
Do not use multiple text objects in parallel; PDF is
not multi-threaded!
It keeps track of x and y coordinates relative to its origin."""
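    # A short usage sketch (coordinates and text are illustrative):
    #   t = canvas.beginText(72, 720)
    #   t.setFont("Helvetica", 12)
    #   t.textLine("Hello world")
    #   canvas.drawText(t)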
def __init__(self, canvas, x=0,y=0, direction=None):
self._code = ['BT'] #no point in [] then append RGB
self._canvas = canvas #canvas sets this so it has access to size info
self._fontname = self._canvas._fontname
self._fontsize = self._canvas._fontsize
self._leading = self._canvas._leading
self._doc = self._canvas._doc
self._colorsUsed = self._canvas._colorsUsed
self._enforceColorSpace = getattr(canvas,'_enforceColorSpace',None)
font = pdfmetrics.getFont(self._fontname)
self._curSubset = -1
self.direction = direction
self.setTextOrigin(x, y)
self._textRenderMode = 0
self._clipping = 0
def getCode(self):
"pack onto one line; used internally"
self._code.append('ET')
if self._clipping:
self._code.append('%d Tr' % (self._textRenderMode^4))
return ' '.join(self._code)
def setTextOrigin(self, x, y):
if self._canvas.bottomup:
self._code.append('1 0 0 1 %s Tm' % fp_str(x, y)) #bottom up
else:
self._code.append('1 0 0 -1 %s Tm' % fp_str(x, y)) #top down
# The current cursor position is at the text origin
self._x0 = self._x = x
self._y0 = self._y = y
def setTextTransform(self, a, b, c, d, e, f):
"Like setTextOrigin, but does rotation, scaling etc."
if not self._canvas.bottomup:
c = -c #reverse bottom row of the 2D Transform
d = -d
self._code.append('%s Tm' % fp_str(a, b, c, d, e, f))
# The current cursor position is at the text origin Note that
# we aren't keeping track of all the transform on these
# coordinates: they are relative to the rotations/sheers
# defined in the matrix.
self._x0 = self._x = e
self._y0 = self._y = f
def moveCursor(self, dx, dy):
"""Starts a new line at an offset dx,dy from the start of the
current line. This does not move the cursor relative to the
current position, and it changes the current offset of every
future line drawn (i.e. if you next do a textLine() call, it
will move the cursor to a position one line lower than the
        position specified in this call."""
# Check if we have a previous move cursor call, and combine
# them if possible.
if self._code and self._code[-1][-3:]==' Td':
L = self._code[-1].split()
if len(L)==3:
del self._code[-1]
else:
self._code[-1] = ''.join(L[:-4])
# Work out the last movement
lastDx = float(L[-3])
lastDy = float(L[-2])
# Combine the two movement
dx += lastDx
dy -= lastDy
# We will soon add the movement to the line origin, so if
# we've already done this for lastDx, lastDy, remove it
# first (so it will be right when added back again).
self._x0 -= lastDx
self._y0 -= lastDy
# Output the move text cursor call.
self._code.append('%s Td' % fp_str(dx, -dy))
# Keep track of the new line offsets and the cursor position
self._x0 += dx
self._y0 += dy
self._x = self._x0
self._y = self._y0
def setXPos(self, dx):
"""Starts a new line dx away from the start of the
current line - NOT from the current point! So if
you call it in mid-sentence, watch out."""
self.moveCursor(dx,0)
def getCursor(self):
"""Returns current text position relative to the last origin."""
return (self._x, self._y)
def getStartOfLine(self):
"""Returns a tuple giving the text position of the start of the
current line."""
return (self._x0, self._y0)
def getX(self):
"""Returns current x position relative to the last origin."""
return self._x
def getY(self):
"""Returns current y position relative to the last origin."""
return self._y
def _setFont(self, psfontname, size):
"""Sets the font and fontSize
Raises a readable exception if an illegal font
is supplied. Font names are case-sensitive! Keeps track
        of font name and size for metrics."""
self._fontname = psfontname
self._fontsize = size
font = pdfmetrics.getFont(self._fontname)
if font._dynamicFont:
self._curSubset = -1
else:
pdffontname = self._canvas._doc.getInternalFontName(psfontname)
self._code.append('%s %s Tf' % (pdffontname, fp_str(size)))
def setFont(self, psfontname, size, leading = None):
"""Sets the font. If leading not specified, defaults to 1.2 x
font size. Raises a readable exception if an illegal font
is supplied. Font names are case-sensitive! Keeps track
        of font name and size for metrics."""
self._fontname = psfontname
self._fontsize = size
if leading is None:
leading = size * 1.2
self._leading = leading
font = pdfmetrics.getFont(self._fontname)
if font._dynamicFont:
self._curSubset = -1
else:
pdffontname = self._canvas._doc.getInternalFontName(psfontname)
self._code.append('%s %s Tf %s TL' % (pdffontname, fp_str(size), fp_str(leading)))
def setCharSpace(self, charSpace):
"""Adjusts inter-character spacing"""
self._charSpace = charSpace
self._code.append('%s Tc' % fp_str(charSpace))
def setWordSpace(self, wordSpace):
"""Adjust inter-word spacing. This can be used
to flush-justify text - you get the width of the
words, and add some space between them."""
self._wordSpace = wordSpace
self._code.append('%s Tw' % fp_str(wordSpace))
def setHorizScale(self, horizScale):
"Stretches text out horizontally"
self._horizScale = 100 + horizScale
self._code.append('%s Tz' % fp_str(horizScale))
def setLeading(self, leading):
"How far to move down at the end of a line."
self._leading = leading
self._code.append('%s TL' % fp_str(leading))
def setTextRenderMode(self, mode):
"""Set the text rendering mode.
0 = Fill text
1 = Stroke text
2 = Fill then stroke
3 = Invisible
4 = Fill text and add to clipping path
5 = Stroke text and add to clipping path
6 = Fill then stroke and add to clipping path
7 = Add to clipping path
after we start clipping we mustn't change the mode back until after the ET
"""
assert mode in (0,1,2,3,4,5,6,7), "mode must be in (0,1,2,3,4,5,6,7)"
if (mode & 4)!=self._clipping:
mode |= 4
self._clipping = mode & 4
if self._textRenderMode!=mode:
self._textRenderMode = mode
self._code.append('%d Tr' % mode)
def setRise(self, rise):
"Move text baseline up or down to allow superscript/subscripts"
self._rise = rise
self._y = self._y - rise # + ? _textLineMatrix?
self._code.append('%s Ts' % fp_str(rise))
def _formatText(self, text):
"Generates PDF text output operator(s)"
if log2vis and self.direction in ('LTR','RTL'):
# Use pyfribidi to write the text in the correct visual order.
text = log2vis(text, directionsMap.get(self.direction.upper(),DIR_ON),clean=True)
canv = self._canvas
font = pdfmetrics.getFont(self._fontname)
R = []
if font._dynamicFont:
#it's a truetype font and should be utf8. If an error is raised,
for subset, t in font.splitString(text, canv._doc):
if subset!=self._curSubset:
pdffontname = font.getSubsetInternalName(subset, canv._doc)
R.append("%s %s Tf %s TL" % (pdffontname, fp_str(self._fontsize), fp_str(self._leading)))
self._curSubset = subset
R.append("(%s) Tj" % canv._escape(t))
elif font._multiByte:
#all the fonts should really work like this - let them know more about PDF...
R.append("%s %s Tf %s TL" % (
canv._doc.getInternalFontName(font.fontName),
fp_str(self._fontsize),
fp_str(self._leading)
))
R.append("(%s) Tj" % font.formatForPdf(text))
else:
#convert to T1 coding
fc = font
if isBytes(text):
try:
text = text.decode('utf8')
except UnicodeDecodeError as e:
i,j = e.args[2:4]
raise UnicodeDecodeError(*(e.args[:4]+('%s\n%s-->%s<--%s' % (e.args[4],text[max(i-10,0):i],text[i:j],text[j:j+10]),)))
for f, t in pdfmetrics.unicode2T1(text,[font]+font.substitutionFonts):
if f!=fc:
R.append("%s %s Tf %s TL" % (canv._doc.getInternalFontName(f.fontName), fp_str(self._fontsize), fp_str(self._leading)))
fc = f
R.append("(%s) Tj" % canv._escape(t))
if font!=fc:
R.append("%s %s Tf %s TL" % (canv._doc.getInternalFontName(self._fontname), fp_str(self._fontsize), fp_str(self._leading)))
return ' '.join(R)
def _textOut(self, text, TStar=0):
"prints string at current point, ignores text cursor"
self._code.append('%s%s' % (self._formatText(text), (TStar and ' T*' or '')))
def textOut(self, text):
"""prints string at current point, text cursor moves across."""
self._x = self._x + self._canvas.stringWidth(text, self._fontname, self._fontsize)
self._code.append(self._formatText(text))
def textLine(self, text=''):
"""prints string at current point, text cursor moves down.
Can work with no argument to simply move the cursor down."""
# Update the coordinates of the cursor
self._x = self._x0
if self._canvas.bottomup:
self._y = self._y - self._leading
else:
self._y = self._y + self._leading
# Update the location of the start of the line
# self._x0 is unchanged
self._y0 = self._y
# Output the text followed by a PDF newline command
self._code.append('%s T*' % self._formatText(text))
def textLines(self, stuff, trim=1):
"""prints multi-line or newlined strings, moving down. One
        common use is to quote a multi-line block in your Python code;
since this may be indented, by default it trims whitespace
off each line and from the beginning; set trim=0 to preserve
whitespace."""
if isStr(stuff):
lines = asUnicode(stuff).strip().split(u'\n')
if trim==1:
lines = [s.strip() for s in lines]
elif isinstance(stuff,(tuple,list)):
lines = stuff
else:
            assert 1==0, "argument to textLines must be string, list or tuple"
# Output each line one at a time. This used to be a long-hand
# copy of the textLine code, now called as a method.
for line in lines:
self.textLine(line)
def __nonzero__(self):
'PDFTextObject is true if it has something done after the init'
return self._code != ['BT']
def _setFillAlpha(self,v):
self._canvas._doc.ensureMinPdfVersion('transparency')
self._canvas._extgstate.set(self,'ca',v)
def _setStrokeOverprint(self,v):
self._canvas._extgstate.set(self,'OP',v)
def _setFillOverprint(self,v):
self._canvas._extgstate.set(self,'op',v)
def _setOverprintMask(self,v):
self._canvas._extgstate.set(self,'OPM',v and 1 or 0)
|
gpl-3.0
|
kbrebanov/ansible-modules-extras
|
packaging/kibana_plugin.py
|
2
|
6899
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage elasticsearch shield role
(c) 2016, Thierno IB. BARRY @barryib
Sponsored by Polyconseil http://polyconseil.fr.
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
import os
DOCUMENTATION = '''
---
module: kibana_plugin
short_description: Manage Kibana plugins
description:
- Manages Kibana plugins.
version_added: "2.2"
author: Thierno IB. BARRY (@barryib)
options:
name:
description:
- Name of the plugin to install
required: True
state:
description:
- Desired state of a plugin.
required: False
choices: ["present", "absent"]
default: present
url:
description:
- Set exact URL to download the plugin from.
For local file, prefix its absolute path with file://
required: False
default: None
timeout:
description:
- "Timeout setting: 30s, 1m, 1h..."
required: False
default: 1m
plugin_bin:
description:
- Location of the plugin binary
required: False
default: /opt/kibana/bin/kibana
plugin_dir:
description:
- Your configured plugin directory specified in Kibana
required: False
default: /opt/kibana/installedPlugins/
version:
description:
- Version of the plugin to be installed.
If plugin exists with previous version, it will NOT be updated if C(force) is not set to yes
required: False
default: None
force:
description:
- Delete and re-install the plugin. Can be useful for plugins update
required: False
choices: ["yes", "no"]
default: no
'''
EXAMPLES = '''
# Install Elasticsearch Marvel plugin
- kibana_plugin:
    state: present
    name: elasticsearch/marvel
# Install specific version of a plugin
- kibana_plugin:
state: present
name: elasticsearch/marvel
version: '2.3.3'
# Uninstall Elasticsearch Marvel plugin
- kibana_plugin:
state: absent
name: elasticsearch/marvel
'''
RETURN = '''
cmd:
    description: the launched command during plugin management (install / remove)
returned: success
type: string
name:
description: the plugin name to install or remove
returned: success
type: string
url:
description: the url from where the plugin is installed from
returned: success
type: string
timeout:
    description: the timeout for plugin download
returned: success
type: string
stdout:
description: the command stdout
returned: success
type: string
stderr:
description: the command stderr
returned: success
type: string
state:
description: the state for the managed plugin
returned: success
type: string
'''
PACKAGE_STATE_MAP = dict(
present="--install",
absent="--remove"
)
def parse_plugin_repo(string):
elements = string.split("/")
# We first consider the simplest form: pluginname
repo = elements[0]
# We consider the form: username/pluginname
if len(elements) > 1:
repo = elements[1]
# remove elasticsearch- prefix
# remove es- prefix
for string in ("elasticsearch-", "es-"):
if repo.startswith(string):
return repo[len(string):]
return repo
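# For example, parse_plugin_repo("elasticsearch/elasticsearch-head") and
# parse_plugin_repo("mobz/es-head") would both return "head" (the inputs are
# illustrative).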
def is_plugin_present(plugin_dir, working_dir):
return os.path.isdir(os.path.join(working_dir, plugin_dir))
def parse_error(string):
reason = "reason: "
try:
return string[string.index(reason) + len(reason):].strip()
except ValueError:
return string
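# For example, given the (illustrative) output "failed, reason: no such plugin",
# parse_error() would return "no such plugin".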
def install_plugin(module, plugin_bin, plugin_name, url, timeout):
cmd_args = [plugin_bin, "plugin", PACKAGE_STATE_MAP["present"], plugin_name]
if url:
cmd_args.append("--url %s" % url)
if timeout:
cmd_args.append("--timeout %s" % timeout)
cmd = " ".join(cmd_args)
if module.check_mode:
return True, cmd, "check mode", ""
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def remove_plugin(module, plugin_bin, plugin_name):
cmd_args = [plugin_bin, "plugin", PACKAGE_STATE_MAP["absent"], plugin_name]
cmd = " ".join(cmd_args)
if module.check_mode:
return True, cmd, "check mode", ""
rc, out, err = module.run_command(cmd)
if rc != 0:
reason = parse_error(out)
module.fail_json(msg=reason)
return True, cmd, out, err
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(required=True),
state=dict(default="present", choices=PACKAGE_STATE_MAP.keys()),
url=dict(default=None),
timeout=dict(default="1m"),
plugin_bin=dict(default="/opt/kibana/bin/kibana", type="path"),
plugin_dir=dict(default="/opt/kibana/installedPlugins/", type="path"),
version=dict(default=None),
force=dict(default="no", type="bool")
),
supports_check_mode=True,
)
name = module.params["name"]
state = module.params["state"]
url = module.params["url"]
timeout = module.params["timeout"]
plugin_bin = module.params["plugin_bin"]
plugin_dir = module.params["plugin_dir"]
version = module.params["version"]
force = module.params["force"]
present = is_plugin_present(parse_plugin_repo(name), plugin_dir)
# skip if the state is correct
if (present and state == "present" and not force) or (state == "absent" and not present and not force):
module.exit_json(changed=False, name=name, state=state)
if (version):
name = name + '/' + version
if state == "present":
if force:
remove_plugin(module, plugin_bin, name)
changed, cmd, out, err = install_plugin(module, plugin_bin, name, url, timeout)
elif state == "absent":
changed, cmd, out, err = remove_plugin(module, plugin_bin, name)
module.exit_json(changed=changed, cmd=cmd, name=name, state=state, url=url, timeout=timeout, stdout=out, stderr=err)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
scrollback/kuma
|
vendor/packages/nose/unit_tests/test_capture_plugin.py
|
10
|
2779
|
# -*- coding: utf-8 -*-
import sys
import unittest
from optparse import OptionParser
from nose.config import Config
from nose.plugins.capture import Capture
class TestCapturePlugin(unittest.TestCase):
def setUp(self):
self._stdout = sys.stdout
def tearDown(self):
sys.stdout = self._stdout
def test_enabled_by_default(self):
c = Capture()
assert c.enabled
def test_can_be_disabled(self):
c = Capture()
parser = OptionParser()
c.addOptions(parser)
options, args = parser.parse_args(['test_can_be_disabled',
'-s'])
c.configure(options, Config())
assert not c.enabled
c = Capture()
options, args = parser.parse_args(['test_can_be_disabled_long',
'--nocapture'])
c.configure(options, Config())
assert not c.enabled
env = {'NOSE_NOCAPTURE': 1}
c = Capture()
parser = OptionParser()
c.addOptions(parser, env)
options, args = parser.parse_args(['test_can_be_disabled'])
c.configure(options, Config())
assert not c.enabled
c = Capture()
parser = OptionParser()
c.addOptions(parser)
options, args = parser.parse_args(['test_can_be_disabled'])
c.configure(options, Config())
assert c.enabled
def test_captures_stdout(self):
c = Capture()
c.start()
print "Hello"
c.end()
self.assertEqual(c.buffer, "Hello\n")
def test_captures_nonascii_stdout(self):
c = Capture()
c.start()
print "test 日本"
c.end()
self.assertEqual(c.buffer, "test 日本\n")
def test_format_error(self):
class Dummy:
pass
d = Dummy()
c = Capture()
c.start()
try:
print "Oh my!"
raise Exception("boom")
except:
err = sys.exc_info()
formatted = c.formatError(d, err)
ec, ev, tb = err
(fec, fev, ftb) = formatted
# print fec, fev, ftb
self.assertEqual(ec, fec)
self.assertEqual(tb, ftb)
assert 'Oh my!' in fev, "Output not found in error message"
assert 'Oh my!' in d.capturedOutput, "Output not attached to test"
def test_format_nonascii_error(self):
class Dummy:
pass
d = Dummy()
c = Capture()
c.start()
try:
print "debug 日本"
raise AssertionError(u'response does not contain 名')
except:
err = sys.exc_info()
formatted = c.formatError(d, err)
if __name__ == '__main__':
unittest.main()
|
mpl-2.0
|
miteshvp/fabric8-analytics-worker
|
alembic/versions/f8bb0efac483_nuget_ecosystem.py
|
3
|
3035
|
"""nuget ecosystem.
Revision ID: f8bb0efac483
Revises: e2762a61d34c
Create Date: 2017-08-03 13:55:04.065158
"""
# revision identifiers, used by Alembic.
revision = 'f8bb0efac483'
down_revision = 'e2762a61d34c'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
"""Upgrade the database to a newer revision."""
# ### commands auto generated by Alembic - please adjust! ###
# See https://bitbucket.org/zzzeek/alembic/issues/123/a-way-to-run-non-transactional-ddl
connection = None
if not op.get_context().as_sql:
connection = op.get_bind()
connection.execution_options(isolation_level='AUTOCOMMIT')
op.execute("ALTER TYPE ecosystem_backend_enum ADD VALUE 'nuget'")
op.execute("INSERT INTO ecosystems VALUES "
"('{id}', '{name}', '{backend}', '{url}', '{fetch_url}')".
format(id=8, name='nuget', backend='nuget',
url='https://nuget.org/', fetch_url='https://api.nuget.org/packages/'))
if connection is not None:
connection.execution_options(isolation_level='READ_COMMITTED')
# ### end Alembic commands ###
def downgrade():
"""Downgrade the database to an older revision."""
# ### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute("DELETE FROM ecosystems WHERE name = 'nuget'")
# There's no 'ALTER TYPE enum REMOVE VALUE'
op.alter_column('package_gh_usage', 'ecosystem_backend',
existing_type=postgresql.ENUM('none', 'npm', 'maven', 'pypi', 'rubygems',
'scm', 'crates', 'nuget',
name='ecosystem_backend_enum'),
type_=postgresql.ENUM('none', 'npm', 'maven', 'pypi', 'rubygems', 'scm',
'crates', name='ecosystem_backend_enum'),
existing_nullable=True)
op.alter_column('ecosystems', '_backend',
existing_type=sa.Enum('none', 'npm', 'maven', 'pypi', 'rubygems', 'scm',
'crates', 'nuget', name='ecosystem_backend_enum'),
type_=sa.Enum('none', 'npm', 'maven', 'pypi', 'rubygems', 'scm', 'crates',
name='ecosystem_backend_enum'),
existing_nullable=True)
op.alter_column('component_gh_usage', 'ecosystem_backend',
existing_type=postgresql.ENUM('none', 'npm', 'maven', 'pypi', 'rubygems',
'scm', 'crates', 'nuget',
name='ecosystem_backend_enum'),
type_=postgresql.ENUM('none', 'npm', 'maven', 'pypi', 'rubygems', 'scm',
'crates', name='ecosystem_backend_enum'),
existing_nullable=True)
# ### end Alembic commands ###
|
gpl-3.0
|
Greennut/ostproject
|
django/contrib/gis/gdal/tests/test_srs.py
|
351
|
11199
|
from django.contrib.gis.gdal import SpatialReference, CoordTransform, OGRException, SRSException
from django.utils import unittest
class TestSRS:
def __init__(self, wkt, **kwargs):
self.wkt = wkt
for key, value in kwargs.items():
setattr(self, key, value)
# Some Spatial Reference examples
srlist = (TestSRS('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]',
proj='+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs ',
epsg=4326, projected=False, geographic=True, local=False,
lin_name='unknown', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
auth={'GEOGCS' : ('EPSG', '4326'), 'spheroid' : ('EPSG', '7030')},
attr=(('DATUM', 'WGS_1984'), (('SPHEROID', 1), '6378137'),('primem|authority', 'EPSG'),),
),
TestSRS('PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',
proj=None, epsg=32140, projected=True, geographic=False, local=False,
lin_name='metre', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
auth={'PROJCS' : ('EPSG', '32140'), 'spheroid' : ('EPSG', '7019'), 'unit' : ('EPSG', '9001'),},
attr=(('DATUM', 'North_American_Datum_1983'),(('SPHEROID', 2), '298.257222101'),('PROJECTION','Lambert_Conformal_Conic_2SP'),),
),
TestSRS('PROJCS["NAD_1983_StatePlane_Texas_South_Central_FIPS_4204_Feet",GEOGCS["GCS_North_American_1983",DATUM["North_American_Datum_1983",SPHEROID["GRS_1980",6378137.0,298.257222101]],PRIMEM["Greenwich",0.0],UNIT["Degree",0.0174532925199433]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["False_Easting",1968500.0],PARAMETER["False_Northing",13123333.33333333],PARAMETER["Central_Meridian",-99.0],PARAMETER["Standard_Parallel_1",28.38333333333333],PARAMETER["Standard_Parallel_2",30.28333333333334],PARAMETER["Latitude_Of_Origin",27.83333333333333],UNIT["Foot_US",0.3048006096012192]]',
proj=None, epsg=None, projected=True, geographic=False, local=False,
lin_name='Foot_US', ang_name='Degree', lin_units=0.3048006096012192, ang_units=0.0174532925199,
auth={'PROJCS' : (None, None),},
attr=(('PROJCS|GeOgCs|spheroid', 'GRS_1980'),(('projcs', 9), 'UNIT'), (('projcs', 11), None),),
),
# This is really ESRI format, not WKT -- but the import should work the same
TestSRS('LOCAL_CS["Non-Earth (Meter)",LOCAL_DATUM["Local Datum",0],UNIT["Meter",1.0],AXIS["X",EAST],AXIS["Y",NORTH]]',
esri=True, proj=None, epsg=None, projected=False, geographic=False, local=True,
lin_name='Meter', ang_name='degree', lin_units=1.0, ang_units=0.0174532925199,
attr=(('LOCAL_DATUM', 'Local Datum'), ('unit', 'Meter')),
),
)
# Well-Known Names
well_known = (TestSRS('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],TOWGS84[0,0,0,0,0,0,0],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]', wk='WGS84', name='WGS 84', attrs=(('GEOGCS|AUTHORITY', 1, '4326'), ('SPHEROID', 'WGS 84'))),
TestSRS('GEOGCS["WGS 72",DATUM["WGS_1972",SPHEROID["WGS 72",6378135,298.26,AUTHORITY["EPSG","7043"]],AUTHORITY["EPSG","6322"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4322"]]', wk='WGS72', name='WGS 72', attrs=(('GEOGCS|AUTHORITY', 1, '4322'), ('SPHEROID', 'WGS 72'))),
TestSRS('GEOGCS["NAD27",DATUM["North_American_Datum_1927",SPHEROID["Clarke 1866",6378206.4,294.9786982138982,AUTHORITY["EPSG","7008"]],AUTHORITY["EPSG","6267"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4267"]]', wk='NAD27', name='NAD27', attrs=(('GEOGCS|AUTHORITY', 1, '4267'), ('SPHEROID', 'Clarke 1866'))),
TestSRS('GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]]', wk='NAD83', name='NAD83', attrs=(('GEOGCS|AUTHORITY', 1, '4269'), ('SPHEROID', 'GRS 1980'))),
TestSRS('PROJCS["NZGD49 / Karamea Circuit",GEOGCS["NZGD49",DATUM["New_Zealand_Geodetic_Datum_1949",SPHEROID["International 1924",6378388,297,AUTHORITY["EPSG","7022"]],TOWGS84[59.47,-5.04,187.44,0.47,-0.1,1.024,-4.5993],AUTHORITY["EPSG","6272"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4272"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",-41.28991152777778],PARAMETER["central_meridian",172.1090281944444],PARAMETER["scale_factor",1],PARAMETER["false_easting",300000],PARAMETER["false_northing",700000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","27216"]]', wk='EPSG:27216', name='NZGD49 / Karamea Circuit', attrs=(('PROJECTION','Transverse_Mercator'), ('SPHEROID', 'International 1924'))),
)
bad_srlist = ('Foobar', 'OOJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980",6378137,298.257222101,AUTHORITY["EPSG","7019"]],AUTHORITY["EPSG","6269"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.01745329251994328,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4269"]],PROJECTION["Lambert_Conformal_Conic_2SP"],PARAMETER["standard_parallel_1",30.28333333333333],PARAMETER["standard_parallel_2",28.38333333333333],PARAMETER["latitude_of_origin",27.83333333333333],PARAMETER["central_meridian",-99],PARAMETER["false_easting",600000],PARAMETER["false_northing",4000000],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AUTHORITY["EPSG","32140"]]',)
class SpatialRefTest(unittest.TestCase):
def test01_wkt(self):
"Testing initialization on valid OGC WKT."
for s in srlist:
srs = SpatialReference(s.wkt)
def test02_bad_wkt(self):
"Testing initialization on invalid WKT."
for bad in bad_srlist:
try:
srs = SpatialReference(bad)
srs.validate()
except (SRSException, OGRException):
pass
else:
self.fail('Should not have initialized on bad WKT "%s"!' % bad)
def test03_get_wkt(self):
"Testing getting the WKT."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.wkt, srs.wkt)
def test04_proj(self):
"Test PROJ.4 import and export."
for s in srlist:
if s.proj:
srs1 = SpatialReference(s.wkt)
srs2 = SpatialReference(s.proj)
self.assertEqual(srs1.proj, srs2.proj)
def test05_epsg(self):
"Test EPSG import."
for s in srlist:
if s.epsg:
srs1 = SpatialReference(s.wkt)
srs2 = SpatialReference(s.epsg)
srs3 = SpatialReference(str(s.epsg))
srs4 = SpatialReference('EPSG:%d' % s.epsg)
for srs in (srs1, srs2, srs3, srs4):
for attr, expected in s.attr:
self.assertEqual(expected, srs[attr])
def test07_boolean_props(self):
"Testing the boolean properties."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.projected, srs.projected)
self.assertEqual(s.geographic, srs.geographic)
def test08_angular_linear(self):
"Testing the linear and angular units routines."
for s in srlist:
srs = SpatialReference(s.wkt)
self.assertEqual(s.ang_name, srs.angular_name)
self.assertEqual(s.lin_name, srs.linear_name)
self.assertAlmostEqual(s.ang_units, srs.angular_units, 9)
self.assertAlmostEqual(s.lin_units, srs.linear_units, 9)
def test09_authority(self):
"Testing the authority name & code routines."
for s in srlist:
if hasattr(s, 'auth'):
srs = SpatialReference(s.wkt)
for target, tup in s.auth.items():
self.assertEqual(tup[0], srs.auth_name(target))
self.assertEqual(tup[1], srs.auth_code(target))
def test10_attributes(self):
"Testing the attribute retrieval routines."
for s in srlist:
srs = SpatialReference(s.wkt)
for tup in s.attr:
att = tup[0] # Attribute to test
exp = tup[1] # Expected result
self.assertEqual(exp, srs[att])
def test11_wellknown(self):
"Testing Well Known Names of Spatial References."
for s in well_known:
srs = SpatialReference(s.wk)
self.assertEqual(s.name, srs.name)
for tup in s.attrs:
if len(tup) == 2:
key = tup[0]
exp = tup[1]
elif len(tup) == 3:
key = tup[:2]
exp = tup[2]
self.assertEqual(srs[key], exp)
def test12_coordtransform(self):
"Testing initialization of a CoordTransform."
target = SpatialReference('WGS84')
for s in srlist:
if s.proj:
ct = CoordTransform(SpatialReference(s.wkt), target)
def test13_attr_value(self):
"Testing the attr_value() method."
s1 = SpatialReference('WGS84')
self.assertRaises(TypeError, s1.__getitem__, 0)
self.assertRaises(TypeError, s1.__getitem__, ('GEOGCS', 'foo'))
self.assertEqual('WGS 84', s1['GEOGCS'])
self.assertEqual('WGS_1984', s1['DATUM'])
self.assertEqual('EPSG', s1['AUTHORITY'])
self.assertEqual(4326, int(s1['AUTHORITY', 1]))
self.assertEqual(None, s1['FOOBAR'])
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(SpatialRefTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
|
bsd-3-clause
|
ngpestelos/ansible
|
lib/ansible/plugins/action/fail.py
|
227
|
1391
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2012, Dag Wieers <dag@wieers.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
''' Fail with custom message '''
TRANSFERS_FILES = False
def run(self, tmp=None, task_vars=None):
if task_vars is None:
task_vars = dict()
result = super(ActionModule, self).run(tmp, task_vars)
msg = 'Failed as requested from task'
if self._task.args and 'msg' in self._task.args:
msg = self._task.args.get('msg')
result['failed'] = True
result['msg'] = msg
return result
|
gpl-3.0
|
victor-prado/broker-manager
|
environment/lib/python3.5/site-packages/numpy/polynomial/legendre.py
|
22
|
57303
|
"""
Legendre Series (:mod:`numpy.polynomial.legendre`)
===================================================
.. currentmodule:: numpy.polynomial.legendre
This module provides a number of objects (mostly functions) useful for
dealing with Legendre series, including a `Legendre` class that
encapsulates the usual arithmetic operations. (General information
on how this module represents and works with such polynomials is in the
docstring for its "parent" sub-package, `numpy.polynomial`).
Constants
---------
.. autosummary::
:toctree: generated/
legdomain Legendre series default domain, [-1,1].
legzero Legendre series that evaluates identically to 0.
legone Legendre series that evaluates identically to 1.
legx Legendre series for the identity map, ``f(x) = x``.
Arithmetic
----------
.. autosummary::
:toctree: generated/
legmulx multiply a Legendre series in P_i(x) by x.
legadd add two Legendre series.
legsub subtract one Legendre series from another.
legmul multiply two Legendre series.
legdiv divide one Legendre series by another.
legpow raise a Legendre series to a positive integer power.
legval evaluate a Legendre series at given points.
legval2d evaluate a 2D Legendre series at given points.
legval3d evaluate a 3D Legendre series at given points.
leggrid2d evaluate a 2D Legendre series on a Cartesian product.
leggrid3d evaluate a 3D Legendre series on a Cartesian product.
Calculus
--------
.. autosummary::
:toctree: generated/
legder differentiate a Legendre series.
legint integrate a Legendre series.
Misc Functions
--------------
.. autosummary::
:toctree: generated/
legfromroots create a Legendre series with specified roots.
legroots find the roots of a Legendre series.
legvander Vandermonde-like matrix for Legendre polynomials.
legvander2d Vandermonde-like matrix for 2D Legendre series.
legvander3d Vandermonde-like matrix for 3D Legendre series.
leggauss Gauss-Legendre quadrature, points and weights.
legweight Legendre weight function.
legcompanion symmetrized companion matrix in Legendre form.
legfit least-squares fit returning a Legendre series.
legtrim trim leading coefficients from a Legendre series.
legline Legendre series representing given straight line.
leg2poly convert a Legendre series to a polynomial.
poly2leg convert a polynomial to a Legendre series.
Classes
-------
Legendre A Legendre series class.
See also
--------
numpy.polynomial.polynomial
numpy.polynomial.chebyshev
numpy.polynomial.laguerre
numpy.polynomial.hermite
numpy.polynomial.hermite_e
"""
from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
import numpy.linalg as la
from . import polyutils as pu
from ._polybase import ABCPolyBase
__all__ = [
'legzero', 'legone', 'legx', 'legdomain', 'legline', 'legadd',
'legsub', 'legmulx', 'legmul', 'legdiv', 'legpow', 'legval', 'legder',
'legint', 'leg2poly', 'poly2leg', 'legfromroots', 'legvander',
'legfit', 'legtrim', 'legroots', 'Legendre', 'legval2d', 'legval3d',
'leggrid2d', 'leggrid3d', 'legvander2d', 'legvander3d', 'legcompanion',
'leggauss', 'legweight']
legtrim = pu.trimcoef
def poly2leg(pol):
"""
Convert a polynomial to a Legendre series.
Convert an array representing the coefficients of a polynomial (relative
to the "standard" basis) ordered from lowest degree to highest, to an
array of the coefficients of the equivalent Legendre series, ordered
from lowest to highest degree.
Parameters
----------
pol : array_like
1-D array containing the polynomial coefficients
Returns
-------
c : ndarray
1-D array containing the coefficients of the equivalent Legendre
series.
See Also
--------
leg2poly
Notes
-----
The easy way to do conversions between polynomial basis sets
is to use the convert method of a class instance.
Examples
--------
>>> from numpy import polynomial as P
>>> p = P.Polynomial(np.arange(4))
>>> p
Polynomial([ 0., 1., 2., 3.], [-1., 1.])
>>> c = P.Legendre(P.poly2leg(p.coef))
>>> c
Legendre([ 1. , 3.25, 1. , 0.75], [-1., 1.])
"""
[pol] = pu.as_series([pol])
deg = len(pol) - 1
res = 0
for i in range(deg, -1, -1):
res = legadd(legmulx(res), pol[i])
return res
def leg2poly(c):
"""
Convert a Legendre series to a polynomial.
Convert an array representing the coefficients of a Legendre series,
ordered from lowest degree to highest, to an array of the coefficients
of the equivalent polynomial (relative to the "standard" basis) ordered
from lowest to highest degree.
Parameters
----------
c : array_like
1-D array containing the Legendre series coefficients, ordered
from lowest order term to highest.
Returns
-------
pol : ndarray
1-D array containing the coefficients of the equivalent polynomial
(relative to the "standard" basis) ordered from lowest order term
to highest.
See Also
--------
poly2leg
Notes
-----
The easy way to do conversions between polynomial basis sets
is to use the convert method of a class instance.
Examples
--------
>>> from numpy import polynomial as P
>>> c = P.Legendre(range(4))
>>> c
Legendre([ 0., 1., 2., 3.], [-1., 1.])
>>> p = c.convert(kind=P.Polynomial)
>>> p
Polynomial([-1. , -3.5, 3. , 7.5], [-1., 1.])
>>> P.leg2poly(range(4))
array([-1. , -3.5, 3. , 7.5])
"""
from .polynomial import polyadd, polysub, polymulx
[c] = pu.as_series([c])
n = len(c)
if n < 3:
return c
else:
c0 = c[-2]
c1 = c[-1]
# i is the current degree of c1
for i in range(n - 1, 1, -1):
tmp = c0
c0 = polysub(c[i - 2], (c1*(i - 1))/i)
c1 = polyadd(tmp, (polymulx(c1)*(2*i - 1))/i)
return polyadd(c0, polymulx(c1))
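# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): poly2leg and leg2poly are inverse linear maps, so a round
# trip through the Legendre basis recovers the power-basis coefficients up
# to roundoff.
def _demo_basis_roundtrip():
    # 0 + 1*x + 2*x**2 + 3*x**3 in the power basis
    pol = np.array([0., 1., 2., 3.])
    leg = poly2leg(pol)             # same series in the Legendre basis
    back = leg2poly(leg)            # convert back to the power basis
    assert np.allclose(back, pol)
    return leg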
#
# These constant arrays are of integer type so as to be compatible
# with the widest range of other types, such as Decimal.
#
# Legendre
legdomain = np.array([-1, 1])
# Legendre coefficients representing zero.
legzero = np.array([0])
# Legendre coefficients representing one.
legone = np.array([1])
# Legendre coefficients representing the identity x.
legx = np.array([0, 1])
def legline(off, scl):
"""
Legendre series whose graph is a straight line.
Parameters
----------
off, scl : scalars
The specified line is given by ``off + scl*x``.
Returns
-------
y : ndarray
This module's representation of the Legendre series for
``off + scl*x``.
See Also
--------
polyline, chebline
Examples
--------
>>> import numpy.polynomial.legendre as L
>>> L.legline(3,2)
array([3, 2])
>>> L.legval(-3, L.legline(3,2)) # should be -3
-3.0
"""
if scl != 0:
return np.array([off, scl])
else:
return np.array([off])
def legfromroots(roots):
"""
Generate a Legendre series with given roots.
The function returns the coefficients of the polynomial
.. math:: p(x) = (x - r_0) * (x - r_1) * ... * (x - r_n),
in Legendre form, where the `r_n` are the roots specified in `roots`.
If a zero has multiplicity n, then it must appear in `roots` n times.
For instance, if 2 is a root of multiplicity three and 3 is a root of
multiplicity 2, then `roots` looks something like [2, 2, 2, 3, 3]. The
roots can appear in any order.
If the returned coefficients are `c`, then
.. math:: p(x) = c_0 + c_1 * L_1(x) + ... + c_n * L_n(x)
The coefficient of the last term is not generally 1 for monic
polynomials in Legendre form.
Parameters
----------
roots : array_like
Sequence containing the roots.
Returns
-------
out : ndarray
1-D array of coefficients. If all roots are real then `out` is a
real array, if some of the roots are complex, then `out` is complex
even if all the coefficients in the result are real (see Examples
below).
See Also
--------
polyfromroots, chebfromroots, lagfromroots, hermfromroots,
hermefromroots.
Examples
--------
>>> import numpy.polynomial.legendre as L
>>> L.legfromroots((-1,0,1)) # x^3 - x relative to the standard basis
array([ 0. , -0.4, 0. , 0.4])
>>> j = complex(0,1)
>>> L.legfromroots((-j,j)) # x^2 + 1 relative to the standard basis
array([ 1.33333333+0.j, 0.00000000+0.j, 0.66666667+0.j])
"""
if len(roots) == 0:
return np.ones(1)
else:
[roots] = pu.as_series([roots], trim=False)
roots.sort()
p = [legline(-r, 1) for r in roots]
n = len(p)
while n > 1:
m, r = divmod(n, 2)
tmp = [legmul(p[i], p[i+m]) for i in range(m)]
if r:
tmp[0] = legmul(tmp[0], p[-1])
p = tmp
n = m
return p[0]
def legadd(c1, c2):
"""
Add one Legendre series to another.
Returns the sum of two Legendre series `c1` + `c2`. The arguments
are sequences of coefficients ordered from lowest order term to
highest, i.e., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Array representing the Legendre series of their sum.
See Also
--------
legsub, legmul, legdiv, legpow
Notes
-----
Unlike multiplication, division, etc., the sum of two Legendre series
is a Legendre series (without having to "reproject" the result onto
the basis set) so addition, just like that of "standard" polynomials,
is simply "component-wise."
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2,1)
>>> L.legadd(c1,c2)
array([ 4., 4., 4.])
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c1[:c2.size] += c2
ret = c1
else:
c2[:c1.size] += c1
ret = c2
return pu.trimseq(ret)
def legsub(c1, c2):
"""
Subtract one Legendre series from another.
Returns the difference of two Legendre series `c1` - `c2`. The
sequences of coefficients are from lowest order term to highest, i.e.,
[1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Of Legendre series coefficients representing their difference.
See Also
--------
legadd, legmul, legdiv, legpow
Notes
-----
Unlike multiplication, division, etc., the difference of two Legendre
series is a Legendre series (without having to "reproject" the result
onto the basis set) so subtraction, just like that of "standard"
polynomials, is simply "component-wise."
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2,1)
>>> L.legsub(c1,c2)
array([-2., 0., 2.])
>>> L.legsub(c2,c1) # -L.legsub(c1,c2)
array([ 2., 0., -2.])
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c1[:c2.size] -= c2
ret = c1
else:
c2 = -c2
c2[:c1.size] += c1
ret = c2
return pu.trimseq(ret)
def legmulx(c):
"""Multiply a Legendre series by x.
Multiply the Legendre series `c` by x, where x is the independent
variable.
Parameters
----------
c : array_like
1-D array of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Array representing the result of the multiplication.
Notes
-----
The multiplication uses the recursion relationship for Legendre
polynomials in the form
.. math::
xP_i(x) = ((i + 1)*P_{i + 1}(x) + i*P_{i - 1}(x))/(2i + 1)
"""
# c is a trimmed copy
[c] = pu.as_series([c])
# The zero series needs special treatment
if len(c) == 1 and c[0] == 0:
return c
prd = np.empty(len(c) + 1, dtype=c.dtype)
prd[0] = c[0]*0
prd[1] = c[0]
for i in range(1, len(c)):
j = i + 1
k = i - 1
s = i + j
prd[j] = (c[i]*j)/s
prd[k] += (c[i]*i)/s
return prd
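# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): applying the recursion above to the series P_1 (that is,
# coefficients [0, 1]) gives x*P_1(x) = x**2 = (P_0(x) + 2*P_2(x))/3.
def _demo_legmulx():
    out = legmulx([0, 1])
    assert np.allclose(out, [1.0/3.0, 0.0, 2.0/3.0])
    return out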
def legmul(c1, c2):
"""
Multiply one Legendre series by another.
Returns the product of two Legendre series `c1` * `c2`. The arguments
are sequences of coefficients, from lowest order "term" to highest,
e.g., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Of Legendre series coefficients representing their product.
See Also
--------
legadd, legsub, legdiv, legpow
Notes
-----
In general, the (polynomial) product of two C-series results in terms
that are not in the Legendre polynomial basis set. Thus, to express
the product as a Legendre series, it is necessary to "reproject" the
product onto said basis set, which may produce "unintuitive" (but
correct) results; see Examples section below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2)
>>> L.legmul(c1,c2) # multiplication requires "reprojection"
array([ 4.33333333, 10.4 , 11.66666667, 3.6 ])
"""
# s1, s2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c = c2
xs = c1
else:
c = c1
xs = c2
if len(c) == 1:
c0 = c[0]*xs
c1 = 0
elif len(c) == 2:
c0 = c[0]*xs
c1 = c[1]*xs
else:
nd = len(c)
c0 = c[-2]*xs
c1 = c[-1]*xs
for i in range(3, len(c) + 1):
tmp = c0
nd = nd - 1
c0 = legsub(c[-i]*xs, (c1*(nd - 1))/nd)
c1 = legadd(tmp, (legmulx(c1)*(2*nd - 1))/nd)
return legadd(c0, legmulx(c1))
def legdiv(c1, c2):
"""
Divide one Legendre series by another.
Returns the quotient-with-remainder of two Legendre series
`c1` / `c2`. The arguments are sequences of coefficients from lowest
order "term" to highest, e.g., [1,2,3] represents the series
``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
quo, rem : ndarrays
Of Legendre series coefficients representing the quotient and
remainder.
See Also
--------
legadd, legsub, legmul, legpow
Notes
-----
In general, the (polynomial) division of one Legendre series by another
results in quotient and remainder terms that are not in the Legendre
polynomial basis set. Thus, to express these results as a Legendre
series, it is necessary to "reproject" the results onto the Legendre
basis set, which may produce "unintuitive" (but correct) results; see
Examples section below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2,1)
>>> L.legdiv(c1,c2) # quotient "intuitive," remainder not
(array([ 3.]), array([-8., -4.]))
>>> c2 = (0,1,2,3)
>>> L.legdiv(c2,c1) # neither "intuitive"
(array([-0.07407407, 1.66666667]), array([-1.03703704, -2.51851852]))
"""
# c1, c2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if c2[-1] == 0:
raise ZeroDivisionError()
lc1 = len(c1)
lc2 = len(c2)
if lc1 < lc2:
return c1[:1]*0, c1
elif lc2 == 1:
return c1/c2[-1], c1[:1]*0
else:
quo = np.empty(lc1 - lc2 + 1, dtype=c1.dtype)
rem = c1
for i in range(lc1 - lc2, - 1, -1):
p = legmul([0]*i + [1], c2)
q = rem[-1]/p[-1]
rem = rem[:-1] - q*p[:-1]
quo[i] = q
return quo, pu.trimseq(rem)
def legpow(c, pow, maxpower=16):
"""Raise a Legendre series to a power.
Returns the Legendre series `c` raised to the power `pow`. The
argument `c` is a sequence of coefficients ordered from low to high,
i.e., [1,2,3] is the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c : array_like
1-D array of Legendre series coefficients ordered from low to
high.
pow : integer
Power to which the series will be raised
maxpower : integer, optional
Maximum power allowed. This is mainly to limit growth of the series
to unmanageable size. Default is 16
Returns
-------
coef : ndarray
Legendre series of power.
See Also
--------
legadd, legsub, legmul, legdiv
Examples
--------
"""
# c is a trimmed copy
[c] = pu.as_series([c])
power = int(pow)
if power != pow or power < 0:
raise ValueError("Power must be a non-negative integer.")
elif maxpower is not None and power > maxpower:
raise ValueError("Power is too large")
elif power == 0:
return np.array([1], dtype=c.dtype)
elif power == 1:
return c
else:
# This can be made more efficient by using powers of two
# in the usual way.
prd = c
for i in range(2, power + 1):
prd = legmul(prd, c)
return prd
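# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): squaring the series P_0 + 2*P_1 (i.e. 1 + 2*x) gives
# 1 + 4*x + 4*x**2, which reprojects onto the Legendre basis as
# (7/3)*P_0 + 4*P_1 + (8/3)*P_2.
def _demo_legpow():
    out = legpow([1, 2], 2)
    assert np.allclose(out, [7.0/3.0, 4.0, 8.0/3.0])
    return out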
def legder(c, m=1, scl=1, axis=0):
"""
Differentiate a Legendre series.
Returns the Legendre series coefficients `c` differentiated `m` times
along `axis`. At each iteration the result is multiplied by `scl` (the
scaling factor is for use in a linear change of variable). The argument
`c` is an array of coefficients from low to high degree along each
axis, e.g., [1,2,3] represents the series ``1*L_0 + 2*L_1 + 3*L_2``
while [[1,2],[1,2]] represents ``1*L_0(x)*L_0(y) + 1*L_1(x)*L_0(y) +
2*L_0(x)*L_1(y) + 2*L_1(x)*L_1(y)`` if axis=0 is ``x`` and axis=1 is
``y``.
Parameters
----------
c : array_like
Array of Legendre series coefficients. If c is multidimensional the
different axis correspond to different variables with the degree in
each axis given by the corresponding index.
m : int, optional
Number of derivatives taken, must be non-negative. (Default: 1)
scl : scalar, optional
Each differentiation is multiplied by `scl`. The end result is
multiplication by ``scl**m``. This is for use in a linear change of
variable. (Default: 1)
axis : int, optional
Axis over which the derivative is taken. (Default: 0).
.. versionadded:: 1.7.0
Returns
-------
der : ndarray
Legendre series of the derivative.
See Also
--------
legint
Notes
-----
In general, the result of differentiating a Legendre series does not
resemble the same operation on a power series. Thus the result of this
function may be "unintuitive," albeit correct; see Examples section
below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c = (1,2,3,4)
>>> L.legder(c)
array([ 6., 9., 20.])
>>> L.legder(c, 3)
array([ 60.])
>>> L.legder(c, scl=-1)
array([ -6., -9., -20.])
>>> L.legder(c, 2,-1)
array([ 9., 60.])
"""
c = np.array(c, ndmin=1, copy=1)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
cnt, iaxis = [int(t) for t in [m, axis]]
if cnt != m:
raise ValueError("The order of derivation must be integer")
if cnt < 0:
raise ValueError("The order of derivation must be non-negative")
if iaxis != axis:
raise ValueError("The axis must be integer")
if not -c.ndim <= iaxis < c.ndim:
raise ValueError("The axis is out of range")
if iaxis < 0:
iaxis += c.ndim
if cnt == 0:
return c
c = np.rollaxis(c, iaxis)
n = len(c)
if cnt >= n:
c = c[:1]*0
else:
for i in range(cnt):
n = n - 1
c *= scl
der = np.empty((n,) + c.shape[1:], dtype=c.dtype)
for j in range(n, 2, -1):
der[j - 1] = (2*j - 1)*c[j]
c[j - 2] += c[j]
if n > 1:
der[1] = 3*c[2]
der[0] = c[1]
c = der
c = np.rollaxis(c, 0, iaxis + 1)
return c
def legint(c, m=1, k=[], lbnd=0, scl=1, axis=0):
"""
Integrate a Legendre series.
Returns the Legendre series coefficients `c` integrated `m` times from
`lbnd` along `axis`. At each iteration the resulting series is
**multiplied** by `scl` and an integration constant, `k`, is added.
The scaling factor is for use in a linear change of variable. ("Buyer
beware": note that, depending on what one is doing, one may want `scl`
to be the reciprocal of what one might expect; for more information,
see the Notes section below.) The argument `c` is an array of
coefficients from low to high degree along each axis, e.g., [1,2,3]
represents the series ``L_0 + 2*L_1 + 3*L_2`` while [[1,2],[1,2]]
represents ``1*L_0(x)*L_0(y) + 1*L_1(x)*L_0(y) + 2*L_0(x)*L_1(y) +
2*L_1(x)*L_1(y)`` if axis=0 is ``x`` and axis=1 is ``y``.
Parameters
----------
c : array_like
Array of Legendre series coefficients. If c is multidimensional the
different axis correspond to different variables with the degree in
each axis given by the corresponding index.
m : int, optional
Order of integration, must be positive. (Default: 1)
k : {[], list, scalar}, optional
Integration constant(s). The value of the first integral at
``lbnd`` is the first value in the list, the value of the second
integral at ``lbnd`` is the second value, etc. If ``k == []`` (the
default), all constants are set to zero. If ``m == 1``, a single
scalar can be given instead of a list.
lbnd : scalar, optional
The lower bound of the integral. (Default: 0)
scl : scalar, optional
Following each integration the result is *multiplied* by `scl`
before the integration constant is added. (Default: 1)
axis : int, optional
Axis over which the integral is taken. (Default: 0).
.. versionadded:: 1.7.0
Returns
-------
S : ndarray
Legendre series coefficient array of the integral.
Raises
------
ValueError
If ``m < 0``, ``len(k) > m``, ``np.isscalar(lbnd) == False``, or
``np.isscalar(scl) == False``.
See Also
--------
legder
Notes
-----
Note that the result of each integration is *multiplied* by `scl`.
Why is this important to note? Say one is making a linear change of
variable :math:`u = ax + b` in an integral relative to `x`. Then
:math:`dx = du/a`, so one will need to set `scl` equal to
:math:`1/a` - perhaps not what one would have first thought.
Also note that, in general, the result of integrating a C-series needs
to be "reprojected" onto the C-series basis set. Thus, typically,
the result of this function is "unintuitive," albeit correct; see
Examples section below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c = (1,2,3)
>>> L.legint(c)
array([ 0.33333333, 0.4 , 0.66666667, 0.6 ])
>>> L.legint(c, 3)
array([ 1.66666667e-02, -1.78571429e-02, 4.76190476e-02,
-1.73472348e-18, 1.90476190e-02, 9.52380952e-03])
>>> L.legint(c, k=3)
array([ 3.33333333, 0.4 , 0.66666667, 0.6 ])
>>> L.legint(c, lbnd=-2)
array([ 7.33333333, 0.4 , 0.66666667, 0.6 ])
>>> L.legint(c, scl=2)
array([ 0.66666667, 0.8 , 1.33333333, 1.2 ])
"""
c = np.array(c, ndmin=1, copy=1)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
if not np.iterable(k):
k = [k]
cnt, iaxis = [int(t) for t in [m, axis]]
if cnt != m:
raise ValueError("The order of integration must be integer")
if cnt < 0:
raise ValueError("The order of integration must be non-negative")
if len(k) > cnt:
raise ValueError("Too many integration constants")
if iaxis != axis:
raise ValueError("The axis must be integer")
if not -c.ndim <= iaxis < c.ndim:
raise ValueError("The axis is out of range")
if iaxis < 0:
iaxis += c.ndim
if cnt == 0:
return c
c = np.rollaxis(c, iaxis)
k = list(k) + [0]*(cnt - len(k))
for i in range(cnt):
n = len(c)
c *= scl
if n == 1 and np.all(c[0] == 0):
c[0] += k[i]
else:
tmp = np.empty((n + 1,) + c.shape[1:], dtype=c.dtype)
tmp[0] = c[0]*0
tmp[1] = c[0]
if n > 1:
tmp[2] = c[1]/3
for j in range(2, n):
t = c[j]/(2*j + 1)
tmp[j + 1] = t
tmp[j - 1] -= t
tmp[0] += k[i] - legval(lbnd, tmp)
c = tmp
c = np.rollaxis(c, 0, iaxis + 1)
return c
def legval(x, c, tensor=True):
"""
Evaluate a Legendre series at points x.
If `c` is of length `n + 1`, this function returns the value:
.. math:: p(x) = c_0 * L_0(x) + c_1 * L_1(x) + ... + c_n * L_n(x)
The parameter `x` is converted to an array only if it is a tuple or a
list, otherwise it is treated as a scalar. In either case, either `x`
or its elements must support multiplication and addition both with
themselves and with the elements of `c`.
If `c` is a 1-D array, then `p(x)` will have the same shape as `x`. If
`c` is multidimensional, then the shape of the result depends on the
value of `tensor`. If `tensor` is true the shape will be c.shape[1:] +
x.shape. If `tensor` is false the shape will be c.shape[1:]. Note that
scalars have shape (,).
Trailing zeros in the coefficients will be used in the evaluation, so
they should be avoided if efficiency is a concern.
Parameters
----------
x : array_like, compatible object
If `x` is a list or tuple, it is converted to an ndarray, otherwise
it is left unchanged and treated as a scalar. In either case, `x`
or its elements must support addition and multiplication with
themselves and with the elements of `c`.
c : array_like
Array of coefficients ordered so that the coefficients for terms of
degree n are contained in c[n]. If `c` is multidimensional the
remaining indices enumerate multiple polynomials. In the two
dimensional case the coefficients may be thought of as stored in
the columns of `c`.
tensor : boolean, optional
If True, the shape of the coefficient array is extended with ones
on the right, one for each dimension of `x`. Scalars have dimension 0
for this action. The result is that every column of coefficients in
`c` is evaluated for every element of `x`. If False, `x` is broadcast
over the columns of `c` for the evaluation. This keyword is useful
when `c` is multidimensional. The default value is True.
.. versionadded:: 1.7.0
Returns
-------
values : ndarray, algebra_like
The shape of the return value is described above.
See Also
--------
legval2d, leggrid2d, legval3d, leggrid3d
Notes
-----
The evaluation uses Clenshaw recursion, aka synthetic division.
Examples
--------
"""
c = np.array(c, ndmin=1, copy=0)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
if isinstance(x, (tuple, list)):
x = np.asarray(x)
if isinstance(x, np.ndarray) and tensor:
c = c.reshape(c.shape + (1,)*x.ndim)
if len(c) == 1:
c0 = c[0]
c1 = 0
elif len(c) == 2:
c0 = c[0]
c1 = c[1]
else:
nd = len(c)
c0 = c[-2]
c1 = c[-1]
for i in range(3, len(c) + 1):
tmp = c0
nd = nd - 1
c0 = c[-i] - (c1*(nd - 1))/nd
c1 = tmp + (c1*x*(2*nd - 1))/nd
return c0 + c1*x
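# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): evaluating P_0 + 2*P_1 + 3*P_2 at x = 0.5 by hand gives
# 1 + 2*(0.5) + 3*((3*0.25 - 1)/2) = 1.625, matching the Clenshaw recursion
# implemented above.
def _demo_legval():
    val = legval(0.5, [1, 2, 3])
    assert np.allclose(val, 1.625)
    return val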
def legval2d(x, y, c):
"""
Evaluate a 2-D Legendre series at points (x, y).
This function returns the values:
.. math:: p(x,y) = \\sum_{i,j} c_{i,j} * L_i(x) * L_j(y)
The parameters `x` and `y` are converted to arrays only if they are
tuples or lists, otherwise they are treated as scalars and they
must have the same shape after conversion. In either case, either `x`
and `y` or their elements must support multiplication and addition both
with themselves and with the elements of `c`.
If `c` is a 1-D array a one is implicitly appended to its shape to make
it 2-D. The shape of the result will be c.shape[2:] + x.shape.
Parameters
----------
x, y : array_like, compatible objects
The two dimensional series is evaluated at the points `(x, y)`,
where `x` and `y` must have the same shape. If `x` or `y` is a list
or tuple, it is first converted to an ndarray, otherwise it is left
unchanged and if it isn't an ndarray it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficient of the term
of multi-degree i,j is contained in ``c[i,j]``. If `c` has
dimension greater than two the remaining indices enumerate multiple
sets of coefficients.
Returns
-------
values : ndarray, compatible object
The values of the two dimensional Legendre series at points formed
from pairs of corresponding values from `x` and `y`.
See Also
--------
legval, leggrid2d, legval3d, leggrid3d
Notes
-----
.. versionadded::1.7.0
"""
try:
x, y = np.array((x, y), copy=0)
except:
raise ValueError('x, y are incompatible')
c = legval(x, c)
c = legval(y, c, tensor=False)
return c
def leggrid2d(x, y, c):
"""
Evaluate a 2-D Legendre series on the Cartesian product of x and y.
This function returns the values:
.. math:: p(a,b) = \\sum_{i,j} c_{i,j} * L_i(a) * L_j(b)
where the points `(a, b)` consist of all pairs formed by taking
`a` from `x` and `b` from `y`. The resulting points form a grid with
`x` in the first dimension and `y` in the second.
The parameters `x` and `y` are converted to arrays only if they are
tuples or lists, otherwise they are treated as scalars. In either
case, either `x` and `y` or their elements must support multiplication
and addition both with themselves and with the elements of `c`.
If `c` has fewer than two dimensions, ones are implicitly appended to
its shape to make it 2-D. The shape of the result will be c.shape[2:] +
x.shape + y.shape.
Parameters
----------
x, y : array_like, compatible objects
The two dimensional series is evaluated at the points in the
Cartesian product of `x` and `y`. If `x` or `y` is a list or
tuple, it is first converted to an ndarray, otherwise it is left
unchanged and, if it isn't an ndarray, it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficient of the term of
multi-degree i,j is contained in `c[i,j]`. If `c` has dimension
greater than two the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the two dimensional Legendre series at points in the
Cartesian product of `x` and `y`.
See Also
--------
legval, legval2d, legval3d, leggrid3d
Notes
-----
.. versionadded::1.7.0
"""
c = legval(x, c)
c = legval(y, c)
return c
def legval3d(x, y, z, c):
"""
Evaluate a 3-D Legendre series at points (x, y, z).
This function returns the values:
.. math:: p(x,y,z) = \\sum_{i,j,k} c_{i,j,k} * L_i(x) * L_j(y) * L_k(z)
The parameters `x`, `y`, and `z` are converted to arrays only if
they are tuples or lists, otherwise they are treated as scalars and
they must have the same shape after conversion. In either case, either
`x`, `y`, and `z` or their elements must support multiplication and
addition both with themselves and with the elements of `c`.
If `c` has fewer than 3 dimensions, ones are implicitly appended to its
shape to make it 3-D. The shape of the result will be c.shape[3:] +
x.shape.
Parameters
----------
x, y, z : array_like, compatible object
The three dimensional series is evaluated at the points
`(x, y, z)`, where `x`, `y`, and `z` must have the same shape. If
any of `x`, `y`, or `z` is a list or tuple, it is first converted
to an ndarray, otherwise it is left unchanged and if it isn't an
ndarray it is treated as a scalar.
c : array_like
Array of coefficients ordered so that the coefficient of the term of
multi-degree i,j,k is contained in ``c[i,j,k]``. If `c` has dimension
greater than 3 the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the multidimensional polynomial on points formed with
triples of corresponding values from `x`, `y`, and `z`.
See Also
--------
legval, legval2d, leggrid2d, leggrid3d
Notes
-----
.. versionadded::1.7.0
"""
try:
x, y, z = np.array((x, y, z), copy=0)
except:
raise ValueError('x, y, z are incompatible')
c = legval(x, c)
c = legval(y, c, tensor=False)
c = legval(z, c, tensor=False)
return c
def leggrid3d(x, y, z, c):
"""
Evaluate a 3-D Legendre series on the Cartesian product of x, y, and z.
This function returns the values:
.. math:: p(a,b,c) = \\sum_{i,j,k} c_{i,j,k} * L_i(a) * L_j(b) * L_k(c)
where the points `(a, b, c)` consist of all triples formed by taking
`a` from `x`, `b` from `y`, and `c` from `z`. The resulting points form
a grid with `x` in the first dimension, `y` in the second, and `z` in
the third.
The parameters `x`, `y`, and `z` are converted to arrays only if they
are tuples or lists, otherwise they are treated as scalars. In
either case, either `x`, `y`, and `z` or their elements must support
multiplication and addition both with themselves and with the elements
of `c`.
If `c` has fewer than three dimensions, ones are implicitly appended to
its shape to make it 3-D. The shape of the result will be c.shape[3:] +
x.shape + y.shape + z.shape.
Parameters
----------
x, y, z : array_like, compatible objects
The three dimensional series is evaluated at the points in the
Cartesian product of `x`, `y`, and `z`. If `x`,`y`, or `z` is a
list or tuple, it is first converted to an ndarray, otherwise it is
left unchanged and, if it isn't an ndarray, it is treated as a
scalar.
c : array_like
Array of coefficients ordered so that the coefficients for terms of
degree i,j,k are contained in ``c[i,j,k]``. If `c` has dimension
greater than three the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the three dimensional polynomial at points in the
Cartesian product of `x`, `y`, and `z`.
See Also
--------
legval, legval2d, leggrid2d, legval3d
Notes
-----
.. versionadded::1.7.0
"""
c = legval(x, c)
c = legval(y, c)
c = legval(z, c)
return c
def legvander(x, deg):
"""Pseudo-Vandermonde matrix of given degree.
Returns the pseudo-Vandermonde matrix of degree `deg` and sample points
`x`. The pseudo-Vandermonde matrix is defined by
.. math:: V[..., i] = L_i(x)
where `0 <= i <= deg`. The leading indices of `V` index the elements of
`x` and the last index is the degree of the Legendre polynomial.
If `c` is a 1-D array of coefficients of length `n + 1` and `V` is the
array ``V = legvander(x, n)``, then ``np.dot(V, c)`` and
``legval(x, c)`` are the same up to roundoff. This equivalence is
useful both for least squares fitting and for the evaluation of a large
number of Legendre series of the same degree and sample points.
Parameters
----------
x : array_like
Array of points. The dtype is converted to float64 or complex128
depending on whether any of the elements are complex. If `x` is
scalar it is converted to a 1-D array.
deg : int
Degree of the resulting matrix.
Returns
-------
vander : ndarray
The pseudo-Vandermonde matrix. The shape of the returned matrix is
``x.shape + (deg + 1,)``, where the last index is the degree of the
corresponding Legendre polynomial. The dtype will be the same as
the converted `x`.
"""
ideg = int(deg)
if ideg != deg:
raise ValueError("deg must be integer")
if ideg < 0:
raise ValueError("deg must be non-negative")
x = np.array(x, copy=0, ndmin=1) + 0.0
dims = (ideg + 1,) + x.shape
dtyp = x.dtype
v = np.empty(dims, dtype=dtyp)
# Use forward recursion to generate the entries. This is not as accurate
# as reverse recursion in this application but it is more efficient.
v[0] = x*0 + 1
if ideg > 0:
v[1] = x
for i in range(2, ideg + 1):
v[i] = (v[i-1]*x*(2*i - 1) - v[i-2]*(i - 1))/i
return np.rollaxis(v, 0, v.ndim)
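# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): the columns of the pseudo-Vandermonde matrix are the basis
# polynomials evaluated at the sample points, so V.dot(c) agrees with
# legval(x, c) as the docstring states.
def _demo_legvander():
    x = np.array([-1.0, 0.0, 1.0])
    c = np.array([1.0, 2.0, 3.0])
    V = legvander(x, 2)             # columns are P_0, P_1, P_2 at x
    assert np.allclose(V.dot(c), legval(x, c))
    return V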
def legvander2d(x, y, deg):
"""Pseudo-Vandermonde matrix of given degrees.
Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
points `(x, y)`. The pseudo-Vandermonde matrix is defined by
.. math:: V[..., deg[1]*i + j] = L_i(x) * L_j(y),
where `0 <= i <= deg[0]` and `0 <= j <= deg[1]`. The leading indices of
`V` index the points `(x, y)` and the last index encodes the degrees of
the Legendre polynomials.
If ``V = legvander2d(x, y, [xdeg, ydeg])``, then the columns of `V`
correspond to the elements of a 2-D coefficient array `c` of shape
(xdeg + 1, ydeg + 1) in the order
.. math:: c_{00}, c_{01}, c_{02} ... , c_{10}, c_{11}, c_{12} ...
and ``np.dot(V, c.flat)`` and ``legval2d(x, y, c)`` will be the same
up to roundoff. This equivalence is useful both for least squares
fitting and for the evaluation of a large number of 2-D Legendre
series of the same degrees and sample points.
Parameters
----------
x, y : array_like
Arrays of point coordinates, all of the same shape. The dtypes
will be converted to either float64 or complex128 depending on
whether any of the elements are complex. Scalars are converted to
1-D arrays.
deg : list of ints
List of maximum degrees of the form [x_deg, y_deg].
Returns
-------
vander2d : ndarray
The shape of the returned matrix is ``x.shape + (order,)``, where
:math:`order = (deg[0]+1)*(deg[1]+1)`. The dtype will be the same
as the converted `x` and `y`.
See Also
--------
legvander, legvander3d, legval2d, legval3d
Notes
-----
.. versionadded::1.7.0
"""
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy = ideg
x, y = np.array((x, y), copy=0) + 0.0
vx = legvander(x, degx)
vy = legvander(y, degy)
v = vx[..., None]*vy[..., None,:]
return v.reshape(v.shape[:-2] + (-1,))
def legvander3d(x, y, z, deg):
"""Pseudo-Vandermonde matrix of given degrees.
Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
points `(x, y, z)`. If `l, m, n` are the given degrees in `x, y, z`,
then The pseudo-Vandermonde matrix is defined by
.. math:: V[..., (m+1)(n+1)i + (n+1)j + k] = L_i(x)*L_j(y)*L_k(z),
where `0 <= i <= l`, `0 <= j <= m`, and `0 <= k <= n`. The leading
indices of `V` index the points `(x, y, z)` and the last index encodes
the degrees of the Legendre polynomials.
If ``V = legvander3d(x, y, z, [xdeg, ydeg, zdeg])``, then the columns
of `V` correspond to the elements of a 3-D coefficient array `c` of
shape (xdeg + 1, ydeg + 1, zdeg + 1) in the order
.. math:: c_{000}, c_{001}, c_{002},... , c_{010}, c_{011}, c_{012},...
and ``np.dot(V, c.flat)`` and ``legval3d(x, y, z, c)`` will be the
same up to roundoff. This equivalence is useful both for least squares
fitting and for the evaluation of a large number of 3-D Legendre
series of the same degrees and sample points.
Parameters
----------
x, y, z : array_like
Arrays of point coordinates, all of the same shape. The dtypes will
be converted to either float64 or complex128 depending on whether
any of the elements are complex. Scalars are converted to 1-D
arrays.
deg : list of ints
List of maximum degrees of the form [x_deg, y_deg, z_deg].
Returns
-------
vander3d : ndarray
The shape of the returned matrix is ``x.shape + (order,)``, where
:math:`order = (deg[0]+1)*(deg[1]+1)*(deg[2]+1)`. The dtype will
be the same as the converted `x`, `y`, and `z`.
See Also
--------
legvander, legvander2d, legval2d, legval3d
Notes
-----
.. versionadded::1.7.0
"""
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy, degz = ideg
x, y, z = np.array((x, y, z), copy=0) + 0.0
vx = legvander(x, degx)
vy = legvander(y, degy)
vz = legvander(z, degz)
v = vx[..., None, None]*vy[..., None,:, None]*vz[..., None, None,:]
return v.reshape(v.shape[:-3] + (-1,))
def legfit(x, y, deg, rcond=None, full=False, w=None):
"""
Least squares fit of Legendre series to data.
Return the coefficients of a Legendre series of degree `deg` that is the
least squares fit to the data values `y` given at points `x`. If `y` is
1-D the returned coefficients will also be 1-D. If `y` is 2-D multiple
fits are done, one for each column of `y`, and the resulting
coefficients are stored in the corresponding columns of a 2-D return.
The fitted polynomial(s) are in the form
.. math:: p(x) = c_0 + c_1 * L_1(x) + ... + c_n * L_n(x),
where `n` is `deg`.
Parameters
----------
x : array_like, shape (M,)
x-coordinates of the M sample points ``(x[i], y[i])``.
y : array_like, shape (M,) or (M, K)
y-coordinates of the sample points. Several data sets of sample
points sharing the same x-coordinates can be fitted at once by
passing in a 2D-array that contains one dataset per column.
deg : int or 1-D array_like
Degree(s) of the fitting polynomials. If `deg` is a single integer
all terms up to and including the `deg`'th term are included in the
fit. For NumPy versions >= 1.11.0 a list of integers specifying the
degrees of the terms to include may be used instead.
rcond : float, optional
Relative condition number of the fit. Singular values smaller than
this relative to the largest singular value will be ignored. The
default value is len(x)*eps, where eps is the relative precision of
the float type, about 2e-16 in most cases.
full : bool, optional
Switch determining nature of return value. When it is False (the
default) just the coefficients are returned, when True diagnostic
information from the singular value decomposition is also returned.
w : array_like, shape (`M`,), optional
Weights. If not None, the contribution of each point
``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
weights are chosen so that the errors of the products ``w[i]*y[i]``
all have the same variance. The default value is None.
.. versionadded:: 1.5.0
Returns
-------
coef : ndarray, shape (deg + 1,) or (deg + 1, K)
Legendre coefficients ordered from low to high. If `y` was
2-D, the coefficients for the data in column k of `y` are in
column `k`. If `deg` is specified as a list, coefficients for
terms not included in the fit are set equal to zero in the
returned `coef`.
[residuals, rank, singular_values, rcond] : list
These values are only returned if `full` = True
resid -- sum of squared residuals of the least squares fit
rank -- the numerical rank of the scaled Vandermonde matrix
sv -- singular values of the scaled Vandermonde matrix
rcond -- value of `rcond`.
For more details, see `linalg.lstsq`.
Warns
-----
RankWarning
The rank of the coefficient matrix in the least-squares fit is
deficient. The warning is only raised if `full` = False. The
warnings can be turned off by
>>> import warnings
>>> warnings.simplefilter('ignore', RankWarning)
See Also
--------
chebfit, polyfit, lagfit, hermfit, hermefit
legval : Evaluates a Legendre series.
legvander : Vandermonde matrix of Legendre series.
legweight : Legendre weight function (= 1).
linalg.lstsq : Computes a least-squares fit from the matrix.
scipy.interpolate.UnivariateSpline : Computes spline fits.
Notes
-----
The solution is the coefficients of the Legendre series `p` that
minimizes the sum of the weighted squared errors
.. math:: E = \\sum_j w_j^2 * |y_j - p(x_j)|^2,
where :math:`w_j` are the weights. This problem is solved by setting up
as the (typically) overdetermined matrix equation
.. math:: V(x) * c = w * y,
where `V` is the weighted pseudo Vandermonde matrix of `x`, `c` are the
coefficients to be solved for, `w` are the weights, and `y` are the
observed values. This equation is then solved using the singular value
decomposition of `V`.
If some of the singular values of `V` are so small that they are
neglected, then a `RankWarning` will be issued. This means that the
coefficient values may be poorly determined. Using a lower order fit
will usually get rid of the warning. The `rcond` parameter can also be
set to a value smaller than its default, but the resulting fit may be
spurious and have large contributions from roundoff error.
Fits using Legendre series are usually better conditioned than fits
using power series, but much can depend on the distribution of the
sample points and the smoothness of the data. If the quality of the fit
is inadequate splines may be a good alternative.
References
----------
.. [1] Wikipedia, "Curve fitting",
http://en.wikipedia.org/wiki/Curve_fitting
Examples
--------
"""
x = np.asarray(x) + 0.0
y = np.asarray(y) + 0.0
deg = np.asarray(deg)
# check arguments.
if deg.ndim > 1 or deg.dtype.kind not in 'iu' or deg.size == 0:
raise TypeError("deg must be an int or non-empty 1-D array of int")
if deg.min() < 0:
raise ValueError("expected deg >= 0")
if x.ndim != 1:
raise TypeError("expected 1D vector for x")
if x.size == 0:
raise TypeError("expected non-empty vector for x")
if y.ndim < 1 or y.ndim > 2:
raise TypeError("expected 1D or 2D array for y")
if len(x) != len(y):
raise TypeError("expected x and y to have same length")
if deg.ndim == 0:
lmax = deg
order = lmax + 1
van = legvander(x, lmax)
else:
deg = np.sort(deg)
lmax = deg[-1]
order = len(deg)
van = legvander(x, lmax)[:, deg]
# set up the least squares matrices in transposed form
lhs = van.T
rhs = y.T
if w is not None:
w = np.asarray(w) + 0.0
if w.ndim != 1:
raise TypeError("expected 1D vector for w")
if len(x) != len(w):
raise TypeError("expected x and w to have same length")
# apply weights. Don't use inplace operations as they
# can cause problems with NA.
lhs = lhs * w
rhs = rhs * w
# set rcond
if rcond is None:
rcond = len(x)*np.finfo(x.dtype).eps
# Determine the norms of the design matrix columns.
if issubclass(lhs.dtype.type, np.complexfloating):
scl = np.sqrt((np.square(lhs.real) + np.square(lhs.imag)).sum(1))
else:
scl = np.sqrt(np.square(lhs).sum(1))
scl[scl == 0] = 1
# Solve the least squares problem.
c, resids, rank, s = la.lstsq(lhs.T/scl, rhs.T, rcond)
c = (c.T/scl).T
# Expand c to include non-fitted coefficients which are set to zero
if deg.ndim > 0:
if c.ndim == 2:
cc = np.zeros((lmax+1, c.shape[1]), dtype=c.dtype)
else:
cc = np.zeros(lmax+1, dtype=c.dtype)
cc[deg] = c
c = cc
# warn on rank reduction
if rank != order and not full:
msg = "The fit may be poorly conditioned"
warnings.warn(msg, pu.RankWarning, stacklevel=2)
if full:
return c, [resids, rank, s, rcond]
else:
return c
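# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): fitting noise-free samples of a known Legendre series with
# a matching degree recovers its coefficients up to roundoff.
def _demo_legfit():
    x = np.linspace(-1, 1, 50)
    c_true = [1.0, 2.0, 3.0]
    y = legval(x, c_true)
    c_fit = legfit(x, y, 2)
    assert np.allclose(c_fit, c_true)
    return c_fit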
def legcompanion(c):
"""Return the scaled companion matrix of c.
The basis polynomials are scaled so that the companion matrix is
symmetric when `c` is an Legendre basis polynomial. This provides
better eigenvalue estimates than the unscaled case and for basis
polynomials the eigenvalues are guaranteed to be real if
`numpy.linalg.eigvalsh` is used to obtain them.
Parameters
----------
c : array_like
1-D array of Legendre series coefficients ordered from low to high
degree.
Returns
-------
mat : ndarray
Scaled companion matrix of dimensions (deg, deg).
Notes
-----
.. versionadded::1.7.0
"""
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) < 2:
raise ValueError('Series must have maximum degree of at least 1.')
if len(c) == 2:
return np.array([[-c[0]/c[1]]])
n = len(c) - 1
mat = np.zeros((n, n), dtype=c.dtype)
scl = 1./np.sqrt(2*np.arange(n) + 1)
top = mat.reshape(-1)[1::n+1]
bot = mat.reshape(-1)[n::n+1]
top[...] = np.arange(1, n)*scl[:n-1]*scl[1:n]
bot[...] = top
mat[:, -1] -= (c[:-1]/c[-1])*(scl/scl[-1])*(n/(2*n - 1))
return mat
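# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): because the scaled companion matrix is symmetric, its
# eigenvalues (computed with eigvalsh) are real and are the roots of the
# series; here the series is P_3, whose roots are 0 and +/-sqrt(3/5).
def _demo_legcompanion():
    c = [0.0, 0.0, 0.0, 1.0]               # the series P_3(x)
    roots = np.sort(la.eigvalsh(legcompanion(c)))
    assert np.allclose(legval(roots, c), 0.0, atol=1e-12)
    return roots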
def legroots(c):
"""
Compute the roots of a Legendre series.
Return the roots (a.k.a. "zeros") of the polynomial
.. math:: p(x) = \\sum_i c[i] * L_i(x).
Parameters
----------
c : 1-D array_like
1-D array of coefficients.
Returns
-------
out : ndarray
Array of the roots of the series. If all the roots are real,
then `out` is also real, otherwise it is complex.
See Also
--------
polyroots, chebroots, lagroots, hermroots, hermeroots
Notes
-----
The root estimates are obtained as the eigenvalues of the companion
matrix. Roots far from the origin of the complex plane may have large
errors due to the numerical instability of the series for such values.
Roots with multiplicity greater than 1 will also show larger errors as
the value of the series near such points is relatively insensitive to
errors in the roots. Isolated roots near the origin can be improved by
a few iterations of Newton's method.
The Legendre series basis polynomials aren't powers of ``x`` so the
results of this function may seem unintuitive.
Examples
--------
>>> import numpy.polynomial.legendre as leg
>>> leg.legroots((1, 2, 3, 4)) # 4L_3 + 3L_2 + 2L_1 + 1L_0, all real roots
array([-0.85099543, -0.11407192, 0.51506735])
"""
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) < 2:
return np.array([], dtype=c.dtype)
if len(c) == 2:
return np.array([-c[0]/c[1]])
m = legcompanion(c)
r = la.eigvals(m)
r.sort()
return r
def leggauss(deg):
"""
Gauss-Legendre quadrature.
Computes the sample points and weights for Gauss-Legendre quadrature.
These sample points and weights will correctly integrate polynomials of
degree :math:`2*deg - 1` or less over the interval :math:`[-1, 1]` with
the weight function :math:`f(x) = 1`.
Parameters
----------
deg : int
Number of sample points and weights. It must be >= 1.
Returns
-------
x : ndarray
1-D ndarray containing the sample points.
y : ndarray
1-D ndarray containing the weights.
Notes
-----
.. versionadded::1.7.0
The results have only been tested up to degree 100, higher degrees may
be problematic. The weights are determined by using the fact that
.. math:: w_k = c / (L'_n(x_k) * L_{n-1}(x_k))
where :math:`c` is a constant independent of :math:`k` and :math:`x_k`
is the k'th root of :math:`L_n`, and then scaling the results to get
the right value when integrating 1.
"""
ideg = int(deg)
if ideg != deg or ideg < 1:
raise ValueError("deg must be a non-negative integer")
# first approximation of roots. We use the fact that the companion
# matrix is symmetric in this case in order to obtain better zeros.
c = np.array([0]*deg + [1])
m = legcompanion(c)
x = la.eigvalsh(m)
# improve roots by one application of Newton
dy = legval(x, c)
df = legval(x, legder(c))
x -= dy/df
# compute the weights. We scale the factor to avoid possible numerical
# overflow.
fm = legval(x, c[1:])
fm /= np.abs(fm).max()
df /= np.abs(df).max()
w = 1/(fm * df)
# for Legendre we can also symmetrize
w = (w + w[::-1])/2
x = (x - x[::-1])/2
# scale w to get the right value
w *= 2. / w.sum()
return x, w
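# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): a 3-point Gauss-Legendre rule integrates polynomials of
# degree <= 5 exactly, so it reproduces the integral of x**2 over [-1, 1],
# which is 2/3.
def _demo_leggauss():
    x, w = leggauss(3)
    approx = np.sum(w * x**2)
    assert np.allclose(approx, 2.0/3.0)
    return approx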
def legweight(x):
"""
Weight function of the Legendre polynomials.
The weight function is :math:`1` and the interval of integration is
:math:`[-1, 1]`. The Legendre polynomials are orthogonal, but not
normalized, with respect to this weight function.
Parameters
----------
x : array_like
Values at which the weight function will be computed.
Returns
-------
w : ndarray
The weight function at `x`.
Notes
-----
.. versionadded::1.7.0
"""
w = x*0.0 + 1.0
return w
#
# Legendre series class
#
class Legendre(ABCPolyBase):
"""A Legendre series class.
The Legendre class provides the standard Python numerical methods
'+', '-', '*', '//', '%', 'divmod', '**', and '()' as well as the
attributes and methods listed in the `ABCPolyBase` documentation.
Parameters
----------
coef : array_like
Legendre coefficients in order of increasing degree, i.e.,
``(1, 2, 3)`` gives ``1*P_0(x) + 2*P_1(x) + 3*P_2(x)``.
domain : (2,) array_like, optional
Domain to use. The interval ``[domain[0], domain[1]]`` is mapped
to the interval ``[window[0], window[1]]`` by shifting and scaling.
The default value is [-1, 1].
window : (2,) array_like, optional
Window, see `domain` for its use. The default value is [-1, 1].
.. versionadded:: 1.6.0
"""
# Virtual Functions
_add = staticmethod(legadd)
_sub = staticmethod(legsub)
_mul = staticmethod(legmul)
_div = staticmethod(legdiv)
_pow = staticmethod(legpow)
_val = staticmethod(legval)
_int = staticmethod(legint)
_der = staticmethod(legder)
_fit = staticmethod(legfit)
_line = staticmethod(legline)
_roots = staticmethod(legroots)
_fromroots = staticmethod(legfromroots)
# Virtual properties
nickname = 'leg'
domain = np.array(legdomain)
window = np.array(legdomain)
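# Illustrative sketch (added here for clarity; not part of the upstream
# numpy module): the Legendre class wraps the functional interface above,
# so a least-squares fit and evaluation can be done through the
# `Legendre.fit` classmethod and the call operator inherited from
# `ABCPolyBase`.
def _demo_legendre_class():
    x = np.linspace(-1, 1, 50)
    y = legval(x, [1.0, 2.0, 3.0])
    series = Legendre.fit(x, y, 2)      # fit a degree-2 Legendre series
    assert np.allclose(series(x), y)
    return series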
|
mit
|
takeshineshiro/horizon
|
openstack_dashboard/test/test_plugins/panel_tests.py
|
23
|
3503
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from django.conf import settings
from django.test.utils import override_settings
import horizon
from openstack_dashboard.dashboards.admin.info import panel as info_panel
from openstack_dashboard.test import helpers as test
from openstack_dashboard.test.test_panels.plugin_panel \
import panel as plugin_panel
from openstack_dashboard.test.test_panels.nonloading_panel \
import panel as nonloading_panel
from openstack_dashboard.test.test_plugins import panel_config
from openstack_dashboard.utils import settings as util_settings
HORIZON_CONFIG = copy.deepcopy(settings.HORIZON_CONFIG)
INSTALLED_APPS = list(settings.INSTALLED_APPS)
# NOTE: Ensure dashboards and default_dashboard are not included in
# HORIZON_CONFIG to ensure warning messages from update_dashboards below.
HORIZON_CONFIG.pop('dashboards', None)
HORIZON_CONFIG.pop('default_dashboard', None)
HORIZON_CONFIG.pop('js_files', None)
HORIZON_CONFIG.pop('js_spec_files', None)
HORIZON_CONFIG.pop('scss_files', None)
util_settings.update_dashboards([panel_config,], HORIZON_CONFIG, INSTALLED_APPS)
@override_settings(HORIZON_CONFIG=HORIZON_CONFIG,
INSTALLED_APPS=INSTALLED_APPS)
class PanelPluginTests(test.PluginTestCase):
def test_add_panel(self):
dashboard = horizon.get_dashboard("admin")
panel_group = dashboard.get_panel_group('admin')
# Check that the panel is in its configured dashboard.
self.assertIn(plugin_panel.PluginPanel,
[p.__class__ for p in dashboard.get_panels()])
# Check that the panel is in its configured panel group.
self.assertIn(plugin_panel.PluginPanel,
[p.__class__ for p in panel_group])
# Ensure that static resources are properly injected
pc = panel_config._10_admin_add_panel
self.assertEquals(pc.ADD_JS_FILES, HORIZON_CONFIG['js_files'])
self.assertEquals(pc.ADD_JS_SPEC_FILES, HORIZON_CONFIG['js_spec_files'])
self.assertEquals(pc.ADD_SCSS_FILES, HORIZON_CONFIG['scss_files'])
def test_remove_panel(self):
dashboard = horizon.get_dashboard("admin")
panel_group = dashboard.get_panel_group('admin')
# Check that the panel is no longer in the configured dashboard.
self.assertNotIn(info_panel.Info,
[p.__class__ for p in dashboard.get_panels()])
# Check that the panel is no longer in the configured panel group.
self.assertNotIn(info_panel.Info,
[p.__class__ for p in panel_group])
def test_default_panel(self):
dashboard = horizon.get_dashboard("admin")
self.assertEqual('defaults', dashboard.default_panel)
def test_panel_not_added(self):
dashboard = horizon.get_dashboard("admin")
self.assertNotIn(nonloading_panel.NonloadingPanel,
[p.__class__ for p in dashboard.get_panels()])
|
apache-2.0
|
Answeror/pypaper
|
pypaper/acm.py
|
1
|
4948
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pyquery import PyQuery as pq
import yapbib.biblist as biblist
class ACM(object):
def __init__(self, id):
self.id = id
@property
def title(self):
if not hasattr(self, 'b'):
self.b = self._full_bibtex()
return self.b.get_items()[0]['title']
@staticmethod
def from_url(url):
from urlparse import urlparse, parse_qs
words = parse_qs(urlparse(url).query)['id'][0].split('.')
assert len(words) == 2
return ACM(id=words[1])
#import re
#try:
#content = urlread(url)
#return ACM(id=re.search(r"document.cookie = 'picked=' \+ '(\d+)'", content).group(1))
#except:
#print(url)
#return None
@staticmethod
def from_title(title):
from urllib import urlencode
url = 'http://dl.acm.org/results.cfm'
d = pq(urlread(url + '?' + urlencode({'query': title})))
return ACM.from_url(d('a.medium-text').eq(0).attr('href'))
@staticmethod
def from_bibtex(f):
b = biblist.BibList()
ret = b.import_bibtex(f)
assert ret
return [ACM.from_title(it['title']) for it in b.get_items()]
def export_bibtex(self, f):
b = self._full_bibtex()
b.export_bibtex(f)
def _full_bibtex(self):
b = self._original_bibtex()
it = b.get_items()[0]
it['abstract'] = self._abstract()
return b
def _original_bibtex(self):
TEMPLATE = 'http://dl.acm.org/exportformats.cfm?id=%s&expformat=bibtex&_cf_containerId=theformats_body&_cf_nodebug=true&_cf_nocache=true&_cf_clientid=142656B43EEEE8D6E34FC208DBFCC647&_cf_rc=3'
url = TEMPLATE % self.id
d = pq(urlread(url))
content = d('pre').text()
from StringIO import StringIO
f = StringIO(content)
b = biblist.BibList()
ret = b.import_bibtex(f)
assert ret, content
return b
def _abstract(self):
TEMPLATE = 'http://dl.acm.org/tab_abstract.cfm?id=%s&usebody=tabbody&cfid=216938597&cftoken=33552307&_cf_containerId=abstract&_cf_nodebug=true&_cf_nocache=true&_cf_clientid=142656B43EEEE8D6E34FC208DBFCC647&_cf_rc=0'
url = TEMPLATE % self.id
d = pq(urlread(url))
return d.text()
def download_pdf(self):
TEMPLATE = 'http://dl.acm.org/ft_gateway.cfm?id=%s&ftid=723552&dwn=1&CFID=216938597&CFTOKEN=33552307'
url = TEMPLATE % self.id
content = urlread(url)
filename = escape(self.title) + '.pdf'
import os
if not os.path.exists(filename):
with open(filename, 'wb') as f:
f.write(content)
def escape(name):
#import string
#valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
#return ''.join([ch if ch in valid_chars else ' ' for ch in name])
from gn import Gn
gn = Gn()
return gn(name)
def urlread(url):
import urllib2
import cookielib
hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'}
req = urllib2.Request(url, headers=hdr)
page = urllib2.urlopen(req)
return page.read()
def from_clipboard():
import win32clipboard
win32clipboard.OpenClipboard()
data = win32clipboard.GetClipboardData()
win32clipboard.CloseClipboard()
return data
def test_download():
bib = ACM.from_url('http://dl.acm.org/citation.cfm?id=1672308.1672326&coll=DL&dl=ACM&CFID=216938597&CFTOKEN=33552307')
bib.download_pdf()
def test_from_url():
bib = ACM.from_url('http://dl.acm.org/citation.cfm?id=1672308.1672326&coll=DL&dl=ACM&CFID=216938597&CFTOKEN=33552307')
print(bib.id)
def test_from_title():
bib = ACM.from_title('Applications of mobile activity recognition')
print(bib.id)
def get_params():
import sys
return sys.argv[1] if len(sys.argv) > 1 else from_clipboard()
def download_bibtex(arg):
bib = ACM.from_url(arg)
#from StringIO import StringIO
#f = StringIO()
bib.export_bibtex('out.bib')
#print(f.getvalue())
def download_pdf(arg):
import time
bibs = ACM.from_bibtex(arg)
print('bibs loaded')
for bib in bibs:
for i in range(10):
try:
print(bib.title)
bib.download_pdf()
time.sleep(10)
except:
print('failed')
else:
print('done')
break
if __name__ == '__main__':
arg = get_params()
if arg.endswith('.bib'):
download_pdf(arg)
else:
download_bibtex(arg)
|
mit
|
timoschwarzer/blendworks
|
BlendWorks Server/python/Lib/_sitebuiltins.py
|
5
|
2896
|
"""
The objects used by the site module to add custom builtins.
"""
# Those objects are almost immortal and they keep a reference to their module
# globals. Defining them in the site module would keep too many references
# alive.
# Note this means this module should also avoid keep things alive in its
# globals.
import sys
class Quitter(object):
def __init__(self, name, eof):
self.name = name
self.eof = eof
def __repr__(self):
return 'Use %s() or %s to exit' % (self.name, self.eof)
def __call__(self, code=None):
# Shells like IDLE catch the SystemExit, but listen when their
# stdin wrapper is closed.
try:
sys.stdin.close()
except:
pass
raise SystemExit(code)
class _Printer(object):
"""interactive prompt objects for printing the license text, a list of
contributors and the copyright notice."""
MAXLINES = 23
def __init__(self, name, data, files=(), dirs=()):
import os
self.__name = name
self.__data = data
self.__lines = None
self.__filenames = [os.path.join(dir, filename)
for dir in dirs
for filename in files]
def __setup(self):
if self.__lines:
return
data = None
for filename in self.__filenames:
try:
with open(filename, "r") as fp:
data = fp.read()
break
except OSError:
pass
if not data:
data = self.__data
self.__lines = data.split('\n')
self.__linecnt = len(self.__lines)
def __repr__(self):
self.__setup()
if len(self.__lines) <= self.MAXLINES:
return "\n".join(self.__lines)
else:
return "Type %s() to see the full %s text" % ((self.__name,)*2)
def __call__(self):
self.__setup()
prompt = 'Hit Return for more, or q (and Return) to quit: '
lineno = 0
while 1:
try:
for i in range(lineno, lineno + self.MAXLINES):
print(self.__lines[i])
except IndexError:
break
else:
lineno += self.MAXLINES
key = None
while key is None:
key = input(prompt)
if key not in ('', 'q'):
key = None
if key == 'q':
break
class _Helper(object):
"""Define the builtin 'help'.
This is a wrapper around pydoc.help (with a twist).
"""
def __repr__(self):
return "Type help() for interactive help, " \
"or help(object) for help about object."
def __call__(self, *args, **kwds):
import pydoc
return pydoc.help(*args, **kwds)
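# Illustrative sketch (simplified; not part of this module): the site module
# installs these objects as builtins, roughly:
#
#   import builtins
#   builtins.quit = Quitter('quit', 'Ctrl-D (i.e. EOF)')
#   builtins.exit = Quitter('exit', 'Ctrl-D (i.e. EOF)')
#   builtins.help = _Helper()
#   builtins.copyright = _Printer('copyright', sys.copyright)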
|
gpl-2.0
|
palimadra/namebench
|
libnamebench/util_test.py
|
175
|
1787
|
#!/usr/bin/env python
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for NameBench and basic methods."""
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import datetime
import util
import unittest
class TestBasicMethods(unittest.TestCase):
def testTimeDeltaToMilliseconds(self):
delta = datetime.timedelta(days=1)
self.assertEqual(util.TimeDeltaToMilliseconds(delta), 86400000)
delta = datetime.timedelta(0, 3, 248193)
self.assertEqual(util.TimeDeltaToMilliseconds(delta),
3248.1930000000002)
def testCalculateListAverage(self):
self.assertEqual(util.CalculateListAverage([3, 2, 2]),
2.3333333333333335)
def testDrawTextBar(self):
self.assertEqual(util.DrawTextBar(1, 10, max_width=10), '#')
self.assertEqual(util.DrawTextBar(5, 10, max_width=10), '#####')
self.assertEqual(util.DrawTextBar(5, 5, max_width=5), '#####')
# Make sure to draw at least something!
self.assertEqual(util.DrawTextBar(0.05, 10, max_width=10), '#')
def testInternalNameServers(self):
self.assertTrue(len(util.InternalNameServers()) > 0)
self.assertTrue(len(util.InternalNameServers()) < 5)
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
likaiwalkman/elasticsearch
|
dev-tools/prepare_release_create_release_version.py
|
9
|
6411
|
# Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# Prepare a release
#
# 1. Update the Version.java to remove the snapshot bit
# 2. Remove the -SNAPSHOT suffix in all pom.xml files
#
# USAGE:
#
# python3 ./dev-tools/prepare-release.py
#
# Note: Ensure the script is run from the root directory
#
import fnmatch
import subprocess
import tempfile
import re
import os
import shutil
VERSION_FILE = 'core/src/main/java/org/elasticsearch/Version.java'
POM_FILE = 'pom.xml'
def run(command):
if os.system('%s' % (command)):
raise RuntimeError(' FAILED: %s' % (command))
def ensure_checkout_is_clean():
# Make sure no local mods:
s = subprocess.check_output('git diff --shortstat', shell=True)
if len(s) > 0:
raise RuntimeError('git diff --shortstat is non-empty: got:\n%s' % s)
# Make sure no untracked files:
s = subprocess.check_output('git status', shell=True).decode('utf-8', errors='replace')
if 'Untracked files:' in s:
raise RuntimeError('git status shows untracked files: got:\n%s' % s)
# Make sure we have all changes from origin:
if 'is behind' in s:
raise RuntimeError('git status shows not all changes pulled from origin; try running "git pull origin" in this branch: got:\n%s' % (s))
# Make sure we no local unpushed changes (this is supposed to be a clean area):
if 'is ahead' in s:
raise RuntimeError('git status shows local commits; try running "git fetch origin", "git checkout ", "git reset --hard origin/" in this branch: got:\n%s' % (s))
# Reads the given file and applies the
# callback to it. If the callback changed
# a line the given file is replaced with
# the modified input.
def process_file(file_path, line_callback):
fh, abs_path = tempfile.mkstemp()
modified = False
with open(abs_path,'w', encoding='utf-8') as new_file:
with open(file_path, encoding='utf-8') as old_file:
for line in old_file:
new_line = line_callback(line)
modified = modified or (new_line != line)
new_file.write(new_line)
os.close(fh)
if modified:
#Remove original file
os.remove(file_path)
#Move new file
shutil.move(abs_path, file_path)
return True
else:
# nothing to do - just remove the tmp file
os.remove(abs_path)
return False
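# Illustrative sketch (assumption, not part of the original script): a callback
# passed to process_file() maps one input line to a (possibly modified) line,
# e.g. stripping a -SNAPSHOT suffix:
#
#   def drop_snapshot(line):
#       return line.replace('-SNAPSHOT</version>', '</version>')
#
#   process_file('pom.xml', drop_snapshot)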
# Moves the Version.java file from a snapshot to a release
def remove_version_snapshot(version_file, release):
# 1.0.0.Beta1 -> 1_0_0_Beta1
release = release.replace('.', '_')
release = release.replace('-', '_')
pattern = 'new Version(V_%s_ID, true' % (release)
replacement = 'new Version(V_%s_ID, false' % (release)
def callback(line):
return line.replace(pattern, replacement)
processed = process_file(version_file, callback)
if not processed:
raise RuntimeError('failed to remove snapshot version for %s' % (release))
# Checks the pom.xml for the release version.
# This method fails if the pom file has no SNAPSHOT version set, i.e.
# if the version is already a release version we fail.
# Returns the next version string, i.e. 0.90.7
def find_release_version():
with open('pom.xml', encoding='utf-8') as file:
for line in file:
match = re.search(r'<version>(.+)-SNAPSHOT</version>', line)
if match:
return match.group(1)
raise RuntimeError('Could not find release version in branch')
if __name__ == "__main__":
release_version = find_release_version()
print('*** Preparing release version: [%s]' % release_version)
ensure_checkout_is_clean()
run('cd dev-tools && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
run('cd rest-api-spec && mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
run('mvn versions:set -DnewVersion=%s -DgenerateBackupPoms=false' % (release_version))
remove_version_snapshot(VERSION_FILE, release_version)
print('*** Done removing snapshot version. DO NOT COMMIT THIS, WHEN CREATING A RELEASE CANDIDATE.')
shortHash = subprocess.check_output('git log --pretty=format:"%h" -n 1', shell=True).decode('utf-8')
localRepo = '/tmp/elasticsearch-%s-%s' % (release_version, shortHash)
localRepoElasticsearch = localRepo + '/org/elasticsearch'
print('')
print('*** To create a release candidate run: ')
print(' mvn clean install deploy -Prelease -DskipTests -Dgpg.keyname="D88E42B4" -Dpackaging.rpm.rpmbuild=/usr/bin/rpmbuild -Drpm.sign=true -Dmaven.repo.local=%s -Dno.commit.pattern="\\bno(n|)commit\\b" -Dforbidden.test.signatures=""' % (localRepo))
print(' 1. Remove all _remote.repositories: find %s -name _remote.repositories -exec rm {} \;' % (localRepoElasticsearch))
print(' 2. Rename all maven metadata files: for i in $(find %s -name "maven-metadata-local.xml*") ; do mv "$i" "${i/-local/}" ; done' % (localRepoElasticsearch))
print(' 3. Sync %s into S3 bucket' % (localRepoElasticsearch))
print (' s3cmd sync %s s3://download.elasticsearch.org/elasticsearch/staging/elasticsearch-%s-%s/maven/org/' % (localRepoElasticsearch, release_version, shortHash))
print(' 4. Create repositories: ')
print (' export S3_BUCKET_SYNC_TO="download.elasticsearch.org/elasticsearch/staging/elasticsearch-%s-%s/repos"' % (release_version, shortHash))
print (' export S3_BUCKET_SYNC_FROM="$S3_BUCKET_SYNC_TO"')
print(' dev-tools/build_repositories.sh %s' % (release_version))
print('')
print('NOTE: the above mvn command will prompt you several times for the GPG passphrase of the key you specified; you can alternatively pass it via -Dgpg.passphrase=yourPassPhrase')
print('NOTE: Running s3cmd might require you to create a config file with your credentials, if s3cmd does not support supplying them via the command line!')
|
apache-2.0
|
guokeno0/vitess
|
py/vtdb/vtgate_client.py
|
1
|
13325
|
# Copyright 2015 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
"""This module defines the vtgate client interface.
"""
from vtdb import vtgate_cursor
# mapping from protocol to python class.
vtgate_client_conn_classes = dict()
def register_conn_class(protocol, c):
"""Used by implementations to register themselves.
Args:
protocol: short string to document the protocol.
c: class to register.
"""
vtgate_client_conn_classes[protocol] = c
def connect(protocol, vtgate_addrs, timeout, *pargs, **kargs):
"""connect will return a dialed VTGateClient connection to a vtgate server.
FIXME(alainjobart): exceptions raised are not consistent.
Args:
protocol: the registered protocol to use.
vtgate_addrs: single or multiple vtgate server addresses to connect to.
Which address is actually used depends on the load balancing
capabilities of the underlying protocol used.
timeout: connection timeout, float in seconds.
*pargs: passed to the registered protocol __init__ method.
**kargs: passed to the registered protocol __init__ method.
Returns:
A dialed VTGateClient.
Raises:
dbexceptions.OperationalError: if we are unable to establish the connection
(for instance, no available instance).
dbexceptions.Error: if vtgate_addrs have the wrong type.
ValueError: If the protocol is unknown, or vtgate_addrs are malformed.
"""
if protocol not in vtgate_client_conn_classes:
raise ValueError('Unknown vtgate_client protocol', protocol)
conn = vtgate_client_conn_classes[protocol](
vtgate_addrs, timeout, *pargs, **kargs)
conn.dial()
return conn
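# Illustrative sketch (assumption, not part of this module): a typical caller
# dials a registered protocol and drives a cursor over the returned connection.
# The protocol name and address below are made up for illustration; extra
# cursor() arguments are passed straight to the cursor constructor.
#
#   conn = connect('grpc', ['localhost:15991'], 30.0)
#   cursor = conn.cursor()
#   conn.begin()
#   cursor.execute('select 1 from dual', {})
#   conn.commit()
#   conn.close()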
# Note: Eventually, this object will be replaced by a proto3 CallerID
# object when all vitess customers have migrated to proto3.
class CallerID(object):
"""An object with principal, component, and subcomponent fields."""
def __init__(self, principal=None, component=None, subcomponent=None):
self.principal = principal
self.component = component
self.subcomponent = subcomponent
class VTGateClient(object):
"""VTGateClient is the interface for the vtgate client implementations.
All implementations must implement all these methods.
If something goes wrong with the connection, this object will be thrown out.
FIXME(alainjobart) transactional state (the Session object) is currently
maintained by this object. It should be maintained by the cursor, and just
returned / passed in with every method that makes sense.
"""
def __init__(self, addr, timeout):
"""Initialize a vtgate connection.
Args:
addr: server address. Can be protocol dependent.
timeout: connection timeout (float, in seconds).
"""
self.addr = addr
self.timeout = timeout
# self.session is used by vtgate_utils.exponential_backoff_retry.
# implementations should use it to store the session object.
self.session = None
def dial(self):
"""Dial to the server.
If successful, call close() to close the connection.
"""
raise NotImplementedError('Child class needs to implement this')
def close(self):
"""Close the connection.
This object may be re-used again by calling dial().
"""
raise NotImplementedError('Child class needs to implement this')
def is_closed(self):
"""Checks the connection status.
Returns:
True if this connection is closed.
"""
raise NotImplementedError('Child class needs to implement this')
def cursor(self, *pargs, **kwargs):
"""Creates a cursor instance associated with this connection.
Args:
*pargs: passed to the cursor constructor.
**kwargs: passed to the cursor constructor.
Returns:
A new cursor to use on this connection.
"""
cursorclass = kwargs.pop('cursorclass', None) or vtgate_cursor.VTGateCursor
return cursorclass(self, *pargs, **kwargs)
def begin(self, effective_caller_id=None):
"""Starts a transaction.
FIXME(alainjobart): instead of storing the Session as member variable,
should return it and let the cursor store it.
Args:
effective_caller_id: CallerID Object.
Raises:
dbexceptions.TimeoutError: for connection timeout.
dbexceptions.TransientError: the server is overloaded, and this query
is asked to back off.
dbexceptions.IntegrityError: integrity of an index would not be
guaranteed with this statement.
dbexceptions.DatabaseError: generic database error.
dbexceptions.ProgrammingError: the supplied statements are invalid,
this is probably an error in the code.
dbexceptions.FatalError: this query should not be retried.
"""
raise NotImplementedError('Child class needs to implement this')
def commit(self):
"""Commits the current transaction.
FIXME(alainjobart): should take the session in.
Raises:
dbexceptions.TimeoutError: for connection timeout.
dbexceptions.TransientError: the server is overloaded, and this query
is asked to back off.
dbexceptions.IntegrityError: integrity of an index would not be
guaranteed with this statement.
dbexceptions.DatabaseError: generic database error.
dbexceptions.ProgrammingError: the supplied statements are invalid,
this is probably an error in the code.
dbexceptions.FatalError: this query should not be retried.
"""
raise NotImplementedError('Child class needs to implement this')
def rollback(self):
"""Rolls the current transaction back.
FIXME(alainjobart): should take the session in.
Raises:
dbexceptions.TimeoutError: for connection timeout.
dbexceptions.TransientError: the server is overloaded, and this query
is asked to back off.
dbexceptions.IntegrityError: integrity of an index would not be
guaranteed with this statement.
dbexceptions.DatabaseError: generic database error.
dbexceptions.ProgrammingError: the supplied statements are invalid,
this is probably an error in the code.
dbexceptions.FatalError: this query should not be retried.
"""
raise NotImplementedError('Child class needs to implement this')
def _execute(self, sql, bind_variables, tablet_type,
keyspace_name=None,
shards=None,
keyspace_ids=None,
keyranges=None,
entity_keyspace_id_map=None, entity_column_name=None,
not_in_transaction=False, effective_caller_id=None, **kwargs):
"""Executes the given sql.
FIXME(alainjobart): should take the session in.
FIXME(alainjobart): implementations have keyspace before tablet_type!
Args:
sql: query to execute.
bind_variables: map of bind variables for the query.
tablet_type: the (string) version of the tablet type.
keyspace_name: if specified, the keyspace to send the query to.
Required if any of the routing parameters is used.
Not required only if using vtgate v3 API.
shards: if specified, use this list of shards names to route the query.
Incompatible with keyspace_ids, keyranges, entity_keyspace_id_map,
entity_column_name.
Requires keyspace.
keyspace_ids: if specified, use this list to route the query.
Incompatible with shards, keyranges, entity_keyspace_id_map,
entity_column_name.
Requires keyspace.
keyranges: if specified, use this list to route the query.
Incompatible with shards, keyspace_ids, entity_keyspace_id_map,
entity_column_name.
Requires keyspace.
entity_keyspace_id_map: if specified, use this map to route the query.
Incompatible with shards, keyspace_ids, keyranges.
Requires keyspace, entity_column_name.
entity_column_name: if specified, use this value to route the query.
Incompatible with shards, keyspace_ids, keyranges.
Requires keyspace, entity_keyspace_id_map.
not_in_transaction: force this execute to be outside the current
transaction, if any.
effective_caller_id: CallerID object.
**kwargs: implementation specific parameters.
Returns:
results: list of rows.
rowcount: how many rows were affected.
lastrowid: auto-increment value for the last row inserted.
fields: describes the field names and types.
Raises:
dbexceptions.TimeoutError: for connection timeout.
dbexceptions.TransientError: the server is overloaded, and this query
is asked to back off.
dbexceptions.IntegrityError: integrity of an index would not be
guaranteed with this statement.
dbexceptions.DatabaseError: generic database error.
dbexceptions.ProgrammingError: the supplied statements are invalid,
this is probably an error in the code.
dbexceptions.FatalError: this query should not be retried.
"""
raise NotImplementedError('Child class needs to implement this')
def _execute_batch(
self, sql_list, bind_variables_list, tablet_type,
keyspace_list=None, shards_list=None, keyspace_ids_list=None,
as_transaction=False, effective_caller_id=None, **kwargs):
"""Executes a list of sql queries.
These follow the same routing rules as _execute.
FIXME(alainjobart): should take the session in.
Args:
sql_list: list of SQL queries to execute.
bind_variables_list: bind variables to associated with each query.
tablet_type: the (string) version of the tablet type.
keyspace_list: if specified, the keyspaces to send the queries to.
Required if any of the routing parameters is used.
Not required only if using vtgate v3 API.
shards_list: if specified, use this list of shards names (per sql query)
to route each query.
Incompatible with keyspace_ids_list.
Requires keyspace_list.
keyspace_ids_list: if specified, use this list of keyspace_ids (per sql
query) to route each query.
Incompatible with shards_list.
Requires keyspace_list.
as_transaction: starts and commits a transaction around the statements.
effective_caller_id: CallerID object.
**kwargs: implementation specific parameters.
Returns:
results: an array of (results, rowcount, lastrowid, fields) tuples,
one for each query.
Raises:
dbexceptions.TimeoutError: for connection timeout.
dbexceptions.TransientError: the server is overloaded, and this query
is asked to back off.
dbexceptions.IntegrityError: integrity of an index would not be
guaranteed with this statement.
dbexceptions.DatabaseError: generic database error.
dbexceptions.ProgrammingError: the supplied statements are invalid,
this is probably an error in the code.
dbexceptions.FatalError: this query should not be retried.
"""
raise NotImplementedError('Child class needs to implement this')
def _stream_execute(
self, sql, bind_variables, tablet_type, keyspace=None, shards=None,
keyspace_ids=None, keyranges=None, effective_caller_id=None, **kwargs):
"""Executes the given sql, in streaming mode.
FIXME(alainjobart): the return values are weird (historical reasons)
and unused for now. We should use them, and not store the current
streaming status in the connection, but in the cursor.
Args:
sql: query to execute.
bind_variables: map of bind variables for the query.
tablet_type: the (string) version of the tablet type.
keyspace: if specified, the keyspace to send the query to.
Required if any of the routing parameters is used.
Not required only if using vtgate v3 API.
shards: if specified, use this list of shards names to route the query.
Incompatible with keyspace_ids, keyranges.
Requires keyspace.
keyspace_ids: if specified, use this list to route the query.
Incompatible with shards, keyranges.
Requires keyspace.
keyranges: if specified, use this list to route the query.
Incompatible with shards, keyspace_ids.
Requires keyspace.
effective_caller_id: CallerID object.
**kwargs: implementation specific parameters.
Returns:
A (row generator, fields) pair.
Raises:
dbexceptions.TimeoutError: for connection timeout.
dbexceptions.TransientError: the server is overloaded, and this query
is asked to back off.
dbexceptions.IntegrityError: integrity of an index would not be
guaranteed with this statement.
dbexceptions.DatabaseError: generic database error.
dbexceptions.ProgrammingError: the supplied statements are invalid,
this is probably an error in the code.
dbexceptions.FatalError: this query should not be retried.
"""
raise NotImplementedError('Child class needs to implement this')
def get_srv_keyspace(self, keyspace):
"""Returns a SrvKeyspace object.
Args:
keyspace: name of the keyspace to retrieve.
Returns:
srv_keyspace: a keyspace.Keyspace object.
Raises:
TBD
"""
raise NotImplementedError('Child class needs to implement this')
|
bsd-3-clause
|
Gagaro/django
|
django/conf/locale/de/formats.py
|
504
|
1100
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
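# Illustrative usage sketch (not part of the original file): with USE_L10N
# enabled, these formats are resolved through django.utils.formats, e.g.:
#
#   import datetime
#   from django.utils import formats, translation
#   with translation.override('de'):
#       formats.date_format(datetime.date(2006, 10, 25))  # '25. Oktober 2006'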
|
bsd-3-clause
|
eleonrk/SickRage
|
lib/github/tests/Issue158.py
|
9
|
2210
|
# -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. #
# http://pygithub.github.io/PyGithub/v1/index.html #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
import github
class Issue158(Framework.TestCase): # https://github.com/jacquev6/PyGithub/issues/158
# Warning: I don't have a secret key, so the requests for this test are forged
def testPaginationWithSecretKeyAuthentication(self):
g = github.Github(client_id=self.client_id, client_secret=self.client_secret)
self.assertListKeyEqual(g.get_organization("BeaverSoftware").get_repos("public"), lambda r: r.name, ["FatherBeaver", "PyGithub"])
|
gpl-3.0
|
wbg-optronix-lab/emergence-lab
|
project_management/urls/literature.py
|
2
|
1717
|
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
import project_management.views as views
urlpatterns = [
url(r'^search$',
views.MendeleyLibrarySearchView.as_view(),
name="mendeley_search"),
url(r'^$',
views.LiteratureLandingView.as_view(),
name="literature_landing"),
url(r'^add/external/milestone/(?P<milestone>[0-9]+)/(?P<external_id>[0-9a-f\w-]{36,})$',
views.AddMendeleyObjectView.as_view(),
name="add_mendeley_object_milestone"),
url(r'^add/external/investigation/(?P<investigation>[0-9]+)/(?P<external_id>[0-9a-f\w-]{36,})$',
views.AddMendeleyObjectView.as_view(),
name="add_mendeley_object_investigation"),
url(r'^add/existing/milestone/(?P<milestone>[0-9]+)/(?P<pk>[0-9]+)$',
views.AddMendeleyObjectView.as_view(),
name="add_literature_object_milestone"),
url(r'^add/existing/investigation/(?P<investigation>[0-9]+)/(?P<pk>[0-9]+)$',
views.AddMendeleyObjectView.as_view(),
name="add_literature_object_investigation"),
url(r'^create$',
views.CreateLiteratureObjectView.as_view(),
name="literature_create"),
url(r'^detail/redirect/(?P<pk>[0-9]+)$',
views.LiteratureDetailRedirector.as_view(),
name="literature_detail_redirector"),
url(r'^detail/internal/(?P<pk>[0-9]+)$',
views.LiteratureDetailView.as_view(),
name="literature_detail"),
url(r'^detail/external/(?P<external_id>[0-9a-f\w-]{36,})$',
views.MendeleyDetailView.as_view(),
name="mendeley_detail"),
url(r'^error$',
views.MendeleySearchErrorView.as_view(),
name="mendeley_error"),
]
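# Illustrative sketch (assumption, not part of this module): the named patterns
# above can be reversed in views or templates, possibly behind a URL namespace
# depending on how this urlconf is included, e.g.:
#
#   from django.core.urlresolvers import reverse
#   reverse('literature_detail', kwargs={'pk': 42})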
|
mit
|
abhishekarora12/ansible
|
lib/ansible/utils/module_docs_fragments/openstack.py
|
97
|
4021
|
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# Standard openstack documentation fragment
DOCUMENTATION = '''
options:
cloud:
description:
- Named cloud to operate against. Provides default values for I(auth) and
I(auth_type). This parameter is not needed if I(auth) is provided or if
OpenStack OS_* environment variables are present.
required: false
auth:
description:
- Dictionary containing auth information as needed by the cloud's auth
plugin strategy. For the default I(password) plugin, this would contain
I(auth_url), I(username), I(password), I(project_name) and any
information about domains if the cloud supports them. For other plugins,
this param will need to contain whatever parameters that auth plugin
requires. This parameter is not needed if a named cloud is provided or
OpenStack OS_* environment variables are present.
required: false
auth_type:
description:
- Name of the auth plugin to use. If the cloud uses something other than
password authentication, the name of the plugin should be indicated here
and the contents of the I(auth) parameter should be updated accordingly.
required: false
default: password
region_name:
description:
- Name of the region.
required: false
availability_zone:
description:
- Name of the availability zone.
required: false
wait:
description:
- Should ansible wait until the requested resource is complete.
required: false
default: "yes"
choices: ["yes", "no"]
timeout:
description:
- How long should ansible wait for the requested resource.
required: false
default: 180
api_timeout:
description:
- How long should the socket layer wait before timing out for API calls.
If this is omitted, nothing will be passed to the requests library.
required: false
default: None
validate_certs:
description:
- Whether or not SSL API requests should be verified.
required: false
default: True
aliases: ['verify']
cacert:
description:
- A path to a CA Cert bundle that can be used as part of verifying
SSL API requests.
required: false
default: None
cert:
description:
- A path to a client certificate to use as part of the SSL transaction
required: false
default: None
key:
description:
- A path to a client key to use as part of the SSL transaction
required: false
default: None
endpoint_type:
description:
- Endpoint URL type to fetch from the service catalog.
choices: [public, internal, admin]
required: false
default: public
requirements:
- python >= 2.7
- shade
notes:
- The standard OpenStack environment variables, such as C(OS_USERNAME)
    may be used instead of providing explicit values.
- Auth information is driven by os-client-config, which means that values
can come from a yaml config file in /etc/ansible/openstack.yaml,
/etc/openstack/clouds.yaml or ~/.config/openstack/clouds.yaml, then from
standard environment variables, then finally by explicit parameters in
plays. More information can be found at
U(http://docs.openstack.org/developer/os-client-config)
'''
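# Illustrative sketch (assumption, not part of this fragment): the lookup
# precedence described above comes from os-client-config/shade, which these
# modules use under the hood; the same resolution can be reproduced directly:
#
#   import shade
#   cloud = shade.openstack_cloud(cloud='mycloud')  # clouds.yaml / OS_* env vars
#   cloud.list_servers()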
|
gpl-3.0
|
valtandor/easybuild-easyblocks
|
easybuild/easyblocks/e/esmf.py
|
2
|
4293
|
##
# Copyright 2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing ESMF, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
"""
import os
import easybuild.tools.environment as env
import easybuild.tools.toolchain as toolchain
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.framework.easyblock import EasyBlock
from easybuild.framework.easyconfig import BUILD
from easybuild.tools.modules import get_software_root
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import get_shared_lib_ext
class EB_ESMF(ConfigureMake):
"""Support for building/installing ESMF."""
def configure_step(self):
"""Custom configuration procedure for ESMF through environment variables."""
env.setvar('ESMF_DIR', self.cfg['start_dir'])
env.setvar('ESMF_INSTALL_PREFIX', self.installdir)
env.setvar('ESMF_INSTALL_BINDIR', 'bin')
env.setvar('ESMF_INSTALL_LIBDIR', 'lib')
env.setvar('ESMF_INSTALL_MODDIR', 'mod')
# specify compiler
comp_family = self.toolchain.comp_family()
if comp_family in [toolchain.GCC]:
compiler = 'gfortran'
else:
compiler = comp_family.lower()
env.setvar('ESMF_COMPILER', compiler)
# specify MPI communications library
comm = None
mpi_family = self.toolchain.mpi_family()
if mpi_family in [toolchain.MPICH, toolchain.QLOGICMPI]:
# MPICH family for MPICH v3.x, which is MPICH2 compatible
comm = 'mpich2'
else:
comm = mpi_family.lower()
env.setvar('ESMF_COMM', comm)
# specify decent LAPACK lib
env.setvar('ESMF_LAPACK', 'user')
env.setvar('ESMF_LAPACK_LIBS', '%s %s' % (os.getenv('LDFLAGS'), os.getenv('LIBLAPACK_MT')))
# specify netCDF
netcdf = get_software_root('netCDF')
if netcdf:
env.setvar('ESMF_NETCDF', 'user')
netcdf_libs = ['-L%s/lib' % netcdf, '-lnetcdf']
# Fortran
netcdff = get_software_root('netCDF-Fortran')
if netcdff:
netcdf_libs = ["-L%s/lib" % netcdff] + netcdf_libs + ["-lnetcdff"]
else:
netcdf_libs.append('-lnetcdff')
# C++
netcdfcxx = get_software_root('netCDF-C++')
if netcdfcxx:
netcdf_libs = ["-L%s/lib" % netcdfcxx] + netcdf_libs + ["-lnetcdf_c++"]
else:
netcdf_libs.append('-lnetcdf_c++')
env.setvar('ESMF_NETCDF_LIBS', ' '.join(netcdf_libs))
# 'make info' provides useful debug info
cmd = "make info"
run_cmd(cmd, log_all=True, simple=True, log_ok=True)
def sanity_check_step(self):
"""Custom sanity check for ESMF."""
shlib_ext = get_shared_lib_ext()
custom_paths = {
'files':
[os.path.join('bin', x) for x in ['ESMF_Info', 'ESMF_InfoC', 'ESMF_RegridWeightGen', 'ESMF_WebServController']] +
[os.path.join('lib', x) for x in ['libesmf.a', 'libesmf.%s' % shlib_ext]],
'dirs': ['include', 'mod'],
}
super(EB_ESMF, self).sanity_check_step(custom_paths=custom_paths)
|
gpl-2.0
|
CitoEngine/cito_plugin_server
|
cito_plugin_server/settings/base.py
|
1
|
5183
|
"""Copyright 2014 Cyrus Dasadia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import logging
try:
from unipath import Path
except ImportError:
    print 'Please run "pip install Unipath" to install this module.'
sys.exit(1)
PROJECT_ROOT = Path(__file__).ancestor(2)
# PROJECT_ROOT = os.path.realpath(os.path.dirname(__file__))
# sys.path.insert(0, PROJECT_ROOT)
LOG_PATH = PROJECT_ROOT.ancestor(1)
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = PROJECT_ROOT.child('static')
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PROJECT_ROOT.child('static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.contrib.messages.context_processors.messages",
"django.core.context_processors.request",
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'cito_plugin_server.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'cito_plugin_server.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
PROJECT_ROOT.child('templates'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'gunicorn',
'south',
'cito_plugin_server',
'webservice',
)
STATIC_FILES = PROJECT_ROOT.ancestor(1).child('staticfiles')
try:
from .secret_key import *
except ImportError:
print "settings/secret_key.py not found!"
sys.exit(1)
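# Illustrative sketch (assumption, not part of this settings module): the
# imported settings/secret_key.py is expected to define at least a Django
# SECRET_KEY, e.g.:
#
#   SECRET_KEY = 'replace-me-with-a-long-random-string'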
|
apache-2.0
|
lucernae/inasafe
|
safe/plugin.py
|
6
|
43925
|
# coding=utf-8
"""InaSAFE Plugin."""
import sys
import os
from functools import partial
from distutils.version import StrictVersion
# noinspection PyUnresolvedReferences
import qgis # NOQA pylint: disable=unused-import
# Import the PyQt and QGIS libraries
from qgis.core import (
QgsRectangle,
QgsRasterLayer,
QgsMapLayer,
QgsExpression,
QgsProject,
)
# noinspection PyPackageRequirements
from qgis.PyQt.QtCore import QCoreApplication, Qt
# noinspection PyPackageRequirements
from qgis.PyQt.QtWidgets import (
QAction,
QApplication,
QToolButton,
QMenu,
QLineEdit,
QInputDialog
)
from qgis.PyQt.QtGui import QIcon
from safe.common.custom_logging import LOGGER
from safe.utilities.expressions import qgis_expressions
from safe.definitions.versions import inasafe_release_status, inasafe_version
from safe.common.exceptions import (
KeywordNotFoundError,
NoKeywordsFoundError,
MetadataReadError,
)
from safe.common.signals import send_static_message
from safe.utilities.resources import resources_path
from safe.utilities.gis import is_raster_layer
from safe.definitions.layer_purposes import (
layer_purpose_exposure, layer_purpose_hazard
)
from safe.definitions.utilities import get_field_groups
from safe.utilities.i18n import tr
from safe.utilities.keyword_io import KeywordIO
from safe.utilities.utilities import is_keyword_version_supported
from safe.utilities.settings import setting, set_setting
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "info@inasafe.org"
__revision__ = '$Format:%H$'
class Plugin():
"""The QGIS interface implementation for the InaSAFE plugin.
This class acts as the 'glue' between QGIS and our custom logic.
It creates a toolbar and menu bar entry and launches the InaSAFE user
interface if these are activated.
"""
def __init__(self, iface):
"""Class constructor.
On instantiation, the plugin instance will be assigned a copy
of the QGIS iface object which will allow this plugin to access and
manipulate the running QGIS instance that spawned it.
        :param iface: Quantum GIS iface instance. This instance is
automatically passed to the plugin by QGIS when it loads the
plugin.
:type iface: QgisAppInterface
"""
# Save reference to the QGIS interface
self.iface = iface
self.dock_widget = None
# Actions
self.action_add_layers = None
self.action_add_osm_layer = None
self.action_add_petabencana_layer = None
self.action_batch_runner = None
self.action_dock = None
self.action_extent_selector = None
self.action_field_mapping = None
self.action_multi_exposure = None
self.action_function_centric_wizard = None
self.action_import_dialog = None
self.action_keywords_wizard = None
self.action_minimum_needs = None
self.action_minimum_needs_config = None
self.action_multi_buffer = None
self.action_options = None
self.action_run_tests = None
self.action_save_scenario = None
self.action_shake_converter = None
self.action_show_definitions = None
self.action_toggle_rubberbands = None
self.action_metadata_converter = None
self.translator = None
self.toolbar = None
self.wizard = None
self.actions = [] # list of all QActions we create for InaSAFE
self.message_bar_item = None
# Flag indicating if toolbar should show only common icons or not
self.full_toolbar = False
# print self.tr('InaSAFE')
# For enable/disable the keyword editor icon
self.iface.currentLayerChanged.connect(self.layer_changed)
developer_mode = setting(
'developer_mode', False, expected_type=bool)
self.hide_developer_buttons = (
inasafe_release_status == 'final' and not developer_mode)
# noinspection PyMethodMayBeStatic
def tr(self, message):
"""Get the translation for a string using Qt translation API.
We implement this ourselves since we do not inherit QObject.
:param message: String for translation.
:type message: str, QString
:returns: Translated version of message.
:rtype: QString
"""
# noinspection PyTypeChecker,PyArgumentList,PyCallByClass
return QCoreApplication.translate('Plugin', message)
def add_action(self, action, add_to_toolbar=True, add_to_legend=False):
"""Add a toolbar icon to the InaSAFE toolbar.
:param action: The action that should be added to the toolbar.
:type action: QAction
:param add_to_toolbar: Flag indicating whether the action should also
be added to the InaSAFE toolbar. Defaults to True.
:type add_to_toolbar: bool
:param add_to_legend: Flag indicating whether the action should also
be added to the layer legend menu. Default to False.
:type add_to_legend: bool
"""
# store in the class list of actions for easy plugin unloading
self.actions.append(action)
self.iface.addPluginToMenu(self.tr('InaSAFE'), action)
if add_to_toolbar:
self.toolbar.addAction(action)
if add_to_legend:
# The id is the action name without spaces, tabs ...
self.iface.addCustomActionForLayerType(
action,
self.tr('InaSAFE'),
QgsMapLayer.VectorLayer,
True)
self.iface.addCustomActionForLayerType(
action,
self.tr('InaSAFE'),
QgsMapLayer.RasterLayer,
True)
def _create_dock_toggle_action(self):
"""Create action for plugin dockable window (show/hide)."""
# pylint: disable=W0201
icon = resources_path('img', 'icons', 'icon.svg')
self.action_dock = QAction(
QIcon(icon),
self.tr('Toggle InaSAFE Dock'), self.iface.mainWindow())
self.action_dock.setObjectName('InaSAFEDockToggle')
self.action_dock.setStatusTip(self.tr(
'Show/hide InaSAFE dock widget'))
self.action_dock.setWhatsThis(self.tr(
'Show/hide InaSAFE dock widget'))
self.action_dock.setCheckable(True)
self.action_dock.setChecked(True)
self.action_dock.triggered.connect(self.toggle_dock_visibility)
self.add_action(self.action_dock)
# --------------------------------------
# Create action for keywords creation wizard
# -------------------------------------
def _create_keywords_wizard_action(self):
"""Create action for keywords creation wizard."""
icon = resources_path('img', 'icons', 'show-keyword-wizard.svg')
self.action_keywords_wizard = QAction(
QIcon(icon),
self.tr('Keywords Creation Wizard'),
self.iface.mainWindow())
self.action_keywords_wizard.setStatusTip(self.tr(
'Open InaSAFE keywords creation wizard'))
self.action_keywords_wizard.setWhatsThis(self.tr(
'Open InaSAFE keywords creation wizard'))
self.action_keywords_wizard.setEnabled(False)
self.action_keywords_wizard.triggered.connect(
self.show_keywords_wizard)
self.add_action(self.action_keywords_wizard, add_to_legend=True)
def _create_analysis_wizard_action(self):
"""Create action for IF-centric wizard."""
icon = resources_path('img', 'icons', 'show-wizard.svg')
self.action_function_centric_wizard = QAction(
QIcon(icon),
self.tr('Impact Function Centric Wizard'),
self.iface.mainWindow())
self.action_function_centric_wizard.setStatusTip(self.tr(
'Open InaSAFE impact function centric wizard'))
self.action_function_centric_wizard.setWhatsThis(self.tr(
'Open InaSAFE impact function centric wizard'))
self.action_function_centric_wizard.setEnabled(True)
self.action_function_centric_wizard.triggered.connect(
self.show_function_centric_wizard)
self.add_action(self.action_function_centric_wizard)
def _create_options_dialog_action(self):
"""Create action for options dialog."""
icon = resources_path('img', 'icons', 'configure-inasafe.svg')
self.action_options = QAction(
QIcon(icon),
self.tr('Options'), self.iface.mainWindow())
self.action_options.setStatusTip(self.tr(
'Open InaSAFE options dialog'))
self.action_options.setWhatsThis(self.tr(
'Open InaSAFE options dialog'))
self.action_options.triggered.connect(self.show_options)
self.add_action(self.action_options, add_to_toolbar=self.full_toolbar)
def _create_minimum_needs_action(self):
"""Create action for minimum needs dialog."""
icon = resources_path('img', 'icons', 'show-minimum-needs.svg')
self.action_minimum_needs = QAction(
QIcon(icon),
self.tr('Minimum Needs Calculator'), self.iface.mainWindow())
self.action_minimum_needs.setStatusTip(self.tr(
'Open InaSAFE minimum needs calculator'))
self.action_minimum_needs.setWhatsThis(self.tr(
'Open InaSAFE minimum needs calculator'))
self.action_minimum_needs.triggered.connect(self.show_minimum_needs)
self.add_action(
self.action_minimum_needs, add_to_toolbar=self.full_toolbar)
def _create_multi_buffer_action(self):
"""Create action for multi buffer dialog."""
icon = resources_path('img', 'icons', 'show-multi-buffer.svg')
self.action_multi_buffer = QAction(
QIcon(icon),
self.tr('Multi Buffer'), self.iface.mainWindow())
self.action_multi_buffer.setStatusTip(self.tr(
'Open InaSAFE multi buffer'))
self.action_multi_buffer.setWhatsThis(self.tr(
'Open InaSAFE multi buffer'))
self.action_multi_buffer.triggered.connect(self.show_multi_buffer)
self.add_action(
self.action_multi_buffer,
add_to_toolbar=self.full_toolbar)
def _create_minimum_needs_options_action(self):
"""Create action for global minimum needs dialog."""
icon = resources_path('img', 'icons', 'show-global-minimum-needs.svg')
self.action_minimum_needs_config = QAction(
QIcon(icon),
self.tr('Minimum Needs Configuration'),
self.iface.mainWindow())
self.action_minimum_needs_config.setStatusTip(self.tr(
'Open InaSAFE minimum needs configuration'))
self.action_minimum_needs_config.setWhatsThis(self.tr(
'Open InaSAFE minimum needs configuration'))
self.action_minimum_needs_config.triggered.connect(
self.show_minimum_needs_configuration)
self.add_action(
self.action_minimum_needs_config, add_to_toolbar=self.full_toolbar)
def _create_shakemap_converter_action(self):
"""Create action for converter dialog."""
icon = resources_path('img', 'icons', 'show-converter-tool.svg')
self.action_shake_converter = QAction(
QIcon(icon),
self.tr('Shakemap Converter'), self.iface.mainWindow())
self.action_shake_converter.setStatusTip(self.tr(
'Open InaSAFE Converter'))
self.action_shake_converter.setWhatsThis(self.tr(
'Open InaSAFE Converter'))
self.action_shake_converter.triggered.connect(
self.show_shakemap_importer)
self.add_action(
self.action_shake_converter, add_to_toolbar=self.full_toolbar)
def _create_batch_runner_action(self):
"""Create action for batch runner dialog."""
icon = resources_path('img', 'icons', 'show-batch-runner.svg')
self.action_batch_runner = QAction(
QIcon(icon),
self.tr('Batch Runner'), self.iface.mainWindow())
self.action_batch_runner.setStatusTip(self.tr(
'Open Batch Runner'))
self.action_batch_runner.setWhatsThis(self.tr(
'Open Batch Runner'))
self.action_batch_runner.triggered.connect(self.show_batch_runner)
self.add_action(
self.action_batch_runner, add_to_toolbar=self.full_toolbar)
def _create_save_scenario_action(self):
"""Create action for save scenario dialog."""
icon = resources_path('img', 'icons', 'save-as-scenario.svg')
self.action_save_scenario = QAction(
QIcon(icon),
self.tr('Save Current Scenario'), self.iface.mainWindow())
message = self.tr('Save current scenario to text file')
self.action_save_scenario.setStatusTip(message)
self.action_save_scenario.setWhatsThis(message)
# noinspection PyUnresolvedReferences
self.action_save_scenario.triggered.connect(self.save_scenario)
self.add_action(
self.action_save_scenario, add_to_toolbar=self.full_toolbar)
def _create_osm_downloader_action(self):
"""Create action for import OSM Dialog."""
icon = resources_path('img', 'icons', 'show-osm-download.svg')
self.action_import_dialog = QAction(
QIcon(icon),
self.tr('OpenStreetMap Downloader'),
self.iface.mainWindow())
self.action_import_dialog.setStatusTip(self.tr(
'OpenStreetMap Downloader'))
self.action_import_dialog.setWhatsThis(self.tr(
'OpenStreetMap Downloader'))
self.action_import_dialog.triggered.connect(self.show_osm_downloader)
self.add_action(self.action_import_dialog, add_to_toolbar=True)
def _create_geonode_uploader_action(self):
"""Create action for Geonode uploader dialog."""
icon = resources_path('img', 'icons', 'geonode.png')
label = tr('Geonode Uploader')
self.action_geonode = QAction(
QIcon(icon), label, self.iface.mainWindow())
self.action_geonode.setStatusTip(label)
self.action_geonode.setWhatsThis(label)
self.action_geonode.triggered.connect(self.show_geonode_uploader)
self.add_action(self.action_geonode, add_to_toolbar=False)
def _create_add_osm_layer_action(self):
"""Create action for import OSM Dialog."""
icon = resources_path('img', 'icons', 'add-osm-tiles-layer.svg')
self.action_add_osm_layer = QAction(
QIcon(icon),
self.tr('Add OpenStreetMap Tile Layer'),
self.iface.mainWindow())
self.action_add_osm_layer.setStatusTip(self.tr(
'Add OpenStreetMap Tile Layer'))
self.action_add_osm_layer.setWhatsThis(self.tr(
'Use this to add an OSM layer to your map. '
'It needs internet access to function.'))
self.action_add_osm_layer.triggered.connect(self.add_osm_layer)
self.add_action(self.action_add_osm_layer, add_to_toolbar=True)
def _create_show_definitions_action(self):
"""Create action for showing definitions / help."""
icon = resources_path('img', 'icons', 'show-inasafe-help.svg')
self.action_show_definitions = QAction(
QIcon(icon),
self.tr('InaSAFE Help'),
self.iface.mainWindow())
self.action_show_definitions.setStatusTip(self.tr(
'Show InaSAFE Help'))
self.action_show_definitions.setWhatsThis(self.tr(
'Use this to show a document describing all InaSAFE concepts.'))
self.action_show_definitions.triggered.connect(
self.show_definitions)
self.add_action(
self.action_show_definitions,
add_to_toolbar=True)
def _create_metadata_converter_action(self):
"""Create action for showing metadata converter dialog."""
icon = resources_path('img', 'icons', 'show-metadata-converter.svg')
self.action_metadata_converter = QAction(
QIcon(icon),
self.tr('InaSAFE Metadata Converter'),
self.iface.mainWindow())
self.action_metadata_converter.setStatusTip(self.tr(
'Convert metadata from version 4.3 to version 3.5.'))
self.action_metadata_converter.setWhatsThis(self.tr(
'Use this tool to convert metadata 4.3 to version 3.5'))
self.action_metadata_converter.triggered.connect(
self.show_metadata_converter)
self.add_action(
self.action_metadata_converter, add_to_toolbar=self.full_toolbar)
def _create_field_mapping_action(self):
"""Create action for showing field mapping dialog."""
icon = resources_path('img', 'icons', 'show-mapping-tool.svg')
self.action_field_mapping = QAction(
QIcon(icon),
self.tr('InaSAFE Field Mapping Tool'),
self.iface.mainWindow())
self.action_field_mapping.setStatusTip(self.tr(
'Assign field mapping to layer.'))
self.action_field_mapping.setWhatsThis(self.tr(
'Use this tool to assign field mapping in layer.'))
self.action_field_mapping.setEnabled(False)
self.action_field_mapping.triggered.connect(self.show_field_mapping)
self.add_action(
self.action_field_mapping, add_to_toolbar=self.full_toolbar)
def _create_multi_exposure_action(self):
"""Create action for showing the multi exposure tool."""
self.action_multi_exposure = QAction(
QIcon(resources_path('img', 'icons', 'show-multi-exposure.svg')),
self.tr('InaSAFE Multi Exposure Tool'),
self.iface.mainWindow())
self.action_multi_exposure.setStatusTip(self.tr(
'Open the multi exposure tool.'))
self.action_multi_exposure.setWhatsThis(self.tr(
'Open the multi exposure tool.'))
self.action_multi_exposure.setEnabled(True)
self.action_multi_exposure.triggered.connect(self.show_multi_exposure)
self.add_action(
self.action_multi_exposure, add_to_toolbar=self.full_toolbar)
def _create_add_petabencana_layer_action(self):
"""Create action for import OSM Dialog."""
icon = resources_path('img', 'icons', 'add-petabencana-layer.svg')
self.action_add_petabencana_layer = QAction(
QIcon(icon),
self.tr('Add PetaBencana Flood Layer'),
self.iface.mainWindow())
self.action_add_petabencana_layer.setStatusTip(self.tr(
'Add PetaBencana Flood Layer'))
self.action_add_petabencana_layer.setWhatsThis(self.tr(
'Use this to add a PetaBencana layer to your map. '
'It needs internet access to function.'))
self.action_add_petabencana_layer.triggered.connect(
self.add_petabencana_layer)
self.add_action(
self.action_add_petabencana_layer,
add_to_toolbar=self.full_toolbar)
def _create_rubber_bands_action(self):
"""Create action for toggling rubber bands."""
icon = resources_path('img', 'icons', 'toggle-rubber-bands.svg')
self.action_toggle_rubberbands = QAction(
QIcon(icon),
self.tr('Toggle Scenario Outlines'), self.iface.mainWindow())
message = self.tr('Toggle rubber bands showing scenario extents.')
self.action_toggle_rubberbands.setStatusTip(message)
self.action_toggle_rubberbands.setWhatsThis(message)
# Set initial state
self.action_toggle_rubberbands.setCheckable(True)
flag = setting('showRubberBands', False, expected_type=bool)
self.action_toggle_rubberbands.setChecked(flag)
# noinspection PyUnresolvedReferences
self.action_toggle_rubberbands.triggered.connect(
self.dock_widget.toggle_rubber_bands)
self.add_action(self.action_toggle_rubberbands)
def _create_analysis_extent_action(self):
"""Create action for analysis extent dialog."""
icon = resources_path('img', 'icons', 'set-extents-tool.svg')
self.action_extent_selector = QAction(
QIcon(icon),
self.tr('Set Analysis Area'),
self.iface.mainWindow())
self.action_extent_selector.setStatusTip(self.tr(
'Set the analysis area for InaSAFE'))
self.action_extent_selector.setWhatsThis(self.tr(
'Set the analysis area for InaSAFE'))
self.action_extent_selector.triggered.connect(
self.show_extent_selector)
self.add_action(self.action_extent_selector)
def _create_test_layers_action(self):
"""Create action for adding layers (developer mode, non final only)."""
if self.hide_developer_buttons:
return
icon = resources_path('img', 'icons', 'add-test-layers.svg')
self.action_add_layers = QAction(
QIcon(icon),
self.tr('Add Test Layers'),
self.iface.mainWindow())
self.action_add_layers.setStatusTip(self.tr(
'Add test layers'))
self.action_add_layers.setWhatsThis(self.tr(
'Add test layers'))
self.action_add_layers.triggered.connect(
self.add_test_layers)
self.add_action(self.action_add_layers)
def _create_run_test_action(self):
"""Create action for running tests (developer mode, non final only)."""
if self.hide_developer_buttons:
return
default_package = str(
setting('testPackage', 'safe', expected_type=str))
msg = self.tr('Run tests in %s' % default_package)
self.test_button = QToolButton()
self.test_button.setMenu(QMenu())
self.test_button.setPopupMode(QToolButton.MenuButtonPopup)
icon = resources_path('img', 'icons', 'run-tests.svg')
self.action_run_tests = QAction(
QIcon(icon), msg, self.iface.mainWindow())
self.action_run_tests.setStatusTip(msg)
self.action_run_tests.setWhatsThis(msg)
self.action_run_tests.triggered.connect(self.run_tests)
self.test_button.menu().addAction(self.action_run_tests)
self.test_button.setDefaultAction(self.action_run_tests)
self.action_select_package = QAction(
QIcon(icon), self.tr('Select package'), self.iface.mainWindow())
self.action_select_package.setStatusTip(self.tr('Select Test Package'))
self.action_select_package.setWhatsThis(self.tr('Select Test Package'))
self.action_select_package.triggered.connect(
self.select_test_package)
self.test_button.menu().addAction(self.action_select_package)
self.toolbar.addWidget(self.test_button)
self.add_action(self.action_run_tests, add_to_toolbar=False)
self.add_action(self.action_select_package, add_to_toolbar=False)
def _create_dock(self):
"""Create dockwidget and tabify it with the legend."""
# Import dock here as it needs to be imported AFTER i18n is set up
from safe.gui.widgets.dock import Dock
self.dock_widget = Dock(self.iface)
self.dock_widget.setObjectName('InaSAFE-Dock')
self.iface.addDockWidget(Qt.RightDockWidgetArea, self.dock_widget)
legend_tab = self.iface.mainWindow().findChild(QApplication, 'Legend')
if legend_tab:
self.iface.mainWindow().tabifyDockWidget(
legend_tab, self.dock_widget)
self.dock_widget.raise_()
# noinspection PyPep8Naming
def initGui(self):
"""Gui initialisation procedure (for QGIS plugin api).
.. note:: Don't change the name of this method from initGui!
This method is called by QGIS and should be used to set up
any graphical user interface elements that should appear in QGIS by
default (i.e. before the user performs any explicit action with the
plugin).
"""
self.toolbar = self.iface.addToolBar('InaSAFE')
self.toolbar.setObjectName('InaSAFEToolBar')
self.dock_widget = None
# Now create the actual dock
self._create_dock()
# And all the menu actions
# Configuration Group
self._create_dock_toggle_action()
self._create_options_dialog_action()
self._create_minimum_needs_options_action()
self._create_analysis_extent_action()
self._create_rubber_bands_action()
self._add_spacer_to_menu()
self._create_keywords_wizard_action()
self._create_analysis_wizard_action()
self._add_spacer_to_menu()
self._create_field_mapping_action()
self._create_multi_exposure_action()
self._create_metadata_converter_action()
self._create_osm_downloader_action()
self._create_add_osm_layer_action()
self._create_add_petabencana_layer_action()
self._create_geonode_uploader_action()
self._create_shakemap_converter_action()
self._create_minimum_needs_action()
self._create_multi_buffer_action()
self._create_test_layers_action()
self._create_run_test_action()
self._add_spacer_to_menu()
self._create_batch_runner_action()
self._create_save_scenario_action()
self._add_spacer_to_menu()
self._create_show_definitions_action()
# Hook up a slot for when the dock is hidden using its close button
# or view-panels
#
self.dock_widget.visibilityChanged.connect(self.toggle_inasafe_action)
# Also deal with the fact that on start of QGIS dock may already be
# hidden.
self.action_dock.setChecked(self.dock_widget.isVisible())
self.iface.initializationCompleted.connect(
partial(self.show_welcome_message)
)
def _add_spacer_to_menu(self):
"""Add a spacer to the menu to separate action groups."""
separator = QAction(self.iface.mainWindow())
separator.setSeparator(True)
self.iface.addPluginToMenu(self.tr('InaSAFE'), separator)
@staticmethod
def clear_modules():
"""Unload inasafe functions and try to return QGIS to before InaSAFE.
.. todo:: I think this function can be removed. TS.
"""
# next lets force remove any inasafe related modules
modules = []
for module in sys.modules:
if 'inasafe' in module:
# Check if it is really one of our modules i.e. exists in the
# plugin directory
tokens = module.split('.')
path = ''
for myToken in tokens:
path += os.path.sep + myToken
parent = os.path.abspath(os.path.join(
__file__, os.path.pardir, os.path.pardir))
full_path = os.path.join(parent, path + '.py')
if os.path.exists(os.path.abspath(full_path)):
LOGGER.debug('Removing: %s' % module)
modules.append(module)
for module in modules:
del (sys.modules[module])
for module in sys.modules:
if 'inasafe' in module:
print(module)
# Lets also clean up all the path additions that were made
package_path = os.path.abspath(os.path.join(
os.path.dirname(__file__), os.path.pardir))
LOGGER.debug('Path to remove: %s' % package_path)
# We use a list comprehension to ensure duplicate entries are removed
LOGGER.debug(sys.path)
sys.path = [y for y in sys.path if package_path not in y]
LOGGER.debug(sys.path)
def unload(self):
"""GUI breakdown procedure (for QGIS plugin api).
.. note:: Don't change the name of this method from unload!
This method is called by QGIS and should be used to *remove*
any graphical user interface elements that were added by initGui.
"""
# Remove the plugin menu item and icon
if self.wizard:
self.wizard.deleteLater()
for myAction in self.actions:
self.iface.removePluginMenu(self.tr('InaSAFE'), myAction)
self.iface.removeToolBarIcon(myAction)
self.iface.removeCustomActionForLayerType(myAction)
self.iface.mainWindow().removeDockWidget(self.dock_widget)
self.iface.mainWindow().removeToolBar(self.toolbar)
self.dock_widget.setVisible(False)
self.dock_widget.destroy()
self.iface.currentLayerChanged.disconnect(self.layer_changed)
# Unload QGIS expressions loaded by the plugin.
for qgis_expression in list(qgis_expressions().keys()):
QgsExpression.unregisterFunction(qgis_expression)
def toggle_inasafe_action(self, checked):
"""Check or un-check the toggle inaSAFE toolbar button.
This slot is called when the user hides the inaSAFE panel using its
close button or using view->panels.
:param checked: True if the dock should be shown, otherwise False.
:type checked: bool
"""
self.action_dock.setChecked(checked)
# Run method that performs all the real work
def toggle_dock_visibility(self):
"""Show or hide the dock widget."""
if self.dock_widget.isVisible():
self.dock_widget.setVisible(False)
else:
self.dock_widget.setVisible(True)
self.dock_widget.raise_()
def add_test_layers(self):
"""Add standard test layers."""
from safe.test.utilities import load_standard_layers
load_standard_layers()
rect = QgsRectangle(106.806, -6.195, 106.837, -6.167)
self.iface.mapCanvas().setExtent(rect)
def select_test_package(self):
"""Select the test package."""
default_package = 'safe'
user_package = str(
setting('testPackage', default_package, expected_type=str))
test_package, _ = QInputDialog.getText(
self.iface.mainWindow(),
self.tr('Select the python test package'),
self.tr('Select the python test package'),
QLineEdit.Normal,
user_package)
if test_package == '':
test_package = default_package
set_setting('testPackage', test_package)
msg = self.tr('Run tests in %s' % test_package)
self.action_run_tests.setWhatsThis(msg)
self.action_run_tests.setText(msg)
def run_tests(self):
"""Run unit tests in the python console."""
from qgis.PyQt.QtWidgets import QDockWidget
main_window = self.iface.mainWindow()
action = main_window.findChild(QAction, 'mActionShowPythonDialog')
action.trigger()
package = str(setting('testPackage', 'safe', expected_type=str))
for child in main_window.findChildren(QDockWidget, 'PythonConsole'):
if child.objectName() == 'PythonConsole':
child.show()
for widget in child.children():
if 'PythonConsoleWidget' in str(widget.__class__):
# print "Console widget found"
shell = widget.shell
shell.runCommand(
'from inasafe.test_suite import test_package')
shell.runCommand('test_package(\'%s\')' % package)
break
def show_extent_selector(self):
"""Show the extent selector widget for defining analysis extents."""
# import here only so that it is AFTER i18n set up
from safe.gui.tools.extent_selector_dialog import ExtentSelectorDialog
widget = ExtentSelectorDialog(
self.iface,
self.iface.mainWindow(),
extent=self.dock_widget.extent.user_extent,
crs=self.dock_widget.extent.crs)
widget.clear_extent.connect(
self.dock_widget.extent.clear_user_analysis_extent)
widget.extent_defined.connect(
self.dock_widget.define_user_analysis_extent)
# This ensures that run button state is updated on dialog close
widget.extent_selector_closed.connect(
self.dock_widget.validate_impact_function)
# Needs to be non modal to support hide -> interact with map -> show
widget.show() # non modal
def show_minimum_needs(self):
"""Show the minimum needs dialog."""
# import here only so that it is AFTER i18n set up
from safe.gui.tools.minimum_needs.needs_calculator_dialog import (
NeedsCalculatorDialog
)
dialog = NeedsCalculatorDialog(self.iface.mainWindow())
dialog.exec_()
def show_minimum_needs_configuration(self):
"""Show the minimum needs dialog."""
# import here only so that it is AFTER i18n set up
from safe.gui.tools.minimum_needs.needs_manager_dialog import (
NeedsManagerDialog)
dialog = NeedsManagerDialog(
parent=self.iface.mainWindow(),
dock=self.dock_widget)
dialog.exec_() # modal
def show_options(self):
"""Show the options dialog."""
# import here only so that it is AFTER i18n set up
from safe.gui.tools.options_dialog import OptionsDialog
dialog = OptionsDialog(
iface=self.iface,
parent=self.iface.mainWindow())
dialog.show_option_dialog()
if dialog.exec_(): # modal
self.dock_widget.read_settings()
from safe.gui.widgets.message import getting_started_message
send_static_message(self.dock_widget, getting_started_message())
# Issue #4734, make sure to update the combobox after update the
# InaSAFE option
self.dock_widget.get_layers()
def show_welcome_message(self):
"""Show the welcome message."""
# import here only so that it is AFTER i18n set up
from safe.gui.tools.options_dialog import OptionsDialog
# Do not show by default
show_message = False
previous_version = StrictVersion(setting('previous_version'))
current_version = StrictVersion(inasafe_version)
# Set previous_version to the current inasafe_version
set_setting('previous_version', inasafe_version)
if setting('always_show_welcome_message', expected_type=bool):
# Show if it the setting said so
show_message = True
elif previous_version < current_version:
# Always show if the user installed new version
show_message = True
# Allow to disable welcome message when running automated tests
if os.environ.get('INASAFE_DISABLE_WELCOME_MESSAGE', False):
show_message = False
if show_message:
dialog = OptionsDialog(
iface=self.iface,
parent=self.iface.mainWindow())
dialog.show_welcome_dialog()
if dialog.exec_(): # modal
self.dock_widget.read_settings()
def show_keywords_wizard(self):
"""Show the keywords creation wizard."""
# import here only so that it is AFTER i18n set up
from safe.gui.tools.wizard.wizard_dialog import WizardDialog
if self.iface.activeLayer() is None:
return
# Don't break an existing wizard session if accidentally clicked
if self.wizard and self.wizard.isVisible():
return
# Prevent spawning multiple copies since the IFCW is non modal
if not self.wizard:
self.wizard = WizardDialog(
self.iface.mainWindow(),
self.iface,
self.dock_widget)
self.wizard.set_keywords_creation_mode()
self.wizard.exec_() # modal
def show_function_centric_wizard(self):
"""Show the function centric wizard."""
# import here only so that it is AFTER i18n set up
from safe.gui.tools.wizard.wizard_dialog import WizardDialog
# Don't break an existing wizard session if accidentally clicked
if self.wizard and self.wizard.isVisible():
return
# Prevent spawning multiple copies since it is non modal
if not self.wizard:
self.wizard = WizardDialog(
self.iface.mainWindow(),
self.iface,
self.dock_widget)
self.wizard.set_function_centric_mode()
# non-modal in order to hide for selecting user extent
self.wizard.show()
def show_shakemap_importer(self):
"""Show the converter dialog."""
# import here only so that it is AFTER i18n set up
from safe.gui.tools.shake_grid.shakemap_converter_dialog import (
ShakemapConverterDialog)
dialog = ShakemapConverterDialog(
self.iface.mainWindow(), self.iface, self.dock_widget)
dialog.exec_() # modal
def show_multi_buffer(self):
"""Show the multi buffer tool."""
from safe.gui.tools.multi_buffer_dialog import (
MultiBufferDialog)
dialog = MultiBufferDialog(
self.iface.mainWindow(), self.iface, self.dock_widget)
dialog.exec_() # modal
def show_osm_downloader(self):
"""Show the OSM buildings downloader dialog."""
from safe.gui.tools.osm_downloader_dialog import OsmDownloaderDialog
dialog = OsmDownloaderDialog(self.iface.mainWindow(), self.iface)
# otherwise dialog is never deleted
dialog.setAttribute(Qt.WA_DeleteOnClose, True)
dialog.show() # non modal
def show_geonode_uploader(self):
"""Show the Geonode uploader dialog."""
from safe.gui.tools.geonode_uploader import GeonodeUploaderDialog
dialog = GeonodeUploaderDialog(self.iface.mainWindow())
dialog.show() # non modal
def add_osm_layer(self):
"""Add OSM tile layer to the map.
This uses a gdal wrapper around the OSM tile service - see the
WorldOSM.gdal file for how it is constructed.
"""
path = resources_path('osm', 'WorldOSM.gdal')
layer = QgsRasterLayer(path, self.tr('OpenStreetMap'))
project = QgsProject.instance()
# Try to add it as the last layer in the list
# False flag prevents layer being added to legend
project.addMapLayer(layer, False)
root = QgsProject.instance().layerTreeRoot()
index = len(root.findLayers()) + 1
# LOGGER.info('Inserting layer %s at position %s' % (
# layer.source(), index))
root.insertLayer(index, layer)
project.addMapLayer(layer)
def show_definitions(self):
"""Show InaSAFE Definitions (a report showing all key metadata)."""
from safe.utilities.help import show_help
from safe.gui.tools.help import definitions_help
show_help(definitions_help.definitions_help())
def show_field_mapping(self):
"""Show InaSAFE Field Mapping."""
from safe.gui.tools.field_mapping_dialog import FieldMappingDialog
dialog = FieldMappingDialog(
parent=self.iface.mainWindow(),
iface=self.iface,)
if dialog.exec_(): # modal
LOGGER.debug('Show field mapping accepted')
self.dock_widget.layer_changed(self.iface.activeLayer())
else:
LOGGER.debug('Show field mapping not accepted')
def show_metadata_converter(self):
"""Show InaSAFE Metadata Converter."""
from safe.gui.tools.metadata_converter_dialog import (
MetadataConverterDialog)
dialog = MetadataConverterDialog(
parent=self.iface.mainWindow(),
iface=self.iface,
)
dialog.exec_()
def show_multi_exposure(self):
"""Show InaSAFE Multi Exposure."""
from safe.gui.tools.multi_exposure_dialog import MultiExposureDialog
dialog = MultiExposureDialog(
self.iface.mainWindow(), self.iface)
dialog.exec_() # modal
def add_petabencana_layer(self):
"""Add petabencana layer to the map.
This uses the PetaBencana API to fetch the latest floods in JK. See
https://data.petabencana.id/floods
"""
from safe.gui.tools.peta_bencana_dialog import PetaBencanaDialog
dialog = PetaBencanaDialog(self.iface.mainWindow(), self.iface)
dialog.show() # non modal
def show_batch_runner(self):
"""Show the batch runner dialog."""
from safe.gui.tools.batch.batch_dialog import BatchDialog
dialog = BatchDialog(
parent=self.iface.mainWindow(),
iface=self.iface,
dock=self.dock_widget)
dialog.exec_() # modal
def save_scenario(self):
"""Save current scenario to text file."""
from safe.gui.tools.save_scenario import SaveScenarioDialog
dialog = SaveScenarioDialog(
iface=self.iface,
dock=self.dock_widget)
dialog.save_scenario()
def layer_changed(self, layer):
"""Enable or disable keywords editor icon when active layer changes.
:param layer: The layer that is now active.
:type layer: QgsMapLayer
"""
if not layer:
enable_keyword_wizard = False
elif not hasattr(layer, 'providerType'):
enable_keyword_wizard = False
elif layer.providerType() == 'wms':
enable_keyword_wizard = False
else:
enable_keyword_wizard = True
try:
if layer:
if is_raster_layer(layer):
enable_field_mapping_tool = False
else:
keywords = KeywordIO().read_keywords(layer)
keywords_version = keywords.get('keyword_version')
if not keywords_version:
supported = False
else:
supported = (
is_keyword_version_supported(keywords_version))
if not supported:
enable_field_mapping_tool = False
else:
layer_purpose = keywords.get('layer_purpose')
if not layer_purpose:
enable_field_mapping_tool = False
else:
if layer_purpose == layer_purpose_exposure['key']:
layer_subcategory = keywords.get('exposure')
elif layer_purpose == layer_purpose_hazard['key']:
layer_subcategory = keywords.get('hazard')
else:
layer_subcategory = None
field_groups = get_field_groups(
layer_purpose, layer_subcategory)
if len(field_groups) == 0:
# No field group, disable field mapping tool.
enable_field_mapping_tool = False
else:
enable_field_mapping_tool = True
else:
enable_field_mapping_tool = False
except (KeywordNotFoundError, NoKeywordsFoundError, MetadataReadError):
# No keywords, disable field mapping tool.
enable_field_mapping_tool = False
self.action_keywords_wizard.setEnabled(enable_keyword_wizard)
self.action_field_mapping.setEnabled(enable_field_mapping_tool)
def shortcut_f7(self):
"""Executed when the user presses F7 - will show the shakemap importer."""
self.show_shakemap_importer()
|
gpl-3.0
|
catapult-project/catapult-csm
|
third_party/html5lib-python/html5lib/treebuilders/etree_lxml.py
|
1724
|
14031
|
"""Module for supporting the lxml.etree library. The idea here is to use as much
of the native library as possible, without using fragile hacks like custom element
names that break between releases. The downside of this is that we cannot represent
all possible trees; specifically the following are known to cause problems:
Text or comments as siblings of the root element
Doctypes with no name
When any of these things occur, we emit a DataLossWarning
"""
from __future__ import absolute_import, division, unicode_literals
import warnings
import re
import sys
from . import _base
from ..constants import DataLossWarning
from .. import constants
from . import etree as etree_builders
from .. import ihatexml
import lxml.etree as etree
fullTree = True
tag_regexp = re.compile("{([^}]*)}(.*)")
comment_type = etree.Comment("asd").tag
class DocumentType(object):
def __init__(self, name, publicId, systemId):
self.name = name
self.publicId = publicId
self.systemId = systemId
class Document(object):
def __init__(self):
self._elementTree = None
self._childNodes = []
def appendChild(self, element):
self._elementTree.getroot().addnext(element._element)
def _getChildNodes(self):
return self._childNodes
childNodes = property(_getChildNodes)
def testSerializer(element):
rv = []
finalText = None
infosetFilter = ihatexml.InfosetFilter()
def serializeElement(element, indent=0):
if not hasattr(element, "tag"):
if hasattr(element, "getroot"):
# Full tree case
rv.append("#document")
if element.docinfo.internalDTD:
if not (element.docinfo.public_id or
element.docinfo.system_url):
dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
else:
dtd_str = """<!DOCTYPE %s "%s" "%s">""" % (
element.docinfo.root_name,
element.docinfo.public_id,
element.docinfo.system_url)
rv.append("|%s%s" % (' ' * (indent + 2), dtd_str))
next_element = element.getroot()
while next_element.getprevious() is not None:
next_element = next_element.getprevious()
while next_element is not None:
serializeElement(next_element, indent + 2)
next_element = next_element.getnext()
elif isinstance(element, str) or isinstance(element, bytes):
# Text in a fragment
assert isinstance(element, str) or sys.version_info.major == 2
rv.append("|%s\"%s\"" % (' ' * indent, element))
else:
# Fragment case
rv.append("#document-fragment")
for next_element in element:
serializeElement(next_element, indent + 2)
elif element.tag == comment_type:
rv.append("|%s<!-- %s -->" % (' ' * indent, element.text))
if hasattr(element, "tail") and element.tail:
rv.append("|%s\"%s\"" % (' ' * indent, element.tail))
else:
assert isinstance(element, etree._Element)
nsmatch = etree_builders.tag_regexp.match(element.tag)
if nsmatch is not None:
ns = nsmatch.group(1)
tag = nsmatch.group(2)
prefix = constants.prefixes[ns]
rv.append("|%s<%s %s>" % (' ' * indent, prefix,
infosetFilter.fromXmlName(tag)))
else:
rv.append("|%s<%s>" % (' ' * indent,
infosetFilter.fromXmlName(element.tag)))
if hasattr(element, "attrib"):
attributes = []
for name, value in element.attrib.items():
nsmatch = tag_regexp.match(name)
if nsmatch is not None:
ns, name = nsmatch.groups()
name = infosetFilter.fromXmlName(name)
prefix = constants.prefixes[ns]
attr_string = "%s %s" % (prefix, name)
else:
attr_string = infosetFilter.fromXmlName(name)
attributes.append((attr_string, value))
for name, value in sorted(attributes):
rv.append('|%s%s="%s"' % (' ' * (indent + 2), name, value))
if element.text:
rv.append("|%s\"%s\"" % (' ' * (indent + 2), element.text))
indent += 2
for child in element:
serializeElement(child, indent)
if hasattr(element, "tail") and element.tail:
rv.append("|%s\"%s\"" % (' ' * (indent - 2), element.tail))
serializeElement(element, 0)
if finalText is not None:
rv.append("|%s\"%s\"" % (' ' * 2, finalText))
return "\n".join(rv)
def tostring(element):
"""Serialize an element and its child nodes to a string"""
rv = []
finalText = None
def serializeElement(element):
if not hasattr(element, "tag"):
if element.docinfo.internalDTD:
if element.docinfo.doctype:
dtd_str = element.docinfo.doctype
else:
dtd_str = "<!DOCTYPE %s>" % element.docinfo.root_name
rv.append(dtd_str)
serializeElement(element.getroot())
elif element.tag == comment_type:
rv.append("<!--%s-->" % (element.text,))
else:
# This is assumed to be an ordinary element
if not element.attrib:
rv.append("<%s>" % (element.tag,))
else:
attr = " ".join(["%s=\"%s\"" % (name, value)
for name, value in element.attrib.items()])
rv.append("<%s %s>" % (element.tag, attr))
if element.text:
rv.append(element.text)
for child in element:
serializeElement(child)
rv.append("</%s>" % (element.tag,))
if hasattr(element, "tail") and element.tail:
rv.append(element.tail)
serializeElement(element)
if finalText is not None:
rv.append("%s\"%s\"" % (' ' * 2, finalText))
return "".join(rv)
class TreeBuilder(_base.TreeBuilder):
documentClass = Document
doctypeClass = DocumentType
elementClass = None
commentClass = None
fragmentClass = Document
implementation = etree
def __init__(self, namespaceHTMLElements, fullTree=False):
builder = etree_builders.getETreeModule(etree, fullTree=fullTree)
infosetFilter = self.infosetFilter = ihatexml.InfosetFilter()
self.namespaceHTMLElements = namespaceHTMLElements
class Attributes(dict):
def __init__(self, element, value={}):
self._element = element
dict.__init__(self, value)
for key, value in self.items():
if isinstance(key, tuple):
name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
else:
name = infosetFilter.coerceAttribute(key)
self._element._element.attrib[name] = value
def __setitem__(self, key, value):
dict.__setitem__(self, key, value)
if isinstance(key, tuple):
name = "{%s}%s" % (key[2], infosetFilter.coerceAttribute(key[1]))
else:
name = infosetFilter.coerceAttribute(key)
self._element._element.attrib[name] = value
class Element(builder.Element):
def __init__(self, name, namespace):
name = infosetFilter.coerceElement(name)
builder.Element.__init__(self, name, namespace=namespace)
self._attributes = Attributes(self)
def _setName(self, name):
self._name = infosetFilter.coerceElement(name)
self._element.tag = self._getETreeTag(
self._name, self._namespace)
def _getName(self):
return infosetFilter.fromXmlName(self._name)
name = property(_getName, _setName)
def _getAttributes(self):
return self._attributes
def _setAttributes(self, attributes):
self._attributes = Attributes(self, attributes)
attributes = property(_getAttributes, _setAttributes)
def insertText(self, data, insertBefore=None):
data = infosetFilter.coerceCharacters(data)
builder.Element.insertText(self, data, insertBefore)
def appendChild(self, child):
builder.Element.appendChild(self, child)
class Comment(builder.Comment):
def __init__(self, data):
data = infosetFilter.coerceComment(data)
builder.Comment.__init__(self, data)
def _setData(self, data):
data = infosetFilter.coerceComment(data)
self._element.text = data
def _getData(self):
return self._element.text
data = property(_getData, _setData)
self.elementClass = Element
self.commentClass = Comment
# self.fragmentClass = builder.DocumentFragment
_base.TreeBuilder.__init__(self, namespaceHTMLElements)
def reset(self):
_base.TreeBuilder.reset(self)
self.insertComment = self.insertCommentInitial
self.initial_comments = []
self.doctype = None
def testSerializer(self, element):
return testSerializer(element)
def getDocument(self):
if fullTree:
return self.document._elementTree
else:
return self.document._elementTree.getroot()
def getFragment(self):
fragment = []
element = self.openElements[0]._element
if element.text:
fragment.append(element.text)
fragment.extend(list(element))
if element.tail:
fragment.append(element.tail)
return fragment
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
if not name:
warnings.warn("lxml cannot represent empty doctype", DataLossWarning)
self.doctype = None
else:
coercedName = self.infosetFilter.coerceElement(name)
if coercedName != name:
warnings.warn("lxml cannot represent non-xml doctype", DataLossWarning)
doctype = self.doctypeClass(coercedName, publicId, systemId)
self.doctype = doctype
def insertCommentInitial(self, data, parent=None):
self.initial_comments.append(data)
def insertCommentMain(self, data, parent=None):
if (parent == self.document and
self.document._elementTree.getroot()[-1].tag == comment_type):
warnings.warn("lxml cannot represent adjacent comments beyond the root elements", DataLossWarning)
super(TreeBuilder, self).insertComment(data, parent)
def insertRoot(self, token):
"""Create the document root"""
# Because of the way libxml2 works, it doesn't seem to be possible to
# alter information like the doctype after the tree has been parsed.
# Therefore we need to use the built-in parser to create our initial
# tree, after which we can add elements like normal
docStr = ""
if self.doctype:
assert self.doctype.name
docStr += "<!DOCTYPE %s" % self.doctype.name
if (self.doctype.publicId is not None or
self.doctype.systemId is not None):
docStr += (' PUBLIC "%s" ' %
(self.infosetFilter.coercePubid(self.doctype.publicId or "")))
if self.doctype.systemId:
sysid = self.doctype.systemId
if sysid.find("'") >= 0 and sysid.find('"') >= 0:
warnings.warn("DOCTYPE system cannot contain single and double quotes", DataLossWarning)
sysid = sysid.replace("'", 'U00027')
if sysid.find("'") >= 0:
docStr += '"%s"' % sysid
else:
docStr += "'%s'" % sysid
else:
docStr += "''"
docStr += ">"
if self.doctype.name != token["name"]:
warnings.warn("lxml cannot represent doctype with a different name to the root element", DataLossWarning)
docStr += "<THIS_SHOULD_NEVER_APPEAR_PUBLICLY/>"
root = etree.fromstring(docStr)
# Append the initial comments:
for comment_token in self.initial_comments:
root.addprevious(etree.Comment(comment_token["data"]))
# Create the root document and add the ElementTree to it
self.document = self.documentClass()
self.document._elementTree = root.getroottree()
# Give the root element the right name
name = token["name"]
namespace = token.get("namespace", self.defaultNamespace)
if namespace is None:
etree_tag = name
else:
etree_tag = "{%s}%s" % (namespace, name)
root.tag = etree_tag
# Add the root element to the internal child/open data structures
root_element = self.elementClass(name, namespace)
root_element._element = root
self.document._childNodes.append(root_element)
self.openElements.append(root_element)
# Reset to the default insert comment function
self.insertComment = self.insertCommentMain
|
bsd-3-clause
|
psav/cfme_tests
|
cfme/test_requirements.py
|
7
|
2719
|
"""Test requirements mapping
This module contains predefined pytest markers for CFME product requirements.
Please import the module instead of elements:
.. code-block:: python
from cfme import test_requirements
pytestmark = [test_requirements.alert]
@test_requirements.quota
def test_quota_alert():
pass
"""
import pytest
ansible = pytest.mark.requirement("ansible")
access = pytest.mark.requirement("access")
alert = pytest.mark.requirement("alert")
auth = pytest.mark.requirement("auth")
automate = pytest.mark.requirement("automate")
black = pytest.mark.requirement("black")
bottleneck = pytest.mark.requirement("bottleneck")
c_and_u = pytest.mark.requirement("c_and_u")
cfme_tenancy = pytest.mark.requirement("cfme_tenancy")
chargeback = pytest.mark.requirement("chargeback")
cloud_init = pytest.mark.requirement("cloud_init")
config_management = pytest.mark.requirement("config_management")
configuration = pytest.mark.requirement("configuration")
control = pytest.mark.requirement("control")
dashboard = pytest.mark.requirement("dashboard")
discovery = pytest.mark.requirement("discovery")
distributed = pytest.mark.requirement("distributed")
drift = pytest.mark.requirement("drift")
filter = pytest.mark.requirement("filter")
genealogy = pytest.mark.requirement("genealogy")
general_ui = pytest.mark.requirement("general_ui")
generic_objects = pytest.mark.requirement("generic_objects")
html5 = pytest.mark.requirement("html5")
ipv6 = pytest.mark.requirement("ipv6")
log_depot = pytest.mark.requirement("log_depot")
ownership = pytest.mark.requirement("ownership")
power = pytest.mark.requirement("power")
provision = pytest.mark.requirement("provision")
quota = pytest.mark.requirement("quota")
rbac = pytest.mark.requirement("rbac")
reconfigure = pytest.mark.requirement("reconfigure")
rep = pytest.mark.requirement("rep")
report = pytest.mark.requirement("report")
rest = pytest.mark.requirement("rest")
retirement = pytest.mark.requirement("retirement")
right_size = pytest.mark.requirement("right_size")
run_process = pytest.mark.requirement("run_process")
sdn = pytest.mark.requirement("sdn")
service = pytest.mark.requirement("service")
settings = pytest.mark.requirement("settings")
smartstate = pytest.mark.requirement("smartstate")
snapshot = pytest.mark.requirement("snapshot")
ssui = pytest.mark.requirement("ssui")
stack = pytest.mark.requirement("stack")
storage = pytest.mark.requirement("storage")
sysprep = pytest.mark.requirement("sysprep")
tag = pytest.mark.requirement("tag")
timelines = pytest.mark.requirement("timelines")
upgrade = pytest.mark.requirement("upgrade")
vm_migrate = pytest.mark.requirement("vm_migrate")
vmrc = pytest.mark.requirement("vmrc")
|
gpl-2.0
|
Mirantis/mos-horizon
|
openstack_dashboard/test/integration_tests/pages/identity/userspage.py
|
1
|
6565
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from selenium.webdriver.common import by
from openstack_dashboard.test.integration_tests.pages import basepage
from openstack_dashboard.test.integration_tests.regions import forms
from openstack_dashboard.test.integration_tests.regions import tables
class UsersTable(tables.TableRegion):
name = 'users'
CREATE_USER_FORM_FIELDS = ("name", "email", "password",
"confirm_password", "project", "role_id")
EDIT_USER_FORM_FIELDS = ("name", "email", "project")
CHANGE_PASSWORD_FORM_FIELDS = ("password", "confirm_password", "name")
_search_button_locator = (by.By.CSS_SELECTOR,
'div.table_search span.fa-search')
@tables.bind_table_action('create')
def create_user(self, create_button):
create_button.click()
return forms.FormRegion(self.driver, self.conf,
field_mappings=self.CREATE_USER_FORM_FIELDS)
@tables.bind_row_action('edit', primary=True)
def edit_user(self, edit_button, row):
edit_button.click()
return forms.FormRegion(self.driver, self.conf,
field_mappings=self.EDIT_USER_FORM_FIELDS)
@tables.bind_row_action('change_password')
def change_password(self, change_password_button, row):
change_password_button.click()
return forms.FormRegion(
self.driver, self.conf,
field_mappings=self.CHANGE_PASSWORD_FORM_FIELDS)
# Row action 'Disable user' / 'Enable user'
@tables.bind_row_action('toggle')
def disable_enable_user(self, disable_enable_user_button, row):
disable_enable_user_button.click()
@tables.bind_row_action('delete')
def delete_user(self, delete_button, row):
delete_button.click()
return forms.BaseFormRegion(self.driver, self.conf)
@tables.bind_table_action('delete')
def delete_users(self, delete_button):
delete_button.click()
return forms.BaseFormRegion(self.driver, self.conf)
def available_row_actions(self, row):
primary_selector = (by.By.CSS_SELECTOR,
'td.actions_column *.btn:nth-child(1)')
secondary_locator = \
(by.By.CSS_SELECTOR,
'td.actions_column li > a, td.actions_column li > button')
result = [row._get_element(
*primary_selector).get_attribute('innerHTML').strip()]
for element in row._get_elements(*secondary_locator):
if element.is_enabled():
result.append(element.get_attribute('innerHTML').strip())
return result
class UsersPage(basepage.BaseNavigationPage):
USERS_TABLE_NAME_COLUMN = 'name'
USERS_TABLE_ENABLED_COLUMN = 'enabled'
def __init__(self, driver, conf):
super(UsersPage, self).__init__(driver, conf)
self._page_title = "Users"
def _get_row_with_user_name(self, name):
return self.users_table.get_row(self.USERS_TABLE_NAME_COLUMN, name)
@property
def users_table(self):
return UsersTable(self.driver, self.conf)
def create_user(self, name, password,
project, role, email=None):
create_user_form = self.users_table.create_user()
create_user_form.name.text = name
if email is not None:
create_user_form.email.text = email
create_user_form.password.text = password
create_user_form.confirm_password.text = password
create_user_form.project.text = project
create_user_form.role_id.text = role
create_user_form.submit()
def edit_user(self, name, new_name=None, new_email=None,
new_primary_project=None):
row = self._get_row_with_user_name(name)
edit_user_form = self.users_table.edit_user(row)
if new_name:
edit_user_form.name.text = new_name
if new_email:
edit_user_form.email.text = new_email
if new_primary_project:
edit_user_form.project.text = new_primary_project
edit_user_form.submit()
def get_user_info(self, name):
user_info = {}
row = self._get_row_with_user_name(name)
edit_user_form = self.users_table.edit_user(row)
user_info['name'] = edit_user_form.name.text
user_info['email'] = edit_user_form.email.text or None
user_info['primary_project'] = edit_user_form.project.text
edit_user_form.cancel()
return user_info
def change_password(self, name, new_passwd):
row = self._get_row_with_user_name(name)
change_password_form = self.users_table.change_password(row)
change_password_form.password.text = new_passwd
change_password_form.confirm_password.text = new_passwd
change_password_form.submit()
def available_row_actions(self, name):
row = self._get_row_with_user_name(name)
return self.users_table.available_row_actions(row)
def delete_user(self, name):
row = self._get_row_with_user_name(name)
confirm_delete_users_form = self.users_table.delete_user(row)
confirm_delete_users_form.submit()
def delete_users(self, *names):
for name in names:
self._get_row_with_user_name(name).mark()
confirm_delete_users_form = self.users_table.delete_users()
confirm_delete_users_form.submit()
def is_user_present(self, name):
return bool(self._get_row_with_user_name(name))
def disable_enable_user(self, name, action):
row = self._get_row_with_user_name(name)
self.users_table.disable_enable_user(row)
if action == 'disable':
return row.cells[self.USERS_TABLE_ENABLED_COLUMN].text == 'No'
elif action == 'enable':
return row.cells[self.USERS_TABLE_ENABLED_COLUMN].text == 'Yes'
@property
def visible_user_names(self):
names = [row.cells['name'].text for row in self.users_table.rows]
return filter(None, names)
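# Illustrative sketch (not part of the original page object): an integration
# test would typically drive this page roughly as follows. The variable
# names, credentials and project/role values below are assumptions made
# purely for illustration.
#
# users_page = UsersPage(driver, conf)
# users_page.create_user('demo', 'Secret123!', 'demo_project', 'member',
#                        email='demo@example.com')
# assert users_page.is_user_present('demo')
# users_page.change_password('demo', 'NewSecret456!')
# users_page.delete_user('demo')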
|
apache-2.0
|
MichaelNedzelsky/intellij-community
|
python/lib/Lib/site-packages/django/contrib/comments/signals.py
|
425
|
1079
|
"""
Signals relating to comments.
"""
from django.dispatch import Signal
# Sent just before a comment will be posted (after it's been approved and
# moderated); this can be used to modify the comment (in place) with posting
# details or other such actions. If any receiver returns False, the comment
# will be discarded and a 403 (not allowed) response returned. This signal is
# sent at more or less the same time (just before, actually) as the Comment
# object's pre-save signal, except that the HTTP request is sent along with
# this signal.
comment_will_be_posted = Signal(providing_args=["comment", "request"])
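# Illustrative sketch (not part of the original module): a hypothetical
# receiver that vetoes very short comments. Returning False from any
# receiver causes the comment to be discarded and a 403 response returned.
# The function name and threshold below are made up for illustration only.
#
# def reject_short_comments(sender, comment, request, **kwargs):
#     if len(comment.comment.strip()) < 5:
#         return False
#
# comment_will_be_posted.connect(reject_short_comments)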
# Sent just after a comment was posted. See above for how this differs
# from the Comment object's post-save signal.
comment_was_posted = Signal(providing_args=["comment", "request"])
# Sent after a comment was "flagged" in some way. Check the flag to see if this
# was a user requesting removal of a comment, a moderator approving/removing a
# comment, or some other custom user flag.
comment_was_flagged = Signal(providing_args=["comment", "flag", "created", "request"])
|
apache-2.0
|
theguardian/JIRA-APPy
|
lib/passlib/tests/test_registry.py
|
2
|
9501
|
"""tests for passlib.pwhash -- (c) Assurance Technologies 2003-2009"""
#=============================================================================
# imports
#=============================================================================
from __future__ import with_statement
# core
import hashlib
from logging import getLogger
import os
import time
import warnings
import sys
# site
# pkg
from lib.passlib import hash, registry, exc
from lib.passlib.registry import register_crypt_handler, register_crypt_handler_path, \
get_crypt_handler, list_crypt_handlers, _unload_handler_name as unload_handler_name
import lib.passlib.utils.handlers as uh
from lib.passlib.tests.utils import TestCase, catch_warnings
# module
log = getLogger(__name__)
#=============================================================================
# dummy handlers
#
# NOTE: these are defined outside of test case
# since they're used by test_register_crypt_handler_path(),
# which needs them to be available as module globals.
#=============================================================================
class dummy_0(uh.StaticHandler):
name = "dummy_0"
class alt_dummy_0(uh.StaticHandler):
name = "dummy_0"
dummy_x = 1
#=============================================================================
# test registry
#=============================================================================
class RegistryTest(TestCase):
descriptionPrefix = "passlib.registry"
def setUp(self):
super(RegistryTest, self).setUp()
# backup registry state & restore it after test.
locations = dict(registry._locations)
handlers = dict(registry._handlers)
def restore():
registry._locations.clear()
registry._locations.update(locations)
registry._handlers.clear()
registry._handlers.update(handlers)
self.addCleanup(restore)
def test_hash_proxy(self):
"""test passlib.hash proxy object"""
# check dir works
dir(hash)
# check repr works
repr(hash)
# check non-existent attrs raise error
self.assertRaises(AttributeError, getattr, hash, 'fooey')
# GAE tries to set __loader__,
# make sure that doesn't call register_crypt_handler.
old = getattr(hash, "__loader__", None)
test = object()
hash.__loader__ = test
self.assertIs(hash.__loader__, test)
if old is None:
del hash.__loader__
self.assertFalse(hasattr(hash, "__loader__"))
else:
hash.__loader__ = old
self.assertIs(hash.__loader__, old)
# check storing attr calls register_crypt_handler
class dummy_1(uh.StaticHandler):
name = "dummy_1"
hash.dummy_1 = dummy_1
self.assertIs(get_crypt_handler("dummy_1"), dummy_1)
# check storing under wrong name results in error
self.assertRaises(ValueError, setattr, hash, "dummy_1x", dummy_1)
def test_register_crypt_handler_path(self):
"""test register_crypt_handler_path()"""
# NOTE: this messes w/ internals of registry, shouldn't be used publicly.
paths = registry._locations
# check namespace is clear
self.assertTrue('dummy_0' not in paths)
self.assertFalse(hasattr(hash, 'dummy_0'))
# check invalid names are rejected
self.assertRaises(ValueError, register_crypt_handler_path,
"dummy_0", ".test_registry")
self.assertRaises(ValueError, register_crypt_handler_path,
"dummy_0", __name__ + ":dummy_0:xxx")
self.assertRaises(ValueError, register_crypt_handler_path,
"dummy_0", __name__ + ":dummy_0.xxx")
# try lazy load
register_crypt_handler_path('dummy_0', __name__)
self.assertTrue('dummy_0' in list_crypt_handlers())
self.assertTrue('dummy_0' not in list_crypt_handlers(loaded_only=True))
self.assertIs(hash.dummy_0, dummy_0)
self.assertTrue('dummy_0' in list_crypt_handlers(loaded_only=True))
unload_handler_name('dummy_0')
# try lazy load w/ alt
register_crypt_handler_path('dummy_0', __name__ + ':alt_dummy_0')
self.assertIs(hash.dummy_0, alt_dummy_0)
unload_handler_name('dummy_0')
# check lazy load w/ wrong type fails
register_crypt_handler_path('dummy_x', __name__)
self.assertRaises(TypeError, get_crypt_handler, 'dummy_x')
# check lazy load w/ wrong name fails
register_crypt_handler_path('alt_dummy_0', __name__)
self.assertRaises(ValueError, get_crypt_handler, "alt_dummy_0")
unload_handler_name("alt_dummy_0")
# TODO: check lazy load which calls register_crypt_handler (warning should be issued)
sys.modules.pop("passlib.tests._test_bad_register", None)
register_crypt_handler_path("dummy_bad", "passlib.tests._test_bad_register")
with catch_warnings():
warnings.filterwarnings("ignore", "xxxxxxxxxx", DeprecationWarning)
h = get_crypt_handler("dummy_bad")
from lib.passlib.tests import _test_bad_register as tbr
self.assertIs(h, tbr.alt_dummy_bad)
def test_register_crypt_handler(self):
"""test register_crypt_handler()"""
self.assertRaises(TypeError, register_crypt_handler, {})
self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name=None)))
self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="AB_CD")))
self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="ab-cd")))
self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="ab__cd")))
self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="default")))
class dummy_1(uh.StaticHandler):
name = "dummy_1"
class dummy_1b(uh.StaticHandler):
name = "dummy_1"
self.assertTrue('dummy_1' not in list_crypt_handlers())
register_crypt_handler(dummy_1)
register_crypt_handler(dummy_1)
self.assertIs(get_crypt_handler("dummy_1"), dummy_1)
self.assertRaises(KeyError, register_crypt_handler, dummy_1b)
self.assertIs(get_crypt_handler("dummy_1"), dummy_1)
register_crypt_handler(dummy_1b, force=True)
self.assertIs(get_crypt_handler("dummy_1"), dummy_1b)
self.assertTrue('dummy_1' in list_crypt_handlers())
def test_get_crypt_handler(self):
"""test get_crypt_handler()"""
class dummy_1(uh.StaticHandler):
name = "dummy_1"
# without available handler
self.assertRaises(KeyError, get_crypt_handler, "dummy_1")
self.assertIs(get_crypt_handler("dummy_1", None), None)
# already loaded handler
register_crypt_handler(dummy_1)
self.assertIs(get_crypt_handler("dummy_1"), dummy_1)
with catch_warnings():
warnings.filterwarnings("ignore", "handler names should be lower-case, and use underscores instead of hyphens:.*", UserWarning)
# already loaded handler, using incorrect name
self.assertIs(get_crypt_handler("DUMMY-1"), dummy_1)
# lazy load of unloaded handler, using incorrect name
register_crypt_handler_path('dummy_0', __name__)
self.assertIs(get_crypt_handler("DUMMY-0"), dummy_0)
# check system & private names aren't returned
import lib.passlib.hash # ensure module imported, so py3.3 sets __package__
lib.passlib.hash.__dict__["_fake"] = "dummy" # so behavior seen under py2x also
for name in ["_fake", "__package__"]:
self.assertRaises(KeyError, get_crypt_handler, name)
self.assertIs(get_crypt_handler(name, None), None)
def test_list_crypt_handlers(self):
"""test list_crypt_handlers()"""
from lib.passlib.registry import list_crypt_handlers
# check system & private names aren't returned
import lib.passlib.hash # ensure module imported, so py3.3 sets __package__
lib.passlib.hash.__dict__["_fake"] = "dummy" # so behavior seen under py2x also
for name in list_crypt_handlers():
self.assertFalse(name.startswith("_"), "%r: " % name)
unload_handler_name("_fake")
def test_handlers(self):
"""verify we have tests for all builtin handlers"""
from lib.passlib.registry import list_crypt_handlers
from lib.passlib.tests.test_handlers import get_handler_case
for name in list_crypt_handlers():
# skip some wrappers that don't need independent testing
if name.startswith("ldap_") and name[5:] in list_crypt_handlers():
continue
if name in ["roundup_plaintext"]:
continue
# check the remaining ones all have a handler
try:
self.assertTrue(get_handler_case(name))
except exc.MissingBackendError:
if name in ["bcrypt", "bcrypt_sha256"]: # expected to fail on some setups
continue
raise
#=============================================================================
# eof
#=============================================================================
|
gpl-2.0
|
defance/edx-platform
|
lms/djangoapps/support/tests/test_views.py
|
15
|
10144
|
# coding: UTF-8
"""
Tests for support views.
"""
from datetime import datetime, timedelta
import itertools
import json
import re
import ddt
from django.core.urlresolvers import reverse
from pytz import UTC
from course_modes.models import CourseMode
from course_modes.tests.factories import CourseModeFactory
from lms.djangoapps.verify_student.models import VerificationDeadline
from student.models import CourseEnrollment, ManualEnrollmentAudit, ENROLLED_TO_ENROLLED
from student.roles import GlobalStaff, SupportStaffRole
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class SupportViewTestCase(ModuleStoreTestCase):
"""
Base class for support view tests.
"""
USERNAME = "support"
EMAIL = "support@example.com"
PASSWORD = "support"
def setUp(self):
"""Create a user and log in. """
super(SupportViewTestCase, self).setUp()
self.user = UserFactory(username=self.USERNAME, email=self.EMAIL, password=self.PASSWORD)
self.course = CourseFactory.create()
success = self.client.login(username=self.USERNAME, password=self.PASSWORD)
self.assertTrue(success, msg="Could not log in")
@ddt.ddt
class SupportViewAccessTests(SupportViewTestCase):
"""
Tests for access control of support views.
"""
@ddt.data(*(
(url_name, role, has_access)
for (url_name, (role, has_access))
in itertools.product((
'support:index',
'support:certificates',
'support:refund',
'support:enrollment',
'support:enrollment_list'
), (
(GlobalStaff, True),
(SupportStaffRole, True),
(None, False)
))
))
@ddt.unpack
def test_access(self, url_name, role, has_access):
if role is not None:
role().add_users(self.user)
url = reverse(url_name)
response = self.client.get(url)
if has_access:
self.assertEqual(response.status_code, 200)
else:
self.assertEqual(response.status_code, 403)
@ddt.data(
"support:index",
"support:certificates",
"support:refund",
"support:enrollment",
"support:enrollment_list"
)
def test_require_login(self, url_name):
url = reverse(url_name)
# Log out then try to retrieve the page
self.client.logout()
response = self.client.get(url)
# Expect a redirect to the login page
redirect_url = "{login_url}?next={original_url}".format(
login_url=reverse("signin_user"),
original_url=url,
)
self.assertRedirects(response, redirect_url)
class SupportViewIndexTests(SupportViewTestCase):
"""
Tests for the support index view.
"""
EXPECTED_URL_NAMES = [
"support:certificates",
"support:refund",
]
def setUp(self):
"""Make the user support staff. """
super(SupportViewIndexTests, self).setUp()
SupportStaffRole().add_users(self.user)
def test_index(self):
response = self.client.get(reverse("support:index"))
self.assertContains(response, "Support")
# Check that all the expected links appear on the index page.
for url_name in self.EXPECTED_URL_NAMES:
self.assertContains(response, reverse(url_name))
class SupportViewCertificatesTests(SupportViewTestCase):
"""
Tests for the certificates support view.
"""
def setUp(self):
"""Make the user support staff. """
super(SupportViewCertificatesTests, self).setUp()
SupportStaffRole().add_users(self.user)
def test_certificates_no_filter(self):
# Check that an empty initial filter is passed to the JavaScript client correctly.
response = self.client.get(reverse("support:certificates"))
self.assertContains(response, "userFilter: ''")
def test_certificates_with_user_filter(self):
# Check that an initial filter is passed to the JavaScript client.
url = reverse("support:certificates") + "?user=student@example.com"
response = self.client.get(url)
self.assertContains(response, "userFilter: 'student@example.com'")
def test_certificates_along_with_course_filter(self):
# Check that an initial filter is passed to the JavaScript client.
url = reverse("support:certificates") + "?user=student@example.com&course_id=" + unicode(self.course.id)
response = self.client.get(url)
self.assertContains(response, "userFilter: 'student@example.com'")
self.assertContains(response, "courseFilter: '" + unicode(self.course.id) + "'")
@ddt.ddt
class SupportViewEnrollmentsTests(SharedModuleStoreTestCase, SupportViewTestCase):
"""Tests for the enrollment support view."""
def setUp(self):
super(SupportViewEnrollmentsTests, self).setUp()
SupportStaffRole().add_users(self.user)
self.course = CourseFactory(display_name=u'teꜱᴛ')
self.student = UserFactory.create(username='student', email='test@example.com', password='test')
for mode in (CourseMode.AUDIT, CourseMode.VERIFIED):
CourseModeFactory.create(mode_slug=mode, course_id=self.course.id) # pylint: disable=no-member
self.verification_deadline = VerificationDeadline(
course_key=self.course.id, # pylint: disable=no-member
deadline=datetime.now(UTC) + timedelta(days=365)
)
self.verification_deadline.save()
CourseEnrollmentFactory.create(mode=CourseMode.AUDIT, user=self.student, course_id=self.course.id) # pylint: disable=no-member
self.url = reverse('support:enrollment_list', kwargs={'username_or_email': self.student.username})
def assert_enrollment(self, mode):
"""
Assert that the student's enrollment has the correct mode.
"""
enrollment = CourseEnrollment.get_enrollment(self.student, self.course.id) # pylint: disable=no-member
self.assertEqual(enrollment.mode, mode)
@ddt.data('username', 'email')
def test_get_enrollments(self, search_string_type):
url = reverse(
'support:enrollment_list',
kwargs={'username_or_email': getattr(self.student, search_string_type)}
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEqual(len(data), 1)
self.assertDictContainsSubset({
'mode': CourseMode.AUDIT,
'manual_enrollment': {},
'user': self.student.username,
'course_id': unicode(self.course.id), # pylint: disable=no-member
'is_active': True,
'verified_upgrade_deadline': None,
}, data[0])
self.assertEqual(
{CourseMode.VERIFIED, CourseMode.AUDIT},
{mode['slug'] for mode in data[0]['course_modes']}
)
def test_get_manual_enrollment_history(self):
ManualEnrollmentAudit.create_manual_enrollment_audit(
self.user,
self.student.email,
ENROLLED_TO_ENROLLED,
'Financial Assistance',
CourseEnrollment.objects.get(course_id=self.course.id, user=self.student) # pylint: disable=no-member
)
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertDictContainsSubset({
'enrolled_by': self.user.email,
'reason': 'Financial Assistance',
}, json.loads(response.content)[0]['manual_enrollment'])
@ddt.data('username', 'email')
def test_change_enrollment(self, search_string_type):
self.assertIsNone(ManualEnrollmentAudit.get_manual_enrollment_by_email(self.student.email))
url = reverse(
'support:enrollment_list',
kwargs={'username_or_email': getattr(self.student, search_string_type)}
)
response = self.client.post(url, data={
'course_id': unicode(self.course.id), # pylint: disable=no-member
'old_mode': CourseMode.AUDIT,
'new_mode': CourseMode.VERIFIED,
'reason': 'Financial Assistance'
})
self.assertEqual(response.status_code, 200)
self.assertIsNotNone(ManualEnrollmentAudit.get_manual_enrollment_by_email(self.student.email))
self.assert_enrollment(CourseMode.VERIFIED)
@ddt.data(
({}, r"The field '\w+' is required."),
({'course_id': 'bad course key'}, 'Could not parse course key.'),
({
'course_id': 'course-v1:TestX+T101+2015',
'old_mode': CourseMode.AUDIT,
'new_mode': CourseMode.VERIFIED,
'reason': ''
}, 'Could not find enrollment for user'),
({
'course_id': None,
'old_mode': CourseMode.HONOR,
'new_mode': CourseMode.VERIFIED,
'reason': ''
}, r'User \w+ is not enrolled with mode ' + CourseMode.HONOR),
({
'course_id': None,
'old_mode': CourseMode.AUDIT,
'new_mode': CourseMode.CREDIT_MODE,
'reason': ''
}, "Specified course mode '{}' unavailable".format(CourseMode.CREDIT_MODE))
)
@ddt.unpack
def test_change_enrollment_bad_data(self, data, error_message):
# `self` isn't available from within the DDT declaration, so
# assign the course ID here
if 'course_id' in data and data['course_id'] is None:
data['course_id'] = unicode(self.course.id) # pylint: disable=no-member
response = self.client.post(self.url, data)
self.assertEqual(response.status_code, 400)
self.assertIsNotNone(re.match(error_message, response.content))
self.assert_enrollment(CourseMode.AUDIT)
self.assertIsNone(ManualEnrollmentAudit.get_manual_enrollment_by_email(self.student.email))
|
agpl-3.0
|
ProfessionalIT/professionalit-webiste
|
sdk/google_appengine/google/appengine/tools/devappserver2/scheduled_executor.py
|
7
|
3997
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Schedule callables to run at a particular time."""
import heapq
import threading
import time
class _Event(object):
def __init__(self, eta, runnable, key):
self._eta = eta
self._runnable = runnable
self._key = key
def __lt__(self, other):
return self.eta < other.eta
def cancel(self):
self._runnable = None
@property
def eta(self):
return self._eta
@property
def key(self):
return self._key
@property
def cancelled(self):
return self._runnable is None
def copy(self, new_eta):
return _Event(new_eta, self._runnable, self.key)
def run(self):
self._runnable()
class ScheduledExecutor(object):
"""An executor that supports scheduling."""
def __init__(self, thread_pool):
self._thread_pool = thread_pool
self._quit_event = threading.Event()
self._work_ready_condition = threading.Condition()
self._queue = []
self._key_to_events = {}
self._worker_thread = threading.Thread(
target=self._loop_and_run_scheduled_events, name="Scheduled Executor")
def start(self):
self._worker_thread.start()
def quit(self):
self._quit_event.set()
with self._work_ready_condition:
self._work_ready_condition.notify()
def add_event(self, runnable, eta, key=None):
"""Schedule an event to be run.
Args:
runnable: A callable to run.
eta: An int containing when to run runnable in seconds since the epoch.
key: An optional key that implements __hash__ that can be passed to
update_event.
"""
event = _Event(eta, runnable, key)
with self._work_ready_condition:
if key is not None:
self._key_to_events[key] = event
self._enqueue_event(event)
def update_event(self, eta, key):
"""Modify when an event should be run.
Args:
eta: An int containing when to schedule the event in seconds since the
epoch.
key: The key of the event to modify.
"""
with self._work_ready_condition:
old_event = self._key_to_events.get(key)
if old_event:
event = old_event.copy(eta)
old_event.cancel()
self._key_to_events[key] = event
self._enqueue_event(event)
def _enqueue_event(self, event):
# Must be called with _work_ready_condition acquired.
if self._queue:
old_next_event_eta = self._queue[0].eta
else:
old_next_event_eta = event.eta + 1
heapq.heappush(self._queue, event)
if event.eta < old_next_event_eta:
self._work_ready_condition.notify()
def _loop_and_run_scheduled_events(self):
with self._work_ready_condition:
while not self._quit_event.is_set():
now = time.time()
while self._queue and self._queue[0].eta <= now:
event = heapq.heappop(self._queue)
if not event.cancelled:
# Only remove uncancelled events because when an Event is cancelled,
# its entry in _key_to_events is replaced with the replacement
# Event.
if event.key:
del self._key_to_events[event.key]
self._work_ready_condition.release()
self._thread_pool.submit(event.run)
self._work_ready_condition.acquire()
now = time.time()
if self._queue:
self._work_ready_condition.wait(self._queue[0].eta - now)
else:
self._work_ready_condition.wait()
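# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module). _InlineThreadPool and
# the demo event are illustrative assumptions; a real caller passes the
# devappserver thread pool, which exposes submit(callable).
if __name__ == '__main__':
  class _InlineThreadPool(object):
    """Minimal stand-in that runs submitted callables synchronously."""

    def submit(self, runnable):
      runnable()

  executor = ScheduledExecutor(_InlineThreadPool())
  executor.start()
  # Schedule a callable two seconds out, keyed so it can be re-scheduled.
  executor.add_event(lambda: None, time.time() + 2, key='demo')
  # Pull the same event forward to one second from now.
  executor.update_event(time.time() + 1, key='demo')
  time.sleep(1.5)
  executor.quit()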
|
lgpl-3.0
|
mosaic-cloud/mosaic-distribution-dependencies
|
dependencies/nodejs/0.10.32/tools/gyp/pylib/gyp/ninja_syntax.py
|
2485
|
5536
|
# This file comes from
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
# Do not edit! Edit the upstream one instead.
"""Python module for generating .ninja files.
Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""
import textwrap
import re
def escape_path(word):
return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
class Writer(object):
def __init__(self, output, width=78):
self.output = output
self.width = width
def newline(self):
self.output.write('\n')
def comment(self, text):
for line in textwrap.wrap(text, self.width - 2):
self.output.write('# ' + line + '\n')
def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = ' '.join(filter(None, value)) # Filter out empty strings.
self._line('%s = %s' % (key, value), indent)
def pool(self, name, depth):
self._line('pool %s' % name)
self.variable('depth', depth, indent=1)
def rule(self, name, command, description=None, depfile=None,
generator=False, pool=None, restat=False, rspfile=None,
rspfile_content=None, deps=None):
self._line('rule %s' % name)
self.variable('command', command, indent=1)
if description:
self.variable('description', description, indent=1)
if depfile:
self.variable('depfile', depfile, indent=1)
if generator:
self.variable('generator', '1', indent=1)
if pool:
self.variable('pool', pool, indent=1)
if restat:
self.variable('restat', '1', indent=1)
if rspfile:
self.variable('rspfile', rspfile, indent=1)
if rspfile_content:
self.variable('rspfile_content', rspfile_content, indent=1)
if deps:
self.variable('deps', deps, indent=1)
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None):
outputs = self._as_list(outputs)
all_inputs = self._as_list(inputs)[:]
out_outputs = list(map(escape_path, outputs))
all_inputs = list(map(escape_path, all_inputs))
if implicit:
implicit = map(escape_path, self._as_list(implicit))
all_inputs.append('|')
all_inputs.extend(implicit)
if order_only:
order_only = map(escape_path, self._as_list(order_only))
all_inputs.append('||')
all_inputs.extend(order_only)
self._line('build %s: %s' % (' '.join(out_outputs),
' '.join([rule] + all_inputs)))
if variables:
if isinstance(variables, dict):
iterator = iter(variables.items())
else:
iterator = iter(variables)
for key, val in iterator:
self.variable(key, val, indent=1)
return outputs
def include(self, path):
self._line('include %s' % path)
def subninja(self, path):
self._line('subninja %s' % path)
def default(self, paths):
self._line('default %s' % ' '.join(self._as_list(paths)))
def _count_dollars_before_index(self, s, i):
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1
while dollar_index > 0 and s[dollar_index] == '$':
dollar_count += 1
dollar_index -= 1
return dollar_count
def _line(self, text, indent=0):
"""Write 'text' word-wrapped at self.width characters."""
leading_space = ' ' * indent
while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and
# that's not an escaped space.
available_space = self.width - len(leading_space) - len(' $')
space = available_space
while True:
space = text.rfind(' ', 0, space)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# No such space; just use the first unescaped space we can find.
space = available_space - 1
while True:
space = text.find(' ', space + 1)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# Give up on breaking.
break
self.output.write(leading_space + text[0:space] + ' $\n')
text = text[space+1:]
# Subsequent lines are continuations, so indent them.
leading_space = ' ' * (indent+2)
self.output.write(leading_space + text + '\n')
def _as_list(self, input):
if input is None:
return []
if isinstance(input, list):
return input
return [input]
def escape(string):
"""Escape a string such that it can be embedded into a Ninja file without
further interpretation."""
assert '\n' not in string, 'Ninja syntax does not allow newlines'
# We only have one special metacharacter: '$'.
return string.replace('$', '$$')
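# Hedged usage sketch (not part of the upstream module); the rule, variables
# and targets below are illustrative assumptions. Writer only needs a
# file-like object with write(), so sys.stdout is enough for a quick look at
# the generated syntax.
if __name__ == '__main__':
    import sys

    writer = Writer(sys.stdout)
    writer.comment('Example build file emitted by ninja_syntax')
    writer.variable('cflags', ['-O2', '-Wall'])
    writer.newline()
    writer.rule('cc', command='gcc $cflags -c $in -o $out',
                description='CC $out')
    writer.newline()
    writer.build('hello.o', 'cc', inputs='hello.c')
    writer.default('hello.o')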
|
apache-2.0
|
fighterCui/L4ReFiascoOC
|
l4/pkg/python/contrib/Lib/shelve.py
|
59
|
7866
|
"""Manage shelves of pickled objects.
A "shelf" is a persistent, dictionary-like object. The difference
with dbm databases is that the values (not the keys!) in a shelf can
be essentially arbitrary Python objects -- anything that the "pickle"
module can handle. This includes most class instances, recursive data
types, and objects containing lots of shared sub-objects. The keys
are ordinary strings.
To summarize the interface (key is a string, data is an arbitrary
object):
import shelve
d = shelve.open(filename) # open, with (g)dbm filename -- no suffix
d[key] = data # store data at key (overwrites old data if
# using an existing key)
data = d[key] # retrieve a COPY of the data at key (raise
# KeyError if no such key) -- NOTE that this
# access returns a *copy* of the entry!
del d[key] # delete data stored at key (raises KeyError
# if no such key)
flag = d.has_key(key) # true if the key exists; same as "key in d"
list = d.keys() # a list of all existing keys (slow!)
d.close() # close it
Depending on the implementation, closing a persistent dictionary may
or may not be necessary to flush changes to disk.
Normally, d[key] returns a COPY of the entry. This needs care when
mutable entries are mutated: for example, if d[key] is a list,
d[key].append(anitem)
does NOT modify the entry d[key] itself, as stored in the persistent
mapping -- it only modifies the copy, which is then immediately
discarded, so that the append has NO effect whatsoever. To append an
item to d[key] in a way that will affect the persistent mapping, use:
data = d[key]
data.append(anitem)
d[key] = data
To avoid the problem with mutable entries, you may pass the keyword
argument writeback=True in the call to shelve.open. When you use:
d = shelve.open(filename, writeback=True)
then d keeps a cache of all entries you access, and writes them all back
to the persistent mapping when you call d.close(). This ensures that
such usage as d[key].append(anitem) works as intended.
However, using keyword argument writeback=True may consume vast amounts
of memory for the cache, and it may make d.close() very slow, if you
access many of d's entries after opening it in this way: d has no way to
check which of the entries you access are mutable and/or which ones you
actually mutate, so it must cache, and write back at close, all of the
entries that you access. You can call d.sync() to write back all the
entries in the cache, and empty the cache (d.sync() also synchronizes
the persistent dictionary on disk, if feasible).
"""
# Try using cPickle and cStringIO if available.
try:
from cPickle import Pickler, Unpickler
except ImportError:
from pickle import Pickler, Unpickler
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import UserDict
__all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"]
class _ClosedDict(UserDict.DictMixin):
'Marker for a closed dict. Access attempts raise a ValueError.'
def closed(self, *args):
raise ValueError('invalid operation on closed shelf')
__getitem__ = __setitem__ = __delitem__ = keys = closed
def __repr__(self):
return '<Closed Dictionary>'
class Shelf(UserDict.DictMixin):
"""Base class for shelf implementations.
This is initialized with a dictionary-like object.
See the module's __doc__ string for an overview of the interface.
"""
def __init__(self, dict, protocol=None, writeback=False):
self.dict = dict
if protocol is None:
protocol = 0
self._protocol = protocol
self.writeback = writeback
self.cache = {}
def keys(self):
return self.dict.keys()
def __len__(self):
return len(self.dict)
def has_key(self, key):
return key in self.dict
def __contains__(self, key):
return key in self.dict
def get(self, key, default=None):
if key in self.dict:
return self[key]
return default
def __getitem__(self, key):
try:
value = self.cache[key]
except KeyError:
f = StringIO(self.dict[key])
value = Unpickler(f).load()
if self.writeback:
self.cache[key] = value
return value
def __setitem__(self, key, value):
if self.writeback:
self.cache[key] = value
f = StringIO()
p = Pickler(f, self._protocol)
p.dump(value)
self.dict[key] = f.getvalue()
def __delitem__(self, key):
del self.dict[key]
try:
del self.cache[key]
except KeyError:
pass
def close(self):
self.sync()
try:
self.dict.close()
except AttributeError:
pass
self.dict = _ClosedDict()
def __del__(self):
if not hasattr(self, 'writeback'):
# __init__ didn't succeed, so don't bother closing
return
self.close()
def sync(self):
if self.writeback and self.cache:
self.writeback = False
for key, entry in self.cache.iteritems():
self[key] = entry
self.writeback = True
self.cache = {}
if hasattr(self.dict, 'sync'):
self.dict.sync()
class BsdDbShelf(Shelf):
"""Shelf implementation using the "BSD" db interface.
This adds methods first(), next(), previous(), last() and
set_location() that have no counterpart in [g]dbm databases.
The actual database must be opened using one of the "bsddb"
modules "open" routines (i.e. bsddb.hashopen, bsddb.btopen or
bsddb.rnopen) and passed to the constructor.
See the module's __doc__ string for an overview of the interface.
"""
def __init__(self, dict, protocol=None, writeback=False):
Shelf.__init__(self, dict, protocol, writeback)
def set_location(self, key):
(key, value) = self.dict.set_location(key)
f = StringIO(value)
return (key, Unpickler(f).load())
def next(self):
(key, value) = self.dict.next()
f = StringIO(value)
return (key, Unpickler(f).load())
def previous(self):
(key, value) = self.dict.previous()
f = StringIO(value)
return (key, Unpickler(f).load())
def first(self):
(key, value) = self.dict.first()
f = StringIO(value)
return (key, Unpickler(f).load())
def last(self):
(key, value) = self.dict.last()
f = StringIO(value)
return (key, Unpickler(f).load())
class DbfilenameShelf(Shelf):
"""Shelf implementation using the "anydbm" generic dbm interface.
This is initialized with the filename for the dbm database.
See the module's __doc__ string for an overview of the interface.
"""
def __init__(self, filename, flag='c', protocol=None, writeback=False):
import anydbm
Shelf.__init__(self, anydbm.open(filename, flag), protocol, writeback)
def open(filename, flag='c', protocol=None, writeback=False):
"""Open a persistent dictionary for reading and writing.
The filename parameter is the base filename for the underlying
database. As a side-effect, an extension may be added to the
filename and more than one file may be created. The optional flag
parameter has the same interpretation as the flag parameter of
anydbm.open(). The optional protocol parameter specifies the
version of the pickle protocol (0, 1, or 2).
See the module's __doc__ string for an overview of the interface.
"""
return DbfilenameShelf(filename, flag, protocol, writeback)
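# Hedged usage sketch (not part of the standard library module); the file name
# is an illustrative assumption. It demonstrates the writeback pitfall that
# the module docstring describes.
if __name__ == '__main__':
    db = open('/tmp/shelve_example')    # uses shelve.open defined above
    db['langs'] = ['python']
    db['langs'].append('c')             # mutates a throw-away copy only
    print(db['langs'])                  # ['python'] -- the append is lost
    entry = db['langs']
    entry.append('c')
    db['langs'] = entry                 # explicit store persists the change
    print(db['langs'])                  # ['python', 'c']
    db.close()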
|
gpl-2.0
|
joone/chromium-crosswalk
|
native_client_sdk/src/build_tools/tests/verify_filelist_test.py
|
132
|
3854
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import unittest
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
BUILD_TOOLS_DIR = os.path.dirname(SCRIPT_DIR)
sys.path.append(BUILD_TOOLS_DIR)
import verify_filelist
def Verify(platform, rules_contents, directory_list):
rules = verify_filelist.Rules('test', platform, rules_contents)
rules.VerifyDirectoryList(directory_list)
class VerifyFilelistTestCase(unittest.TestCase):
def testBasic(self):
rules = """\
foo/file1
foo/file2
foo/file3
bar/baz/other
"""
dirlist = ['foo/file1', 'foo/file2', 'foo/file3', 'bar/baz/other']
Verify('linux', rules, dirlist)
def testGlob(self):
rules = 'foo/*'
dirlist = ['foo/file1', 'foo/file2', 'foo/file3/and/subdir']
Verify('linux', rules, dirlist)
def testPlatformVar(self):
rules = 'dir/${PLATFORM}/blah'
dirlist = ['dir/linux/blah']
Verify('linux', rules, dirlist)
def testPlatformVarGlob(self):
rules = 'dir/${PLATFORM}/*'
dirlist = ['dir/linux/file1', 'dir/linux/file2']
Verify('linux', rules, dirlist)
def testPlatformRule(self):
rules = """\
[linux]dir/linux/only
all/platforms
"""
linux_dirlist = ['dir/linux/only', 'all/platforms']
other_dirlist = ['all/platforms']
Verify('linux', rules, linux_dirlist)
Verify('mac', rules, other_dirlist)
def testMultiPlatformRule(self):
rules = """\
[linux,win]dir/no/macs
all/platforms
"""
nonmac_dirlist = ['dir/no/macs', 'all/platforms']
mac_dirlist = ['all/platforms']
Verify('linux', rules, nonmac_dirlist)
Verify('win', rules, nonmac_dirlist)
Verify('mac', rules, mac_dirlist)
def testPlatformRuleBadPlatform(self):
rules = '[frob]bad/platform'
self.assertRaises(verify_filelist.ParseException, Verify,
'linux', rules, [])
def testMissingFile(self):
rules = """\
foo/file1
foo/missing
"""
dirlist = ['foo/file1']
self.assertRaises(verify_filelist.VerifyException, Verify,
'linux', rules, dirlist)
def testExtraFile(self):
rules = 'foo/file1'
dirlist = ['foo/file1', 'foo/extra_file']
self.assertRaises(verify_filelist.VerifyException, Verify,
'linux', rules, dirlist)
def testEmptyGlob(self):
rules = 'foo/*'
dirlist = ['foo'] # Directory existing is not enough!
self.assertRaises(verify_filelist.VerifyException, Verify,
'linux', rules, dirlist)
def testBadGlob(self):
rules = '*/foo/bar'
dirlist = []
self.assertRaises(verify_filelist.ParseException, Verify,
'linux', rules, dirlist)
def testUnknownPlatform(self):
rules = 'foo'
dirlist = ['foo']
for platform in ('linux', 'mac', 'win'):
Verify(platform, rules, dirlist)
self.assertRaises(verify_filelist.ParseException, Verify,
'foobar', rules, dirlist)
def testUnexpectedPlatformFile(self):
rules = '[mac,win]foo/file1'
dirlist = ['foo/file1']
self.assertRaises(verify_filelist.VerifyException, Verify,
'linux', rules, dirlist)
def testWindowsPaths(self):
if os.path.sep != '/':
rules = 'foo/bar/baz'
dirlist = ['foo\\bar\\baz']
Verify('win', rules, dirlist)
else:
rules = 'foo/bar/baz\\foo'
dirlist = ['foo/bar/baz\\foo']
Verify('linux', rules, dirlist)
def testNestedGlobs(self):
rules = """\
foo/*
foo/bar/*"""
dirlist = ['foo/file', 'foo/bar/file']
Verify('linux', rules, dirlist)
rules = """\
foo/bar/*
foo/*"""
dirlist = ['foo/file', 'foo/bar/file']
Verify('linux', rules, dirlist)
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
benoitsteiner/tensorflow-xsmm
|
tensorflow/contrib/distributions/python/kernel_tests/mvn_diag_plus_low_rank_test.py
|
85
|
15449
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for MultivariateNormal."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib import distributions
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
ds = distributions
class MultivariateNormalDiagPlusLowRankTest(test.TestCase):
"""Well tested because this is a simple override of the base class."""
def setUp(self):
self._rng = np.random.RandomState(42)
def testDiagBroadcastBothBatchAndEvent(self):
# batch_shape: [3], event_shape: [2]
diag = np.array([[1., 2], [3, 4], [5, 6]])
# batch_shape: [1], event_shape: []
identity_multiplier = np.array([5.])
with self.test_session():
dist = ds.MultivariateNormalDiagPlusLowRank(
scale_diag=diag,
scale_identity_multiplier=identity_multiplier,
validate_args=True)
self.assertAllClose(
np.array([[[1. + 5, 0],
[0, 2 + 5]],
[[3 + 5, 0],
[0, 4 + 5]],
[[5 + 5, 0],
[0, 6 + 5]]]),
dist.scale.to_dense().eval())
def testDiagBroadcastBothBatchAndEvent2(self):
# This test differs from `testDiagBroadcastBothBatchAndEvent` in that it
# broadcasts batch_shape's from both the `scale_diag` and
# `scale_identity_multiplier` args.
# batch_shape: [3], event_shape: [2]
diag = np.array([[1., 2], [3, 4], [5, 6]])
# batch_shape: [3, 1], event_shape: []
identity_multiplier = np.array([[5.], [4], [3]])
with self.test_session():
dist = ds.MultivariateNormalDiagPlusLowRank(
scale_diag=diag,
scale_identity_multiplier=identity_multiplier,
validate_args=True)
self.assertAllEqual(
[3, 3, 2, 2],
dist.scale.to_dense().get_shape())
def testDiagBroadcastOnlyEvent(self):
# batch_shape: [3], event_shape: [2]
diag = np.array([[1., 2], [3, 4], [5, 6]])
# batch_shape: [3], event_shape: []
identity_multiplier = np.array([5., 4, 3])
with self.test_session():
dist = ds.MultivariateNormalDiagPlusLowRank(
scale_diag=diag,
scale_identity_multiplier=identity_multiplier,
validate_args=True)
self.assertAllClose(
np.array([[[1. + 5, 0],
[0, 2 + 5]],
[[3 + 4, 0],
[0, 4 + 4]],
[[5 + 3, 0],
[0, 6 + 3]]]), # shape: [3, 2, 2]
dist.scale.to_dense().eval())
def testDiagBroadcastMultiplierAndLoc(self):
# batch_shape: [], event_shape: [3]
loc = np.array([1., 0, -1])
# batch_shape: [3], event_shape: []
identity_multiplier = np.array([5., 4, 3])
with self.test_session():
dist = ds.MultivariateNormalDiagPlusLowRank(
loc=loc,
scale_identity_multiplier=identity_multiplier,
validate_args=True)
self.assertAllClose(
np.array([[[5, 0, 0],
[0, 5, 0],
[0, 0, 5]],
[[4, 0, 0],
[0, 4, 0],
[0, 0, 4]],
[[3, 0, 0],
[0, 3, 0],
[0, 0, 3]]]),
dist.scale.to_dense().eval())
def testMean(self):
mu = [-1.0, 1.0]
diag_large = [1.0, 5.0]
v = [[2.0], [3.0]]
diag_small = [3.0]
with self.test_session():
dist = ds.MultivariateNormalDiagPlusLowRank(
loc=mu,
scale_diag=diag_large,
scale_perturb_factor=v,
scale_perturb_diag=diag_small,
validate_args=True)
self.assertAllEqual(mu, dist.mean().eval())
def testSample(self):
# TODO(jvdillon): This test should be the basis of a new test fixture which
# is applied to every distribution. When we make this fixture, we'll also
# separate the analytical- and sample-based tests as well as for each
# function tested. For now, we group things so we can recycle one batch of
# samples (thus saving resources).
mu = np.array([-1., 1, 0.5], dtype=np.float32)
diag_large = np.array([1., 0.5, 0.75], dtype=np.float32)
diag_small = np.array([-1.1, 1.2], dtype=np.float32)
v = np.array([[0.7, 0.8],
[0.9, 1],
[0.5, 0.6]], dtype=np.float32) # shape: [k, r] = [3, 2]
true_mean = mu
true_scale = np.diag(diag_large) + np.matmul(np.matmul(
v, np.diag(diag_small)), v.T)
true_covariance = np.matmul(true_scale, true_scale.T)
true_variance = np.diag(true_covariance)
true_stddev = np.sqrt(true_variance)
with self.test_session() as sess:
dist = ds.MultivariateNormalDiagPlusLowRank(
loc=mu,
scale_diag=diag_large,
scale_perturb_factor=v,
scale_perturb_diag=diag_small,
validate_args=True)
# The following distributions will test the KL divergence calculation.
mvn_identity = ds.MultivariateNormalDiag(
loc=np.array([1., 2, 0.25], dtype=np.float32),
validate_args=True)
mvn_scaled = ds.MultivariateNormalDiag(
loc=mvn_identity.loc,
scale_identity_multiplier=2.2,
validate_args=True)
mvn_diag = ds.MultivariateNormalDiag(
loc=mvn_identity.loc,
scale_diag=np.array([0.5, 1.5, 1.], dtype=np.float32),
validate_args=True)
mvn_chol = ds.MultivariateNormalTriL(
loc=np.array([1., 2, -1], dtype=np.float32),
scale_tril=np.array([[6., 0, 0],
[2, 5, 0],
[1, 3, 4]], dtype=np.float32) / 10.,
validate_args=True)
scale = dist.scale.to_dense()
n = int(30e3)
samps = dist.sample(n, seed=0)
sample_mean = math_ops.reduce_mean(samps, 0)
x = samps - sample_mean
sample_covariance = math_ops.matmul(x, x, transpose_a=True) / n
sample_kl_identity = math_ops.reduce_mean(
dist.log_prob(samps) - mvn_identity.log_prob(samps), 0)
analytical_kl_identity = ds.kl_divergence(dist, mvn_identity)
sample_kl_scaled = math_ops.reduce_mean(
dist.log_prob(samps) - mvn_scaled.log_prob(samps), 0)
analytical_kl_scaled = ds.kl_divergence(dist, mvn_scaled)
sample_kl_diag = math_ops.reduce_mean(
dist.log_prob(samps) - mvn_diag.log_prob(samps), 0)
analytical_kl_diag = ds.kl_divergence(dist, mvn_diag)
sample_kl_chol = math_ops.reduce_mean(
dist.log_prob(samps) - mvn_chol.log_prob(samps), 0)
analytical_kl_chol = ds.kl_divergence(dist, mvn_chol)
n = int(10e3)
baseline = ds.MultivariateNormalDiag(
loc=np.array([-1., 0.25, 1.25], dtype=np.float32),
scale_diag=np.array([1.5, 0.5, 1.], dtype=np.float32),
validate_args=True)
samps = baseline.sample(n, seed=0)
sample_kl_identity_diag_baseline = math_ops.reduce_mean(
baseline.log_prob(samps) - mvn_identity.log_prob(samps), 0)
analytical_kl_identity_diag_baseline = ds.kl_divergence(
baseline, mvn_identity)
sample_kl_scaled_diag_baseline = math_ops.reduce_mean(
baseline.log_prob(samps) - mvn_scaled.log_prob(samps), 0)
analytical_kl_scaled_diag_baseline = ds.kl_divergence(
baseline, mvn_scaled)
sample_kl_diag_diag_baseline = math_ops.reduce_mean(
baseline.log_prob(samps) - mvn_diag.log_prob(samps), 0)
analytical_kl_diag_diag_baseline = ds.kl_divergence(baseline, mvn_diag)
sample_kl_chol_diag_baseline = math_ops.reduce_mean(
baseline.log_prob(samps) - mvn_chol.log_prob(samps), 0)
analytical_kl_chol_diag_baseline = ds.kl_divergence(baseline, mvn_chol)
[
sample_mean_,
analytical_mean_,
sample_covariance_,
analytical_covariance_,
analytical_variance_,
analytical_stddev_,
scale_,
sample_kl_identity_, analytical_kl_identity_,
sample_kl_scaled_, analytical_kl_scaled_,
sample_kl_diag_, analytical_kl_diag_,
sample_kl_chol_, analytical_kl_chol_,
sample_kl_identity_diag_baseline_,
analytical_kl_identity_diag_baseline_,
sample_kl_scaled_diag_baseline_, analytical_kl_scaled_diag_baseline_,
sample_kl_diag_diag_baseline_, analytical_kl_diag_diag_baseline_,
sample_kl_chol_diag_baseline_, analytical_kl_chol_diag_baseline_,
] = sess.run([
sample_mean,
dist.mean(),
sample_covariance,
dist.covariance(),
dist.variance(),
dist.stddev(),
scale,
sample_kl_identity, analytical_kl_identity,
sample_kl_scaled, analytical_kl_scaled,
sample_kl_diag, analytical_kl_diag,
sample_kl_chol, analytical_kl_chol,
sample_kl_identity_diag_baseline,
analytical_kl_identity_diag_baseline,
sample_kl_scaled_diag_baseline, analytical_kl_scaled_diag_baseline,
sample_kl_diag_diag_baseline, analytical_kl_diag_diag_baseline,
sample_kl_chol_diag_baseline, analytical_kl_chol_diag_baseline,
])
sample_variance_ = np.diag(sample_covariance_)
sample_stddev_ = np.sqrt(sample_variance_)
logging.vlog(2, "true_mean:\n{} ".format(true_mean))
logging.vlog(2, "sample_mean:\n{}".format(sample_mean_))
logging.vlog(2, "analytical_mean:\n{}".format(analytical_mean_))
logging.vlog(2, "true_covariance:\n{}".format(true_covariance))
logging.vlog(2, "sample_covariance:\n{}".format(sample_covariance_))
logging.vlog(2, "analytical_covariance:\n{}".format(
analytical_covariance_))
logging.vlog(2, "true_variance:\n{}".format(true_variance))
logging.vlog(2, "sample_variance:\n{}".format(sample_variance_))
logging.vlog(2, "analytical_variance:\n{}".format(analytical_variance_))
logging.vlog(2, "true_stddev:\n{}".format(true_stddev))
logging.vlog(2, "sample_stddev:\n{}".format(sample_stddev_))
logging.vlog(2, "analytical_stddev:\n{}".format(analytical_stddev_))
logging.vlog(2, "true_scale:\n{}".format(true_scale))
logging.vlog(2, "scale:\n{}".format(scale_))
logging.vlog(2, "kl_identity: analytical:{} sample:{}".format(
analytical_kl_identity_, sample_kl_identity_))
logging.vlog(2, "kl_scaled: analytical:{} sample:{}".format(
analytical_kl_scaled_, sample_kl_scaled_))
logging.vlog(2, "kl_diag: analytical:{} sample:{}".format(
analytical_kl_diag_, sample_kl_diag_))
logging.vlog(2, "kl_chol: analytical:{} sample:{}".format(
analytical_kl_chol_, sample_kl_chol_))
logging.vlog(
2, "kl_identity_diag_baseline: analytical:{} sample:{}".format(
analytical_kl_identity_diag_baseline_,
sample_kl_identity_diag_baseline_))
logging.vlog(
2, "kl_scaled_diag_baseline: analytical:{} sample:{}".format(
analytical_kl_scaled_diag_baseline_,
sample_kl_scaled_diag_baseline_))
logging.vlog(2, "kl_diag_diag_baseline: analytical:{} sample:{}".format(
analytical_kl_diag_diag_baseline_,
sample_kl_diag_diag_baseline_))
logging.vlog(2, "kl_chol_diag_baseline: analytical:{} sample:{}".format(
analytical_kl_chol_diag_baseline_,
sample_kl_chol_diag_baseline_))
self.assertAllClose(true_mean, sample_mean_,
atol=0., rtol=0.02)
self.assertAllClose(true_mean, analytical_mean_,
atol=0., rtol=1e-6)
self.assertAllClose(true_covariance, sample_covariance_,
atol=0., rtol=0.02)
self.assertAllClose(true_covariance, analytical_covariance_,
atol=0., rtol=1e-6)
self.assertAllClose(true_variance, sample_variance_,
atol=0., rtol=0.02)
self.assertAllClose(true_variance, analytical_variance_,
atol=0., rtol=1e-6)
self.assertAllClose(true_stddev, sample_stddev_,
atol=0., rtol=0.02)
self.assertAllClose(true_stddev, analytical_stddev_,
atol=0., rtol=1e-6)
self.assertAllClose(true_scale, scale_,
atol=0., rtol=1e-6)
self.assertAllClose(sample_kl_identity_, analytical_kl_identity_,
atol=0., rtol=0.02)
self.assertAllClose(sample_kl_scaled_, analytical_kl_scaled_,
atol=0., rtol=0.02)
self.assertAllClose(sample_kl_diag_, analytical_kl_diag_,
atol=0., rtol=0.02)
self.assertAllClose(sample_kl_chol_, analytical_kl_chol_,
atol=0., rtol=0.02)
self.assertAllClose(
sample_kl_identity_diag_baseline_,
analytical_kl_identity_diag_baseline_,
atol=0., rtol=0.02)
self.assertAllClose(
sample_kl_scaled_diag_baseline_,
analytical_kl_scaled_diag_baseline_,
atol=0., rtol=0.02)
self.assertAllClose(
sample_kl_diag_diag_baseline_,
analytical_kl_diag_diag_baseline_,
atol=0., rtol=0.04)
self.assertAllClose(
sample_kl_chol_diag_baseline_,
analytical_kl_chol_diag_baseline_,
atol=0., rtol=0.02)
def testImplicitLargeDiag(self):
mu = np.array([[1., 2, 3],
[11, 22, 33]]) # shape: [b, k] = [2, 3]
u = np.array([[[1., 2],
[3, 4],
[5, 6]],
[[0.5, 0.75],
[1, 0.25],
[1.5, 1.25]]]) # shape: [b, k, r] = [2, 3, 2]
m = np.array([[0.1, 0.2],
[0.4, 0.5]]) # shape: [b, r] = [2, 2]
scale = np.stack([
np.eye(3) + np.matmul(np.matmul(u[0], np.diag(m[0])),
np.transpose(u[0])),
np.eye(3) + np.matmul(np.matmul(u[1], np.diag(m[1])),
np.transpose(u[1])),
])
cov = np.stack([np.matmul(scale[0], scale[0].T),
np.matmul(scale[1], scale[1].T)])
logging.vlog(2, "expected_cov:\n{}".format(cov))
with self.test_session():
mvn = ds.MultivariateNormalDiagPlusLowRank(
loc=mu,
scale_perturb_factor=u,
scale_perturb_diag=m)
self.assertAllClose(cov, mvn.covariance().eval(), atol=0., rtol=1e-6)
if __name__ == "__main__":
test.main()
|
apache-2.0
|
gwr/samba
|
lib/dnspython/dns/rdtypes/ANY/HINFO.py
|
248
|
2659
|
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.exception
import dns.rdata
import dns.tokenizer
class HINFO(dns.rdata.Rdata):
"""HINFO record
@ivar cpu: the CPU type
@type cpu: string
@ivar os: the OS type
@type os: string
@see: RFC 1035"""
__slots__ = ['cpu', 'os']
def __init__(self, rdclass, rdtype, cpu, os):
super(HINFO, self).__init__(rdclass, rdtype)
self.cpu = cpu
self.os = os
def to_text(self, origin=None, relativize=True, **kw):
return '"%s" "%s"' % (dns.rdata._escapify(self.cpu),
dns.rdata._escapify(self.os))
def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
cpu = tok.get_string()
os = tok.get_string()
tok.get_eol()
return cls(rdclass, rdtype, cpu, os)
from_text = classmethod(from_text)
def to_wire(self, file, compress = None, origin = None):
l = len(self.cpu)
assert l < 256
byte = chr(l)
file.write(byte)
file.write(self.cpu)
l = len(self.os)
assert l < 256
byte = chr(l)
file.write(byte)
file.write(self.os)
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
l = ord(wire[current])
current += 1
rdlen -= 1
if l > rdlen:
raise dns.exception.FormError
cpu = wire[current : current + l]
current += l
rdlen -= l
l = ord(wire[current])
current += 1
rdlen -= 1
if l != rdlen:
raise dns.exception.FormError
os = wire[current : current + l]
return cls(rdclass, rdtype, cpu, os)
from_wire = classmethod(from_wire)
def _cmp(self, other):
v = cmp(self.cpu, other.cpu)
if v == 0:
v = cmp(self.os, other.os)
return v
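# Hedged usage sketch (not part of the upstream module); the CPU and OS
# strings are illustrative. dns.rdataclass and dns.rdatatype are standard
# dnspython modules providing the IN class and HINFO type constants assumed
# here.
if __name__ == '__main__':
    import dns.rdataclass
    import dns.rdatatype

    rr = HINFO(dns.rdataclass.IN, dns.rdatatype.HINFO, 'i686', 'Linux')
    print(rr.to_text())    # renders as two quoted strings: "i686" "Linux"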
|
gpl-3.0
|
sebrandon1/neutron
|
neutron/agent/l3/dvr_fip_ns.py
|
3
|
12410
|
# Copyright (c) 2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_log import log as logging
from neutron.agent.l3 import fip_rule_priority_allocator as frpa
from neutron.agent.l3 import link_local_allocator as lla
from neutron.agent.l3 import namespaces
from neutron.agent.linux import ip_lib
from neutron.agent.linux import iptables_manager
from neutron.common import constants
from neutron.common import utils as common_utils
from neutron.ipam import utils as ipam_utils
LOG = logging.getLogger(__name__)
FIP_NS_PREFIX = 'fip-'
FIP_EXT_DEV_PREFIX = 'fg-'
FIP_2_ROUTER_DEV_PREFIX = 'fpr-'
ROUTER_2_FIP_DEV_PREFIX = namespaces.ROUTER_2_FIP_DEV_PREFIX
# Route Table index for FIPs
FIP_RT_TBL = 16
# Rule priority range for FIPs
FIP_PR_START = 32768
FIP_PR_END = FIP_PR_START + 40000
class FipNamespace(namespaces.Namespace):
def __init__(self, ext_net_id, agent_conf, driver, use_ipv6):
name = self._get_ns_name(ext_net_id)
super(FipNamespace, self).__init__(
name, agent_conf, driver, use_ipv6)
self._ext_net_id = ext_net_id
self.agent_conf = agent_conf
self.driver = driver
self.use_ipv6 = use_ipv6
self.agent_gateway_port = None
self._subscribers = set()
path = os.path.join(agent_conf.state_path, 'fip-priorities')
self._rule_priorities = frpa.FipRulePriorityAllocator(path,
FIP_PR_START,
FIP_PR_END)
self._iptables_manager = iptables_manager.IptablesManager(
namespace=self.get_name(),
use_ipv6=self.use_ipv6)
path = os.path.join(agent_conf.state_path, 'fip-linklocal-networks')
self.local_subnets = lla.LinkLocalAllocator(
path, constants.DVR_FIP_LL_CIDR)
self.destroyed = False
@classmethod
def _get_ns_name(cls, ext_net_id):
return namespaces.build_ns_name(FIP_NS_PREFIX, ext_net_id)
def get_name(self):
return self._get_ns_name(self._ext_net_id)
def get_ext_device_name(self, port_id):
return (FIP_EXT_DEV_PREFIX + port_id)[:self.driver.DEV_NAME_LEN]
def get_int_device_name(self, router_id):
return (FIP_2_ROUTER_DEV_PREFIX + router_id)[:self.driver.DEV_NAME_LEN]
def get_rtr_ext_device_name(self, router_id):
return (ROUTER_2_FIP_DEV_PREFIX + router_id)[:self.driver.DEV_NAME_LEN]
def has_subscribers(self):
return len(self._subscribers) != 0
def subscribe(self, external_net_id):
is_first = not self.has_subscribers()
self._subscribers.add(external_net_id)
return is_first
def unsubscribe(self, external_net_id):
self._subscribers.discard(external_net_id)
return not self.has_subscribers()
def allocate_rule_priority(self, floating_ip):
return self._rule_priorities.allocate(floating_ip)
def deallocate_rule_priority(self, floating_ip):
self._rule_priorities.release(floating_ip)
def _gateway_added(self, ex_gw_port, interface_name):
"""Add Floating IP gateway port."""
LOG.debug("add gateway interface(%s)", interface_name)
ns_name = self.get_name()
self.driver.plug(ex_gw_port['network_id'],
ex_gw_port['id'],
interface_name,
ex_gw_port['mac_address'],
bridge=self.agent_conf.external_network_bridge,
namespace=ns_name,
prefix=FIP_EXT_DEV_PREFIX,
mtu=ex_gw_port.get('mtu'))
# Remove stale fg devices
ip_wrapper = ip_lib.IPWrapper(namespace=ns_name)
devices = ip_wrapper.get_devices()
for device in devices:
name = device.name
if name.startswith(FIP_EXT_DEV_PREFIX) and name != interface_name:
ext_net_bridge = self.agent_conf.external_network_bridge
self.driver.unplug(name,
bridge=ext_net_bridge,
namespace=ns_name,
prefix=FIP_EXT_DEV_PREFIX)
ip_cidrs = common_utils.fixed_ip_cidrs(ex_gw_port['fixed_ips'])
self.driver.init_l3(interface_name, ip_cidrs, namespace=ns_name,
clean_connections=True)
self.update_gateway_port(ex_gw_port)
cmd = ['sysctl', '-w', 'net.ipv4.conf.%s.proxy_arp=1' % interface_name]
ip_wrapper.netns.execute(cmd, check_exit_code=False)
def create(self):
LOG.debug("DVR: add fip namespace: %s", self.name)
# parent class will ensure the namespace exists and turn-on forwarding
super(FipNamespace, self).create()
# Somewhere in the 3.19 kernel timeframe ip_nonlocal_bind was
# changed to be a per-namespace attribute. To be backwards
# compatible we need to try both if at first we fail.
ip_wrapper = ip_lib.IPWrapper(namespace=self.name)
try:
ip_wrapper.netns.execute(['sysctl',
'-w',
'net.ipv4.ip_nonlocal_bind=1'],
log_fail_as_error=False,
run_as_root=True)
except RuntimeError:
LOG.debug('DVR: fip namespace (%s) does not support setting '
'net.ipv4.ip_nonlocal_bind, trying in root namespace',
self.name)
self.ip_wrapper_root.netns.execute(['sysctl',
'-w',
'net.ipv4.ip_nonlocal_bind=1'],
run_as_root=True)
# no connection tracking needed in fip namespace
self._iptables_manager.ipv4['raw'].add_rule('PREROUTING',
'-j CT --notrack')
self._iptables_manager.apply()
def delete(self):
self.destroyed = True
self._delete()
self.agent_gateway_port = None
@namespaces.check_ns_existence
def _delete(self):
ip_wrapper = ip_lib.IPWrapper(namespace=self.name)
for d in ip_wrapper.get_devices(exclude_loopback=True):
if d.name.startswith(FIP_2_ROUTER_DEV_PREFIX):
# internal link between IRs and FIP NS
ip_wrapper.del_veth(d.name)
elif d.name.startswith(FIP_EXT_DEV_PREFIX):
# single port from FIP NS to br-ext
# TODO(carl) Where does the port get deleted?
LOG.debug('DVR: unplug: %s', d.name)
ext_net_bridge = self.agent_conf.external_network_bridge
self.driver.unplug(d.name,
bridge=ext_net_bridge,
namespace=self.name,
prefix=FIP_EXT_DEV_PREFIX)
# TODO(mrsmith): add LOG warn if fip count != 0
LOG.debug('DVR: destroy fip namespace: %s', self.name)
super(FipNamespace, self).delete()
def create_gateway_port(self, agent_gateway_port):
"""Create Floating IP gateway port.
Request port creation from Plugin then creates
Floating IP namespace and adds gateway port.
"""
self.create()
iface_name = self.get_ext_device_name(agent_gateway_port['id'])
self._gateway_added(agent_gateway_port, iface_name)
def _check_for_gateway_ip_change(self, new_agent_gateway_port):
def get_gateway_ips(gateway_port):
gw_ips = {}
if gateway_port:
for subnet in gateway_port.get('subnets', []):
gateway_ip = subnet.get('gateway_ip', None)
if gateway_ip:
ip_version = ip_lib.get_ip_version(gateway_ip)
gw_ips[ip_version] = gateway_ip
return gw_ips
new_gw_ips = get_gateway_ips(new_agent_gateway_port)
old_gw_ips = get_gateway_ips(self.agent_gateway_port)
return new_gw_ips != old_gw_ips
def update_gateway_port(self, agent_gateway_port):
gateway_ip_not_changed = self.agent_gateway_port and (
not self._check_for_gateway_ip_change(agent_gateway_port))
self.agent_gateway_port = agent_gateway_port
if gateway_ip_not_changed:
return
ns_name = self.get_name()
interface_name = self.get_ext_device_name(agent_gateway_port['id'])
for fixed_ip in agent_gateway_port['fixed_ips']:
ip_lib.send_ip_addr_adv_notif(ns_name,
interface_name,
fixed_ip['ip_address'],
self.agent_conf)
ipd = ip_lib.IPDevice(interface_name, namespace=ns_name)
for subnet in agent_gateway_port['subnets']:
gw_ip = subnet.get('gateway_ip')
if gw_ip:
is_gateway_not_in_subnet = not ipam_utils.check_subnet_ip(
subnet.get('cidr'), gw_ip)
if is_gateway_not_in_subnet:
ipd.route.add_route(gw_ip, scope='link')
ipd.route.add_gateway(gw_ip)
else:
current_gateway = ipd.route.get_gateway()
if current_gateway and current_gateway.get('gateway'):
ipd.route.delete_gateway(current_gateway.get('gateway'))
def _add_cidr_to_device(self, device, ip_cidr):
if not device.addr.list(to=ip_cidr):
device.addr.add(ip_cidr, add_broadcast=False)
def create_rtr_2_fip_link(self, ri):
"""Create interface between router and Floating IP namespace."""
LOG.debug("Create FIP link interfaces for router %s", ri.router_id)
rtr_2_fip_name = self.get_rtr_ext_device_name(ri.router_id)
fip_2_rtr_name = self.get_int_device_name(ri.router_id)
fip_ns_name = self.get_name()
# add link local IP to interface
if ri.rtr_fip_subnet is None:
ri.rtr_fip_subnet = self.local_subnets.allocate(ri.router_id)
rtr_2_fip, fip_2_rtr = ri.rtr_fip_subnet.get_pair()
rtr_2_fip_dev = ip_lib.IPDevice(rtr_2_fip_name, namespace=ri.ns_name)
fip_2_rtr_dev = ip_lib.IPDevice(fip_2_rtr_name, namespace=fip_ns_name)
if not rtr_2_fip_dev.exists():
ip_wrapper = ip_lib.IPWrapper(namespace=ri.ns_name)
rtr_2_fip_dev, fip_2_rtr_dev = ip_wrapper.add_veth(rtr_2_fip_name,
fip_2_rtr_name,
fip_ns_name)
mtu = ri.get_ex_gw_port().get('mtu')
if mtu:
rtr_2_fip_dev.link.set_mtu(mtu)
fip_2_rtr_dev.link.set_mtu(mtu)
rtr_2_fip_dev.link.set_up()
fip_2_rtr_dev.link.set_up()
self._add_cidr_to_device(rtr_2_fip_dev, str(rtr_2_fip))
self._add_cidr_to_device(fip_2_rtr_dev, str(fip_2_rtr))
# add default route for the link local interface
rtr_2_fip_dev.route.add_gateway(str(fip_2_rtr.ip), table=FIP_RT_TBL)
def scan_fip_ports(self, ri):
# don't scan if not dvr or count is not None
if ri.dist_fip_count is not None:
return
# scan system for any existing fip ports
ri.dist_fip_count = 0
rtr_2_fip_interface = self.get_rtr_ext_device_name(ri.router_id)
device = ip_lib.IPDevice(rtr_2_fip_interface, namespace=ri.ns_name)
if device.exists():
ri.dist_fip_count = len(ri.get_router_cidrs(device))
|
apache-2.0
|
sserrot/champion_relationships
|
venv/Lib/site-packages/pygments/styles/algol_nu.py
|
4
|
2278
|
# -*- coding: utf-8 -*-
"""
pygments.styles.algol_nu
~~~~~~~~~~~~~~~~~~~~~~~~
Algol publication style without underlining of keywords.
This style renders source code for publication of algorithms in
scientific papers and academic texts, where its format is frequently used.
It is based on the style of the revised Algol-60 language report[1].
o No colours, only black, white and shades of grey are used.
o Keywords are rendered in lowercase boldface.
o Builtins are rendered in lowercase boldface italic.
o Docstrings and pragmas are rendered in dark grey boldface.
o Library identifiers are rendered in dark grey boldface italic.
o Comments are rendered in grey italic.
To render keywords with underlining, refer to the `Algol` style.
For lowercase conversion of keywords and builtins in languages where
these are not or might not be lowercase, a supporting lexer is required.
The Algol and Modula-2 lexers automatically convert to lowercase whenever
this style is selected.
[1] `Revised Report on the Algorithmic Language Algol-60 <http://www.masswerk.at/algol60/report.htm>`
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, Operator
class Algol_NuStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "italic #888",
Comment.Preproc: "bold noitalic #888",
Comment.Special: "bold noitalic #888",
Keyword: "bold",
Keyword.Declaration: "italic",
Name.Builtin: "bold italic",
Name.Builtin.Pseudo: "bold italic",
Name.Namespace: "bold italic #666",
Name.Class: "bold italic #666",
Name.Function: "bold italic #666",
Name.Variable: "bold italic #666",
Name.Constant: "bold italic #666",
Operator.Word: "bold",
String: "italic #666",
Error: "border:#FF0000"
}
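# Hedged usage sketch (not part of the style module); the snippet being
# highlighted is an illustrative assumption. Pygments formatters accept the
# style class directly via their `style` option.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import HtmlFormatter
    from pygments.lexers import PythonLexer

    source = "def gcd(a, b):\n    return gcd(b, a % b) if b else a\n"
    print(highlight(source, PythonLexer(), HtmlFormatter(style=Algol_NuStyle)))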
|
mit
|
j-griffith/cinder
|
cinder/manager.py
|
3
|
13090
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base Manager class.
Managers are responsible for a certain aspect of the system. It is a logical
grouping of code relating to a portion of the system. In general other
components should be using the manager to make changes to the components that
it is responsible for.
For example, other components that need to deal with volumes in some way,
should do so by calling methods on the VolumeManager instead of directly
changing fields in the database. This allows us to keep all of the code
relating to volumes in the same place.
We have adopted a basic strategy of Smart managers and dumb data, which means
rather than attaching methods to data objects, components should call manager
methods that act on the data.
Methods on managers that can be executed locally should be called directly. If
a particular method must execute on a remote host, this should be done via rpc
to the service that wraps the manager
Managers should be responsible for most of the db access, and
non-implementation specific data. Anything implementation specific that can't
be generalized should be done by the Driver.
In general, we prefer to have one manager with multiple drivers for different
implementations, but sometimes it makes sense to have multiple managers. You
can think of it this way: Abstract different overall strategies at the manager
level(FlatNetwork vs VlanNetwork), and different implementations at the driver
level(LinuxNetDriver vs CiscoNetDriver).
Managers will often provide methods for initial setup of a host or periodic
tasks to a wrapping service.
This module provides Manager, a base class for managers.
"""
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_service import periodic_task
from oslo_utils import timeutils
from cinder import context
from cinder import db
from cinder.db import base
from cinder import exception
from cinder import objects
from cinder import rpc
from cinder.scheduler import rpcapi as scheduler_rpcapi
from cinder import utils
from eventlet import greenpool
from eventlet import tpool
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class PeriodicTasks(periodic_task.PeriodicTasks):
def __init__(self):
super(PeriodicTasks, self).__init__(CONF)
class Manager(base.Base, PeriodicTasks):
# Set RPC API version to 1.0 by default.
RPC_API_VERSION = '1.0'
target = messaging.Target(version=RPC_API_VERSION)
def __init__(self, host=None, db_driver=None, cluster=None, **kwargs):
if not host:
host = CONF.host
self.host = host
self.cluster = cluster
self.additional_endpoints = []
self.availability_zone = CONF.storage_availability_zone
super(Manager, self).__init__(db_driver)
def _set_tpool_size(self, nthreads):
# NOTE(geguileo): Until PR #472 is merged we have to be very careful
# not to call "tpool.execute" before calling this method.
tpool.set_num_threads(nthreads)
@property
def service_topic_queue(self):
return self.cluster or self.host
def init_host(self, service_id=None, added_to_cluster=None):
"""Handle initialization if this is a standalone service.
A hook point for services to execute tasks before the services are made
available (i.e. showing up on RPC and starting to accept RPC calls) to
other components. Child classes should override this method.
:param service_id: ID of the service where the manager is running.
:param added_to_cluster: True when a host's cluster configuration has
changed from not being defined or being '' to
any other value and the DB service record
reflects this new value.
"""
pass
def init_host_with_rpc(self):
"""A hook for service to do jobs after RPC is ready.
Like init_host(), this method is a hook where services get a chance
to execute tasks that *need* RPC. Child classes should override
this method.
"""
pass
def is_working(self):
"""Method indicating if service is working correctly.
This method is supposed to be overridden by subclasses and return if
manager is working correctly.
"""
return True
def reset(self):
"""Method executed when SIGHUP is caught by the process.
We're utilizing it to reset RPC API version pins to avoid restart of
the service when rolling upgrade is completed.
"""
LOG.info('Resetting cached RPC version pins.')
rpc.LAST_OBJ_VERSIONS = {}
rpc.LAST_RPC_VERSIONS = {}
def set_log_levels(self, context, log_request):
utils.set_log_levels(log_request.prefix, log_request.level)
def get_log_levels(self, context, log_request):
levels = utils.get_log_levels(log_request.prefix)
log_levels = [objects.LogLevel(context, prefix=prefix, level=level)
for prefix, level in levels.items()]
return objects.LogLevelList(context, objects=log_levels)
class ThreadPoolManager(Manager):
def __init__(self, *args, **kwargs):
self._tp = greenpool.GreenPool()
super(ThreadPoolManager, self).__init__(*args, **kwargs)
def _add_to_threadpool(self, func, *args, **kwargs):
self._tp.spawn_n(func, *args, **kwargs)
class SchedulerDependentManager(ThreadPoolManager):
"""Periodically send capability updates to the Scheduler services.
Services that need to update the Scheduler of their capabilities
should derive from this class. Otherwise they can derive from
manager.Manager directly. Updates are only sent after
update_service_capabilities is called with non-None values.
"""
def __init__(self, host=None, db_driver=None, service_name='undefined',
cluster=None):
self.last_capabilities = None
self.service_name = service_name
self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI()
super(SchedulerDependentManager, self).__init__(host, db_driver,
cluster=cluster)
def update_service_capabilities(self, capabilities):
"""Remember these capabilities to send on next periodic update."""
self.last_capabilities = capabilities
def _publish_service_capabilities(self, context):
"""Pass data back to the scheduler at a periodic interval."""
if self.last_capabilities:
LOG.debug('Notifying Schedulers of capabilities ...')
self.scheduler_rpcapi.update_service_capabilities(
context,
self.service_name,
self.host,
self.last_capabilities,
self.cluster)
try:
self.scheduler_rpcapi.notify_service_capabilities(
context,
self.service_name,
self.service_topic_queue,
self.last_capabilities)
except exception.ServiceTooOld as e:
# This means we have Newton's c-sch in the deployment, so
# rpcapi cannot send the message. We can safely ignore the
# error. Log it because it shouldn't happen after upgrade.
msg = ("Failed to notify about cinder-volume service "
"capabilities for host %(host)s. This is normal "
"during a live upgrade. Error: %(e)s")
LOG.warning(msg, {'host': self.host, 'e': e})
def reset(self):
super(SchedulerDependentManager, self).reset()
self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI()
class CleanableManager(object):
def do_cleanup(self, context, cleanup_request):
LOG.info('Initiating service %s cleanup',
cleanup_request.service_id)
# If the 'until' field in the cleanup request is not set, we default to
# this very moment.
until = cleanup_request.until or timeutils.utcnow()
keep_entry = False
to_clean = db.worker_get_all(
context,
resource_type=cleanup_request.resource_type,
resource_id=cleanup_request.resource_id,
service_id=cleanup_request.service_id,
until=until)
for clean in to_clean:
original_service_id = clean.service_id
original_time = clean.updated_at
# Try to do a soft delete to mark the entry as being cleaned up
# by us (setting service id to our service id).
res = db.worker_claim_for_cleanup(context,
claimer_id=self.service_id,
orm_worker=clean)
# Claim may fail if entry is being cleaned by another service, has
# been removed (finished cleaning) by another service or the user
# started a new cleanable operation.
# In any of these cases we don't have to do cleanup or remove the
# worker entry.
if not res:
continue
# Try to get versioned object for resource we have to cleanup
try:
vo_cls = getattr(objects, clean.resource_type)
vo = vo_cls.get_by_id(context, clean.resource_id)
# Set the worker DB entry in the VO and mark it as being a
# clean operation
clean.cleaning = True
vo.worker = clean
except exception.NotFound:
LOG.debug('Skipping cleanup for non existent %(type)s %(id)s.',
{'type': clean.resource_type,
'id': clean.resource_id})
else:
# Resource status should match
if vo.status != clean.status:
LOG.debug('Skipping cleanup for mismatching work on '
'%(type)s %(id)s: %(exp_sts)s <> %(found_sts)s.',
{'type': clean.resource_type,
'id': clean.resource_id,
'exp_sts': clean.status,
'found_sts': vo.status})
else:
LOG.info('Cleaning %(type)s with id %(id)s and status '
'%(status)s',
{'type': clean.resource_type,
'id': clean.resource_id,
'status': clean.status},
resource=vo)
try:
# Some cleanup jobs are performed asynchronously, so
# we don't delete the worker entry, they'll take care
# of it
keep_entry = self._do_cleanup(context, vo)
except Exception:
LOG.exception('Could not perform cleanup.')
# Return the worker DB entry to the original service
db.worker_update(context, clean.id,
service_id=original_service_id,
updated_at=original_time)
continue
# The resource either didn't exist or was properly cleaned, either
# way we can remove the entry from the worker table if the cleanup
# method doesn't want to keep the entry (for example for delayed
# deletion).
if not keep_entry and not db.worker_destroy(context, id=clean.id):
LOG.warning('Could not remove worker entry %s.', clean.id)
LOG.info('Service %s cleanup completed.', cleanup_request.service_id)
def _do_cleanup(self, ctxt, vo_resource):
return False
def init_host(self, service_id, **kwargs):
ctxt = context.get_admin_context()
self.service_id = service_id
# TODO(geguileo): Once we don't support MySQL 5.5 anymore we can remove
# call to workers_init.
db.workers_init()
cleanup_request = objects.CleanupRequest(service_id=service_id)
self.do_cleanup(ctxt, cleanup_request)
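# Hedged usage sketch (ExampleManager and its capability values are
# illustrative assumptions, not part of cinder). It shows the pattern the
# module docstring describes: a service-specific manager derives from one of
# the base classes above and reports its capabilities for the scheduler.
class ExampleManager(SchedulerDependentManager):

    RPC_API_VERSION = '1.0'

    def init_host(self, service_id=None, added_to_cluster=None):
        # One-time setup before the wrapping service starts accepting RPC.
        LOG.debug('ExampleManager ready on host %s', self.host)

    @periodic_task.periodic_task
    def _report_capabilities(self, context):
        # The remembered capabilities are sent out by
        # _publish_service_capabilities on the next periodic update.
        self.update_service_capabilities({'total_capacity_gb': 100.0})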
|
apache-2.0
|
frreiss/tensorflow-fred
|
tensorflow/python/ops/standard_ops.py
|
15
|
5803
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=unused-import
"""Import names of Tensor Flow standard Ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import platform as _platform
import sys as _sys
from tensorflow.python import autograph
from tensorflow.python.training.experimental import loss_scaling_gradient_tape
# pylint: disable=g-bad-import-order
# Imports the following modules so that the @RegisterGradient decorators get executed.
from tensorflow.python.ops import array_grad
from tensorflow.python.ops import cudnn_rnn_grad
from tensorflow.python.ops import data_flow_grad
from tensorflow.python.ops import manip_grad
from tensorflow.python.ops import math_grad
from tensorflow.python.ops import random_grad
from tensorflow.python.ops import rnn_grad
from tensorflow.python.ops import sparse_grad
from tensorflow.python.ops import state_grad
from tensorflow.python.ops import tensor_array_grad
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.array_ops import *
from tensorflow.python.ops.check_ops import *
from tensorflow.python.ops.clip_ops import *
from tensorflow.python.ops.special_math_ops import *
# TODO(vrv): Switch to import * once we're okay with exposing the module.
from tensorflow.python.ops.confusion_matrix import confusion_matrix
from tensorflow.python.ops.control_flow_ops import Assert
from tensorflow.python.ops.control_flow_ops import case
from tensorflow.python.ops.control_flow_ops import cond
from tensorflow.python.ops.control_flow_ops import group
from tensorflow.python.ops.control_flow_ops import no_op
from tensorflow.python.ops.control_flow_ops import tuple # pylint: disable=redefined-builtin
# pylint: enable=redefined-builtin
from tensorflow.python.eager import wrap_function
from tensorflow.python.ops.control_flow_ops import while_loop
from tensorflow.python.ops.batch_ops import *
from tensorflow.python.ops.critical_section_ops import *
from tensorflow.python.ops.data_flow_ops import *
from tensorflow.python.ops.functional_ops import *
from tensorflow.python.ops.gradients import *
from tensorflow.python.ops.histogram_ops import *
from tensorflow.python.ops.init_ops import *
from tensorflow.python.ops.io_ops import *
from tensorflow.python.ops.linalg_ops import *
from tensorflow.python.ops.logging_ops import Print
from tensorflow.python.ops.logging_ops import get_summary_op
from tensorflow.python.ops.logging_ops import timestamp
from tensorflow.python.ops.lookup_ops import initialize_all_tables
from tensorflow.python.ops.lookup_ops import tables_initializer
from tensorflow.python.ops.manip_ops import *
from tensorflow.python.ops.math_ops import *
from tensorflow.python.ops.numerics import *
from tensorflow.python.ops.parsing_ops import *
from tensorflow.python.ops.partitioned_variables import *
from tensorflow.python.ops.proto_ops import *
from tensorflow.python.ops.ragged import ragged_dispatch as _ragged_dispatch
from tensorflow.python.ops.ragged import ragged_operators as _ragged_operators
from tensorflow.python.ops.random_ops import *
from tensorflow.python.ops.script_ops import py_func
from tensorflow.python.ops.session_ops import *
from tensorflow.python.ops.sort_ops import *
from tensorflow.python.ops.sparse_ops import *
from tensorflow.python.ops.state_ops import assign
from tensorflow.python.ops.state_ops import assign_add
from tensorflow.python.ops.state_ops import assign_sub
from tensorflow.python.ops.state_ops import count_up_to
from tensorflow.python.ops.state_ops import scatter_add
from tensorflow.python.ops.state_ops import scatter_div
from tensorflow.python.ops.state_ops import scatter_mul
from tensorflow.python.ops.state_ops import scatter_sub
from tensorflow.python.ops.state_ops import scatter_min
from tensorflow.python.ops.state_ops import scatter_max
from tensorflow.python.ops.state_ops import scatter_update
from tensorflow.python.ops.state_ops import scatter_nd_add
from tensorflow.python.ops.state_ops import scatter_nd_sub
# TODO(simister): Re-enable once binary size increase due to scatter_nd
# ops is under control.
# from tensorflow.python.ops.state_ops import scatter_nd_mul
# from tensorflow.python.ops.state_ops import scatter_nd_div
from tensorflow.python.ops.state_ops import scatter_nd_update
from tensorflow.python.ops.stateless_random_ops import *
from tensorflow.python.ops.string_ops import *
from tensorflow.python.ops.template import *
from tensorflow.python.ops.tensor_array_ops import *
from tensorflow.python.ops.variable_scope import *
from tensorflow.python.ops.variables import *
from tensorflow.python.ops.parallel_for.control_flow_ops import vectorized_map
# pylint: disable=g-import-not-at-top
if _platform.system() == "Windows":
from tensorflow.python.compiler.tensorrt import trt_convert_windows as trt
else:
from tensorflow.python.compiler.tensorrt import trt_convert as trt
# pylint: enable=g-import-not-at-top
# pylint: enable=wildcard-import
# pylint: enable=g-bad-import-order
# These modules were imported to set up RaggedTensor operators and dispatchers:
del _ragged_dispatch, _ragged_operators
|
apache-2.0
|
poeschlr/kicad-3d-models-in-freecad
|
cadquery/FCAD_script_generator/Connector_Dsub/cq_common.py
|
4
|
16211
|
# -*- coding: utf8 -*-
#!/usr/bin/python
#
#****************************************************************************
#* *
#* base classes for generating part models in STEP AP214 *
#* *
#* This is part of FreeCAD & cadquery tools *
#* to export generated models in STEP & VRML format. *
#* Copyright (c) 2017 *
#* Terje Io https://github.com/terjeio *
#* Maurice https://launchpad.net/~easyw *
#* *
#* All trademarks within this guide belong to their legitimate owners. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., *
#* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA *
#* *
#****************************************************************************
# 2017-11-30
#
# parts of this code are based on work by other contributors
# last copied from cq_base_model.py
#
import collections
from collections import namedtuple
from math import sin, tan, radians
import FreeCAD, Draft, FreeCADGui
import ImportGui
import FreeCADGui as Gui
import shaderColors
import exportPartToVRML as expVRML
## base parameters & model
import collections
from collections import namedtuple
# Import cad_tools
import cq_cad_tools
# Reload tools
reload(cq_cad_tools)
# Explicitly load all needed functions
from cq_cad_tools import FuseObjs_wColors, GetListOfObjects, restore_Main_Tools, \
exportSTEP, close_CQ_Example, exportVRML, saveFCdoc, z_RotateObject, Color_Objects, \
CutObjs_wColors, checkRequirements
# Sphinx workaround #1
try:
QtGui
except NameError:
QtGui = None
#
try:
# Gui.SendMsgToActiveView("Run")
# from Gui.Command import *
Gui.activateWorkbench("CadQueryWorkbench")
import cadquery
cq = cadquery
from Helpers import show
# CadQuery Gui
except: # catch *all* exceptions
msg = "missing CadQuery 0.3.0 or later Module!\r\n\r\n"
msg += "https://github.com/jmwright/cadquery-freecad-module/wiki\n"
if QtGui is not None:
reply = QtGui.QMessageBox.information(None,"Info ...",msg)
# maui end
#checking requirements
try:
close_CQ_Example(FreeCAD, Gui)
except: # catch *all* exceptions
print "CQ 030 doesn't open example file"
from Helpers import show
import Part as FreeCADPart
class Polyline:
r"""A class for creating a polyline wire (including arcs) using **relative** moves (turtle graphics style)
:param plane: the workplane to add the polyline (as a wire)
:type plane: ``workplane``
:param origin: point
:type origin: ``point``
    Most of the methods return a reference to the class instance (self), which allows method chaining
"""
def __init__(self, plane, origin=(0.0, 0.0)):
self.commands = []
self.plane = plane
self.origin = origin
self.x = 0
self.y = 0
self.addMoveTo(origin[0], origin[1])
def getCurrentPosition(self):
r"""get the current position in absolute coordinates
:rtype: Point
"""
return (self.x, self.y)
def addMoveTo(self, x, y):
r"""add a relative move (offset) from the current coordinate
        .. note:: when issued as the first call after instantiating the class, the origin is moved accordingly
:param x: x distance from current position
:type x: ``float``
:param y: y distance from current position
:type y: ``float``
:rtype: self
"""
self.x += x
self.y += y
if len(self.commands) == 1:
self.commands = []
self.origin = (self.x, self.y)
self.commands.append((0, self.x, self.y))
return self
def addPoint(self, x, y):
r"""add a straight line to point
:param x: x distance from current position
:type x: ``float``
:param y: y distance from current position
:type y: ``float``
:rtype: self
"""
self.x += x
self.y += y
self.commands.append((1, self.x, self.y))
return self
def addPoints(self, pointList):
r"""add a list of new points
:param pointList:
:type pointList: list of points
:rtype: self
Example where first half is defined by points and then mirrored by adding points in reverse order::
ow = 0.6
pw = self.pin_width
c1 = (ow - pw) / 2.0
pin = Polyline(cq.Workplane("XY"), origin=(0.0, self.body_width / 2.0))\
.addPoints([
(ow / 2.0, 0),
(0.0, -self.body_width),
(-c1, -c1),
(0.0, -(self.pin_length - pw)),
(-pw / 4.0, -pw),
(-pw / 4.0, 0.0),
])\
.addMirror().make().extrude(self.pin_thickness)
.. figure:: ../images/pin.png
Rendering
"""
for point in pointList:
self.addPoint(point[0], point[1])
return self
    def addArc(self, radius, angle=90, type=0):
        r"""add an arc from the current position
        The end point is offset from the current position by
        (*radius*, *radius* * sign(*angle*)); *type* (0 or 1) selects which of
        the two possible bulge directions the arc takes
        :param radius: arc radius
        :type radius: ``float``
        :param angle: angle in degrees, its sign flips the y offset
        :type angle: ``float``
        :rtype: self
        """
o = sin(radians(abs(angle) / 2.0))
p = 1.0 - 1.0 * o
f = -1.0 if angle < 0.0 else 1.0
if type == 0:
ap1 = self.x + radius * (p if f == 1.0 else o)
ap2 = self.y + radius * (o if f == 1.0 else p) * f
else:
ap1 = self.x + radius * (p if f == -1.0 else o)
ap2 = self.y + radius * (o if f == -1.0 else p) * f
self.x += radius
self.y += radius * f
self.commands.append((2, self.x, self.y, ap1, ap2))
return self
def addThreePointArc(self, point1, point2):
r"""create a three point arc
        The starting point is the current position, the end point is *point2*, and the arc is drawn through *point1*
:param point1:
        :type point1: ``point``
:param point2:
:type point2: ``point``
:rtype: self
Example::
l = 4
a = 0.2
w = 2 - a
body = Polyline(cq.Workplane("XY"))\
.addPoint(0, w)\
.addThreePointArc((l / 2, a), (l, 0))\
.addPoint(0,- w).make().extrude(1)
.. figure:: ../images/threepointarc.png
Rendering
"""
ap1 = self.x + point1[0]
ap2 = self.y + point1[1]
self.x += point2[0]
self.y += point2[1]
self.commands.append((2, self.x, self.y, ap1, ap2))
return self
def addChamferedRectangle(self, length, width, chamfer):
r"""create a chamfered rectangle centered at the current point
:param length:
:type length: ``float``
:param width:
:type width: ``float``
:param chamfer:
:type chamfer: ``float``
:rtype: self
See :func:`addRoundedRectangle` for an example
"""
self.addMoveTo(-length / 2.0, -width / 2.0 + chamfer)
length = length - chamfer * 2.0
width = width - chamfer * 2.0
self.addPoint(0.0, width)
self.addPoint(chamfer, chamfer)
self.addPoint(length, 0)
self.addPoint(chamfer, -chamfer)
self.addPoint(0.0, -width)
self.addPoint(-chamfer, -chamfer)
self.addPoint(-length, 0.0)
self.addPoint(-chamfer, chamfer)
return self
def addRoundedRectangle(self, length, width, radius):
r"""create a rounded rectangle centered at the current point
:param length:
:type length: ``float``
:param width:
:type width: ``float``
        :param radius:
        :type radius: ``float``
:rtype: self
Example with a chamfered rectangle cutout::
l = 4
w = 2
cutout = Polyline(cq.Workplane("XY"))\
.addChamferedRectangle(l - 0.3, w - 0.3, 0.3).make().extrude(1)
body = Polyline(cq.Workplane("XY"))\
.addRoundedRectangle(l, w, 0.3).make().extrude(1).cut(cutout)
.. figure:: ../images/roundedrectangle.png
Rendering
"""
self.addMoveTo(-length / 2.0, -width / 2.0 + radius)
length = length - radius * 2.0
width = width - radius * 2.0
self.addPoint(0.0, width)
self.addArc(radius, 90)
self.addPoint(length, 0)
self.addArc(radius, -90)
self.addPoint(0.0, -width)
self.addArc(-radius, 90)
self.addPoint(-length, 0.0)
self.addArc(-radius, -90)
return self
def mirror(self, axis="X"):
r"""mirror the current polyline
"""
result = []
tx = -1.0 if axis == "X" else 1.0
ty = -1.0 if axis != "X" else 1.0
for point in self.commands:
result.append((point[0], point[1] * tx, point[2] * ty))
self.commands = result
return self
def addMirror(self, axis="X"):
r"""add a mirror of the current polyline by reversing its direction
"""
x0 = self.origin[0] if axis == "X" else 0.0
y0 = self.origin[1] if axis != "X" else 0.0
tx = -1.0 if axis == "X" else 1.0
ty = -1.0 if axis != "X" else 1.0
start = 2 #if axis == "X" else 0
start = 1 if self.commands[0][start] == self.commands[-1][start] else 0
for point in reversed(self.commands[start:-1]):
self.commands.append((1, (point[1] - x0) * tx + x0, (point[2] - y0) * ty + y0))
return self
def _is_equal (self, point1, point2):
return point1[0] == point2[0] and point1[1] == point2[1]
def make(self):
r""" Closes the polyline and creates a wire in the supplied plane
:rtype: ``wire``
"""
plane = self.plane
for point in self.commands:
if point[0] == 0:
plane = plane.moveTo(point[1], point[2])
elif point[0] == 1:
plane = plane.lineTo(point[1], point[2])
elif point[0] == 2:
plane = plane.threePointArc((point[3], point[4]), (point[1], point[2]))
return plane.wire() if self._is_equal(self.origin, (self.commands[-1])[1:3]) else plane.close().wire()
class cq_parameters_help():
def __init__(self):
self.pin_thickness = 0.15
self.pin_length = 1.0
self.pin_width = 0.4
def _make_gullwing_pin(self, pin_height, bottom_length, r_upper_i=None, r_lower_i=None):
""" create gull wing pin
        The pin will be placed at coordinate (0, 0) with its base at Z = 0
:param pin_height: overall pin height
:type pin_height: ``float``
:rtype: ``solid``
.. figure:: ../images/gullwingpin.png
Rendering example
"""
# r_upper_i - pin upper corner, inner radius
# r_lower_i - pin lower corner, inner radius
# bottom_length - pin bottom flat part length (excluding corner arc)
if r_lower_i is None:
r_lower_i = self.pin_thickness / 2.0 if r_upper_i is None else r_upper_i
if r_upper_i is None:
r_upper_i = self.pin_thickness / 2.0
r_upper_o = r_upper_i + self.pin_thickness # pin upper corner, outer radius
r_lower_o = r_lower_i + self.pin_thickness # pin lower corner, outer radius
bottom_length = bottom_length - r_lower_i
top_length = self.pin_length - bottom_length - r_upper_i - r_lower_o
return Polyline(cq.Workplane("YZ"), origin=(0, pin_height))\
.addPoint(top_length, 0)\
.addArc(r_upper_i, -90)\
.addPoint(0, -(pin_height - r_upper_i - r_lower_o))\
.addArc(r_lower_o, -90, 1)\
.addPoint(bottom_length, 0)\
.addPoint(0, self.pin_thickness)\
.addPoint(-bottom_length, 0)\
.addArc(-r_lower_i, -90)\
.addPoint(0, pin_height - r_upper_i - r_lower_o)\
.addArc(-r_upper_o, -90, 1)\
.addPoint(-top_length, 0).make().extrude(self.pin_width).translate((-self.pin_width / 2.0, 0, 0))
def _make_Jhook_pin(self, pin_height, bottom_length, top_length = 0.05, r_upper_i=None, r_lower_i=None):
""" create J-hook pin
        The pin will be placed at coordinate (0, 0) with its base at Z = 0
:param pin_height: overall pin height
:type pin_height: ``float``
:rtype: ``solid``
.. figure:: ../images/jhookpin.png
Rendering example
"""
# r_upper_i - pin upper corner, inner radius
# r_lower_i - pin lower corner, inner radius
# bottom_length - pin bottom flat part length (excluding corner arc)
if r_lower_i is None:
r_lower_i = self.pin_thickness / 2.0 if r_upper_i is None else r_upper_i
if r_upper_i is None:
r_upper_i = self.pin_thickness / 2.0
r_upper_o = r_upper_i + self.pin_thickness # pin upper corner, outer radius
r_lower_o = r_lower_i + self.pin_thickness # pin lower corner, outer radius
bottom_length = bottom_length - r_lower_i
return Polyline(cq.Workplane("YZ"), (-(top_length + r_upper_i), pin_height))\
.addPoint(top_length, 0)\
.addArc(r_upper_i, -90)\
.addPoint(0, -(pin_height - r_upper_i - r_lower_i - self.pin_thickness))\
.addArc(-r_lower_i, 90)\
.addPoint(-bottom_length, 0)\
.addPoint(0, -self.pin_thickness)\
.addPoint(bottom_length, 0)\
.addArc(r_lower_o, 90, 1)\
.addPoint(0, pin_height - r_upper_i - r_lower_i - self.pin_thickness)\
.addArc(-r_upper_o, -90, 1)\
.addPoint(-top_length, 0).make().extrude(self.pin_width).translate((-self.pin_width / 2.0, 0, 0))
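# A small usage sketch of the pin helpers above; the dimensions are arbitrary
# values picked for illustration and this function is not part of the original
# generator scripts.
def _example_make_pins():
    params = cq_parameters_help()
    gullwing = params._make_gullwing_pin(pin_height=1.2, bottom_length=0.6)
    jhook = params._make_Jhook_pin(pin_height=1.2, bottom_length=0.6)
    return gullwing, jhook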
|
gpl-2.0
|
turbinenreiter/micropython
|
tests/basics/int_big_and2.py
|
61
|
2185
|
# test + +
print( 97989513389222316022151446562729620153292831887555425160965597396
& 23716683549865351578586448630079789776107310103486834795830390982)
print( 53817081128841898634258263553430908085326601592682411889506742059
& 37042558948907407488299113387826240429667200950043601129661240876)
print( 26167512042587370698808974207700979337713004510730289760097826496
& 98456276326770292376138852628141531773120376436197321310863125849)
print( 21085380307304977067262070503651827226504797285572981274069266136
& 15928222825828272388778130358888206480162413547887287646273147570)
print( 40827393422334167255488276244226338235131323044408420081160772273
& 63815443187857978125545555033672525708399848575557475462799643340)
print( 5181013159871685724135944379095645225188360725917119022722046448
& 59734090450462480092384049604830976376887859531148103803093112493)
print( 283894311
& 86526825689187217371383854139783231460931720533100376593106943447)
print( 40019818573920230246248826511203818792007462193311949166285967147
& 9487909752)
# test - -
print( -97989513389222316022151446562729620153292831887555425160965597396
& -23716683549865351578586448630079789776107310103486834795830390982)
print( -53817081128841898634258263553430908085326601592682411889506742059
& -37042558948907407488299113387826240429667200950043601129661240876)
print( -26167512042587370698808974207700979337713004510730289760097826496
& -98456276326770292376138852628141531773120376436197321310863125849)
print( -21085380307304977067262070503651827226504797285572981274069266136
& -15928222825828272388778130358888206480162413547887287646273147570)
print( -40827393422334167255488276244226338235131323044408420081160772273
& -63815443187857978125545555033672525708399848575557475462799643340)
print( -5181013159871685724135944379095645225188360725917119022722046448
& -59734090450462480092384049604830976376887859531148103803093112493)
print( -283894311
& -86526825689187217371383854139783231460931720533100376593106943447)
print( -40019818573920230246248826511203818792007462193311949166285967147
& -9487909752)
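# Note: the expected output follows CPython's arbitrary-precision semantics,
# where '&' on negative integers acts on an infinite two's-complement
# representation (e.g. -6 & -4 == -8 and -6 & 3 == 2).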
|
mit
|
tomka/CATMAID
|
django/applications/catmaid/views/image_block_source_importer.py
|
2
|
4518
|
# -*- coding: utf-8 -*-
import json
from django import forms
from django.core.exceptions import ValidationError
from django.contrib import messages
from django.shortcuts import redirect
from formtools.wizard.views import SessionWizardView
from catmaid.models import (
Stack, StackMirror, Project, ProjectStack, StackGroup,
StackStackGroup, StackGroupRelation,
)
from catmaid.fields import (
Double3DFormField, DownsampleFactorsField, DownsampleFactorsFormField,
Integer3DFormField,
)
TEMPLATES = {
'container': 'catmaid/imageblocksourceimport/container.html',
'stack': 'catmaid/imageblocksourceimport/stack.html',
}
TILE_SOURCE_TYPE = 11
class ContainerForm(forms.Form):
container = forms.URLField(label='N5 Root', widget=forms.TextInput(attrs={'size':80}),
help_text='URL to the root of the N5 container containing your stack')
dataset = forms.CharField(widget=forms.TextInput(attrs={'size':80}),
        help_text='Path to the stack dataset, not including the scale level')
has_scales = forms.BooleanField(required=False, label='Dataset has scale levels')
def clean_container(self):
container = self.cleaned_data['container']
return container.strip('/')
def clean_dataset(self):
dataset = self.cleaned_data['dataset']
return dataset.strip('/')
class StackForm(forms.Form):
title = forms.CharField(help_text='Title of the new stack')
slicing_dims = Integer3DFormField(initial=[0, 1, 2],
        help_text='Dimensions to slice the dataset corresponding to the X, Y '
'and Z of the CATMAID stack')
block_size = Integer3DFormField()
dimension = Integer3DFormField()
resolution = Double3DFormField()
downsample_factors = DownsampleFactorsFormField(required=False, initial=[[1, 1, 1]], max_length=255)
class ImageBlockSourceImportWizard(SessionWizardView):
form_list = [('container', ContainerForm), ('stack', StackForm)]
def get_template_names(self):
return TEMPLATES[self.steps.current]
def get_context_data(self, form, **kwargs):
context = super().get_context_data(form=form, **kwargs)
if self.steps:
if self.steps.current == 'stack':
context['container'] = self.get_cleaned_data_for_step('container')['container']
context['dataset'] = self.get_cleaned_data_for_step('container')['dataset']
context['has_scales'] = self.get_cleaned_data_for_step('container')['has_scales']
context.update({
'title': 'N5 Source Importer',
})
return context
def done(self, form_list, **kwargs):
container = self.get_cleaned_data_for_step('container')['container']
dataset = self.get_cleaned_data_for_step('container')['dataset']
has_scales = self.get_cleaned_data_for_step('container')['has_scales']
slicing_dims = self.get_cleaned_data_for_step('stack')['slicing_dims']
title = self.get_cleaned_data_for_step('stack')['title']
dimension = self.get_cleaned_data_for_step('stack')['dimension']
resolution = self.get_cleaned_data_for_step('stack')['resolution']
downsample_factors = self.get_cleaned_data_for_step('stack')['downsample_factors']
block_size = self.get_cleaned_data_for_step('stack')['block_size']
image_base = n5_source_url(container, dataset, has_scales, slicing_dims)
stack = Stack(
title=title,
dimension=dimension,
resolution=resolution,
downsample_factors=downsample_factors)
stack.save()
mirror = StackMirror.objects.create(
title='default',
stack=stack,
image_base=image_base,
file_extension='',
tile_width=block_size.x,
tile_height=block_size.y,
tile_source_type=TILE_SOURCE_TYPE)
msg = 'A new stack was successfully created.'
messages.add_message(self.request, messages.SUCCESS, msg)
return redirect(f'catmaid/stack/{stack.id}/change/')
def n5_source_url(container, dataset, has_scales, slicing_dims) -> str:
scales = '%SCALE_DATASET%' if has_scales else ''
dataset_with_scales = '/'.join([dataset, scales]).strip('/')
slice_str = '_'.join(str(i) for i in [slicing_dims.x, slicing_dims.y, slicing_dims.z])
return f"{container}/{dataset_with_scales}/{slice_str}"
|
gpl-3.0
|
depristo/xvfbwrapper
|
setup.py
|
1
|
1339
|
#!/usr/bin/env python
"""disutils setup/install script for xvfbwrapper"""
import os
from distutils.core import setup
this_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_dir, 'README.rst')) as f:
LONG_DESCRIPTION = '\n' + f.read()
setup(
name='xvfbwrapper',
version='0.2.5',
py_modules=['xvfbwrapper'],
author='Corey Goldberg',
author_email='cgoldberg _at_ gmail.com',
description='run headless display inside X virtual framebuffer (Xvfb)',
long_description=LONG_DESCRIPTION,
url='https://github.com/cgoldberg/xvfbwrapper',
download_url='http://pypi.python.org/pypi/xvfbwrapper',
keywords='xvfb virtual display headless x11'.split(),
license='MIT',
classifiers=[
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
mit
|
zstackorg/zstack-utility
|
kvmagent/kvmagent/plugins/ha_plugin.py
|
1
|
27718
|
from kvmagent import kvmagent
from zstacklib.utils import jsonobject
from zstacklib.utils import http
from zstacklib.utils import log
from zstacklib.utils import shell
from zstacklib.utils import linux
from zstacklib.utils import lvm
from zstacklib.utils import thread
import os.path
import time
import traceback
import threading
logger = log.get_logger(__name__)
class UmountException(Exception):
pass
class AgentRsp(object):
def __init__(self):
self.success = True
self.error = None
class ScanRsp(object):
def __init__(self):
super(ScanRsp, self).__init__()
self.result = None
class ReportPsStatusCmd(object):
def __init__(self):
self.hostUuid = None
self.psUuids = None
self.psStatus = None
self.reason = None
class ReportSelfFencerCmd(object):
def __init__(self):
self.hostUuid = None
self.psUuids = None
self.reason = None
last_multipath_run = time.time()
def kill_vm(maxAttempts, mountPaths=None, isFileSystem=None):
zstack_uuid_pattern = "'[0-9a-f]{8}[0-9a-f]{4}[1-5][0-9a-f]{3}[89ab][0-9a-f]{3}[0-9a-f]{12}'"
virsh_list = shell.call("virsh list --all")
logger.debug("virsh_list:\n" + virsh_list)
vm_in_process_uuid_list = shell.call("virsh list | egrep -o " + zstack_uuid_pattern + " | sort | uniq")
logger.debug('vm_in_process_uuid_list:\n' + vm_in_process_uuid_list)
# kill vm's qemu process
vm_pids_dict = {}
for vm_uuid in vm_in_process_uuid_list.split('\n'):
vm_uuid = vm_uuid.strip(' \t\n\r')
if not vm_uuid:
continue
if mountPaths and isFileSystem is not None \
and not is_need_kill(vm_uuid, mountPaths, isFileSystem):
continue
vm_pid = shell.call("ps aux | grep qemu-kvm | grep -v grep | awk '/%s/{print $2}'" % vm_uuid)
vm_pid = vm_pid.strip(' \t\n\r')
vm_pids_dict[vm_uuid] = vm_pid
for vm_uuid, vm_pid in vm_pids_dict.items():
kill = shell.ShellCmd('kill -9 %s' % vm_pid)
kill(False)
if kill.return_code == 0:
            logger.warn('kill the vm[uuid:%s, pid:%s] because we lost connection to the storage. '
'failed to read the heartbeat file %s times' % (vm_uuid, vm_pid, maxAttempts))
else:
logger.warn('failed to kill the vm[uuid:%s, pid:%s] %s' % (vm_uuid, vm_pid, kill.stderr))
return vm_pids_dict
def mount_path_is_nfs(mount_path):
typ = shell.call("mount | grep '%s' | awk '{print $5}'" % mount_path)
return typ.startswith('nfs')
@linux.retry(times=8, sleep_time=2)
def do_kill_and_umount(mount_path, is_nfs):
kill_progresses_using_mount_path(mount_path)
umount_fs(mount_path, is_nfs)
def kill_and_umount(mount_path, is_nfs):
do_kill_and_umount(mount_path, is_nfs)
if is_nfs:
shell.ShellCmd("systemctl start nfs-client.target")(False)
def umount_fs(mount_path, is_nfs):
if is_nfs:
shell.ShellCmd("systemctl stop nfs-client.target")(False)
time.sleep(2)
o = shell.ShellCmd("umount -f %s" % mount_path)
o(False)
if o.return_code != 0:
raise UmountException(o.stderr)
def kill_progresses_using_mount_path(mount_path):
o = shell.ShellCmd("pkill -9 -e -f '%s'" % mount_path)
o(False)
    logger.warn('kill the processes using mount path: %s, killed processes: %s' % (mount_path, o.stdout))
def is_need_kill(vmUuid, mountPaths, isFileSystem):
def vm_match_storage_type(vmUuid, isFileSystem):
o = shell.ShellCmd("virsh dumpxml %s | grep \"disk type='file'\" | grep -v \"device='cdrom'\"" % vmUuid)
o(False)
if (o.return_code == 0 and isFileSystem) or (o.return_code != 0 and not isFileSystem):
return True
return False
def vm_in_this_file_system_storage(vm_uuid, ps_paths):
cmd = shell.ShellCmd("virsh dumpxml %s | grep \"source file=\" | head -1 |awk -F \"'\" '{print $2}'" % vm_uuid)
cmd(False)
vm_path = cmd.stdout.strip()
if cmd.return_code != 0 or vm_in_storage_list(vm_path, ps_paths):
return True
return False
def vm_in_this_distributed_storage(vm_uuid, ps_paths):
cmd = shell.ShellCmd("virsh dumpxml %s | grep \"source protocol\" | head -1 | awk -F \"'\" '{print $4}'" % vm_uuid)
cmd(False)
vm_path = cmd.stdout.strip()
if cmd.return_code != 0 or vm_in_storage_list(vm_path, ps_paths):
return True
return False
def vm_in_storage_list(vm_path, storage_paths):
if vm_path == "" or any([vm_path.startswith(ps_path) for ps_path in storage_paths]):
return True
return False
if vm_match_storage_type(vmUuid, isFileSystem):
if isFileSystem and vm_in_this_file_system_storage(vmUuid, mountPaths):
return True
elif not isFileSystem and vm_in_this_distributed_storage(vmUuid, mountPaths):
return True
return False
class HaPlugin(kvmagent.KvmAgent):
SCAN_HOST_PATH = "/ha/scanhost"
SETUP_SELF_FENCER_PATH = "/ha/selffencer/setup"
CANCEL_SELF_FENCER_PATH = "/ha/selffencer/cancel"
CEPH_SELF_FENCER = "/ha/ceph/setupselffencer"
CANCEL_CEPH_SELF_FENCER = "/ha/ceph/cancelselffencer"
SHAREDBLOCK_SELF_FENCER = "/ha/sharedblock/setupselffencer"
CANCEL_SHAREDBLOCK_SELF_FENCER = "/ha/sharedblock/cancelselffencer"
ALIYUN_NAS_SELF_FENCER = "/ha/aliyun/nas/setupselffencer"
CANCEL_NAS_SELF_FENCER = "/ha/aliyun/nas/cancelselffencer"
RET_SUCCESS = "success"
RET_FAILURE = "failure"
RET_NOT_STABLE = "unstable"
def __init__(self):
# {ps_uuid: created_time} e.g. {'07ee15b2f68648abb489f43182bd59d7': 1544513500.163033}
self.run_fencer_timestamp = {} # type: dict[str, float]
self.fencer_lock = threading.RLock()
@kvmagent.replyerror
def cancel_ceph_self_fencer(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
self.cancel_fencer(cmd.uuid)
return jsonobject.dumps(AgentRsp())
@kvmagent.replyerror
def cancel_filesystem_self_fencer(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
for ps_uuid in cmd.psUuids:
self.cancel_fencer(ps_uuid)
return jsonobject.dumps(AgentRsp())
@kvmagent.replyerror
def cancel_aliyun_nas_self_fencer(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
self.cancel_fencer(cmd.uuid)
return jsonobject.dumps(AgentRsp())
@kvmagent.replyerror
def setup_aliyun_nas_self_fencer(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
created_time = time.time()
self.setup_fencer(cmd.uuid, created_time)
@thread.AsyncThread
def heartbeat_on_aliyunnas():
failure = 0
while self.run_fencer(cmd.uuid, created_time):
try:
time.sleep(cmd.interval)
mount_path = cmd.mountPath
test_file = os.path.join(mount_path, cmd.heartbeat, '%s-ping-test-file-%s' % (cmd.uuid, kvmagent.HOST_UUID))
touch = shell.ShellCmd('timeout 5 touch %s' % test_file)
touch(False)
if touch.return_code != 0:
logger.debug('touch file failed, cause: %s' % touch.stderr)
failure += 1
else:
failure = 0
linux.rm_file_force(test_file)
continue
if failure < cmd.maxAttempts:
continue
try:
logger.warn("aliyun nas storage %s fencer fired!" % cmd.uuid)
vm_uuids = kill_vm(cmd.maxAttempts).keys()
if vm_uuids:
self.report_self_fencer_triggered([cmd.uuid], ','.join(vm_uuids))
# reset the failure count
failure = 0
except Exception as e:
logger.warn("kill vm failed, %s" % e.message)
content = traceback.format_exc()
logger.warn("traceback: %s" % content)
finally:
self.report_storage_status([cmd.uuid], 'Disconnected')
except Exception as e:
logger.debug('self-fencer on aliyun nas primary storage %s stopped abnormally' % cmd.uuid)
content = traceback.format_exc()
logger.warn(content)
logger.debug('stop self-fencer on aliyun nas primary storage %s' % cmd.uuid)
heartbeat_on_aliyunnas()
return jsonobject.dumps(AgentRsp())
@kvmagent.replyerror
def cancel_sharedblock_self_fencer(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
self.cancel_fencer(cmd.vgUuid)
return jsonobject.dumps(AgentRsp())
@kvmagent.replyerror
def setup_sharedblock_self_fencer(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
@thread.AsyncThread
def heartbeat_on_sharedblock():
failure = 0
while self.run_fencer(cmd.vgUuid, created_time):
try:
time.sleep(cmd.interval)
global last_multipath_run
if cmd.fail_if_no_path and time.time() - last_multipath_run > 4:
last_multipath_run = time.time()
linux.set_fail_if_no_path()
health = lvm.check_vg_status(cmd.vgUuid, cmd.storageCheckerTimeout, check_pv=False)
logger.debug("sharedblock group primary storage %s fencer run result: %s" % (cmd.vgUuid, health))
if health[0] is True:
failure = 0
continue
failure += 1
if failure < cmd.maxAttempts:
continue
try:
logger.warn("shared block storage %s fencer fired!" % cmd.vgUuid)
self.report_storage_status([cmd.vgUuid], 'Disconnected', health[1])
                        # we will check one qcow2 per pv to determine whether volumes on the pv should be killed
invalid_pv_uuids = lvm.get_invalid_pv_uuids(cmd.vgUuid, cmd.checkIo)
vms = lvm.get_running_vm_root_volume_on_pv(cmd.vgUuid, invalid_pv_uuids, True)
killed_vm_uuids = []
for vm in vms:
kill = shell.ShellCmd('kill -9 %s' % vm.pid)
kill(False)
if kill.return_code == 0:
logger.warn(
                                'kill the vm[uuid:%s, pid:%s] because we lost connection to the storage. '
'failed to run health check %s times' % (vm.uuid, vm.pid, cmd.maxAttempts))
killed_vm_uuids.append(vm.uuid)
else:
logger.warn(
'failed to kill the vm[uuid:%s, pid:%s] %s' % (vm.uuid, vm.pid, kill.stderr))
for volume in vm.volumes:
used_process = linux.linux_lsof(volume)
if len(used_process) == 0:
try:
lvm.deactive_lv(volume, False)
except Exception as e:
logger.debug("deactivate volume %s for vm %s failed, %s" % (volume, vm.uuid, e.message))
content = traceback.format_exc()
logger.warn("traceback: %s" % content)
else:
logger.debug("volume %s still used: %s, skip to deactivate" % (volume, used_process))
if len(killed_vm_uuids) != 0:
self.report_self_fencer_triggered([cmd.vgUuid], ','.join(killed_vm_uuids))
lvm.remove_partial_lv_dm(cmd.vgUuid)
if lvm.check_vg_status(cmd.vgUuid, cmd.storageCheckerTimeout, True)[0] is False:
lvm.drop_vg_lock(cmd.vgUuid)
lvm.remove_device_map_for_vg(cmd.vgUuid)
# reset the failure count
failure = 0
except Exception as e:
logger.warn("kill vm failed, %s" % e.message)
content = traceback.format_exc()
logger.warn("traceback: %s" % content)
except Exception as e:
logger.debug('self-fencer on sharedblock primary storage %s stopped abnormally, try again soon...' % cmd.vgUuid)
content = traceback.format_exc()
logger.warn(content)
if not self.run_fencer(cmd.vgUuid, created_time):
logger.debug('stop self-fencer on sharedblock primary storage %s for judger failed' % cmd.vgUuid)
else:
logger.warn('stop self-fencer on sharedblock primary storage %s' % cmd.vgUuid)
created_time = time.time()
self.setup_fencer(cmd.vgUuid, created_time)
heartbeat_on_sharedblock()
return jsonobject.dumps(AgentRsp())
@kvmagent.replyerror
def setup_ceph_self_fencer(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
def check_tools():
ceph = shell.run('which ceph')
rbd = shell.run('which rbd')
if ceph == 0 and rbd == 0:
return True
return False
if not check_tools():
rsp = AgentRsp()
rsp.error = "no ceph or rbd on current host, please install the tools first"
rsp.success = False
return jsonobject.dumps(rsp)
mon_url = '\;'.join(cmd.monUrls)
mon_url = mon_url.replace(':', '\\\:')
created_time = time.time()
self.setup_fencer(cmd.uuid, created_time)
def get_ceph_rbd_args():
if cmd.userKey is None:
return 'rbd:%s:mon_host=%s' % (cmd.heartbeatImagePath, mon_url)
return 'rbd:%s:id=zstack:key=%s:auth_supported=cephx\;none:mon_host=%s' % (cmd.heartbeatImagePath, cmd.userKey, mon_url)
def ceph_in_error_stat():
# HEALTH_OK,HEALTH_WARN,HEALTH_ERR and others(may be empty)...
health = shell.ShellCmd('timeout %s ceph health' % cmd.storageCheckerTimeout)
health(False)
# If the command times out, then exit with status 124
if health.return_code == 124:
return True
health_status = health.stdout
return not (health_status.startswith('HEALTH_OK') or health_status.startswith('HEALTH_WARN'))
def heartbeat_file_exists():
touch = shell.ShellCmd('timeout %s qemu-img info %s' %
(cmd.storageCheckerTimeout, get_ceph_rbd_args()))
touch(False)
if touch.return_code == 0:
return True
logger.warn('cannot query heartbeat image: %s: %s' % (cmd.heartbeatImagePath, touch.stderr))
return False
def create_heartbeat_file():
create = shell.ShellCmd('timeout %s qemu-img create -f raw %s 1' %
(cmd.storageCheckerTimeout, get_ceph_rbd_args()))
create(False)
if create.return_code == 0 or "File exists" in create.stderr:
return True
logger.warn('cannot create heartbeat image: %s: %s' % (cmd.heartbeatImagePath, create.stderr))
return False
def delete_heartbeat_file():
shell.run("timeout %s rbd rm --id zstack %s -m %s" %
(cmd.storageCheckerTimeout, cmd.heartbeatImagePath, mon_url))
@thread.AsyncThread
def heartbeat_on_ceph():
try:
failure = 0
while self.run_fencer(cmd.uuid, created_time):
time.sleep(cmd.interval)
if heartbeat_file_exists() or create_heartbeat_file():
failure = 0
continue
failure += 1
if failure == cmd.maxAttempts:
                        # N.B. We discovered that Ceph can behave as follows:
# 1. Create heart-beat file, failed with 'File exists'
# 2. Query the hb file in step 1, and failed again with 'No such file or directory'
if ceph_in_error_stat():
path = (os.path.split(cmd.heartbeatImagePath))[0]
vm_uuids = kill_vm(cmd.maxAttempts, [path], False).keys()
if vm_uuids:
self.report_self_fencer_triggered([cmd.uuid], ','.join(vm_uuids))
else:
delete_heartbeat_file()
# reset the failure count
failure = 0
logger.debug('stop self-fencer on ceph primary storage')
except:
logger.debug('self-fencer on ceph primary storage stopped abnormally')
content = traceback.format_exc()
logger.warn(content)
heartbeat_on_ceph()
return jsonobject.dumps(AgentRsp())
@kvmagent.replyerror
def setup_self_fencer(self, req):
cmd = jsonobject.loads(req[http.REQUEST_BODY])
@thread.AsyncThread
def heartbeat_file_fencer(mount_path, ps_uuid, mounted_by_zstack):
def try_remount_fs():
if mount_path_is_nfs(mount_path):
shell.run("systemctl start nfs-client.target")
while self.run_fencer(ps_uuid, created_time):
if linux.is_mounted(path=mount_path) and touch_heartbeat_file():
self.report_storage_status([ps_uuid], 'Connected')
logger.debug("fs[uuid:%s] is reachable again, report to management" % ps_uuid)
break
try:
logger.debug('fs[uuid:%s] is unreachable, it will be remounted after 180s' % ps_uuid)
time.sleep(180)
if not self.run_fencer(ps_uuid, created_time):
break
linux.remount(url, mount_path, options)
self.report_storage_status([ps_uuid], 'Connected')
logger.debug("remount fs[uuid:%s] success, report to management" % ps_uuid)
break
except:
logger.warn('remount fs[uuid:%s] fail, try again soon' % ps_uuid)
kill_progresses_using_mount_path(mount_path)
logger.debug('stop remount fs[uuid:%s]' % ps_uuid)
def after_kill_vm():
if not killed_vm_pids or not mounted_by_zstack:
return
try:
kill_and_umount(mount_path, mount_path_is_nfs(mount_path))
except UmountException:
if shell.run('ps -p %s' % ' '.join(killed_vm_pids)) == 0:
virsh_list = shell.call("timeout 10 virsh list --all || echo 'cannot obtain virsh list'")
logger.debug("virsh_list:\n" + virsh_list)
logger.error('kill vm[pids:%s] failed because of unavailable fs[mountPath:%s].'
' please retry "umount -f %s"' % (killed_vm_pids, mount_path, mount_path))
return
try_remount_fs()
def touch_heartbeat_file():
touch = shell.ShellCmd('timeout %s touch %s' % (cmd.storageCheckerTimeout, heartbeat_file_path))
touch(False)
if touch.return_code != 0:
logger.warn('unable to touch %s, %s %s' % (heartbeat_file_path, touch.stderr, touch.stdout))
return touch.return_code == 0
heartbeat_file_path = os.path.join(mount_path, 'heartbeat-file-kvm-host-%s.hb' % cmd.hostUuid)
created_time = time.time()
self.setup_fencer(ps_uuid, created_time)
try:
failure = 0
url = shell.call("mount | grep -e '%s' | awk '{print $1}'" % mount_path).strip()
options = shell.call("mount | grep -e '%s' | awk -F '[()]' '{print $2}'" % mount_path).strip()
while self.run_fencer(ps_uuid, created_time):
time.sleep(cmd.interval)
if touch_heartbeat_file():
failure = 0
continue
failure += 1
if failure == cmd.maxAttempts:
                        logger.warn('failed to touch the heartbeat file[%s] %s times, we lost the connection to the storage, '
'shutdown ourselves' % (heartbeat_file_path, cmd.maxAttempts))
self.report_storage_status([ps_uuid], 'Disconnected')
killed_vms = kill_vm(cmd.maxAttempts, [mount_path], True)
if len(killed_vms) != 0:
self.report_self_fencer_triggered([ps_uuid], ','.join(killed_vms.keys()))
killed_vm_pids = killed_vms.values()
after_kill_vm()
logger.debug('stop heartbeat[%s] for filesystem self-fencer' % heartbeat_file_path)
except:
content = traceback.format_exc()
logger.warn(content)
for mount_path, uuid, mounted_by_zstack in zip(cmd.mountPaths, cmd.uuids, cmd.mountedByZStack):
if not linux.timeout_isdir(mount_path):
raise Exception('the mount path[%s] is not a directory' % mount_path)
heartbeat_file_fencer(mount_path, uuid, mounted_by_zstack)
return jsonobject.dumps(AgentRsp())
@kvmagent.replyerror
def scan_host(self, req):
rsp = ScanRsp()
success = 0
cmd = jsonobject.loads(req[http.REQUEST_BODY])
for i in range(0, cmd.times):
if shell.run("nmap --host-timeout 10s -sP -PI %s | grep -q 'Host is up'" % cmd.ip) == 0:
success += 1
if success == cmd.successTimes:
rsp.result = self.RET_SUCCESS
return jsonobject.dumps(rsp)
time.sleep(cmd.interval)
if success == 0:
rsp.result = self.RET_FAILURE
return jsonobject.dumps(rsp)
        # WE SUCCEEDED A FEW TIMES; IT SEEMS THE CONNECTION IS NOT STABLE
success = 0
for i in range(0, cmd.successTimes):
if shell.run("nmap --host-timeout 10s -sP -PI %s | grep -q 'Host is up'" % cmd.ip) == 0:
success += 1
time.sleep(cmd.successInterval)
if success == cmd.successTimes:
rsp.result = self.RET_SUCCESS
return jsonobject.dumps(rsp)
if success == 0:
rsp.result = self.RET_FAILURE
return jsonobject.dumps(rsp)
rsp.result = self.RET_NOT_STABLE
logger.info('scanhost[%s]: %s' % (cmd.ip, rsp.result))
return jsonobject.dumps(rsp)
def start(self):
http_server = kvmagent.get_http_server()
http_server.register_async_uri(self.SCAN_HOST_PATH, self.scan_host)
http_server.register_async_uri(self.SETUP_SELF_FENCER_PATH, self.setup_self_fencer)
http_server.register_async_uri(self.CEPH_SELF_FENCER, self.setup_ceph_self_fencer)
http_server.register_async_uri(self.CANCEL_SELF_FENCER_PATH, self.cancel_filesystem_self_fencer)
http_server.register_async_uri(self.CANCEL_CEPH_SELF_FENCER, self.cancel_ceph_self_fencer)
http_server.register_async_uri(self.SHAREDBLOCK_SELF_FENCER, self.setup_sharedblock_self_fencer)
http_server.register_async_uri(self.CANCEL_SHAREDBLOCK_SELF_FENCER, self.cancel_sharedblock_self_fencer)
http_server.register_async_uri(self.ALIYUN_NAS_SELF_FENCER, self.setup_aliyun_nas_self_fencer)
http_server.register_async_uri(self.CANCEL_NAS_SELF_FENCER, self.cancel_aliyun_nas_self_fencer)
def stop(self):
pass
def configure(self, config):
self.config = config
@thread.AsyncThread
def report_self_fencer_triggered(self, ps_uuids, vm_uuids_string=None):
url = self.config.get(kvmagent.SEND_COMMAND_URL)
if not url:
logger.warn('cannot find SEND_COMMAND_URL, unable to report self fencer triggered on [psList:%s]' % ps_uuids)
return
host_uuid = self.config.get(kvmagent.HOST_UUID)
if not host_uuid:
logger.warn(
'cannot find HOST_UUID, unable to report self fencer triggered on [psList:%s]' % ps_uuids)
return
def report_to_management_node():
cmd = ReportSelfFencerCmd()
cmd.psUuids = ps_uuids
cmd.hostUuid = host_uuid
cmd.vmUuidsString = vm_uuids_string
cmd.reason = "primary storage[uuids:%s] on host[uuid:%s] heartbeat fail, self fencer has been triggered" % (ps_uuids, host_uuid)
logger.debug(
'host[uuid:%s] primary storage[psList:%s], triggered self fencer, report it to %s' % (
host_uuid, ps_uuids, url))
http.json_dump_post(url, cmd, {'commandpath': '/kvm/reportselffencer'})
report_to_management_node()
@thread.AsyncThread
def report_storage_status(self, ps_uuids, ps_status, reason=""):
url = self.config.get(kvmagent.SEND_COMMAND_URL)
if not url:
logger.warn('cannot find SEND_COMMAND_URL, unable to report storages status[psList:%s, status:%s]' % (
ps_uuids, ps_status))
return
host_uuid = self.config.get(kvmagent.HOST_UUID)
if not host_uuid:
logger.warn(
'cannot find HOST_UUID, unable to report storages status[psList:%s, status:%s]' % (ps_uuids, ps_status))
return
def report_to_management_node():
cmd = ReportPsStatusCmd()
cmd.psUuids = ps_uuids
cmd.hostUuid = host_uuid
cmd.psStatus = ps_status
cmd.reason = reason
logger.debug(
'primary storage[psList:%s] has new connection status[%s], report it to %s' % (
ps_uuids, ps_status, url))
http.json_dump_post(url, cmd, {'commandpath': '/kvm/reportstoragestatus'})
report_to_management_node()
def run_fencer(self, ps_uuid, created_time):
with self.fencer_lock:
            if not self.run_fencer_timestamp.get(ps_uuid) or self.run_fencer_timestamp[ps_uuid] > created_time:
return False
self.run_fencer_timestamp[ps_uuid] = created_time
return True
def setup_fencer(self, ps_uuid, created_time):
with self.fencer_lock:
self.run_fencer_timestamp[ps_uuid] = created_time
def cancel_fencer(self, ps_uuid):
with self.fencer_lock:
self.run_fencer_timestamp.pop(ps_uuid, None)
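# A minimal sketch of the fencer bookkeeping above; the primary storage uuid
# and the timestamps below are made-up values.
def _example_fencer_lifecycle():
    plugin = HaPlugin()
    plugin.setup_fencer('ps-uuid-1', 100.0)           # first fencer loop armed
    assert plugin.run_fencer('ps-uuid-1', 100.0)      # that loop keeps running
    plugin.setup_fencer('ps-uuid-1', 200.0)           # fencer re-armed later on
    assert not plugin.run_fencer('ps-uuid-1', 100.0)  # stale loop stops itself
    assert plugin.run_fencer('ps-uuid-1', 200.0)      # newest loop keeps going
    plugin.cancel_fencer('ps-uuid-1')                 # stops fencing altogether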
|
apache-2.0
|
eblade/telegram
|
tkRAD/xml/rad_xml_window.py
|
2
|
1635
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
tkRAD - tkinter Rapid Application Development library
(c) 2013+ Raphaël SEBAN <motus@laposte.net>
This program is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program.
If not, see: http://www.gnu.org/licenses/
"""
# lib imports
from ..widgets import rad_window as RW
from . import rad_xml_frame as XF
class RADXMLWindow (RW.RADWindow):
r"""
general purpose tkRAD Toplevel Window class implementing
tkinter XML widget building;
"""
def _init_mainframe (self, **kw):
r"""
inherited from RADWindowBase class;
"""
# widget inits
self.mainframe = kw.get("mainframe") or XF.RADXMLFrame(self, **kw)
if hasattr(self.mainframe, "set_xml_filename"):
self.mainframe.set_xml_filename(
kw.get("xml_filename") or "mainwindow"
)
# end if
# shortcut inits
self.tk_children = self.mainframe.winfo_children
self.mainframe.quit_app = self._slot_quit_app
# end def
# end class RADXMLWindow
|
mit
|
samuellefever/server-tools
|
auth_admin_passkey/model/res_config.py
|
61
|
3206
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Admin Passkey module for OpenERP
# Copyright (C) 2013-2014 GRAP (http://www.grap.coop)
# @author Sylvain LE GAL (https://twitter.com/legalsylvain)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields
from openerp.osv.orm import TransientModel
from openerp.tools.safe_eval import safe_eval
class base_config_settings(TransientModel):
_inherit = 'base.config.settings'
# Getter / Setter Section
def get_default_auth_admin_passkey_send_to_admin(
self, cr, uid, ids, context=None):
icp = self.pool['ir.config_parameter']
return {
'auth_admin_passkey_send_to_admin': safe_eval(icp.get_param(
cr, uid, 'auth_admin_passkey.send_to_admin', 'True')),
}
def set_auth_admin_passkey_send_to_admin(self, cr, uid, ids, context=None):
config = self.browse(cr, uid, ids[0], context=context)
icp = self.pool['ir.config_parameter']
icp.set_param(
cr, uid, 'auth_admin_passkey.send_to_admin',
repr(config.auth_admin_passkey_send_to_admin))
def get_default_auth_admin_passkey_send_to_user(
self, cr, uid, ids, context=None):
icp = self.pool['ir.config_parameter']
return {
'auth_admin_passkey_send_to_user': safe_eval(icp.get_param(
cr, uid, 'auth_admin_passkey.send_to_user', 'True')),
}
def set_auth_admin_passkey_send_to_user(self, cr, uid, ids, context=None):
config = self.browse(cr, uid, ids[0], context=context)
icp = self.pool['ir.config_parameter']
icp.set_param(
cr, uid, 'auth_admin_passkey.send_to_user',
repr(config.auth_admin_passkey_send_to_user))
# Columns Section
_columns = {
'auth_admin_passkey_send_to_admin': fields.boolean(
'Send email to admin user.',
help="""When the administrator use his password to login in """
"""with a different account, OpenERP will send an email """
"""to the admin user.""",
),
'auth_admin_passkey_send_to_user': fields.boolean(
string='Send email to user.',
help="""When the administrator use his password to login in """
"""with a different account, OpenERP will send an email """
"""to the account user.""",
),
}
|
agpl-3.0
|
diegocortassa/TACTIC
|
src/context/client/tactic-api-python-4.0.api04/Lib/cmd.py
|
4
|
15367
|
"""A generic class to build line-oriented command interpreters.
Interpreters constructed with this class obey the following conventions:
1. End of file on input is processed as the command 'EOF'.
2. A command is parsed out of each line by collecting the prefix composed
of characters in the identchars member.
3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method
is passed a single argument consisting of the remainder of the line.
4. Typing an empty line repeats the last command. (Actually, it calls the
method `emptyline', which may be overridden in a subclass.)
5. There is a predefined `help' method. Given an argument `topic', it
calls the command `help_topic'. With no arguments, it lists all topics
with defined help_ functions, broken into up to three topics; documented
commands, miscellaneous help topics, and undocumented commands.
6. The command '?' is a synonym for `help'. The command '!' is a synonym
for `shell', if a do_shell method exists.
7. If completion is enabled, completing commands will be done automatically,
and completing of commands args is done by calling complete_foo() with
arguments text, line, begidx, endidx. text is string we are matching
against, all returned matches must begin with it. line is the current
input line (lstripped), begidx and endidx are the beginning and end
indexes of the text being matched, which could be used to provide
different completion depending upon which position the argument is in.
The `default' method may be overridden to intercept commands for which there
is no do_ method.
The `completedefault' method may be overridden to intercept completions for
commands that have no complete_ method.
The data member `self.ruler' sets the character used to draw separator lines
in the help messages. If empty, no ruler line is drawn. It defaults to "=".
If the value of `self.intro' is nonempty when the cmdloop method is called,
it is printed out on interpreter startup. This value may be overridden
via an optional argument to the cmdloop() method.
The data members `self.doc_header', `self.misc_header', and
`self.undoc_header' set the headers used for the help function's
listings of documented functions, miscellaneous topics, and undocumented
functions respectively.
These interpreters use raw_input; thus, if the readline module is loaded,
they automatically support Emacs-like command history and editing features.
"""
import string
__all__ = ["Cmd"]
PROMPT = '(Cmd) '
IDENTCHARS = string.ascii_letters + string.digits + '_'
class Cmd:
"""A simple framework for writing line-oriented command interpreters.
These are often useful for test harnesses, administrative tools, and
prototypes that will later be wrapped in a more sophisticated interface.
A Cmd instance or subclass instance is a line-oriented interpreter
framework. There is no good reason to instantiate Cmd itself; rather,
it's useful as a superclass of an interpreter class you define yourself
in order to inherit Cmd's methods and encapsulate action methods.
"""
prompt = PROMPT
identchars = IDENTCHARS
ruler = '='
lastcmd = ''
intro = None
doc_leader = ""
doc_header = "Documented commands (type help <topic>):"
misc_header = "Miscellaneous help topics:"
undoc_header = "Undocumented commands:"
nohelp = "*** No help on %s"
use_rawinput = 1
def __init__(self, completekey='tab', stdin=None, stdout=None):
"""Instantiate a line-oriented interpreter framework.
The optional argument 'completekey' is the readline name of a
completion key; it defaults to the Tab key. If completekey is
not None and the readline module is available, command completion
is done automatically. The optional arguments stdin and stdout
specify alternate input and output file objects; if not specified,
sys.stdin and sys.stdout are used.
"""
import sys
if stdin is not None:
self.stdin = stdin
else:
self.stdin = sys.stdin
if stdout is not None:
self.stdout = stdout
else:
self.stdout = sys.stdout
self.cmdqueue = []
self.completekey = completekey
def cmdloop(self, intro=None):
"""Repeatedly issue a prompt, accept input, parse an initial prefix
off the received input, and dispatch to action methods, passing them
the remainder of the line as argument.
"""
self.preloop()
if self.use_rawinput and self.completekey:
try:
import readline
self.old_completer = readline.get_completer()
readline.set_completer(self.complete)
readline.parse_and_bind(self.completekey+": complete")
except ImportError:
pass
try:
if intro is not None:
self.intro = intro
if self.intro:
self.stdout.write(str(self.intro)+"\n")
stop = None
while not stop:
if self.cmdqueue:
line = self.cmdqueue.pop(0)
else:
if self.use_rawinput:
try:
line = raw_input(self.prompt)
except EOFError:
line = 'EOF'
else:
self.stdout.write(self.prompt)
self.stdout.flush()
line = self.stdin.readline()
if not len(line):
line = 'EOF'
else:
line = line.rstrip('\r\n')
line = self.precmd(line)
stop = self.onecmd(line)
stop = self.postcmd(stop, line)
self.postloop()
finally:
if self.use_rawinput and self.completekey:
try:
import readline
readline.set_completer(self.old_completer)
except ImportError:
pass
def precmd(self, line):
"""Hook method executed just before the command line is
interpreted, but after the input prompt is generated and issued.
"""
return line
def postcmd(self, stop, line):
"""Hook method executed just after a command dispatch is finished."""
return stop
def preloop(self):
"""Hook method executed once when the cmdloop() method is called."""
pass
def postloop(self):
"""Hook method executed once when the cmdloop() method is about to
return.
"""
pass
def parseline(self, line):
"""Parse the line into a command name and a string containing
the arguments. Returns a tuple containing (command, args, line).
'command' and 'args' may be None if the line couldn't be parsed.
"""
line = line.strip()
if not line:
return None, None, line
elif line[0] == '?':
line = 'help ' + line[1:]
elif line[0] == '!':
if hasattr(self, 'do_shell'):
line = 'shell ' + line[1:]
else:
return None, None, line
i, n = 0, len(line)
while i < n and line[i] in self.identchars: i = i+1
cmd, arg = line[:i], line[i:].strip()
return cmd, arg, line
def onecmd(self, line):
"""Interpret the argument as though it had been typed in response
to the prompt.
This may be overridden, but should not normally need to be;
see the precmd() and postcmd() methods for useful execution hooks.
The return value is a flag indicating whether interpretation of
commands by the interpreter should stop.
"""
cmd, arg, line = self.parseline(line)
if not line:
return self.emptyline()
if cmd is None:
return self.default(line)
self.lastcmd = line
if cmd == '':
return self.default(line)
else:
try:
func = getattr(self, 'do_' + cmd)
except AttributeError:
return self.default(line)
return func(arg)
def emptyline(self):
"""Called when an empty line is entered in response to the prompt.
If this method is not overridden, it repeats the last nonempty
command entered.
"""
if self.lastcmd:
return self.onecmd(self.lastcmd)
def default(self, line):
"""Called on an input line when the command prefix is not recognized.
If this method is not overridden, it prints an error message and
returns.
"""
self.stdout.write('*** Unknown syntax: %s\n'%line)
def completedefault(self, *ignored):
"""Method called to complete an input line when no command-specific
complete_*() method is available.
By default, it returns an empty list.
"""
return []
def completenames(self, text, *ignored):
dotext = 'do_'+text
return [a[3:] for a in self.get_names() if a.startswith(dotext)]
def complete(self, text, state):
"""Return the next possible completion for 'text'.
If a command has not been entered, then complete against command list.
Otherwise try to call complete_<command> to get list of completions.
"""
if state == 0:
import readline
origline = readline.get_line_buffer()
line = origline.lstrip()
stripped = len(origline) - len(line)
begidx = readline.get_begidx() - stripped
endidx = readline.get_endidx() - stripped
if begidx>0:
cmd, args, foo = self.parseline(line)
if cmd == '':
compfunc = self.completedefault
else:
try:
compfunc = getattr(self, 'complete_' + cmd)
except AttributeError:
compfunc = self.completedefault
else:
compfunc = self.completenames
self.completion_matches = compfunc(text, line, begidx, endidx)
try:
return self.completion_matches[state]
except IndexError:
return None
def get_names(self):
# Inheritance says we have to look in class and
# base classes; order is not important.
names = []
classes = [self.__class__]
while classes:
aclass = classes.pop(0)
if aclass.__bases__:
classes = classes + list(aclass.__bases__)
names = names + dir(aclass)
return names
def complete_help(self, *args):
return self.completenames(*args)
def do_help(self, arg):
if arg:
# XXX check arg syntax
try:
func = getattr(self, 'help_' + arg)
except AttributeError:
try:
doc=getattr(self, 'do_' + arg).__doc__
if doc:
self.stdout.write("%s\n"%str(doc))
return
except AttributeError:
pass
self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
return
func()
else:
names = self.get_names()
cmds_doc = []
cmds_undoc = []
help = {}
for name in names:
if name[:5] == 'help_':
help[name[5:]]=1
names.sort()
# There can be duplicates if routines overridden
prevname = ''
for name in names:
if name[:3] == 'do_':
if name == prevname:
continue
prevname = name
cmd=name[3:]
if cmd in help:
cmds_doc.append(cmd)
del help[cmd]
elif getattr(self, name).__doc__:
cmds_doc.append(cmd)
else:
cmds_undoc.append(cmd)
self.stdout.write("%s\n"%str(self.doc_leader))
self.print_topics(self.doc_header, cmds_doc, 15,80)
self.print_topics(self.misc_header, help.keys(),15,80)
self.print_topics(self.undoc_header, cmds_undoc, 15,80)
def print_topics(self, header, cmds, cmdlen, maxcol):
if cmds:
self.stdout.write("%s\n"%str(header))
if self.ruler:
self.stdout.write("%s\n"%str(self.ruler * len(header)))
self.columnize(cmds, maxcol-1)
self.stdout.write("\n")
def columnize(self, list, displaywidth=80):
"""Display a list of strings as a compact set of columns.
Each column is only as wide as necessary.
Columns are separated by two spaces (one was not legible enough).
"""
if not list:
self.stdout.write("<empty>\n")
return
nonstrings = [i for i in range(len(list))
if not isinstance(list[i], str)]
if nonstrings:
raise TypeError, ("list[i] not a string for i in %s" %
", ".join(map(str, nonstrings)))
size = len(list)
if size == 1:
self.stdout.write('%s\n'%str(list[0]))
return
# Try every row count from 1 upwards
for nrows in range(1, len(list)):
ncols = (size+nrows-1) // nrows
colwidths = []
totwidth = -2
for col in range(ncols):
colwidth = 0
for row in range(nrows):
i = row + nrows*col
if i >= size:
break
x = list[i]
colwidth = max(colwidth, len(x))
colwidths.append(colwidth)
totwidth += colwidth + 2
if totwidth > displaywidth:
break
if totwidth <= displaywidth:
break
else:
nrows = len(list)
ncols = 1
colwidths = [0]
for row in range(nrows):
texts = []
for col in range(ncols):
i = row + nrows*col
if i >= size:
x = ""
else:
x = list[i]
texts.append(x)
while texts and not texts[-1]:
del texts[-1]
for col in range(len(texts)):
texts[col] = texts[col].ljust(colwidths[col])
            self.stdout.write("%s\n"%str("  ".join(texts)))
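# ---------------------------------------------------------------------------
# Illustrative usage sketch (editorial addition, not part of the original
# module): a minimal Cmd subclass showing how cmdloop() dispatches input to
# do_* handlers and how returning a true value stops the loop.  The class
# and command names below are invented for the example.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    class _DemoShell(Cmd):
        prompt = '(demo) '
        def do_greet(self, arg):
            """greet [name] -- print a greeting."""
            self.stdout.write('Hello, %s\n' % (arg or 'world'))
        def do_EOF(self, arg):
            """Exit the loop on end-of-file (Ctrl-D)."""
            return True
    _DemoShell().cmdloop('Type "help" for commands, Ctrl-D or EOF to quit.')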
|
epl-1.0
|
gspilio/nova
|
nova/tests/api/openstack/compute/contrib/test_agents.py
|
13
|
6991
|
# Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack.compute.contrib import agents
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import test
fake_agents_list = [{'hypervisor': 'kvm', 'os': 'win',
'architecture': 'x86',
'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx',
'md5hash': 'add6bb58e139be103324d04d82d8f545',
'id': 1},
{'hypervisor': 'kvm', 'os': 'linux',
'architecture': 'x86',
'version': '16.0',
'url': 'xxx://xxxx/xxx/xxx1',
'md5hash': 'add6bb58e139be103324d04d82d8f546',
'id': 2},
{'hypervisor': 'xen', 'os': 'linux',
'architecture': 'x86',
'version': '16.0',
'url': 'xxx://xxxx/xxx/xxx2',
'md5hash': 'add6bb58e139be103324d04d82d8f547',
'id': 3},
{'hypervisor': 'xen', 'os': 'win',
'architecture': 'power',
'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx3',
'md5hash': 'add6bb58e139be103324d04d82d8f548',
'id': 4},
]
def fake_agent_build_get_all(context, hypervisor):
agent_build_all = []
for agent in fake_agents_list:
if hypervisor and hypervisor != agent['hypervisor']:
continue
agent_build_ref = models.AgentBuild()
agent_build_ref.update(agent)
agent_build_all.append(agent_build_ref)
return agent_build_all
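# Editorial note, not part of the original test module: the fake above stands
# in for db.agent_build_get_all and filters fake_agents_list by hypervisor,
# so a request scoped to 'kvm' sees only the two kvm builds defined above.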
def fake_agent_build_update(context, agent_build_id, values):
pass
def fake_agent_build_destroy(context, agent_update_id):
pass
def fake_agent_build_create(context, values):
values['id'] = 1
agent_build_ref = models.AgentBuild()
agent_build_ref.update(values)
return agent_build_ref
class FakeRequest(object):
environ = {"nova.context": context.get_admin_context()}
GET = {}
class FakeRequestWithHypervisor(object):
environ = {"nova.context": context.get_admin_context()}
GET = {'hypervisor': 'kvm'}
class AgentsTest(test.TestCase):
def setUp(self):
super(AgentsTest, self).setUp()
self.stubs.Set(db, "agent_build_get_all",
fake_agent_build_get_all)
self.stubs.Set(db, "agent_build_update",
fake_agent_build_update)
self.stubs.Set(db, "agent_build_destroy",
fake_agent_build_destroy)
self.stubs.Set(db, "agent_build_create",
fake_agent_build_create)
self.context = context.get_admin_context()
self.controller = agents.AgentController()
def test_agents_create(self):
req = FakeRequest()
body = {'agent': {'hypervisor': 'kvm',
'os': 'win',
'architecture': 'x86',
'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx',
'md5hash': 'add6bb58e139be103324d04d82d8f545'}}
response = {'agent': {'hypervisor': 'kvm',
'os': 'win',
'architecture': 'x86',
'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx',
'md5hash': 'add6bb58e139be103324d04d82d8f545',
'agent_id': 1}}
res_dict = self.controller.create(req, body)
self.assertEqual(res_dict, response)
def test_agents_delete(self):
req = FakeRequest()
self.controller.delete(req, 1)
def test_agents_list(self):
req = FakeRequest()
res_dict = self.controller.index(req)
agents_list = [{'hypervisor': 'kvm', 'os': 'win',
'architecture': 'x86',
'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx',
'md5hash': 'add6bb58e139be103324d04d82d8f545',
'agent_id': 1},
{'hypervisor': 'kvm', 'os': 'linux',
'architecture': 'x86',
'version': '16.0',
'url': 'xxx://xxxx/xxx/xxx1',
'md5hash': 'add6bb58e139be103324d04d82d8f546',
'agent_id': 2},
{'hypervisor': 'xen', 'os': 'linux',
'architecture': 'x86',
'version': '16.0',
'url': 'xxx://xxxx/xxx/xxx2',
'md5hash': 'add6bb58e139be103324d04d82d8f547',
'agent_id': 3},
{'hypervisor': 'xen', 'os': 'win',
'architecture': 'power',
'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx3',
'md5hash': 'add6bb58e139be103324d04d82d8f548',
'agent_id': 4},
]
self.assertEqual(res_dict, {'agents': agents_list})
def test_agents_list_with_hypervisor(self):
req = FakeRequestWithHypervisor()
res_dict = self.controller.index(req)
response = [{'hypervisor': 'kvm', 'os': 'win',
'architecture': 'x86',
'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx',
'md5hash': 'add6bb58e139be103324d04d82d8f545',
'agent_id': 1},
{'hypervisor': 'kvm', 'os': 'linux',
'architecture': 'x86',
'version': '16.0',
'url': 'xxx://xxxx/xxx/xxx1',
'md5hash': 'add6bb58e139be103324d04d82d8f546',
'agent_id': 2},
]
self.assertEqual(res_dict, {'agents': response})
def test_agents_update(self):
req = FakeRequest()
body = {'para': {'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx',
'md5hash': 'add6bb58e139be103324d04d82d8f545'}}
response = {'agent': {'agent_id': 1,
'version': '7.0',
'url': 'xxx://xxxx/xxx/xxx',
'md5hash': 'add6bb58e139be103324d04d82d8f545'}}
res_dict = self.controller.update(req, 1, body)
self.assertEqual(res_dict, response)
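    def test_agents_list_with_unknown_hypervisor(self):
        # Editorial sketch, not part of the original test module: the request
        # class and the 'lxc' hypervisor below are invented for illustration.
        # The stubbed db layer returns no builds for a hypervisor that is
        # absent from fake_agents_list, so index() should report an empty
        # agent list.
        class FakeRequestUnknownHypervisor(object):
            environ = {"nova.context": context.get_admin_context()}
            GET = {'hypervisor': 'lxc'}
        req = FakeRequestUnknownHypervisor()
        res_dict = self.controller.index(req)
        self.assertEqual(res_dict, {'agents': []})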
|
apache-2.0
|