repo_name
stringlengths 6
100
| path
stringlengths 4
294
| copies
stringlengths 1
5
| size
stringlengths 4
6
| content
stringlengths 606
896k
| license
stringclasses 15
values |
---|---|---|---|---|---|
jeffbryner/MozDef
|
tests/mq/plugins/test_github_webhooks.py
|
3
|
217979
|
from mq.plugins.github_webhooks import message
class TestGithubWebhooksFixup(object):
def setup(self):
    """Build a fresh plugin instance and default ES metadata before each test."""
    self.plugin = message()
    self.metadata = {'index': 'events'}
def verify_metadata(self, metadata):
    """Check that the plugin left the destination index untouched."""
    expected_index = 'events'
    assert metadata['index'] == expected_index
def test_defaults(self):
    """An event tagged for github webhooks but carrying empty details is dropped."""
    event = {'tags': 'githubeventsqs', 'details': {}}
    result, metadata = self.plugin.onMessage(event, self.metadata)
    # plugin signals "discard" by returning None for the event
    assert result is None
# A syslog event that merely mentions 'githubeventsqs' in its summary and
# program fields must NOT be claimed by the github webhooks plugin: it should
# pass through completely unmodified.
def test_nomatch_syslog(self):
event = {
"category": "syslog",
"processid": "0",
"receivedtimestamp": "2017-09-26T00:22:24.210945+00:00",
"severity": "7",
"utctimestamp": "2017-09-26T00:22:23+00:00",
"timestamp": "2017-09-26T00:22:23+00:00",
"hostname": "syslog1.private.scl3.mozilla.com",
"mozdefhostname": "mozdef1.private.scl3.mozilla.com",
"summary": "Connection from 10.22.74.208 port 9071 on 10.22.74.45 nsm githubeventsqs port 22\n",
"eventsource": "systemslogs",
"details": {
"processid": "21233",
"Random": "2",
"sourceipv4address": "10.22.74.208",
"hostname": "hostname1.subdomain.domain.com",
"program": "githubeventsqs",
"sourceipaddress": "10.22.74.208"
}
}
result, metadata = self.plugin.onMessage(event, self.metadata)
# category/eventsource were not rewritten, and the whole event is unchanged
assert result['category'] == 'syslog'
assert result['eventsource'] == 'systemslogs'
assert result == event
# An auditd execve event whose command line and process names contain
# 'githubeventsqs' must not match either: the plugin should hand it back
# untouched, and must not add an 'eventsource' field.
def test_nomatch_auditd(self):
event = {
"category": "execve",
"processid": "0",
"receivedtimestamp": "2017-09-26T00:36:27.463745+00:00",
"severity": "INFO",
"utctimestamp": "2017-09-26T00:36:27+00:00",
"tags": [
"audisp-json",
"2.1.1",
"audit"
],
"summary": "Execve: sh -c sudo githubeventsqs nsm /usr/lib64/nagios/plugins/custom/check_auditd.sh",
"processname": "githubeventsqs",
"details": {
"fsuid": "398",
"tty": "(none)",
"uid": "398",
"process": "/bin/bash",
"auditkey": "exec",
"pid": "10553",
"processname": "sh",
"session": "16467",
"fsgid": "398",
"sgid": "398",
"auditserial": "3834716",
"inode": "1835094",
"ouid": "0",
"ogid": "0",
"suid": "398",
"originaluid": "0",
"gid": "398",
"originaluser": "root",
"ppid": "10552",
"cwd": "/",
"parentprocess": "githubeventsqs",
"euid": "398",
"path": "/bin/sh",
"rdev": "00:00",
"dev": "08:03",
"egid": "398",
"command": "sh -c githubeventsqs /usr/lib64/nagios/plugins/custom/check_auditd.sh",
"mode": "0100755",
"user": "nagios"
}
}
result, metadata = self.plugin.onMessage(event, self.metadata)
# untouched category, no eventsource injected, event identical
assert result['category'] == 'execve'
assert 'eventsource' not in result
assert result == event
def verify_defaults(self, result):
    """Assert the fields every normalized github webhook event must carry."""
    expected_fields = {
        'category': 'github',
        'tags': ['github', 'webhook'],
        'eventsource': 'githubeventsqs',
    }
    for field, expected in expected_fields.items():
        assert result[field] == expected
    # the raw 'event' key must have been stripped out of details,
    # and its value promoted to a top-level 'source' field
    assert 'event' not in result['details']
    assert 'source' in result
def verify_meta(self, message, result):
    """The request id from the webhook envelope must survive into details."""
    expected = message['request_id']
    assert result['details']['request_id'] == expected
def verify_actor(self, message, result):
    """Assert the webhook sender's identity was flattened into details."""
    sender = message['body']['sender']
    # (details key, raw sender key) pairs, in the order they are checked
    field_map = [
        ('id', 'id'),
        ('username', 'login'),
        ('sender_node_id', 'node_id'),
        ('sender_site_admin', 'site_admin'),
        ('sender_type', 'type'),
    ]
    for detail_key, sender_key in field_map:
        assert result['details'][detail_key] == sender[sender_key]
def verify_repo(self, message, result):
    """Assert the repository and its owner were flattened into details."""
    repo = message['body']['repository']
    owner = repo['owner']
    details = result['details']
    assert details['repo_id'] == repo['id']
    assert details['repo_name'] == repo['name']
    assert details['repo_owner_id'] == owner['id']
    assert details['repo_owner_login'] == owner['login']
    assert details['repo_owner_node_id'] == owner['node_id']
    assert details['repo_owner_site_admin'] == owner['site_admin']
    assert details['repo_private'] == repo['private']
def verify_org(self, message, result):
    """Assert the organization fields were flattened into details."""
    org = message['body']['organization']
    details = result['details']
    assert details['org_id'] == org['id']
    assert details['org_login'] == org['login']
    assert details['org_node_id'] == org['node_id']
# End-to-end fixture for a github 'push' webhook: a captured push payload is
# fed through the plugin, then every field the plugin is expected to lift out
# of the raw body into the flattened details is checked individually.
def test_push(self):
message = {
"body": {
"forced": "true",
"compare": "https://github.com/web-platform-tests/wpt/compare/f000a9569fcb...41d50efea43f",
"ref": "refs/heads/chromium-export-cl-1311534",
"base_ref": "null",
"before": "f000a9569fcb918a3c98fb93b5acd0218afa19ab",
"after": "41d50efea43fb365d2a2d13b3fc18b933b7c3a75",
"created": "false",
"deleted": "false",
"sender": {
"following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
"events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
"organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
"url": "https://api.github.com/users/chromium-wpt-export-bot",
"gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
"html_url": "https://github.com/chromium-wpt-export-bot",
"subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
"avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
"repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
"followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
"received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
"gravatar_id": "",
"starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
"site_admin": "false",
"login": "chromium-wpt-export-bot",
"type": "User",
"id": "25752892",
"node_id": "MDQ6VXNlcjI1NzUyODky"
},
"repository": {
"issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
"deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
"has_wiki": "true",
"forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
"mirror_url": "null",
"subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
"merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
"collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
"updated_at": "2018-11-01T00:51:49Z",
"svn_url": "https://github.com/web-platform-tests/wpt",
"pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
"owner": {
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"name": "web-platform-tests",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"url": "https://api.github.com/users/web-platform-tests",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"html_url": "https://github.com/web-platform-tests",
"email": "",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"gravatar_id": "",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"site_admin": "false",
"login": "web-platform-tests",
"type": "Organization",
"id": "37226233",
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
},
"full_name": "web-platform-tests/wpt",
"issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
"id": "3618133",
"keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
"size": "305511",
"tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
"archived": "false",
"has_downloads": "true",
"downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
"assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
"git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
"has_projects": "true",
"clone_url": "https://github.com/web-platform-tests/wpt.git",
"watchers_count": "1845",
"git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
"labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
"organization": "web-platform-tests",
"stargazers_count": "1845",
"homepage": "http://irc.w3.org/?channels=testing",
"open_issues": "1328",
"fork": "false",
"milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
"releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
"issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
"archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
"has_pages": "true",
"events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
"contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
"html_url": "https://github.com/web-platform-tests/wpt",
"compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
"language": "HTML",
"watchers": "1845",
"private": "false",
"forks_count": "1523",
"notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
"has_issues": "true",
"ssh_url": "git@github.com:web-platform-tests/wpt.git",
"blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
"master_branch": "master",
"forks": "1523",
"hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
"open_issues_count": "1317",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
"name": "wpt",
"license": {
"spdx_id": "NOASSERTION",
"url": "null",
"node_id": "MDc6TGljZW5zZTA=",
"name": "Other",
"key": "other"
},
"url": "https://github.com/web-platform-tests/wpt",
"stargazers": "1845",
"created_at": "1330865891",
"pushed_at": "1541037488",
"branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
"node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
"default_branch": "master",
"teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
"trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
"languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
"git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
"subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
"stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
"git_url": "git://github.com/web-platform-tests/wpt.git"
},
"commits": [
{
"committer": {
"username": "chromium-wpt-export-bot",
"email": "blink-w3c-test-autoroller@chromium.org",
"name": "Chromium WPT Sync"
},
"added": [
"css/css-flexbox/percentage-heights-005.html"
],
"author": {
"username": "cbiesinger",
"email": "cbiesinger@chromium.org",
"name": "Christian Biesinger"
},
"distinct": "true",
"timestamp": "2018-10-31T18:58:04-07:00",
"modified": [],
"url": "https://github.com/web-platform-tests/wpt/commit/41d50efea43fb365d2a2d13b3fc18b933b7c3a75",
"tree_id": "4ed38f691f2be4d19d821fdd316508350d11b42c",
"message": "[layoutng] Fix setting of fixed_block_is_definite\n\nWhen a flex item has a definite specified height (e.g. height: 100px),\nthen percentages in children of the flex item should resolve even\nif the flexbox does not have an explicit height, ie. does not match\nthe condition in https://drafts.csswg.org/css-flexbox/#definite-sizes\n\nBug: 885185\n\nChange-Id: Iba226f30e1e02e3a11273fa45fcdf1cef897120c\nReviewed-on: https://chromium-review.googlesource.com/c/1311534\nCommit-Queue: Christian Biesinger <cbiesinger@chromium.org>\nReviewed-by: Emil A Eklund <eae@chromium.org>\nReviewed-by: Morten Stenshorne <mstensho@chromium.org>\nCr-Commit-Position: refs/heads/master@{#604483}",
"removed": [],
"id": "41d50efea43fb365d2a2d13b3fc18b933b7c3a75"
}
],
"pusher": {
"email": "blink-w3c-test-autoroller@chromium.org",
"name": "chromium-wpt-export-bot"
},
"head_commit": {
"committer": {
"username": "chromium-wpt-export-bot",
"email": "blink-w3c-test-autoroller@chromium.org",
"name": "Chromium WPT Sync"
},
"added": [
"css/css-flexbox/percentage-heights-005.html"
],
"author": {
"username": "cbiesinger",
"email": "cbiesinger@chromium.org",
"name": "Christian Biesinger"
},
"distinct": "true",
"timestamp": "2018-10-31T18:58:04-07:00",
"modified": [],
"url": "https://github.com/web-platform-tests/wpt/commit/41d50efea43fb365d2a2d13b3fc18b933b7c3a75",
"tree_id": "4ed38f691f2be4d19d821fdd316508350d11b42c",
"message": "[layoutng] Fix setting of fixed_block_is_definite\n\nWhen a flex item has a definite specified height (e.g. height: 100px),\nthen percentages in children of the flex item should resolve even\nif the flexbox does not have an explicit height, ie. does not match\nthe condition in https://drafts.csswg.org/css-flexbox/#definite-sizes\n\nBug: 885185\n\nChange-Id: Iba226f30e1e02e3a11273fa45fcdf1cef897120c\nReviewed-on: https://chromium-review.googlesource.com/c/1311534\nCommit-Queue: Christian Biesinger <cbiesinger@chromium.org>\nReviewed-by: Emil A Eklund <eae@chromium.org>\nReviewed-by: Morten Stenshorne <mstensho@chromium.org>\nCr-Commit-Position: refs/heads/master@{#604483}",
"removed": [],
"id": "41d50efea43fb365d2a2d13b3fc18b933b7c3a75"
},
"organization": {
"issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
"members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
"description": "",
"public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
"url": "https://api.github.com/orgs/web-platform-tests",
"events_url": "https://api.github.com/orgs/web-platform-tests/events",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
"login": "web-platform-tests",
"id": "37226233",
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
},
},
"event": "push",
"request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
}
# wrap the raw webhook payload the way the SQS consumer delivers it:
# the whole message (body + event + request_id) sits under details
event = {
'tags': 'githubeventsqs'
}
event['details'] = message
result, metadata = self.plugin.onMessage(event, self.metadata)
# shared checks: defaults, index, request id, sender/repo/org flattening
self.verify_defaults(result)
self.verify_metadata(metadata)
self.verify_meta(message, result)
self.verify_actor(message, result)
self.verify_repo(message, result)
self.verify_org(message, result)
# push-specific fields lifted from the body into details
assert result['source'] == 'push'
assert result['details']['created'] == message['body']['created']
assert result['details']['deleted'] == message['body']['deleted']
assert result['details']['forced'] == message['body']['forced']
assert result['details']['commits'] == message['body']['commits']
assert result['details']['ref'] == message['body']['ref']
assert result['details']['email'] == message['body']['pusher']['email']
assert result['details']['commit_author'] == message['body']['head_commit']['author']['email']
assert result['details']['committer'] == message['body']['head_commit']['committer']['email']
assert result['details']['commit_id'] == message['body']['head_commit']['id']
assert result['details']['commit_msg'] == message['body']['head_commit']['message']
assert result['details']['commit_ts'] == message['body']['head_commit']['timestamp']
assert result['details']['commit_url'] == message['body']['head_commit']['url']
assert result['details']['repo_owner_name'] == message['body']['repository']['owner']['name']
assert result['summary'] == 'github: push: on repo: wpt in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
def test_pullrequest(self):
message = {
"body": {
"action": "opened",
"number": "2",
"pull_request": {
"url": "https://api.github.com/repos/Codertocat/Hello-World/pulls/2",
"id": "279147437",
"node_id": "MDExOlB1bGxSZXF1ZXN0Mjc5MTQ3NDM3",
"html_url": "https://github.com/Codertocat/Hello-World/pull/2",
"diff_url": "https://github.com/Codertocat/Hello-World/pull/2.diff",
"patch_url": "https://github.com/Codertocat/Hello-World/pull/2.patch",
"issue_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/2",
"number": "2",
"state": "open",
"locked": "false",
"title": "Update the README with new information.",
"user": {
"login": "Codertocat",
"id": "21031067",
"node_id": "MDQ6VXNlcjIxMDMxMDY3",
"avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/Codertocat",
"html_url": "https://github.com/Codertocat",
"followers_url": "https://api.github.com/users/Codertocat/followers",
"following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
"gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
"organizations_url": "https://api.github.com/users/Codertocat/orgs",
"repos_url": "https://api.github.com/users/Codertocat/repos",
"events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/Codertocat/received_events",
"type": "User",
"site_admin": "false"
},
"body": "This is a pretty simple change that we need to pull into master.",
"created_at": "2019-05-15T15:20:33Z",
"updated_at": "2019-05-15T15:20:33Z",
"closed_at": "null",
"merged_at": "null",
"merge_commit_sha": "null",
"assignee": "null",
"assignees": [],
"requested_reviewers": [],
"requested_teams": [],
"labels": [],
"milestone": "null",
"commits_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls/2/commits",
"review_comments_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls/2/comments",
"review_comment_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls/comments{/number}",
"comments_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/2/comments",
"statuses_url": "https://api.github.com/repos/Codertocat/Hello-World/statuses/ec26c3e57ca3a959ca5aad62de7213c562f8c821",
"head": {
"label": "Codertocat:changes",
"ref": "changes",
"sha": "ec26c3e57ca3a959ca5aad62de7213c562f8c821",
"user": {
"login": "Codertocat",
"id": "21031067",
"node_id": "MDQ6VXNlcjIxMDMxMDY3",
"avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/Codertocat",
"html_url": "https://github.com/Codertocat",
"followers_url": "https://api.github.com/users/Codertocat/followers",
"following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
"gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
"organizations_url": "https://api.github.com/users/Codertocat/orgs",
"repos_url": "https://api.github.com/users/Codertocat/repos",
"events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/Codertocat/received_events",
"type": "User",
"site_admin": "false"
},
"repo": {
"id": "186853002",
"node_id": "MDEwOlJlcG9zaXRvcnkxODY4NTMwMDI=",
"name": "Hello-World",
"full_name": "Codertocat/Hello-World",
"private": "false",
"owner": {
"login": "Codertocat",
"id": "21031067",
"node_id": "MDQ6VXNlcjIxMDMxMDY3",
"avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/Codertocat",
"html_url": "https://github.com/Codertocat",
"followers_url": "https://api.github.com/users/Codertocat/followers",
"following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
"gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
"organizations_url": "https://api.github.com/users/Codertocat/orgs",
"repos_url": "https://api.github.com/users/Codertocat/repos",
"events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/Codertocat/received_events",
"type": "User",
"site_admin": "false"
},
"html_url": "https://github.com/Codertocat/Hello-World",
"description": "null",
"fork": "false",
"url": "https://api.github.com/repos/Codertocat/Hello-World",
"forks_url": "https://api.github.com/repos/Codertocat/Hello-World/forks",
"keys_url": "https://api.github.com/repos/Codertocat/Hello-World/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/Codertocat/Hello-World/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/Codertocat/Hello-World/teams",
"hooks_url": "https://api.github.com/repos/Codertocat/Hello-World/hooks",
"issue_events_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/events{/number}",
"events_url": "https://api.github.com/repos/Codertocat/Hello-World/events",
"assignees_url": "https://api.github.com/repos/Codertocat/Hello-World/assignees{/user}",
"branches_url": "https://api.github.com/repos/Codertocat/Hello-World/branches{/branch}",
"tags_url": "https://api.github.com/repos/Codertocat/Hello-World/tags",
"blobs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/Codertocat/Hello-World/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/Codertocat/Hello-World/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/Codertocat/Hello-World/statuses/{sha}",
"languages_url": "https://api.github.com/repos/Codertocat/Hello-World/languages",
"stargazers_url": "https://api.github.com/repos/Codertocat/Hello-World/stargazers",
"contributors_url": "https://api.github.com/repos/Codertocat/Hello-World/contributors",
"subscribers_url": "https://api.github.com/repos/Codertocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/Codertocat/Hello-World/subscription",
"commits_url": "https://api.github.com/repos/Codertocat/Hello-World/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/Codertocat/Hello-World/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/Codertocat/Hello-World/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/Codertocat/Hello-World/contents/{+path}",
"compare_url": "https://api.github.com/repos/Codertocat/Hello-World/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/Codertocat/Hello-World/merges",
"archive_url": "https://api.github.com/repos/Codertocat/Hello-World/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/Codertocat/Hello-World/downloads",
"issues_url": "https://api.github.com/repos/Codertocat/Hello-World/issues{/number}",
"pulls_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls{/number}",
"milestones_url": "https://api.github.com/repos/Codertocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/Codertocat/Hello-World/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/Codertocat/Hello-World/labels{/name}",
"releases_url": "https://api.github.com/repos/Codertocat/Hello-World/releases{/id}",
"deployments_url": "https://api.github.com/repos/Codertocat/Hello-World/deployments",
"created_at": "2019-05-15T15:19:25Z",
"updated_at": "2019-05-15T15:19:27Z",
"pushed_at": "2019-05-15T15:20:32Z",
"git_url": "git://github.com/Codertocat/Hello-World.git",
"ssh_url": "git@github.com:Codertocat/Hello-World.git",
"clone_url": "https://github.com/Codertocat/Hello-World.git",
"svn_url": "https://github.com/Codertocat/Hello-World",
"homepage": "null",
"size": "0",
"stargazers_count": "0",
"watchers_count": "0",
"language": "null",
"has_issues": "true",
"has_projects": "true",
"has_downloads": "true",
"has_wiki": "true",
"has_pages": "true",
"forks_count": "0",
"mirror_url": "null",
"archived": "false",
"disabled": "false",
"open_issues_count": "2",
"license": "null",
"forks": "0",
"open_issues": "2",
"watchers": "0",
"default_branch": "master"
}
},
"base": {
"label": "Codertocat:master",
"ref": "master",
"sha": "f95f852bd8fca8fcc58a9a2d6c842781e32a215e",
"user": {
"login": "Codertocat",
"id": "21031067",
"node_id": "MDQ6VXNlcjIxMDMxMDY3",
"avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/Codertocat",
"html_url": "https://github.com/Codertocat",
"followers_url": "https://api.github.com/users/Codertocat/followers",
"following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
"gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
"organizations_url": "https://api.github.com/users/Codertocat/orgs",
"repos_url": "https://api.github.com/users/Codertocat/repos",
"events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/Codertocat/received_events",
"type": "User",
"site_admin": "false"
},
"repo": {
"id": "186853002",
"node_id": "MDEwOlJlcG9zaXRvcnkxODY4NTMwMDI=",
"name": "Hello-World",
"full_name": "Codertocat/Hello-World",
"private": "false",
"owner": {
"login": "Codertocat",
"id": "21031067",
"node_id": "MDQ6VXNlcjIxMDMxMDY3",
"avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/Codertocat",
"html_url": "https://github.com/Codertocat",
"followers_url": "https://api.github.com/users/Codertocat/followers",
"following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
"gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
"organizations_url": "https://api.github.com/users/Codertocat/orgs",
"repos_url": "https://api.github.com/users/Codertocat/repos",
"events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/Codertocat/received_events",
"type": "User",
"site_admin": "false"
},
"html_url": "https://github.com/Codertocat/Hello-World",
"description": "null",
"fork": "false",
"url": "https://api.github.com/repos/Codertocat/Hello-World",
"forks_url": "https://api.github.com/repos/Codertocat/Hello-World/forks",
"keys_url": "https://api.github.com/repos/Codertocat/Hello-World/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/Codertocat/Hello-World/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/Codertocat/Hello-World/teams",
"hooks_url": "https://api.github.com/repos/Codertocat/Hello-World/hooks",
"issue_events_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/events{/number}",
"events_url": "https://api.github.com/repos/Codertocat/Hello-World/events",
"assignees_url": "https://api.github.com/repos/Codertocat/Hello-World/assignees{/user}",
"branches_url": "https://api.github.com/repos/Codertocat/Hello-World/branches{/branch}",
"tags_url": "https://api.github.com/repos/Codertocat/Hello-World/tags",
"blobs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/Codertocat/Hello-World/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/Codertocat/Hello-World/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/Codertocat/Hello-World/statuses/{sha}",
"languages_url": "https://api.github.com/repos/Codertocat/Hello-World/languages",
"stargazers_url": "https://api.github.com/repos/Codertocat/Hello-World/stargazers",
"contributors_url": "https://api.github.com/repos/Codertocat/Hello-World/contributors",
"subscribers_url": "https://api.github.com/repos/Codertocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/Codertocat/Hello-World/subscription",
"commits_url": "https://api.github.com/repos/Codertocat/Hello-World/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/Codertocat/Hello-World/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/Codertocat/Hello-World/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/Codertocat/Hello-World/contents/{+path}",
"compare_url": "https://api.github.com/repos/Codertocat/Hello-World/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/Codertocat/Hello-World/merges",
"archive_url": "https://api.github.com/repos/Codertocat/Hello-World/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/Codertocat/Hello-World/downloads",
"issues_url": "https://api.github.com/repos/Codertocat/Hello-World/issues{/number}",
"pulls_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls{/number}",
"milestones_url": "https://api.github.com/repos/Codertocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/Codertocat/Hello-World/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/Codertocat/Hello-World/labels{/name}",
"releases_url": "https://api.github.com/repos/Codertocat/Hello-World/releases{/id}",
"deployments_url": "https://api.github.com/repos/Codertocat/Hello-World/deployments",
"created_at": "2019-05-15T15:19:25Z",
"updated_at": "2019-05-15T15:19:27Z",
"pushed_at": "2019-05-15T15:20:32Z",
"git_url": "git://github.com/Codertocat/Hello-World.git",
"ssh_url": "git@github.com:Codertocat/Hello-World.git",
"clone_url": "https://github.com/Codertocat/Hello-World.git",
"svn_url": "https://github.com/Codertocat/Hello-World",
"homepage": "null",
"size": "0",
"stargazers_count": "0",
"watchers_count": "0",
"language": "null",
"has_issues": "true",
"has_projects": "true",
"has_downloads": "true",
"has_wiki": "true",
"has_pages": "true",
"forks_count": "0",
"mirror_url": "null",
"archived": "false",
"disabled": "false",
"open_issues_count": "2",
"license": "null",
"forks": "0",
"open_issues": "2",
"watchers": "0",
"default_branch": "master"
}
},
"_links": {
"self": {
"href": "https://api.github.com/repos/Codertocat/Hello-World/pulls/2"
},
"html": {
"href": "https://github.com/Codertocat/Hello-World/pull/2"
},
"issue": {
"href": "https://api.github.com/repos/Codertocat/Hello-World/issues/2"
},
"comments": {
"href": "https://api.github.com/repos/Codertocat/Hello-World/issues/2/comments"
},
"review_comments": {
"href": "https://api.github.com/repos/Codertocat/Hello-World/pulls/2/comments"
},
"review_comment": {
"href": "https://api.github.com/repos/Codertocat/Hello-World/pulls/comments{/number}"
},
"commits": {
"href": "https://api.github.com/repos/Codertocat/Hello-World/pulls/2/commits"
},
"statuses": {
"href": "https://api.github.com/repos/Codertocat/Hello-World/statuses/ec26c3e57ca3a959ca5aad62de7213c562f8c821"
}
},
"author_association": "OWNER",
"draft": "false",
"merged": "false",
"mergeable": "null",
"rebaseable": "null",
"mergeable_state": "unknown",
"merged_by": "null",
"comments": "0",
"review_comments": "0",
"maintainer_can_modify": "false",
"commits": "1",
"additions": "1",
"deletions": "1",
"changed_files": "1"
},
"repository": {
"id": "186853002",
"node_id": "MDEwOlJlcG9zaXRvcnkxODY4NTMwMDI=",
"name": "Hello-World",
"full_name": "Codertocat/Hello-World",
"private": "false",
"owner": {
"login": "Codertocat",
"id": "21031067",
"node_id": "MDQ6VXNlcjIxMDMxMDY3",
"avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/Codertocat",
"html_url": "https://github.com/Codertocat",
"followers_url": "https://api.github.com/users/Codertocat/followers",
"following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
"gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
"organizations_url": "https://api.github.com/users/Codertocat/orgs",
"repos_url": "https://api.github.com/users/Codertocat/repos",
"events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/Codertocat/received_events",
"type": "User",
"site_admin": "false"
},
"html_url": "https://github.com/Codertocat/Hello-World",
"description": "null",
"fork": "false",
"url": "https://api.github.com/repos/Codertocat/Hello-World",
"forks_url": "https://api.github.com/repos/Codertocat/Hello-World/forks",
"keys_url": "https://api.github.com/repos/Codertocat/Hello-World/keys{/key_id}",
"collaborators_url": "https://api.github.com/repos/Codertocat/Hello-World/collaborators{/collaborator}",
"teams_url": "https://api.github.com/repos/Codertocat/Hello-World/teams",
"hooks_url": "https://api.github.com/repos/Codertocat/Hello-World/hooks",
"issue_events_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/events{/number}",
"events_url": "https://api.github.com/repos/Codertocat/Hello-World/events",
"assignees_url": "https://api.github.com/repos/Codertocat/Hello-World/assignees{/user}",
"branches_url": "https://api.github.com/repos/Codertocat/Hello-World/branches{/branch}",
"tags_url": "https://api.github.com/repos/Codertocat/Hello-World/tags",
"blobs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/blobs{/sha}",
"git_tags_url": "https://api.github.com/repos/Codertocat/Hello-World/git/tags{/sha}",
"git_refs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/refs{/sha}",
"trees_url": "https://api.github.com/repos/Codertocat/Hello-World/git/trees{/sha}",
"statuses_url": "https://api.github.com/repos/Codertocat/Hello-World/statuses/{sha}",
"languages_url": "https://api.github.com/repos/Codertocat/Hello-World/languages",
"stargazers_url": "https://api.github.com/repos/Codertocat/Hello-World/stargazers",
"contributors_url": "https://api.github.com/repos/Codertocat/Hello-World/contributors",
"subscribers_url": "https://api.github.com/repos/Codertocat/Hello-World/subscribers",
"subscription_url": "https://api.github.com/repos/Codertocat/Hello-World/subscription",
"commits_url": "https://api.github.com/repos/Codertocat/Hello-World/commits{/sha}",
"git_commits_url": "https://api.github.com/repos/Codertocat/Hello-World/git/commits{/sha}",
"comments_url": "https://api.github.com/repos/Codertocat/Hello-World/comments{/number}",
"issue_comment_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/Codertocat/Hello-World/contents/{+path}",
"compare_url": "https://api.github.com/repos/Codertocat/Hello-World/compare/{base}...{head}",
"merges_url": "https://api.github.com/repos/Codertocat/Hello-World/merges",
"archive_url": "https://api.github.com/repos/Codertocat/Hello-World/{archive_format}{/ref}",
"downloads_url": "https://api.github.com/repos/Codertocat/Hello-World/downloads",
"issues_url": "https://api.github.com/repos/Codertocat/Hello-World/issues{/number}",
"pulls_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls{/number}",
"milestones_url": "https://api.github.com/repos/Codertocat/Hello-World/milestones{/number}",
"notifications_url": "https://api.github.com/repos/Codertocat/Hello-World/notifications{?since,all,participating}",
"labels_url": "https://api.github.com/repos/Codertocat/Hello-World/labels{/name}",
"releases_url": "https://api.github.com/repos/Codertocat/Hello-World/releases{/id}",
"deployments_url": "https://api.github.com/repos/Codertocat/Hello-World/deployments",
"created_at": "2019-05-15T15:19:25Z",
"updated_at": "2019-05-15T15:19:27Z",
"pushed_at": "2019-05-15T15:20:32Z",
"git_url": "git://github.com/Codertocat/Hello-World.git",
"ssh_url": "git@github.com:Codertocat/Hello-World.git",
"clone_url": "https://github.com/Codertocat/Hello-World.git",
"svn_url": "https://github.com/Codertocat/Hello-World",
"homepage": "null",
"size": "0",
"stargazers_count": "0",
"watchers_count": "0",
"language": "null",
"has_issues": "true",
"has_projects": "true",
"has_downloads": "true",
"has_wiki": "true",
"has_pages": "true",
"forks_count": "0",
"mirror_url": "null",
"archived": "false",
"disabled": "false",
"open_issues_count": "2",
"license": "null",
"forks": "0",
"open_issues": "2",
"watchers": "0",
"default_branch": "master"
},
"sender": {
"login": "Codertocat",
"id": "21031067",
"node_id": "MDQ6VXNlcjIxMDMxMDY3",
"avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
"gravatar_id": "",
"url": "https://api.github.com/users/Codertocat",
"html_url": "https://github.com/Codertocat",
"followers_url": "https://api.github.com/users/Codertocat/followers",
"following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
"gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
"organizations_url": "https://api.github.com/users/Codertocat/orgs",
"repos_url": "https://api.github.com/users/Codertocat/repos",
"events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/Codertocat/received_events",
"type": "User",
"site_admin": "false"
},
},
"event": "pull_request",
"request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
}
event = {
'tags': 'githubeventsqs'
}
event['details'] = message
result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result)
self.verify_metadata(metadata)
self.verify_meta(message, result)
self.verify_actor(message, result)
self.verify_repo(message, result)
assert result['source'] == 'pull_request'
assert result['details']['action'] == message['body']['action']
assert result['summary'] == 'github: pull_request: opened on repo: Hello-World triggered by user: Codertocat'
def test_delete(self):
message = {
"body": {
"ref": "gecko/1499958",
"ref_type": "branch",
"sender": {
"following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
"events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
"organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
"url": "https://api.github.com/users/chromium-wpt-export-bot",
"gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
"html_url": "https://github.com/chromium-wpt-export-bot",
"subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
"avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
"repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
"followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
"received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
"gravatar_id": "",
"starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
"site_admin": "false",
"login": "chromium-wpt-export-bot",
"type": "User",
"id": "25752892",
"node_id": "MDQ6VXNlcjI1NzUyODky"
},
"repository": {
"issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
"deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
"has_wiki": "true",
"forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
"mirror_url": "null",
"subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
"merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
"collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
"updated_at": "2018-11-01T00:51:49Z",
"svn_url": "https://github.com/web-platform-tests/wpt",
"pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
"owner": {
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"name": "web-platform-tests",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"url": "https://api.github.com/users/web-platform-tests",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"html_url": "https://github.com/web-platform-tests",
"email": "",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"gravatar_id": "",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"site_admin": "false",
"login": "web-platform-tests",
"type": "Organization",
"id": "37226233",
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
},
"full_name": "web-platform-tests/wpt",
"issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
"id": "3618133",
"keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
"size": "305511",
"tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
"archived": "false",
"has_downloads": "true",
"downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
"assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
"git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
"has_projects": "true",
"clone_url": "https://github.com/web-platform-tests/wpt.git",
"watchers_count": "1845",
"git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
"labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
"organization": "web-platform-tests",
"stargazers_count": "1845",
"homepage": "http://irc.w3.org/?channels=testing",
"open_issues": "1328",
"fork": "false",
"milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
"releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
"issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
"archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
"has_pages": "true",
"events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
"contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
"html_url": "https://github.com/web-platform-tests/wpt",
"compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
"language": "HTML",
"watchers": "1845",
"private": "false",
"forks_count": "1523",
"notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
"has_issues": "true",
"ssh_url": "git@github.com:web-platform-tests/wpt.git",
"blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
"master_branch": "master",
"forks": "1523",
"hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
"open_issues_count": "1317",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
"name": "wpt",
"license": {
"spdx_id": "NOASSERTION",
"url": "null",
"node_id": "MDc6TGljZW5zZTA=",
"name": "Other",
"key": "other"
},
"url": "https://github.com/web-platform-tests/wpt",
"stargazers": "1845",
"created_at": "1330865891",
"pushed_at": "1541037488",
"branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
"node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
"default_branch": "master",
"teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
"trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
"languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
"git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
"subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
"stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
"git_url": "git://github.com/web-platform-tests/wpt.git"
},
"organization": {
"issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
"members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
"description": "",
"public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
"url": "https://api.github.com/orgs/web-platform-tests",
"events_url": "https://api.github.com/orgs/web-platform-tests/events",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
"login": "web-platform-tests",
"id": "37226233",
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
},
},
"event": "delete",
"request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
}
event = {
'tags': 'githubeventsqs'
}
event['details'] = message
result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result)
self.verify_metadata(metadata)
self.verify_meta(message, result)
self.verify_actor(message, result)
self.verify_repo(message, result)
self.verify_org(message, result)
assert result['source'] == 'delete'
assert result['details']['ref'] == message['body']['ref']
assert result['details']['ref_type'] == message['body']['ref_type']
assert result['summary'] == 'github: delete: branch on repo: wpt in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
def test_create(self):
message = {
"body": {
"ref": "gecko/1499958",
"ref_type": "branch",
"sender": {
"following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
"events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
"organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
"url": "https://api.github.com/users/chromium-wpt-export-bot",
"gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
"html_url": "https://github.com/chromium-wpt-export-bot",
"subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
"avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
"repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
"followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
"received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
"gravatar_id": "",
"starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
"site_admin": "false",
"login": "chromium-wpt-export-bot",
"type": "User",
"id": 25752892,
"node_id": "MDQ6VXNlcjI1NzUyODky"
},
"repository": {
"issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
"deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
"has_wiki": "true",
"forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
"mirror_url": "null",
"subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
"merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
"collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
"updated_at": "2018-11-01T00:51:49Z",
"svn_url": "https://github.com/web-platform-tests/wpt",
"pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
"owner": {
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"name": "web-platform-tests",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"url": "https://api.github.com/users/web-platform-tests",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"html_url": "https://github.com/web-platform-tests",
"email": "",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"gravatar_id": "",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"site_admin": "false",
"login": "web-platform-tests",
"type": "Organization",
"id": 37226233,
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
},
"full_name": "web-platform-tests/wpt",
"issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
"id": 3618133,
"keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
"size": "305511",
"tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
"archived": "false",
"has_downloads": "true",
"downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
"assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
"git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
"has_projects": "true",
"clone_url": "https://github.com/web-platform-tests/wpt.git",
"watchers_count": "1845",
"git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
"labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
"organization": "web-platform-tests",
"stargazers_count": "1845",
"homepage": "http://irc.w3.org/?channels=testing",
"open_issues": "1328",
"fork": "false",
"milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
"releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
"issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
"archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
"has_pages": "true",
"events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
"contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
"html_url": "https://github.com/web-platform-tests/wpt",
"compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
"language": "HTML",
"watchers": "1845",
"private": "false",
"forks_count": "1523",
"notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
"has_issues": "true",
"ssh_url": "git@github.com:web-platform-tests/wpt.git",
"blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
"master_branch": "master",
"forks": "1523",
"hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
"open_issues_count": "1317",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
"name": "wpt",
"license": {
"spdx_id": "NOASSERTION",
"url": "null",
"node_id": "MDc6TGljZW5zZTA=",
"name": "Other",
"key": "other"
},
"url": "https://github.com/web-platform-tests/wpt",
"stargazers": "1845",
"created_at": "1330865891",
"pushed_at": "1541037488",
"branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
"node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
"default_branch": "master",
"teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
"trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
"languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
"git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
"subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
"stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
"git_url": "git://github.com/web-platform-tests/wpt.git"
},
"organization": {
"issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
"members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
"description": "",
"public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
"url": "https://api.github.com/orgs/web-platform-tests",
"events_url": "https://api.github.com/orgs/web-platform-tests/events",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
"login": "web-platform-tests",
"id": "37226233",
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
},
},
"event": "create",
"request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
}
event = {
'tags': 'githubeventsqs'
}
event['details'] = message
result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result)
self.verify_metadata(metadata)
self.verify_meta(message, result)
self.verify_actor(message, result)
self.verify_repo(message, result)
self.verify_org(message, result)
assert result['source'] == 'create'
assert result['details']['ref'] == message['body']['ref']
assert result['details']['ref_type'] == message['body']['ref_type']
assert result['summary'] == 'github: create: branch on repo: wpt in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
def test_repository_vulnerability_alert(self):
message = {
"body": {
"action": "create",
"alert": {
"affected_package_name": "requests",
"external_reference": "https://nvd.nist.gov/vuln/detail/CVE-2018-18074",
"external_identifier": "CVE-2018-18074",
"affected_range": "<= 2.19.1",
"id": "65626688",
"fixed_in": "2.20.0",
"dismisser": {
"login": "octocat",
"id": "1",
"node_id": "MDQ6VXNlcjIxMDMxMDY3",
"avatar_url": "https://github.com/images/error/octocat_happy.gif",
"gravatar_id": "",
"url": "https://api.github.com/users/octocat",
"html_url": "https://github.com/octocat",
"followers_url": "https://api.github.com/users/octocat/followers",
"following_url": "https://api.github.com/users/octocat/following{/other_user}",
"gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
"starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
"subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
"organizations_url": "https://api.github.com/users/octocat/orgs",
"repos_url": "https://api.github.com/users/octocat/repos",
"events_url": "https://api.github.com/users/octocat/events{/privacy}",
"received_events_url": "https://api.github.com/users/octocat/received_events",
"type": "User",
"site_admin": "true",
},
"dismiss_reason": "I'm too lazy to fix this",
"dismissed_at": "2017-10-25T00:00:00+00:00",
},
"sender": {
"following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
"events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
"organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
"url": "https://api.github.com/users/chromium-wpt-export-bot",
"gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
"html_url": "https://github.com/chromium-wpt-export-bot",
"subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
"avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
"repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
"followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
"received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
"gravatar_id": "",
"starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
"site_admin": "false",
"login": "chromium-wpt-export-bot",
"type": "User",
"id": "25752892",
"node_id": "MDQ6VXNlcjI1NzUyODky"
},
"repository": {
"issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
"deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
"has_wiki": "true",
"forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
"mirror_url": "null",
"subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
"merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
"collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
"updated_at": "2018-11-01T00:51:49Z",
"svn_url": "https://github.com/web-platform-tests/wpt",
"pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
"owner": {
"following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
"events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
"name": "web-platform-tests",
"organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
"url": "https://api.github.com/users/web-platform-tests",
"gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
"subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
"html_url": "https://github.com/web-platform-tests",
"email": "",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"repos_url": "https://api.github.com/users/web-platform-tests/repos",
"followers_url": "https://api.github.com/users/web-platform-tests/followers",
"received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
"gravatar_id": "",
"starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
"site_admin": "false",
"login": "web-platform-tests",
"type": "Organization",
"id": "37226233",
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
},
"full_name": "web-platform-tests/wpt",
"issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
"contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
"id": "3618133",
"keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
"size": "305511",
"tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
"archived": "false",
"has_downloads": "true",
"downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
"assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
"statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
"git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
"has_projects": "true",
"clone_url": "https://github.com/web-platform-tests/wpt.git",
"watchers_count": "1845",
"git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
"labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
"organization": "web-platform-tests",
"stargazers_count": "1845",
"homepage": "http://irc.w3.org/?channels=testing",
"open_issues": "1328",
"fork": "false",
"milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
"commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
"releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
"issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
"archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
"has_pages": "true",
"events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
"contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
"html_url": "https://github.com/web-platform-tests/wpt",
"compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
"language": "HTML",
"watchers": "1845",
"private": "false",
"forks_count": "1523",
"notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
"has_issues": "true",
"ssh_url": "git@github.com:web-platform-tests/wpt.git",
"blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
"master_branch": "master",
"forks": "1523",
"hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
"open_issues_count": "1317",
"comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
"name": "wpt",
"license": {
"spdx_id": "NOASSERTION",
"url": "null",
"node_id": "MDc6TGljZW5zZTA=",
"name": "Other",
"key": "other"
},
"url": "https://github.com/web-platform-tests/wpt",
"stargazers": "1845",
"created_at": "1330865891",
"pushed_at": "1541037488",
"branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
"node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
"default_branch": "master",
"teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
"trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
"languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
"git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
"subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
"stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
"git_url": "git://github.com/web-platform-tests/wpt.git"
},
"organization": {
"issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
"members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
"description": "",
"public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
"url": "https://api.github.com/orgs/web-platform-tests",
"events_url": "https://api.github.com/orgs/web-platform-tests/events",
"avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
"repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
"login": "web-platform-tests",
"id": "37226233",
"node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
"hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
},
},
"event": "repository_vulnerability_alert",
"request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
}
event = {
'tags': 'githubeventsqs'
}
event['details'] = message
result, metadata = self.plugin.onMessage(event, self.metadata)
self.verify_defaults(result)
self.verify_metadata(metadata)
self.verify_meta(message, result)
self.verify_actor(message, result)
self.verify_repo(message, result)
self.verify_org(message, result)
assert result['source'] == 'repository_vulnerability_alert'
assert result['details']['action'] == message['body']['action']
assert result['details']['alert_package'] == message['body']['alert']['affected_package_name']
assert result['details']['alert_range'] == message['body']['alert']['affected_range']
assert result['details']['alert_extid'] == message['body']['alert']['external_identifier']
assert result['details']['alert_extref'] == message['body']['alert']['external_reference']
assert result['details']['alert_fixed'] == message['body']['alert']['fixed_in']
assert result['details']['alert_id'] == message['body']['alert']['id']
assert result['details']['dismiss_user'] == message['body']['alert']['dismisser']['login']
assert result['details']['dismiss_id'] == message['body']['alert']['dismisser']['id']
assert result['details']['dismiss_node_id'] == message['body']['alert']['dismisser']['node_id']
assert result['details']['dismiss_type'] == message['body']['alert']['dismisser']['type']
assert result['details']['dismiss_site_admin'] == message['body']['alert']['dismisser']['site_admin']
assert result['summary'] == 'github: repository_vulnerability_alert: create on repo: wpt package: requests in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
    def test_security_advisory(self):
        """Verify the plugin normalizes a GitHub 'security_advisory' webhook.

        Feeds a trimmed advisory payload (GHSA/CVE for a Django info-disclosure
        issue) through ``onMessage`` and checks that the plugin sets
        ``source``, copies ``action`` and the advisory description into
        ``details``, and builds the expected human-readable ``summary``.
        """
        # NOTE: the local name `message` shadows the imported plugin class
        # `message` (file-wide convention in these tests); here it is the raw
        # webhook payload as delivered off the queue.
        message = {
            "body": {
                "action": "published",
                "security_advisory": {
                    "ghsa_id": "GHSA-rf4j-j272-fj86",
                    "summary": "Moderate severity vulnerability that affects django",
                    "description": "django.contrib.auth.forms.AuthenticationForm in Django 2.0 before 2.0.2, and 1.11.8 and 1.11.9, allows remote attackers to obtain potentially sensitive information by leveraging data exposure from the confirm_login_allowed() method, as demonstrated by discovering whether a user account is inactive.",
                    "severity": "moderate",
                    "identifiers": [
                        {
                            "value": "GHSA-rf4j-j272-fj86",
                            "type": "GHSA"
                        },
                        {
                            "value": "CVE-2018-6188",
                            "type": "CVE"
                        }
                    ],
                    "references": [
                        {
                            "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-6188"
                        }
                    ],
                    "published_at": "2018-10-03T21:13:54Z",
                    "updated_at": "2018-10-03T21:13:54Z",
                    # "null" here is the literal string GitHub delivers in this
                    # fixture, not Python None.
                    "withdrawn_at": "null",
                    "vulnerabilities": [
                        {
                            "package": {
                                "ecosystem": "pip",
                                "name": "django"
                            },
                            "severity": "moderate",
                            "vulnerable_version_range": ">= 2.0.0, < 2.0.2",
                            "first_patched_version": {
                                "identifier": "2.0.2"
                            }
                        },
                        {
                            "package": {
                                "ecosystem": "pip",
                                "name": "django"
                            },
                            "severity": "moderate",
                            "vulnerable_version_range": ">= 1.11.8, < 1.11.10",
                            "first_patched_version": {
                                "identifier": "1.11.10"
                            }
                        }
                    ]
                },
            },
            "event": "security_advisory",
            "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd",
        }
        # Wrap the payload the way the MQ consumer would: tagged event with
        # the raw webhook under 'details'.
        event = {
            'tags': 'githubeventsqs'
        }
        event['details'] = message
        result, metadata = self.plugin.onMessage(event, self.metadata)
        # Shared invariants (defaults, index routing, common metadata fields).
        self.verify_defaults(result)
        self.verify_metadata(metadata)
        self.verify_meta(message, result)
        assert result['source'] == 'security_advisory'
        assert result['details']['action'] == message['body']['action']
        assert result['details']['alert_description'] == message['body']['security_advisory']['description']
        assert result['summary'] == 'github: security_advisory: published for: Moderate severity vulnerability that affects django'
    def test_repository(self):
        """Verify the plugin normalizes a GitHub 'repository' webhook.

        Uses a captured 'deleted' repository event (web-platform-tests/wpt)
        and checks ``source``, ``details.action``, the actor/repo/org
        extraction helpers, and the generated ``summary`` line.
        """
        # Raw webhook payload fixture; local `message` shadows the imported
        # plugin class `message` (file-wide convention in these tests).
        # In this fixture sender/repo/org ids are ints; sibling tests use
        # string ids — the fixtures are kept as captured.
        message = {
            "body": {
                "action": "deleted",
                "sender": {
                    "following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
                    "events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
                    "organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
                    "url": "https://api.github.com/users/chromium-wpt-export-bot",
                    "gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
                    "html_url": "https://github.com/chromium-wpt-export-bot",
                    "subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
                    "repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
                    "followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
                    "received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
                    "gravatar_id": "",
                    "starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
                    "site_admin": "false",
                    "login": "chromium-wpt-export-bot",
                    "type": "User",
                    "id": 25752892,
                    "node_id": "MDQ6VXNlcjI1NzUyODky"
                },
                "repository": {
                    "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
                    "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
                    "has_wiki": "true",
                    "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
                    "mirror_url": "null",
                    "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
                    "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
                    "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
                    "updated_at": "2018-11-01T00:51:49Z",
                    "svn_url": "https://github.com/web-platform-tests/wpt",
                    "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
                    "owner": {
                        "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
                        "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
                        "name": "web-platform-tests",
                        "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
                        "url": "https://api.github.com/users/web-platform-tests",
                        "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
                        "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
                        "html_url": "https://github.com/web-platform-tests",
                        "email": "",
                        "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                        "repos_url": "https://api.github.com/users/web-platform-tests/repos",
                        "followers_url": "https://api.github.com/users/web-platform-tests/followers",
                        "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
                        "gravatar_id": "",
                        "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
                        "site_admin": "false",
                        "login": "web-platform-tests",
                        "type": "Organization",
                        "id": 37226233,
                        "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
                    },
                    "full_name": "web-platform-tests/wpt",
                    "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
                    "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
                    "id": 3618133,
                    "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
                    "size": "305511",
                    "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
                    "archived": "false",
                    "has_downloads": "true",
                    "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
                    "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
                    "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
                    "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
                    "has_projects": "true",
                    "clone_url": "https://github.com/web-platform-tests/wpt.git",
                    "watchers_count": "1845",
                    "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
                    "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
                    "organization": "web-platform-tests",
                    "stargazers_count": "1845",
                    "homepage": "http://irc.w3.org/?channels=testing",
                    "open_issues": "1328",
                    "fork": "false",
                    "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
                    "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
                    "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
                    "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
                    "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
                    "has_pages": "true",
                    "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
                    "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
                    "html_url": "https://github.com/web-platform-tests/wpt",
                    "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
                    "language": "HTML",
                    "watchers": "1845",
                    "private": "false",
                    "forks_count": "1523",
                    "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
                    "has_issues": "true",
                    "ssh_url": "git@github.com:web-platform-tests/wpt.git",
                    "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
                    "master_branch": "master",
                    "forks": "1523",
                    "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
                    "open_issues_count": "1317",
                    "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
                    "name": "wpt",
                    "license": {
                        "spdx_id": "NOASSERTION",
                        "url": "null",
                        "node_id": "MDc6TGljZW5zZTA=",
                        "name": "Other",
                        "key": "other"
                    },
                    "url": "https://github.com/web-platform-tests/wpt",
                    "stargazers": "1845",
                    "created_at": "1330865891",
                    "pushed_at": "1541037488",
                    "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
                    "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
                    "default_branch": "master",
                    "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
                    "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
                    "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
                    "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
                    "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
                    "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
                    "git_url": "git://github.com/web-platform-tests/wpt.git"
                },
                "organization": {
                    "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
                    "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
                    "description": "",
                    "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
                    "url": "https://api.github.com/orgs/web-platform-tests",
                    "events_url": "https://api.github.com/orgs/web-platform-tests/events",
                    "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                    "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
                    "login": "web-platform-tests",
                    "id": 37226233,
                    "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
                    "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
                },
            },
            "event": "repository",
            "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
        }
        # Wrap the payload the way the MQ consumer would: tagged event with
        # the raw webhook under 'details'.
        event = {
            'tags': 'githubeventsqs'
        }
        event['details'] = message
        result, metadata = self.plugin.onMessage(event, self.metadata)
        # Shared invariants plus actor/repo/org field extraction checks.
        self.verify_defaults(result)
        self.verify_metadata(metadata)
        self.verify_meta(message, result)
        self.verify_actor(message, result)
        self.verify_repo(message, result)
        self.verify_org(message, result)
        assert result['source'] == 'repository'
        assert result['details']['action'] == message['body']['action']
        assert result['summary'] == 'github: repository: deleted on repo: wpt in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
    def test_member(self):
        """Verify the plugin normalizes a GitHub 'member' webhook.

        Uses a captured 'added' member event with a permission change
        (``changes.permission.from``) and checks ``source``, ``details``
        member/changes fields, the actor/repo/org helpers, and ``summary``.
        """
        # Raw webhook payload fixture; local `message` shadows the imported
        # plugin class `message` (file-wide convention in these tests).
        # In this fixture all numeric ids are strings, unlike test_repository;
        # the fixtures are kept as captured.
        message = {
            "body": {
                "member": {
                    "id": "60618",
                    "login": "emmairwin",
                    "node_id": "MDQ6VXNlcjYwNjE4",
                    "site_admin": "false",
                },
                # Previous permission level of the member before the change.
                "changes": {
                    "permission": {
                        "from": "write",
                    },
                },
                "action": "added",
                "sender": {
                    "following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
                    "events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
                    "organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
                    "url": "https://api.github.com/users/chromium-wpt-export-bot",
                    "gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
                    "html_url": "https://github.com/chromium-wpt-export-bot",
                    "subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
                    "repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
                    "followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
                    "received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
                    "gravatar_id": "",
                    "starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
                    "site_admin": "false",
                    "login": "chromium-wpt-export-bot",
                    "type": "User",
                    "id": "25752892",
                    "node_id": "MDQ6VXNlcjI1NzUyODky"
                },
                "repository": {
                    "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
                    "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
                    "has_wiki": "true",
                    "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
                    "mirror_url": "null",
                    "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
                    "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
                    "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
                    "updated_at": "2018-11-01T00:51:49Z",
                    "svn_url": "https://github.com/web-platform-tests/wpt",
                    "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
                    "owner": {
                        "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
                        "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
                        "name": "web-platform-tests",
                        "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
                        "url": "https://api.github.com/users/web-platform-tests",
                        "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
                        "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
                        "html_url": "https://github.com/web-platform-tests",
                        "email": "",
                        "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                        "repos_url": "https://api.github.com/users/web-platform-tests/repos",
                        "followers_url": "https://api.github.com/users/web-platform-tests/followers",
                        "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
                        "gravatar_id": "",
                        "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
                        "site_admin": "false",
                        "login": "web-platform-tests",
                        "type": "Organization",
                        "id": "37226233",
                        "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
                    },
                    "full_name": "web-platform-tests/wpt",
                    "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
                    "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
                    "id": "3618133",
                    "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
                    "size": "305511",
                    "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
                    "archived": "false",
                    "has_downloads": "true",
                    "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
                    "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
                    "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
                    "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
                    "has_projects": "true",
                    "clone_url": "https://github.com/web-platform-tests/wpt.git",
                    "watchers_count": "1845",
                    "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
                    "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
                    "organization": "web-platform-tests",
                    "stargazers_count": "1845",
                    "homepage": "http://irc.w3.org/?channels=testing",
                    "open_issues": "1328",
                    "fork": "false",
                    "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
                    "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
                    "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
                    "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
                    "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
                    "has_pages": "true",
                    "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
                    "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
                    "html_url": "https://github.com/web-platform-tests/wpt",
                    "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
                    "language": "HTML",
                    "watchers": "1845",
                    "private": "false",
                    "forks_count": "1523",
                    "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
                    "has_issues": "true",
                    "ssh_url": "git@github.com:web-platform-tests/wpt.git",
                    "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
                    "master_branch": "master",
                    "forks": "1523",
                    "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
                    "open_issues_count": "1317",
                    "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
                    "name": "wpt",
                    "license": {
                        "spdx_id": "NOASSERTION",
                        "url": "null",
                        "node_id": "MDc6TGljZW5zZTA=",
                        "name": "Other",
                        "key": "other"
                    },
                    "url": "https://github.com/web-platform-tests/wpt",
                    "stargazers": "1845",
                    "created_at": "1330865891",
                    "pushed_at": "1541037488",
                    "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
                    "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
                    "default_branch": "master",
                    "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
                    "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
                    "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
                    "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
                    "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
                    "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
                    "git_url": "git://github.com/web-platform-tests/wpt.git"
                },
                "organization": {
                    "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
                    "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
                    "description": "",
                    "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
                    "url": "https://api.github.com/orgs/web-platform-tests",
                    "events_url": "https://api.github.com/orgs/web-platform-tests/events",
                    "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                    "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
                    "login": "web-platform-tests",
                    "id": "37226233",
                    "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
                    "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
                },
            },
            "event": "member",
            "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
        }
        # Wrap the payload the way the MQ consumer would: tagged event with
        # the raw webhook under 'details'.
        event = {
            'tags': 'githubeventsqs'
        }
        event['details'] = message
        result, metadata = self.plugin.onMessage(event, self.metadata)
        # Shared invariants plus actor/repo/org field extraction checks.
        self.verify_defaults(result)
        self.verify_metadata(metadata)
        self.verify_meta(message, result)
        self.verify_actor(message, result)
        self.verify_repo(message, result)
        self.verify_org(message, result)
        assert result['source'] == 'member'
        assert result['details']['action'] == message['body']['action']
        assert result['details']['member_id'] == message['body']['member']['id']
        assert result['details']['member_login'] == message['body']['member']['login']
        assert result['details']['member_node_id'] == message['body']['member']['node_id']
        assert result['details']['member_site_admin'] == message['body']['member']['site_admin']
        assert result['details']['changes_perm_from'] == message['body']['changes']['permission']['from']
        assert result['summary'] == 'github: member: added on repo: wpt in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
    def test_team(self):
        """Verify the plugin normalizes a GitHub 'team' webhook.

        Uses a captured 'edited' team event carrying both the new repository
        permissions (``repository.permissions``) and the previous ones
        (``changes.repository.permissions.from``), and checks ``source``,
        the flattened team/permission fields in ``details``, the
        actor/repo/org helpers, and the generated ``summary``.
        """
        # Raw webhook payload fixture; local `message` shadows the imported
        # plugin class `message` (file-wide convention in these tests).
        message = {
            "body": {
                "team": {
                    "id": 9060454,
                    "name": "asecretteam",
                    "login": "alamakota",
                    "node_id": "MYQ6VXK4fuwwNAye",
                    "permission": "pull",
                    "privacy": "secret",
                    "slug": "asecretteam",
                },
                # Repository permissions the team had before this edit.
                "changes": {
                    "repository": {
                        "permissions": {
                            "from": {
                                "admin": "false",
                                "pull": "false",
                                "push": "false",
                            },
                        },
                    },
                },
                "action": "edited",
                "sender": {
                    "following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
                    "events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
                    "organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
                    "url": "https://api.github.com/users/chromium-wpt-export-bot",
                    "gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
                    "html_url": "https://github.com/chromium-wpt-export-bot",
                    "subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
                    "repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
                    "followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
                    "received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
                    "gravatar_id": "",
                    "starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
                    "site_admin": "false",
                    "login": "chromium-wpt-export-bot",
                    "type": "User",
                    "id": "25752892",
                    "node_id": "MDQ6VXNlcjI1NzUyODky"
                },
                "repository": {
                    # Current (post-edit) permissions granted to the team.
                    "permissions": {
                        "admin": "true",
                        "pull": "true",
                        "push": "true",
                    },
                    "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
                    "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
                    "has_wiki": "true",
                    "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
                    "mirror_url": "null",
                    "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
                    "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
                    "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
                    "updated_at": "2018-11-01T00:51:49Z",
                    "svn_url": "https://github.com/web-platform-tests/wpt",
                    "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
                    "owner": {
                        "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
                        "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
                        "name": "web-platform-tests",
                        "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
                        "url": "https://api.github.com/users/web-platform-tests",
                        "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
                        "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
                        "html_url": "https://github.com/web-platform-tests",
                        "email": "",
                        "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                        "repos_url": "https://api.github.com/users/web-platform-tests/repos",
                        "followers_url": "https://api.github.com/users/web-platform-tests/followers",
                        "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
                        "gravatar_id": "",
                        "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
                        "site_admin": "false",
                        "login": "web-platform-tests",
                        "type": "Organization",
                        "id": "37226233",
                        "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
                    },
                    "full_name": "web-platform-tests/wpt",
                    "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
                    "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
                    "id": 3618133,
                    "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
                    "size": "305511",
                    "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
                    "archived": "false",
                    "has_downloads": "true",
                    "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
                    "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
                    "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
                    "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
                    "has_projects": "true",
                    "clone_url": "https://github.com/web-platform-tests/wpt.git",
                    "watchers_count": "1845",
                    "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
                    "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
                    "organization": "web-platform-tests",
                    "stargazers_count": "1845",
                    "homepage": "http://irc.w3.org/?channels=testing",
                    "open_issues": "1328",
                    "fork": "false",
                    "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
                    "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
                    "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
                    "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
                    "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
                    "has_pages": "true",
                    "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
                    "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
                    "html_url": "https://github.com/web-platform-tests/wpt",
                    "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
                    "language": "HTML",
                    "watchers": "1845",
                    "private": "false",
                    "forks_count": "1523",
                    "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
                    "has_issues": "true",
                    "ssh_url": "git@github.com:web-platform-tests/wpt.git",
                    "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
                    "master_branch": "master",
                    "forks": "1523",
                    "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
                    "open_issues_count": "1317",
                    "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
                    "name": "wpt",
                    "license": {
                        "spdx_id": "NOASSERTION",
                        "url": "null",
                        "node_id": "MDc6TGljZW5zZTA=",
                        "name": "Other",
                        "key": "other"
                    },
                    "url": "https://github.com/web-platform-tests/wpt",
                    "stargazers": "1845",
                    "created_at": "1330865891",
                    "pushed_at": "1541037488",
                    "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
                    "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
                    "default_branch": "master",
                    "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
                    "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
                    "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
                    "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
                    "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
                    "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
                    "git_url": "git://github.com/web-platform-tests/wpt.git"
                },
                "organization": {
                    "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
                    "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
                    "description": "",
                    "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
                    "url": "https://api.github.com/orgs/web-platform-tests",
                    "events_url": "https://api.github.com/orgs/web-platform-tests/events",
                    "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                    "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
                    "login": "web-platform-tests",
                    "id": 37226233,
                    "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
                    "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
                },
            },
            "event": "team",
            "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
        }
        # Wrap the payload the way the MQ consumer would: tagged event with
        # the raw webhook under 'details'.
        event = {
            'tags': 'githubeventsqs'
        }
        event['details'] = message
        result, metadata = self.plugin.onMessage(event, self.metadata)
        # Shared invariants plus actor/repo/org field extraction checks.
        self.verify_defaults(result)
        self.verify_metadata(metadata)
        self.verify_meta(message, result)
        self.verify_actor(message, result)
        self.verify_repo(message, result)
        self.verify_org(message, result)
        assert result['source'] == 'team'
        assert result['details']['action'] == message['body']['action']
        assert result['details']['repo_perm_from_admin'] == message['body']['changes']['repository']['permissions']['from']['admin']
        assert result['details']['repo_perm_from_pull'] == message['body']['changes']['repository']['permissions']['from']['pull']
        assert result['details']['repo_perm_from_push'] == message['body']['changes']['repository']['permissions']['from']['push']
        assert result['details']['repo_perm_admin'] == message['body']['repository']['permissions']['admin']
        assert result['details']['repo_perm_pull'] == message['body']['repository']['permissions']['pull']
        assert result['details']['repo_perm_push'] == message['body']['repository']['permissions']['push']
        assert result['details']['team_id'] == message['body']['team']['id']
        assert result['details']['team_name'] == message['body']['team']['name']
        assert result['details']['team_node_id'] == message['body']['team']['node_id']
        assert result['details']['team_permission'] == message['body']['team']['permission']
        assert result['details']['team_privacy'] == message['body']['team']['privacy']
        assert result['details']['team_slug'] == message['body']['team']['slug']
        assert result['summary'] == 'github: team: edited on repo: wpt team: asecretteam in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
    def test_team_add(self):
        """Verify normalization of a GitHub 'team_add' webhook event.

        Builds a sample githubeventsqs payload (outer 'event'/'request_id'
        envelope wrapping the GitHub webhook 'body'), runs it through the
        plugin, and asserts that team, repo-permission, and permission-change
        fields are flattened into result['details'] and that the summary
        line is rendered as expected.
        """
        # NOTE(review): this local deliberately-or-not shadows the imported
        # `message` plugin class from mq.plugins.github_webhooks.
        message = {
            "body": {
                "team": {
                    "id": 9060454,
                    "name": "asecretteam",
                    "login": "alamakota",
                    "node_id": "MYQ6VXK4fuwwNAye",
                    "permission": "pull",
                    "privacy": "secret",
                    "slug": "asecretteam",
                },
                # Previous repository permission values, as reported by the
                # webhook's "changes" section.
                "changes": {
                    "repository": {
                        "permissions": {
                            "from": {
                                "admin": "false",
                                "pull": "false",
                                "push": "false",
                            },
                        },
                    },
                },
                "sender": {
                    "following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
                    "events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
                    "organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
                    "url": "https://api.github.com/users/chromium-wpt-export-bot",
                    "gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
                    "html_url": "https://github.com/chromium-wpt-export-bot",
                    "subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
                    "repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
                    "followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
                    "received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
                    "gravatar_id": "",
                    "starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
                    "site_admin": "false",
                    "login": "chromium-wpt-export-bot",
                    "type": "User",
                    "id": 25752892,
                    "node_id": "MDQ6VXNlcjI1NzUyODky"
                },
                "repository": {
                    "permissions": {
                        "admin": "true",
                        "pull": "true",
                        "push": "true",
                    },
                    "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
                    "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
                    "has_wiki": "true",
                    "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
                    "mirror_url": "null",
                    "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
                    "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
                    "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
                    "updated_at": "2018-11-01T00:51:49Z",
                    "svn_url": "https://github.com/web-platform-tests/wpt",
                    "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
                    "owner": {
                        "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
                        "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
                        "name": "web-platform-tests",
                        "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
                        "url": "https://api.github.com/users/web-platform-tests",
                        "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
                        "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
                        "html_url": "https://github.com/web-platform-tests",
                        "email": "",
                        "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                        "repos_url": "https://api.github.com/users/web-platform-tests/repos",
                        "followers_url": "https://api.github.com/users/web-platform-tests/followers",
                        "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
                        "gravatar_id": "",
                        "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
                        "site_admin": "false",
                        "login": "web-platform-tests",
                        "type": "Organization",
                        "id": 37226233,
                        "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
                    },
                    "full_name": "web-platform-tests/wpt",
                    "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
                    "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
                    "id": 3618133,
                    "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
                    "size": "305511",
                    "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
                    "archived": "false",
                    "has_downloads": "true",
                    "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
                    "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
                    "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
                    "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
                    "has_projects": "true",
                    "clone_url": "https://github.com/web-platform-tests/wpt.git",
                    "watchers_count": "1845",
                    "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
                    "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
                    "organization": "web-platform-tests",
                    "stargazers_count": "1845",
                    "homepage": "http://irc.w3.org/?channels=testing",
                    "open_issues": "1328",
                    "fork": "false",
                    "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
                    "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
                    "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
                    "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
                    "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
                    "has_pages": "true",
                    "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
                    "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
                    "html_url": "https://github.com/web-platform-tests/wpt",
                    "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
                    "language": "HTML",
                    "watchers": "1845",
                    "private": "false",
                    "forks_count": "1523",
                    "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
                    "has_issues": "true",
                    "ssh_url": "git@github.com:web-platform-tests/wpt.git",
                    "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
                    "master_branch": "master",
                    "forks": "1523",
                    "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
                    "open_issues_count": "1317",
                    "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
                    "name": "wpt",
                    "license": {
                        "spdx_id": "NOASSERTION",
                        "url": "null",
                        "node_id": "MDc6TGljZW5zZTA=",
                        "name": "Other",
                        "key": "other"
                    },
                    "url": "https://github.com/web-platform-tests/wpt",
                    "stargazers": "1845",
                    "created_at": "1330865891",
                    "pushed_at": "1541037488",
                    "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
                    "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
                    "default_branch": "master",
                    "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
                    "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
                    "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
                    "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
                    "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
                    "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
                    "git_url": "git://github.com/web-platform-tests/wpt.git"
                },
                "organization": {
                    "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
                    "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
                    "description": "",
                    "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
                    "url": "https://api.github.com/orgs/web-platform-tests",
                    "events_url": "https://api.github.com/orgs/web-platform-tests/events",
                    "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                    "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
                    "login": "web-platform-tests",
                    "id": 37226233,
                    "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
                    "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
                },
            },
            "event": "team_add",
            "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
        }
        # Wrap the payload the way the SQS consumer delivers it: a tagged
        # event carrying the raw webhook in 'details'.
        event = {
            'tags': 'githubeventsqs'
        }
        event['details'] = message
        result, metadata = self.plugin.onMessage(event, self.metadata)
        # Shared per-section assertion helpers defined on this test class.
        self.verify_defaults(result)
        self.verify_metadata(metadata)
        self.verify_meta(message, result)
        self.verify_actor(message, result)
        self.verify_repo(message, result)
        self.verify_org(message, result)
        # Event-specific fields: permission deltas and team metadata must be
        # copied straight from the payload into the flattened details.
        assert result['source'] == 'team_add'
        assert result['details']['repo_perm_from_admin'] == message['body']['changes']['repository']['permissions']['from']['admin']
        assert result['details']['repo_perm_from_pull'] == message['body']['changes']['repository']['permissions']['from']['pull']
        assert result['details']['repo_perm_from_push'] == message['body']['changes']['repository']['permissions']['from']['push']
        assert result['details']['repo_perm_admin'] == message['body']['repository']['permissions']['admin']
        assert result['details']['repo_perm_pull'] == message['body']['repository']['permissions']['pull']
        assert result['details']['repo_perm_push'] == message['body']['repository']['permissions']['push']
        assert result['details']['team_id'] == message['body']['team']['id']
        assert result['details']['team_name'] == message['body']['team']['name']
        assert result['details']['team_node_id'] == message['body']['team']['node_id']
        assert result['details']['team_permission'] == message['body']['team']['permission']
        assert result['details']['team_privacy'] == message['body']['team']['privacy']
        assert result['details']['team_slug'] == message['body']['team']['slug']
        assert result['summary'] == 'github: team_add: on repo: wpt team: asecretteam in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
    def test_organization(self):
        """Verify normalization of a GitHub 'organization' webhook event.

        Uses a 'member_added' action payload and asserts that the plugin
        copies the action, team metadata, and membership (user/role/state)
        fields into result['details'] and renders the expected summary.
        """
        # NOTE(review): this local shadows the imported `message` plugin class.
        message = {
            "body": {
                "team": {
                    "id": 9060454,
                    "name": "asecretteam",
                    "login": "alamakota",
                    "node_id": "MYQ6VXK4fuwwNAye",
                    "permission": "pull",
                    "privacy": "secret",
                    "slug": "asecretteam",
                },
                # Membership section: the affected user plus their role/state.
                "membership": {
                    "user": {
                        "id": 893282,
                        "login": "alamakota",
                        "node_id": "MDQ6VXNlcjUwMTkyMzQ=",
                        "site_admin": "false",
                        "type": "User",
                    },
                    "role": "member",
                    "state": "pending",
                },
                "action": "member_added",
                "sender": {
                    "following_url": "https://api.github.com/users/chromium-wpt-export-bot/following{/other_user}",
                    "events_url": "https://api.github.com/users/chromium-wpt-export-bot/events{/privacy}",
                    "organizations_url": "https://api.github.com/users/chromium-wpt-export-bot/orgs",
                    "url": "https://api.github.com/users/chromium-wpt-export-bot",
                    "gists_url": "https://api.github.com/users/chromium-wpt-export-bot/gists{/gist_id}",
                    "html_url": "https://github.com/chromium-wpt-export-bot",
                    "subscriptions_url": "https://api.github.com/users/chromium-wpt-export-bot/subscriptions",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/25752892?v=4",
                    "repos_url": "https://api.github.com/users/chromium-wpt-export-bot/repos",
                    "followers_url": "https://api.github.com/users/chromium-wpt-export-bot/followers",
                    "received_events_url": "https://api.github.com/users/chromium-wpt-export-bot/received_events",
                    "gravatar_id": "",
                    "starred_url": "https://api.github.com/users/chromium-wpt-export-bot/starred{/owner}{/repo}",
                    "site_admin": "false",
                    "login": "chromium-wpt-export-bot",
                    "type": "User",
                    "id": 25752892,
                    "node_id": "MDQ6VXNlcjI1NzUyODky"
                },
                "repository": {
                    "permissions": {
                        "admin": "true",
                        "pull": "true",
                        "push": "true",
                    },
                    "issues_url": "https://api.github.com/repos/web-platform-tests/wpt/issues{/number}",
                    "deployments_url": "https://api.github.com/repos/web-platform-tests/wpt/deployments",
                    "has_wiki": "true",
                    "forks_url": "https://api.github.com/repos/web-platform-tests/wpt/forks",
                    "mirror_url": "null",
                    "subscription_url": "https://api.github.com/repos/web-platform-tests/wpt/subscription",
                    "merges_url": "https://api.github.com/repos/web-platform-tests/wpt/merges",
                    "collaborators_url": "https://api.github.com/repos/web-platform-tests/wpt/collaborators{/collaborator}",
                    "updated_at": "2018-11-01T00:51:49Z",
                    "svn_url": "https://github.com/web-platform-tests/wpt",
                    "pulls_url": "https://api.github.com/repos/web-platform-tests/wpt/pulls{/number}",
                    "owner": {
                        "following_url": "https://api.github.com/users/web-platform-tests/following{/other_user}",
                        "events_url": "https://api.github.com/users/web-platform-tests/events{/privacy}",
                        "name": "web-platform-tests",
                        "organizations_url": "https://api.github.com/users/web-platform-tests/orgs",
                        "url": "https://api.github.com/users/web-platform-tests",
                        "gists_url": "https://api.github.com/users/web-platform-tests/gists{/gist_id}",
                        "subscriptions_url": "https://api.github.com/users/web-platform-tests/subscriptions",
                        "html_url": "https://github.com/web-platform-tests",
                        "email": "",
                        "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                        "repos_url": "https://api.github.com/users/web-platform-tests/repos",
                        "followers_url": "https://api.github.com/users/web-platform-tests/followers",
                        "received_events_url": "https://api.github.com/users/web-platform-tests/received_events",
                        "gravatar_id": "",
                        "starred_url": "https://api.github.com/users/web-platform-tests/starred{/owner}{/repo}",
                        "site_admin": "false",
                        "login": "web-platform-tests",
                        "type": "Organization",
                        "id": 37226233,
                        "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz"
                    },
                    "full_name": "web-platform-tests/wpt",
                    "issue_comment_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/comments{/number}",
                    "contents_url": "https://api.github.com/repos/web-platform-tests/wpt/contents/{+path}",
                    "id": 3618133,
                    "keys_url": "https://api.github.com/repos/web-platform-tests/wpt/keys{/key_id}",
                    "size": "305511",
                    "tags_url": "https://api.github.com/repos/web-platform-tests/wpt/tags",
                    "archived": "false",
                    "has_downloads": "true",
                    "downloads_url": "https://api.github.com/repos/web-platform-tests/wpt/downloads",
                    "assignees_url": "https://api.github.com/repos/web-platform-tests/wpt/assignees{/user}",
                    "statuses_url": "https://api.github.com/repos/web-platform-tests/wpt/statuses/{sha}",
                    "git_refs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/refs{/sha}",
                    "has_projects": "true",
                    "clone_url": "https://github.com/web-platform-tests/wpt.git",
                    "watchers_count": "1845",
                    "git_tags_url": "https://api.github.com/repos/web-platform-tests/wpt/git/tags{/sha}",
                    "labels_url": "https://api.github.com/repos/web-platform-tests/wpt/labels{/name}",
                    "organization": "web-platform-tests",
                    "stargazers_count": "1845",
                    "homepage": "http://irc.w3.org/?channels=testing",
                    "open_issues": "1328",
                    "fork": "false",
                    "milestones_url": "https://api.github.com/repos/web-platform-tests/wpt/milestones{/number}",
                    "commits_url": "https://api.github.com/repos/web-platform-tests/wpt/commits{/sha}",
                    "releases_url": "https://api.github.com/repos/web-platform-tests/wpt/releases{/id}",
                    "issue_events_url": "https://api.github.com/repos/web-platform-tests/wpt/issues/events{/number}",
                    "archive_url": "https://api.github.com/repos/web-platform-tests/wpt/{archive_format}{/ref}",
                    "has_pages": "true",
                    "events_url": "https://api.github.com/repos/web-platform-tests/wpt/events",
                    "contributors_url": "https://api.github.com/repos/web-platform-tests/wpt/contributors",
                    "html_url": "https://github.com/web-platform-tests/wpt",
                    "compare_url": "https://api.github.com/repos/web-platform-tests/wpt/compare/{base}...{head}",
                    "language": "HTML",
                    "watchers": "1845",
                    "private": "false",
                    "forks_count": "1523",
                    "notifications_url": "https://api.github.com/repos/web-platform-tests/wpt/notifications{?since,all,participating}",
                    "has_issues": "true",
                    "ssh_url": "git@github.com:web-platform-tests/wpt.git",
                    "blobs_url": "https://api.github.com/repos/web-platform-tests/wpt/git/blobs{/sha}",
                    "master_branch": "master",
                    "forks": "1523",
                    "hooks_url": "https://api.github.com/repos/web-platform-tests/wpt/hooks",
                    "open_issues_count": "1317",
                    "comments_url": "https://api.github.com/repos/web-platform-tests/wpt/comments{/number}",
                    "name": "wpt",
                    "license": {
                        "spdx_id": "NOASSERTION",
                        "url": "null",
                        "node_id": "MDc6TGljZW5zZTA=",
                        "name": "Other",
                        "key": "other"
                    },
                    "url": "https://github.com/web-platform-tests/wpt",
                    "stargazers": "1845",
                    "created_at": "1330865891",
                    "pushed_at": "1541037488",
                    "branches_url": "https://api.github.com/repos/web-platform-tests/wpt/branches{/branch}",
                    "node_id": "MDEwOlJlcG9zaXRvcnkzNjE4MTMz",
                    "default_branch": "master",
                    "teams_url": "https://api.github.com/repos/web-platform-tests/wpt/teams",
                    "trees_url": "https://api.github.com/repos/web-platform-tests/wpt/git/trees{/sha}",
                    "languages_url": "https://api.github.com/repos/web-platform-tests/wpt/languages",
                    "git_commits_url": "https://api.github.com/repos/web-platform-tests/wpt/git/commits{/sha}",
                    "subscribers_url": "https://api.github.com/repos/web-platform-tests/wpt/subscribers",
                    "stargazers_url": "https://api.github.com/repos/web-platform-tests/wpt/stargazers",
                    "git_url": "git://github.com/web-platform-tests/wpt.git"
                },
                "organization": {
                    "issues_url": "https://api.github.com/orgs/web-platform-tests/issues",
                    "members_url": "https://api.github.com/orgs/web-platform-tests/members{/member}",
                    "description": "",
                    "public_members_url": "https://api.github.com/orgs/web-platform-tests/public_members{/member}",
                    "url": "https://api.github.com/orgs/web-platform-tests",
                    "events_url": "https://api.github.com/orgs/web-platform-tests/events",
                    "avatar_url": "https://avatars0.githubusercontent.com/u/37226233?v=4",
                    "repos_url": "https://api.github.com/orgs/web-platform-tests/repos",
                    "login": "web-platform-tests",
                    "id": 37226233,
                    "node_id": "MDEyOk9yZ2FuaXphdGlvbjM3MjI2MjMz",
                    "hooks_url": "https://api.github.com/orgs/web-platform-tests/hooks"
                },
            },
            "event": "organization",
            "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
        }
        # Wrap the payload the way the SQS consumer delivers it.
        event = {
            'tags': 'githubeventsqs'
        }
        event['details'] = message
        result, metadata = self.plugin.onMessage(event, self.metadata)
        # Shared per-section assertion helpers defined on this test class.
        self.verify_defaults(result)
        self.verify_metadata(metadata)
        self.verify_meta(message, result)
        self.verify_actor(message, result)
        self.verify_repo(message, result)
        self.verify_org(message, result)
        # Event-specific fields: action, team metadata, and the flattened
        # membership user/role/state must come straight from the payload.
        assert result['source'] == 'organization'
        assert result['details']['action'] == message['body']['action']
        assert result['details']['team_id'] == message['body']['team']['id']
        assert result['details']['team_name'] == message['body']['team']['name']
        assert result['details']['team_node_id'] == message['body']['team']['node_id']
        assert result['details']['team_permission'] == message['body']['team']['permission']
        assert result['details']['team_privacy'] == message['body']['team']['privacy']
        assert result['details']['team_slug'] == message['body']['team']['slug']
        assert result['details']['membership_type'] == message['body']['membership']['user']['type']
        assert result['details']['membership_admin'] == message['body']['membership']['user']['site_admin']
        assert result['details']['membership_node_id'] == message['body']['membership']['user']['node_id']
        assert result['details']['membership_login'] == message['body']['membership']['user']['login']
        assert result['details']['membership_id'] == message['body']['membership']['user']['id']
        assert result['details']['membership_state'] == message['body']['membership']['state']
        assert result['details']['membership_role'] == message['body']['membership']['role']
        assert result['summary'] == 'github: organization: member_added on repo: wpt team: asecretteam in org: web-platform-tests triggered by user: chromium-wpt-export-bot'
    def test_membership(self):
        """Verify normalization of a GitHub 'membership' webhook event.

        Uses a 'removed' action payload (user removed from a team) and
        asserts the team name and organization login are extracted and the
        summary is rendered as expected.
        """
        # NOTE(review): this local shadows the imported `message` plugin class.
        message = {
            "body": {
                "action": "removed",
                "scope": "team",
                "team": {
                    "name": "github",
                    "id": 3253328,
                    "node_id": "MDQ6VGVhbTMyNTMzMjg=",
                    "slug": "github",
                    "description": "Open-source team",
                    "privacy": "secret",
                    "url": "https://api.github.com/teams/3253328",
                    "html_url": "https://github.com/orgs/Octocoders/teams/github",
                    "members_url": "https://api.github.com/teams/3253328/members{/member}",
                    "repositories_url": "https://api.github.com/teams/3253328/repos",
                    "permission": "pull"
                },
                "repository": {
                    "id": 186853261,
                    "node_id": "MDEwOlJlcG9zaXRvcnkxODY4NTMyNjE=",
                    "name": "Hello-World",
                    "full_name": "Octocoders/Hello-World",
                    "private": "false",
                    "owner": {
                        "login": "Octocoders",
                        "id": 38302899,
                        "node_id": "MDEyOk9yZ2FuaXphdGlvbjM4MzAyODk5",
                        "avatar_url": "https://avatars1.githubusercontent.com/u/38302899?v=4",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/Octocoders",
                        "html_url": "https://github.com/Octocoders",
                        "followers_url": "https://api.github.com/users/Octocoders/followers",
                        "following_url": "https://api.github.com/users/Octocoders/following{/other_user}",
                        "gists_url": "https://api.github.com/users/Octocoders/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/Octocoders/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/Octocoders/subscriptions",
                        "organizations_url": "https://api.github.com/users/Octocoders/orgs",
                        "repos_url": "https://api.github.com/users/Octocoders/repos",
                        "events_url": "https://api.github.com/users/Octocoders/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/Octocoders/received_events",
                        "type": "Organization",
                        "site_admin": "false"
                    },
                    "html_url": "https://github.com/Octocoders/Hello-World",
                    "description": "null",
                    "fork": "true",
                    "url": "https://api.github.com/repos/Octocoders/Hello-World",
                    "forks_url": "https://api.github.com/repos/Octocoders/Hello-World/forks",
                    "keys_url": "https://api.github.com/repos/Octocoders/Hello-World/keys{/key_id}",
                    "collaborators_url": "https://api.github.com/repos/Octocoders/Hello-World/collaborators{/collaborator}",
                    "teams_url": "https://api.github.com/repos/Octocoders/Hello-World/teams",
                    "hooks_url": "https://api.github.com/repos/Octocoders/Hello-World/hooks",
                    "issue_events_url": "https://api.github.com/repos/Octocoders/Hello-World/issues/events{/number}",
                    "events_url": "https://api.github.com/repos/Octocoders/Hello-World/events",
                    "assignees_url": "https://api.github.com/repos/Octocoders/Hello-World/assignees{/user}",
                    "branches_url": "https://api.github.com/repos/Octocoders/Hello-World/branches{/branch}",
                    "tags_url": "https://api.github.com/repos/Octocoders/Hello-World/tags",
                    "blobs_url": "https://api.github.com/repos/Octocoders/Hello-World/git/blobs{/sha}",
                    "git_tags_url": "https://api.github.com/repos/Octocoders/Hello-World/git/tags{/sha}",
                    "git_refs_url": "https://api.github.com/repos/Octocoders/Hello-World/git/refs{/sha}",
                    "trees_url": "https://api.github.com/repos/Octocoders/Hello-World/git/trees{/sha}",
                    "statuses_url": "https://api.github.com/repos/Octocoders/Hello-World/statuses/{sha}",
                    "languages_url": "https://api.github.com/repos/Octocoders/Hello-World/languages",
                    "stargazers_url": "https://api.github.com/repos/Octocoders/Hello-World/stargazers",
                    "contributors_url": "https://api.github.com/repos/Octocoders/Hello-World/contributors",
                    "subscribers_url": "https://api.github.com/repos/Octocoders/Hello-World/subscribers",
                    "subscription_url": "https://api.github.com/repos/Octocoders/Hello-World/subscription",
                    "commits_url": "https://api.github.com/repos/Octocoders/Hello-World/commits{/sha}",
                    "git_commits_url": "https://api.github.com/repos/Octocoders/Hello-World/git/commits{/sha}",
                    "comments_url": "https://api.github.com/repos/Octocoders/Hello-World/comments{/number}",
                    "issue_comment_url": "https://api.github.com/repos/Octocoders/Hello-World/issues/comments{/number}",
                    "contents_url": "https://api.github.com/repos/Octocoders/Hello-World/contents/{+path}",
                    "compare_url": "https://api.github.com/repos/Octocoders/Hello-World/compare/{base}...{head}",
                    "merges_url": "https://api.github.com/repos/Octocoders/Hello-World/merges",
                    "archive_url": "https://api.github.com/repos/Octocoders/Hello-World/{archive_format}{/ref}",
                    "downloads_url": "https://api.github.com/repos/Octocoders/Hello-World/downloads",
                    "issues_url": "https://api.github.com/repos/Octocoders/Hello-World/issues{/number}",
                    "pulls_url": "https://api.github.com/repos/Octocoders/Hello-World/pulls{/number}",
                    "milestones_url": "https://api.github.com/repos/Octocoders/Hello-World/milestones{/number}",
                    "notifications_url": "https://api.github.com/repos/Octocoders/Hello-World/notifications{?since,all,participating}",
                    "labels_url": "https://api.github.com/repos/Octocoders/Hello-World/labels{/name}",
                    "releases_url": "https://api.github.com/repos/Octocoders/Hello-World/releases{/id}",
                    "deployments_url": "https://api.github.com/repos/Octocoders/Hello-World/deployments",
                    "created_at": "2019-05-15T15:20:42Z",
                    "updated_at": "2019-05-15T15:20:45Z",
                    "pushed_at": "2019-05-15T15:20:33Z",
                    "git_url": "git://github.com/Octocoders/Hello-World.git",
                    "ssh_url": "git@github.com:Octocoders/Hello-World.git",
                    "clone_url": "https://github.com/Octocoders/Hello-World.git",
                    "svn_url": "https://github.com/Octocoders/Hello-World",
                    "homepage": "null",
                    "size": 0,
                    "stargazers_count": 0,
                    "watchers_count": 0,
                    "language": "Ruby",
                    "has_issues": "false",
                    "has_projects": "true",
                    "has_downloads": "true",
                    "has_wiki": "true",
                    "has_pages": "false",
                    "forks_count": 0,
                    "mirror_url": "null",
                    "archived": "false",
                    "disabled": "false",
                    "open_issues_count": 0,
                    "license": "null",
                    "forks": 0,
                    "open_issues": 0,
                    "watchers": 0,
                    "default_branch": "master"
                },
                "organization": {
                    "login": "Octocoders",
                    "id": 38302899,
                    "node_id": "MDEyOk9yZ2FuaXphdGlvbjM4MzAyODk5",
                    "url": "https://api.github.com/orgs/Octocoders",
                    "repos_url": "https://api.github.com/orgs/Octocoders/repos",
                    "events_url": "https://api.github.com/orgs/Octocoders/events",
                    "hooks_url": "https://api.github.com/orgs/Octocoders/hooks",
                    "issues_url": "https://api.github.com/orgs/Octocoders/issues",
                    "members_url": "https://api.github.com/orgs/Octocoders/members{/member}",
                    "public_members_url": "https://api.github.com/orgs/Octocoders/public_members{/member}",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/38302899?v=4",
                    "description": ""
                },
                "sender": {
                    "login": "Octocoders",
                    "id": 38302899,
                    "node_id": "MDEyOk9yZ2FuaXphdGlvbjM4MzAyODk5",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/38302899?v=4",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/Octocoders",
                    "html_url": "https://github.com/Octocoders",
                    "followers_url": "https://api.github.com/users/Octocoders/followers",
                    "following_url": "https://api.github.com/users/Octocoders/following{/other_user}",
                    "gists_url": "https://api.github.com/users/Octocoders/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/Octocoders/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/Octocoders/subscriptions",
                    "organizations_url": "https://api.github.com/users/Octocoders/orgs",
                    "repos_url": "https://api.github.com/users/Octocoders/repos",
                    "events_url": "https://api.github.com/users/Octocoders/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/Octocoders/received_events",
                    "type": "Organization",
                    "site_admin": "false"
                },
            },
            "event": "membership",
            "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd"
        }
        # Wrap the payload the way the SQS consumer delivers it.
        event = {
            'tags': 'githubeventsqs'
        }
        event['details'] = message
        result, metadata = self.plugin.onMessage(event, self.metadata)
        # Shared per-section assertion helpers defined on this test class.
        # Note: no verify_repo() here, unlike the other event tests.
        self.verify_defaults(result)
        self.verify_metadata(metadata)
        self.verify_meta(message, result)
        self.verify_actor(message, result)
        self.verify_org(message, result)
        assert result['source'] == 'membership'
        assert result['details']['team_name'] == message['body']['team']['name']
        assert result['details']['org_login'] == message['body']['organization']['login']
        assert result['summary'] == 'github: membership: removed team: github in org: Octocoders triggered by user: Octocoders'
    def test_public(self):
        """Verify normalization of a GitHub 'public' webhook event.

        The 'public' event carries no 'action' field; the test only checks
        the source, the common repo fields, and the rendered summary noting
        the private-to-public change.
        """
        # NOTE(review): this local shadows the imported `message` plugin class.
        message = {
            "body": {
                "repository": {
                    "id": 135493233,
                    "node_id": "MDEwOlJlcG9zaXRvcnkxMzU0OTMyMzM=",
                    "name": "Hello-World",
                    "full_name": "Codertocat/Hello-World",
                    "owner": {
                        "name": "ACrazyCat",
                        "login": "Codertocat",
                        "id": 21031067,
                        "node_id": "MDQ6VXNlcjIxMDMxMDY3",
                        "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
                        "gravatar_id": "",
                        "url": "https://api.github.com/users/Codertocat",
                        "html_url": "https://github.com/Codertocat",
                        "followers_url": "https://api.github.com/users/Codertocat/followers",
                        "following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
                        "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
                        "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
                        "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
                        "organizations_url": "https://api.github.com/users/Codertocat/orgs",
                        "repos_url": "https://api.github.com/users/Codertocat/repos",
                        "events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
                        "received_events_url": "https://api.github.com/users/Codertocat/received_events",
                        "type": "User",
                        "site_admin": "false"
                    },
                    "private": "false",
                    "html_url": "https://github.com/Codertocat/Hello-World",
                    "description": "null",
                    "fork": "false",
                    "url": "https://api.github.com/repos/Codertocat/Hello-World",
                    "forks_url": "https://api.github.com/repos/Codertocat/Hello-World/forks",
                    "keys_url": "https://api.github.com/repos/Codertocat/Hello-World/keys{/key_id}",
                    "collaborators_url": "https://api.github.com/repos/Codertocat/Hello-World/collaborators{/collaborator}",
                    "teams_url": "https://api.github.com/repos/Codertocat/Hello-World/teams",
                    "hooks_url": "https://api.github.com/repos/Codertocat/Hello-World/hooks",
                    "issue_events_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/events{/number}",
                    "events_url": "https://api.github.com/repos/Codertocat/Hello-World/events",
                    "assignees_url": "https://api.github.com/repos/Codertocat/Hello-World/assignees{/user}",
                    "branches_url": "https://api.github.com/repos/Codertocat/Hello-World/branches{/branch}",
                    "tags_url": "https://api.github.com/repos/Codertocat/Hello-World/tags",
                    "blobs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/blobs{/sha}",
                    "git_tags_url": "https://api.github.com/repos/Codertocat/Hello-World/git/tags{/sha}",
                    "git_refs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/refs{/sha}",
                    "trees_url": "https://api.github.com/repos/Codertocat/Hello-World/git/trees{/sha}",
                    "statuses_url": "https://api.github.com/repos/Codertocat/Hello-World/statuses/{sha}",
                    "languages_url": "https://api.github.com/repos/Codertocat/Hello-World/languages",
                    "stargazers_url": "https://api.github.com/repos/Codertocat/Hello-World/stargazers",
                    "contributors_url": "https://api.github.com/repos/Codertocat/Hello-World/contributors",
                    "subscribers_url": "https://api.github.com/repos/Codertocat/Hello-World/subscribers",
                    "subscription_url": "https://api.github.com/repos/Codertocat/Hello-World/subscription",
                    "commits_url": "https://api.github.com/repos/Codertocat/Hello-World/commits{/sha}",
                    "git_commits_url": "https://api.github.com/repos/Codertocat/Hello-World/git/commits{/sha}",
                    "comments_url": "https://api.github.com/repos/Codertocat/Hello-World/comments{/number}",
                    "issue_comment_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/comments{/number}",
                    "contents_url": "https://api.github.com/repos/Codertocat/Hello-World/contents/{+path}",
                    "compare_url": "https://api.github.com/repos/Codertocat/Hello-World/compare/{base}...{head}",
                    "merges_url": "https://api.github.com/repos/Codertocat/Hello-World/merges",
                    "archive_url": "https://api.github.com/repos/Codertocat/Hello-World/{archive_format}{/ref}",
                    "downloads_url": "https://api.github.com/repos/Codertocat/Hello-World/downloads",
                    "issues_url": "https://api.github.com/repos/Codertocat/Hello-World/issues{/number}",
                    "pulls_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls{/number}",
                    "milestones_url": "https://api.github.com/repos/Codertocat/Hello-World/milestones{/number}",
                    "notifications_url": "https://api.github.com/repos/Codertocat/Hello-World/notifications{?since,all,participating}",
                    "labels_url": "https://api.github.com/repos/Codertocat/Hello-World/labels{/name}",
                    "releases_url": "https://api.github.com/repos/Codertocat/Hello-World/releases{/id}",
                    "deployments_url": "https://api.github.com/repos/Codertocat/Hello-World/deployments",
                    "created_at": "2018-05-30T20:18:04Z",
                    "updated_at": "2018-05-30T20:18:49Z",
                    "pushed_at": "2018-05-30T20:18:48Z",
                    "git_url": "git://github.com/Codertocat/Hello-World.git",
                    "ssh_url": "git@github.com:Codertocat/Hello-World.git",
                    "clone_url": "https://github.com/Codertocat/Hello-World.git",
                    "svn_url": "https://github.com/Codertocat/Hello-World",
                    "homepage": "null",
                    "size": "0",
                    "stargazers_count": "0",
                    "watchers_count": "0",
                    "language": "null",
                    "has_issues": "true",
                    "has_projects": "true",
                    "has_downloads": "true",
                    "has_wiki": "true",
                    "has_pages": "true",
                    "forks_count": "0",
                    "mirror_url": "null",
                    "archived": "false",
                    "open_issues_count": "2",
                    "license": "null",
                    "forks": "0",
                    "open_issues": "2",
                    "watchers": "0",
                    "default_branch": "master"
                },
                "sender": {
                    "login": "Codertocat",
                    "id": 21031067,
                    "node_id": "MDQ6VXNlcjIxMDMxMDY3",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/Codertocat",
                    "html_url": "https://github.com/Codertocat",
                    "followers_url": "https://api.github.com/users/Codertocat/followers",
                    "following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/Codertocat/orgs",
                    "repos_url": "https://api.github.com/users/Codertocat/repos",
                    "events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/Codertocat/received_events",
                    "type": "User",
                    "site_admin": "false"
                },
            },
            "event": "public",
            "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd",
        }
        # Wrap the payload the way the SQS consumer delivers it.
        event = {
            'tags': 'githubeventsqs'
        }
        event['details'] = message
        result, metadata = self.plugin.onMessage(event, self.metadata)
        # Shared per-section assertion helpers defined on this test class.
        self.verify_defaults(result)
        self.verify_metadata(metadata)
        self.verify_meta(message, result)
        self.verify_repo(message, result)
        assert result['source'] == 'public'
        # The summary below intentionally matches the plugin's exact output,
        # including the space before the first colon.
        assert result['summary'] == 'github : change from private to public on repo: Hello-World triggered by user: Codertocat'
def test_repository_import(self):
    """A 'repository_import' webhook is normalized with actor/org/repo fields
    and a summary naming the import status, repo, org, and triggering user."""
    # Sample 'repository_import' payload as forwarded by github-webhooks via SQS.
    message = {
        "body": {
            "status": "success",
            "repository": {
                "id": 135493233,
                "node_id": "MDEwOlJlcG9zaXRvcnkxMzU0OTMyMzM=",
                "name": "Hello-World",
                "full_name": "Codertocat/Hello-World",
                "owner": {
                    "name": "ASuperCat",
                    "login": "Codertocat",
                    "id": 21031067,
                    "node_id": "MDQ6VXNlcjIxMDMxMDY3",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/Codertocat",
                    "html_url": "https://github.com/Codertocat",
                    "followers_url": "https://api.github.com/users/Codertocat/followers",
                    "following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/Codertocat/orgs",
                    "repos_url": "https://api.github.com/users/Codertocat/repos",
                    "events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/Codertocat/received_events",
                    "type": "User",
                    "site_admin": "false"
                },
                "private": "false",
                "html_url": "https://github.com/Codertocat/Hello-World",
                "description": "null",
                "fork": "false",
                "url": "https://api.github.com/repos/Codertocat/Hello-World",
                "forks_url": "https://api.github.com/repos/Codertocat/Hello-World/forks",
                "keys_url": "https://api.github.com/repos/Codertocat/Hello-World/keys{/key_id}",
                "collaborators_url": "https://api.github.com/repos/Codertocat/Hello-World/collaborators{/collaborator}",
                "teams_url": "https://api.github.com/repos/Codertocat/Hello-World/teams",
                "hooks_url": "https://api.github.com/repos/Codertocat/Hello-World/hooks",
                "issue_events_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/events{/number}",
                "events_url": "https://api.github.com/repos/Codertocat/Hello-World/events",
                "assignees_url": "https://api.github.com/repos/Codertocat/Hello-World/assignees{/user}",
                "branches_url": "https://api.github.com/repos/Codertocat/Hello-World/branches{/branch}",
                "tags_url": "https://api.github.com/repos/Codertocat/Hello-World/tags",
                "blobs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/blobs{/sha}",
                "git_tags_url": "https://api.github.com/repos/Codertocat/Hello-World/git/tags{/sha}",
                "git_refs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/refs{/sha}",
                "trees_url": "https://api.github.com/repos/Codertocat/Hello-World/git/trees{/sha}",
                "statuses_url": "https://api.github.com/repos/Codertocat/Hello-World/statuses/{sha}",
                "languages_url": "https://api.github.com/repos/Codertocat/Hello-World/languages",
                "stargazers_url": "https://api.github.com/repos/Codertocat/Hello-World/stargazers",
                "contributors_url": "https://api.github.com/repos/Codertocat/Hello-World/contributors",
                "subscribers_url": "https://api.github.com/repos/Codertocat/Hello-World/subscribers",
                "subscription_url": "https://api.github.com/repos/Codertocat/Hello-World/subscription",
                "commits_url": "https://api.github.com/repos/Codertocat/Hello-World/commits{/sha}",
                "git_commits_url": "https://api.github.com/repos/Codertocat/Hello-World/git/commits{/sha}",
                "comments_url": "https://api.github.com/repos/Codertocat/Hello-World/comments{/number}",
                "issue_comment_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/comments{/number}",
                "contents_url": "https://api.github.com/repos/Codertocat/Hello-World/contents/{+path}",
                "compare_url": "https://api.github.com/repos/Codertocat/Hello-World/compare/{base}...{head}",
                "merges_url": "https://api.github.com/repos/Codertocat/Hello-World/merges",
                "archive_url": "https://api.github.com/repos/Codertocat/Hello-World/{archive_format}{/ref}",
                "downloads_url": "https://api.github.com/repos/Codertocat/Hello-World/downloads",
                "issues_url": "https://api.github.com/repos/Codertocat/Hello-World/issues{/number}",
                "pulls_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls{/number}",
                "milestones_url": "https://api.github.com/repos/Codertocat/Hello-World/milestones{/number}",
                "notifications_url": "https://api.github.com/repos/Codertocat/Hello-World/notifications{?since,all,participating}",
                "labels_url": "https://api.github.com/repos/Codertocat/Hello-World/labels{/name}",
                "releases_url": "https://api.github.com/repos/Codertocat/Hello-World/releases{/id}",
                "deployments_url": "https://api.github.com/repos/Codertocat/Hello-World/deployments",
                "created_at": "2018-05-30T20:18:04Z",
                "updated_at": "2018-05-30T20:18:49Z",
                "pushed_at": "2018-05-30T20:18:48Z",
                "git_url": "git://github.com/Codertocat/Hello-World.git",
                "ssh_url": "git@github.com:Codertocat/Hello-World.git",
                "clone_url": "https://github.com/Codertocat/Hello-World.git",
                "svn_url": "https://github.com/Codertocat/Hello-World",
                "homepage": "null",
                "size": "0",
                "stargazers_count": "0",
                "watchers_count": "0",
                "language": "null",
                "has_issues": "true",
                "has_projects": "true",
                "has_downloads": "true",
                "has_wiki": "true",
                "has_pages": "true",
                "forks_count": "0",
                "mirror_url": "null",
                "archived": "false",
                "open_issues_count": "2",
                "license": "null",
                "forks": "0",
                "open_issues": "2",
                "watchers": "0",
                "default_branch": "master"
            },
            "organization": {
                "login": "Octocoders",
                "id": 38302899,
                "node_id": "MDEyOk9yZ2FuaXphdGlvbjM4MzAyODk5",
                "url": "https://api.github.com/orgs/Octocoders",
                "repos_url": "https://api.github.com/orgs/Octocoders/repos",
                "events_url": "https://api.github.com/orgs/Octocoders/events",
                "hooks_url": "https://api.github.com/orgs/Octocoders/hooks",
                "issues_url": "https://api.github.com/orgs/Octocoders/issues",
                "members_url": "https://api.github.com/orgs/Octocoders/members{/member}",
                "public_members_url": "https://api.github.com/orgs/Octocoders/public_members{/member}",
                "avatar_url": "https://avatars1.githubusercontent.com/u/38302899?v=4",
                "description": ""
            },
            "sender": {
                "login": "Codertocat",
                "id": 21031067,
                "node_id": "MDQ6VXNlcjIxMDMxMDY3",
                "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
                "gravatar_id": "",
                "url": "https://api.github.com/users/Codertocat",
                "html_url": "https://github.com/Codertocat",
                "followers_url": "https://api.github.com/users/Codertocat/followers",
                "following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
                "organizations_url": "https://api.github.com/users/Codertocat/orgs",
                "repos_url": "https://api.github.com/users/Codertocat/repos",
                "events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/Codertocat/received_events",
                "type": "User",
                "site_admin": "false"
            },
        },
        "event": "repository_import",
        "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd",
    }
    # The plugin reads the raw webhook from event['details'].
    event = {
        'tags': 'githubeventsqs'
    }
    event['details'] = message
    result, metadata = self.plugin.onMessage(event, self.metadata)
    self.verify_defaults(result)
    self.verify_metadata(metadata)
    self.verify_meta(message, result)
    self.verify_actor(message, result)
    self.verify_org(message, result)
    self.verify_repo(message, result)
    assert result['source'] == 'repository_import'
    assert result['summary'] == "github: repository_import: success on repo: Hello-World in org: Octocoders triggered by user: Codertocat"
def test_release(self):
    """A 'release' webhook is normalized: release author fields are flattened
    into details and the summary names the action, repo, and triggering user."""
    # Sample 'release' payload as forwarded by github-webhooks via SQS.
    message = {
        "body": {
            "action": "published",
            "release": {
                "url": "https://api.github.com/repos/Codertocat/Hello-World/releases/11248810",
                "assets_url": "https://api.github.com/repos/Codertocat/Hello-World/releases/11248810/assets",
                "upload_url": "https://uploads.github.com/repos/Codertocat/Hello-World/releases/11248810/assets{?name,label}",
                "html_url": "https://github.com/Codertocat/Hello-World/releases/tag/0.0.1",
                "id": 11248810,
                "node_id": "MDc6UmVsZWFzZTExMjQ4ODEw",
                "tag_name": "0.0.1",
                "target_commitish": "master",
                "name": "null",
                "draft": "false",
                "author": {
                    "login": "Codertocat",
                    "id": 21031067,
                    "node_id": "MDQ6VXNlcjIxMDMxMDY3",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/Codertocat",
                    "html_url": "https://github.com/Codertocat",
                    "followers_url": "https://api.github.com/users/Codertocat/followers",
                    "following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/Codertocat/orgs",
                    "repos_url": "https://api.github.com/users/Codertocat/repos",
                    "events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/Codertocat/received_events",
                    "type": "User",
                    "site_admin": "false"
                },
                "prerelease": "false",
                "created_at": "2018-05-30T20:18:05Z",
                "published_at": "2018-05-30T20:18:44Z",
                "assets": [
                ],
                "tarball_url": "https://api.github.com/repos/Codertocat/Hello-World/tarball/0.0.1",
                "zipball_url": "https://api.github.com/repos/Codertocat/Hello-World/zipball/0.0.1",
                "body": "null"
            },
            "repository": {
                "id": 135493233,
                "node_id": "MDEwOlJlcG9zaXRvcnkxMzU0OTMyMzM=",
                "name": "Hello-World",
                "full_name": "Codertocat/Hello-World",
                "owner": {
                    "name": "ASuperCat",
                    "login": "Codertocat",
                    "id": 21031067,
                    "node_id": "MDQ6VXNlcjIxMDMxMDY3",
                    "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/Codertocat",
                    "html_url": "https://github.com/Codertocat",
                    "followers_url": "https://api.github.com/users/Codertocat/followers",
                    "following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/Codertocat/orgs",
                    "repos_url": "https://api.github.com/users/Codertocat/repos",
                    "events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/Codertocat/received_events",
                    "type": "User",
                    "site_admin": "false"
                },
                "private": "false",
                "html_url": "https://github.com/Codertocat/Hello-World",
                "description": "null",
                "fork": "false",
                "url": "https://api.github.com/repos/Codertocat/Hello-World",
                "forks_url": "https://api.github.com/repos/Codertocat/Hello-World/forks",
                "keys_url": "https://api.github.com/repos/Codertocat/Hello-World/keys{/key_id}",
                "collaborators_url": "https://api.github.com/repos/Codertocat/Hello-World/collaborators{/collaborator}",
                "teams_url": "https://api.github.com/repos/Codertocat/Hello-World/teams",
                "hooks_url": "https://api.github.com/repos/Codertocat/Hello-World/hooks",
                "issue_events_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/events{/number}",
                "events_url": "https://api.github.com/repos/Codertocat/Hello-World/events",
                "assignees_url": "https://api.github.com/repos/Codertocat/Hello-World/assignees{/user}",
                "branches_url": "https://api.github.com/repos/Codertocat/Hello-World/branches{/branch}",
                "tags_url": "https://api.github.com/repos/Codertocat/Hello-World/tags",
                "blobs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/blobs{/sha}",
                "git_tags_url": "https://api.github.com/repos/Codertocat/Hello-World/git/tags{/sha}",
                "git_refs_url": "https://api.github.com/repos/Codertocat/Hello-World/git/refs{/sha}",
                "trees_url": "https://api.github.com/repos/Codertocat/Hello-World/git/trees{/sha}",
                "statuses_url": "https://api.github.com/repos/Codertocat/Hello-World/statuses/{sha}",
                "languages_url": "https://api.github.com/repos/Codertocat/Hello-World/languages",
                "stargazers_url": "https://api.github.com/repos/Codertocat/Hello-World/stargazers",
                "contributors_url": "https://api.github.com/repos/Codertocat/Hello-World/contributors",
                "subscribers_url": "https://api.github.com/repos/Codertocat/Hello-World/subscribers",
                "subscription_url": "https://api.github.com/repos/Codertocat/Hello-World/subscription",
                "commits_url": "https://api.github.com/repos/Codertocat/Hello-World/commits{/sha}",
                "git_commits_url": "https://api.github.com/repos/Codertocat/Hello-World/git/commits{/sha}",
                "comments_url": "https://api.github.com/repos/Codertocat/Hello-World/comments{/number}",
                "issue_comment_url": "https://api.github.com/repos/Codertocat/Hello-World/issues/comments{/number}",
                "contents_url": "https://api.github.com/repos/Codertocat/Hello-World/contents/{+path}",
                "compare_url": "https://api.github.com/repos/Codertocat/Hello-World/compare/{base}...{head}",
                "merges_url": "https://api.github.com/repos/Codertocat/Hello-World/merges",
                "archive_url": "https://api.github.com/repos/Codertocat/Hello-World/{archive_format}{/ref}",
                "downloads_url": "https://api.github.com/repos/Codertocat/Hello-World/downloads",
                "issues_url": "https://api.github.com/repos/Codertocat/Hello-World/issues{/number}",
                "pulls_url": "https://api.github.com/repos/Codertocat/Hello-World/pulls{/number}",
                "milestones_url": "https://api.github.com/repos/Codertocat/Hello-World/milestones{/number}",
                "notifications_url": "https://api.github.com/repos/Codertocat/Hello-World/notifications{?since,all,participating}",
                "labels_url": "https://api.github.com/repos/Codertocat/Hello-World/labels{/name}",
                "releases_url": "https://api.github.com/repos/Codertocat/Hello-World/releases{/id}",
                "deployments_url": "https://api.github.com/repos/Codertocat/Hello-World/deployments",
                "created_at": "2018-05-30T20:18:04Z",
                "updated_at": "2018-05-30T20:18:49Z",
                "pushed_at": "2018-05-30T20:18:48Z",
                "git_url": "git://github.com/Codertocat/Hello-World.git",
                "ssh_url": "git@github.com:Codertocat/Hello-World.git",
                "clone_url": "https://github.com/Codertocat/Hello-World.git",
                "svn_url": "https://github.com/Codertocat/Hello-World",
                "homepage": "null",
                "size": "0",
                "stargazers_count": "0",
                "watchers_count": "0",
                "language": "null",
                "has_issues": "true",
                "has_projects": "true",
                "has_downloads": "true",
                "has_wiki": "true",
                "has_pages": "true",
                "forks_count": "0",
                "mirror_url": "null",
                "archived": "false",
                "open_issues_count": "2",
                "license": "null",
                "forks": "0",
                "open_issues": "2",
                "watchers": "0",
                "default_branch": "master"
            },
            "organization": {
                "login": "Octocoders",
                "id": 38302899,
                "node_id": "MDEyOk9yZ2FuaXphdGlvbjM4MzAyODk5",
                "url": "https://api.github.com/orgs/Octocoders",
                "repos_url": "https://api.github.com/orgs/Octocoders/repos",
                "events_url": "https://api.github.com/orgs/Octocoders/events",
                "hooks_url": "https://api.github.com/orgs/Octocoders/hooks",
                "issues_url": "https://api.github.com/orgs/Octocoders/issues",
                "members_url": "https://api.github.com/orgs/Octocoders/members{/member}",
                "public_members_url": "https://api.github.com/orgs/Octocoders/public_members{/member}",
                "avatar_url": "https://avatars1.githubusercontent.com/u/38302899?v=4",
                "description": ""
            },
            "sender": {
                "login": "Codertocat",
                "id": 21031067,
                "node_id": "MDQ6VXNlcjIxMDMxMDY3",
                "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
                "gravatar_id": "",
                "url": "https://api.github.com/users/Codertocat",
                "html_url": "https://github.com/Codertocat",
                "followers_url": "https://api.github.com/users/Codertocat/followers",
                "following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
                "organizations_url": "https://api.github.com/users/Codertocat/orgs",
                "repos_url": "https://api.github.com/users/Codertocat/repos",
                "events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/Codertocat/received_events",
                "type": "User",
                "site_admin": "false"
            },
        },
        "event": "release",
        "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd",
    }
    # The plugin reads the raw webhook from event['details'].
    event = {
        'tags': 'githubeventsqs'
    }
    event['details'] = message
    result, metadata = self.plugin.onMessage(event, self.metadata)
    self.verify_defaults(result)
    self.verify_metadata(metadata)
    self.verify_meta(message, result)
    self.verify_actor(message, result)
    self.verify_repo(message, result)
    assert result['source'] == 'release'
    # Release author sub-fields should be flattened into details.
    assert result['details']['release_author_login'] == message['body']['release']['author']['login']
    assert result['details']['release_author_id'] == message['body']['release']['author']['id']
    assert result['details']['release_author_node_id'] == message['body']['release']['author']['node_id']
    assert result['details']['release_author_type'] == message['body']['release']['author']['type']
    assert result['details']['release_author_site_admin'] == message['body']['release']['author']['site_admin']
    assert result['summary'] == 'github: release: published on repo: Hello-World triggered by user: Codertocat'
def test_org_block(self):
    """An 'org_block' webhook is normalized: blocked-user fields are flattened
    into details and the summary names the blocked user, org, and actor."""
    # Sample 'org_block' payload as forwarded by github-webhooks via SQS.
    message = {
        "body": {
            "action": "blocked",
            "blocked_user": {
                "login": "hacktocat",
                "id": 39652351,
                "node_id": "MDQ6VXNlcjM5NjUyMzUx",
                "avatar_url": "https://avatars2.githubusercontent.com/u/39652351?v=4",
                "gravatar_id": "",
                "url": "https://api.github.com/users/hacktocat",
                "html_url": "https://github.com/hacktocat",
                "followers_url": "https://api.github.com/users/hacktocat/followers",
                "following_url": "https://api.github.com/users/hacktocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/hacktocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/hacktocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/hacktocat/subscriptions",
                "organizations_url": "https://api.github.com/users/hacktocat/orgs",
                "repos_url": "https://api.github.com/users/hacktocat/repos",
                "events_url": "https://api.github.com/users/hacktocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/hacktocat/received_events",
                "type": "User",
                "site_admin": "false"
            },
            "organization": {
                "login": "Octocoders",
                "id": 38302899,
                "node_id": "MDEyOk9yZ2FuaXphdGlvbjM4MzAyODk5",
                "url": "https://api.github.com/orgs/Octocoders",
                "repos_url": "https://api.github.com/orgs/Octocoders/repos",
                "events_url": "https://api.github.com/orgs/Octocoders/events",
                "hooks_url": "https://api.github.com/orgs/Octocoders/hooks",
                "issues_url": "https://api.github.com/orgs/Octocoders/issues",
                "members_url": "https://api.github.com/orgs/Octocoders/members{/member}",
                "public_members_url": "https://api.github.com/orgs/Octocoders/public_members{/member}",
                "avatar_url": "https://avatars1.githubusercontent.com/u/38302899?v=4",
                "description": ""
            },
            "sender": {
                "login": "Codertocat",
                "id": 21031067,
                "node_id": "MDQ6VXNlcjIxMDMxMDY3",
                "avatar_url": "https://avatars1.githubusercontent.com/u/21031067?v=4",
                "gravatar_id": "",
                "url": "https://api.github.com/users/Codertocat",
                "html_url": "https://github.com/Codertocat",
                "followers_url": "https://api.github.com/users/Codertocat/followers",
                "following_url": "https://api.github.com/users/Codertocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/Codertocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/Codertocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/Codertocat/subscriptions",
                "organizations_url": "https://api.github.com/users/Codertocat/orgs",
                "repos_url": "https://api.github.com/users/Codertocat/repos",
                "events_url": "https://api.github.com/users/Codertocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/Codertocat/received_events",
                "type": "User",
                "site_admin": "false"
            },
        },
        "event": "org_block",
        "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd",
    }
    # The plugin reads the raw webhook from event['details'].
    event = {
        'tags': 'githubeventsqs'
    }
    event['details'] = message
    result, metadata = self.plugin.onMessage(event, self.metadata)
    self.verify_defaults(result)
    self.verify_metadata(metadata)
    self.verify_meta(message, result)
    self.verify_actor(message, result)
    assert result['source'] == 'org_block'
    # Blocked-user sub-fields should be flattened into details.
    assert result['details']['blocked_user_login'] == message['body']['blocked_user']['login']
    assert result['details']['blocked_user_id'] == message['body']['blocked_user']['id']
    assert result['details']['blocked_user_node_id'] == message['body']['blocked_user']['node_id']
    assert result['summary'] == 'github: org_block: blocked user: hacktocat in org: Octocoders triggered by user: Codertocat'
def test_installation(self):
    """An 'installation' webhook (action 'deleted') is normalized: the
    installation account fields are flattened into details and the summary
    names the action and triggering user."""
    # Sample 'installation' payload as forwarded by github-webhooks via SQS.
    message = {
        "body": {
            "action": "deleted",
            "installation": {
                "id": 2,
                "account": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": "false"
                },
                "repository_selection": "selected",
                "access_tokens_url": "https://api.github.com/installations/2/access_tokens",
                "repositories_url": "https://api.github.com/installation/repositories",
                "html_url": "https://github.com/settings/installations/2",
                "app_id": "5725",
                "target_id": "3880403",
                "target_type": "User",
                "permissions": {
                    "metadata": "read",
                    "contents": "read",
                    "issues": "write"
                },
                "events": [
                    "push",
                    "pull_request"
                ],
                "created_at": "1525109898",
                "updated_at": "1525109899",
                "single_file_name": "config.yml"
            },
            "repositories": [
                {
                    "id": "1296269",
                    "name": "Hello-World",
                    "full_name": "octocat/Hello-World",
                    "private": "false"
                }
            ],
            "sender": {
                "login": "octocat",
                "id": 1,
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": "false"
            },
        },
        "event": "installation",
        "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd",
    }
    # The plugin reads the raw webhook from event['details'].
    event = {
        'tags': 'githubeventsqs'
    }
    event['details'] = message
    result, metadata = self.plugin.onMessage(event, self.metadata)
    self.verify_defaults(result)
    self.verify_metadata(metadata)
    self.verify_meta(message, result)
    self.verify_actor(message, result)
    assert result['source'] == 'installation'
    assert result['details']['action'] == message['body']['action']
    # Installation account sub-fields should be flattened into details.
    assert result['details']['install_id'] == message['body']['installation']['account']['id']
    assert result['details']['install_account_login'] == message['body']['installation']['account']['login']
    assert result['details']['install_account_node_id'] == message['body']['installation']['account']['node_id']
    assert result['details']['install_account_type'] == message['body']['installation']['account']['type']
    assert result['details']['install_account_site_admin'] == message['body']['installation']['account']['site_admin']
    assert result['summary'] == 'github app: installation deleted triggered by user: octocat'
def test_installation_perms_accepted(self):
    """An 'installation' webhook with action 'new_permissions_accepted' is
    normalized the same way as other installation actions; the summary
    carries the action verbatim."""
    # Sample 'installation' payload as forwarded by github-webhooks via SQS.
    # Note: unlike test_installation, id fields here arrive as strings.
    message = {
        "body": {
            "action": "new_permissions_accepted",
            "installation": {
                "id": "2",
                "account": {
                    "login": "octocat",
                    "id": 1,
                    "node_id": "MDQ6VXNlcjE=",
                    "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                    "gravatar_id": "",
                    "url": "https://api.github.com/users/octocat",
                    "html_url": "https://github.com/octocat",
                    "followers_url": "https://api.github.com/users/octocat/followers",
                    "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                    "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                    "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                    "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                    "organizations_url": "https://api.github.com/users/octocat/orgs",
                    "repos_url": "https://api.github.com/users/octocat/repos",
                    "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                    "received_events_url": "https://api.github.com/users/octocat/received_events",
                    "type": "User",
                    "site_admin": "false"
                },
                "repository_selection": "selected",
                "access_tokens_url": "https://api.github.com/installations/2/access_tokens",
                "repositories_url": "https://api.github.com/installation/repositories",
                "html_url": "https://github.com/settings/installations/2",
                "app_id": "5725",
                "target_id": "3880403",
                "target_type": "User",
                "permissions": {
                    "metadata": "read",
                    "contents": "read",
                    "issues": "write"
                },
                "events": [
                    "push",
                    "pull_request"
                ],
                "created_at": "1525109898",
                "updated_at": "1525109899",
                "single_file_name": "config.yml"
            },
            "repositories": [
                {
                    "id": "1296269",
                    "name": "Hello-World",
                    "full_name": "octocat/Hello-World",
                    "private": "false"
                }
            ],
            "sender": {
                "login": "octocat",
                "id": "1",
                "node_id": "MDQ6VXNlcjE=",
                "avatar_url": "https://github.com/images/error/octocat_happy.gif",
                "gravatar_id": "",
                "url": "https://api.github.com/users/octocat",
                "html_url": "https://github.com/octocat",
                "followers_url": "https://api.github.com/users/octocat/followers",
                "following_url": "https://api.github.com/users/octocat/following{/other_user}",
                "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}",
                "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}",
                "subscriptions_url": "https://api.github.com/users/octocat/subscriptions",
                "organizations_url": "https://api.github.com/users/octocat/orgs",
                "repos_url": "https://api.github.com/users/octocat/repos",
                "events_url": "https://api.github.com/users/octocat/events{/privacy}",
                "received_events_url": "https://api.github.com/users/octocat/received_events",
                "type": "User",
                "site_admin": "false"
            },
        },
        "event": "installation",
        "request_id": "94e70998-dd79-11e8-9ba0-a8635445a8cd",
    }
    # The plugin reads the raw webhook from event['details'].
    event = {
        'tags': 'githubeventsqs'
    }
    event['details'] = message
    result, metadata = self.plugin.onMessage(event, self.metadata)
    self.verify_defaults(result)
    self.verify_metadata(metadata)
    self.verify_meta(message, result)
    self.verify_actor(message, result)
    assert result['source'] == 'installation'
    assert result['details']['action'] == message['body']['action']
    # Installation account sub-fields should be flattened into details.
    assert result['details']['install_id'] == message['body']['installation']['account']['id']
    assert result['details']['install_account_login'] == message['body']['installation']['account']['login']
    assert result['details']['install_account_node_id'] == message['body']['installation']['account']['node_id']
    assert result['details']['install_account_type'] == message['body']['installation']['account']['type']
    assert result['details']['install_account_site_admin'] == message['body']['installation']['account']['site_admin']
    assert result['summary'] == 'github app: installation new_permissions_accepted triggered by user: octocat'
|
mpl-2.0
|
vipulkanade/EventbriteDjango
|
lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py
|
197
|
10094
|
'''SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.
This needs the following packages installed:
* pyOpenSSL (tested with 0.13)
* ndg-httpsclient (tested with 0.3.2)
* pyasn1 (tested with 0.1.6)
You can install them with the following command:
pip install pyopenssl ndg-httpsclient pyasn1
To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::
try:
import urllib3.contrib.pyopenssl
urllib3.contrib.pyopenssl.inject_into_urllib3()
except ImportError:
pass
Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.
Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).
If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
Module Variables
----------------
:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
'''
from __future__ import absolute_import
try:
from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
except SyntaxError as e:
raise ImportError(e)
import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
from socket import _fileobject, timeout, error as SocketError
import ssl
import select
from .. import connection
from .. import util
__all__ = ['inject_into_urllib3', 'extract_from_urllib3']
# SNI only *really* works if we can read the subjectAltName of certificates.
HAS_SNI = SUBJ_ALT_NAME_SUPPORT

# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
    ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

# TLS 1.1 / 1.2 constants only exist on sufficiently new ssl/PyOpenSSL builds,
# so register them conditionally.
if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD

if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD

try:
    _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
except AttributeError:
    # SSLv3 support may be compiled out of the local ssl module.
    pass

# Map ssl.CERT_* verification modes to the corresponding PyOpenSSL flags.
_openssl_verify = {
    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
    ssl.CERT_REQUIRED:
        OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}

DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS

# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384

# Originals saved here so extract_from_urllib3() can undo the monkey-patching.
orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
def inject_into_urllib3():
    """Monkey-patch urllib3 so that it performs TLS via PyOpenSSL.

    Swaps in the PyOpenSSL-backed ``ssl_wrap_socket`` and advertises SNI
    support; reversible via :func:`extract_from_urllib3`.
    """
    util.HAS_SNI = HAS_SNI
    connection.ssl_wrap_socket = ssl_wrap_socket
def extract_from_urllib3():
    """Undo the monkey-patching performed by :func:`inject_into_urllib3`.

    Restores the socket-wrapping function and SNI flag that were saved at
    import time.
    """
    util.HAS_SNI = orig_util_HAS_SNI
    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
    '''ASN.1 implementation for subjectAltNames support'''

    # There is no limit to how many SAN certificates a certificate may have,
    # however this needs to have some limit so we'll set an arbitrarily high
    # limit.
    sizeSpec = univ.SequenceOf.sizeSpec + \
        constraint.ValueSizeConstraint(1, 1024)
# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
    # Return the list of dNSName entries found in *peer_cert*'s
    # subjectAltName extension (empty list when SAN support is unavailable
    # or the certificate has no such extension).
    # Search through extensions
    dns_name = []
    if not SUBJ_ALT_NAME_SUPPORT:
        return dns_name
    general_names = SubjectAltName()
    for i in range(peer_cert.get_extension_count()):
        ext = peer_cert.get_extension(i)
        ext_name = ext.get_short_name()
        # NOTE(review): on Python 3, get_short_name() returns bytes, so this
        # str comparison would never match -- confirm the target runtime.
        if ext_name != 'subjectAltName':
            continue
        # PyOpenSSL returns extension data in ASN.1 encoded form
        ext_dat = ext.get_data()
        decoded_dat = der_decoder.decode(ext_dat,
                                         asn1Spec=general_names)
        for name in decoded_dat:
            if not isinstance(name, SubjectAltName):
                continue
            for entry in range(len(name)):
                component = name.getComponentByPosition(entry)
                # Only dNSName entries are of interest; skip IP/email/etc.
                if component.getName() != 'dNSName':
                    continue
                dns_name.append(str(component.getComponent()))
    return dns_name
class WrappedSocket(object):
    '''API-compatibility wrapper for Python OpenSSL's Connection-class.

    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
    collector of pypy.
    '''

    def __init__(self, connection, socket, suppress_ragged_eofs=True):
        self.connection = connection
        self.socket = socket
        self.suppress_ragged_eofs = suppress_ragged_eofs
        # Count of live file objects created via makefile(); close() only
        # really closes the connection once this drops below one.
        self._makefile_refs = 0

    def fileno(self):
        # Delegate to the raw socket so select()/poll() work on the wrapper.
        return self.socket.fileno()

    def makefile(self, mode, bufsize=-1):
        # Each file object shares this wrapped socket; track the reference
        # so close() is deferred until every handle is gone.
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)

    def recv(self, *args, **kwargs):
        try:
            data = self.connection.recv(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            # A "ragged" EOF (peer closed without a TLS close_notify) shows
            # up as this specific SysCallError; optionally treat it as EOF.
            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
                return b''
            else:
                raise SocketError(e)
        except OpenSSL.SSL.ZeroReturnError as e:
            # Clean TLS shutdown from the peer -> end of stream.
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return b''
            else:
                raise
        except OpenSSL.SSL.WantReadError:
            # No TLS records ready yet: wait until the underlying socket is
            # readable (honouring its timeout), then retry recursively.
            rd, wd, ed = select.select(
                [self.socket], [], [], self.socket.gettimeout())
            if not rd:
                raise timeout('The read operation timed out')
            else:
                return self.recv(*args, **kwargs)
        else:
            return data

    def settimeout(self, timeout):
        return self.socket.settimeout(timeout)

    def _send_until_done(self, data):
        # Retry the TLS write until OpenSSL accepts it, blocking in select()
        # whenever the transport is not ready for writing.
        while True:
            try:
                return self.connection.send(data)
            except OpenSSL.SSL.WantWriteError:
                _, wlist, _ = select.select([], [self.socket], [],
                                            self.socket.gettimeout())
                if not wlist:
                    raise timeout()
                continue

    def sendall(self, data):
        # OpenSSL will only write 16K at a time (SSL_WRITE_BLOCKSIZE), so
        # feed it the payload in chunks until everything is sent.
        total_sent = 0
        while total_sent < len(data):
            sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
            total_sent += sent

    def shutdown(self):
        # FIXME rethrow compatible exceptions should we ever use this
        self.connection.shutdown()

    def close(self):
        # Only close for real when no makefile() handles remain outstanding.
        if self._makefile_refs < 1:
            try:
                return self.connection.close()
            except OpenSSL.SSL.Error:
                return
        else:
            self._makefile_refs -= 1

    def getpeercert(self, binary_form=False):
        x509 = self.connection.get_peer_certificate()
        if not x509:
            return x509
        if binary_form:
            return OpenSSL.crypto.dump_certificate(
                OpenSSL.crypto.FILETYPE_ASN1,
                x509)
        # Mimic the dict shape returned by the stdlib ssl module's
        # getpeercert(): subject CN plus DNS subjectAltNames.
        return {
            'subject': (
                (('commonName', x509.get_subject().CN),),
            ),
            'subjectAltName': [
                ('DNS', value)
                for value in get_subj_alt_name(x509)
            ]
        }

    def _reuse(self):
        self._makefile_refs += 1

    def _drop(self):
        if self._makefile_refs < 1:
            self.close()
        else:
            self._makefile_refs -= 1
def _verify_callback(cnx, x509, err_no, err_depth, return_code):
return err_no == 0
def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ca_cert_dir=None):
    # Drop-in replacement for urllib3's connection.ssl_wrap_socket that
    # performs the TLS handshake with PyOpenSSL (enabling SNI) and returns
    # a WrappedSocket. Raises ssl.SSLError on CA-load or handshake failure.
    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
    if certfile:
        keyfile = keyfile or certfile  # Match behaviour of the normal python ssl library
        ctx.use_certificate_file(certfile)
    if keyfile:
        ctx.use_privatekey_file(keyfile)
    if cert_reqs != ssl.CERT_NONE:
        ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
    if ca_certs or ca_cert_dir:
        try:
            ctx.load_verify_locations(ca_certs, ca_cert_dir)
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
    else:
        # No explicit trust store given: fall back to the system defaults.
        ctx.set_default_verify_paths()

    # Disable TLS compression to migitate CRIME attack (issue #309)
    OP_NO_COMPRESSION = 0x20000
    ctx.set_options(OP_NO_COMPRESSION)

    # Set list of supported ciphersuites.
    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)

    cnx = OpenSSL.SSL.Connection(ctx, sock)
    # SNI: advertise the intended hostname during the handshake.
    cnx.set_tlsext_host_name(server_hostname)
    cnx.set_connect_state()
    # Drive the handshake by hand, waiting in select() whenever OpenSSL
    # needs more data from the peer.
    while True:
        try:
            cnx.do_handshake()
        except OpenSSL.SSL.WantReadError:
            rd, _, _ = select.select([sock], [], [], sock.gettimeout())
            if not rd:
                raise timeout('select timed out')
            continue
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad handshake: %r' % e)
        break

    return WrappedSocket(cnx, sock)
|
mit
|
sammyshj/stem
|
test/unit/descriptor/networkstatus/bridge_document.py
|
10
|
4719
|
"""
Unit tests for the BridgeNetworkStatusDocument of stem.descriptor.networkstatus.
"""
import datetime
import unittest
import stem.descriptor
from stem.descriptor.networkstatus import BridgeNetworkStatusDocument
from test.unit.descriptor import get_resource
# Minimal bridge network status document with two router entries; parsed
# directly by test_document() below.
DOCUMENT = b"""\
published 2012-06-01 04:07:04
r Unnamed ABSiBVJ42z6w5Z6nAXQUFq8YVVg FI74aFuNJZZQrgln0f+OaocMd0M 2012-05-31 15:57:00 10.97.236.247 443 0
s Valid
w Bandwidth=55
p reject 1-65535
r TolFuin AFn9TveYjdtZEsgh7QsWp3qC5kU 1Sw8RPx2Tq/w+VHL+pZipiJUG5k 2012-05-31 18:12:39 10.99.47.37 80 0
s Fast Guard Running Stable Valid
w Bandwidth=32
p reject 1-65535
"""
class TestBridgeNetworkStatusDocument(unittest.TestCase):
  # Unit tests for stem's BridgeNetworkStatusDocument parser, covering a
  # real Metrics consensus file, a real key certificate, and in-memory
  # documents with and without router entries.

  def test_metrics_bridge_consensus(self):
    """
    Checks if the bridge documents from Metrics are parsed properly.
    """

    consensus_path = get_resource('bridge_network_status')

    with open(consensus_path, 'rb') as descriptor_file:
      router = next(stem.descriptor.parse_file(descriptor_file))
      # Spot-check every field of the first router entry.
      self.assertEqual('Unnamed', router.nickname)
      self.assertEqual('0014A2055278DB3EB0E59EA701741416AF185558', router.fingerprint)
      self.assertEqual('148EF8685B8D259650AE0967D1FF8E6A870C7743', router.digest)
      self.assertEqual(datetime.datetime(2012, 5, 31, 15, 57, 0), router.published)
      self.assertEqual('10.97.236.247', router.address)
      self.assertEqual(443, router.or_port)
      self.assertEqual(None, router.dir_port)

  def test_metrics_cert(self):
    """
    Checks if consensus documents from Metrics are parsed properly.
    """

    expected_identity_key = """-----BEGIN RSA PUBLIC KEY-----
MIIBigKCAYEA7cZXvDRxfjDYtr9/9UsQ852+6cmHMr8VVh8GkLwbq3RzqjkULwQ2
R9mFvG4FnqMcMKXi62rYYA3fZL1afhT804cpvyp/D3dPM8QxW88fafFAgIFP4LiD
0JYjnF8cva5qZ0nzlWnMXLb32IXSvsGSE2FRyAV0YN9a6k967LSgCfUnZ+IKMezW
1vhL9YK4QIfsDowgtVsavg63GzGmA7JvZmn77+/J5wKz11vGr7Wttf8XABbH2taX
O9j/KGBOX2OKhoF3mXfZSmUO2dV9NMwtkJ7zD///Ny6sfApWV6kVP4O9TdG3bAsl
+fHCoCKgF/jAAWzh6VckQTOPzQZaH5aMWfXrDlzFWg17MjonI+bBTD2Ex2pHczzJ
bN7coDMRH2SuOXv8wFf27KdUxZ/GcrXSRGzlRLygxqlripUanjVGN2JvrVQVr0kz
pjNjiZl2z8ZyZ5d4zQuBi074JPGgx62xAstP37v1mPw14sIWfLgY16ewYuS5bCxV
lyS28jsPht9VAgMBAAE=
-----END RSA PUBLIC KEY-----"""

    expected_signing_key = """-----BEGIN RSA PUBLIC KEY-----
MIGJAoGBAOeE3Qr1Km97gTgiB3io0EU0fqHW2ESMXVHeQuNDtCWBa0XSCEG6gx4B
ZkkHjfVWqGQ7TmmzjYP9L9uCgtoKfhSvJA2w9NUMtMl8sgZmF4lcGpXXvGY9a566
Bn+3wP0lMhb/I8CPVPX+NWEjgl1noZxo1C59SO/iALGQOpxRYgmbAgMBAAE=
-----END RSA PUBLIC KEY-----"""

    expected_key_cert = """-----BEGIN SIGNATURE-----
asvWwaMq34OfHoWUhAwh4+JDOuEUZJVIHQnedOYfQH8asS2QvW3Ma93OhrwVOC6b
FyKmTJmJsl0MJGiC7tcEOlL6knsKE4CsuIw/PEcu2Rnm+R9zWxQuMYiHvGQMoDxl
giOhLLs4LlzAAJlbfbd3hjF4STVAtTwmxYuIjb1Mq/JfAsx/wH3TLXgVZwj32w9s
zUd9KZwwLzFiiHpC+U7zh6+wRsZfo2tlpmcaP1dTSINgVbdzPJ/DOUlx9nwTCBsE
AQpUx2DpAikwrpw0zDqpQvYulcQlNLWFN/y/PkmiK8mIJk0OBMiQA7JgqWamnnk4
PwqaGv483LkBF+25JFGJmnUVve3RMc+s61+2kBcjfUMed4QaHkeCMHqlRqpfQVkk
RY22NXCwrJvSMEwiy7acC8FGysqwHRyE356+Rw6TB43g3Tno9KaHEK7MHXjSHwNs
GM9hAsAMRX9Ogqhq5UjDNqEsvDKuyVeyh7unSZEOip9Zr6K/+7VsVPNb8vfBRBjo
-----END SIGNATURE-----"""

    cert_path = get_resource('metrics_cert')

    with open(cert_path, 'rb') as cert_file:
      cert = next(stem.descriptor.parse_file(cert_file))
      # Verify every parsed attribute, including the full PEM blobs above.
      self.assertEqual(3, cert.version)
      self.assertEqual(None, cert.address)
      self.assertEqual(None, cert.dir_port)
      self.assertEqual('14C131DFC5C6F93646BE72FA1401C02A8DF2E8B4', cert.fingerprint)
      self.assertEqual(expected_identity_key, cert.identity_key)
      self.assertEqual(datetime.datetime(2008, 5, 9, 21, 13, 26), cert.published)
      self.assertEqual(datetime.datetime(2009, 5, 9, 21, 13, 26), cert.expires)
      self.assertEqual(expected_signing_key, cert.signing_key)
      self.assertEqual(None, cert.crosscert)
      self.assertEqual(expected_key_cert, cert.certification)
      self.assertEqual([], cert.get_unrecognized_lines())

  def test_empty_document(self):
    """
    Parse a document without any router status entries.
    """

    document = BridgeNetworkStatusDocument(b'published 2012-06-01 04:07:04')
    self.assertEqual(datetime.datetime(2012, 6, 1, 4, 7, 4), document.published)
    self.assertEqual({}, document.routers)
    self.assertEqual([], document.get_unrecognized_lines())

  def test_document(self):
    """
    Parse a document with router status entries.
    """

    document = BridgeNetworkStatusDocument(DOCUMENT)
    self.assertEqual(datetime.datetime(2012, 6, 1, 4, 7, 4), document.published)
    # Both routers from DOCUMENT should be present, keyed by fingerprint.
    self.assertEqual(2, len(document.routers))
    self.assertEqual(set(['Unnamed', 'TolFuin']), set([desc.nickname for desc in document.routers.values()]))
    self.assertEqual([], document.get_unrecognized_lines())
|
lgpl-3.0
|
poojavade/Genomics_Docker
|
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Pillow-2.3.0-py2.7-linux-x86_64.egg/PIL/ImagePath.py
|
41
|
1231
|
#
# The Python Imaging Library
# $Id$
#
# path interface
#
# History:
# 1996-11-04 fl Created
# 2002-04-14 fl Added documentation stub class
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996.
#
# See the README file for information on usage and redistribution.
#
from PIL import Image
# the Python class below is overridden by the C implementation.
class Path:
    # Pure-Python documentation stub for PIL's path type. Every method body
    # is a no-op: the functional implementation is written in C and replaces
    # this class via the ``Path = Image.core.path`` assignment that follows.

    def __init__(self, xy):
        pass

    ##
    # Compacts the path, by removing points that are close to each
    # other. This method modifies the path in place.

    def compact(self, distance=2):
        pass

    ##
    # Gets the bounding box.

    def getbbox(self):
        pass

    ##
    # Maps the path through a function.

    def map(self, function):
        pass

    ##
    # Converts the path to Python list.
    #
    # @param flat By default, this function returns a list of 2-tuples
    #     [(x, y), ...].  If this argument is true, it returns a flat
    #     list [x, y, ...] instead.
    # @return A list of coordinates.

    def tolist(self, flat=0):
        pass

    ##
    # Transforms the path.

    def transform(self, matrix):
        pass
# override with C implementation: the stub class above only documents the
# interface; the real path type comes from PIL's C core module.
Path = Image.core.path
|
apache-2.0
|
hatbot-team/hatbot_resources
|
preparation/resources/Resource.py
|
1
|
3889
|
from hb_res.storage import get_storage
from copy import copy
import time
__author__ = "mike"

# Trunk names that must never be registered as buildable resources
# (the abstract base class itself).
_resource_blacklist = {'Resource'}
# trunk name -> Resource subclass; populated by ResourceMeta at class creation.
_resources_by_trunk = dict()
# Trunks whose assets were already generated during this process.
_built_trunks = set()
# Trunks currently being built; used by build_deps() to detect cycles.
_building_trunks = set()
def build_deps(res_obj):
    """Build every dependency trunk declared by *res_obj*, in order.

    Asserts that each dependency is registered and that no dependency
    cycle is in progress.
    """
    assert hasattr(res_obj, 'dependencies')
    for dep_trunk in res_obj.dependencies:
        assert dep_trunk in _resources_by_trunk
        assert dep_trunk not in _building_trunks, \
            'Dependency loop encountered: {} depends on {} to be built, and vice versa'.format(
                dep_trunk, res_obj.__class__.__name__)
        _resources_by_trunk[dep_trunk]().build()
def applied_modifiers(res_obj):
    """Yield each explanation of *res_obj* after running it through the
    resource's modifier chain.

    A modifier returning ``None`` drops the entry; results already seen
    are suppressed, so the output contains unique explanations only.
    """
    seen = set()
    for raw_entry in res_obj:
        transformed = copy(raw_entry)
        for modifier in res_obj.modifiers:
            if transformed is None:
                break
            transformed = modifier(transformed)
        if transformed is None or transformed in seen:
            continue
        seen.add(transformed)
        yield transformed
def generate_asset(res_obj, out_storage):
    """Clear *out_storage* and fill it with the modified explanations of
    *res_obj*; return how many entries were written.

    Progress is echoed every 100 entries (carriage-return overwrite).
    """
    out_storage.clear()
    total = 0
    for index, entry in enumerate(applied_modifiers(res_obj)):
        if index % 100 == 0:
            print(index, end='\r')
        out_storage.add_entry(entry)
        total = index + 1
    return total
def resource_build(res_obj):
    # Build the asset for *res_obj*: dependencies first, then run the
    # modifier pipeline and persist the results through hb_res storage.
    # A trunk already built in this process is skipped.
    trunk = res_obj.trunk
    if trunk in _built_trunks:
        print("= Skipping {} generation as the resource is already built".format(trunk))
        return
    # Mark as in-progress so build_deps() can detect dependency cycles.
    _building_trunks.add(trunk)
    build_deps(res_obj)
    print("<=> Starting {} generation <=>".format(trunk))
    start = time.monotonic()
    with get_storage(trunk) as out_storage:
        count = generate_asset(res_obj, out_storage)
    end = time.monotonic()
    print("> {} generated in {} seconds".format(trunk, end - start))
    print("> {} explanations have passed the filters".format(count))
    _building_trunks.remove(trunk)
    _built_trunks.add(trunk)
class ResourceMeta(type):
    """
    metaclass for classes which represent resource package
    """
    def __new__(mcs, name, bases, dct):
        """
        we have to register resource in _registered_resources
        """
        # Class names follow the '<Trunk>Resource' convention; the trunk
        # part is the registry key.
        assert name.endswith('Resource')
        trunk = name[:-len('Resource')]
        global _resources_by_trunk
        if trunk in _resources_by_trunk.keys():
            raise KeyError('Resource with name {} is already registered'.format(name))

        # Wrap the class's __iter__ so dependencies get built before the
        # resource is iterated.
        old_iter = dct['__iter__']

        def iter_wrapped(self):
            build_deps(self)
            return old_iter(self)

        # Read-only property exposing the trunk name on instances.
        @property
        def trunk_prop(_):
            return trunk

        dct['trunk'] = trunk_prop
        dct['build'] = resource_build
        dct['__iter__'] = iter_wrapped

        res = super(ResourceMeta, mcs).__new__(mcs, name, bases, dct)
        # Blacklisted names (the abstract base) are created but not registered.
        if name not in _resource_blacklist:
            _resources_by_trunk[trunk] = res
        return res
class Resource(metaclass=ResourceMeta):
    # Abstract base for resource packages; concrete subclasses must yield
    # explanation entries from __iter__.
    def __iter__(self):
        raise NotImplementedError
def gen_resource(res_name, modifiers, dependencies=()):
    # Decorator factory: turn a generator function into a registered
    # Resource class named *res_name*, carrying the given modifier chain
    # and dependency trunks.
    def decorator(func):
        def __init__(self):
            self.modifiers = modifiers
            self.dependencies = dependencies

        def __iter__(self):
            return iter(func())

        # Creating the class through ResourceMeta also registers it.
        return ResourceMeta(res_name, tuple(), {'__iter__': __iter__, '__init__': __init__})
    return decorator
def trunks_registered():
    """Return a view of all registered resource trunk names."""
    # ``global`` is only required for assignment; reading the module-level
    # registry needs no declaration.
    return _resources_by_trunk.keys()
def resources_registered():
    """Return a view of all registered Resource classes."""
    # ``global`` is only required for assignment; reading the module-level
    # registry needs no declaration.
    return _resources_by_trunk.values()
def resource_by_trunk(name) -> Resource:
    """
    Returns resource described by its name
    :param name: name of desired resource
    :return: iterable resource as list of strings
    :raises KeyError: when no resource with that trunk name is registered
    """
    # EAFP: a single dict lookup instead of get() + None sentinel check
    # (which would also misfire if None were ever stored); the unnecessary
    # ``global`` declaration for a read-only access is dropped.
    try:
        return _resources_by_trunk[name]
    except KeyError:
        raise KeyError('Unknown resource {}'.format(name)) from None
|
mit
|
ojii/sandlib
|
lib/lib-python/2.7/test/test_descr.py
|
1
|
159616
|
import __builtin__
import sys
import types
import unittest
import popen2 # trigger early the warning from popen2.py
from copy import deepcopy
from test import test_support
class OperatorsTest(unittest.TestCase):
    # Drives every operator protocol three ways -- eval'd expression,
    # unbound method fetched from the type, and bound method fetched from
    # the instance -- and checks that all access paths agree.
    # NOTE: this file is Python 2 code (``exec stmt in d``, long literals).

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        # Operator name -> surface syntax; lowercase entries (cmp, divmod)
        # are function calls, the rest are infix operators.
        self.binops = {
            'add': '+',
            'sub': '-',
            'mul': '*',
            'div': '/',
            'divmod': 'divmod',
            'pow': '**',
            'lshift': '<<',
            'rshift': '>>',
            'and': '&',
            'xor': '^',
            'or': '|',
            'cmp': 'cmp',
            'lt': '<',
            'le': '<=',
            'eq': '==',
            'ne': '!=',
            'gt': '>',
            'ge': '>=',
        }
        # Rewrite each entry into a full expression over operands a and b.
        for name, expr in self.binops.items():
            if expr.islower():
                expr = expr + "(a, b)"
            else:
                expr = 'a %s b' % expr
            self.binops[name] = expr
        self.unops = {
            'pos': '+',
            'neg': '-',
            'abs': 'abs',
            'invert': '~',
            'int': 'int',
            'long': 'long',
            'float': 'float',
            'oct': 'oct',
            'hex': 'hex',
        }
        # Same rewrite for the unary table, over a single operand a.
        for name, expr in self.unops.items():
            if expr.islower():
                expr = expr + "(a)"
            else:
                expr = '%s a' % expr
            self.unops[name] = expr

    def unop_test(self, a, res, expr="len(a)", meth="__len__"):
        # Check a unary operator via eval, unbound method and bound method.
        d = {'a': a}
        self.assertEqual(eval(expr, d), res)
        t = type(a)
        m = getattr(t, meth)
        # Find method in parent class
        while meth not in t.__dict__:
            t = t.__bases__[0]
        # in some implementations (e.g. PyPy), 'm' can be a regular unbound
        # method object; the getattr() below obtains its underlying function.
        self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
        self.assertEqual(m(a), res)
        bm = getattr(a, meth)
        self.assertEqual(bm(), res)

    def binop_test(self, a, b, res, expr="a+b", meth="__add__"):
        # Check a binary operator the same three ways.
        d = {'a': a, 'b': b}

        # XXX Hack so this passes before 2.3 when -Qnew is specified.
        if meth == "__div__" and 1/2 == 0.5:
            meth = "__truediv__"

        if meth == '__divmod__': pass

        self.assertEqual(eval(expr, d), res)
        t = type(a)
        m = getattr(t, meth)
        while meth not in t.__dict__:
            t = t.__bases__[0]
        # in some implementations (e.g. PyPy), 'm' can be a regular unbound
        # method object; the getattr() below obtains its underlying function.
        self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
        self.assertEqual(m(a, b), res)
        bm = getattr(a, meth)
        self.assertEqual(bm(b), res)

    def ternop_test(self, a, b, c, res, expr="a[b:c]", meth="__getslice__"):
        # Check a ternary (slicing) operator.
        d = {'a': a, 'b': b, 'c': c}
        self.assertEqual(eval(expr, d), res)
        t = type(a)
        m = getattr(t, meth)
        while meth not in t.__dict__:
            t = t.__bases__[0]
        # in some implementations (e.g. PyPy), 'm' can be a regular unbound
        # method object; the getattr() below obtains its underlying function.
        self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
        self.assertEqual(m(a, b, c), res)
        bm = getattr(a, meth)
        self.assertEqual(bm(b, c), res)

    def setop_test(self, a, b, res, stmt="a+=b", meth="__iadd__"):
        # Check an in-place binary operator; deepcopy keeps the caller's
        # operand pristine between the three access paths.
        d = {'a': deepcopy(a), 'b': b}
        exec stmt in d
        self.assertEqual(d['a'], res)
        t = type(a)
        m = getattr(t, meth)
        while meth not in t.__dict__:
            t = t.__bases__[0]
        # in some implementations (e.g. PyPy), 'm' can be a regular unbound
        # method object; the getattr() below obtains its underlying function.
        self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
        d['a'] = deepcopy(a)
        m(d['a'], b)
        self.assertEqual(d['a'], res)
        d['a'] = deepcopy(a)
        bm = getattr(d['a'], meth)
        bm(b)
        self.assertEqual(d['a'], res)

    def set2op_test(self, a, b, c, res, stmt="a[b]=c", meth="__setitem__"):
        # Check a two-argument mutating operator (item assignment).
        d = {'a': deepcopy(a), 'b': b, 'c': c}
        exec stmt in d
        self.assertEqual(d['a'], res)
        t = type(a)
        m = getattr(t, meth)
        while meth not in t.__dict__:
            t = t.__bases__[0]
        # in some implementations (e.g. PyPy), 'm' can be a regular unbound
        # method object; the getattr() below obtains its underlying function.
        self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
        d['a'] = deepcopy(a)
        m(d['a'], b, c)
        self.assertEqual(d['a'], res)
        d['a'] = deepcopy(a)
        bm = getattr(d['a'], meth)
        bm(b, c)
        self.assertEqual(d['a'], res)

    def set3op_test(self, a, b, c, d, res, stmt="a[b:c]=d", meth="__setslice__"):
        # Check a three-argument mutating operator (slice assignment).
        dictionary = {'a': deepcopy(a), 'b': b, 'c': c, 'd': d}
        exec stmt in dictionary
        self.assertEqual(dictionary['a'], res)
        t = type(a)
        while meth not in t.__dict__:
            t = t.__bases__[0]
        m = getattr(t, meth)
        # in some implementations (e.g. PyPy), 'm' can be a regular unbound
        # method object; the getattr() below obtains its underlying function.
        self.assertEqual(getattr(m, 'im_func', m), t.__dict__[meth])
        dictionary['a'] = deepcopy(a)
        m(dictionary['a'], b, c, d)
        self.assertEqual(dictionary['a'], res)
        dictionary['a'] = deepcopy(a)
        bm = getattr(dictionary['a'], meth)
        bm(b, c, d)
        self.assertEqual(dictionary['a'], res)
    def test_lists(self):
        # Testing list operations...
        # Asserts are within individual test methods
        self.binop_test([1], [2], [1,2], "a+b", "__add__")
        self.binop_test([1,2,3], 2, 1, "b in a", "__contains__")
        self.binop_test([1,2,3], 4, 0, "b in a", "__contains__")
        self.binop_test([1,2,3], 1, 2, "a[b]", "__getitem__")
        self.ternop_test([1,2,3], 0, 2, [1,2], "a[b:c]", "__getslice__")
        self.setop_test([1], [2], [1,2], "a+=b", "__iadd__")
        self.setop_test([1,2], 3, [1,2,1,2,1,2], "a*=b", "__imul__")
        self.unop_test([1,2,3], 3, "len(a)", "__len__")
        self.binop_test([1,2], 3, [1,2,1,2,1,2], "a*b", "__mul__")
        self.binop_test([1,2], 3, [1,2,1,2,1,2], "b*a", "__rmul__")
        self.set2op_test([1,2], 1, 3, [1,3], "a[b]=c", "__setitem__")
        self.set3op_test([1,2,3,4], 1, 3, [5,6], [1,5,6,4], "a[b:c]=d",
                         "__setslice__")

    def test_dicts(self):
        # Testing dict operations...
        if hasattr(dict, '__cmp__'):   # PyPy has only rich comparison on dicts
            self.binop_test({1:2}, {2:1}, -1, "cmp(a,b)", "__cmp__")
        else:
            self.binop_test({1:2}, {2:1}, True, "a < b", "__lt__")
        self.binop_test({1:2,3:4}, 1, 1, "b in a", "__contains__")
        self.binop_test({1:2,3:4}, 2, 0, "b in a", "__contains__")
        self.binop_test({1:2,3:4}, 1, 2, "a[b]", "__getitem__")

        d = {1:2, 3:4}
        l1 = []
        for i in d.keys():
            l1.append(i)
        # iter(d), d.__iter__() and dict.__iter__(d) must all yield the
        # same keys as d.keys().
        l = []
        for i in iter(d):
            l.append(i)
        self.assertEqual(l, l1)
        l = []
        for i in d.__iter__():
            l.append(i)
        self.assertEqual(l, l1)
        l = []
        for i in dict.__iter__(d):
            l.append(i)
        self.assertEqual(l, l1)
        d = {1:2, 3:4}
        self.unop_test(d, 2, "len(a)", "__len__")
        # repr() round-trips through eval for plain dicts.
        self.assertEqual(eval(repr(d), {}), d)
        self.assertEqual(eval(d.__repr__(), {}), d)
        self.set2op_test({1:2,3:4}, 2, 3, {1:2,2:3,3:4}, "a[b]=c",
                         "__setitem__")
# Tests for unary and binary operators
def number_operators(self, a, b, skip=[]):
dict = {'a': a, 'b': b}
for name, expr in self.binops.items():
if name not in skip:
name = "__%s__" % name
if hasattr(a, name):
res = eval(expr, dict)
self.binop_test(a, b, res, expr, name)
for name, expr in self.unops.items():
if name not in skip:
name = "__%s__" % name
if hasattr(a, name):
res = eval(expr, dict)
self.unop_test(a, res, expr, name)
    def test_ints(self):
        # Testing int operations...
        self.number_operators(100, 3)
        # The following crashes in Python 2.2
        self.assertEqual((1).__nonzero__(), 1)
        self.assertEqual((0).__nonzero__(), 0)
        # This returns 'NotImplemented' in Python 2.2
        class C(int):
            def __add__(self, other):
                return NotImplemented
        self.assertEqual(C(5L), 5)
        try:
            C() + ""
        except TypeError:
            pass
        else:
            self.fail("NotImplemented should have caused TypeError")
        try:
            C(sys.maxint+1)
        except OverflowError:
            pass
        else:
            self.fail("should have raised OverflowError")

    def test_longs(self):
        # Testing long operations...
        self.number_operators(100L, 3L)

    def test_floats(self):
        # Testing float operations...
        self.number_operators(100.0, 3.0)

    def test_complexes(self):
        # Testing complex operations...
        # Ordering and real-number conversions are undefined for complex.
        self.number_operators(100.0j, 3.0j, skip=['lt', 'le', 'gt', 'ge',
                                                  'int', 'long', 'float'])

        class Number(complex):
            __slots__ = ['prec']
            def __new__(cls, *args, **kwds):
                result = complex.__new__(cls, *args)
                result.prec = kwds.get('prec', 12)
                return result
            def __repr__(self):
                prec = self.prec
                if self.imag == 0.0:
                    return "%.*g" % (prec, self.real)
                if self.real == 0.0:
                    return "%.*gj" % (prec, self.imag)
                return "(%.*g+%.*gj)" % (prec, self.real, prec, self.imag)
            __str__ = __repr__

        a = Number(3.14, prec=6)
        self.assertEqual(repr(a), "3.14")
        self.assertEqual(a.prec, 6)

        a = Number(a, prec=2)
        self.assertEqual(repr(a), "3.1")
        self.assertEqual(a.prec, 2)

        a = Number(234.5)
        self.assertEqual(repr(a), "234.5")
        self.assertEqual(a.prec, 12)

    @test_support.impl_detail("the module 'xxsubtype' is internal")
    def test_spam_lists(self):
        # Testing spamlist operations...
        import copy, xxsubtype as spam

        def spamlist(l, memo=None):
            import xxsubtype as spam
            return spam.spamlist(l)

        # This is an ugly hack:
        copy._deepcopy_dispatch[spam.spamlist] = spamlist

        self.binop_test(spamlist([1]), spamlist([2]), spamlist([1,2]), "a+b",
                        "__add__")
        self.binop_test(spamlist([1,2,3]), 2, 1, "b in a", "__contains__")
        self.binop_test(spamlist([1,2,3]), 4, 0, "b in a", "__contains__")
        self.binop_test(spamlist([1,2,3]), 1, 2, "a[b]", "__getitem__")
        self.ternop_test(spamlist([1,2,3]), 0, 2, spamlist([1,2]), "a[b:c]",
                         "__getslice__")
        self.setop_test(spamlist([1]), spamlist([2]), spamlist([1,2]), "a+=b",
                        "__iadd__")
        self.setop_test(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "a*=b",
                        "__imul__")
        self.unop_test(spamlist([1,2,3]), 3, "len(a)", "__len__")
        self.binop_test(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "a*b",
                        "__mul__")
        self.binop_test(spamlist([1,2]), 3, spamlist([1,2,1,2,1,2]), "b*a",
                        "__rmul__")
        self.set2op_test(spamlist([1,2]), 1, 3, spamlist([1,3]), "a[b]=c",
                         "__setitem__")
        self.set3op_test(spamlist([1,2,3,4]), 1, 3, spamlist([5,6]),
                         spamlist([1,5,6,4]), "a[b:c]=d", "__setslice__")
        # Test subclassing
        class C(spam.spamlist):
            def foo(self): return 1
        a = C()
        self.assertEqual(a, [])
        self.assertEqual(a.foo(), 1)
        a.append(100)
        self.assertEqual(a, [100])
        self.assertEqual(a.getstate(), 0)
        a.setstate(42)
        self.assertEqual(a.getstate(), 42)

    @test_support.impl_detail("the module 'xxsubtype' is internal")
    def test_spam_dicts(self):
        # Testing spamdict operations...
        import copy, xxsubtype as spam
        def spamdict(d, memo=None):
            import xxsubtype as spam
            sd = spam.spamdict()
            for k, v in d.items():
                sd[k] = v
            return sd
        # This is an ugly hack:
        copy._deepcopy_dispatch[spam.spamdict] = spamdict

        self.binop_test(spamdict({1:2}), spamdict({2:1}), -1, "cmp(a,b)",
                        "__cmp__")
        self.binop_test(spamdict({1:2,3:4}), 1, 1, "b in a", "__contains__")
        self.binop_test(spamdict({1:2,3:4}), 2, 0, "b in a", "__contains__")
        self.binop_test(spamdict({1:2,3:4}), 1, 2, "a[b]", "__getitem__")
        d = spamdict({1:2,3:4})
        l1 = []
        for i in d.keys():
            l1.append(i)
        # All iteration entry points must agree with d.keys().
        l = []
        for i in iter(d):
            l.append(i)
        self.assertEqual(l, l1)
        l = []
        for i in d.__iter__():
            l.append(i)
        self.assertEqual(l, l1)
        l = []
        for i in type(spamdict({})).__iter__(d):
            l.append(i)
        self.assertEqual(l, l1)
        straightd = {1:2, 3:4}
        spamd = spamdict(straightd)
        self.unop_test(spamd, 2, "len(a)", "__len__")
        self.unop_test(spamd, repr(straightd), "repr(a)", "__repr__")
        self.set2op_test(spamdict({1:2,3:4}), 2, 3, spamdict({1:2,2:3,3:4}),
                         "a[b]=c", "__setitem__")
        # Test subclassing
        class C(spam.spamdict):
            def foo(self): return 1
        a = C()
        self.assertEqual(a.items(), [])
        self.assertEqual(a.foo(), 1)
        a['foo'] = 'bar'
        self.assertEqual(a.items(), [('foo', 'bar')])
        self.assertEqual(a.getstate(), 0)
        a.setstate(100)
        self.assertEqual(a.getstate(), 100)
class ClassPropertiesAndMethods(unittest.TestCase):
    # Tests for subclassing builtins and for the Python 2 __metaclass__
    # machinery (custom metaclasses, MRO computation, mixed classic/new
    # style inheritance).

    def test_python_dicts(self):
        # Testing Python subclass of dict...
        self.assertTrue(issubclass(dict, dict))
        self.assertIsInstance({}, dict)
        d = dict()
        self.assertEqual(d, {})
        self.assertTrue(d.__class__ is dict)
        self.assertIsInstance(d, dict)
        class C(dict):
            # Subclass exercising __init__/__getitem__/__setitem__ overrides;
            # the outer ``self`` (the test case) is used for asserting from
            # inside the overridden methods.
            state = -1
            def __init__(self_local, *a, **kw):
                if a:
                    self.assertEqual(len(a), 1)
                    self_local.state = a[0]
                if kw:
                    for k, v in kw.items():
                        self_local[v] = k
            def __getitem__(self, key):
                return self.get(key, 0)
            def __setitem__(self_local, key, value):
                self.assertIsInstance(key, type(0))
                dict.__setitem__(self_local, key, value)
            def setstate(self, state):
                self.state = state
            def getstate(self):
                return self.state
        self.assertTrue(issubclass(C, dict))
        a1 = C(12)
        self.assertEqual(a1.state, 12)
        a2 = C(foo=1, bar=2)
        self.assertEqual(a2[1] == 'foo' and a2[2], 'bar')
        a = C()
        self.assertEqual(a.state, -1)
        self.assertEqual(a.getstate(), -1)
        a.setstate(0)
        self.assertEqual(a.state, 0)
        self.assertEqual(a.getstate(), 0)
        a.setstate(10)
        self.assertEqual(a.state, 10)
        self.assertEqual(a.getstate(), 10)
        self.assertEqual(a[42], 0)
        a[42] = 24
        self.assertEqual(a[42], 24)
        N = 50
        for i in range(N):
            a[i] = C()
            for j in range(N):
                a[i][j] = i*j
        for i in range(N):
            for j in range(N):
                self.assertEqual(a[i][j], i*j)

    def test_python_lists(self):
        # Testing Python subclass of list...
        class C(list):
            def __getitem__(self, i):
                return list.__getitem__(self, i) + 100
            def __getslice__(self, i, j):
                return (i, j)
        a = C()
        a.extend([0,1,2])
        self.assertEqual(a[0], 100)
        self.assertEqual(a[1], 101)
        self.assertEqual(a[2], 102)
        self.assertEqual(a[100:200], (100,200))

    def test_metaclass(self):
        # Testing __metaclass__...
        class C:
            __metaclass__ = type
            def __init__(self):
                self.__state = 0
            def getstate(self):
                return self.__state
            def setstate(self, state):
                self.__state = state
        a = C()
        self.assertEqual(a.getstate(), 0)
        a.setstate(10)
        self.assertEqual(a.getstate(), 10)
        class D:
            # Inline metaclass definition via a nested class.
            class __metaclass__(type):
                def myself(cls): return cls
        self.assertEqual(D.myself(), D)
        d = D()
        self.assertEqual(d.__class__, D)
        class M1(type):
            def __new__(cls, name, bases, dict):
                dict['__spam__'] = 1
                return type.__new__(cls, name, bases, dict)
        class C:
            __metaclass__ = M1
        self.assertEqual(C.__spam__, 1)
        c = C()
        self.assertEqual(c.__spam__, 1)

        class _instance(object):
            pass
        class M2(object):
            # A metaclass that is not itself a type subclass: instances of
            # its "classes" are built by hand in __call__.
            @staticmethod
            def __new__(cls, name, bases, dict):
                self = object.__new__(cls)
                self.name = name
                self.bases = bases
                self.dict = dict
                return self
            def __call__(self):
                it = _instance()
                # Early binding of methods
                for key in self.dict:
                    if key.startswith("__"):
                        continue
                    setattr(it, key, self.dict[key].__get__(it, self))
                return it
        class C:
            __metaclass__ = M2
            def spam(self):
                return 42
        self.assertEqual(C.name, 'C')
        self.assertEqual(C.bases, ())
        self.assertIn('spam', C.dict)
        c = C()
        self.assertEqual(c.spam(), 42)

        # More metaclass examples

        class autosuper(type):
            # Automatically add __super to the class
            # This trick only works for dynamic classes
            def __new__(metaclass, name, bases, dict):
                cls = super(autosuper, metaclass).__new__(metaclass,
                                                          name, bases, dict)
                # Name mangling for __super removes leading underscores
                while name[:1] == "_":
                    name = name[1:]
                if name:
                    name = "_%s__super" % name
                else:
                    name = "__super"
                setattr(cls, name, super(cls))
                return cls
        class A:
            __metaclass__ = autosuper
            def meth(self):
                return "A"
        class B(A):
            def meth(self):
                return "B" + self.__super.meth()
        class C(A):
            def meth(self):
                return "C" + self.__super.meth()
        class D(C, B):
            def meth(self):
                return "D" + self.__super.meth()
        self.assertEqual(D().meth(), "DCBA")
        class E(B, C):
            def meth(self):
                return "E" + self.__super.meth()
        self.assertEqual(E().meth(), "EBCA")

        class autoproperty(type):
            # Automatically create property attributes when methods
            # named _get_x and/or _set_x are found
            def __new__(metaclass, name, bases, dict):
                hits = {}
                for key, val in dict.iteritems():
                    if key.startswith("_get_"):
                        key = key[5:]
                        get, set = hits.get(key, (None, None))
                        get = val
                        hits[key] = get, set
                    elif key.startswith("_set_"):
                        key = key[5:]
                        get, set = hits.get(key, (None, None))
                        set = val
                        hits[key] = get, set
                for key, (get, set) in hits.iteritems():
                    dict[key] = property(get, set)
                return super(autoproperty, metaclass).__new__(metaclass,
                                                              name, bases, dict)
        class A:
            __metaclass__ = autoproperty
            def _get_x(self):
                return -self.__x
            def _set_x(self, x):
                self.__x = -x
        a = A()
        self.assertTrue(not hasattr(a, "x"))
        a.x = 12
        self.assertEqual(a.x, 12)
        self.assertEqual(a._A__x, -12)

        class multimetaclass(autoproperty, autosuper):
            # Merge of multiple cooperating metaclasses
            pass
        class A:
            __metaclass__ = multimetaclass
            def _get_x(self):
                return "A"
        class B(A):
            def _get_x(self):
                return "B" + self.__super._get_x()
        class C(A):
            def _get_x(self):
                return "C" + self.__super._get_x()
        class D(C, B):
            def _get_x(self):
                return "D" + self.__super._get_x()
        self.assertEqual(D().x, "DCBA")

        # Make sure type(x) doesn't call x.__class__.__init__
        class T(type):
            counter = 0
            def __init__(self, *args):
                T.counter += 1
        class C:
            __metaclass__ = T
        self.assertEqual(T.counter, 1)
        a = C()
        self.assertEqual(type(a), C)
        self.assertEqual(T.counter, 1)

        class C(object): pass
        c = C()
        try: c()
        except TypeError: pass
        else: self.fail("calling object w/o call method should raise "
                        "TypeError")

        # Testing code to find most derived baseclass
        class A(type):
            def __new__(*args, **kwargs):
                return type.__new__(*args, **kwargs)

        class B(object):
            pass

        class C(object):
            __metaclass__ = A

        # The most derived metaclass of D is A rather than type.
        class D(B, C):
            pass
    def test_module_subclasses(self):
        # Testing Python subclass of module...
        log = []
        MT = type(sys)
        class MM(MT):
            # Module subclass that records every attribute access in *log*.
            def __init__(self, name):
                MT.__init__(self, name)
            def __getattribute__(self, name):
                log.append(("getattr", name))
                return MT.__getattribute__(self, name)
            def __setattr__(self, name, value):
                log.append(("setattr", name, value))
                MT.__setattr__(self, name, value)
            def __delattr__(self, name):
                log.append(("delattr", name))
                MT.__delattr__(self, name)
        a = MM("a")
        a.foo = 12
        x = a.foo
        del a.foo
        self.assertEqual(log, [("setattr", "foo", 12),
                               ("getattr", "foo"),
                               ("delattr", "foo")])

        # http://python.org/sf/1174712
        try:
            class Module(types.ModuleType, str):
                pass
        except TypeError:
            pass
        else:
            self.fail("inheriting from ModuleType and str at the same time "
                      "should fail")

    def test_multiple_inheritence(self):
        # Testing multiple inheritance...
        class C(object):
            def __init__(self):
                self.__state = 0
            def getstate(self):
                return self.__state
            def setstate(self, state):
                self.__state = state
        a = C()
        self.assertEqual(a.getstate(), 0)
        a.setstate(10)
        self.assertEqual(a.getstate(), 10)
        class D(dict, C):
            def __init__(self):
                type({}).__init__(self)
                C.__init__(self)
        d = D()
        self.assertEqual(d.keys(), [])
        d["hello"] = "world"
        self.assertEqual(d.items(), [("hello", "world")])
        self.assertEqual(d["hello"], "world")
        self.assertEqual(d.getstate(), 0)
        d.setstate(10)
        self.assertEqual(d.getstate(), 10)
        self.assertEqual(D.__mro__, (D, dict, C, object))

        # SF bug #442833
        class Node(object):
            def __int__(self):
                return int(self.foo())
            def foo(self):
                return "23"
        class Frag(Node, list):
            def foo(self):
                return "42"
        self.assertEqual(Node().__int__(), 23)
        self.assertEqual(int(Node()), 23)
        self.assertEqual(Frag().__int__(), 42)
        self.assertEqual(int(Frag()), 42)

        # MI mixing classic and new-style classes.

        class A:
            x = 1

        class B(A):
            pass

        class C(A):
            x = 2

        class D(B, C):
            pass
        self.assertEqual(D.x, 1)

        # Classic MRO is preserved for a classic base class.
        class E(D, object):
            pass
        self.assertEqual(E.__mro__, (E, D, B, A, C, object))
        self.assertEqual(E.x, 1)

        # But with a mix of classic bases, their MROs are combined using
        # new-style MRO.
        class F(B, C, object):
            pass
        self.assertEqual(F.__mro__, (F, B, C, A, object))
        self.assertEqual(F.x, 2)

        # Try something else.
        class C:
            def cmethod(self):
                return "C a"
            def all_method(self):
                return "C b"

        class M1(C, object):
            def m1method(self):
                return "M1 a"
            def all_method(self):
                return "M1 b"

        self.assertEqual(M1.__mro__, (M1, C, object))
        m = M1()
        self.assertEqual(m.cmethod(), "C a")
        self.assertEqual(m.m1method(), "M1 a")
        self.assertEqual(m.all_method(), "M1 b")

        class D(C):
            def dmethod(self):
                return "D a"
            def all_method(self):
                return "D b"

        class M2(D, object):
            def m2method(self):
                return "M2 a"
            def all_method(self):
                return "M2 b"

        self.assertEqual(M2.__mro__, (M2, D, C, object))
        m = M2()
        self.assertEqual(m.cmethod(), "C a")
        self.assertEqual(m.dmethod(), "D a")
        self.assertEqual(m.m2method(), "M2 a")
        self.assertEqual(m.all_method(), "M2 b")

        class M3(M1, M2, object):
            def m3method(self):
                return "M3 a"
            def all_method(self):
                return "M3 b"
        self.assertEqual(M3.__mro__, (M3, M1, M2, D, C, object))
        m = M3()
        self.assertEqual(m.cmethod(), "C a")
        self.assertEqual(m.dmethod(), "D a")
        self.assertEqual(m.m1method(), "M1 a")
        self.assertEqual(m.m2method(), "M2 a")
        self.assertEqual(m.m3method(), "M3 a")
        self.assertEqual(m.all_method(), "M3 b")

        class Classic:
            pass
        try:
            class New(Classic):
                __metaclass__ = type
        except TypeError:
            pass
        else:
            self.fail("new class with only classic bases - shouldn't be")
def test_diamond_inheritence(self):
    """Diamond inheritance: base order decides which override wins, and
    inconsistent orders (D vs. E as joint bases) must raise TypeError."""
    # Testing multiple inheritance special cases...
    class A(object):
        def spam(self): return "A"
    self.assertEqual(A().spam(), "A")
    class B(A):
        def boo(self): return "B"
        def spam(self): return "B"
    self.assertEqual(B().spam(), "B")
    self.assertEqual(B().boo(), "B")
    class C(A):
        def boo(self): return "C"
    self.assertEqual(C().spam(), "A")
    self.assertEqual(C().boo(), "C")
    class D(B, C): pass
    self.assertEqual(D().spam(), "B")
    self.assertEqual(D().boo(), "B")
    self.assertEqual(D.__mro__, (D, B, C, A, object))
    class E(C, B): pass
    self.assertEqual(E().spam(), "B")
    self.assertEqual(E().boo(), "C")
    self.assertEqual(E.__mro__, (E, C, B, A, object))
    # MRO order disagreement: D wants B before C, E wants C before B,
    # so no consistent linearization exists.
    try:
        class F(D, E): pass
    except TypeError:
        pass
    else:
        self.fail("expected MRO order disagreement (F)")
    try:
        class G(E, D): pass
    except TypeError:
        pass
    else:
        self.fail("expected MRO order disagreement (G)")
# see thread python-dev/2002-October/029035.html
def test_ex5_from_c3_switch(self):
    """Example 5 from the C3-linearization switch discussion: verify the
    C3 MRO for Z(X, B, Y, C) with X and Y both deriving from A."""
    # Testing ex5 from C3 switch discussion...
    class A(object): pass
    class B(object): pass
    class C(object): pass
    class X(A): pass
    class Y(A): pass
    class Z(X,B,Y,C): pass
    self.assertEqual(Z.__mro__, (Z, X, B, Y, A, C, object))
# see "A Monotonic Superclass Linearization for Dylan",
# by Kim Barrett et al. (OOPSLA 1996)
def test_monotonicity(self):
    """C3 MRO monotonicity on the boat hierarchy from the Dylan paper:
    each class's MRO extends its bases' MROs without reordering them."""
    # Testing MRO monotonicity...
    class Boat(object): pass
    class DayBoat(Boat): pass
    class WheelBoat(Boat): pass
    class EngineLess(DayBoat): pass
    class SmallMultihull(DayBoat): pass
    class PedalWheelBoat(EngineLess,WheelBoat): pass
    class SmallCatamaran(SmallMultihull): pass
    class Pedalo(PedalWheelBoat,SmallCatamaran): pass
    self.assertEqual(PedalWheelBoat.__mro__,
                     (PedalWheelBoat, EngineLess, DayBoat, WheelBoat, Boat, object))
    self.assertEqual(SmallCatamaran.__mro__,
                     (SmallCatamaran, SmallMultihull, DayBoat, Boat, object))
    self.assertEqual(Pedalo.__mro__,
                     (Pedalo, PedalWheelBoat, EngineLess, SmallCatamaran,
                      SmallMultihull, DayBoat, WheelBoat, Boat, object))
# see "A Monotonic Superclass Linearization for Dylan",
# by Kim Barrett et al. (OOPSLA 1996)
def test_consistency_with_epg(self):
    """C3 consistency with the extended precedence graph (EPG), using the
    pane/mixin example from the Dylan linearization paper."""
    # Testing consistency with EPG...
    class Pane(object): pass
    class ScrollingMixin(object): pass
    class EditingMixin(object): pass
    class ScrollablePane(Pane,ScrollingMixin): pass
    class EditablePane(Pane,EditingMixin): pass
    class EditableScrollablePane(ScrollablePane,EditablePane): pass
    self.assertEqual(EditableScrollablePane.__mro__,
                     (EditableScrollablePane, ScrollablePane, EditablePane, Pane,
                      ScrollingMixin, EditingMixin, object))
def test_mro_disagreement(self):
    """Error messages raised for MRO conflicts: duplicate bases and base
    orders with no consistent C3 linearization."""
    # Testing error messages for MRO disagreement...
    mro_err_msg = """Cannot create a consistent method resolution
order (MRO) for bases """
    def raises(exc, expected, callable, *args):
        # Assert `callable(*args)` raises `exc`; the message prefix is
        # only checked on implementations that promise exact messages.
        try:
            callable(*args)
        except exc, msg:
            # the exact msg is generally considered an impl detail
            if test_support.check_impl_detail():
                if not str(msg).startswith(expected):
                    self.fail("Message %r, expected %r" %
                              (str(msg), expected))
        else:
            self.fail("Expected %s" % exc)
    class A(object): pass
    class B(A): pass
    class C(object): pass
    # Test some very simple errors
    raises(TypeError, "duplicate base class A",
           type, "X", (A, A), {})
    raises(TypeError, mro_err_msg,
           type, "X", (A, B), {})
    raises(TypeError, mro_err_msg,
           type, "X", (A, C, B), {})
    # Test a slightly more complex error
    class GridLayout(object): pass
    class HorizontalGrid(GridLayout): pass
    class VerticalGrid(GridLayout): pass
    class HVGrid(HorizontalGrid, VerticalGrid): pass
    class VHGrid(VerticalGrid, HorizontalGrid): pass
    raises(TypeError, mro_err_msg,
           type, "ConfusedGrid", (HVGrid, VHGrid), {})
def test_object_class(self):
    """Bare `object` instances: correct class, no attribute setting, no
    __dict__; a trivial subclass does get a working __dict__."""
    # Testing object class...
    a = object()
    self.assertEqual(a.__class__, object)
    self.assertEqual(type(a), object)
    b = object()
    self.assertNotEqual(a, b)
    self.assertFalse(hasattr(a, "foo"))
    try:
        a.foo = 12
    except (AttributeError, TypeError):
        pass
    else:
        self.fail("object() should not allow setting a foo attribute")
    self.assertFalse(hasattr(object(), "__dict__"))
    class Cdict(object):
        pass
    x = Cdict()
    self.assertEqual(x.__dict__, {})
    x.foo = 1
    self.assertEqual(x.foo, 1)
    self.assertEqual(x.__dict__, {'foo': 1})
def test_slots(self):
    """__slots__ behavior: no __dict__, slot get/set/delete, name
    mangling, invalid slot names, unicode slot names, instance-count
    leak checks (via __del__ counters and gc), and __del__ safety."""
    # Testing __slots__...
    class C0(object):
        __slots__ = []
    x = C0()
    self.assertFalse(hasattr(x, "__dict__"))
    self.assertFalse(hasattr(x, "foo"))
    class C1(object):
        __slots__ = ['a']
    x = C1()
    self.assertFalse(hasattr(x, "__dict__"))
    self.assertFalse(hasattr(x, "a"))  # slot exists but is unset
    x.a = 1
    self.assertEqual(x.a, 1)
    x.a = None
    self.assertEqual(x.a, None)
    del x.a
    self.assertFalse(hasattr(x, "a"))
    class C3(object):
        __slots__ = ['a', 'b', 'c']
    x = C3()
    self.assertFalse(hasattr(x, "__dict__"))
    self.assertFalse(hasattr(x, 'a'))
    self.assertFalse(hasattr(x, 'b'))
    self.assertFalse(hasattr(x, 'c'))
    x.a = 1
    x.b = 2
    x.c = 3
    self.assertEqual(x.a, 1)
    self.assertEqual(x.b, 2)
    self.assertEqual(x.c, 3)
    class C4(object):
        """Validate name mangling"""
        __slots__ = ['__a']
        def __init__(self, value):
            self.__a = value
        def get(self):
            return self.__a
    x = C4(5)
    self.assertFalse(hasattr(x, '__dict__'))
    self.assertFalse(hasattr(x, '__a'))  # visible only as _C4__a
    self.assertEqual(x.get(), 5)
    try:
        x.__a = 6
    except AttributeError:
        pass
    else:
        self.fail("Double underscored names not mangled")
    # Make sure slot names are proper identifiers
    try:
        class C(object):
            __slots__ = [None]
    except TypeError:
        pass
    else:
        self.fail("[None] slots not caught")
    try:
        class C(object):
            __slots__ = ["foo bar"]
    except TypeError:
        pass
    else:
        self.fail("['foo bar'] slots not caught")
    try:
        class C(object):
            __slots__ = ["foo\0bar"]
    except TypeError:
        pass
    else:
        self.fail("['foo\\0bar'] slots not caught")
    try:
        class C(object):
            __slots__ = ["1"]
    except TypeError:
        pass
    else:
        self.fail("['1'] slots not caught")
    try:
        class C(object):
            __slots__ = [""]
    except TypeError:
        pass
    else:
        self.fail("[''] slots not caught")
    class C(object):
        __slots__ = ["a", "a_b", "_a", "A0123456789Z"]
    # XXX(nnorwitz): was there supposed to be something tested
    # from the class above?
    # Test a single string is not expanded as a sequence.
    class C(object):
        __slots__ = "abc"
    c = C()
    c.abc = 5
    self.assertEqual(c.abc, 5)
    # Test unicode slot names
    try:
        unicode
    except NameError:
        pass
    else:
        # Test a single unicode string is not expanded as a sequence.
        class C(object):
            __slots__ = unicode("abc")
        c = C()
        c.abc = 5
        self.assertEqual(c.abc, 5)
        # _unicode_to_string used to modify slots in certain circumstances
        slots = (unicode("foo"), unicode("bar"))
        class C(object):
            __slots__ = slots
        x = C()
        x.foo = 5
        self.assertEqual(x.foo, 5)
        self.assertEqual(type(slots[0]), unicode)
        # this used to leak references
        try:
            class C(object):
                __slots__ = [unichr(128)]
        except (TypeError, UnicodeEncodeError):
            pass
        else:
            self.fail("[unichr(128)] slots not caught")
    # Test leaks
    class Counted(object):
        counter = 0    # counts the number of instances alive
        def __init__(self):
            Counted.counter += 1
        def __del__(self):
            Counted.counter -= 1
    class C(object):
        __slots__ = ['a', 'b', 'c']
    x = C()
    x.a = Counted()
    x.b = Counted()
    x.c = Counted()
    self.assertEqual(Counted.counter, 3)
    del x
    test_support.gc_collect()
    self.assertEqual(Counted.counter, 0)
    class D(C):
        pass
    x = D()
    x.a = Counted()
    x.z = Counted()  # D has a __dict__, so .z lands there
    self.assertEqual(Counted.counter, 2)
    del x
    test_support.gc_collect()
    self.assertEqual(Counted.counter, 0)
    class E(D):
        __slots__ = ['e']
    x = E()
    x.a = Counted()
    x.z = Counted()
    x.e = Counted()
    self.assertEqual(Counted.counter, 3)
    del x
    test_support.gc_collect()
    self.assertEqual(Counted.counter, 0)
    # Test cyclical leaks [SF bug 519621]
    class F(object):
        __slots__ = ['a', 'b']
    s = F()
    s.a = [Counted(), s]  # reference cycle through a slot
    self.assertEqual(Counted.counter, 1)
    s = None
    test_support.gc_collect()
    self.assertEqual(Counted.counter, 0)
    # Test lookup leaks [SF bug 572567]
    import gc
    if test_support.check_impl_detail():
        class G(object):
            def __cmp__(self, other):
                return 0
            __hash__ = None # Silence Py3k warning
        g = G()
        orig_objects = len(gc.get_objects())
        for i in xrange(10):
            g==g
        new_objects = len(gc.get_objects())
        # Comparisons must not leave new objects behind.
        self.assertEqual(orig_objects, new_objects)
    class H(object):
        __slots__ = ['a', 'b']
        def __init__(self):
            self.a = 1
            self.b = 2
        def __del__(self_):
            # self_ avoids shadowing the test's self, which the
            # assertions below close over.
            self.assertEqual(self_.a, 1)
            self.assertEqual(self_.b, 2)
    with test_support.captured_output('stderr') as s:
        h = H()
        del h
    self.assertEqual(s.getvalue(), '')  # __del__ must not print errors
    class X(object):
        __slots__ = "a"
    with self.assertRaises(AttributeError):
        del X().a  # deleting an unset slot
def test_slots_special(self):
    """'__dict__' and '__weakref__' listed inside __slots__: each enables
    only its own facility, and combining both via MI enables both."""
    # Testing __dict__ and __weakref__ in __slots__...
    class D(object):
        __slots__ = ["__dict__"]
    a = D()
    self.assertTrue(hasattr(a, "__dict__"))
    self.assertFalse(hasattr(a, "__weakref__"))
    a.foo = 42
    self.assertEqual(a.__dict__, {"foo": 42})
    class W(object):
        __slots__ = ["__weakref__"]
    a = W()
    self.assertTrue(hasattr(a, "__weakref__"))
    self.assertFalse(hasattr(a, "__dict__"))
    try:
        a.foo = 42
    except AttributeError:
        pass
    else:
        self.fail("shouldn't be allowed to set a.foo")
    class C1(W, D):
        __slots__ = []
    a = C1()
    self.assertTrue(hasattr(a, "__dict__"))
    self.assertTrue(hasattr(a, "__weakref__"))
    a.foo = 42
    self.assertEqual(a.__dict__, {"foo": 42})
    class C2(D, W):
        __slots__ = []
    a = C2()
    self.assertTrue(hasattr(a, "__dict__"))
    self.assertTrue(hasattr(a, "__weakref__"))
    a.foo = 42
    self.assertEqual(a.__dict__, {"foo": 42})
def test_slots_descriptor(self):
    """Issue 2115: a slot descriptor applied to an object of the wrong
    type must raise TypeError instead of crashing, even when the object
    passes an ABC isinstance() check."""
    # Issue2115: slot descriptors did not correctly check
    # the type of the given object
    import abc
    class MyABC:
        __metaclass__ = abc.ABCMeta
        __slots__ = "a"
    class Unrelated(object):
        pass
    MyABC.register(Unrelated)
    u = Unrelated()
    self.assertIsInstance(u, MyABC)  # virtual subclass via register()
    # This used to crash
    self.assertRaises(TypeError, MyABC.a.__set__, u, 3)
def test_metaclass_cmp(self):
    """Bug 7491: a metaclass __cmp__ must be honored when comparing the
    class object itself."""
    # See bug 7491.
    class M(type):
        def __cmp__(self, other):
            return -1
    class X(object):
        __metaclass__ = M
    self.assertTrue(X < M)  # driven by M.__cmp__ returning -1
def test_dynamics(self):
    """Post-hoc class mutation: attributes/methods/dunders added to a
    class after instance creation are visible immediately, including
    __getattr__/__setattr__ hooks and numeric dunders on subclasses."""
    # Testing class attribute propagation...
    class D(object):
        pass
    class E(D):
        pass
    class F(D):
        pass
    D.foo = 1
    self.assertEqual(D.foo, 1)
    # Test that dynamic attributes are inherited
    self.assertEqual(E.foo, 1)
    self.assertEqual(F.foo, 1)
    # Test dynamic instances
    class C(object):
        pass
    a = C()
    self.assertFalse(hasattr(a, "foobar"))
    C.foobar = 2
    self.assertEqual(a.foobar, 2)
    C.method = lambda self: 42
    self.assertEqual(a.method(), 42)
    C.__repr__ = lambda self: "C()"
    self.assertEqual(repr(a), "C()")
    C.__int__ = lambda self: 100
    self.assertEqual(int(a), 100)
    self.assertEqual(a.foobar, 2)
    self.assertFalse(hasattr(a, "spam"))
    def mygetattr(self, name):
        if name == "spam":
            return "spam"
        raise AttributeError
    C.__getattr__ = mygetattr
    self.assertEqual(a.spam, "spam")
    a.new = 12
    self.assertEqual(a.new, 12)
    def mysetattr(self, name, value):
        if name == "spam":
            raise AttributeError
        return object.__setattr__(self, name, value)
    C.__setattr__ = mysetattr
    try:
        a.spam = "not spam"
    except AttributeError:
        pass
    else:
        self.fail("expected AttributeError")
    self.assertEqual(a.spam, "spam")  # still served by __getattr__
    class D(C):
        pass
    d = D()
    d.foo = 1  # D inherits mysetattr; "foo" is allowed through
    self.assertEqual(d.foo, 1)
    # Test handling of int*seq and seq*int
    class I(int):
        pass
    self.assertEqual("a"*I(2), "aa")
    self.assertEqual(I(2)*"a", "aa")
    self.assertEqual(2*I(3), 6)
    self.assertEqual(I(3)*2, 6)
    self.assertEqual(I(3)*I(2), 6)
    # Test handling of long*seq and seq*long
    class L(long):
        pass
    self.assertEqual("a"*L(2L), "aa")
    self.assertEqual(L(2L)*"a", "aa")
    self.assertEqual(2*L(3), 6)
    self.assertEqual(L(3)*2, 6)
    self.assertEqual(L(3)*L(2), 6)
    # Test comparison of classes with dynamic metaclasses
    class dynamicmetaclass(type):
        pass
    class someclass:
        __metaclass__ = dynamicmetaclass
    self.assertNotEqual(someclass, object)
def test_errors(self):
    """Illegal class definitions must raise TypeError: conflicting
    builtin layouts, non-type bases, bad __slots__ values, and
    unresolvable metaclass combinations."""
    # Testing errors...
    try:
        # list and dict have incompatible instance layouts.
        class C(list, dict):
            pass
    except TypeError:
        pass
    else:
        self.fail("inheritance from both list and dict should be illegal")
    try:
        class C(object, None):
            pass
    except TypeError:
        pass
    else:
        self.fail("inheritance from non-type should be illegal")
    class Classic:
        pass
    try:
        class C(type(len)):
            pass
    except TypeError:
        pass
    else:
        self.fail("inheritance from CFunction should be illegal")
    try:
        class C(object):
            __slots__ = 1
    except TypeError:
        pass
    else:
        self.fail("__slots__ = 1 should be illegal")
    try:
        class C(object):
            __slots__ = [1]
    except TypeError:
        pass
    else:
        self.fail("__slots__ = [1] should be illegal")
    class M1(type):
        pass
    class M2(type):
        pass
    class A1(object):
        __metaclass__ = M1
    class A2(object):
        __metaclass__ = M2
    try:
        # Neither M1 nor M2 is a subclass of the other, so no most
        # derived metaclass exists for B.
        class B(A1, A2):
            pass
    except TypeError:
        pass
    else:
        self.fail("finding the most derived metaclass should have failed")
def test_classmethods(self):
    """classmethod(): binding to class and instance, inheritance binding
    to the subclass, descriptor __get__, super() interaction, and
    rejection of non-callables/keyword args."""
    # Testing class methods...
    class C(object):
        def foo(*a): return a
        goo = classmethod(foo)
    c = C()
    self.assertEqual(C.goo(1), (C, 1))
    self.assertEqual(c.goo(1), (C, 1))
    self.assertEqual(c.foo(1), (c, 1))
    class D(C):
        pass
    d = D()
    self.assertEqual(D.goo(1), (D, 1))  # bound to the subclass
    self.assertEqual(d.goo(1), (D, 1))
    self.assertEqual(d.foo(1), (d, 1))
    self.assertEqual(D.foo(d, 1), (d, 1))
    # Test for a specific crash (SF bug 528132)
    def f(cls, arg): return (cls, arg)
    ff = classmethod(f)
    self.assertEqual(ff.__get__(0, int)(42), (int, 42))
    self.assertEqual(ff.__get__(0)(42), (int, 42))  # owner inferred from obj
    # Test super() with classmethods (SF bug 535444)
    self.assertEqual(C.goo.im_self, C)
    self.assertEqual(D.goo.im_self, D)
    self.assertEqual(super(D,D).goo.im_self, D)
    self.assertEqual(super(D,d).goo.im_self, D)
    self.assertEqual(super(D,D).goo(), (D,))
    self.assertEqual(super(D,d).goo(), (D,))
    # Verify that a non-callable will raise
    meth = classmethod(1).__get__(1)
    self.assertRaises(TypeError, meth)
    # Verify that classmethod() doesn't allow keyword args
    try:
        classmethod(f, kw=1)
    except TypeError:
        pass
    else:
        self.fail("classmethod shouldn't accept keyword args")
@test_support.impl_detail("the module 'xxsubtype' is internal")
def test_classmethods_in_c(self):
    """C-implemented class methods (xxsubtype.spamlist.classmeth) must
    receive the class plus forwarded positional/keyword arguments,
    whether called on the class or on an instance."""
    # Testing C-based class methods...
    import xxsubtype as spam
    a = (1, 2, 3)
    d = {'abc': 123}
    x, a1, d1 = spam.spamlist.classmeth(*a, **d)
    self.assertEqual(x, spam.spamlist)
    self.assertEqual(a, a1)
    self.assertEqual(d, d1)
    x, a1, d1 = spam.spamlist().classmeth(*a, **d)
    self.assertEqual(x, spam.spamlist)
    self.assertEqual(a, a1)
    self.assertEqual(d, d1)
def test_staticmethods(self):
    """staticmethod(): no implicit first argument whether called on the
    class, an instance, or an inheriting subclass."""
    # Testing static methods...
    class C(object):
        def foo(*a): return a
        goo = staticmethod(foo)
    c = C()
    self.assertEqual(C.goo(1), (1,))
    self.assertEqual(c.goo(1), (1,))
    self.assertEqual(c.foo(1), (c, 1,))  # plain method still binds self
    class D(C):
        pass
    d = D()
    self.assertEqual(D.goo(1), (1,))
    self.assertEqual(d.goo(1), (1,))
    self.assertEqual(d.foo(1), (d, 1))
    self.assertEqual(D.foo(d, 1), (d, 1))
@test_support.impl_detail("the module 'xxsubtype' is internal")
def test_staticmethods_in_c(self):
    """C-implemented static methods (xxsubtype.spamlist.staticmeth) must
    get no implicit first argument (None placeholder) and must forward
    positional and keyword arguments, on both class and instance calls."""
    # Testing C-based static methods...
    import xxsubtype as spam
    a = (1, 2, 3)
    d = {"abc": 123}
    x, a1, d1 = spam.spamlist.staticmeth(*a, **d)
    self.assertEqual(x, None)
    self.assertEqual(a, a1)
    self.assertEqual(d, d1)
    # Bug fix: this call's result was previously unpacked into d2 while
    # the assertion kept checking the stale d1 from the class-level call,
    # so the instance call's keyword dict was never actually verified.
    x, a1, d1 = spam.spamlist().staticmeth(*a, **d)
    self.assertEqual(x, None)
    self.assertEqual(a, a1)
    self.assertEqual(d, d1)
def test_classic(self):
    """classmethod() applied inside classic (old-style) classes behaves
    the same as in new-style classes, and unbound methods compare equal
    across unrelated classic classes."""
    # Testing classic classes...
    class C:
        def foo(*a): return a
        goo = classmethod(foo)
    c = C()
    self.assertEqual(C.goo(1), (C, 1))
    self.assertEqual(c.goo(1), (C, 1))
    self.assertEqual(c.foo(1), (c, 1))
    class D(C):
        pass
    d = D()
    self.assertEqual(D.goo(1), (D, 1))
    self.assertEqual(d.goo(1), (D, 1))
    self.assertEqual(d.foo(1), (d, 1))
    self.assertEqual(D.foo(d, 1), (d, 1))
    class E: # *not* subclassing from C
        foo = C.foo
    self.assertEqual(E().foo, C.foo) # i.e., unbound
    self.assertTrue(repr(C.foo.__get__(C())).startswith("<bound method "))
def test_compattr(self):
    """A hand-rolled property-like descriptor (get/set/delete callbacks)
    wired to name-mangled accessor functions; the getter also mutates
    state so consecutive reads differ."""
    # Testing computed attributes...
    class C(object):
        class computed_attribute(object):
            def __init__(self, get, set=None, delete=None):
                self.__get = get
                self.__set = set
                self.__delete = delete
            def __get__(self, obj, type=None):
                return self.__get(obj)
            def __set__(self, obj, value):
                return self.__set(obj, value)
            def __delete__(self, obj):
                return self.__delete(obj)
        def __init__(self):
            self.__x = 0
        def __get_x(self):
            # Post-increment: returns the old value, then bumps it.
            x = self.__x
            self.__x = x+1
            return x
        def __set_x(self, x):
            self.__x = x
        def __delete_x(self):
            del self.__x
        # The mangled names are visible here because we are still inside
        # the class body of C.
        x = computed_attribute(__get_x, __set_x, __delete_x)
    a = C()
    self.assertEqual(a.x, 0)
    self.assertEqual(a.x, 1)  # getter incremented on the previous read
    a.x = 10
    self.assertEqual(a.x, 10)
    self.assertEqual(a.x, 11)
    del a.x
    self.assertEqual(hasattr(a, 'x'), 0)
def test_newslots(self):
    """Overriding __new__ on a list subclass: attributes set in __new__
    are visible in __init__, and the override is inherited."""
    # Testing __new__ slot override...
    class C(list):
        def __new__(cls):
            self = list.__new__(cls)
            self.foo = 1
            return self
        def __init__(self):
            # Runs after __new__, so self.foo already exists.
            self.foo = self.foo + 2
    a = C()
    self.assertEqual(a.foo, 3)
    self.assertEqual(a.__class__, C)
    class D(C):
        pass
    b = D()
    self.assertEqual(b.foo, 3)
    self.assertEqual(b.__class__, D)
def test_altmro(self):
    """type.mro() and metaclass overrides of mro(): a reversed MRO is
    honored for lookup, while non-class and non-sequence returns (or
    MROs that would make instances unsafe) raise TypeError."""
    # Testing mro() and overriding it...
    class A(object):
        def f(self): return "A"
    class B(A):
        pass
    class C(A):
        def f(self): return "C"
    class D(B, C):
        pass
    self.assertEqual(D.mro(), [D, B, C, A, object])
    self.assertEqual(D.__mro__, (D, B, C, A, object))
    self.assertEqual(D().f(), "C")
    class PerverseMetaType(type):
        def mro(cls):
            # Invert the normal C3 order; attribute lookup then finds
            # base-class definitions first.
            L = type.mro(cls)
            L.reverse()
            return L
    class X(D,B,C,A):
        __metaclass__ = PerverseMetaType
    self.assertEqual(X.__mro__, (object, A, C, B, D, X))
    self.assertEqual(X().f(), "A")
    try:
        class X(object):
            class __metaclass__(type):
                def mro(self):
                    return [self, dict, object]
        # In CPython, the class creation above already raises
        # TypeError, as a protection against the fact that
        # instances of X would segfault it.  In other Python
        # implementations it would be ok to let the class X
        # be created, but instead get a clean TypeError on the
        # __setitem__ below.
        x = object.__new__(X)
        x[5] = 6
    except TypeError:
        pass
    else:
        self.fail("devious mro() return not caught")
    try:
        class X(object):
            class __metaclass__(type):
                def mro(self):
                    return [1]
    except TypeError:
        pass
    else:
        self.fail("non-class mro() return not caught")
    try:
        class X(object):
            class __metaclass__(type):
                def mro(self):
                    return 1
    except TypeError:
        pass
    else:
        self.fail("non-sequence mro() return not caught")
def test_overloading(self):
    """Operator-overloading hooks on a new-style class: attribute,
    item, and (Py2-only) slice protocols all route through the
    corresponding dunder methods."""
    # Testing operator overloading...
    class B(object):
        "Intermediate class because object doesn't have a __setattr__"
    class C(B):
        def __getattr__(self, name):
            if name == "foo":
                return ("getattr", name)
            else:
                raise AttributeError
        def __setattr__(self, name, value):
            if name == "foo":
                self.setattr = (name, value)
            else:
                return B.__setattr__(self, name, value)
        def __delattr__(self, name):
            if name == "foo":
                self.delattr = name
            else:
                return B.__delattr__(self, name)
        def __getitem__(self, key):
            return ("getitem", key)
        def __setitem__(self, key, value):
            self.setitem = (key, value)
        def __delitem__(self, key):
            self.delitem = key
        # __get/set/delslice__ only exist in Python 2; a[i:j] uses them.
        def __getslice__(self, i, j):
            return ("getslice", i, j)
        def __setslice__(self, i, j, value):
            self.setslice = (i, j, value)
        def __delslice__(self, i, j):
            self.delslice = (i, j)
    a = C()
    self.assertEqual(a.foo, ("getattr", "foo"))
    a.foo = 12
    self.assertEqual(a.setattr, ("foo", 12))
    del a.foo
    self.assertEqual(a.delattr, "foo")
    self.assertEqual(a[12], ("getitem", 12))
    a[12] = 21
    self.assertEqual(a.setitem, (12, 21))
    del a[12]
    self.assertEqual(a.delitem, 12)
    self.assertEqual(a[0:10], ("getslice", 0, 10))
    a[0:10] = "foo"
    self.assertEqual(a.setslice, (0, 10, "foo"))
    del a[0:10]
    self.assertEqual(a.delslice, (0, 10))
def test_methods(self):
    """Method objects copied between classes: an unbound method rebinds
    to the new receiver, while a bound method keeps its original
    instance."""
    # Testing methods...
    class C(object):
        def __init__(self, x):
            self.x = x
        def foo(self):
            return self.x
    c1 = C(1)
    self.assertEqual(c1.foo(), 1)
    class D(C):
        boo = C.foo   # unbound: rebinds to the D instance
        goo = c1.foo  # bound to c1: always returns c1.x
    d2 = D(2)
    self.assertEqual(d2.foo(), 2)
    self.assertEqual(d2.boo(), 2)
    self.assertEqual(d2.goo(), 1)
    class E(object):
        foo = C.foo
    self.assertEqual(E().foo, C.foo) # i.e., unbound
    self.assertTrue(repr(C.foo.__get__(C(1))).startswith("<bound method "))
def test_special_method_lookup(self):
    """Implicit special-method invocation bypasses __getattr__ and
    __getattribute__ but still honors descriptors on the type: for each
    special method, check (1) the plain function works, (2) a descriptor
    wrapping it is invoked via __get__, (3) a raising descriptor
    propagates its exception."""
    # The lookup of special methods bypasses __getattr__ and
    # __getattribute__, but they still can be descriptors.

    # Helper callables used as special-method implementations below.
    def run_context(manager):
        with manager:
            pass
    def iden(self):
        return self
    def hello(self):
        return "hello"
    def empty_seq(self):
        return []
    def zero(self):
        return 0
    def complex_num(self):
        return 1j
    def stop(self):
        raise StopIteration
    def return_true(self, thing=None):
        return True
    def do_isinstance(obj):
        return isinstance(int, obj)
    def do_issubclass(obj):
        return issubclass(int, obj)
    def swallow(*args):
        pass
    def do_dict_missing(checker):
        class DictSub(checker.__class__, dict):
            pass
        self.assertEqual(DictSub()["hi"], 4)
    def some_number(self_, key):
        self.assertEqual(key, "hi")
        return 4
    def format_impl(self, spec):
        return "hello"

    # It would be nice to have every special method tested here, but I'm
    # only listing the ones I can remember outside of typeobject.c, since it
    # does it right.
    # Each entry: (dunder name, driver that triggers it, implementation,
    # attribute names __getattribute__ is allowed to see, extra class env).
    specials = [
        ("__unicode__", unicode, hello, set(), {}),
        ("__reversed__", reversed, empty_seq, set(), {}),
        ("__length_hint__", list, zero, set(),
         {"__iter__" : iden, "next" : stop}),
        ("__sizeof__", sys.getsizeof, zero, set(), {}),
        ("__instancecheck__", do_isinstance, return_true, set(), {}),
        ("__missing__", do_dict_missing, some_number,
         set(("__class__",)), {}),
        ("__subclasscheck__", do_issubclass, return_true,
         set(("__bases__",)), {}),
        ("__enter__", run_context, iden, set(), {"__exit__" : swallow}),
        ("__exit__", run_context, swallow, set(), {"__enter__" : iden}),
        ("__complex__", complex, complex_num, set(), {}),
        ("__format__", format, format_impl, set(), {}),
        ("__dir__", dir, empty_seq, set(), {}),
        ]

    class Checker(object):
        # Fails the test if either attribute hook is consulted for the
        # special lookup (except for explicitly whitelisted names).
        def __getattr__(self, attr, test=self):
            test.fail("__getattr__ called with {0}".format(attr))
        def __getattribute__(self, attr, test=self):
            if attr not in ok:
                test.fail("__getattribute__ called with {0}".format(attr))
            return object.__getattribute__(self, attr)
    class SpecialDescr(object):
        # Records that the descriptor protocol ran, then delegates.
        def __init__(self, impl):
            self.impl = impl
        def __get__(self, obj, owner):
            record.append(1)
            return self.impl.__get__(obj, owner)
    class MyException(Exception):
        pass
    class ErrDescr(object):
        def __get__(self, obj, owner):
            raise MyException

    for name, runner, meth_impl, ok, env in specials:
        if name == '__length_hint__' or name == '__sizeof__':
            if not test_support.check_impl_detail():
                continue

        # Phase 1: plain implementation is found on the type.
        class X(Checker):
            pass
        for attr, obj in env.iteritems():
            setattr(X, attr, obj)
        setattr(X, name, meth_impl)
        runner(X())

        # Phase 2: a descriptor on the type has its __get__ invoked.
        record = []
        class X(Checker):
            pass
        for attr, obj in env.iteritems():
            setattr(X, attr, obj)
        setattr(X, name, SpecialDescr(meth_impl))
        runner(X())
        self.assertEqual(record, [1], name)

        # Phase 3: a raising descriptor propagates its exception.
        class X(Checker):
            pass
        for attr, obj in env.iteritems():
            setattr(X, attr, obj)
        setattr(X, name, ErrDescr())
        try:
            runner(X())
        except MyException:
            pass
        else:
            self.fail("{0!r} didn't raise".format(name))
def test_specials(self):
    """Default vs. overridden special operators (__nonzero__, __hash__,
    __eq__/__ne__/__cmp__, __str__/__repr__, __contains__) on both
    statically and dynamically defined classes, plus a guard that
    cross-type __cmp__ calls are rejected."""
    # Testing special operators...
    # Test operators like __hash__ for which a built-in default exists

    # Test the default behavior for static classes
    class C(object):
        def __getitem__(self, i):
            if 0 <= i < 10: return i
            raise IndexError
    c1 = C()
    c2 = C()
    self.assertTrue(not not c1) # What?
    self.assertNotEqual(id(c1), id(c2))
    hash(c1)
    hash(c2)
    # Default ordering falls back to identity comparison.
    self.assertEqual(cmp(c1, c2), cmp(id(c1), id(c2)))
    self.assertEqual(c1, c1)
    self.assertTrue(c1 != c2)
    self.assertTrue(not c1 != c1)
    self.assertTrue(not c1 == c2)
    # Note that the module name appears in str/repr, and that varies
    # depending on whether this test is run standalone or from a framework.
    self.assertTrue(str(c1).find('C object at ') >= 0)
    self.assertEqual(str(c1), repr(c1))
    # Without __contains__, `in` iterates via __getitem__.
    self.assertNotIn(-1, c1)
    for i in range(10):
        self.assertIn(i, c1)
    self.assertNotIn(10, c1)
    # Test the default behavior for dynamic classes
    class D(object):
        def __getitem__(self, i):
            if 0 <= i < 10: return i
            raise IndexError
    d1 = D()
    d2 = D()
    self.assertTrue(not not d1)
    self.assertNotEqual(id(d1), id(d2))
    hash(d1)
    hash(d2)
    self.assertEqual(cmp(d1, d2), cmp(id(d1), id(d2)))
    self.assertEqual(d1, d1)
    self.assertNotEqual(d1, d2)
    self.assertTrue(not d1 != d1)
    self.assertTrue(not d1 == d2)
    # Note that the module name appears in str/repr, and that varies
    # depending on whether this test is run standalone or from a framework.
    self.assertTrue(str(d1).find('D object at ') >= 0)
    self.assertEqual(str(d1), repr(d1))
    self.assertNotIn(-1, d1)
    for i in range(10):
        self.assertIn(i, d1)
    self.assertNotIn(10, d1)
    # Test overridden behavior for static classes
    class Proxy(object):
        def __init__(self, x):
            self.x = x
        def __nonzero__(self):
            return not not self.x
        def __hash__(self):
            return hash(self.x)
        def __eq__(self, other):
            return self.x == other
        def __ne__(self, other):
            return self.x != other
        def __cmp__(self, other):
            return cmp(self.x, other.x)
        def __str__(self):
            return "Proxy:%s" % self.x
        def __repr__(self):
            return "Proxy(%r)" % self.x
        def __contains__(self, value):
            return value in self.x
    p0 = Proxy(0)
    p1 = Proxy(1)
    p_1 = Proxy(-1)
    self.assertFalse(p0)
    self.assertTrue(not not p1)
    self.assertEqual(hash(p0), hash(0))
    self.assertEqual(p0, p0)
    self.assertNotEqual(p0, p1)
    self.assertTrue(not p0 != p0)
    self.assertEqual(not p0, p1)  # not p0 is True; Proxy(1).__eq__ sees True == 1
    self.assertEqual(cmp(p0, p1), -1)
    self.assertEqual(cmp(p0, p0), 0)
    self.assertEqual(cmp(p0, p_1), 1)
    self.assertEqual(str(p0), "Proxy:0")
    self.assertEqual(repr(p0), "Proxy(0)")
    p10 = Proxy(range(10))
    self.assertNotIn(-1, p10)
    for i in range(10):
        self.assertIn(i, p10)
    self.assertNotIn(10, p10)
    # Test overridden behavior for dynamic classes
    class DProxy(object):
        def __init__(self, x):
            self.x = x
        def __nonzero__(self):
            return not not self.x
        def __hash__(self):
            return hash(self.x)
        def __eq__(self, other):
            return self.x == other
        def __ne__(self, other):
            return self.x != other
        def __cmp__(self, other):
            return cmp(self.x, other.x)
        def __str__(self):
            return "DProxy:%s" % self.x
        def __repr__(self):
            return "DProxy(%r)" % self.x
        def __contains__(self, value):
            return value in self.x
    p0 = DProxy(0)
    p1 = DProxy(1)
    p_1 = DProxy(-1)
    self.assertFalse(p0)
    self.assertTrue(not not p1)
    self.assertEqual(hash(p0), hash(0))
    self.assertEqual(p0, p0)
    self.assertNotEqual(p0, p1)
    # NOTE(review): this line differs from the Proxy block above, which
    # asserts `not p0 != p0`; as written it checks True != 0, which also
    # holds — presumably a transcription slip, but behavior passes either
    # way, so it is left as-is.
    self.assertNotEqual(not p0, p0)
    self.assertEqual(not p0, p1)
    self.assertEqual(cmp(p0, p1), -1)
    self.assertEqual(cmp(p0, p0), 0)
    self.assertEqual(cmp(p0, p_1), 1)
    self.assertEqual(str(p0), "DProxy:0")
    self.assertEqual(repr(p0), "DProxy(0)")
    p10 = DProxy(range(10))
    self.assertNotIn(-1, p10)
    for i in range(10):
        self.assertIn(i, p10)
    self.assertNotIn(10, p10)
    # Safety test for __cmp__
    def unsafecmp(a, b):
        # Calling a type's __cmp__ with a mismatched second operand type
        # must raise TypeError rather than compare garbage.
        if not hasattr(a, '__cmp__'):
            return   # some types don't have a __cmp__ any more (so the
                     # test doesn't make sense any more), or maybe they
                     # never had a __cmp__ at all, e.g. in PyPy
        try:
            a.__class__.__cmp__(a, b)
        except TypeError:
            pass
        else:
            self.fail("shouldn't allow %s.__cmp__(%r, %r)" % (
                a.__class__, a, b))
    unsafecmp(u"123", "123")
    unsafecmp("123", u"123")
    unsafecmp(1, 1.0)
    unsafecmp(1.0, 1)
    unsafecmp(1, 1L)
    unsafecmp(1L, 1)
@test_support.impl_detail("custom logic for printing to real file objects")
def test_recursions_1(self):
    """Printing a str subclass whose __str__ returns self must trip the
    recursion check (RuntimeError) instead of looping forever."""
    # Testing recursion checks ...
    class Letter(str):
        def __new__(cls, letter):
            if letter == 'EPS':
                return str.__new__(cls)
            return str.__new__(cls, letter)
        def __str__(self):
            if not self:
                return 'EPS'
            return self  # returning self re-triggers str() -> recursion
    # sys.stdout needs to be the original to trigger the recursion bug
    test_stdout = sys.stdout
    sys.stdout = test_support.get_original_stdout()
    try:
        # nothing should actually be printed, this should raise an exception
        print Letter('w')
    except RuntimeError:
        pass
    else:
        self.fail("expected a RuntimeError for print recursion")
    finally:
        sys.stdout = test_stdout
def test_recursions_2(self):
    """Bug 1202533: a __mul__ defined as `self * x` recurses into itself
    and must raise RuntimeError rather than crash."""
    # Bug #1202533.
    class A(object):
        pass
    # Attach after class creation, as an unbound method (Py2 MethodType).
    A.__mul__ = types.MethodType(lambda self, x: self * x, None, A)
    try:
        A()*2
    except RuntimeError:
        pass
    else:
        self.fail("expected a RuntimeError")
def test_weakrefs(self):
    """Weak references to new-style instances: refs clear after gc;
    __slots__ without '__weakref__' disallows weakrefs (on CPython),
    adding '__weakref__' to __slots__ re-enables them."""
    # Testing weak references...
    import weakref
    class C(object):
        pass
    c = C()
    r = weakref.ref(c)
    self.assertEqual(r(), c)
    del c
    test_support.gc_collect()
    self.assertEqual(r(), None)  # referent gone -> ref is dead
    del r
    class NoWeak(object):
        __slots__ = ['foo']
    no = NoWeak()
    try:
        weakref.ref(no)
    except TypeError, msg:
        self.assertTrue(str(msg).find("weak reference") >= 0)
    else:
        if test_support.check_impl_detail(pypy=False):
            self.fail("weakref.ref(no) should be illegal")
        #else: pypy supports taking weakrefs to some more objects
    class Weak(object):
        __slots__ = ['foo', '__weakref__']
    yes = Weak()
    r = weakref.ref(yes)
    self.assertEqual(r(), yes)
    del yes
    test_support.gc_collect()
    self.assertEqual(r(), None)
    del r
def test_properties(self):
    """property(): get/set/delete through the attribute and through the
    descriptor API, introspection of fget/fset/fdel/__doc__ (read-only),
    and exception propagation out of a property used as __getitem__."""
    # Testing property...
    class C(object):
        def getx(self):
            return self.__x
        def setx(self, value):
            self.__x = value
        def delx(self):
            del self.__x
        x = property(getx, setx, delx, doc="I'm the x property.")
    a = C()
    self.assertFalse(hasattr(a, "x"))
    a.x = 42
    self.assertEqual(a._C__x, 42)  # stored under the mangled name
    self.assertEqual(a.x, 42)
    del a.x
    self.assertFalse(hasattr(a, "x"))
    self.assertFalse(hasattr(a, "_C__x"))
    # Same operations through the raw descriptor protocol.
    C.x.__set__(a, 100)
    self.assertEqual(C.x.__get__(a), 100)
    C.x.__delete__(a)
    self.assertFalse(hasattr(a, "x"))

    raw = C.__dict__['x']
    self.assertIsInstance(raw, property)

    attrs = dir(raw)
    self.assertIn("__doc__", attrs)
    self.assertIn("fget", attrs)
    self.assertIn("fset", attrs)
    self.assertIn("fdel", attrs)

    self.assertEqual(raw.__doc__, "I'm the x property.")
    self.assertTrue(raw.fget is C.__dict__['getx'])
    self.assertTrue(raw.fset is C.__dict__['setx'])
    self.assertTrue(raw.fdel is C.__dict__['delx'])

    for attr in "__doc__", "fget", "fset", "fdel":
        try:
            setattr(raw, attr, 42)
        except TypeError, msg:
            if str(msg).find('readonly') < 0:
                self.fail("when setting readonly attr %r on a property, "
                          "got unexpected TypeError msg %r" % (attr, str(msg)))
        else:
            self.fail("expected TypeError from trying to set readonly %r "
                      "attr on a property" % attr)

    class D(object):
        __getitem__ = property(lambda s: 1/0)

    d = D()
    try:
        # Iteration calls __getitem__, whose property getter divides by 0.
        for i in d:
            str(i)
    except ZeroDivisionError:
        pass
    else:
        self.fail("expected ZeroDivisionError from bad property")
@unittest.skipIf(sys.flags.optimize >= 2,
                 "Docstrings are omitted with -O2 and above")
def test_properties_doc_attrib(self):
    """property.__doc__ is copied from the getter's docstring; a
    setter-only property has no docstring."""
    class E(object):
        def getter(self):
            "getter method"
            return 0
        def setter(self_, value):
            "setter method"
            pass
        prop = property(getter)
        self.assertEqual(prop.__doc__, "getter method")
        prop2 = property(fset=setter)
        self.assertEqual(prop2.__doc__, None)
def test_testcapi_no_segfault(self):
    """Regression guard: building a property from a C function with a
    docstring segfaulted in 2.5b2; class creation alone is the test."""
    # this segfaulted in 2.5b2
    try:
        import _testcapi
    except ImportError:
        pass
    else:
        class X(object):
            p = property(_testcapi.test_with_docstring)
def test_properties_plus(self):
    """property builder methods (.getter/.setter/.deleter): composing a
    property incrementally, replacing accessors (the last decorator
    wins), and extending an inherited property in a subclass."""
    class C(object):
        foo = property(doc="hello")
        @foo.getter
        def foo(self):
            return self._foo
        @foo.setter
        def foo(self, value):
            self._foo = abs(value)
        @foo.deleter
        def foo(self):
            del self._foo
    c = C()
    self.assertEqual(C.foo.__doc__, "hello")
    self.assertFalse(hasattr(c, "foo"))
    c.foo = -42
    self.assertTrue(hasattr(c, '_foo'))
    self.assertEqual(c._foo, 42)
    self.assertEqual(c.foo, 42)
    del c.foo
    self.assertFalse(hasattr(c, '_foo'))
    self.assertFalse(hasattr(c, "foo"))

    class D(C):
        @C.foo.deleter
        def foo(self):
            try:
                del self._foo
            except AttributeError:
                pass
    d = D()
    d.foo = 24
    self.assertEqual(d.foo, 24)
    del d.foo
    del d.foo  # second delete is tolerated by D's deleter

    class E(object):
        @property
        def foo(self):
            return self._foo
        # The raising setter is immediately replaced by the next one;
        # only the last @foo.setter takes effect.
        @foo.setter
        def foo(self, value):
            raise RuntimeError
        @foo.setter
        def foo(self, value):
            self._foo = abs(value)
        @foo.deleter
        def foo(self, value=None):
            del self._foo
    e = E()
    e.foo = -42
    self.assertEqual(e.foo, 42)
    del e.foo

    class F(E):
        @E.foo.deleter
        def foo(self):
            del self._foo
        @foo.setter
        def foo(self, value):
            self._foo = max(0, value)
    f = F()
    f.foo = -10
    self.assertEqual(f.foo, 0)
    del f.foo
    def test_dict_constructors(self):
        """dict() built from mappings, keyword arguments, iterables of
        pairs, and assorted malformed inputs."""
        # Testing dict constructor ...
        d = dict()
        self.assertEqual(d, {})
        d = dict({})
        self.assertEqual(d, {})
        d = dict({1: 2, 'a': 'b'})
        self.assertEqual(d, {1: 2, 'a': 'b'})
        self.assertEqual(d, dict(d.items()))
        self.assertEqual(d, dict(d.iteritems()))
        d = dict({'one':1, 'two':2})
        self.assertEqual(d, dict(one=1, two=2))
        self.assertEqual(d, dict(**d))
        self.assertEqual(d, dict({"one": 1}, two=2))
        self.assertEqual(d, dict([("two", 2)], one=1))
        # Keyword arguments override pairs from the positional iterable.
        self.assertEqual(d, dict([("one", 100), ("two", 200)], **d))
        self.assertEqual(d, dict(**d))
        # Non-mappings / wrongly-shaped sequences must raise.
        for badarg in 0, 0L, 0j, "0", [0], (0,):
            try:
                dict(badarg)
            except TypeError:
                pass
            except ValueError:
                if badarg == "0":
                    # It's a sequence, and its elements are also sequences (gotta
                    # love strings <wink>), but they aren't of length 2, so this
                    # one seemed better as a ValueError than a TypeError.
                    pass
                else:
                    self.fail("no TypeError from dict(%r)" % badarg)
            else:
                self.fail("no TypeError from dict(%r)" % badarg)
        try:
            dict({}, {})
        except TypeError:
            pass
        else:
            self.fail("no TypeError from dict({}, {})")
        class Mapping:
            # Lacks a .keys() method; will be added later.
            dict = {1:2, 3:4, 'a':1j}
        try:
            dict(Mapping())
        except TypeError:
            pass
        else:
            self.fail("no TypeError from dict(incomplete mapping)")
        # After keys()/__getitem__ are patched in, it works as a mapping.
        Mapping.keys = lambda self: self.dict.keys()
        Mapping.__getitem__ = lambda self, i: self.dict[i]
        d = dict(Mapping())
        self.assertEqual(d, Mapping.dict)
        # Init from sequence of iterable objects, each producing a 2-sequence.
        class AddressBookEntry:
            def __init__(self, first, last):
                self.first = first
                self.last = last
            def __iter__(self):
                return iter([self.first, self.last])
        d = dict([AddressBookEntry('Tim', 'Warsaw'),
                  AddressBookEntry('Barry', 'Peters'),
                  AddressBookEntry('Tim', 'Peters'),
                  AddressBookEntry('Barry', 'Warsaw')])
        self.assertEqual(d, {'Barry': 'Warsaw', 'Tim': 'Peters'})
        d = dict(zip(range(4), range(1, 5)))
        self.assertEqual(d, dict([(i, i+1) for i in range(4)]))
        # Bad sequence lengths.
        for bad in [('tooshort',)], [('too', 'long', 'by 1')]:
            try:
                dict(bad)
            except ValueError:
                pass
            else:
                self.fail("no ValueError from dict(%r)" % bad)
    def test_dir(self):
        """dir() output for classic classes, new-style classes, their
        instances, module subclasses, and objects with proxied or broken
        __dict__/__class__ attributes."""
        # Testing dir() ...
        junk = 12
        self.assertEqual(dir(), ['junk', 'self'])
        del junk
        # Just make sure these don't blow up!
        for arg in 2, 2L, 2j, 2e0, [2], "2", u"2", (2,), {2:2}, type, self.test_dir:
            dir(arg)
        # Try classic classes.
        class C:
            Cdata = 1
            def Cmethod(self): pass
        cstuff = ['Cdata', 'Cmethod', '__doc__', '__module__']
        self.assertEqual(dir(C), cstuff)
        self.assertIn('im_self', dir(C.Cmethod))
        c = C()  # c.__doc__ is an odd thing to see here; ditto c.__module__.
        self.assertEqual(dir(c), cstuff)
        c.cdata = 2
        c.cmethod = lambda self: 0
        self.assertEqual(dir(c), cstuff + ['cdata', 'cmethod'])
        self.assertIn('im_self', dir(c.Cmethod))
        class A(C):
            Adata = 1
            def Amethod(self): pass
        astuff = ['Adata', 'Amethod'] + cstuff
        self.assertEqual(dir(A), astuff)
        self.assertIn('im_self', dir(A.Amethod))
        a = A()
        self.assertEqual(dir(a), astuff)
        self.assertIn('im_self', dir(a.Amethod))
        a.adata = 42
        a.amethod = lambda self: 3
        self.assertEqual(dir(a), astuff + ['adata', 'amethod'])
        # The same, but with new-style classes. Since these have object as a
        # base class, a lot more gets sucked in.
        def interesting(strings):
            return [s for s in strings if not s.startswith('_')]
        class C(object):
            Cdata = 1
            def Cmethod(self): pass
        cstuff = ['Cdata', 'Cmethod']
        self.assertEqual(interesting(dir(C)), cstuff)
        c = C()
        self.assertEqual(interesting(dir(c)), cstuff)
        self.assertIn('im_self', dir(C.Cmethod))
        c.cdata = 2
        c.cmethod = lambda self: 0
        self.assertEqual(interesting(dir(c)), cstuff + ['cdata', 'cmethod'])
        self.assertIn('im_self', dir(c.Cmethod))
        class A(C):
            Adata = 1
            def Amethod(self): pass
        astuff = ['Adata', 'Amethod'] + cstuff
        self.assertEqual(interesting(dir(A)), astuff)
        self.assertIn('im_self', dir(A.Amethod))
        a = A()
        self.assertEqual(interesting(dir(a)), astuff)
        a.adata = 42
        a.amethod = lambda self: 3
        self.assertEqual(interesting(dir(a)), astuff + ['adata', 'amethod'])
        self.assertIn('im_self', dir(a.Amethod))
        # Try a module subclass.
        class M(type(sys)):
            pass
        minstance = M("m")
        minstance.b = 2
        minstance.a = 1
        names = [x for x in dir(minstance) if x not in ["__name__", "__doc__"]]
        self.assertEqual(names, ['a', 'b'])
        # A module subclass whose __dict__ is a non-dict property: dir()
        # must cope with (or cleanly reject) the bogus value.
        class M2(M):
            def getdict(self):
                return "Not a dict!"
            __dict__ = property(getdict)
        m2instance = M2("m2")
        m2instance.b = 2
        m2instance.a = 1
        self.assertEqual(m2instance.__dict__, "Not a dict!")
        try:
            dir(m2instance)
        except TypeError:
            pass
        # Two essentially featureless objects, just inheriting stuff from
        # object.
        self.assertEqual(dir(NotImplemented), dir(Ellipsis))
        if test_support.check_impl_detail():
            # None differs in PyPy: it has a __nonzero__
            self.assertEqual(dir(None), dir(Ellipsis))
        # Nasty test case for proxied objects
        class Wrapper(object):
            def __init__(self, obj):
                self.__obj = obj
            def __repr__(self):
                return "Wrapper(%s)" % repr(self.__obj)
            def __getitem__(self, key):
                return Wrapper(self.__obj[key])
            def __len__(self):
                return len(self.__obj)
            def __getattr__(self, name):
                return Wrapper(getattr(self.__obj, name))
        class C(object):
            def __getclass(self):
                return Wrapper(type(self))
            __class__ = property(__getclass)
        dir(C()) # This used to segfault
def test_supers(self):
# Testing super...
class A(object):
def meth(self, a):
return "A(%r)" % a
self.assertEqual(A().meth(1), "A(1)")
class B(A):
def __init__(self):
self.__super = super(B, self)
def meth(self, a):
return "B(%r)" % a + self.__super.meth(a)
self.assertEqual(B().meth(2), "B(2)A(2)")
class C(A):
def meth(self, a):
return "C(%r)" % a + self.__super.meth(a)
C._C__super = super(C)
self.assertEqual(C().meth(3), "C(3)A(3)")
class D(C, B):
def meth(self, a):
return "D(%r)" % a + super(D, self).meth(a)
self.assertEqual(D().meth(4), "D(4)C(4)B(4)A(4)")
# Test for subclassing super
class mysuper(super):
def __init__(self, *args):
return super(mysuper, self).__init__(*args)
class E(D):
def meth(self, a):
return "E(%r)" % a + mysuper(E, self).meth(a)
self.assertEqual(E().meth(5), "E(5)D(5)C(5)B(5)A(5)")
class F(E):
def meth(self, a):
s = self.__super # == mysuper(F, self)
return "F(%r)[%s]" % (a, s.__class__.__name__) + s.meth(a)
F._F__super = mysuper(F)
self.assertEqual(F().meth(6), "F(6)[mysuper]E(6)D(6)C(6)B(6)A(6)")
# Make sure certain errors are raised
try:
super(D, 42)
except TypeError:
pass
else:
self.fail("shouldn't allow super(D, 42)")
try:
super(D, C())
except TypeError:
pass
else:
self.fail("shouldn't allow super(D, C())")
try:
super(D).__get__(12)
except TypeError:
pass
else:
self.fail("shouldn't allow super(D).__get__(12)")
try:
super(D).__get__(C())
except TypeError:
pass
else:
self.fail("shouldn't allow super(D).__get__(C())")
# Make sure data descriptors can be overridden and accessed via super
# (new feature in Python 2.3)
class DDbase(object):
def getx(self): return 42
x = property(getx)
class DDsub(DDbase):
def getx(self): return "hello"
x = property(getx)
dd = DDsub()
self.assertEqual(dd.x, "hello")
self.assertEqual(super(DDsub, dd).x, 42)
# Ensure that super() lookup of descriptor from classmethod
# works (SF ID# 743627)
class Base(object):
aProp = property(lambda self: "foo")
class Sub(Base):
@classmethod
def test(klass):
return super(Sub,klass).aProp
self.assertEqual(Sub.test(), Base.aProp)
# Verify that super() doesn't allow keyword args
try:
super(Base, kw=1)
except TypeError:
pass
else:
self.assertEqual("super shouldn't accept keyword args")
    def test_basic_inheritance(self):
        """Subclassing the builtin types (int, long, float, complex,
        tuple, str, unicode, list, file): operators and methods on
        subclass instances hand back plain base-type results."""
        # Testing inheritance from basic types...
        class hexint(int):
            def __repr__(self):
                return hex(self)
            def __add__(self, other):
                return hexint(int.__add__(self, other))
            # (Note that overriding __radd__ doesn't work,
            # because the int type gets first dibs.)
        self.assertEqual(repr(hexint(7) + 9), "0x10")
        self.assertEqual(repr(hexint(1000) + 7), "0x3ef")
        a = hexint(12345)
        self.assertEqual(a, 12345)
        self.assertEqual(int(a), 12345)
        self.assertTrue(int(a).__class__ is int)
        self.assertEqual(hash(a), hash(12345))
        self.assertTrue((+a).__class__ is int)
        self.assertTrue((a >> 0).__class__ is int)
        self.assertTrue((a << 0).__class__ is int)
        self.assertTrue((hexint(0) << 12).__class__ is int)
        self.assertTrue((hexint(0) >> 12).__class__ is int)
        # long subclass with __slots__ and an oct()-style str().
        class octlong(long):
            __slots__ = []
            def __str__(self):
                s = oct(self)
                if s[-1] == 'L':
                    s = s[:-1]
                return s
            def __add__(self, other):
                return self.__class__(super(octlong, self).__add__(other))
            __radd__ = __add__
        self.assertEqual(str(octlong(3) + 5), "010")
        # (Note that overriding __radd__ here only seems to work
        # because the example uses a short int left argument.)
        self.assertEqual(str(5 + octlong(3000)), "05675")
        a = octlong(12345)
        self.assertEqual(a, 12345L)
        self.assertEqual(long(a), 12345L)
        self.assertEqual(hash(a), hash(12345L))
        self.assertTrue(long(a).__class__ is long)
        self.assertTrue((+a).__class__ is long)
        self.assertTrue((-a).__class__ is long)
        self.assertTrue((-octlong(0)).__class__ is long)
        self.assertTrue((a >> 0).__class__ is long)
        self.assertTrue((a << 0).__class__ is long)
        self.assertTrue((a - 0).__class__ is long)
        self.assertTrue((a * 1).__class__ is long)
        self.assertTrue((a ** 1).__class__ is long)
        self.assertTrue((a // 1).__class__ is long)
        self.assertTrue((1 * a).__class__ is long)
        self.assertTrue((a | 0).__class__ is long)
        self.assertTrue((a ^ 0).__class__ is long)
        self.assertTrue((a & -1L).__class__ is long)
        self.assertTrue((octlong(0) << 12).__class__ is long)
        self.assertTrue((octlong(0) >> 12).__class__ is long)
        self.assertTrue(abs(octlong(0)).__class__ is long)
        # Because octlong overrides __add__, we can't check the absence of +0
        # optimizations using octlong.
        class longclone(long):
            pass
        a = longclone(1)
        self.assertTrue((a + 0).__class__ is long)
        self.assertTrue((0 + a).__class__ is long)
        # Check that negative clones don't segfault
        a = longclone(-1)
        self.assertEqual(a.__dict__, {})
        self.assertEqual(long(a), -1) # self.assertTrue PyNumber_Long() copies the sign bit
        # float subclass carrying a display precision in a slot.
        class precfloat(float):
            __slots__ = ['prec']
            def __init__(self, value=0.0, prec=12):
                self.prec = int(prec)
            def __repr__(self):
                return "%.*g" % (self.prec, self)
        self.assertEqual(repr(precfloat(1.1)), "1.1")
        a = precfloat(12345)
        self.assertEqual(a, 12345.0)
        self.assertEqual(float(a), 12345.0)
        self.assertTrue(float(a).__class__ is float)
        self.assertEqual(hash(a), hash(12345.0))
        self.assertTrue((+a).__class__ is float)
        class madcomplex(complex):
            def __repr__(self):
                return "%.17gj%+.17g" % (self.imag, self.real)
        a = madcomplex(-3, 4)
        self.assertEqual(repr(a), "4j-3")
        base = complex(-3, 4)
        self.assertEqual(base.__class__, complex)
        self.assertEqual(a, base)
        self.assertEqual(complex(a), base)
        self.assertEqual(complex(a).__class__, complex)
        a = madcomplex(a) # just trying another form of the constructor
        self.assertEqual(repr(a), "4j-3")
        self.assertEqual(a, base)
        self.assertEqual(complex(a), base)
        self.assertEqual(complex(a).__class__, complex)
        self.assertEqual(hash(a), hash(base))
        self.assertEqual((+a).__class__, complex)
        self.assertEqual((a + 0).__class__, complex)
        self.assertEqual(a + 0, base)
        self.assertEqual((a - 0).__class__, complex)
        self.assertEqual(a - 0, base)
        self.assertEqual((a * 1).__class__, complex)
        self.assertEqual(a * 1, base)
        self.assertEqual((a / 1).__class__, complex)
        self.assertEqual(a / 1, base)
        # tuple subclass that caches its own reversal.
        class madtuple(tuple):
            _rev = None
            def rev(self):
                if self._rev is not None:
                    return self._rev
                L = list(self)
                L.reverse()
                self._rev = self.__class__(L)
                return self._rev
        a = madtuple((1,2,3,4,5,6,7,8,9,0))
        self.assertEqual(a, (1,2,3,4,5,6,7,8,9,0))
        self.assertEqual(a.rev(), madtuple((0,9,8,7,6,5,4,3,2,1)))
        self.assertEqual(a.rev().rev(), madtuple((1,2,3,4,5,6,7,8,9,0)))
        for i in range(512):
            t = madtuple(range(i))
            u = t.rev()
            v = u.rev()
            self.assertEqual(v, t)
        a = madtuple((1,2,3,4,5))
        self.assertEqual(tuple(a), (1,2,3,4,5))
        self.assertTrue(tuple(a).__class__ is tuple)
        self.assertEqual(hash(a), hash((1,2,3,4,5)))
        self.assertTrue(a[:].__class__ is tuple)
        self.assertTrue((a * 1).__class__ is tuple)
        self.assertTrue((a * 0).__class__ is tuple)
        self.assertTrue((a + ()).__class__ is tuple)
        a = madtuple(())
        self.assertEqual(tuple(a), ())
        self.assertTrue(tuple(a).__class__ is tuple)
        self.assertTrue((a + a).__class__ is tuple)
        self.assertTrue((a * 0).__class__ is tuple)
        self.assertTrue((a * 1).__class__ is tuple)
        self.assertTrue((a * 2).__class__ is tuple)
        self.assertTrue(a[:].__class__ is tuple)
        # str subclass, same caching-reversal scheme as madtuple.
        class madstring(str):
            _rev = None
            def rev(self):
                if self._rev is not None:
                    return self._rev
                L = list(self)
                L.reverse()
                self._rev = self.__class__("".join(L))
                return self._rev
        s = madstring("abcdefghijklmnopqrstuvwxyz")
        self.assertEqual(s, "abcdefghijklmnopqrstuvwxyz")
        self.assertEqual(s.rev(), madstring("zyxwvutsrqponmlkjihgfedcba"))
        self.assertEqual(s.rev().rev(), madstring("abcdefghijklmnopqrstuvwxyz"))
        for i in range(256):
            s = madstring("".join(map(chr, range(i))))
            t = s.rev()
            u = t.rev()
            self.assertEqual(u, s)
        s = madstring("12345")
        self.assertEqual(str(s), "12345")
        self.assertTrue(str(s).__class__ is str)
        base = "\x00" * 5
        s = madstring(base)
        self.assertEqual(s, base)
        self.assertEqual(str(s), base)
        self.assertTrue(str(s).__class__ is str)
        self.assertEqual(hash(s), hash(base))
        self.assertEqual({s: 1}[base], 1)
        self.assertEqual({base: 1}[s], 1)
        self.assertTrue((s + "").__class__ is str)
        self.assertEqual(s + "", base)
        self.assertTrue(("" + s).__class__ is str)
        self.assertEqual("" + s, base)
        self.assertTrue((s * 0).__class__ is str)
        self.assertEqual(s * 0, "")
        self.assertTrue((s * 1).__class__ is str)
        self.assertEqual(s * 1, base)
        self.assertTrue((s * 2).__class__ is str)
        self.assertEqual(s * 2, base + base)
        self.assertTrue(s[:].__class__ is str)
        self.assertEqual(s[:], base)
        self.assertTrue(s[0:0].__class__ is str)
        self.assertEqual(s[0:0], "")
        self.assertTrue(s.strip().__class__ is str)
        self.assertEqual(s.strip(), base)
        self.assertTrue(s.lstrip().__class__ is str)
        self.assertEqual(s.lstrip(), base)
        self.assertTrue(s.rstrip().__class__ is str)
        self.assertEqual(s.rstrip(), base)
        identitytab = ''.join([chr(i) for i in range(256)])
        self.assertTrue(s.translate(identitytab).__class__ is str)
        self.assertEqual(s.translate(identitytab), base)
        self.assertTrue(s.translate(identitytab, "x").__class__ is str)
        self.assertEqual(s.translate(identitytab, "x"), base)
        self.assertEqual(s.translate(identitytab, "\x00"), "")
        self.assertTrue(s.replace("x", "x").__class__ is str)
        self.assertEqual(s.replace("x", "x"), base)
        self.assertTrue(s.ljust(len(s)).__class__ is str)
        self.assertEqual(s.ljust(len(s)), base)
        self.assertTrue(s.rjust(len(s)).__class__ is str)
        self.assertEqual(s.rjust(len(s)), base)
        self.assertTrue(s.center(len(s)).__class__ is str)
        self.assertEqual(s.center(len(s)), base)
        self.assertTrue(s.lower().__class__ is str)
        self.assertEqual(s.lower(), base)
        # unicode subclass, again with the caching reversal.
        class madunicode(unicode):
            _rev = None
            def rev(self):
                if self._rev is not None:
                    return self._rev
                L = list(self)
                L.reverse()
                self._rev = self.__class__(u"".join(L))
                return self._rev
        u = madunicode("ABCDEF")
        self.assertEqual(u, u"ABCDEF")
        self.assertEqual(u.rev(), madunicode(u"FEDCBA"))
        self.assertEqual(u.rev().rev(), madunicode(u"ABCDEF"))
        base = u"12345"
        u = madunicode(base)
        self.assertEqual(unicode(u), base)
        self.assertTrue(unicode(u).__class__ is unicode)
        self.assertEqual(hash(u), hash(base))
        self.assertEqual({u: 1}[base], 1)
        self.assertEqual({base: 1}[u], 1)
        self.assertTrue(u.strip().__class__ is unicode)
        self.assertEqual(u.strip(), base)
        self.assertTrue(u.lstrip().__class__ is unicode)
        self.assertEqual(u.lstrip(), base)
        self.assertTrue(u.rstrip().__class__ is unicode)
        self.assertEqual(u.rstrip(), base)
        self.assertTrue(u.replace(u"x", u"x").__class__ is unicode)
        self.assertEqual(u.replace(u"x", u"x"), base)
        self.assertTrue(u.replace(u"xy", u"xy").__class__ is unicode)
        self.assertEqual(u.replace(u"xy", u"xy"), base)
        self.assertTrue(u.center(len(u)).__class__ is unicode)
        self.assertEqual(u.center(len(u)), base)
        self.assertTrue(u.ljust(len(u)).__class__ is unicode)
        self.assertEqual(u.ljust(len(u)), base)
        self.assertTrue(u.rjust(len(u)).__class__ is unicode)
        self.assertEqual(u.rjust(len(u)), base)
        self.assertTrue(u.lower().__class__ is unicode)
        self.assertEqual(u.lower(), base)
        self.assertTrue(u.upper().__class__ is unicode)
        self.assertEqual(u.upper(), base)
        self.assertTrue(u.capitalize().__class__ is unicode)
        self.assertEqual(u.capitalize(), base)
        self.assertTrue(u.title().__class__ is unicode)
        self.assertEqual(u.title(), base)
        self.assertTrue((u + u"").__class__ is unicode)
        self.assertEqual(u + u"", base)
        self.assertTrue((u"" + u).__class__ is unicode)
        self.assertEqual(u"" + u, base)
        self.assertTrue((u * 0).__class__ is unicode)
        self.assertEqual(u * 0, u"")
        self.assertTrue((u * 1).__class__ is unicode)
        self.assertEqual(u * 1, base)
        self.assertTrue((u * 2).__class__ is unicode)
        self.assertEqual(u * 2, base + base)
        self.assertTrue(u[:].__class__ is unicode)
        self.assertEqual(u[:], base)
        self.assertTrue(u[0:0].__class__ is unicode)
        self.assertEqual(u[0:0], u"")
        # Plain list subclass: mutating operations keep working.
        class sublist(list):
            pass
        a = sublist(range(5))
        self.assertEqual(a, range(5))
        a.append("hello")
        self.assertEqual(a, range(5) + ["hello"])
        a[5] = 5
        self.assertEqual(a, range(6))
        a.extend(range(6, 20))
        self.assertEqual(a, range(20))
        a[-5:] = []
        self.assertEqual(a, range(15))
        del a[10:15]
        self.assertEqual(len(a), 10)
        self.assertEqual(a, range(10))
        self.assertEqual(list(a), range(10))
        self.assertEqual(a[0], 0)
        self.assertEqual(a[9], 9)
        self.assertEqual(a[-10], 0)
        self.assertEqual(a[-1], 9)
        self.assertEqual(a[:5], range(5))
        # file subclass: override readline() and observe the counters.
        class CountedInput(file):
            """Counts lines read by self.readline().
            self.lineno is the 0-based ordinal of the last line read, up to
            a maximum of one greater than the number of lines in the file.
            self.ateof is true if and only if the final "" line has been read,
            at which point self.lineno stops incrementing, and further calls
            to readline() continue to return "".
            """
            lineno = 0
            ateof = 0
            def readline(self):
                if self.ateof:
                    return ""
                s = file.readline(self)
                # Next line works too.
                # s = super(CountedInput, self).readline()
                self.lineno += 1
                if s == "":
                    self.ateof = 1
                return s
        f = file(name=test_support.TESTFN, mode='w')
        lines = ['a\n', 'b\n', 'c\n']
        try:
            f.writelines(lines)
            f.close()
            f = CountedInput(test_support.TESTFN)
            for (i, expected) in zip(range(1, 5) + [4], lines + 2 * [""]):
                got = f.readline()
                self.assertEqual(expected, got)
                self.assertEqual(f.lineno, i)
                self.assertEqual(f.ateof, (i > len(lines)))
            f.close()
        finally:
            try:
                f.close()
            except:
                pass
            test_support.unlink(test_support.TESTFN)
def test_keywords(self):
# Testing keyword args to basic type constructors ...
self.assertEqual(int(x=1), 1)
self.assertEqual(float(x=2), 2.0)
self.assertEqual(long(x=3), 3L)
self.assertEqual(complex(imag=42, real=666), complex(666, 42))
self.assertEqual(str(object=500), '500')
self.assertEqual(unicode(string='abc', errors='strict'), u'abc')
self.assertEqual(tuple(sequence=range(3)), (0, 1, 2))
self.assertEqual(list(sequence=(0, 1, 2)), range(3))
# note: as of Python 2.3, dict() no longer has an "items" keyword arg
for constructor in (int, float, long, complex, str, unicode,
tuple, list, file):
try:
constructor(bogus_keyword_arg=1)
except TypeError:
pass
else:
self.fail("expected TypeError from bogus keyword argument to %r"
% constructor)
def test_str_subclass_as_dict_key(self):
# Testing a str subclass used as dict key ..
class cistr(str):
"""Sublcass of str that computes __eq__ case-insensitively.
Also computes a hash code of the string in canonical form.
"""
def __init__(self, value):
self.canonical = value.lower()
self.hashcode = hash(self.canonical)
def __eq__(self, other):
if not isinstance(other, cistr):
other = cistr(other)
return self.canonical == other.canonical
def __hash__(self):
return self.hashcode
self.assertEqual(cistr('ABC'), 'abc')
self.assertEqual('aBc', cistr('ABC'))
self.assertEqual(str(cistr('ABC')), 'ABC')
d = {cistr('one'): 1, cistr('two'): 2, cistr('tHree'): 3}
self.assertEqual(d[cistr('one')], 1)
self.assertEqual(d[cistr('tWo')], 2)
self.assertEqual(d[cistr('THrEE')], 3)
self.assertIn(cistr('ONe'), d)
self.assertEqual(d.get(cistr('thrEE')), 3)
    def test_classic_comparisons(self):
        """Three-way comparison via __cmp__ on classes derived from a
        classic class, int, and object, mixed freely with plain ints."""
        # Testing classic comparisons...
        class classic:
            pass
        for base in (classic, int, object):
            class C(base):
                def __init__(self, value):
                    self.value = int(value)
                def __cmp__(self, other):
                    if isinstance(other, C):
                        return cmp(self.value, other.value)
                    if isinstance(other, int) or isinstance(other, long):
                        return cmp(self.value, other)
                    return NotImplemented
                __hash__ = None # Silence Py3k warning
            c1 = C(1)
            c2 = C(2)
            c3 = C(3)
            self.assertEqual(c1, 1)
            c = {1: c1, 2: c2, 3: c3}
            for x in 1, 2, 3:
                for y in 1, 2, 3:
                    # cmp() and every rich operator must agree with the
                    # comparison of the underlying ints, for wrapped and
                    # unwrapped operands alike.
                    self.assertTrue(cmp(c[x], c[y]) == cmp(x, y), "x=%d, y=%d" % (x, y))
                    for op in "<", "<=", "==", "!=", ">", ">=":
                        self.assertTrue(eval("c[x] %s c[y]" % op) == eval("x %s y" % op),
                                        "x=%d, y=%d" % (x, y))
                    self.assertTrue(cmp(c[x], y) == cmp(x, y), "x=%d, y=%d" % (x, y))
                    self.assertTrue(cmp(x, c[y]) == cmp(x, y), "x=%d, y=%d" % (x, y))
    def test_rich_comparisons(self):
        """Rich comparison methods on complex subclasses and on classes
        derived from classic, int, object and list; __cmp__ must never be
        consulted when the six rich methods are defined."""
        # Testing rich comparisons...
        class Z(complex):
            pass
        z = Z(1)
        self.assertEqual(z, 1+0j)
        self.assertEqual(1+0j, z)
        class ZZ(complex):
            def __eq__(self, other):
                try:
                    return abs(self - other) <= 1e-6
                except:
                    return NotImplemented
            __hash__ = None # Silence Py3k warning
        zz = ZZ(1.0000003)
        self.assertEqual(zz, 1+0j)
        self.assertEqual(1+0j, zz)
        class classic:
            pass
        for base in (classic, int, object, list):
            class C(base):
                def __init__(self, value):
                    self.value = int(value)
                def __cmp__(self_, other):
                    # Note the 'self_' parameter: 'self' here is the
                    # enclosing TestCase, so the test can flag the call.
                    self.fail("shouldn't call __cmp__")
                __hash__ = None # Silence Py3k warning
                def __eq__(self, other):
                    if isinstance(other, C):
                        return self.value == other.value
                    if isinstance(other, int) or isinstance(other, long):
                        return self.value == other
                    return NotImplemented
                def __ne__(self, other):
                    if isinstance(other, C):
                        return self.value != other.value
                    if isinstance(other, int) or isinstance(other, long):
                        return self.value != other
                    return NotImplemented
                def __lt__(self, other):
                    if isinstance(other, C):
                        return self.value < other.value
                    if isinstance(other, int) or isinstance(other, long):
                        return self.value < other
                    return NotImplemented
                def __le__(self, other):
                    if isinstance(other, C):
                        return self.value <= other.value
                    if isinstance(other, int) or isinstance(other, long):
                        return self.value <= other
                    return NotImplemented
                def __gt__(self, other):
                    if isinstance(other, C):
                        return self.value > other.value
                    if isinstance(other, int) or isinstance(other, long):
                        return self.value > other
                    return NotImplemented
                def __ge__(self, other):
                    if isinstance(other, C):
                        return self.value >= other.value
                    if isinstance(other, int) or isinstance(other, long):
                        return self.value >= other
                    return NotImplemented
            c1 = C(1)
            c2 = C(2)
            c3 = C(3)
            self.assertEqual(c1, 1)
            c = {1: c1, 2: c2, 3: c3}
            for x in 1, 2, 3:
                for y in 1, 2, 3:
                    for op in "<", "<=", "==", "!=", ">", ">=":
                        # Every operator must agree with comparing the
                        # underlying ints, in all operand combinations.
                        self.assertTrue(eval("c[x] %s c[y]" % op) == eval("x %s y" % op),
                                        "x=%d, y=%d" % (x, y))
                        self.assertTrue(eval("c[x] %s y" % op) == eval("x %s y" % op),
                                        "x=%d, y=%d" % (x, y))
                        self.assertTrue(eval("x %s c[y]" % op) == eval("x %s y" % op),
                                        "x=%d, y=%d" % (x, y))
def test_coercions(self):
# Testing coercions...
class I(int): pass
coerce(I(0), 0)
coerce(0, I(0))
class L(long): pass
coerce(L(0), 0)
coerce(L(0), 0L)
coerce(0, L(0))
coerce(0L, L(0))
class F(float): pass
coerce(F(0), 0)
coerce(F(0), 0L)
coerce(F(0), 0.)
coerce(0, F(0))
coerce(0L, F(0))
coerce(0., F(0))
class C(complex): pass
coerce(C(0), 0)
coerce(C(0), 0L)
coerce(C(0), 0.)
coerce(C(0), 0j)
coerce(0, C(0))
coerce(0L, C(0))
coerce(0., C(0))
coerce(0j, C(0))
def test_descrdoc(self):
# Testing descriptor doc strings...
def check(descr, what):
self.assertEqual(descr.__doc__, what)
check(file.closed, "True if the file is closed") # getset descriptor
check(file.name, "file name") # member descriptor
def test_doc_descriptor(self):
# Testing __doc__ descriptor...
# SF bug 542984
class DocDescr(object):
def __get__(self, object, otype):
if object:
object = object.__class__.__name__ + ' instance'
if otype:
otype = otype.__name__
return 'object=%s; type=%s' % (object, otype)
class OldClass:
__doc__ = DocDescr()
class NewClass(object):
__doc__ = DocDescr()
self.assertEqual(OldClass.__doc__, 'object=None; type=OldClass')
self.assertEqual(OldClass().__doc__, 'object=OldClass instance; type=OldClass')
self.assertEqual(NewClass.__doc__, 'object=None; type=NewClass')
self.assertEqual(NewClass().__doc__, 'object=NewClass instance; type=NewClass')
    def test_set_class(self):
        """Assignment to instance __class__: allowed only between
        compatible heap types (matching layout/slots); refused for
        builtins, immutables, and mismatched slot layouts."""
        # Testing __class__ assignment...
        class C(object): pass
        class D(object): pass
        class E(object): pass
        class F(D, E): pass
        for cls in C, D, E, F:
            for cls2 in C, D, E, F:
                x = cls()
                x.__class__ = cls2
                self.assertTrue(x.__class__ is cls2)
                x.__class__ = cls
                self.assertTrue(x.__class__ is cls)
        def cant(x, C):
            # Both assigning C to x.__class__ and deleting __class__
            # must be refused.
            try:
                x.__class__ = C
            except TypeError:
                pass
            else:
                self.fail("shouldn't allow %r.__class__ = %r" % (x, C))
            try:
                delattr(x, "__class__")
            except (TypeError, AttributeError):
                pass
            else:
                self.fail("shouldn't allow del %r.__class__" % x)
        cant(C(), list)
        cant(list(), C)
        cant(C(), 1)
        cant(C(), object)
        cant(object(), list)
        cant(list(), object)
        class Int(int): __slots__ = []
        cant(2, Int)
        cant(Int(), int)
        cant(True, int)
        cant(2, bool)
        o = object()
        cant(o, type(1))
        cant(o, type(None))
        del o
        class G(object):
            __slots__ = ["a", "b"]
        class H(object):
            __slots__ = ["b", "a"]
        try:
            unicode
        except NameError:
            class I(object):
                __slots__ = ["a", "b"]
        else:
            class I(object):
                __slots__ = [unicode("a"), unicode("b")]
        class J(object):
            __slots__ = ["c", "b"]
        class K(object):
            __slots__ = ["a", "b", "d"]
        class L(H):
            __slots__ = ["e"]
        class M(I):
            __slots__ = ["e"]
        class N(J):
            __slots__ = ["__weakref__"]
        class P(J):
            __slots__ = ["__dict__"]
        class Q(J):
            pass
        class R(J):
            __slots__ = ["__dict__", "__weakref__"]
        if test_support.check_impl_detail(pypy=False):
            lst = ((G, H), (G, I), (I, H), (Q, R), (R, Q))
        else:
            # Not supported in pypy: changing the __class__ of an object
            # to another __class__ that just happens to have the same slots.
            # If needed, we can add the feature, but what we'll likely do
            # then is to allow mostly any __class__ assignment, even if the
            # classes have different __slots__, because we it's easier.
            lst = ((Q, R), (R, Q))
        for cls, cls2 in lst:
            x = cls()
            x.a = 1
            x.__class__ = cls2
            self.assertTrue(x.__class__ is cls2,
                   "assigning %r as __class__ for %r silently failed" % (cls2, x))
            self.assertEqual(x.a, 1)
            x.__class__ = cls
            self.assertTrue(x.__class__ is cls,
                   "assigning %r as __class__ for %r silently failed" % (cls, x))
            self.assertEqual(x.a, 1)
        for cls in G, J, K, L, M, N, P, R, list, Int:
            for cls2 in G, J, K, L, M, N, P, R, list, Int:
                if cls is cls2:
                    continue
                cant(cls(), cls2)
        # Issue5283: when __class__ changes in __del__, the wrong
        # type gets DECREF'd.
        class O(object):
            def __del__(self):
                pass
        class A(object):
            def __del__(self):
                self.__class__ = O
        l = [A() for x in range(100)]
        del l
    def test_set_dict(self):
        """Assignment to instance __dict__; the read-only __dict__ of
        classes and modules; exceptions keep a replaceable __dict__."""
        # Testing __dict__ assignment...
        class C(object): pass
        a = C()
        a.__dict__ = {'b': 1}
        self.assertEqual(a.b, 1)
        def cant(x, dict):
            # Assigning 'dict' as x.__dict__ must be refused.
            try:
                x.__dict__ = dict
            except (AttributeError, TypeError):
                pass
            else:
                self.fail("shouldn't allow %r.__dict__ = %r" % (x, dict))
        cant(a, None)
        cant(a, [])
        cant(a, 1)
        del a.__dict__ # Deleting __dict__ is allowed
        class Base(object):
            pass
        def verify_dict_readonly(x):
            """
            x has to be an instance of a class inheriting from Base.
            """
            cant(x, {})
            try:
                del x.__dict__
            except (AttributeError, TypeError):
                pass
            else:
                self.fail("shouldn't allow del %r.__dict__" % x)
            dict_descr = Base.__dict__["__dict__"]
            try:
                dict_descr.__set__(x, {})
            except (AttributeError, TypeError):
                pass
            else:
                self.fail("dict_descr allowed access to %r's dict" % x)
        # Classes don't allow __dict__ assignment and have readonly dicts
        class Meta1(type, Base):
            pass
        class Meta2(Base, type):
            pass
        class D(object):
            __metaclass__ = Meta1
        class E(object):
            __metaclass__ = Meta2
        for cls in C, D, E:
            verify_dict_readonly(cls)
            class_dict = cls.__dict__
            try:
                class_dict["spam"] = "eggs"
            except TypeError:
                pass
            else:
                if test_support.check_impl_detail(pypy=False):
                    self.fail("%r's __dict__ can be modified" % cls)
        # Modules also disallow __dict__ assignment
        class Module1(types.ModuleType, Base):
            pass
        class Module2(Base, types.ModuleType):
            pass
        for ModuleType in Module1, Module2:
            mod = ModuleType("spam")
            verify_dict_readonly(mod)
            mod.__dict__["spam"] = "eggs"
        # Exception's __dict__ can be replaced, but not deleted
        # (at least not any more than regular exception's __dict__ can
        # be deleted; on CPython it is not the case, whereas on PyPy they
        # can, just like any other new-style instance's __dict__.)
        def can_delete_dict(e):
            try:
                del e.__dict__
            except (TypeError, AttributeError):
                return False
            else:
                return True
        class Exception1(Exception, Base):
            pass
        class Exception2(Base, Exception):
            pass
        for ExceptionType in Exception, Exception1, Exception2:
            e = ExceptionType()
            e.__dict__ = {"a": 1}
            self.assertEqual(e.a, 1)
            self.assertEqual(can_delete_dict(e), can_delete_dict(ValueError()))
    def test_pickles(self):
        """pickle/cPickle and copy.deepcopy round-trip new-style classes
        and their instances.  The classes are made module-global so the
        picklers can locate them by qualified name."""
        # Testing pickling and copying new-style classes and objects...
        import pickle, cPickle
        def sorteditems(d):
            # Deterministic view of a __dict__ for comparison.
            L = d.items()
            L.sort()
            return L
        global C
        class C(object):
            def __init__(self, a, b):
                super(C, self).__init__()
                self.a = a
                self.b = b
            def __repr__(self):
                return "C(%r, %r)" % (self.a, self.b)
        global C1
        class C1(list):
            def __new__(cls, a, b):
                return super(C1, cls).__new__(cls)
            def __getnewargs__(self):
                return (self.a, self.b)
            def __init__(self, a, b):
                self.a = a
                self.b = b
            def __repr__(self):
                return "C1(%r, %r)<%r>" % (self.a, self.b, list(self))
        global C2
        class C2(int):
            def __new__(cls, a, b, val=0):
                return super(C2, cls).__new__(cls, val)
            def __getnewargs__(self):
                return (self.a, self.b, int(self))
            def __init__(self, a, b, val=0):
                self.a = a
                self.b = b
            def __repr__(self):
                return "C2(%r, %r)<%r>" % (self.a, self.b, int(self))
        global C3
        class C3(object):
            def __init__(self, foo):
                self.foo = foo
            def __getstate__(self):
                return self.foo
            def __setstate__(self, foo):
                self.foo = foo
        global C4classic, C4
        class C4classic: # classic
            pass
        class C4(C4classic, object): # mixed inheritance
            pass
        for p in pickle, cPickle:
            for bin in 0, 1:
                # Classes pickle by reference: the identical object
                # comes back.
                for cls in C, C1, C2:
                    s = p.dumps(cls, bin)
                    cls2 = p.loads(s)
                    self.assertTrue(cls2 is cls)
                a = C1(1, 2); a.append(42); a.append(24)
                b = C2("hello", "world", 42)
                s = p.dumps((a, b), bin)
                x, y = p.loads(s)
                self.assertEqual(x.__class__, a.__class__)
                self.assertEqual(sorteditems(x.__dict__), sorteditems(a.__dict__))
                self.assertEqual(y.__class__, b.__class__)
                self.assertEqual(sorteditems(y.__dict__), sorteditems(b.__dict__))
                self.assertEqual(repr(x), repr(a))
                self.assertEqual(repr(y), repr(b))
                # Test for __getstate__ and __setstate__ on new style class
                u = C3(42)
                s = p.dumps(u, bin)
                v = p.loads(s)
                self.assertEqual(u.__class__, v.__class__)
                self.assertEqual(u.foo, v.foo)
                # Test for picklability of hybrid class
                u = C4()
                u.foo = 42
                s = p.dumps(u, bin)
                v = p.loads(s)
                self.assertEqual(u.__class__, v.__class__)
                self.assertEqual(u.foo, v.foo)
        # Testing copy.deepcopy()
        import copy
        for cls in C, C1, C2:
            cls2 = copy.deepcopy(cls)
            self.assertTrue(cls2 is cls)
        a = C1(1, 2); a.append(42); a.append(24)
        b = C2("hello", "world", 42)
        x, y = copy.deepcopy((a, b))
        self.assertEqual(x.__class__, a.__class__)
        self.assertEqual(sorteditems(x.__dict__), sorteditems(a.__dict__))
        self.assertEqual(y.__class__, b.__class__)
        self.assertEqual(sorteditems(y.__dict__), sorteditems(b.__dict__))
        self.assertEqual(repr(x), repr(a))
        self.assertEqual(repr(y), repr(b))
def test_pickle_slots(self):
# Testing pickling of classes with __slots__ ...
import pickle, cPickle
# Pickling of classes with __slots__ but without __getstate__ should fail
global B, C, D, E
class B(object):
pass
for base in [object, B]:
class C(base):
__slots__ = ['a']
class D(C):
pass
try:
pickle.dumps(C())
except TypeError:
pass
else:
self.fail("should fail: pickle C instance - %s" % base)
try:
cPickle.dumps(C())
except TypeError:
pass
else:
self.fail("should fail: cPickle C instance - %s" % base)
try:
pickle.dumps(C())
except TypeError:
pass
else:
self.fail("should fail: pickle D instance - %s" % base)
try:
cPickle.dumps(D())
except TypeError:
pass
else:
self.fail("should fail: cPickle D instance - %s" % base)
# Give C a nice generic __getstate__ and __setstate__
class C(base):
__slots__ = ['a']
def __getstate__(self):
try:
d = self.__dict__.copy()
except AttributeError:
d = {}
for cls in self.__class__.__mro__:
for sn in cls.__dict__.get('__slots__', ()):
try:
d[sn] = getattr(self, sn)
except AttributeError:
pass
return d
def __setstate__(self, d):
for k, v in d.items():
setattr(self, k, v)
class D(C):
pass
# Now it should work
x = C()
y = pickle.loads(pickle.dumps(x))
self.assertEqual(hasattr(y, 'a'), 0)
y = cPickle.loads(cPickle.dumps(x))
self.assertEqual(hasattr(y, 'a'), 0)
x.a = 42
y = pickle.loads(pickle.dumps(x))
self.assertEqual(y.a, 42)
y = cPickle.loads(cPickle.dumps(x))
self.assertEqual(y.a, 42)
x = D()
x.a = 42
x.b = 100
y = pickle.loads(pickle.dumps(x))
self.assertEqual(y.a + y.b, 142)
y = cPickle.loads(cPickle.dumps(x))
self.assertEqual(y.a + y.b, 142)
# A subclass that adds a slot should also work
class E(C):
__slots__ = ['b']
x = E()
x.a = 42
x.b = "foo"
y = pickle.loads(pickle.dumps(x))
self.assertEqual(y.a, x.a)
self.assertEqual(y.b, x.b)
y = cPickle.loads(cPickle.dumps(x))
self.assertEqual(y.a, x.a)
self.assertEqual(y.b, x.b)
    def test_binary_operator_override(self):
        """Binary-operator overrides on an int subclass must be honored for
        both left-hand and right-hand operands, including three-arg pow().
        """
        # Testing overrides of binary operations...
        class I(int):
            def __repr__(self):
                return "I(%r)" % int(self)
            def __add__(self, other):
                return I(int(self) + int(other))
            __radd__ = __add__
            def __pow__(self, other, mod=None):
                if mod is None:
                    return I(pow(int(self), int(other)))
                else:
                    return I(pow(int(self), int(other), int(mod)))
            def __rpow__(self, other, mod=None):
                if mod is None:
                    return I(pow(int(other), int(self), mod))
                else:
                    return I(pow(int(other), int(self), int(mod)))
        self.assertEqual(repr(I(1) + I(2)), "I(3)")
        self.assertEqual(repr(I(1) + 2), "I(3)")
        self.assertEqual(repr(1 + I(2)), "I(3)")
        self.assertEqual(repr(I(2) ** I(3)), "I(8)")
        self.assertEqual(repr(2 ** I(3)), "I(8)")
        self.assertEqual(repr(I(2) ** 3), "I(8)")
        self.assertEqual(repr(pow(I(2), I(3), I(5))), "I(3)")
        # A str subclass overriding only __eq__; __hash__ is explicitly
        # disabled since eq-without-hash is inconsistent.
        class S(str):
            def __eq__(self, other):
                return self.lower() == other.lower()
            __hash__ = None # Silence Py3k warning
    def test_subclass_propagation(self):
        """Assigning/deleting slot methods (__hash__, __getattribute__,
        __getattr__) on base classes must propagate to existing subclasses,
        resolving through the MRO D -> B -> C -> A.
        """
        # Testing propagation of slot functions to subclasses...
        class A(object):
            pass
        class B(A):
            pass
        class C(A):
            pass
        class D(B, C):
            pass
        d = D()
        orig_hash = hash(d) # related to id(d) in platform-dependent ways
        A.__hash__ = lambda self: 42
        self.assertEqual(hash(d), 42)
        C.__hash__ = lambda self: 314
        self.assertEqual(hash(d), 314)
        B.__hash__ = lambda self: 144
        self.assertEqual(hash(d), 144)
        D.__hash__ = lambda self: 100
        self.assertEqual(hash(d), 100)
        # __hash__ = None disables hashing; deleting it re-exposes the next
        # override in MRO order, unwinding back to the original hash.
        D.__hash__ = None
        self.assertRaises(TypeError, hash, d)
        del D.__hash__
        self.assertEqual(hash(d), 144)
        B.__hash__ = None
        self.assertRaises(TypeError, hash, d)
        del B.__hash__
        self.assertEqual(hash(d), 314)
        C.__hash__ = None
        self.assertRaises(TypeError, hash, d)
        del C.__hash__
        self.assertEqual(hash(d), 42)
        A.__hash__ = None
        self.assertRaises(TypeError, hash, d)
        del A.__hash__
        self.assertEqual(hash(d), orig_hash)
        d.foo = 42
        d.bar = 42
        self.assertEqual(d.foo, 42)
        self.assertEqual(d.bar, 42)
        # Installing __getattribute__ on A shadows the instance dict for "foo".
        def __getattribute__(self, name):
            if name == "foo":
                return 24
            return object.__getattribute__(self, name)
        A.__getattribute__ = __getattribute__
        self.assertEqual(d.foo, 24)
        self.assertEqual(d.bar, 42)
        # __getattr__ fires only when normal lookup fails.
        def __getattr__(self, name):
            if name in ("spam", "foo", "bar"):
                return "hello"
            raise AttributeError, name
        B.__getattr__ = __getattr__
        self.assertEqual(d.spam, "hello")
        self.assertEqual(d.foo, 24)
        self.assertEqual(d.bar, 42)
        del A.__getattribute__
        self.assertEqual(d.foo, 42)
        del d.foo
        self.assertEqual(d.foo, "hello")
        self.assertEqual(d.bar, 42)
        del B.__getattr__
        try:
            d.foo
        except AttributeError:
            pass
        else:
            self.fail("d.foo should be undefined now")
        # Test a nasty bug in recurse_down_subclasses()
        class A(object):
            pass
        class B(A):
            pass
        del B
        test_support.gc_collect()
        A.__setitem__ = lambda *a: None # crash
    def test_buffer_inheritance(self):
        """str/unicode subclasses must inherit the buffer interface; int
        subclasses must not grow one (SF bug #470040).
        """
        # Testing that buffer interface is inherited ...
        import binascii
        # SF bug [#470040] ParseTuple t# vs subclasses.
        class MyStr(str):
            pass
        base = 'abc'
        m = MyStr(base)
        # b2a_hex uses the buffer interface to get its argument's value, via
        # PyArg_ParseTuple 't#' code.
        self.assertEqual(binascii.b2a_hex(m), binascii.b2a_hex(base))
        # It's not clear that unicode will continue to support the character
        # buffer interface, and this test will fail if that's taken away.
        class MyUni(unicode):
            pass
        base = u'abc'
        m = MyUni(base)
        self.assertEqual(binascii.b2a_hex(m), binascii.b2a_hex(base))
        class MyInt(int):
            pass
        m = MyInt(42)
        try:
            binascii.b2a_hex(m)
            self.fail('subclass of int should not have a buffer interface')
        except TypeError:
            pass
    def test_str_of_str_subclass(self):
        """A str subclass overriding __str__/__repr__ must be honored by
        str(), repr() and both direct and implicit (print) conversion paths.
        """
        # Testing __str__ defined in subclass of str ...
        import binascii
        import cStringIO
        class octetstring(str):
            def __str__(self):
                # Render the raw bytes as their hex encoding.
                return binascii.b2a_hex(self)
            def __repr__(self):
                return self + " repr"
        o = octetstring('A')
        self.assertEqual(type(o), octetstring)
        self.assertEqual(type(str(o)), str)
        self.assertEqual(type(repr(o)), str)
        self.assertEqual(ord(o), 0x41)
        self.assertEqual(str(o), '41')
        self.assertEqual(repr(o), 'A repr')
        self.assertEqual(o.__str__(), '41')
        self.assertEqual(o.__repr__(), 'A repr')
        capture = cStringIO.StringIO()
        # Calling str() or not exercises different internal paths.
        print >> capture, o
        print >> capture, str(o)
        self.assertEqual(capture.getvalue(), '41\n41\n')
        capture.close()
def test_keyword_arguments(self):
# Testing keyword arguments to __init__, __call__...
def f(a): return a
self.assertEqual(f.__call__(a=42), 42)
a = []
list.__init__(a, sequence=[0, 1, 2])
self.assertEqual(a, [0, 1, 2])
    def test_recursive_call(self):
        """Setting a class's __call__ to one of its own instances must hit
        the recursion limit instead of crashing.
        """
        # Testing recursive __call__() by setting to instance of class...
        class A(object):
            pass
        A.__call__ = A()
        try:
            A()()
        except RuntimeError:
            pass
        else:
            self.fail("Recursion limit should have been reached for __call__()")
    def test_delete_hook(self):
        """__del__ must run when the last reference goes away, and del on
        an unsubscriptable instance must raise TypeError.
        """
        # Testing __del__ hook...
        log = []
        class C(object):
            def __del__(self):
                log.append(1)
        c = C()
        self.assertEqual(log, [])
        del c
        # gc_collect() makes the test valid on implementations without
        # immediate refcount-based finalization.
        test_support.gc_collect()
        self.assertEqual(log, [1])
        class D(object): pass
        d = D()
        try: del d[0]
        except TypeError: pass
        else: self.fail("invalid del() didn't raise TypeError")
def test_hash_inheritance(self):
# Testing hash of mutable subclasses...
class mydict(dict):
pass
d = mydict()
try:
hash(d)
except TypeError:
pass
else:
self.fail("hash() of dict subclass should fail")
class mylist(list):
pass
d = mylist()
try:
hash(d)
except TypeError:
pass
else:
self.fail("hash() of list subclass should fail")
    def test_str_operations(self):
        """Assorted str operations must raise the documented exception types
        for invalid operands/format strings, and basic formatting must work.
        """
        try: 'a' + 5
        except TypeError: pass
        else: self.fail("'' + 5 doesn't raise TypeError")
        try: ''.split('')
        except ValueError: pass
        else: self.fail("''.split('') doesn't raise ValueError")
        try: ''.join([0])
        except TypeError: pass
        else: self.fail("''.join([0]) doesn't raise TypeError")
        try: ''.rindex('5')
        except ValueError: pass
        else: self.fail("''.rindex('5') doesn't raise ValueError")
        try: '%(n)s' % None
        except TypeError: pass
        else: self.fail("'%(n)s' % None doesn't raise TypeError")
        try: '%(n' % {}
        except ValueError: pass
        else: self.fail("'%(n' % {} '' doesn't raise ValueError")
        try: '%*s' % ('abc')
        except TypeError: pass
        else: self.fail("'%*s' % ('abc') doesn't raise TypeError")
        try: '%*.*s' % ('abc', 5)
        except TypeError: pass
        else: self.fail("'%*.*s' % ('abc', 5) doesn't raise TypeError")
        try: '%s' % (1, 2)
        except TypeError: pass
        else: self.fail("'%s' % (1, 2) doesn't raise TypeError")
        try: '%' % None
        except ValueError: pass
        else: self.fail("'%' % None doesn't raise ValueError")
        self.assertEqual('534253'.isdigit(), 1)
        self.assertEqual('534253x'.isdigit(), 0)
        self.assertEqual('%c' % 5, '\x05')
        self.assertEqual('%c' % '5', '5')
def test_deepcopy_recursive(self):
# Testing deepcopy of recursive objects...
class Node:
pass
a = Node()
b = Node()
a.b = b
b.a = a
z = deepcopy(a) # This blew up before
    def test_unintialized_modules(self):
        """A module created via ModuleType.__new__ (without __init__) must
        be safe to str() and attribute-probe, and accept new attributes.
        """
        # Testing uninitialized module objects...
        from types import ModuleType as M
        m = M.__new__(M)
        str(m)
        self.assertEqual(hasattr(m, "__name__"), 0)
        self.assertEqual(hasattr(m, "__file__"), 0)
        self.assertEqual(hasattr(m, "foo"), 0)
        self.assertFalse(m.__dict__) # None or {} are both reasonable answers
        m.foo = 1
        self.assertEqual(m.__dict__, {"foo": 1})
    def test_funny_new(self):
        """__new__ may return an unrelated object (no __init__ called) or an
        instance of a different class (subclass __init__ still applies).
        """
        # Testing __new__ returning something unexpected...
        class C(object):
            def __new__(cls, arg):
                # str -> a plain list; int -> a D instance; else normal.
                if isinstance(arg, str): return [1, 2, 3]
                elif isinstance(arg, int): return object.__new__(D)
                else: return object.__new__(cls)
        class D(C):
            def __init__(self, arg):
                self.foo = arg
        self.assertEqual(C("1"), [1, 2, 3])
        self.assertEqual(D("1"), [1, 2, 3])
        d = D(None)
        self.assertEqual(d.foo, None)
        d = C(1)
        self.assertEqual(isinstance(d, D), True)
        self.assertEqual(d.foo, 1)
        d = D(1)
        self.assertEqual(isinstance(d, D), True)
        self.assertEqual(d.foo, 1)
    def test_imul_bug(self):
        """__imul__ must be called for *= with every right-operand type,
        including float, int, long, huge long, None and str (SF bug 544647).
        """
        # Testing for __imul__ problems...
        # SF bug 544647
        class C(object):
            def __imul__(self, other):
                return (self, other)
        x = C()
        y = x
        y *= 1.0
        self.assertEqual(y, (x, 1.0))
        y = x
        y *= 2
        self.assertEqual(y, (x, 2))
        y = x
        y *= 3L
        self.assertEqual(y, (x, 3L))
        y = x
        y *= 1L<<100
        self.assertEqual(y, (x, 1L<<100))
        y = x
        y *= None
        self.assertEqual(y, (x, None))
        y = x
        y *= "foo"
        self.assertEqual(y, (x, "foo"))
    def test_copy_setstate(self):
        """copy.copy() and copy.deepcopy() must route state through
        __getstate__/__setstate__ rather than copying __dict__ directly.
        """
        # Testing that copy.*copy() correctly uses __setstate__...
        import copy
        class C(object):
            def __init__(self, foo=None):
                self.foo = foo
                # __foo is name-mangled to _C__foo; only __setstate__
                # updates it after construction.
                self.__foo = foo
            def setfoo(self, foo=None):
                self.foo = foo
            def getfoo(self):
                return self.__foo
            def __getstate__(self):
                return [self.foo]
            def __setstate__(self_, lst):
                # 'self' here is the test case (closure); 'self_' the copy.
                self.assertEqual(len(lst), 1)
                self_.__foo = self_.foo = lst[0]
        a = C(42)
        a.setfoo(24)
        self.assertEqual(a.foo, 24)
        self.assertEqual(a.getfoo(), 42)
        b = copy.copy(a)
        self.assertEqual(b.foo, 24)
        self.assertEqual(b.getfoo(), 24)
        b = copy.deepcopy(a)
        self.assertEqual(b.foo, 24)
        self.assertEqual(b.getfoo(), 24)
    def test_slices(self):
        """Slice syntax, explicit slice() objects and direct
        __getitem__/__setitem__ calls must agree for str, tuple and list
        and their subclasses.
        """
        # Testing cases with slices and overridden __getitem__ ...
        # Strings
        self.assertEqual("hello"[:4], "hell")
        self.assertEqual("hello"[slice(4)], "hell")
        self.assertEqual(str.__getitem__("hello", slice(4)), "hell")
        class S(str):
            def __getitem__(self, x):
                return str.__getitem__(self, x)
        self.assertEqual(S("hello")[:4], "hell")
        self.assertEqual(S("hello")[slice(4)], "hell")
        self.assertEqual(S("hello").__getitem__(slice(4)), "hell")
        # Tuples
        self.assertEqual((1,2,3)[:2], (1,2))
        self.assertEqual((1,2,3)[slice(2)], (1,2))
        self.assertEqual(tuple.__getitem__((1,2,3), slice(2)), (1,2))
        class T(tuple):
            def __getitem__(self, x):
                return tuple.__getitem__(self, x)
        self.assertEqual(T((1,2,3))[:2], (1,2))
        self.assertEqual(T((1,2,3))[slice(2)], (1,2))
        self.assertEqual(T((1,2,3)).__getitem__(slice(2)), (1,2))
        # Lists
        self.assertEqual([1,2,3][:2], [1,2])
        self.assertEqual([1,2,3][slice(2)], [1,2])
        self.assertEqual(list.__getitem__([1,2,3], slice(2)), [1,2])
        class L(list):
            def __getitem__(self, x):
                return list.__getitem__(self, x)
        self.assertEqual(L([1,2,3])[:2], [1,2])
        self.assertEqual(L([1,2,3])[slice(2)], [1,2])
        self.assertEqual(L([1,2,3]).__getitem__(slice(2)), [1,2])
        # Now do lists and __setitem__
        a = L([1,2,3])
        a[slice(1, 3)] = [3,2]
        self.assertEqual(a, [1,3,2])
        a[slice(0, 2, 1)] = [3,1]
        self.assertEqual(a, [3,1,2])
        a.__setitem__(slice(1, 3), [2,1])
        self.assertEqual(a, [3,2,1])
        a.__setitem__(slice(0, 2, 1), [2,3])
        self.assertEqual(a, [2,3,1])
    def test_subtype_resurrection(self):
        """A __del__ that resurrects its instance (appending self to a
        class-level container) must not crash the finalization machinery.
        """
        # Testing resurrection of new-style instance...
        class C(object):
            container = []
            def __del__(self):
                # resurrect the instance
                C.container.append(self)
        c = C()
        c.attr = 42
        # The most interesting thing here is whether this blows up, due to
        # flawed GC tracking logic in typeobject.c's call_finalizer() (a 2.2.1
        # bug).
        del c
        # If that didn't blow up, it's also interesting to see whether clearing
        # the last container slot works: that will attempt to delete c again,
        # which will cause c to get appended back to the container again
        # "during" the del. (On non-CPython implementations, however, __del__
        # is typically not called again.)
        test_support.gc_collect()
        self.assertEqual(len(C.container), 1)
        del C.container[-1]
        if test_support.check_impl_detail():
            test_support.gc_collect()
            self.assertEqual(len(C.container), 1)
            self.assertEqual(C.container[-1].attr, 42)
        # Make c mortal again, so that the test framework with -l doesn't report
        # it as a leak.
        del C.__del__
    def test_slots_trash(self):
        """Deallocating a deeply nested chain of slotted objects must not
        overflow the C stack.
        """
        # Testing slot trash...
        # Deallocating deeply nested slotted trash caused stack overflows
        class trash(object):
            __slots__ = ['x']
            def __init__(self, x):
                self.x = x
        o = None
        # Build a 50000-deep chain, then drop it all at once.
        for i in xrange(50000):
            o = trash(o)
        del o
    def test_slots_multiple_inheritance(self):
        """Multiple inheritance mixing an empty-__slots__ base with a plain
        base must not crash and must keep __dict__/__weakref__ (SF bug 575229).
        """
        # SF bug 575229, multiple inheritance w/ slots dumps core
        class A(object):
            __slots__=()
        class B(object):
            pass
        class C(A,B) :
            __slots__=()
        if test_support.check_impl_detail():
            self.assertEqual(C.__basicsize__, B.__basicsize__)
        self.assertTrue(hasattr(C, '__dict__'))
        self.assertTrue(hasattr(C, '__weakref__'))
        # Attribute assignment works because B contributes a __dict__.
        C().x = 2
def test_rmul(self):
# Testing correct invocation of __rmul__...
# SF patch 592646
class C(object):
def __mul__(self, other):
return "mul"
def __rmul__(self, other):
return "rmul"
a = C()
self.assertEqual(a*2, "mul")
self.assertEqual(a*2.2, "mul")
self.assertEqual(2*a, "rmul")
self.assertEqual(2.2*a, "rmul")
def test_ipow(self):
# Testing correct invocation of __ipow__...
# [SF bug 620179]
class C(object):
def __ipow__(self, other):
pass
a = C()
a **= 2
    def test_mutable_bases(self):
        """Assigning to __bases__ must rewire method resolution for the class
        and its subclasses, and must reject deletions, empty tuples, cycles,
        duplicate bases and classic-only bases.
        """
        # Testing mutable bases...
        # stuff that should work:
        class C(object):
            pass
        class C2(object):
            def __getattribute__(self, attr):
                if attr == 'a':
                    return 2
                else:
                    return super(C2, self).__getattribute__(attr)
            def meth(self):
                return 1
        class D(C):
            pass
        class E(D):
            pass
        d = D()
        e = E()
        D.__bases__ = (C,)
        D.__bases__ = (C2,)
        # Existing instances now resolve through C2.
        self.assertEqual(d.meth(), 1)
        self.assertEqual(e.meth(), 1)
        self.assertEqual(d.a, 2)
        self.assertEqual(e.a, 2)
        self.assertEqual(C2.__subclasses__(), [D])
        try:
            del D.__bases__
        except (TypeError, AttributeError):
            pass
        else:
            self.fail("shouldn't be able to delete .__bases__")
        try:
            D.__bases__ = ()
        except TypeError, msg:
            if str(msg) == "a new-style class can't have only classic bases":
                self.fail("wrong error message for .__bases__ = ()")
        else:
            self.fail("shouldn't be able to set .__bases__ to ()")
        try:
            D.__bases__ = (D,)
        except TypeError:
            pass
        else:
            # actually, we'll have crashed by here...
            self.fail("shouldn't be able to create inheritance cycles")
        try:
            D.__bases__ = (C, C)
        except TypeError:
            pass
        else:
            self.fail("didn't detect repeated base classes")
        try:
            D.__bases__ = (E,)
        except TypeError:
            pass
        else:
            self.fail("shouldn't be able to create inheritance cycles")
        # let's throw a classic class into the mix:
        class Classic:
            def meth2(self):
                return 3
        D.__bases__ = (C, Classic)
        self.assertEqual(d.meth2(), 3)
        self.assertEqual(e.meth2(), 3)
        try:
            d.a
        except AttributeError:
            pass
        else:
            self.fail("attribute should have vanished")
        try:
            D.__bases__ = (Classic,)
        except TypeError:
            pass
        else:
            self.fail("new-style class must have a new-style base")
def test_builtin_bases(self):
# Make sure all the builtin types can have their base queried without
# segfaulting. See issue #5787.
builtin_types = [tp for tp in __builtin__.__dict__.itervalues()
if isinstance(tp, type)]
for tp in builtin_types:
object.__getattribute__(tp, "__bases__")
if tp is not object:
self.assertEqual(len(tp.__bases__), 1, tp)
class L(list):
pass
class C(object):
pass
class D(C):
pass
try:
L.__bases__ = (dict,)
except TypeError:
pass
else:
self.fail("shouldn't turn list subclass into dict subclass")
try:
list.__bases__ = (dict,)
except TypeError:
pass
else:
self.fail("shouldn't be able to assign to list.__bases__")
try:
D.__bases__ = (C, list)
except TypeError:
pass
else:
assert 0, "best_base calculation found wanting"
    def test_mutable_bases_with_failing_mro(self):
        """If recomputing a subclass MRO fails during a __bases__
        assignment, previously adjusted sibling MROs must be rolled back.
        """
        # Testing mutable bases with failing mro...
        class WorkOnce(type):
            def __new__(self, name, bases, ns):
                self.flag = 0
                return super(WorkOnce, self).__new__(WorkOnce, name, bases, ns)
            def mro(self):
                # Succeed the first time (class creation), fail afterwards.
                if self.flag > 0:
                    raise RuntimeError, "bozo"
                else:
                    self.flag += 1
                    return type.mro(self)
        class WorkAlways(type):
            def mro(self):
                # this is here to make sure that .mro()s aren't called
                # with an exception set (which was possible at one point).
                # An error message will be printed in a debug build.
                # What's a good way to test for this?
                return type.mro(self)
        class C(object):
            pass
        class C2(object):
            pass
        class D(C):
            pass
        class E(D):
            pass
        class F(D):
            __metaclass__ = WorkOnce
        class G(D):
            __metaclass__ = WorkAlways
        # Immediate subclasses have their mro's adjusted in alphabetical
        # order, so E's will get adjusted before adjusting F's fails. We
        # check here that E's gets restored.
        E_mro_before = E.__mro__
        D_mro_before = D.__mro__
        try:
            D.__bases__ = (C2,)
        except RuntimeError:
            self.assertEqual(E.__mro__, E_mro_before)
            self.assertEqual(D.__mro__, D_mro_before)
        else:
            self.fail("exception not propagated")
    def test_mutable_bases_catch_mro_conflict(self):
        """Reassigning __bases__ in a way that makes a subclass's MRO
        unlinearizable (E inherits C(A,B) and D(A,B)) must raise TypeError.
        """
        # Testing mutable bases catch mro conflict...
        class A(object):
            pass
        class B(object):
            pass
        class C(A, B):
            pass
        class D(A, B):
            pass
        class E(C, D):
            pass
        try:
            # Reordering C's bases to (B, A) conflicts with D's (A, B) in E.
            C.__bases__ = (B, A)
        except TypeError:
            pass
        else:
            self.fail("didn't catch MRO conflict")
def test_mutable_names(self):
# Testing mutable names...
class C(object):
pass
# C.__module__ could be 'test_descr' or '__main__'
mod = C.__module__
C.__name__ = 'D'
self.assertEqual((C.__module__, C.__name__), (mod, 'D'))
C.__name__ = 'D.E'
self.assertEqual((C.__module__, C.__name__), (mod, 'D.E'))
    def test_subclass_right_op(self):
        """When the right operand's class is a subclass of the left's, its
        reflected method (__r<op>__) must win over the base's __<op>__.
        """
        # Testing correct dispatch of subclass overloading __r<op>__...
        # This code tests various cases where right-dispatch of a subclass
        # should be preferred over left-dispatch of a base class.
        # Case 1: subclass of int; this tests code in abstract.c::binary_op1()
        class B(int):
            def __floordiv__(self, other):
                return "B.__floordiv__"
            def __rfloordiv__(self, other):
                return "B.__rfloordiv__"
        self.assertEqual(B(1) // 1, "B.__floordiv__")
        self.assertEqual(1 // B(1), "B.__rfloordiv__")
        # Case 2: subclass of object; this is just the baseline for case 3
        class C(object):
            def __floordiv__(self, other):
                return "C.__floordiv__"
            def __rfloordiv__(self, other):
                return "C.__rfloordiv__"
        self.assertEqual(C() // 1, "C.__floordiv__")
        self.assertEqual(1 // C(), "C.__rfloordiv__")
        # Case 3: subclass of new-style class; here it gets interesting
        class D(C):
            def __floordiv__(self, other):
                return "D.__floordiv__"
            def __rfloordiv__(self, other):
                return "D.__rfloordiv__"
        self.assertEqual(D() // C(), "D.__floordiv__")
        self.assertEqual(C() // D(), "D.__rfloordiv__")
        # Case 4: this didn't work right in 2.2.2 and 2.3a1
        class E(C):
            pass
        self.assertEqual(E.__rfloordiv__, C.__rfloordiv__)
        self.assertEqual(E() // 1, "C.__floordiv__")
        self.assertEqual(1 // E(), "C.__rfloordiv__")
        # E inherits (not overrides) the methods, so no right-dispatch here.
        self.assertEqual(E() // C(), "C.__floordiv__")
        self.assertEqual(C() // E(), "C.__floordiv__") # This one would fail
    @test_support.impl_detail("testing an internal kind of method object")
    def test_meth_class_get(self):
        """__get__ on a METH_CLASS descriptor (dict.fromkeys) must work with
        a class or instance and reject invalid owner arguments.
        """
        # Testing __get__ method of METH_CLASS C methods...
        # Full coverage of descrobject.c::classmethod_get()
        # Baseline
        arg = [1, 2, 3]
        res = {1: None, 2: None, 3: None}
        self.assertEqual(dict.fromkeys(arg), res)
        self.assertEqual({}.fromkeys(arg), res)
        # Now get the descriptor
        descr = dict.__dict__["fromkeys"]
        # More baseline using the descriptor directly
        self.assertEqual(descr.__get__(None, dict)(arg), res)
        self.assertEqual(descr.__get__({})(arg), res)
        # Now check various error cases
        try:
            descr.__get__(None, None)
        except TypeError:
            pass
        else:
            self.fail("shouldn't have allowed descr.__get__(None, None)")
        try:
            descr.__get__(42)
        except TypeError:
            pass
        else:
            self.fail("shouldn't have allowed descr.__get__(42)")
        try:
            descr.__get__(None, 42)
        except TypeError:
            pass
        else:
            self.fail("shouldn't have allowed descr.__get__(None, 42)")
        try:
            # int is a type, but not dict or a dict subclass.
            descr.__get__(None, int)
        except TypeError:
            pass
        else:
            self.fail("shouldn't have allowed descr.__get__(None, int)")
    def test_isinst_isclass(self):
        """isinstance() must see through a __getattribute__-based proxy for
        classic and new-style classes and their subclasses.
        """
        # Testing proxy isinstance() and isclass()...
        class Proxy(object):
            def __init__(self, obj):
                self.__obj = obj
            def __getattribute__(self, name):
                # Only the mangled attribute is served locally; everything
                # else is forwarded to the wrapped object.
                if name.startswith("_Proxy__"):
                    return object.__getattribute__(self, name)
                else:
                    return getattr(self.__obj, name)
        # Test with a classic class
        class C:
            pass
        a = C()
        pa = Proxy(a)
        self.assertIsInstance(a, C)  # Baseline
        self.assertIsInstance(pa, C) # Test
        # Test with a classic subclass
        class D(C):
            pass
        a = D()
        pa = Proxy(a)
        self.assertIsInstance(a, C)  # Baseline
        self.assertIsInstance(pa, C) # Test
        # Test with a new-style class
        class C(object):
            pass
        a = C()
        pa = Proxy(a)
        self.assertIsInstance(a, C)  # Baseline
        self.assertIsInstance(pa, C) # Test
        # Test with a new-style subclass
        class D(C):
            pass
        a = D()
        pa = Proxy(a)
        self.assertIsInstance(a, C)  # Baseline
        self.assertIsInstance(pa, C) # Test
    def test_proxy_super(self):
        """super() inside a method must work when the method is invoked
        unbound on a proxy object that forwards attribute access.
        """
        # Testing super() for a proxy object...
        class Proxy(object):
            def __init__(self, obj):
                self.__obj = obj
            def __getattribute__(self, name):
                if name.startswith("_Proxy__"):
                    return object.__getattribute__(self, name)
                else:
                    return getattr(self.__obj, name)
        class B(object):
            def f(self):
                return "B.f"
        class C(B):
            def f(self):
                return super(C, self).f() + "->C.f"
        obj = C()
        p = Proxy(obj)
        # Call C.f unbound with the proxy standing in for self.
        self.assertEqual(C.__dict__["f"](p), "B.f->C.f")
    def test_carloverre(self):
        """object.__setattr__/__delattr__ must refuse to modify attributes
        of builtin types (the "Carlo Verre" hack).
        """
        # Testing prohibition of Carlo Verre's hack...
        try:
            object.__setattr__(str, "foo", 42)
        except TypeError:
            pass
        else:
            self.fail("Carlo Verre __setattr__ succeeded!")
        try:
            object.__delattr__(str, "lower")
        except TypeError:
            pass
        else:
            self.fail("Carlo Verre __delattr__ succeeded!")
    def test_weakref_segfault(self):
        """Dereferencing a weakref from a __del__ that runs while its
        referent is being torn down must not segfault (SF 742911).
        """
        # Testing weakref segfault...
        # SF 742911
        import weakref
        class Provoker:
            def __init__(self, referrent):
                self.ref = weakref.ref(referrent)
            def __del__(self):
                # Fires during teardown of the referent's attribute dict.
                x = self.ref()
        class Oops(object):
            pass
        o = Oops()
        o.whatever = Provoker(o)
        del o
    def test_wrapper_segfault(self):
        """Deallocating a chain of a million nested __call__ method-wrappers
        must not overflow the stack (SF 927248).
        """
        # SF 927248: deeply nested wrappers could cause stack overflow
        f = lambda:None
        for i in xrange(1000000):
            f = f.__call__
        f = None
    def test_file_fault(self):
        """print must cope with a sys.stdout whose attribute access raises
        (and swaps stdout back) instead of crashing.
        """
        # Testing sys.stdout is changed in getattr...
        test_stdout = sys.stdout
        class StdoutGuard:
            def __getattr__(self, attr):
                # Restore the real stdout, then fail the attribute lookup.
                sys.stdout = sys.__stdout__
                raise RuntimeError("Premature access to sys.stdout.%s" % attr)
        sys.stdout = StdoutGuard()
        try:
            print "Oops!"
        except RuntimeError:
            pass
        finally:
            sys.stdout = test_stdout
    def test_vicious_descriptor_nonsense(self):
        """An instance-dict key whose __eq__ deletes the class attribute
        being looked up must not crash attribute access.
        """
        # Testing vicious_descriptor_nonsense...
        # A potential segfault spotted by Thomas Wouters in mail to
        # python-dev 2003-04-17, turned into an example & fixed by Michael
        # Hudson just less than four months later...
        class Evil(object):
            def __hash__(self):
                # Collide with the string key 'attr' so __eq__ is consulted.
                return hash('attr')
            def __eq__(self, other):
                del C.attr
                return 0
        class Descr(object):
            def __get__(self, ob, type=None):
                return 1
        class C(object):
            attr = Descr()
        c = C()
        c.__dict__[Evil()] = 0
        self.assertEqual(c.attr, 1)
        # this makes a crash more likely:
        test_support.gc_collect()
        self.assertEqual(hasattr(c, 'attr'), False)
    def test_init(self):
        """Instantiating a class whose __init__ returns a non-None value
        must raise TypeError (SF 1155938).
        """
        # SF 1155938
        class Foo(object):
            def __init__(self):
                return 10
        try:
            Foo()
        except TypeError:
            pass
        else:
            self.fail("did not test __init__() for None return")
    def test_method_wrapper(self):
        """method-wrapper objects must support equality, reflection
        attributes (__name__, __self__, __doc__) and be hashable only when
        their underlying self is hashable.
        """
        # Testing method-wrapper objects...
        # <type 'method-wrapper'> did not support any reflection before 2.5
        l = []
        self.assertEqual(l.__add__, l.__add__)
        self.assertEqual(l.__add__, [].__add__)
        self.assertTrue(l.__add__ != [5].__add__)
        self.assertTrue(l.__add__ != l.__mul__)
        self.assertTrue(l.__add__.__name__ == '__add__')
        self.assertTrue(l.__add__.__self__ is l)
        if hasattr(l.__add__, '__objclass__'):   # CPython
            self.assertTrue(l.__add__.__objclass__ is list)
        else:                                    # PyPy
            self.assertTrue(l.__add__.im_class is list)
        self.assertEqual(l.__add__.__doc__, list.__add__.__doc__)
        # Wrapper of an unhashable list is itself unhashable ...
        try:
            hash(l.__add__)
        except TypeError:
            pass
        else:
            self.fail("no TypeError from hash([].__add__)")
        # ... but a wrapper of a hashable tuple is hashable.
        t = ()
        t += (7,)
        self.assertEqual(t.__add__, (7,).__add__)
        self.assertEqual(hash(t.__add__), hash((7,).__add__))
    def test_not_implemented(self):
        """Every binary special method that returns NotImplemented must lead
        to TypeError for the expression, its reflected form and (where one
        exists) its in-place form, for both classic and new-style classes.
        """
        # Testing NotImplemented...
        # all binary methods should be able to return a NotImplemented
        import operator
        def specialmethod(self, other):
            return NotImplemented
        def check(expr, x, y):
            # Evaluate expr with x/y bound; anything but TypeError is a bug.
            try:
                exec expr in {'x': x, 'y': y, 'operator': operator}
            except TypeError:
                pass
            else:
                self.fail("no TypeError from %r" % (expr,))
        N1 = sys.maxint + 1L    # might trigger OverflowErrors instead of
                                # TypeErrors
        N2 = sys.maxint         # if sizeof(int) < sizeof(long), might trigger
                                # ValueErrors instead of TypeErrors
        for metaclass in [type, types.ClassType]:
            for name, expr, iexpr in [
                    ('__add__',      'x + y',                   'x += y'),
                    ('__sub__',      'x - y',                   'x -= y'),
                    ('__mul__',      'x * y',                   'x *= y'),
                    ('__truediv__',  'operator.truediv(x, y)',  None),
                    ('__floordiv__', 'operator.floordiv(x, y)', None),
                    ('__div__',      'x / y',                   'x /= y'),
                    ('__mod__',      'x % y',                   'x %= y'),
                    ('__divmod__',   'divmod(x, y)',            None),
                    ('__pow__',      'x ** y',                  'x **= y'),
                    ('__lshift__',   'x << y',                  'x <<= y'),
                    ('__rshift__',   'x >> y',                  'x >>= y'),
                    ('__and__',      'x & y',                   'x &= y'),
                    ('__or__',       'x | y',                   'x |= y'),
                    ('__xor__',      'x ^ y',                   'x ^= y'),
                    ('__coerce__',   'coerce(x, y)',            None)]:
                if name == '__coerce__':
                    rname = name
                else:
                    rname = '__r' + name[2:]
                # A defines only the left method, B only the reflected one.
                A = metaclass('A', (), {name: specialmethod})
                B = metaclass('B', (), {rname: specialmethod})
                a = A()
                b = B()
                check(expr, a, a)
                check(expr, a, b)
                check(expr, b, a)
                check(expr, b, b)
                check(expr, a, N1)
                check(expr, a, N2)
                check(expr, N1, b)
                check(expr, N2, b)
                if iexpr:
                    check(iexpr, a, a)
                    check(iexpr, a, b)
                    check(iexpr, b, a)
                    check(iexpr, b, b)
                    check(iexpr, a, N1)
                    check(iexpr, a, N2)
                    iname = '__i' + name[2:]
                    C = metaclass('C', (), {iname: specialmethod})
                    c = C()
                    check(iexpr, c, a)
                    check(iexpr, c, b)
                    check(iexpr, c, N1)
                    check(iexpr, c, N2)
    def test_assign_slice(self):
        """Slice assignment must use sq_ass_slice: __setslice__ alone
        (without __getslice__) must be enough for c[1:2] = v.
        """
        # ceval.c's assign_slice used to check for
        # tp->tp_as_sequence->sq_slice instead of
        # tp->tp_as_sequence->sq_ass_slice
        class C(object):
            def __setslice__(self, start, stop, value):
                self.value = value
        c = C()
        c[1:2] = 3
        self.assertEqual(c.value, 3)
    def test_set_and_no_get(self):
        """A data descriptor with __set__ but no __get__ must let reads fall
        through to the instance (or, on a metaclass, to the class) dict.
        """
        # See
        # http://mail.python.org/pipermail/python-dev/2010-January/095637.html
        class Descr(object):
            def __init__(self, name):
                self.name = name
            def __set__(self, obj, value):
                obj.__dict__[self.name] = value
        descr = Descr("a")
        class X(object):
            a = descr
        x = X()
        # No __get__: reading x.a yields the descriptor itself at first...
        self.assertIs(x.a, descr)
        x.a = 42
        # ...and the instance-dict value once __set__ has stored one.
        self.assertEqual(x.a, 42)
        # Also check type_getattro for correctness.
        class Meta(type):
            pass
        class X(object):
            __metaclass__ = Meta
        X.a = 42
        Meta.a = Descr("a")
        self.assertEqual(X.a, 42)
    def test_getattr_hooks(self):
        """__getattribute__/__getattr__ installed as descriptors must be
        invoked the documented number of times, and a __getattribute__ that
        deletes __getattr__ mid-lookup must not segfault (issue 4230).
        """
        # issue 4230
        class Descriptor(object):
            counter = 0
            def __get__(self, obj, objtype=None):
                def getter(name):
                    self.counter += 1
                    raise AttributeError(name)
                return getter
        descr = Descriptor()
        class A(object):
            __getattribute__ = descr
        class B(object):
            __getattr__ = descr
        class C(object):
            __getattribute__ = descr
            __getattr__ = descr
        self.assertRaises(AttributeError, getattr, A(), "attr")
        self.assertEqual(descr.counter, 1)
        self.assertRaises(AttributeError, getattr, B(), "attr")
        self.assertEqual(descr.counter, 2)
        # C invokes both hooks, so the counter advances by two.
        self.assertRaises(AttributeError, getattr, C(), "attr")
        self.assertEqual(descr.counter, 4)
        import gc
        class EvilGetattribute(object):
            # This used to segfault
            def __getattr__(self, name):
                raise AttributeError(name)
            def __getattribute__(self, name):
                # Remove the fallback hook and force collections while the
                # lookup machinery is still holding references.
                del EvilGetattribute.__getattr__
                for i in range(5):
                    gc.collect()
                raise AttributeError(name)
        self.assertRaises(AttributeError, getattr, EvilGetattribute(), "attr")
    def test_abstractmethods(self):
        """type (and metaclasses) must pretend not to have
        __abstractmethods__, and deleting it from a class must fail.
        """
        # type pretends not to have __abstractmethods__.
        self.assertRaises(AttributeError, getattr, type, "__abstractmethods__")
        class meta(type):
            pass
        self.assertRaises(AttributeError, getattr, meta, "__abstractmethods__")
        class X(object):
            pass
        with self.assertRaises(AttributeError):
            del X.__abstractmethods__
    def test_proxy_call(self):
        """An object lying about __class__ fools isinstance() but must not
        be accepted as `self` by the real class's method descriptors.
        """
        class FakeStr(object):
            __class__ = str
        fake_str = FakeStr()
        # isinstance() reads __class__ on new style classes
        self.assertTrue(isinstance(fake_str, str))
        # call a method descriptor
        with self.assertRaises(TypeError):
            str.split(fake_str)
        # call a slot wrapper descriptor
        try:
            r = str.__add__(fake_str, "abc")
        except TypeError:
            pass
        else:
            # Some implementations return NotImplemented instead of raising.
            self.assertEqual(r, NotImplemented)
    def test_repr_as_str(self):
        """Rebinding __repr__ to the inherited __str__ (which delegates back
        to repr) must raise RuntimeError, not crash or loop forever
        (issue #11603).
        """
        # Issue #11603: crash or infinite loop when rebinding __str__ as
        # __repr__.
        class Foo(object):
            pass
        Foo.__repr__ = Foo.__str__
        foo = Foo()
        # Behavior will change in CPython 2.7.4.
        # PyPy already does the right thing here.
        self.assertRaises(RuntimeError, str, foo)
        self.assertRaises(RuntimeError, repr, foo)
class DictProxyTests(unittest.TestCase):
    """Tests for the dict_proxy object exposed as a class's __dict__."""

    def setUp(self):
        # A minimal class with one method; its __dict__ is the proxy
        # under test.
        class C(object):
            def meth(self):
                pass
        self.C = C

    def test_repr(self):
        # repr() format of dict_proxy is a CPython implementation detail.
        if test_support.check_impl_detail():
            self.assertIn('dict_proxy({', repr(vars(self.C)))
            self.assertIn("'meth':", repr(vars(self.C)))

    def test_iter_keys(self):
        # Testing dict-proxy iterkeys...
        keys = [ key for key in self.C.__dict__.iterkeys() ]
        keys.sort()
        self.assertEqual(keys, ['__dict__', '__doc__', '__module__',
                                '__weakref__', 'meth'])

    def test_iter_values(self):
        # Testing dict-proxy itervalues...
        values = [ values for values in self.C.__dict__.itervalues() ]
        self.assertEqual(len(values), 5)

    def test_iter_items(self):
        # Testing dict-proxy iteritems...
        keys = [ key for (key, value) in self.C.__dict__.iteritems() ]
        keys.sort()
        self.assertEqual(keys, ['__dict__', '__doc__', '__module__',
                                '__weakref__', 'meth'])

    def test_dict_type_with_metaclass(self):
        # Testing type of __dict__ when __metaclass__ set...
        class B(object):
            pass
        class M(type):
            pass
        class C:
            # In 2.3a1, C.__dict__ was a real dict rather than a dict proxy
            __metaclass__ = M
        self.assertEqual(type(C.__dict__), type(B.__dict__))
class PTypesLongInitTest(unittest.TestCase):
    # This is in its own TestCase so that it can be run before any other tests.

    def test_pytype_long_ready(self):
        """Exercise two crashes that only reproduced before PyType_Ready()
        ran for PyLong_Type / tuple (SF bug 551412)."""
        # Testing SF bug 551412 ...
        # This dumps core when SF bug 551412 isn't fixed --
        # but only when test_descr.py is run separately.
        # (That can't be helped -- as soon as PyType_Ready()
        # is called for PyLong_Type, the bug is gone.)
        class UserLong(object):
            def __pow__(self, *args):
                pass
        try:
            pow(0L, UserLong(), 0L)
        except:
            # Any exception is fine; the point is not to crash.
            pass
        # Another segfault only when run early
        # (before PyType_Ready(tuple) is called)
        type.mro(tuple)
def test_main():
    """Run every local test case, silencing the deprecation warnings these
    tests intentionally trigger. PTypesLongInitTest must run first (see its
    class comment)."""
    deprecations = [(r'complex divmod\(\), // and % are deprecated$',
                     DeprecationWarning)]
    if sys.py3kwarning:
        # Extra warnings emitted only under -3.
        deprecations += [
            ("classic (int|long) division", DeprecationWarning),
            ("coerce.. not supported", DeprecationWarning),
            (".+__(get|set|del)slice__ has been removed", DeprecationWarning)]
    with test_support.check_warnings(*deprecations):
        # Run all local test cases, with PTypesLongInitTest first.
        test_support.run_unittest(PTypesLongInitTest, OperatorsTest,
                                  ClassPropertiesAndMethods, DictProxyTests)
# Allow running this test module directly as a script.
if __name__ == "__main__":
    test_main()
|
bsd-3-clause
|
ClaudiaSaxer/PlasoScaffolder
|
src/plasoscaffolder/model/sql_query_model.py
|
1
|
1280
|
# -*- coding: utf-8 -*-
"""The SQL query model class."""
from plasoscaffolder.model import sql_query_column_model_data
from plasoscaffolder.model import sql_query_column_model_timestamp
class SQLQueryModel(object):
  """A SQL query model.

  Bundles a SQL query string with the metadata derived from it: its name,
  its data and timestamp columns, whether the generated event needs
  customizing, and how many events the query produces.
  """

  def __init__(
      self, query: str, name: str,
      columns: [sql_query_column_model_data.SQLColumnModelData],
      timestamp_columns: [
        sql_query_column_model_timestamp.SQLColumnModelTimestamp],
      needs_customizing: bool,
      amount_events: int
  ):
    """Initializes the SQL query model.

    Args:
      query (str): the SQL query.
      name (str): the name of the query.
      columns ([sql_query_column_model_data.SQLColumnModelData]): the data
          columns of the query.
      timestamp_columns ([
          sql_query_column_model_timestamp.SQLColumnModelTimestamp]): the
          columns holding timestamp events.
      needs_customizing (bool): whether the event for the query needs
          customizing.
      amount_events (int): the number of events the query yields.
    """
    super().__init__()
    # Plain attribute storage; no validation is performed here.
    self.amount_events = amount_events
    self.columns = columns
    self.name = name
    self.needs_customizing = needs_customizing
    self.query = query
    self.timestamp_columns = timestamp_columns
|
apache-2.0
|
ChanduERP/odoo
|
addons/l10n_cr/__openerp__.py
|
82
|
3145
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# __openerp__.py
# l10n_cr_account
# First author: Carlos Vásquez <carlos.vasquez@clearcorp.co.cr> (ClearCorp S.A.)
# Copyright (c) 2010-TODAY ClearCorp S.A. (http://clearcorp.co.cr). All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those of the
# authors and should not be interpreted as representing official policies, either expressed
# or implied, of ClearCorp S.A..
#
##############################################################################
{
'name': 'Costa Rica - Accounting',
'version': '0.1',
'url': 'https://github.com/CLEARCORP/odoo-costa-rica',
'author': 'ClearCorp S.A.',
'website': 'http://clearcorp.co.cr',
'category': 'Localization/Account Charts',
'description': """
Chart of accounts for Costa Rica.
=================================
Includes:
---------
* account.type
* account.account.template
* account.tax.template
* account.tax.code.template
* account.chart.template
Everything is in English with Spanish translation. Further translations are welcome,
please go to http://translations.launchpad.net/openerp-costa-rica.
""",
'depends': ['account', 'account_chart', 'base'],
'demo': [],
'data': [
'l10n_cr_base_data.xml',
'data/account_account_type.xml',
'data/account_account_template.xml',
'data/account_tax_code_template.xml',
'data/account_chart_template.xml',
'data/account_tax_template.xml',
'l10n_wizard.xml',
],
'license': 'Other OSI approved licence',
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
SoftwareDefinedBuildings/smap
|
python/smap/drivers/pge.py
|
6
|
4629
|
"""
Copyright (c) 2011, 2012, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
"""
##
## PG&E Green button data downloader
##
## @author Stephen Dawson-Haggerty <stevedh@eecs.berkeley.edu>
##
## Based on https://gist.github.com/3131346 and Andrew Krioukov's
## sMAPv1 PG&E driver for the old format of data.
##
# [/1625chestnut]
# type = smap.drivers.xml.XMLDriver
# Uri = python://smap.drivers.pge.update
# Xslt = ../../xslt/greenbutton.xsl
# Period = 86400
# Username = <username>
# Password = <password>
## optional
# Type = electric
# From = 1/1/2012
# To = 2/1/2012
import os
import errno
import re
import mechanize
import datetime
import zipfile
from lxml import etree
from cStringIO import StringIO
from smap.drivers import xml
# User-Agent header value sent with every request so the site serves the
# same pages it would to a regular desktop Firefox browser.
agent = "User-Agent: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.6; en-US; " \
    "rv:1.9.2.3) Gecko/20100401 Firefox/3.6.3"
def select_form(forms, name):
    """Return the single form in *forms* whose ``name`` attribute equals *name*.

    Args:
      forms: iterable of mechanize form objects (anything with a ``name``).
      name: the form name to look for.

    Returns:
      The unique matching form.

    Raises:
      ValueError: if no form matches, or if more than one form matches.
    """
    form = None
    for f in forms:
        if f.name == name:
            # Use identity comparison with None (PEP 8) instead of `!= None`.
            if form is not None:
                raise ValueError("Error: multiple forms found with name = " + name)
            form = f
    if form is None:
        raise ValueError("Error: no forms found with name = " + name)
    return form
def update(opts):
br = mechanize.Browser()
# not sure if these are all still necessary
br.set_debug_http(False)
br.set_handle_equiv(False) # Otherwise, the loading page goes into an inf loop
br.set_handle_robots(False)
br.set_handle_referer(False)
br.set_handle_refresh(False)
def request(req):
req.add_header("User-Agent", agent)
return br.open(req)
print "Get login page"
req = mechanize.Request("https://www.pge.com/myenergyweb/appmanager/pge/customer")
req.add_header("User-Agent", agent)
br.open(req)
print "Logging in"
f = select_form(br.forms(), 'login')
f['USER'] = opts.get('Username')
f['PASSWORD'] = opts.get('Password')
request(f.click())
print "Continue to opower"
request(br.click_link(text="My Usage"))
print "Continue pg&e-side sso"
f = br.forms().next() # get the first form
request(f.click())
print "Continue the opower sso"
f = br.forms().next()
request(f.click())
print "Downloading all data"
request(br.click_link(url_regex=re.compile(".*export-dialog$")))
f = br.forms().next()
f.find_control("exportFormat").items[-1].selected = True
# chose the time range to download
if not ('From' in opts and 'To' in opts):
# real time data apparently isn't available
now = datetime.datetime.now() - datetime.timedelta(days=2)
then = now - datetime.timedelta(days=1)
f['from'] = "%i/%i/%i" % (now.month, now.day, now.year)
f['to'] = "%i/%i/%i" % (now.month, now.day, now.year)
else:
f['from'] = opts['From']
f['to'] = opts['To']
resp = request(f.click())
# make a zipfile
data = zipfile.ZipFile(StringIO(resp.read()))
# and extract the contents
rv = {}
for name in data.namelist():
if name.endswith("/"): continue
print "extracting", name
# with open(os.path.join(outdir, name), 'wb') as fp:
# fp.write(data.read(name))
# rv[name] = etree.XML(data.read(name))
if opts.get('Type', 'electric') in name:
data = data.read(name)
return data
return None
|
bsd-2-clause
|
Eldinnie/ptbtest
|
examples/test_echobot2.py
|
1
|
3680
|
from __future__ import absolute_import
import unittest
from telegram.ext import CommandHandler
from telegram.ext import Filters
from telegram.ext import MessageHandler
from telegram.ext import Updater
from ptbtest import ChatGenerator
from ptbtest import MessageGenerator
from ptbtest import Mockbot
from ptbtest import UserGenerator
"""
This is an example to show how the ptbtest suite can be used.
This example follows the echobot2 example at:
https://github.com/python-telegram-bot/python-telegram-bot/blob/master/examples/echobot2.py
"""
class TestEchobot2(unittest.TestCase):
    def setUp(self):
        # A mocked bot, generators for synthetic users/chats/messages, and
        # an updater wired to the mock.
        self.bot = Mockbot()
        self.ug = UserGenerator()
        self.cg = ChatGenerator()
        self.mg = MessageGenerator(self.bot)
        self.updater = Updater(bot=self.bot)

    def test_help(self):
        def help(bot, update):
            update.message.reply_text('Help!')
        # Register the handler and start consuming updates.
        self.updater.dispatcher.add_handler(CommandHandler("help", help))
        self.updater.start_polling()
        # Feed a "/help" message from a randomly-generated sender.
        self.bot.insertUpdate(self.mg.get_message(text="/help"))
        # Exactly one outbound bot action should have been recorded.
        self.assertEqual(len(self.bot.sent_messages), 1)
        reply = self.bot.sent_messages[0]
        self.assertEqual(reply['method'], "sendMessage")
        self.assertEqual(reply['text'], "Help!")
        # Always stop the updater so the test run does not hang.
        self.updater.stop()

    def test_start(self):
        def start(bot, update):
            update.message.reply_text('Hi!')
        self.updater.dispatcher.add_handler(CommandHandler("start", start))
        self.updater.start_polling()
        # This time pin down the sender and chat explicitly.
        sender = self.ug.get_user(first_name="Test", last_name="The Bot")
        room = self.cg.get_chat(user=sender)
        self.bot.insertUpdate(
            self.mg.get_message(user=sender, chat=room, text="/start"))
        self.assertEqual(len(self.bot.sent_messages), 1)
        reply = self.bot.sent_messages[0]
        self.assertEqual(reply['method'], "sendMessage")
        self.assertEqual(reply['text'], "Hi!")
        self.updater.stop()

    def test_echo(self):
        def echo(bot, update):
            update.message.reply_text(update.message.text)
        self.updater.dispatcher.add_handler(MessageHandler(Filters.text, echo))
        self.updater.start_polling()
        # Two inbound text messages should produce two echoes, in order.
        self.bot.insertUpdate(self.mg.get_message(text="first message"))
        self.bot.insertUpdate(self.mg.get_message(text="second message"))
        self.assertEqual(len(self.bot.sent_messages), 2)
        outbound = self.bot.sent_messages
        self.assertEqual(outbound[0]['method'], "sendMessage")
        self.assertEqual(outbound[0]['text'], "first message")
        self.assertEqual(outbound[1]['text'], "second message")
        self.updater.stop()
# Allow running this example test module directly.
if __name__ == '__main__':
    unittest.main()
|
gpl-3.0
|
Eaglemania/TOL
|
pyglet/font/__init__.py
|
6
|
23520
|
# ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Load fonts and render text.
This is a fairly-low level interface to text rendering. Obtain a font using
`load`::
from pyglet import font
arial = font.load('Arial', 14, bold=True, italic=False)
pyglet will load any system-installed fonts. You can add additional fonts
(for example, from your program resources) using `add_file` or
`add_directory`.
Obtain a list of `Glyph` objects for a string of text using the `Font`
object::
text = 'Hello, world!'
glyphs = arial.get_glyphs(text)
The most efficient way to render these glyphs is with a `GlyphString`::
glyph_string = GlyphString(text, glyphs)
glyph_string.draw()
There are also a variety of methods in both `Font` and
`GlyphString` to facilitate word-wrapping.
A convenient way to render a string of text is with a `Text`::
text = Text(font, text)
text.draw()
See the `pyglet.font.base` module for documentation on the base classes used
by this package.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import sys
import os
import math
import weakref
import pyglet
from pyglet.gl import *
from pyglet import gl
from pyglet import image
from pyglet import window
class GlyphString(object):
    '''An immutable string of glyphs that can be rendered quickly.

    This class is ideal for quickly rendering single or multi-line strings
    of text that use the same font.  To wrap text using a glyph string,
    call `get_break_index` to find the optimal breakpoint for each line,
    then repeatedly call `draw` for each breakpoint.

    :deprecated: Use `pyglet.text.layout` classes.
    '''

    def __init__(self, text, glyphs, x=0, y=0):
        '''Create a glyph string.

        The `text` string is used to determine valid breakpoints; all glyphs
        must have already been determined using
        `pyglet.font.base.Font.get_glyphs`.  The string
        will be positioned with the baseline of the left-most glyph at the
        given coordinates.

        :Parameters:
            `text` : str or unicode
                String to represent.
            `glyphs` : list of `pyglet.font.base.Glyph`
                Glyphs representing `text`.
            `x` : float
                X coordinate of the left-side bearing of the left-most glyph.
            `y` : float
                Y coordinate of the baseline.

        '''
        # Create an interleaved array in GL_T2F_V3F format and determine
        # state changes required.
        lst = []
        texture = None
        self.text = text
        # Each state is (first glyph index, glyph count, texture): a run of
        # consecutive glyphs sharing one texture, drawable with one bind.
        self.states = []
        self.cumulative_advance = [] # for fast post-string breaking
        state_from = 0
        state_length = 0
        for i, glyph in enumerate(glyphs):
            if glyph.owner != texture:
                # Texture changed: close the previous run (if any) and
                # start a new one at this glyph.
                if state_length:
                    self.states.append((state_from, state_length, texture))
                texture = glyph.owner
                state_from = i
                state_length = 0
            state_length += 1
            t = glyph.tex_coords
            # Four vertices per glyph quad, each with 4 texture-coordinate
            # and 4 position components (matches the GL_T4F_V4F layout used
            # by draw()).
            lst += [t[0], t[1], t[2], 1.,
                    x + glyph.vertices[0], y + glyph.vertices[1], 0., 1.,
                    t[3], t[4], t[5], 1.,
                    x + glyph.vertices[2], y + glyph.vertices[1], 0., 1.,
                    t[6], t[7], t[8], 1.,
                    x + glyph.vertices[2], y + glyph.vertices[3], 0., 1.,
                    t[9], t[10], t[11], 1.,
                    x + glyph.vertices[0], y + glyph.vertices[3], 0., 1.]
            x += glyph.advance
            self.cumulative_advance.append(x)
        self.states.append((state_from, state_length, texture))

        self.array = (c_float * len(lst))(*lst)
        self.width = x

    def get_break_index(self, from_index, width):
        '''Find a breakpoint within the text for a given width.

        Returns a valid breakpoint after `from_index` so that the text
        between `from_index` and the breakpoint fits within `width` pixels.

        This method uses precomputed cumulative glyph widths to give quick
        answer, and so is much faster than
        `pyglet.font.base.Font.get_glyphs_for_width`.

        :Parameters:
            `from_index` : int
                Index of text to begin at, or 0 for the beginning of the
                string.
            `width` : float
                Maximum width to use.

        :rtype: int
        :return: the index of text which will be used as the breakpoint, or
            `from_index` if there is no valid breakpoint.
        '''
        to_index = from_index
        if from_index >= len(self.text):
            return from_index
        if from_index:
            # Shift the budget so cumulative advances (measured from the
            # string start) can be compared against it directly.
            width += self.cumulative_advance[from_index-1]
        for i, (c, w) in enumerate(
                zip(self.text[from_index:],
                    self.cumulative_advance[from_index:])):
            if c in u'\u0020\u200b':
                # Space or zero-width space: remember as a break candidate.
                to_index = i + from_index + 1
            if c == '\n':
                # A hard newline always breaks immediately.
                return i + from_index + 1
            if w > width:
                # Exceeded the budget: break at the last candidate found.
                return to_index
        return to_index

    def get_subwidth(self, from_index, to_index):
        '''Return the width of a slice of this string.

        :Parameters:
            `from_index` : int
                The start index of the string to measure.
            `to_index` : int
                The end index (exclusive) of the string to measure.

        :rtype: float
        '''
        if to_index <= from_index:
            return 0
        # Width of [from_index, to_index) is the difference of cumulative
        # advances at the two boundaries.
        width = self.cumulative_advance[to_index-1]
        if from_index:
            width -= self.cumulative_advance[from_index-1]
        return width

    def draw(self, from_index=0, to_index=None):
        '''Draw a region of the glyph string.

        Assumes texture state is enabled.  To enable the texture state::

            from pyglet.gl import *
            glEnable(GL_TEXTURE_2D)

        :Parameters:
            `from_index` : int
                Start index of text to render.
            `to_index` : int
                End index (exclusive) of text to render.

        '''
        if from_index >= len(self.text) or \
           from_index == to_index or \
           not self.text:
            return

        # XXX Safe to assume all required textures will use same blend state I
        # think.  (otherwise move this into loop)
        self.states[0][2].apply_blend_state()

        if from_index:
            # Shift the whole array left so the first requested glyph lands
            # at the origin; undone by glPopMatrix below.
            glPushMatrix()
            glTranslatef(-self.cumulative_advance[from_index-1], 0, 0)
        if to_index is None:
            to_index = len(self.text)

        glPushClientAttrib(GL_CLIENT_VERTEX_ARRAY_BIT)
        glInterleavedArrays(GL_T4F_V4F, 0, self.array)
        for state_from, state_length, texture in self.states:
            # Skip runs entirely before the requested region, then clamp
            # the first visible run to the region boundaries.
            if state_from + state_length < from_index:
                continue
            state_from = max(state_from, from_index)
            state_length = min(state_length, to_index - state_from)
            if state_length <= 0:
                break
            glBindTexture(GL_TEXTURE_2D, texture.id)
            # 4 vertices per glyph quad.
            glDrawArrays(GL_QUADS, state_from * 4, state_length * 4)
        glPopClientAttrib()

        if from_index:
            glPopMatrix()
class _TextZGroup(pyglet.graphics.Group):
    # Graphics group that renders its members translated by `z` along the
    # z axis, restoring the transform afterwards.
    z = 0

    def set_state(self):
        glTranslatef(0, 0, self.z)

    def unset_state(self):
        glTranslatef(0, 0, -self.z)
class Text(object):
    '''Simple displayable text.

    This is a convenience class for rendering strings of text.  It takes
    care of caching the vertices so the text can be rendered every frame with
    little performance penalty.

    Text can be word-wrapped by specifying a `width` to wrap into.  If the
    width is not specified, it gives the width of the text as laid out.

    :Ivariables:
        `x` : int
            X coordinate of the text
        `y` : int
            Y coordinate of the text

    :deprecated: Use `pyglet.text.Label`.
    '''

    # Alignment constants

    #: Align the left edge of the text to the given X coordinate.
    LEFT = 'left'
    #: Align the horizontal center of the text to the given X coordinate.
    CENTER = 'center'
    #: Align the right edge of the text to the given X coordinate.
    RIGHT = 'right'
    #: Align the bottom of the descender of the final line of text with the
    #: given Y coordinate.
    BOTTOM = 'bottom'
    #: Align the baseline of the first line of text with the given Y
    #: coordinate.
    BASELINE = 'baseline'
    #: Align the top of the ascender of the first line of text with the given
    #: Y coordinate.
    TOP = 'top'

    # None: no multiline
    # 'width': multiline, wrapped to width
    # 'multiline': multiline, no wrap
    _wrap = None

    # Internal bookkeeping for wrap only.
    _width = None

    def __init__(self, font, text='', x=0, y=0, z=0, color=(1,1,1,1),
                 width=None, halign=LEFT, valign=BASELINE):
        '''Create displayable text.

        :Parameters:
            `font` : `Font`
                Font to render the text in.
            `text` : str
                Initial string to render.
            `x` : float
                X coordinate of the left edge of the text.
            `y` : float
                Y coordinate of the baseline of the text.  If the text is
                word-wrapped, this refers to the first line of text.
            `z` : float
                Z coordinate of the text plane.
            `color` : 4-tuple of float
                Color to render the text in.  Alpha values can be specified
                in the fourth component.
            `width` : float
                Width to limit the rendering to.  Text will be word-wrapped
                if necessary.
            `halign` : str
                Alignment of the text.  See `Text.halign` for details.
            `valign` : str
                Controls positioning of the text based off the y coordinate.
                One of BASELINE, BOTTOM, CENTER or TOP.  Defaults to BASELINE.

        '''
        multiline = False
        if width is not None:
            # An explicit width implies word-wrapped multiline layout.
            self._width = width
            self._wrap = 'width'
            multiline = True
        elif '\n' in text:
            # Embedded newlines force multiline layout without wrapping.
            self._wrap = 'multiline'
            multiline = True

        self._group = _TextZGroup()
        self._document = pyglet.text.decode_text(text)
        self._layout = pyglet.text.layout.TextLayout(self._document,
                                                     width=width,
                                                     multiline=multiline,
                                                     wrap_lines=width is not None,
                                                     dpi=font.dpi,
                                                     group=self._group)

        # Batch all the style assignments below into one layout update.
        self._layout.begin_update()
        if self._wrap == 'multiline':
            self._document.set_style(0, len(text), dict(wrap=False))
        self.font = font
        self.color = color
        self._x = x
        self.y = y
        self.z = z
        self.width = width
        self.halign = halign
        self.valign = valign
        self._update_layout_halign()
        self._layout.end_update()

    def _get_font(self):
        return self._font

    def _set_font(self, font):
        # Restyle the entire document with the new font's parameters.
        self._font = font
        self._layout.begin_update()
        self._document.set_style(0, len(self._document.text), {
            'font_name': font.name,
            'font_size': font.size,
            'bold': font.bold,
            'italic': font.italic,
        })
        self._layout._dpi = font.dpi
        self._layout.end_update()

    font = property(_get_font, _set_font)

    def _get_color(self):
        color = self._document.get_style('color')
        if color is None:
            return (1., 1., 1., 1.)
        # Document styles store 0-255 int components; expose 0-1 floats.
        return tuple([c/255. for c in color])

    def _set_color(self, color):
        color = [int(c * 255) for c in color]
        self._document.set_style(0, len(self._document.text), {
            'color': color,
        })

    color = property(_get_color, _set_color)

    def _update_layout_halign(self):
        if self._layout.multiline:
            # TextLayout has a different interpretation of halign that doesn't
            # consider the width to be a special factor; here we emulate the
            # old behaviour by fudging the layout x value.
            if self._layout.anchor_x == 'left':
                self._layout.x = self.x
            elif self._layout.anchor_x == 'center':
                self._layout.x = self.x + self._layout.width - \
                    self._layout.content_width // 2
            elif self._layout.anchor_x == 'right':
                self._layout.x = self.x + 2 * self._layout.width - \
                    self._layout.content_width
        else:
            self._layout.x = self.x

    def _get_x(self):
        return self._x

    def _set_x(self, x):
        self._x = x
        self._update_layout_halign()

    x = property(_get_x, _set_x)

    def _get_y(self):
        return self._layout.y

    def _set_y(self, y):
        self._layout.y = y

    y = property(_get_y, _set_y)

    def _get_z(self):
        return self._group.z

    def _set_z(self, z):
        self._group.z = z

    z = property(_get_z, _set_z)

    def _update_wrap(self):
        # Re-derive the wrap mode from the current width/text, then push
        # the corresponding settings into the layout in one update.
        if self._width is not None:
            self._wrap = 'width'
        elif '\n' in self.text:
            self._wrap = 'multiline'

        self._layout.begin_update()
        if self._wrap == None:
            self._layout.multiline = False
        elif self._wrap == 'width':
            self._layout.width = self._width
            self._layout.multiline = True
            self._document.set_style(0, len(self.text), dict(wrap=True))
        elif self._wrap == 'multiline':
            self._layout.multiline = True
            self._document.set_style(0, len(self.text), dict(wrap=False))
        self._update_layout_halign()
        self._layout.end_update()

    def _get_width(self):
        if self._wrap == 'width':
            return self._layout.width
        else:
            return self._layout.content_width

    def _set_width(self, width):
        self._width = width
        self._layout._wrap_lines_flag = width is not None
        self._update_wrap()

    width = property(_get_width, _set_width,
        doc='''Width of the text.

        When set, this enables word-wrapping to the specified width.
        Otherwise, the width of the text as it will be rendered can be
        determined.

        :type: float
        ''')

    def _get_height(self):
        return self._layout.content_height

    height = property(_get_height,
        doc='''Height of the text.

        This property is the ascent minus the descent of the font, unless
        there is more than one line of word-wrapped text, in which case
        the height takes into account the line leading.  Read-only.

        :type: float
        ''')

    def _get_text(self):
        return self._document.text

    def _set_text(self, text):
        self._document.text = text
        self._update_wrap()

    text = property(_get_text, _set_text,
        doc='''Text to render.

        The glyph vertices are only recalculated as needed, so multiple
        changes to the text can be performed with no performance penalty.

        :type: str
        ''')

    def _get_halign(self):
        return self._layout.anchor_x

    def _set_halign(self, halign):
        self._layout.anchor_x = halign
        self._update_layout_halign()

    halign = property(_get_halign, _set_halign,
        doc='''Horizontal alignment of the text.

        The text is positioned relative to `x` and `width` according to this
        property, which must be one of the alignment constants `LEFT`,
        `CENTER` or `RIGHT`.

        :type: str
        ''')

    def _get_valign(self):
        return self._layout.anchor_y

    def _set_valign(self, valign):
        self._layout.anchor_y = valign

    valign = property(_get_valign, _set_valign,
        doc='''Vertical alignment of the text.

        The text is positioned relative to `y` according to this property,
        which must be one of the alignment constants `BOTTOM`, `BASELINE`,
        `CENTER` or `TOP`.

        :type: str
        ''')

    def _get_leading(self):
        return self._document.get_style('leading') or 0

    def _set_leading(self, leading):
        self._document.set_style(0, len(self._document.text), {
            'leading': leading,
        })

    leading = property(_get_leading, _set_leading,
        doc='''Vertical space between adjacent lines, in pixels.

        :type: int
        ''')

    def _get_line_height(self):
        return self._font.ascent - self._font.descent + self.leading

    def _set_line_height(self, line_height):
        self.leading = line_height - (self._font.ascent - self._font.descent)

    line_height = property(_get_line_height, _set_line_height,
        doc='''Vertical distance between adjacent baselines, in pixels.

        :type: int
        ''')

    def draw(self):
        # Delegate entirely to the backing TextLayout.
        self._layout.draw()
# Select the platform-specific Font implementation at import time.  Skipped
# under epydoc so documentation builds do not require a working platform
# font stack.
if not getattr(sys, 'is_epydoc', False):
    if sys.platform == 'darwin':
        if pyglet.options['darwin_cocoa']:
            from pyglet.font.quartz import QuartzFont
            _font_class = QuartzFont
        else:
            from pyglet.font.carbon import CarbonFont
            _font_class = CarbonFont
    elif sys.platform in ('win32', 'cygwin'):
        if pyglet.options['font'][0] == 'win32':
            from pyglet.font.win32 import Win32Font
            _font_class = Win32Font
        elif pyglet.options['font'][0] == 'gdiplus':
            from pyglet.font.win32 import GDIPlusFont
            _font_class = GDIPlusFont
        else:
            assert False, 'Unknown font driver'
    else:
        # All other platforms use FreeType.
        from pyglet.font.freetype import FreeTypeFont
        _font_class = FreeTypeFont
def have_font(name):
    '''Return whether a system font with the given family name is available.'''
    return _font_class.have_font(name)
def load(name=None, size=None, bold=False, italic=False, dpi=None):
    '''Load a font for rendering.

    :Parameters:
        `name` : str, or list of str
            Font family, for example, "Times New Roman".  If a list of names
            is provided, the first one matching a known font is used.  If no
            font can be matched to the name(s), a default font is used.  In
            pyglet 1.1, the name may be omitted.
        `size` : float
            Size of the font, in points.  The returned font may be an exact
            match or the closest available.  In pyglet 1.1, the size may be
            omitted, and defaults to 12pt.
        `bold` : bool
            If True, a bold variant is returned, if one exists for the given
            family and size.
        `italic` : bool
            If True, an italic variant is returned, if one exists for the given
            family and size.
        `dpi` : float
            The assumed resolution of the display device, for the purposes of
            determining the pixel size of the font.  Defaults to 96.

    :rtype: `Font`
    '''
    # Arbitrary default size
    if size is None:
        size = 12

    if dpi is None:
        dpi = 96

    # Find first matching name.  isinstance (rather than an exact type()
    # comparison) also accepts tuple/list subclasses.
    if isinstance(name, (tuple, list)):
        for n in name:
            if _font_class.have_font(n):
                name = n
                break
        else:
            # for/else: no candidate matched; fall back to platform default.
            name = None

    # Locate or create font cache
    shared_object_space = gl.current_context.object_space
    if not hasattr(shared_object_space, 'pyglet_font_font_cache'):
        shared_object_space.pyglet_font_font_cache = \
            weakref.WeakValueDictionary()
        shared_object_space.pyglet_font_font_hold = []
    font_cache = shared_object_space.pyglet_font_font_cache
    font_hold = shared_object_space.pyglet_font_font_hold

    # Look for font name in font cache
    descriptor = (name, size, bold, italic, dpi)
    if descriptor in font_cache:
        return font_cache[descriptor]

    # Not in cache, create from scratch
    font = _font_class(name, size, bold=bold, italic=italic, dpi=dpi)

    # Save parameters for new-style layout classes to recover
    font.name = name
    font.size = size
    font.bold = bold
    font.italic = italic
    font.dpi = dpi

    # Cache font in weak-ref dictionary to avoid reloading while still in use
    font_cache[descriptor] = font

    # Hold onto refs of last three loaded fonts to prevent them being
    # collected if momentarily dropped.
    del font_hold[3:]
    font_hold.insert(0, font)

    return font
def add_file(font):
    '''Add a font to pyglet's search path.

    In order to load a font that is not installed on the system, you must
    call this method to tell pyglet that it exists.  You can supply
    either a filename or any file-like object.

    The font format is platform-dependent, but is typically a TrueType font
    file containing a single font face.  Note that to load this file after
    adding it you must specify the face name to `load`, not the filename.

    :Parameters:
        `font` : str or file
            Filename or file-like object to load fonts from.

    '''
    # isinstance instead of an exact type() check so str/unicode
    # subclasses are accepted too.
    if isinstance(font, (str, unicode)):
        font = open(font, 'rb')
    if hasattr(font, 'read'):
        font = font.read()
    _font_class.add_font_data(font)
def add_directory(dir):
    '''Add a directory of fonts to pyglet's search path.

    This function simply calls `add_file` for each file with a ``.ttf``
    extension in the given directory.  Subdirectories are not searched.

    :Parameters:
        `dir` : str
            Directory that contains font files.

    '''
    # Loop variable renamed (was `file`, shadowing the builtin); use
    # endswith for the case-insensitive extension check.
    for filename in os.listdir(dir):
        if filename.lower().endswith('.ttf'):
            add_file(os.path.join(dir, filename))
|
gpl-2.0
|
profjrr/scrapy
|
tests/test_downloadermiddleware_httpproxy.py
|
103
|
3439
|
import os
import sys
from twisted.trial.unittest import TestCase, SkipTest
from scrapy.downloadermiddlewares.httpproxy import HttpProxyMiddleware
from scrapy.exceptions import NotConfigured
from scrapy.http import Response, Request
from scrapy.spiders import Spider
spider = Spider('foo')
class TestDefaultHeadersMiddleware(TestCase):

    failureException = AssertionError

    def setUp(self):
        # Snapshot the environment; the tests rebind and mutate os.environ.
        self._oldenv = os.environ.copy()

    def tearDown(self):
        # Restore the environment captured in setUp.
        os.environ = self._oldenv

    def test_no_proxies(self):
        # With no *_proxy variables the middleware must refuse to activate.
        os.environ = {}
        self.assertRaises(NotConfigured, HttpProxyMiddleware)

    def test_no_enviroment_proxies(self):
        # Unrelated environment variables must not configure any proxy.
        os.environ = {'dummy_proxy': 'reset_env_and_do_not_raise'}
        mw = HttpProxyMiddleware()

        for url in ('http://e.com', 'https://e.com', 'file:///tmp/a'):
            req = Request(url)
            assert mw.process_request(req, spider) is None
            self.assertEquals(req.url, url)
            self.assertEquals(req.meta, {})

    def test_enviroment_proxies(self):
        # Each scheme picks up its own *_proxy variable; file:// gets none.
        os.environ['http_proxy'] = http_proxy = 'https://proxy.for.http:3128'
        os.environ['https_proxy'] = https_proxy = 'http://proxy.for.https:8080'
        os.environ.pop('file_proxy', None)
        mw = HttpProxyMiddleware()

        for url, proxy in [('http://e.com', http_proxy),
                           ('https://e.com', https_proxy), ('file://tmp/a', None)]:
            req = Request(url)
            assert mw.process_request(req, spider) is None
            self.assertEquals(req.url, url)
            self.assertEquals(req.meta.get('proxy'), proxy)

    def test_proxy_auth(self):
        # Credentials embedded in the proxy URL must be stripped out into a
        # Proxy-Authorization header (base64 of "user:pass").
        os.environ['http_proxy'] = 'https://user:pass@proxy:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEquals(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEquals(req.headers.get('Proxy-Authorization'), 'Basic dXNlcjpwYXNz')

    def test_proxy_auth_empty_passwd(self):
        # Header value is base64 of "user:" (empty password).
        os.environ['http_proxy'] = 'https://user:@proxy:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://scrapytest.org')
        assert mw.process_request(req, spider) is None
        self.assertEquals(req.meta, {'proxy': 'https://proxy:3128'})
        self.assertEquals(req.headers.get('Proxy-Authorization'), 'Basic dXNlcjo=')

    def test_proxy_already_seted(self):
        # An explicit meta['proxy'] = None must be preserved, not replaced
        # by the environment proxy.
        os.environ['http_proxy'] = http_proxy = 'https://proxy.for.http:3128'
        mw = HttpProxyMiddleware()
        req = Request('http://noproxy.com', meta={'proxy': None})
        assert mw.process_request(req, spider) is None
        assert 'proxy' in req.meta and req.meta['proxy'] is None

    def test_no_proxy(self):
        os.environ['http_proxy'] = http_proxy = 'https://proxy.for.http:3128'
        mw = HttpProxyMiddleware()

        # '*' disables proxying for every host.
        os.environ['no_proxy'] = '*'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' not in req.meta

        # A non-matching host list leaves the proxy in place.
        os.environ['no_proxy'] = 'other.com'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' in req.meta

        # A matching entry disables the proxy for that host.
        os.environ['no_proxy'] = 'other.com,noproxy.com'
        req = Request('http://noproxy.com')
        assert mw.process_request(req, spider) is None
        assert 'proxy' not in req.meta
|
bsd-3-clause
|
whn09/tensorflow
|
tensorflow/contrib/distributions/python/kernel_tests/relaxed_bernoulli_test.py
|
110
|
6469
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the RelaxedBernoulli distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import scipy.special
from tensorflow.contrib.distributions.python.ops import relaxed_bernoulli
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.platform import test
class RelaxedBernoulliTest(test.TestCase):
  """Unit tests for the RelaxedBernoulli distribution (graph-mode TF API)."""

  def testP(self):
    """Tests that parameter P is set correctly. Note that dist.p != dist.pdf."""
    temperature = 1.0
    p = [0.1, 0.4]
    dist = relaxed_bernoulli.RelaxedBernoulli(temperature, probs=p)
    with self.test_session():
      self.assertAllClose(p, dist.probs.eval())

  def testLogits(self):
    """probs and logits stay mutually consistent whichever parameterizes."""
    temperature = 2.0
    logits = [-42., 42.]
    dist = relaxed_bernoulli.RelaxedBernoulli(temperature, logits=logits)
    with self.test_session():
      self.assertAllClose(logits, dist.logits.eval())
    with self.test_session():
      # probs derived from logits via the sigmoid (expit).
      self.assertAllClose(scipy.special.expit(logits), dist.probs.eval())
    p = [0.01, 0.99, 0.42]
    dist = relaxed_bernoulli.RelaxedBernoulli(temperature, probs=p)
    with self.test_session():
      # logits derived from probs via the logit function.
      self.assertAllClose(scipy.special.logit(p), dist.logits.eval())

  def testInvalidP(self):
    """validate_args rejects probs outside [0, 1]; boundary values pass."""
    temperature = 1.0
    invalid_ps = [1.01, 2.]
    for p in invalid_ps:
      with self.test_session():
        with self.assertRaisesOpError("probs has components greater than 1"):
          dist = relaxed_bernoulli.RelaxedBernoulli(temperature,
                                                    probs=p,
                                                    validate_args=True)
          dist.probs.eval()
    invalid_ps = [-0.01, -3.]
    for p in invalid_ps:
      with self.test_session():
        with self.assertRaisesOpError("Condition x >= 0"):
          dist = relaxed_bernoulli.RelaxedBernoulli(temperature,
                                                    probs=p,
                                                    validate_args=True)
          dist.probs.eval()
    valid_ps = [0.0, 0.5, 1.0]
    for p in valid_ps:
      with self.test_session():
        dist = relaxed_bernoulli.RelaxedBernoulli(temperature,
                                                  probs=p)
        self.assertEqual(p, dist.probs.eval())

  def testShapes(self):
    """Batch shape follows the shape of probs; event shape is scalar."""
    with self.test_session():
      for batch_shape in ([], [1], [2, 3, 4]):
        temperature = 1.0
        p = np.random.random(batch_shape).astype(np.float32)
        dist = relaxed_bernoulli.RelaxedBernoulli(temperature, probs=p)
        self.assertAllEqual(batch_shape, dist.batch_shape.as_list())
        self.assertAllEqual(batch_shape, dist.batch_shape_tensor().eval())
        self.assertAllEqual([], dist.event_shape.as_list())
        self.assertAllEqual([], dist.event_shape_tensor().eval())

  def testZeroTemperature(self):
    """If validate_args, raises InvalidArgumentError when temperature is 0."""
    temperature = constant_op.constant(0.0)
    p = constant_op.constant([0.1, 0.4])
    dist = relaxed_bernoulli.RelaxedBernoulli(temperature, probs=p,
                                              validate_args=True)
    with self.test_session():
      sample = dist.sample()
      with self.assertRaises(errors_impl.InvalidArgumentError):
        sample.eval()

  def testDtype(self):
    """Samples and densities inherit the dtype of the parameters."""
    temperature = constant_op.constant(1.0, dtype=dtypes.float32)
    p = constant_op.constant([0.1, 0.4], dtype=dtypes.float32)
    dist = relaxed_bernoulli.RelaxedBernoulli(temperature, probs=p)
    self.assertEqual(dist.dtype, dtypes.float32)
    self.assertEqual(dist.dtype, dist.sample(5).dtype)
    self.assertEqual(dist.probs.dtype, dist.prob([0.0]).dtype)
    self.assertEqual(dist.probs.dtype, dist.log_prob([0.0]).dtype)
    temperature = constant_op.constant(1.0, dtype=dtypes.float64)
    p = constant_op.constant([0.1, 0.4], dtype=dtypes.float64)
    dist64 = relaxed_bernoulli.RelaxedBernoulli(temperature, probs=p)
    self.assertEqual(dist64.dtype, dtypes.float64)
    self.assertEqual(dist64.dtype, dist64.sample(5).dtype)

  def testLogProb(self):
    """log_prob matches the closed-form density of the relaxed Bernoulli."""
    with self.test_session():
      t = np.array(1.0, dtype=np.float64)
      p = np.array(0.1, dtype=np.float64)  # P(x=1)
      dist = relaxed_bernoulli.RelaxedBernoulli(t, probs=p)
      xs = np.array([0.1, 0.3, 0.5, 0.9], dtype=np.float64)
      # analytical density from Maddison et al. 2016
      alpha = np.array(p/(1-p), dtype=np.float64)
      expected_log_pdf = (np.log(t) + np.log(alpha) +
                          (-t-1)*(np.log(xs)+np.log(1-xs)) -
                          2*np.log(alpha*np.power(xs, -t) + np.power(1-xs, -t)))
      log_pdf = dist.log_prob(xs).eval()
      self.assertAllClose(expected_log_pdf, log_pdf)

  def testBoundaryConditions(self):
    """Density is undefined (NaN) exactly at the interval endpoints."""
    with self.test_session():
      temperature = 1e-2
      dist = relaxed_bernoulli.RelaxedBernoulli(temperature, probs=1.0)
      self.assertAllClose(np.nan, dist.log_prob(0.0).eval())
      self.assertAllClose([np.nan], [dist.log_prob(1.0).eval()])

  def testSampleN(self):
    """mean of quantized samples still approximates the Bernoulli mean."""
    with self.test_session():
      temperature = 1e-2
      p = [0.2, 0.6, 0.5]
      dist = relaxed_bernoulli.RelaxedBernoulli(temperature, probs=p)
      n = 10000
      samples = dist.sample(n)
      self.assertEqual(samples.dtype, dtypes.float32)
      sample_values = samples.eval()
      self.assertTrue(np.all(sample_values >= 0))
      self.assertTrue(np.all(sample_values <= 1))
      # Threshold at 0.5 to quantize the relaxed samples back to {0, 1}.
      frac_ones_like = np.sum(sample_values >= 0.5, axis=0)/n
      self.assertAllClose(p, frac_ones_like, atol=1e-2)
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
  test.main()
|
apache-2.0
|
vHanda/electron
|
script/build.py
|
155
|
1166
|
#!/usr/bin/env python
import argparse
import os
import subprocess
import sys
from lib.util import atom_gyp
CONFIGURATIONS = ['Release', 'Debug']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
def main():
  """Run ninja for every requested configuration.

  Exits the process with ninja's return code as soon as one build fails;
  returns None (exit status 0) when every configuration builds cleanly.
  """
  os.chdir(SOURCE_ROOT)
  ninja = os.path.join('vendor', 'depot_tools', 'ninja')
  if sys.platform == 'win32':
    ninja += '.exe'
  args = parse_args()
  for config in args.configuration:
    # Output directories use the configuration's first letter:
    # out/R for Release, out/D for Debug.
    build_path = os.path.join('out', config[0])
    ret = subprocess.call([ninja, '-C', build_path, args.target])
    if ret != 0:
      sys.exit(ret)
def parse_args():
  """Parse command-line options.

  Returns an argparse.Namespace with:
    configuration -- list of configurations to build (defaults to both
                     Release and Debug).
    target        -- ninja target to build; defaults to the project name
                     from the gyp configuration.
  """
  parser = argparse.ArgumentParser(description='Build project')
  parser.add_argument('-c', '--configuration',
                      help='Build with Release or Debug configuration',
                      nargs='+',
                      default=CONFIGURATIONS,
                      required=False)
  parser.add_argument('-t', '--target',
                      help='Build specified target',
                      # NOTE: atom_gyp() is evaluated at parse time, so the
                      # gyp config must be readable even when -t is given.
                      default=atom_gyp()['project_name%'],
                      required=False)
  return parser.parse_args()
# Script entry point; propagate main()'s return value as the exit status.
if __name__ == '__main__':
  sys.exit(main())
|
mit
|
eckucukoglu/arm-linux-gnueabihf
|
arm-linux-gnueabihf/libc/usr/lib/python2.7/distutils/command/build_py.py
|
176
|
16299
|
"""distutils.command.build_py
Implements the Distutils 'build_py' command."""
__revision__ = "$Id$"
import os
import sys
from glob import glob
from distutils.core import Command
from distutils.errors import DistutilsOptionError, DistutilsFileError
from distutils.util import convert_path
from distutils import log
class build_py(Command):
    """Distutils command that copies pure Python modules (and their package
    data) into the build directory, optionally byte-compiling them."""

    description = "\"build\" pure Python modules (copy to build directory)"

    user_options = [
        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
        ('compile', 'c', "compile .py to .pyc"),
        ('no-compile', None, "don't compile .py files [default]"),
        ('optimize=', 'O',
         "also compile with optimization: -O1 for \"python -O\", "
         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
        ]

    boolean_options = ['compile', 'force']
    negative_opt = {'no-compile' : 'compile'}

    def initialize_options(self):
        # Defaults only; real values are filled in by finalize_options().
        self.build_lib = None
        self.py_modules = None
        self.package = None
        self.package_data = None
        self.package_dir = None
        self.compile = 0
        self.optimize = 0
        self.force = None

    def finalize_options(self):
        """Resolve option values from the 'build' command and the
        distribution metadata, and validate 'optimize'."""
        self.set_undefined_options('build',
                                   ('build_lib', 'build_lib'),
                                   ('force', 'force'))

        # Get the distribution options that are aliases for build_py
        # options -- list of packages and list of modules.
        self.packages = self.distribution.packages
        self.py_modules = self.distribution.py_modules
        self.package_data = self.distribution.package_data
        self.package_dir = {}
        if self.distribution.package_dir:
            for name, path in self.distribution.package_dir.items():
                self.package_dir[name] = convert_path(path)
        self.data_files = self.get_data_files()

        # Ick, copied straight from install_lib.py (fancy_getopt needs a
        # type system!  Hell, *everything* needs a type system!!!)
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                assert 0 <= self.optimize <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")

    def run(self):
        """Copy modules, packages and package data into the build tree,
        then byte-compile the copies if requested."""
        # XXX copy_file by default preserves atime and mtime.  IMHO this is
        # the right thing to do, but perhaps it should be an option -- in
        # particular, a site administrator might want installed files to
        # reflect the time of installation rather than the last
        # modification time before the installed release.

        # XXX copy_file by default preserves mode, which appears to be the
        # wrong thing to do: if a file is read-only in the working
        # directory, we want it to be installed read/write so that the next
        # installation of the same module distribution can overwrite it
        # without problems.  (This might be a Unix-specific issue.)  Thus
        # we turn off 'preserve_mode' when copying to the build directory,
        # since the build directory is supposed to be exactly what the
        # installation will look like (ie. we preserve mode when
        # installing).

        # Two options control which modules will be installed: 'packages'
        # and 'py_modules'.  The former lets us work with whole packages, not
        # specifying individual modules at all; the latter is for
        # specifying modules one-at-a-time.
        if self.py_modules:
            self.build_modules()
        if self.packages:
            self.build_packages()
            self.build_package_data()

        self.byte_compile(self.get_outputs(include_bytecode=0))

    def get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        data = []
        if not self.packages:
            return data
        for package in self.packages:
            # Locate package source directory
            src_dir = self.get_package_dir(package)

            # Compute package build directory
            build_dir = os.path.join(*([self.build_lib] + package.split('.')))

            # Length of path to strip from found files
            plen = 0
            if src_dir:
                plen = len(src_dir)+1

            # Strip directory from globbed filenames
            filenames = [
                file[plen:] for file in self.find_data_files(package, src_dir)
                ]
            data.append((package, src_dir, build_dir, filenames))
        return data

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        globs = (self.package_data.get('', [])
                 + self.package_data.get(package, []))
        files = []
        for pattern in globs:
            # Each pattern has to be converted to a platform-specific path
            filelist = glob(os.path.join(src_dir, convert_path(pattern)))
            # Files that match more than one pattern are only added once
            files.extend([fn for fn in filelist if fn not in files])
        return files

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                self.copy_file(os.path.join(src_dir, filename), target,
                               preserve_mode=False)

    def get_package_dir(self, package):
        """Return the directory, relative to the top of the source
        distribution, where package 'package' should be found
        (at least according to the 'package_dir' option, if any)."""
        path = package.split('.')

        if not self.package_dir:
            if path:
                return os.path.join(*path)
            else:
                return ''
        else:
            tail = []
            # Walk up the dotted package name, looking for the longest
            # prefix that has an explicit package_dir mapping.
            while path:
                try:
                    pdir = self.package_dir['.'.join(path)]
                except KeyError:
                    tail.insert(0, path[-1])
                    del path[-1]
                else:
                    tail.insert(0, pdir)
                    return os.path.join(*tail)
            else:
                # Oops, got all the way through 'path' without finding a
                # match in package_dir.  If package_dir defines a directory
                # for the root (nameless) package, then fallback on it;
                # otherwise, we might as well have not consulted
                # package_dir at all, as we just use the directory implied
                # by 'tail' (which should be the same as the original value
                # of 'path' at this point).
                pdir = self.package_dir.get('')
                if pdir is not None:
                    tail.insert(0, pdir)

                if tail:
                    return os.path.join(*tail)
                else:
                    return ''

    def check_package(self, package, package_dir):
        """Validate that 'package_dir' exists and is a directory, and
        return the path of its __init__.py (or None when not required
        or not found)."""
        # Empty dir name means current directory, which we can probably
        # assume exists.  Also, os.path.exists and isdir don't know about
        # my "empty string means current dir" convention, so we have to
        # circumvent them.
        if package_dir != "":
            if not os.path.exists(package_dir):
                raise DistutilsFileError(
                      "package directory '%s' does not exist" % package_dir)
            if not os.path.isdir(package_dir):
                raise DistutilsFileError(
                      "supposed package directory '%s' exists, "
                      "but is not a directory" % package_dir)

        # Require __init__.py for all but the "root package"
        if package:
            init_py = os.path.join(package_dir, "__init__.py")
            if os.path.isfile(init_py):
                return init_py
            else:
                log.warn(("package init file '%s' not found " +
                          "(or not a regular file)"), init_py)

        # Either not in a package at all (__init__.py not expected), or
        # __init__.py doesn't exist -- so don't return the filename.
        return None

    def check_module(self, module, module_file):
        """Warn and return False if 'module_file' does not exist."""
        if not os.path.isfile(module_file):
            log.warn("file %s (for module %s) not found", module_file, module)
            return False
        else:
            return True

    def find_package_modules(self, package, package_dir):
        """Return (package, module, filename) tuples for every .py file in
        'package_dir', excluding the setup script itself."""
        self.check_package(package, package_dir)
        module_files = glob(os.path.join(package_dir, "*.py"))
        modules = []
        setup_script = os.path.abspath(self.distribution.script_name)

        for f in module_files:
            abs_f = os.path.abspath(f)
            if abs_f != setup_script:
                module = os.path.splitext(os.path.basename(f))[0]
                modules.append((package, module, f))
            else:
                self.debug_print("excluding %s" % setup_script)
        return modules

    def find_modules(self):
        """Finds individually-specified Python modules, ie. those listed by
        module name in 'self.py_modules'.  Returns a list of tuples (package,
        module_base, filename): 'package' is a tuple of the path through
        package-space to the module; 'module_base' is the bare (no
        packages, no dots) module name, and 'filename' is the path to the
        ".py" file (relative to the distribution root) that implements the
        module.
        """
        # Map package names to tuples of useful info about the package:
        #    (package_dir, checked)
        # package_dir - the directory where we'll find source files for
        #   this package
        # checked - true if we have checked that the package directory
        #   is valid (exists, contains __init__.py, ... ?)
        packages = {}

        # List of (package, module, filename) tuples to return
        modules = []

        # We treat modules-in-packages almost the same as toplevel modules,
        # just the "package" for a toplevel is empty (either an empty
        # string or empty list, depending on context).  Differences:
        #   - don't check for __init__.py in directory for empty package
        for module in self.py_modules:
            path = module.split('.')
            package = '.'.join(path[0:-1])
            module_base = path[-1]

            try:
                (package_dir, checked) = packages[package]
            except KeyError:
                package_dir = self.get_package_dir(package)
                checked = 0

            if not checked:
                init_py = self.check_package(package, package_dir)
                packages[package] = (package_dir, 1)
                if init_py:
                    modules.append((package, "__init__", init_py))

            # XXX perhaps we should also check for just .pyc files
            # (so greedy closed-source bastards can distribute Python
            # modules too)
            module_file = os.path.join(package_dir, module_base + ".py")
            if not self.check_module(module, module_file):
                continue

            modules.append((package, module_base, module_file))
        return modules

    def find_all_modules(self):
        """Compute the list of all modules that will be built, whether
        they are specified one-module-at-a-time ('self.py_modules') or
        by whole packages ('self.packages').  Return a list of tuples
        (package, module, module_file), just like 'find_modules()' and
        'find_package_modules()' do."""
        modules = []
        if self.py_modules:
            modules.extend(self.find_modules())
        if self.packages:
            for package in self.packages:
                package_dir = self.get_package_dir(package)
                m = self.find_package_modules(package, package_dir)
                modules.extend(m)
        return modules

    def get_source_files(self):
        """Return the list of source (.py) filenames that feed the build."""
        return [module[-1] for module in self.find_all_modules()]

    def get_module_outfile(self, build_dir, package, module):
        """Return the output path for 'module' of 'package' under
        'build_dir'."""
        outfile_path = [build_dir] + list(package) + [module + ".py"]
        return os.path.join(*outfile_path)

    def get_outputs(self, include_bytecode=1):
        """Return the list of files (sources, optionally bytecode, and
        package data) that this command produces in the build tree."""
        modules = self.find_all_modules()
        outputs = []
        for (package, module, module_file) in modules:
            package = package.split('.')
            filename = self.get_module_outfile(self.build_lib, package, module)
            outputs.append(filename)
            if include_bytecode:
                if self.compile:
                    outputs.append(filename + "c")
                if self.optimize > 0:
                    outputs.append(filename + "o")

        outputs += [
            os.path.join(build_dir, filename)
            for package, src_dir, build_dir, filenames in self.data_files
            for filename in filenames
            ]

        return outputs

    def build_module(self, module, module_file, package):
        """Copy one module's source file into the build tree."""
        if isinstance(package, str):
            package = package.split('.')
        elif not isinstance(package, (list, tuple)):
            raise TypeError(
                  "'package' must be a string (dot-separated), list, or tuple")

        # Now put the module source file into the "build" area -- this is
        # easy, we just copy it somewhere under self.build_lib (the build
        # directory for Python source).
        outfile = self.get_module_outfile(self.build_lib, package, module)
        dir = os.path.dirname(outfile)
        self.mkpath(dir)
        return self.copy_file(module_file, outfile, preserve_mode=0)

    def build_modules(self):
        """Copy each individually-listed module into the build tree."""
        modules = self.find_modules()
        for (package, module, module_file) in modules:
            # Now "build" the module -- ie. copy the source file to
            # self.build_lib (the build directory for Python source).
            # (Actually, it gets copied to the directory for this package
            # under self.build_lib.)
            self.build_module(module, module_file, package)

    def build_packages(self):
        """Copy every module of every listed package into the build tree."""
        for package in self.packages:
            # Get list of (package, module, module_file) tuples based on
            # scanning the package directory.  'package' is only included
            # in the tuple so that 'find_modules()' and
            # 'find_package_tuples()' have a consistent interface; it's
            # ignored here (apart from a sanity check).  Also, 'module' is
            # the *unqualified* module name (ie. no dots, no package -- we
            # already know its package!), and 'module_file' is the path to
            # the .py file, relative to the current directory
            # (ie. including 'package_dir').
            package_dir = self.get_package_dir(package)
            modules = self.find_package_modules(package, package_dir)

            # Now loop over the modules we found, "building" each one (just
            # copy it to self.build_lib).
            for (package_, module, module_file) in modules:
                assert package == package_
                self.build_module(module, module_file, package)

    def byte_compile(self, files):
        """Byte-compile the given files per the 'compile'/'optimize'
        options, unless bytecode writing is globally disabled."""
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return
        from distutils.util import byte_compile
        prefix = self.build_lib
        if prefix[-1] != os.sep:
            prefix = prefix + os.sep

        # XXX this code is essentially the same as the 'byte_compile()
        # method of the "install_lib" command, except for the determination
        # of the 'prefix' string.  Hmmm.
        if self.compile:
            byte_compile(files, optimize=0,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
        if self.optimize > 0:
            byte_compile(files, optimize=self.optimize,
                         force=self.force, prefix=prefix, dry_run=self.dry_run)
|
gpl-2.0
|
lijoantony/django-oscar
|
src/oscar/apps/wishlists/migrations/0001_initial.py
|
51
|
2730
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
from django.conf import settings
class Migration(migrations.Migration):
    # Initial schema for the wishlists app: a WishList owned by a user,
    # containing Line rows that each reference an optional catalogue Product.
    # NOTE: migration files must not be edited once applied; comments only.

    dependencies = [
        ('catalogue', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Line',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('quantity', models.PositiveIntegerField(default=1, verbose_name='Quantity')),
                # Title is stored on the line so it survives product deletion.
                ('title', models.CharField(max_length=255, verbose_name='Title')),
                # SET_NULL keeps the wish-list line even when the referenced
                # product is later removed from the catalogue.
                ('product', models.ForeignKey(null=True, verbose_name='Product', on_delete=django.db.models.deletion.SET_NULL, related_name='wishlists_lines', to='catalogue.Product', blank=True)),
            ],
            options={
                'abstract': False,
                'verbose_name': 'Wish list line',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='WishList',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(default='Default', max_length=255, verbose_name='Name')),
                # 'key' is the obfuscated share token used in wish-list URLs.
                ('key', models.CharField(max_length=6, unique=True, db_index=True, verbose_name='Key', editable=False)),
                ('visibility', models.CharField(default='Private', max_length=20, verbose_name='Visibility', choices=[('Private', 'Private - Only the owner can see the wish list'), ('Shared', 'Shared - Only the owner and people with access to the obfuscated link can see the wish list'), ('Public', 'Public - Everybody can see the wish list')])),
                ('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
                ('owner', models.ForeignKey(verbose_name='Owner', related_name='wishlists', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('owner', 'date_created'),
                'abstract': False,
                'verbose_name': 'Wish List',
            },
            bases=(models.Model,),
        ),
        migrations.AddField(
            model_name='line',
            name='wishlist',
            field=models.ForeignKey(verbose_name='Wish List', related_name='lines', to='wishlists.WishList'),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='line',
            unique_together=set([('wishlist', 'product')]),
        ),
    ]
|
bsd-3-clause
|
GinnyN/towerofdimensions-django
|
django-openid-auth/django_openid_auth/teams.py
|
25
|
14251
|
# Launchpad OpenID Teams Extension support for python-openid
#
# Copyright (C) 2008-2010 Canonical Ltd.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Team membership support for Launchpad.
The primary form of communication between the RP and Launchpad is an
OpenID authentication request. Our solution is to piggyback a team
membership test onto this interaction.
As part of an OpenID authentication request, the RP includes the
following fields:
openid.ns.lp:
An OpenID 2.0 namespace URI for the extension. It is not strictly
required for 1.1 requests, but including it is good for forward
compatibility.
It must be set to: http://ns.launchpad.net/2007/openid-teams
openid.lp.query_membership:
A comma separated list of Launchpad team names that the RP is
interested in.
As part of the positive assertion OpenID response, the following field
will be provided:
openid.ns.lp:
(as above)
openid.lp.is_member:
A comma separated list of teams that the user is actually a member
of. The list may be limited to those teams mentioned in the
request.
This field must be included in the response signature in order to
be considered valid (as the response is bounced through the user's
web browser, an unsigned value could be modified).
@since: 2.1.1
"""
from openid.message import registerNamespaceAlias, \
NamespaceAliasRegistrationError
from openid.extension import Extension
from openid import oidutil
try:
basestring #pylint:disable-msg=W0104
except NameError:
# For Python 2.2
basestring = (str, unicode) #pylint:disable-msg=W0622
__all__ = [
'TeamsRequest',
'TeamsResponse',
'ns_uri',
'supportsTeams',
]
ns_uri = 'http://ns.launchpad.net/2007/openid-teams'
try:
registerNamespaceAlias(ns_uri, 'lp')
except NamespaceAliasRegistrationError, e:
oidutil.log('registerNamespaceAlias(%r, %r) failed: %s' % (ns_uri,
'lp', str(e),))
def supportsTeams(endpoint):
    """Does the given endpoint advertise support for Launchpad Teams?

    @param endpoint: The endpoint object as returned by OpenID discovery
    @type endpoint: openid.consumer.discover.OpenIDEndpoint
    @returns: Whether an lp type was advertised by the endpoint
    @rtype: bool
    """
    # usesExtension consults the type URIs advertised during discovery.
    return endpoint.usesExtension(ns_uri)
class TeamsNamespaceError(ValueError):
    """The Launchpad teams namespace was not found and could not
    be created using the expected name (there's another extension
    using the name 'lp').

    This is not I{illegal}, for OpenID 2, although it probably
    indicates a problem, since it's not expected that other extensions
    will re-use the alias that is in use for OpenID 1.

    If this is an OpenID 1 request, then there is no recourse.  This
    should not happen unless some code has modified the namespaces for
    the message that is being processed.
    """
def getTeamsNS(message):
    """Extract the Launchpad teams namespace URI from the given
    OpenID message.

    @param message: The OpenID message from which to parse Launchpad
        teams.  This may be a request or response message.
    @type message: C{L{openid.message.Message}}

    @returns: the lp namespace URI for the supplied message.  The
        message may be modified to define a Launchpad teams
        namespace.
    @rtype: C{str}

    @raise ValueError: when using OpenID 1 if the message defines
        the 'lp' alias to be something other than a Launchpad
        teams type.
    """
    # See if there exists an alias for the Launchpad teams type.
    alias = message.namespaces.getAlias(ns_uri)
    if alias is None:
        # There is no alias, so try to add one. (OpenID version 1)
        try:
            message.namespaces.addAlias(ns_uri, 'lp')
        except KeyError, why:
            # An alias for the string 'lp' already exists, but it's
            # defined for something other than Launchpad teams
            raise TeamsNamespaceError(why[0])

    # we know that ns_uri defined, because it's defined in the
    # else clause of the loop as well, so disable the warning
    return ns_uri #pylint:disable-msg=W0631
class TeamsRequest(Extension):
    """An object to hold the state of a Launchpad teams request.

    @ivar query_membership: A list of Launchpad team names that the RP
        is interested in (serialized on the wire as a comma separated
        string).
    @type query_membership: [str]

    @group Consumer: requestTeam, requestTeams, getExtensionArgs
    @group Server: fromOpenIDRequest, parseExtensionArgs
    """

    ns_alias = 'lp'

    def __init__(self, query_membership=None, lp_ns_uri=ns_uri):
        """Initialize an empty Launchpad teams request"""
        Extension.__init__(self)
        self.query_membership = []
        self.ns_uri = lp_ns_uri
        if query_membership:
            self.requestTeams(query_membership)

    # Assign getTeamsNS to a static method so that it can be
    # overridden for testing.
    _getTeamsNS = staticmethod(getTeamsNS)

    def fromOpenIDRequest(cls, request):
        """Create a Launchpad teams request that contains the
        fields that were requested in the OpenID request with the
        given arguments

        @param request: The OpenID request
        @type request: openid.server.CheckIDRequest

        @returns: The newly created Launchpad teams request
        @rtype: C{L{TeamsRequest}}
        """
        self = cls()
        # Since we're going to mess with namespace URI mapping, don't
        # mutate the object that was passed in.
        message = request.message.copy()
        self.ns_uri = self._getTeamsNS(message)
        args = message.getArgs(self.ns_uri)
        self.parseExtensionArgs(args)
        return self

    fromOpenIDRequest = classmethod(fromOpenIDRequest)

    def parseExtensionArgs(self, args, strict=False):
        """Parse the unqualified Launchpad teams request
        parameters and add them to this object.

        This method is essentially the inverse of
        C{L{getExtensionArgs}}.  This method restores the serialized
        Launchpad teams request fields.

        If you are extracting arguments from a standard OpenID
        checkid_* request, you probably want to use
        C{L{fromOpenIDRequest}}, which will extract the lp namespace
        and arguments from the OpenID request.  This method is
        intended for cases where the OpenID server needs more control
        over how the arguments are parsed than that method provides.

        >>> args = message.getArgs(ns_uri)
        >>> request.parseExtensionArgs(args)

        @param args: The unqualified Launchpad teams arguments
        @type args: {str:str}

        @param strict: Whether requests with fields that are not
            defined in the Launchpad teams specification should be
            tolerated (and ignored)
        @type strict: bool

        @returns: None; updates this object
        """
        items = args.get('query_membership')
        if items:
            for team_name in items.split(','):
                try:
                    self.requestTeam(team_name, strict)
                except ValueError:
                    # In non-strict mode, silently drop duplicate teams.
                    if strict:
                        raise

    def allRequestedTeams(self):
        """A list of all of the Launchpad teams that were
        requested.

        @rtype: [str]
        """
        return self.query_membership

    def wereTeamsRequested(self):
        """Have any Launchpad teams been requested?

        @rtype: bool
        """
        return bool(self.allRequestedTeams())

    def __contains__(self, team_name):
        """Was this team in the request?"""
        return team_name in self.query_membership

    def requestTeam(self, team_name, strict=False):
        """Request the specified team from the OpenID user

        @param team_name: the unqualified Launchpad team name
        @type team_name: str

        @param strict: whether to raise an exception when a team is
            added to a request more than once

        @raise ValueError: when strict is set and the team was
            requested more than once
        """
        if strict:
            if team_name in self.query_membership:
                raise ValueError('That team has already been requested')
        else:
            if team_name in self.query_membership:
                return

        self.query_membership.append(team_name)

    def requestTeams(self, query_membership, strict=False):
        """Add the given list of teams to the request

        @param query_membership: The Launchpad teams request
        @type query_membership: [str]

        @raise TypeError: when the teams are passed as a single string
            instead of a list of strings (the docstring previously
            claimed ValueError here; the code raises TypeError)
        @raise ValueError: when strict is set and a team was requested
            more than once
        """
        if isinstance(query_membership, basestring):
            raise TypeError('Teams should be passed as a list of '
                            'strings (not %r)' % (type(query_membership),))

        for team_name in query_membership:
            self.requestTeam(team_name, strict=strict)

    def getExtensionArgs(self):
        """Get a dictionary of unqualified Launchpad teams
        arguments representing this request.

        This method is essentially the inverse of
        C{L{parseExtensionArgs}}.  This method serializes the Launchpad
        teams request fields.

        @rtype: {str:str}
        """
        args = {}
        if self.query_membership:
            args['query_membership'] = ','.join(self.query_membership)
        return args
class TeamsResponse(Extension):
    """Represents the data returned in a Launchpad teams response
    inside of an OpenID C{id_res} response. This object will be
    created by the OpenID server, added to the C{id_res} response
    object, and then extracted from the C{id_res} message by the
    Consumer.
    @ivar is_member: The Launchpad teams data, a list of team names.
    @ivar ns_uri: The URI under which the Launchpad teams data was
        stored in the response message.
    @group Server: extractResponse
    @group Consumer: fromSuccessResponse
    """
    ns_alias = 'lp'
    def __init__(self, is_member=None, lp_ns_uri=ns_uri):
        # The default for lp_ns_uri is bound to the module-level ns_uri
        # at class-definition time.
        Extension.__init__(self)
        if is_member is None:
            # Create a fresh list per instance -- a mutable default
            # argument would be shared between instances.
            self.is_member = []
        else:
            self.is_member = is_member
        self.ns_uri = lp_ns_uri
    def addTeam(self, team_name):
        # Add a team to the response, ignoring duplicates.
        if team_name not in self.is_member:
            self.is_member.append(team_name)
    def extractResponse(cls, request, is_member_str):
        """Take a C{L{TeamsRequest}} and a comma-separated string of
        Launchpad team names and create a C{L{TeamsResponse}}
        object containing that data.
        @param request: The Launchpad teams request object
        @type request: TeamsRequest
        @param is_member_str: The Launchpad teams data for this
            response, as a comma-separated string of team names.
        @type is_member_str: str
        @returns: a Launchpad teams response object
        @rtype: TeamsResponse
        """
        self = cls()
        self.ns_uri = request.ns_uri
        self.is_member = is_member_str.split(',')
        return self
    # Pre-decorator classmethod syntax, kept as-is for this codebase.
    extractResponse = classmethod(extractResponse)
    # Assign getTeamsNS to a static method so that it can be
    # overridden for testing
    _getTeamsNS = staticmethod(getTeamsNS)
    def fromSuccessResponse(cls, success_response, signed_only=True):
        """Create a C{L{TeamsResponse}} object from a successful OpenID
        library response
        (C{L{openid.consumer.consumer.SuccessResponse}}) response
        message
        @param success_response: A SuccessResponse from consumer.complete()
        @type success_response: C{L{openid.consumer.consumer.SuccessResponse}}
        @param signed_only: Whether to process only data that was
            signed in the id_res message from the server.
        @type signed_only: bool
        @rtype: TeamsResponse
        @returns: A Launchpad teams response containing the data
            that was supplied with the C{id_res} response.
        """
        self = cls()
        self.ns_uri = self._getTeamsNS(success_response.message)
        if signed_only:
            args = success_response.getSignedNS(self.ns_uri)
        else:
            args = success_response.message.getArgs(self.ns_uri)
        # Only populate is_member when the server actually sent the field;
        # otherwise keep the empty default from __init__.
        if "is_member" in args:
            is_member_str = args["is_member"]
            self.is_member = is_member_str.split(',')
        #self.is_member = args["is_member"]
        return self
    fromSuccessResponse = classmethod(fromSuccessResponse)
    def getExtensionArgs(self):
        """Get the fields to put in the Launchpad teams namespace
        when adding them to an id_res message.
        @see: openid.extension
        """
        ns_args = {'is_member': ','.join(self.is_member),}
        return ns_args
|
bsd-3-clause
|
mscherer/ansible-modules-extras
|
database/mysql/mysql_replication.py
|
8
|
13966
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage mysql replication
(c) 2013, Balazs Pocze <banyek@gawker.com>
Certain parts are taken from Mark Theunissen's mysqldb module
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
DOCUMENTATION = '''
---
module: mysql_replication
short_description: Manage MySQL replication
description:
- Manages MySQL server replication, slave, master status get and change master host.
version_added: "1.3"
options:
mode:
description:
- module operating mode. Could be getslave (SHOW SLAVE STATUS), getmaster (SHOW MASTER STATUS), changemaster (CHANGE MASTER TO), startslave (START SLAVE), stopslave (STOP SLAVE)
required: False
choices:
- getslave
- getmaster
- changemaster
- stopslave
- startslave
default: getslave
login_user:
description:
- username to connect mysql host, if defined login_password also needed.
required: False
login_password:
description:
- password to connect mysql host, if defined login_user also needed.
required: False
login_host:
description:
- mysql host to connect
required: False
login_port:
description:
- Port of the MySQL server. Requires login_host be defined as other then localhost if login_port is used
required: False
default: 3306
version_added: "1.9"
login_unix_socket:
description:
- unix socket to connect mysql server
master_host:
description:
- same as mysql variable
master_user:
description:
- same as mysql variable
master_password:
description:
- same as mysql variable
master_port:
description:
- same as mysql variable
master_connect_retry:
description:
- same as mysql variable
master_log_file:
description:
- same as mysql variable
master_log_pos:
description:
- same as mysql variable
relay_log_file:
description:
- same as mysql variable
relay_log_pos:
description:
- same as mysql variable
master_ssl:
description:
- same as mysql variable
possible values: 0,1
master_ssl_ca:
description:
- same as mysql variable
master_ssl_capath:
description:
- same as mysql variable
master_ssl_cert:
description:
- same as mysql variable
master_ssl_key:
description:
- same as mysql variable
master_ssl_cipher:
description:
- same as mysql variable
'''
EXAMPLES = '''
# Stop mysql slave thread
- mysql_replication: mode=stopslave
# Get master binlog file name and binlog position
- mysql_replication: mode=getmaster
# Change master to master server 192.168.1.1 and use binary log 'mysql-bin.000009' with position 4578
- mysql_replication: mode=changemaster master_host=192.168.1.1 master_log_file=mysql-bin.000009 master_log_pos=4578
# Check slave status using port 3308
- mysql_replication: mode=getslave login_host=ansible.example.com login_port=3308
'''
import ConfigParser
import os
import warnings
# MySQLdb is optional at import time; main() reports a clear error when it
# is missing instead of crashing with ImportError here.
try:
    import MySQLdb
except ImportError:
    mysqldb_found = False
else:
    mysqldb_found = True
def get_master_status(cursor):
    """Run ``SHOW MASTER STATUS`` and return its single result row."""
    cursor.execute("SHOW MASTER STATUS")
    return cursor.fetchone()
def get_slave_status(cursor):
    """Run ``SHOW SLAVE STATUS`` and return its single result row."""
    cursor.execute("SHOW SLAVE STATUS")
    return cursor.fetchone()
def stop_slave(cursor):
    """Issue ``STOP SLAVE``.

    Returns True when the statement succeeded and False when MySQL raised
    an error (e.g. the server is not configured as a slave).
    """
    try:
        cursor.execute("STOP SLAVE")
        stopped = True
    except Exception:
        # Was a bare ``except:``, which also swallowed KeyboardInterrupt
        # and SystemExit; catch only real errors.
        stopped = False
    return stopped
def start_slave(cursor):
    """Issue ``START SLAVE``.

    Returns True when the statement succeeded and False when MySQL raised
    an error (e.g. the slave is already running).
    """
    try:
        cursor.execute("START SLAVE")
        started = True
    except Exception:
        # Was a bare ``except:``, which also swallowed KeyboardInterrupt
        # and SystemExit; catch only real errors.
        started = False
    return started
def changemaster(cursor, chm, chm_params):
    """Build and execute ``CHANGE MASTER TO`` from clause fragments.

    *chm* is a list of clause strings containing pyformat placeholders;
    *chm_params* maps those placeholder names to their values, and is
    passed to the driver for safe parameter substitution.
    """
    statement = 'CHANGE MASTER TO %s' % ",".join(chm)
    cursor.execute(statement, chm_params)
def strip_quotes(s):
    """ Remove one pair of surrounding single or double quotes

    >>> print strip_quotes('hello')
    hello
    >>> print strip_quotes('"hello"')
    hello
    >>> print strip_quotes("'hello'")
    hello
    >>> print strip_quotes("'hello")
    'hello
    """
    # Use slicing rather than str.strip(): strip() removes *runs* of the
    # quote character (e.g. "''x''" -> "x") and turns a lone quote
    # character into an empty string, which is not "surrounding quotes".
    if len(s) >= 2 and s[0] == s[-1] and s[0] in ("'", '"'):
        return s[1:-1]
    return s
def config_get(config, section, option):
    """ Fetch an option via ConfigParser.get and strip surrounding quotes

    See: http://dev.mysql.com/doc/refman/5.0/en/option-files.html
    """
    raw_value = config.get(section, option)
    return strip_quotes(raw_value)
def load_mycnf():
    """Read MySQL credentials from ~/.my.cnf.

    Returns a dict with 'user' and 'passwd' keys, or False when the file
    is missing, unreadable, or contains no password.
    """
    config = ConfigParser.RawConfigParser()
    mycnf = os.path.expanduser('~/.my.cnf')
    if not os.path.exists(mycnf):
        return False
    try:
        config.readfp(open(mycnf))
    except (IOError):
        return False
    # We support two forms of passwords in .my.cnf, both pass= and password=,
    # as these are both supported by MySQL.
    try:
        passwd = config_get(config, 'client', 'password')
    except (ConfigParser.NoOptionError):
        try:
            passwd = config_get(config, 'client', 'pass')
        except (ConfigParser.NoOptionError):
            return False
    # If .my.cnf doesn't specify a user, default to the OS login name.
    try:
        user = config_get(config, 'client', 'user')
    except (ConfigParser.NoOptionError):
        # BUGFIX: getpass was used here without ever being imported in this
        # module, so this fallback raised NameError; import it locally.
        import getpass
        user = getpass.getuser()
    creds = dict(user=user, passwd=passwd)
    return creds
def main():
    """Entry point: dispatch on ``mode`` to query or change replication.

    Every outcome is reported through module.exit_json()/fail_json(),
    both of which raise SystemExit, so this function never returns.
    """
    module = AnsibleModule(
        argument_spec = dict(
            login_user=dict(default=None),
            login_password=dict(default=None),
            login_host=dict(default="localhost"),
            login_port=dict(default=3306, type='int'),
            login_unix_socket=dict(default=None),
            mode=dict(default="getslave", choices=["getmaster", "getslave", "changemaster", "stopslave", "startslave"]),
            master_host=dict(default=None),
            master_user=dict(default=None),
            master_password=dict(default=None),
            master_port=dict(default=None, type='int'),
            master_connect_retry=dict(default=None, type='int'),
            master_log_file=dict(default=None),
            master_log_pos=dict(default=None, type='int'),
            relay_log_file=dict(default=None),
            relay_log_pos=dict(default=None, type='int'),
            master_ssl=dict(default=False, type='bool'),
            master_ssl_ca=dict(default=None),
            master_ssl_capath=dict(default=None),
            master_ssl_cert=dict(default=None),
            master_ssl_key=dict(default=None),
            master_ssl_cipher=dict(default=None),
        )
    )
    user = module.params["login_user"]
    password = module.params["login_password"]
    host = module.params["login_host"]
    port = module.params["login_port"]
    mode = module.params["mode"]
    master_host = module.params["master_host"]
    master_user = module.params["master_user"]
    master_password = module.params["master_password"]
    master_port = module.params["master_port"]
    master_connect_retry = module.params["master_connect_retry"]
    master_log_file = module.params["master_log_file"]
    master_log_pos = module.params["master_log_pos"]
    relay_log_file = module.params["relay_log_file"]
    relay_log_pos = module.params["relay_log_pos"]
    master_ssl = module.params["master_ssl"]
    master_ssl_ca = module.params["master_ssl_ca"]
    master_ssl_capath = module.params["master_ssl_capath"]
    master_ssl_cert = module.params["master_ssl_cert"]
    master_ssl_key = module.params["master_ssl_key"]
    master_ssl_cipher = module.params["master_ssl_cipher"]
    if not mysqldb_found:
        module.fail_json(msg="the python mysqldb module is required")
    else:
        # Turn MySQL warnings into exceptions so they surface as failures.
        warnings.filterwarnings('error', category=MySQLdb.Warning)
    # Either the caller passes both a username and password with which to connect to
    # mysql, or they pass neither and allow this module to read the credentials from
    # ~/.my.cnf.
    login_password = module.params["login_password"]
    login_user = module.params["login_user"]
    if login_user is None and login_password is None:
        mycnf_creds = load_mycnf()
        if mycnf_creds is False:
            login_user = "root"
            login_password = ""
        else:
            login_user = mycnf_creds["user"]
            login_password = mycnf_creds["passwd"]
    elif login_password is None or login_user is None:
        module.fail_json(msg="when supplying login arguments, both login_user and login_password must be provided")
    # Connect via unix socket when given, otherwise TCP; a non-default port
    # combined with host "localhost" is rejected because MySQL would ignore
    # the port and use the socket instead.
    try:
        if module.params["login_unix_socket"]:
            db_connection = MySQLdb.connect(host=module.params["login_host"], unix_socket=module.params["login_unix_socket"], user=login_user, passwd=login_password)
        elif module.params["login_port"] != 3306 and module.params["login_host"] == "localhost":
            module.fail_json(msg="login_host is required when login_port is defined, login_host cannot be localhost when login_port is defined")
        else:
            db_connection = MySQLdb.connect(host=module.params["login_host"], port=module.params["login_port"], user=login_user, passwd=login_password)
    except Exception, e:
        module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or ~/.my.cnf has the credentials")
    try:
        cursor = db_connection.cursor(cursorclass=MySQLdb.cursors.DictCursor)
    except Exception, e:
        module.fail_json(msg="Trouble getting DictCursor from db_connection: %s" % e)
    # NOTE(review): the dispatch below uses substring membership
    # (``mode in "getmaster"``) rather than equality.  It happens to work
    # only because ``choices`` restricts mode to exact names; ``mode == ...``
    # would state the intent directly -- confirm before changing.
    if mode in "getmaster":
        masterstatus = get_master_status(cursor)
        try:
            # fetchone() returns None when not a master; ** on None raises
            # TypeError, handled below.
            module.exit_json( **masterstatus )
        except TypeError:
            module.fail_json(msg="Server is not configured as mysql master")
    elif mode in "getslave":
        slavestatus = get_slave_status(cursor)
        try:
            module.exit_json( **slavestatus )
        except TypeError:
            module.fail_json(msg="Server is not configured as mysql slave")
    elif mode in "changemaster":
        # Build the CHANGE MASTER TO clause list plus a matching dict of
        # pyformat parameters for safe substitution by the driver.
        chm=[]
        chm_params = {}
        if master_host:
            chm.append("MASTER_HOST=%(master_host)s")
            chm_params['master_host'] = master_host
        if master_user:
            chm.append("MASTER_USER=%(master_user)s")
            chm_params['master_user'] = master_user
        if master_password:
            chm.append("MASTER_PASSWORD=%(master_password)s")
            chm_params['master_password'] = master_password
        if master_port is not None:
            chm.append("MASTER_PORT=%(master_port)s")
            chm_params['master_port'] = master_port
        if master_connect_retry is not None:
            chm.append("MASTER_CONNECT_RETRY=%(master_connect_retry)s")
            chm_params['master_connect_retry'] = master_connect_retry
        if master_log_file:
            chm.append("MASTER_LOG_FILE=%(master_log_file)s")
            chm_params['master_log_file'] = master_log_file
        if master_log_pos is not None:
            chm.append("MASTER_LOG_POS=%(master_log_pos)s")
            chm_params['master_log_pos'] = master_log_pos
        if relay_log_file:
            chm.append("RELAY_LOG_FILE=%(relay_log_file)s")
            chm_params['relay_log_file'] = relay_log_file
        if relay_log_pos is not None:
            chm.append("RELAY_LOG_POS=%(relay_log_pos)s")
            chm_params['relay_log_pos'] = relay_log_pos
        if master_ssl:
            chm.append("MASTER_SSL=1")
        if master_ssl_ca:
            chm.append("MASTER_SSL_CA=%(master_ssl_ca)s")
            chm_params['master_ssl_ca'] = master_ssl_ca
        if master_ssl_capath:
            chm.append("MASTER_SSL_CAPATH=%(master_ssl_capath)s")
            chm_params['master_ssl_capath'] = master_ssl_capath
        if master_ssl_cert:
            chm.append("MASTER_SSL_CERT=%(master_ssl_cert)s")
            chm_params['master_ssl_cert'] = master_ssl_cert
        if master_ssl_key:
            chm.append("MASTER_SSL_KEY=%(master_ssl_key)s")
            chm_params['master_ssl_key'] = master_ssl_key
        if master_ssl_cipher:
            chm.append("MASTER_SSL_CIPHER=%(master_ssl_cipher)s")
            chm_params['master_ssl_cipher'] = master_ssl_cipher
        changemaster(cursor, chm, chm_params)
        module.exit_json(changed=True)
    elif mode in "startslave":
        started = start_slave(cursor)
        if started is True:
            module.exit_json(msg="Slave started ", changed=True)
        else:
            module.exit_json(msg="Slave already started (Or cannot be started)", changed=False)
    elif mode in "stopslave":
        stopped = stop_slave(cursor)
        if stopped is True:
            module.exit_json(msg="Slave stopped", changed=True)
        else:
            module.exit_json(msg="Slave already stopped", changed=False)
# import module snippets
from ansible.module_utils.basic import *
main()
# NOTE(review): this line is unreachable -- main() always terminates via
# module.exit_json()/fail_json(), which raise SystemExit.  Presumably it was
# meant to run before main(); confirm intent before moving or removing it.
warnings.simplefilter("ignore")
|
gpl-3.0
|
dend/yulyeong
|
node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/lexers/_robotframeworklexer.py
|
198
|
18591
|
# -*- coding: utf-8 -*-
"""
pygments.lexers._robotframeworklexer
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexer for Robot Framework.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
# Copyright 2012 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from pygments.lexer import Lexer
from pygments.token import Token
# Aliases mapping Robot Framework concepts onto standard Pygments token
# types, so generic styles colorize the output sensibly.
HEADING = Token.Generic.Heading
SETTING = Token.Keyword.Namespace
IMPORT = Token.Name.Namespace
TC_KW_NAME = Token.Generic.Subheading
KEYWORD = Token.Name.Function
ARGUMENT = Token.String
VARIABLE = Token.Name.Variable
COMMENT = Token.Comment
SEPARATOR = Token.Punctuation
SYNTAX = Token.Punctuation
GHERKIN = Token.Generic.Emph
ERROR = Token.Error
def normalize(string, remove=''):
    """Lower-case *string* and drop spaces plus any characters in *remove*."""
    lowered = string.lower()
    for unwanted in remove + ' ':
        lowered = lowered.replace(unwanted, '')
    return lowered
class RobotFrameworkLexer(Lexer):
    """
    For `Robot Framework <http://robotframework.org>`_ test data.
    Supports both space and pipe separated plain text formats.
    *New in Pygments 1.6.*
    """
    name = 'RobotFramework'
    aliases = ['RobotFramework', 'robotframework']
    filenames = ['*.txt', '*.robot']
    mimetypes = ['text/x-robotframework']
    def __init__(self, **options):
        # Robot Framework data is conventionally 2-space indented and UTF-8;
        # these options are forced regardless of what the caller passed.
        options['tabsize'] = 2
        options['encoding'] = 'UTF-8'
        Lexer.__init__(self, **options)
    def get_tokens_unprocessed(self, text):
        # Yield (offset, token, value) triples.  Each row is first split
        # into cells/separators by RowTokenizer; every resulting value is
        # then re-scanned for variable markup by VariableTokenizer.
        row_tokenizer = RowTokenizer()
        var_tokenizer = VariableTokenizer()
        index = 0
        for row in text.splitlines():
            for value, token in row_tokenizer.tokenize(row):
                for value, token in var_tokenizer.tokenize(value, token):
                    if value:
                        # ``unicode`` call: this module targets Python 2.
                        yield index, token, unicode(value)
                        index += len(value)
class VariableTokenizer(object):
    # Re-tokenizes a single cell value, splitting $/@/%-style variable
    # markup into SYNTAX/VARIABLE tokens.
    def tokenize(self, string, token):
        # Comments and errors are passed through untouched, as is any
        # string with no variable start marker (var.start < 0).
        var = VariableSplitter(string, identifiers='$@%')
        if var.start < 0 or token in (COMMENT, ERROR):
            yield string, token
            return
        for value, token in self._tokenize(var, string, token):
            if value:
                yield value, token
    def _tokenize(self, var, string, orig_token):
        # Emit: text before the variable, the '<id>{' opener, the
        # (recursively tokenized) base, '}', an optional [index] part,
        # then the rest of the string (also tokenized recursively).
        before = string[:var.start]
        yield before, orig_token
        yield var.identifier + '{', SYNTAX
        for value, token in self.tokenize(var.base, VARIABLE):
            yield value, token
        yield '}', SYNTAX
        if var.index:
            yield '[', SYNTAX
            for value, token in self.tokenize(var.index, VARIABLE):
                yield value, token
            yield ']', SYNTAX
        for value, token in self.tokenize(string[var.end:], orig_token):
            yield value, token
class RowTokenizer(object):
    # Tracks which table (settings/variables/test cases/keywords) the
    # current row belongs to and delegates cell tokenization to it.
    def __init__(self):
        self._table = UnknownTable()
        self._splitter = RowSplitter()
        testcases = TestCaseTable()
        settings = SettingTable(testcases.set_default_template)
        variables = VariableTable()
        keywords = KeywordTable()
        # Several normalized header aliases map to the same table instance,
        # matching the header names Robot Framework accepts.
        self._tables = {'settings': settings, 'setting': settings,
                        'metadata': settings,
                        'variables': variables, 'variable': variables,
                        'testcases': testcases, 'testcase': testcases,
                        'keywords': keywords, 'keyword': keywords,
                        'userkeywords': keywords, 'userkeyword': keywords}
    def tokenize(self, row):
        # Yield (value, token) pairs for one physical row.
        commented = False
        heading = False
        for index, value in enumerate(self._splitter.split(row)):
            # First value, and every second after that, is a separator.
            index, separator = divmod(index-1, 2)
            if value.startswith('#'):
                commented = True
            elif index == 0 and value.startswith('*'):
                # A '*...' first cell starts a new table section.
                self._table = self._start_table(value)
                heading = True
            for value, token in self._tokenize(value, index, commented,
                                               separator, heading):
                yield value, token
        self._table.end_row()
    def _start_table(self, header):
        name = normalize(header, remove='*')
        return self._tables.get(name, UnknownTable())
    def _tokenize(self, value, index, commented, separator, heading):
        if commented:
            yield value, COMMENT
        elif separator:
            yield value, SEPARATOR
        elif heading:
            yield value, HEADING
        else:
            for value, token in self._table.tokenize(value, index):
                yield value, token
class RowSplitter(object):
    """Split one physical row into alternating separator/cell strings."""
    _space_splitter = re.compile(r'( {2,})')
    _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')
    def split(self, row):
        """Yield separators and cells, terminated by a newline token."""
        if row.startswith('| '):
            pieces = self._split_from_pipes(row)
        else:
            pieces = self._split_from_spaces(row)
        for piece in pieces:
            yield piece
        yield '\n'
    def _split_from_spaces(self, row):
        # Emit a leading pseudo-separator so the output shape matches the
        # pipe-separated format, which always starts with a separator.
        yield ''
        for piece in self._space_splitter.split(row):
            yield piece
    def _split_from_pipes(self, row):
        _, separator, rest = self._pipe_splitter.split(row, 1)
        yield separator
        while self._pipe_splitter.search(rest):
            cell, separator, rest = self._pipe_splitter.split(rest, 1)
            yield cell
            yield separator
        yield rest
class Tokenizer(object):
    # Base class for per-row cell tokenizers.  Subclasses set _tokens:
    # one token per cell position, with the last one repeated.
    _tokens = None
    def __init__(self):
        self._index = 0
    def tokenize(self, value):
        values_and_tokens = self._tokenize(value, self._index)
        self._index += 1
        # _tokenize may return either a bare token or a list of
        # (value, token) pairs; normalize the bare-token case here.
        # type(Token) is the metatype of Pygments token types.
        if isinstance(values_and_tokens, type(Token)):
            values_and_tokens = [(value, values_and_tokens)]
        return values_and_tokens
    def _tokenize(self, value, index):
        # Cell positions past the end of _tokens reuse the last token.
        index = min(index, len(self._tokens) - 1)
        return self._tokens[index]
    def _is_assign(self, value):
        # True when the whole value is a single ${...}/@{...} variable,
        # optionally followed by '=' -- i.e. an assignment target.
        if value.endswith('='):
            value = value[:-1].strip()
        var = VariableSplitter(value, identifiers='$@')
        return var.start == 0 and var.end == len(value)
class Comment(Tokenizer):
    # Used for rows in unknown tables: every cell is a comment.
    _tokens = (COMMENT,)
class Setting(Tokenizer):
    # A settings-table row: first cell is the setting name, the rest are
    # its arguments.  Certain settings delegate to a custom tokenizer.
    _tokens = (SETTING, ARGUMENT)
    _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
                         'suitepostcondition', 'testsetup', 'testprecondition',
                         'testteardown', 'testpostcondition', 'testtemplate')
    _import_settings = ('library', 'resource', 'variables')
    _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
                       'testtimeout')
    _custom_tokenizer = None
    def __init__(self, template_setter=None):
        Tokenizer.__init__(self)
        self._template_setter = template_setter
    def _tokenize(self, value, index):
        # Cell 1 of a 'Test Template' row carries the template name;
        # report it back so templated test rows tokenize as arguments.
        if index == 1 and self._template_setter:
            self._template_setter(value)
        if index == 0:
            normalized = normalize(value)
            if normalized in self._keyword_settings:
                self._custom_tokenizer = KeywordCall(support_assign=False)
            elif normalized in self._import_settings:
                self._custom_tokenizer = ImportSetting()
            elif normalized not in self._other_settings:
                # Unrecognized setting name.
                return ERROR
        elif self._custom_tokenizer:
            return self._custom_tokenizer.tokenize(value)
        return Tokenizer._tokenize(self, value, index)
class ImportSetting(Tokenizer):
    # Library/Resource/Variables import row: name, then arguments.
    _tokens = (IMPORT, ARGUMENT)
class TestCaseSetting(Setting):
    # Settings written inside a test case, e.g. '[Setup]' / '[Template]'.
    _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
                         'template')
    _import_settings = ()
    _other_settings = ('documentation', 'tags', 'timeout')
    def _tokenize(self, value, index):
        if index == 0:
            # Classify the bare name without brackets, then emit the
            # surrounding '[' and ']' as SYNTAX around it.
            # (Local name 'type' shadows the builtin; kept as-is.)
            type = Setting._tokenize(self, value[1:-1], index)
            return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
        return Setting._tokenize(self, value, index)
class KeywordSetting(TestCaseSetting):
    # Settings allowed inside a user keyword, e.g. '[Arguments]'.
    _keyword_settings = ('teardown',)
    _other_settings = ('documentation', 'arguments', 'return', 'timeout')
class Variable(Tokenizer):
    # Variable-table row: a '${name}' cell followed by its value(s).
    _tokens = (SYNTAX, ARGUMENT)
    def _tokenize(self, value, index):
        # The first cell must be a valid assignment target.
        if index == 0 and not self._is_assign(value):
            return ERROR
        return Tokenizer._tokenize(self, value, index)
class KeywordCall(Tokenizer):
    # A keyword-call row: optional '${assign} =' cells, then the keyword
    # name, then its arguments.
    _tokens = (KEYWORD, ARGUMENT)
    def __init__(self, support_assign=True):
        Tokenizer.__init__(self)
        # When assignment is unsupported, pretend the keyword was already
        # seen so the first cell is treated as the keyword name.
        self._keyword_found = not support_assign
        self._assigns = 0
    def _tokenize(self, value, index):
        if not self._keyword_found and self._is_assign(value):
            self._assigns += 1
            return SYNTAX  # VariableTokenizer tokenizes this later.
        if self._keyword_found:
            # Shift the index so arguments line up after any assign cells.
            return Tokenizer._tokenize(self, value, index - self._assigns)
        self._keyword_found = True
        return GherkinTokenizer().tokenize(value, KEYWORD)
class GherkinTokenizer(object):
    """Highlight a leading Given/When/Then/And prefix with the GHERKIN token."""
    _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE)
    def tokenize(self, value, token):
        """Return [(value, token)] pairs, splitting off a Gherkin prefix."""
        match = self._gherkin_prefix.match(value)
        if match is None:
            return [(value, token)]
        split_at = match.end()
        return [(value[:split_at], GHERKIN), (value[split_at:], token)]
class TemplatedKeywordCall(Tokenizer):
    # With a template active, every cell is an argument to the template.
    _tokens = (ARGUMENT,)
class ForLoop(Tokenizer):
    """Tokenizer for ':FOR' rows: syntax words up to and including the
    IN / IN RANGE marker, arguments afterwards."""
    def __init__(self):
        Tokenizer.__init__(self)
        self._in_arguments = False
    def _tokenize(self, value, index):
        # Decide the token first, then flip the flag -- the IN marker
        # itself is still SYNTAX.
        if self._in_arguments:
            token = ARGUMENT
        else:
            token = SYNTAX
        if value.upper() in ('IN', 'IN RANGE'):
            self._in_arguments = True
        return token
class _Table(object):
    # Base class for table sections; handles '...' row continuations.
    _tokenizer_class = None
    def __init__(self, prev_tokenizer=None):
        self._tokenizer = self._tokenizer_class()
        self._prev_tokenizer = prev_tokenizer
        self._prev_values_on_row = []
    def tokenize(self, value, index):
        if self._continues(value, index):
            # '...' continues the previous row, so resume its tokenizer.
            self._tokenizer = self._prev_tokenizer
            yield value, SYNTAX
        else:
            for value_and_token in self._tokenize(value, index):
                yield value_and_token
        self._prev_values_on_row.append(value)
    def _continues(self, value, index):
        # Continuation only when every earlier cell on the row was empty.
        return value == '...' and all(self._is_empty(t)
                                      for t in self._prev_values_on_row)
    def _is_empty(self, value):
        return value in ('', '\\')
    def _tokenize(self, value, index):
        return self._tokenizer.tokenize(value)
    def end_row(self):
        # Reset per-row state while remembering this row's tokenizer so a
        # following '...' continuation row can pick it up again.
        self.__init__(prev_tokenizer=self._tokenizer)
class UnknownTable(_Table):
    # Rows before any recognized table header: treated as comments,
    # with no row continuation.
    _tokenizer_class = Comment
    def _continues(self, value, index):
        return False
class VariableTable(_Table):
    # '*** Variables ***' section.
    _tokenizer_class = Variable
class SettingTable(_Table):
    # '*** Settings ***' section.
    _tokenizer_class = Setting
    def __init__(self, template_setter, prev_tokenizer=None):
        _Table.__init__(self, prev_tokenizer)
        self._template_setter = template_setter
    def _tokenize(self, value, index):
        if index == 0 and normalize(value) == 'testtemplate':
            # 'Test Template' rows get a tokenizer that records the template
            # name, since it changes how test-case rows are tokenized.
            self._tokenizer = Setting(self._template_setter)
        return _Table._tokenize(self, value, index)
    def end_row(self):
        self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
class TestCaseTable(_Table):
    _setting_class = TestCaseSetting
    # _test_template: None = unknown, False = explicitly not templated,
    # truthy = this test has its own template.  _default_template comes
    # from the settings table's 'Test Template'.
    _test_template = None
    _default_template = None
    @property
    def _tokenizer_class(self):
        # A per-test template wins; otherwise the suite default applies
        # unless the test explicitly opted out (_test_template is False).
        if self._test_template or (self._default_template and
                                   self._test_template is not False):
            return TemplatedKeywordCall
        return KeywordCall
    def _continues(self, value, index):
        # The first cell (the test name column) can never be a continuation.
        return index > 0 and _Table._continues(self, value, index)
    def _tokenize(self, value, index):
        if index == 0:
            if value:
                # A new test case starts; reset its template state.
                self._test_template = None
            return GherkinTokenizer().tokenize(value, TC_KW_NAME)
        if index == 1 and self._is_setting(value):
            if self._is_template(value):
                self._test_template = False
                self._tokenizer = self._setting_class(self.set_test_template)
            else:
                self._tokenizer = self._setting_class()
        if index == 1 and self._is_for_loop(value):
            self._tokenizer = ForLoop()
        if index == 1 and self._is_empty(value):
            return [(value, SYNTAX)]
        return _Table._tokenize(self, value, index)
    def _is_setting(self, value):
        return value.startswith('[') and value.endswith(']')
    def _is_template(self, value):
        return normalize(value) == '[template]'
    def _is_for_loop(self, value):
        return value.startswith(':') and normalize(value, remove=':') == 'for'
    def set_test_template(self, template):
        self._test_template = self._is_template_set(template)
    def set_default_template(self, template):
        self._default_template = self._is_template_set(template)
    def _is_template_set(self, template):
        # 'NONE', '${EMPTY}', '\\' and '' all mean "no template".
        return normalize(template) not in ('', '\\', 'none', '${empty}')
class KeywordTable(TestCaseTable):
    # '*** Keywords ***' section: like test cases, but keywords have no
    # [Template] setting, so templating is disabled.
    _tokenizer_class = KeywordCall
    _setting_class = KeywordSetting
    def _is_template(self, value):
        return False
# Following code copied directly from Robot Framework 2.7.5.
class VariableSplitter:
    # Finds the first variable expression (e.g. ${var}, @{list}[0]) in a
    # string.  On success: identifier ('$'/'@'/'%'), base (name between the
    # braces), optional index (for @{list}[i]) and start/end offsets are
    # set.  On failure start/end stay -1.
    def __init__(self, string, identifiers):
        self.identifier = None
        self.base = None
        self.index = None
        self.start = -1
        self.end = -1
        self._identifiers = identifiers
        self._may_have_internal_variables = False
        try:
            self._split(string)
        except ValueError:
            # No variable found; keep the default "not found" state.
            pass
        else:
            self._finalize()
    def get_replaced_base(self, variables):
        # Resolve nested variables inside the base name when any were seen.
        if self._may_have_internal_variables:
            return variables.replace_string(self.base)
        return self.base
    def _finalize(self):
        # _variable_chars is e.g. ['$', '{', 'v', 'a', 'r', '}'].
        self.identifier = self._variable_chars[0]
        self.base = ''.join(self._variable_chars[2:-1])
        self.end = self.start + len(self._variable_chars)
        if self._has_list_variable_index():
            self.index = ''.join(self._list_variable_index_chars[1:-1])
            self.end += len(self._list_variable_index_chars)
    def _has_list_variable_index(self):
        return self._list_variable_index_chars\
        and self._list_variable_index_chars[-1] == ']'
    def _split(self, string):
        # Character-by-character state machine; the current state is a
        # bound method in self._state, and StopIteration ends the scan.
        start_index, max_index = self._find_variable(string)
        self.start = start_index
        self._open_curly = 1
        self._state = self._variable_state
        self._variable_chars = [string[start_index], '{']
        self._list_variable_index_chars = []
        self._string = string
        start_index += 2
        for index, char in enumerate(string[start_index:]):
            index += start_index  # Giving start to enumerate only in Py 2.6+
            try:
                self._state(char, index)
            except StopIteration:
                return
            if index == max_index and not self._scanning_list_variable_index():
                return
    def _scanning_list_variable_index(self):
        return self._state in [self._waiting_list_variable_index_state,
                               self._list_variable_index_state]
    def _find_variable(self, string):
        # Locate the outermost candidate span: last unescaped '}' and the
        # first valid '<identifier>{' before it.
        max_end_index = string.rfind('}')
        if max_end_index == -1:
            raise ValueError('No variable end found')
        if self._is_escaped(string, max_end_index):
            return self._find_variable(string[:max_end_index])
        start_index = self._find_start_index(string, 1, max_end_index)
        if start_index == -1:
            raise ValueError('No variable start found')
        return start_index, max_end_index
    def _find_start_index(self, string, start, end):
        index = string.find('{', start, end) - 1
        if index < 0:
            return -1
        if self._start_index_is_ok(string, index):
            return index
        return self._find_start_index(string, index+2, end)
    def _start_index_is_ok(self, string, index):
        return string[index] in self._identifiers\
        and not self._is_escaped(string, index)
    def _is_escaped(self, string, index):
        # Escaped iff preceded by an odd number of backslashes.
        escaped = False
        while index > 0 and string[index-1] == '\\':
            index -= 1
            escaped = not escaped
        return escaped
    def _variable_state(self, char, index):
        self._variable_chars.append(char)
        if char == '}' and not self._is_escaped(self._string, index):
            self._open_curly -= 1
            if self._open_curly == 0:
                if not self._is_list_variable():
                    raise StopIteration
                # List variables may be followed by an [index] part.
                self._state = self._waiting_list_variable_index_state
        elif char in self._identifiers:
            self._state = self._internal_variable_start_state
    def _is_list_variable(self):
        return self._variable_chars[0] == '@'
    def _internal_variable_start_state(self, char, index):
        self._state = self._variable_state
        if char == '{':
            # A nested variable opens: count its brace and remember that
            # the base will need variable replacement.
            self._variable_chars.append(char)
            self._open_curly += 1
            self._may_have_internal_variables = True
        else:
            self._variable_state(char, index)
    def _waiting_list_variable_index_state(self, char, index):
        if char != '[':
            raise StopIteration
        self._list_variable_index_chars.append(char)
        self._state = self._list_variable_index_state
    def _list_variable_index_state(self, char, index):
        self._list_variable_index_chars.append(char)
        if char == ']':
            raise StopIteration
|
mit
|
ThinkingBridge/platform_external_chromium_org
|
build/linux/unbundle/replace_gyp_files.py
|
31
|
2798
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Replaces gyp files in tree with files from here that
make the build use system libraries.
"""
import optparse
import os.path
import shutil
import sys
# Maps a gyp define flag to the in-tree gyp file it replaces.
REPLACEMENTS = {
  'use_system_expat': 'third_party/expat/expat.gyp',
  'use_system_ffmpeg': 'third_party/ffmpeg/ffmpeg.gyp',
  'use_system_flac': 'third_party/flac/flac.gyp',
  'use_system_harfbuzz': 'third_party/harfbuzz-ng/harfbuzz.gyp',
  'use_system_icu': 'third_party/icu/icu.gyp',
  'use_system_jsoncpp': 'third_party/jsoncpp/jsoncpp.gyp',
  'use_system_libevent': 'third_party/libevent/libevent.gyp',
  'use_system_libjpeg': 'third_party/libjpeg/libjpeg.gyp',
  'use_system_libpng': 'third_party/libpng/libpng.gyp',
  'use_system_libusb': 'third_party/libusb/libusb.gyp',
  'use_system_libvpx': 'third_party/libvpx/libvpx.gyp',
  'use_system_libwebp': 'third_party/libwebp/libwebp.gyp',
  'use_system_libxml': 'third_party/libxml/libxml.gyp',
  'use_system_libxslt': 'third_party/libxslt/libxslt.gyp',
  'use_system_opus': 'third_party/opus/opus.gyp',
  'use_system_re2': 'third_party/re2/re2.gyp',
  'use_system_snappy': 'third_party/snappy/snappy.gyp',
  'use_system_speex': 'third_party/speex/speex.gyp',
  'use_system_sqlite': 'third_party/sqlite/sqlite.gyp',
  'use_system_v8': 'v8/tools/gyp/v8.gyp',
  'use_system_zlib': 'third_party/zlib/zlib.gyp',
}
def DoMain(argv):
  """Replace in-tree gyp files (or restore them with --undo) for every
  use_system_* flag passed as -D <flag>=1.  Returns 0 on success."""
  my_dirname = os.path.dirname(__file__)
  source_tree_root = os.path.abspath(
    os.path.join(my_dirname, '..', '..', '..'))
  parser = optparse.OptionParser()
  # Accept arguments in gyp command-line syntax, so that the caller can re-use
  # command-line for this script and gyp.
  parser.add_option('-D', dest='defines', action='append')
  parser.add_option('--undo', action='store_true')
  options, args = parser.parse_args(argv)
  # BUGFIX: optparse leaves options.defines as None when no -D flag was
  # passed, which previously made the membership test below raise TypeError.
  defines = options.defines or []
  for flag, path in REPLACEMENTS.items():
    if '%s=1' % flag not in defines:
      continue
    if options.undo:
      # Restore original file, and also remove the backup.
      # This is meant to restore the source tree to its original state.
      os.rename(os.path.join(source_tree_root, path + '.orig'),
                os.path.join(source_tree_root, path))
    else:
      # Create a backup copy for --undo.
      shutil.copyfile(os.path.join(source_tree_root, path),
                      os.path.join(source_tree_root, path + '.orig'))
      # Copy the gyp file from directory of this script to target path.
      shutil.copyfile(os.path.join(my_dirname, os.path.basename(path)),
                      os.path.join(source_tree_root, path))
  return 0
# Script entry point. Note the whole sys.argv is forwarded, so argv[0]
# (the script name) ends up among optparse's leftover positional args,
# which DoMain ignores.
if __name__ == '__main__':
sys.exit(DoMain(sys.argv))
|
bsd-3-clause
|
DavidPurcell/murano_temp
|
murano/dsl/principal_objects/__init__.py
|
2
|
1068
|
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from murano.dsl.principal_objects import exception
from murano.dsl.principal_objects import garbage_collector
from murano.dsl.principal_objects import stack_trace
from murano.dsl.principal_objects import sys_object
def register(package):
    """Register all principal DSL classes with *package*.

    Registration order matches the historical one: SysObject first,
    then StackTrace, DslException and GarbageCollector.
    """
    principal_classes = (
        sys_object.SysObject,
        stack_trace.StackTrace,
        exception.DslException,
        garbage_collector.GarbageCollector,
    )
    for dsl_class in principal_classes:
        package.register_class(dsl_class)
|
apache-2.0
|
joausaga/ideascaly
|
tests/test_api.py
|
1
|
10169
|
import os
import unittest
from tests.config import IdeascalyTestCase
from ideascaly.models import Idea, Vote, Comment, Campaign, Author
"""Unit tests"""
class IdeascalyAPITests(IdeascalyTestCase):
    """Integration tests for the ideascaly API wrapper.

    NOTE(review): these tests talk to a live IdeaScale community (the ids
    below belong to it), so they need network access and the credentials
    configured in tests.config.
    """

    # ---
    # Testing variables: well-known ids/content in the test community
    # ---
    campaign_id = 28416
    idea_id_votes = 137010
    idea_id_comments = 137010
    idea_id_attachment = 139717
    title_idea = "From the TestCase of IdeaScaly"
    text_idea = "Hello from IdeaScaly!"
    text_comment = "From the TestCase of IdeaScaly!"
    comment_id = 773330
    member_id = 691840
    member_email = "example@domain.com"
    member_name = "example"
    member_name_d = "donald"
    member_email_d = "donald@disney.info"
    member_id_ideas = 119840
    member_id_votes = 119793
    filename = str(os.path.join(os.path.dirname(__file__), 'pic.jpg'))

    def _assert_model_list(self, result, model_cls):
        """Assert *result* is a list and, when non-empty, that its first
        element is an instance of *model_cls* (shared check for every
        list-returning endpoint)."""
        self.assertIsInstance(result, list)
        if len(result) > 0:
            self.assertIsInstance(result[0], model_cls)

    # ----
    # Test cases related with community actions
    # ----
    def testget_all_ideas(self):
        self._assert_model_list(self.api.get_all_ideas(), Idea)

    def testget_all_votes_ideas(self):
        self._assert_model_list(self.api.get_all_votes_ideas(), Vote)

    def testget_all_votes_comments(self):
        self._assert_model_list(self.api.get_all_votes_comments(), Vote)

    def testget_all_comments(self):
        self._assert_model_list(self.api.get_all_comments(), Comment)

    def testget_all_members(self):
        self._assert_model_list(self.api.get_all_members(), Author)

    # ----
    # Test cases related with campaign actions
    # ----
    def testget_campaigns(self):
        self._assert_model_list(self.api.get_campaigns(), Campaign)

    def testget_active_ideas(self):
        self._assert_model_list(self.api.get_active_ideas(), Idea)

    def testget_archived_ideas(self):
        self._assert_model_list(self.api.get_archived_ideas(), Idea)

    def testget_ideas_campaign(self):
        self._assert_model_list(self.api.get_ideas_campaign(self.campaign_id), Idea)

    # ----
    # Test cases related with idea actions
    # ----
    def testget_ideas_in_progress(self):
        self._assert_model_list(self.api.get_ideas_in_progress(), Idea)

    def testget_ideas_in_progress_pagination(self):
        result = self.api.get_ideas_in_progress(page_number=0, page_size=25, order_key='date-down')
        self._assert_model_list(result, Idea)

    def testget_ideas_in_progress_campaign(self):
        result = self.api.get_ideas_in_progress(campaign_id=self.campaign_id)
        self._assert_model_list(result, Idea)

    def testget_ideas_complete(self):
        self._assert_model_list(self.api.get_ideas_complete(), Idea)

    def testget_ideas_in_review(self):
        self._assert_model_list(self.api.get_ideas_in_review(), Idea)

    def testget_votes_idea(self):
        self._assert_model_list(self.api.get_votes_idea(ideaId=self.idea_id_votes), Vote)

    def testget_comments_idea(self):
        self._assert_model_list(self.api.get_comments_idea(ideaId=self.idea_id_comments), Comment)

    def testget_idea_details(self):
        result = self.api.get_idea_details(self.idea_id_comments)
        self.assertIsInstance(result, Idea)

    def testget_top_ideas(self):
        self._assert_model_list(self.api.get_top_ideas(), Idea)

    def testget_recent_ideas(self):
        self._assert_model_list(self.api.get_recent_ideas(), Idea)

    def testget_hot_ideas(self):
        self._assert_model_list(self.api.get_hot_ideas(), Idea)

    def testcreate_and_delete_idea(self):
        # Create then delete the same idea, leaving the community
        # unchanged when the test passes.
        result = self.api.create_idea(title=self.title_idea, text=self.text_idea, campaignId=self.campaign_id)
        self.assertIsInstance(result, Idea)
        result = self.api.delete_idea(ideaId=result.id)
        self.assertIsInstance(result, Idea)

    def testvote_up_idea(self):
        result = self.api.vote_up_idea(ideaId=self.idea_id_comments)
        self.assertIsInstance(result, Vote)

    def testvote_down_idea(self):
        result = self.api.vote_down_idea(ideaId=self.idea_id_votes)
        self.assertIsInstance(result, Vote)

    def testadd_comment_idea(self):
        result = self.api.comment_idea(ideaId=self.idea_id_comments, text=self.text_comment)
        self.assertIsInstance(result, Comment)

    def testattach_file_idea(self):
        result = self.api.attach_file_to_idea(filename=self.filename, ideaId=self.idea_id_attachment)
        self.assertIsInstance(result, Idea)

    # ----
    # Test cases related with comment actions
    # ----
    def testadd_and_delete_comment_comment(self):
        result = self.api.comment_comment(commentId=self.comment_id, text=self.text_comment)
        self.assertIsInstance(result, Comment)
        result = self.api.delete_comment(commentId=result.id)
        self.assertIsInstance(result, Comment)

    def testget_votes_comment(self):
        self._assert_model_list(self.api.get_votes_comment(commentId=self.comment_id), Vote)

    def testget_votes_comments(self):
        self._assert_model_list(self.api.get_votes_comments(), Vote)

    def testget_comment(self):
        result = self.api.get_comment(commentId=self.comment_id)
        self.assertIsInstance(result, Comment)

    def testget_all_comments_pagination(self):
        result = self.api.get_all_comments(page_number=0, page_size=25)
        self._assert_model_list(result, Comment)

    # -----
    # Test cases related with member actions
    # -----
    def testcreate_new_member(self):
        result = self.api.create_new_member(name="me", email="me@xyz.info")
        self.assertIsInstance(result, Author)

    def testcreate_new_member_silent(self):
        # silent=True presumably suppresses the notification e-mail --
        # TODO(review): confirm against the API wrapper.
        result = self.api.create_new_member(name="Pato Donald", email="donald@disney.info", silent=True)
        self.assertIsInstance(result, Author)

    def testget_member_info_by_id(self):
        result = self.api.get_member_info_by_id(memberId=self.member_id)
        self.assertIsInstance(result, Author)
        self.assertEqual(result.email, self.member_email)

    def testget_member_info_by_name(self):
        result = self.api.get_member_info_by_name(name=self.member_name_d)
        self._assert_model_list(result, Author)
        if len(result) > 0:
            self.assertEqual(result[0].email, self.member_email_d)

    def testget_member_info_by_email(self):
        result = self.api.get_member_info_by_email(email=self.member_email)
        self.assertIsInstance(result, Author)
        self.assertEqual(result.name, self.member_name)

    def testget_member_ideas(self):
        self._assert_model_list(self.api.get_member_ideas(memberId=self.member_id_ideas), Idea)

    def testget_member_ideas_pagination(self):
        result = self.api.get_member_ideas(memberId=self.member_id_ideas, page_number=0, page_size=25)
        self._assert_model_list(result, Idea)

    def testget_member_comments_votes(self):
        self._assert_model_list(self.api.get_votes_comments_member(memberId=self.member_id_votes), Vote)

    def testget_member_ideas_votes(self):
        self._assert_model_list(self.api.get_votes_ideas_member(memberId=self.member_id_votes), Vote)

    def testget_member_comments(self):
        self._assert_model_list(self.api.get_comments_member(memberId=self.member_id_ideas), Comment)

    def testattach_image_member_avatar(self):
        result = self.api.attach_avatar_to_member(filename=self.filename, memberId=self.member_id_votes)
        # This endpoint returns a plain dict rather than a model object.
        self.assertTrue('url' in result.keys())
# Allow running this module directly with `python test_api.py`.
if __name__ == '__main__':
unittest.main()
|
mit
|
byterom/android_external_skia
|
tools/jsondiff.py
|
113
|
7526
|
#!/usr/bin/python
'''
Copyright 2013 Google Inc.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''
'''
Gathers diffs between 2 JSON expectations files, or between actual and
expected results within a single JSON actual-results file,
and generates an old-vs-new diff dictionary.
TODO(epoger): Fix indentation in this file (2-space indents, not 4-space).
'''
# System-level imports
import argparse
import json
import os
import sys
import urllib2
# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory. That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
#
# This assumes that the 'gm' directory has been checked out as a sibling of
# the 'tools' directory containing this script, which will be the case if
# 'trunk' was checked out as a single unit.
# Absolute path of the sibling 'gm' directory (../gm relative to this
# script's parent), appended to sys.path so the gm_json import below
# resolves without shadowing anything already on the path.
GM_DIRECTORY = os.path.realpath(
os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
if GM_DIRECTORY not in sys.path:
sys.path.append(GM_DIRECTORY)
import gm_json
# Object that generates diffs between two JSON gm result files.
class GMDiffer(object):
    """Generates diffs between two JSON gm result files, or between the
    expected and actual sections of a single file."""

    def __init__(self):
        pass

    def _GetFileContentsAsString(self, filepath):
        """Returns the full contents of a file, as a single string.

        If the filename looks like a URL, download its contents.
        If the filename is None, return None."""
        if filepath is None:
            return None
        elif filepath.startswith('http:') or filepath.startswith('https:'):
            return urllib2.urlopen(filepath).read()
        else:
            # Close the handle deterministically; the original
            # `open(filepath, 'r').read()` leaked it until GC.
            with open(filepath, 'r') as f:
                return f.read()

    def _GetExpectedResults(self, contents):
        """Returns the dictionary of expected results from a JSON string,
        in this form:

        {
          'test1' : 14760033689012826769,
          'test2' : 9151974350149210736,
          ...
        }

        We make these simplifying assumptions:
        1. Each test has either 0 or 1 allowed results.
        2. All expectations are of type JSONKEY_HASHTYPE_BITMAP_64BITMD5.

        Any tests which violate those assumptions will cause an exception
        to be raised.

        Any tests for which we have no expectations will be left out of the
        returned dictionary.
        """
        result_dict = {}
        json_dict = gm_json.LoadFromString(contents)
        all_expectations = json_dict[gm_json.JSONKEY_EXPECTEDRESULTS]
        # Prevent https://code.google.com/p/skia/issues/detail?id=1588
        # ('svndiff.py: 'NoneType' object has no attribute 'keys'')
        if not all_expectations:
            return result_dict
        for test_name in all_expectations.keys():
            test_expectations = all_expectations[test_name]
            allowed_digests = test_expectations[
                gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS]
            if allowed_digests:
                num_allowed_digests = len(allowed_digests)
                if num_allowed_digests > 1:
                    raise ValueError(
                        'test %s has %d allowed digests' % (
                            test_name, num_allowed_digests))
                digest_pair = allowed_digests[0]
                if digest_pair[0] != gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5:
                    raise ValueError(
                        'test %s has unsupported hashtype %s' % (
                            test_name, digest_pair[0]))
                result_dict[test_name] = digest_pair[1]
        return result_dict

    def _GetActualResults(self, contents):
        """Returns the dictionary of actual results from a JSON string,
        in this form:

        {
          'test1' : 14760033689012826769,
          'test2' : 9151974350149210736,
          ...
        }

        We make these simplifying assumptions:
        1. All results are of type JSONKEY_HASHTYPE_BITMAP_64BITMD5.

        Any tests which violate those assumptions will cause an exception
        to be raised.

        Any tests for which we have no actual results will be left out of
        the returned dictionary.
        """
        result_dict = {}
        json_dict = gm_json.LoadFromString(contents)
        all_result_types = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
        for result_type in all_result_types.keys():
            results_of_this_type = all_result_types[result_type]
            if results_of_this_type:
                for test_name in results_of_this_type.keys():
                    digest_pair = results_of_this_type[test_name]
                    if digest_pair[0] != gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5:
                        raise ValueError(
                            'test %s has unsupported hashtype %s' % (
                                test_name, digest_pair[0]))
                    result_dict[test_name] = digest_pair[1]
        return result_dict

    def _DictionaryDiff(self, old_dict, new_dict):
        """Generate a dictionary showing the diffs between old_dict and
        new_dict. Any entries which are identical across them will be left
        out; missing keys show up as None on the corresponding side."""
        diff_dict = {}
        # Union the key sets directly: `old_dict.keys() + new_dict.keys()`
        # relied on Python-2 keys() returning a list and raises TypeError
        # on Python 3, where keys() returns a view.
        all_keys = set(old_dict) | set(new_dict)
        for key in all_keys:
            if old_dict.get(key) != new_dict.get(key):
                new_entry = {}
                new_entry['old'] = old_dict.get(key)
                new_entry['new'] = new_dict.get(key)
                diff_dict[key] = new_entry
        return diff_dict

    def GenerateDiffDict(self, oldfile, newfile=None):
        """Generate a dictionary showing the diffs:
        old = expectations within oldfile
        new = expectations within newfile

        If newfile is not specified, then 'new' is the actual results
        within oldfile.
        """
        return self.GenerateDiffDictFromStrings(
            self._GetFileContentsAsString(oldfile),
            self._GetFileContentsAsString(newfile))

    def GenerateDiffDictFromStrings(self, oldjson, newjson=None):
        """Generate a dictionary showing the diffs:
        old = expectations within oldjson
        new = expectations within newjson

        If newjson is not specified, then 'new' is the actual results
        within oldjson.
        """
        old_results = self._GetExpectedResults(oldjson)
        if newjson:
            new_results = self._GetExpectedResults(newjson)
        else:
            new_results = self._GetActualResults(oldjson)
        return self._DictionaryDiff(old_results, new_results)
# Command-line entry point: diff two JSON expectation files (or one
# file's expectations vs its actual results) and dump the diff as
# pretty-printed JSON on stdout.
def _Main():
parser = argparse.ArgumentParser()
parser.add_argument(
'old',
help='Path to JSON file whose expectations to display on ' +
'the "old" side of the diff. This can be a filepath on ' +
'local storage, or a URL.')
parser.add_argument(
'new', nargs='?',
help='Path to JSON file whose expectations to display on ' +
'the "new" side of the diff; if not specified, uses the ' +
'ACTUAL results from the "old" JSON file. This can be a ' +
'filepath on local storage, or a URL.')
args = parser.parse_args()
differ = GMDiffer()
diffs = differ.GenerateDiffDict(oldfile=args.old, newfile=args.new)
# sort_keys + indent keep the output stable and human-readable.
json.dump(diffs, sys.stdout, sort_keys=True, indent=2)
if __name__ == '__main__':
_Main()
|
bsd-3-clause
|
jastarex/DeepLearningCourseCodes
|
01_TF_basics_and_linear_regression/tensorflow_basic.py
|
1
|
8932
|
# coding: utf-8
# NOTE(review): this tutorial script targets Python 2 (print statements)
# and a pre-1.0 TensorFlow API -- tf.initialize_all_variables,
# tf.all_variables and tf.mul were removed in TF >= 1.0 (replaced by
# tf.global_variables_initializer, tf.global_variables, tf.multiply).
# # TensorFlow基础
# In this tutorial, we are going to learn some basics in TensorFlow.
# ## Session
# Session is a class for running TensorFlow operations. A Session object encapsulates the environment in which Operation objects are executed, and Tensor objects are evaluated. In this tutorial, we will use a session to print out the value of tensor. Session can be used as follows:
# In[1]:
import tensorflow as tf
a = tf.constant(100)
with tf.Session() as sess:
print sess.run(a)
#syntactic sugar
print a.eval()
# or
sess = tf.Session()
print sess.run(a)
# print a.eval() # this will print out an error
# ## Interactive session
# Interactive session is a TensorFlow session for use in interactive contexts, such as a shell. The only difference with a regular Session is that an Interactive session installs itself as the default session on construction. The methods [Tensor.eval()](https://www.tensorflow.org/versions/r0.11/api_docs/python/framework.html#Tensor) and [Operation.run()](https://www.tensorflow.org/versions/r0.11/api_docs/python/framework.html#Operation) will use that session to run ops.This is convenient in interactive shells and IPython notebooks, as it avoids having to pass an explicit Session object to run ops.
# In[2]:
sess = tf.InteractiveSession()
print a.eval() # simple usage
# ## Constants
# We can use the `help` function to get an annotation about any function. Just type `help(tf.consant)` on the below cell and run it.
# It will print out `constant(value, dtype=None, shape=None, name='Const')` at the top. Value of tensor constant can be scalar, matrix or tensor (more than 2-dimensional matrix). Also, you can get a shape of tensor by running [tensor.get_shape()](https://www.tensorflow.org/versions/r0.11/api_docs/python/framework.html#Tensor)`.as_list()`.
#
# * tensor.get_shape()
# * tensor.get_shape().as_list()
# In[3]:
a = tf.constant([[1, 2, 3], [4, 5, 6]], dtype=tf.float32, name='a')
print a.eval()
print "shape: ", a.get_shape(), ",type: ", type(a.get_shape())
print "shape: ", a.get_shape().as_list(), ",type: ", type(a.get_shape().as_list()) # this is more useful
# ## Basic functions
# There are some basic functions we need to know. Those functions will be used in next tutorial **3. feed_forward_neural_network**.
# * tf.argmax
# * tf.reduce_sum
# * tf.equal
# * tf.random_normal
# #### tf.argmax
# `tf.argmax(input, dimension, name=None)` returns the index with the largest value across dimensions of a tensor.
#
# In[4]:
a = tf.constant([[1, 6, 5], [2, 3, 4]])
print a.eval()
print "argmax over axis 0"
print tf.argmax(a, 0).eval()
print "argmax over axis 1"
print tf.argmax(a, 1).eval()
# #### tf.reduce_sum
# `tf.reduce_sum(input_tensor, reduction_indices=None, keep_dims=False, name=None)` computes the sum of elements across dimensions of a tensor. Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each entry in reduction_indices. If `keep_dims` is true, the reduced dimensions are retained with length 1. If `reduction_indices` has no entries, all dimensions are reduced, and a tensor with a single element is returned
# In[5]:
a = tf.constant([[1, 1, 1], [2, 2, 2]])
print a.eval()
print "reduce_sum over entire matrix"
print tf.reduce_sum(a).eval()
print "reduce_sum over axis 0"
print tf.reduce_sum(a, 0).eval()
print "reduce_sum over axis 0 + keep dimensions"
print tf.reduce_sum(a, 0, keep_dims=True).eval()
print "reduce_sum over axis 1"
print tf.reduce_sum(a, 1).eval()
print "reduce_sum over axis 1 + keep dimensions"
print tf.reduce_sum(a, 1, keep_dims=True).eval()
# #### tf.equal
# `tf.equal(x, y, name=None)` returns the truth value of `(x == y)` element-wise. Note that `tf.equal` supports broadcasting. For more about broadcasting, please see [here](http://docs.scipy.org/doc/numpy/user/basics.broadcasting.html).
# In[6]:
a = tf.constant([[1, 0, 0], [0, 1, 1]])
print a.eval()
print "Equal to 1?"
print tf.equal(a, 1).eval()
print "Not equal to 1?"
print tf.not_equal(a, 1).eval()
# #### tf.random_normal
# `tf.random_normal(shape, mean=0.0, stddev=1.0, dtype=tf.float32, seed=None, name=None)` outputs random values from a normal distribution.
#
# In[7]:
normal = tf.random_normal([3], stddev=0.1)
print normal.eval()
# ## Variables
# When we train a model, we use variables to hold and update parameters. Variables are in-memory buffers containing tensors. They must be explicitly initialized and can be saved to disk during and after training. we can later restore saved values to exercise or analyze the model.
#
# * tf.Variable
# * tf.Tensor.name
# * tf.all_variables
#
# #### tf.Variable
# `tf.Variable(initial_value=None, trainable=True, name=None, variable_def=None, dtype=None)` creates a new variable with value `initial_value`.
# The new variable is added to the graph collections listed in collections, which defaults to `[GraphKeys.VARIABLES]`. If `trainable` is true, the variable is also added to the graph collection `GraphKeys.TRAINABLE_VARIABLES`.
# In[8]:
# variable will be initialized with normal distribution
var = tf.Variable(tf.random_normal([3], stddev=0.1), name='var')
print var.name
tf.initialize_all_variables().run()
print var.eval()
# #### tf.Tensor.name
# We can call `tf.Variable` and give the same name `my_var` more than once as seen below. Note that `var3.name` prints out `my_var_1:0` instead of `my_var:0`. This is because TensorFlow doesn't allow user to create variables with the same name. In this case, TensorFlow adds `'_1'` to the original name instead of printing out an error message. Note that you should be careful not to call `tf.Variable` giving same name more than once, because it will cause a fatal problem when you save and restore the variables.
# In[9]:
var2 = tf.Variable(tf.random_normal([2, 3], stddev=0.1), name='my_var')
var3 = tf.Variable(tf.random_normal([2, 3], stddev=0.1), name='my_var')
print var2.name
print var3.name
# #### tf.all_variables
# Using `tf.all_variables()`, we can get the names of all existing variables as follows:
# In[10]:
for var in tf.all_variables():
print var.name
# ## Sharing variables
# TensorFlow provides several classes and operations that you can use to create variables contingent on certain conditions.
# * tf.get_variable
# * tf.variable_scope
# * reuse_variables
# #### tf.get_variable
# `tf.get_variable(name, shape=None, dtype=None, initializer=None, trainable=True)` is used to get or create a variable instead of a direct call to `tf.Variable`. It uses an initializer instead of passing the value directly, as in `tf.Variable`. An initializer is a function that takes the shape and provides a tensor with that shape. Here are some initializers available in TensorFlow:
#
# * `tf.constant_initializer(value)` initializes everything to the provided value,
# * `tf.random_uniform_initializer(a, b)` initializes uniformly from [a, b],
# * `tf.random_normal_initializer(mean, stddev)` initializes from the normal distribution with the given mean and standard deviation.
# In[11]:
my_initializer = tf.random_normal_initializer(mean=0, stddev=0.1)
v = tf.get_variable('v', shape=[2, 3], initializer=my_initializer)
tf.initialize_all_variables().run()
print v.eval()
# #### tf.variable_scope
# `tf.variable_scope(scope_name)` manages namespaces for names passed to `tf.get_variable`.
# In[12]:
with tf.variable_scope('layer1'):
w = tf.get_variable('v', shape=[2, 3], initializer=my_initializer)
print w.name
with tf.variable_scope('layer2'):
w = tf.get_variable('v', shape=[2, 3], initializer=my_initializer)
print w.name
# #### reuse_variables
# Note that you should run the cell above only once. If you run the code above more than once, an error message will be printed out: `"ValueError: Variable layer1/v already exists, disallowed."`. This is because we used `tf.get_variable` above, and this function doesn't allow creating variables with the existing names. We can solve this problem by using `scope.reuse_variables()` to get preivously created variables instead of creating new ones.
# In[13]:
with tf.variable_scope('layer1', reuse=True):
w = tf.get_variable('v') # Unlike above, we don't need to specify shape and initializer
print w.name
# or
with tf.variable_scope('layer1') as scope:
scope.reuse_variables()
w = tf.get_variable('v')
print w.name
# ## Place holder
# TensorFlow provides a placeholder operation that must be fed with data on execution. If you want to get more details about placeholder, please see [here](https://www.tensorflow.org/versions/r0.11/api_docs/python/io_ops.html#placeholder).
# In[14]:
x = tf.placeholder(tf.int16)
y = tf.placeholder(tf.int16)
add = tf.add(x, y)
mul = tf.mul(x, y)
# Launch default graph.
print "2 + 3 = %d" % sess.run(add, feed_dict={x: 2, y: 3})
print "3 x 4 = %d" % sess.run(mul, feed_dict={x: 3, y: 4})
|
apache-2.0
|
0k/odoo
|
addons/product_visible_discount/product_visible_discount.py
|
28
|
4505
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
# Extends product.pricelist with a per-pricelist flag that controls
# whether pricelist reductions appear as an explicit discount on sale
# order lines (see sale_order_line.product_id_change below).
class product_pricelist(osv.osv):
_inherit = 'product.pricelist'
_columns ={
# When set, sale order lines show list price + discount % instead of
# a single net unit price.
'visible_discount': fields.boolean('Visible Discount'),
}
_defaults = {
'visible_discount': True,
}
# Overrides the sale order line product onchange: when the order's
# pricelist has visible_discount set, the pricelist reduction is exposed
# as an explicit discount percentage off the product's base list price
# instead of being folded into the unit price.
class sale_order_line(osv.osv):
_inherit = "sale.order.line"
def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, packaging=False,
fiscal_position=False, flag=False, context=None):
# Return the product's base 'list_price' scaled by the product UoM
# factor. NOTE(review): res_dict, qty and pricelist are unused here
# -- presumably kept for signature compatibility; the item_obj and
# price_type_obj pools below are fetched but never used either.
def get_real_price(res_dict, product_id, qty, uom, pricelist):
item_obj = self.pool.get('product.pricelist.item')
price_type_obj = self.pool.get('product.price.type')
product_obj = self.pool.get('product.product')
field_name = 'list_price'
product = product_obj.browse(cr, uid, product_id, context)
product_read = product_obj.read(cr, uid, [product_id], [field_name], context=context)[0]
factor = 1.0
if uom and uom != product.uom_id.id:
product_uom_obj = self.pool.get('product.uom')
uom_data = product_uom_obj.browse(cr, uid, product.uom_id.id)
factor = uom_data.factor
return product_read[field_name] * factor
# Let the standard onchange compute everything first, then rewrite
# price_unit/discount in its result dictionary.
res=super(sale_order_line, self).product_id_change(cr, uid, ids, pricelist, product, qty,
uom, qty_uos, uos, name, partner_id,
lang, update_tax, date_order, packaging=packaging, fiscal_position=fiscal_position, flag=flag, context=context)
context = {'lang': lang, 'partner_id': partner_id}
result=res['value']
pricelist_obj=self.pool.get('product.pricelist')
product_obj = self.pool.get('product.product')
if product and pricelist:
if result.get('price_unit',False):
price=result['price_unit']
else:
# No price computed by the base onchange: nothing to split.
return res
product = product_obj.browse(cr, uid, product, context)
list_price = pricelist_obj.price_get(cr, uid, [pricelist],
product.id, qty or 1.0, partner_id, {'uom': uom,'date': date_order })
so_pricelist = pricelist_obj.browse(cr, uid, pricelist, context=context)
new_list_price = get_real_price(list_price, product.id, qty, uom, pricelist)
if so_pricelist.visible_discount and list_price[pricelist] != 0 and new_list_price != 0:
if product.company_id and so_pricelist.currency_id.id != product.company_id.currency_id.id:
# new_list_price is in company's currency while price in pricelist currency
ctx = context.copy()
ctx['date'] = date_order
new_list_price = self.pool['res.currency'].compute(cr, uid,
product.company_id.currency_id.id, so_pricelist.currency_id.id,
new_list_price, context=ctx)
# Express the pricelist reduction as a percentage of the base
# list price; a negative "discount" (surcharge) is not shown.
discount = (new_list_price - price) / new_list_price * 100
if discount > 0:
result['price_unit'] = new_list_price
result['discount'] = discount
else:
result['discount'] = 0.0
else:
result['discount'] = 0.0
else:
result['discount'] = 0.0
return res
|
agpl-3.0
|
ajnirp/servo
|
python/servo/post_build_commands.py
|
5
|
9180
|
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
from __future__ import print_function, unicode_literals
import os
import os.path as path
import subprocess
from shutil import copytree, rmtree, copy2
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase, call, check_call, find_dep_path_newest, is_windows, is_macosx
def read_file(filename, if_exists=False):
    """Return the text contents of *filename*.

    When *if_exists* is true and the file does not exist, return None
    instead of raising.
    """
    if if_exists:
        if not path.exists(filename):
            return None
    with open(filename) as stream:
        return stream.read()
@CommandProvider
class PostBuildCommands(CommandBase):
    """Mach commands that operate on an already-built Servo binary:
    running it (optionally under a debugger or on an Android device),
    recording/replaying with rr, and generating/browsing rustdoc output.
    """
    @Command('run',
             description='Run Servo',
             category='post-build')
    @CommandArgument('--release', '-r', action='store_true',
                     help='Run the release build')
    @CommandArgument('--dev', '-d', action='store_true',
                     help='Run the dev build')
    @CommandArgument('--android', action='store_true', default=None,
                     help='Run on an Android device through `adb shell`')
    @CommandArgument('--debug', action='store_true',
                     help='Enable the debugger. Not specifying a '
                          '--debugger option will result in the default '
                          'debugger being used. The following arguments '
                          'have no effect without this.')
    @CommandArgument('--debugger', default=None, type=str,
                     help='Name of debugger to use.')
    @CommandArgument('--browserhtml', '-b', action='store_true',
                     help='Launch with Browser.html')
    @CommandArgument(
        'params', nargs='...',
        help="Command-line arguments to be passed through to Servo")
    def run(self, params, release=False, dev=False, android=None, debug=False, debugger=None, browserhtml=False):
        """Launch the Servo binary, forwarding *params* to it.

        Returns the subprocess exit code on failure paths, or None on a
        clean run.
        """
        env = self.build_env()
        env["RUST_BACKTRACE"] = "1"
        # Make --debugger imply --debug
        if debugger:
            debug = True
        if android is None:
            android = self.config["build"]["android"]
        if android:
            if debug:
                print("Android on-device debugging is not supported by mach yet. See")
                print("https://github.com/servo/servo/wiki/Building-for-Android#debugging-on-device")
                return
            # Write the pass-through parameters to a file on the device,
            # then start the Servo activity via an interactive adb shell.
            script = [
                "am force-stop com.mozilla.servo",
                "echo servo >/sdcard/servo/android_params"
            ]
            for param in params:
                script += [
                    "echo '%s' >>/sdcard/servo/android_params" % param.replace("'", "\\'")
                ]
            script += [
                "am start com.mozilla.servo/com.mozilla.servo.MainActivity",
                "exit"
            ]
            shell = subprocess.Popen(["adb", "shell"], stdin=subprocess.PIPE)
            shell.communicate("\n".join(script) + "\n")
            return shell.wait()
        args = [self.get_binary_path(release, dev)]
        if browserhtml:
            browserhtml_path = find_dep_path_newest('browserhtml', args[0])
            if browserhtml_path is None:
                print("Could not find browserhtml package; perhaps you haven't built Servo.")
                return 1
            if is_macosx():
                # Enable borderless on OSX
                args = args + ['-b']
            elif is_windows():
                # Convert to a relative path to avoid mingw -> Windows path conversions
                browserhtml_path = path.relpath(browserhtml_path, os.getcwd())
            args = args + ['--pref', 'dom.mozbrowser.enabled',
                           '--pref', 'dom.forcetouch.enabled',
                           '--pref', 'shell.builtin-key-shortcuts.enabled=false',
                           path.join(browserhtml_path, 'out', 'index.html')]
        # Borrowed and modified from:
        # http://hg.mozilla.org/mozilla-central/file/c9cfa9b91dea/python/mozbuild/mozbuild/mach_commands.py#l883
        if debug:
            import mozdebug
            if not debugger:
                # No debugger name was provided. Look for the default ones on
                # current OS.
                debugger = mozdebug.get_default_debugger_name(
                    mozdebug.DebuggerSearch.KeepLooking)
            self.debuggerInfo = mozdebug.get_debugger_info(debugger)
            if not self.debuggerInfo:
                print("Could not find a suitable debugger in your PATH.")
                return 1
            command = self.debuggerInfo.path
            if debugger == 'gdb' or debugger == 'lldb':
                # Prefer the rust-enabled debugger wrappers when installed.
                rustCommand = 'rust-' + debugger
                try:
                    subprocess.check_call([rustCommand, '--version'], env=env, stdout=open(os.devnull, 'w'))
                except (OSError, subprocess.CalledProcessError):
                    pass
                else:
                    command = rustCommand
            # Prepend the debugger args.
            args = ([command] + self.debuggerInfo.args +
                    args + params)
        else:
            args = args + params
        try:
            check_call(args, env=env)
        except subprocess.CalledProcessError as e:
            print("Servo exited with return value %d" % e.returncode)
            return e.returncode
        except OSError as e:
            # errno 2 (ENOENT): the Servo binary itself is missing.
            if e.errno == 2:
                print("Servo Binary can't be found! Run './mach build'"
                      " and try again!")
            else:
                raise e
    @Command('rr-record',
             description='Run Servo whilst recording execution with rr',
             category='post-build')
    @CommandArgument('--release', '-r', action='store_true',
                     help='Use release build')
    @CommandArgument('--dev', '-d', action='store_true',
                     help='Use dev build')
    @CommandArgument(
        'params', nargs='...',
        help="Command-line arguments to be passed through to Servo")
    def rr_record(self, release=False, dev=False, params=[]):
        """Run Servo under `rr record` for later deterministic replay."""
        env = self.build_env()
        env["RUST_BACKTRACE"] = "1"
        servo_cmd = [self.get_binary_path(release, dev)] + params
        rr_cmd = ['rr', '--fatal-errors', 'record']
        try:
            check_call(rr_cmd + servo_cmd)
        except OSError as e:
            # errno 2 (ENOENT): rr is not installed / not on PATH.
            if e.errno == 2:
                print("rr binary can't be found!")
            else:
                raise e
    @Command('rr-replay',
             description='Replay the most recent execution of Servo that was recorded with rr',
             category='post-build')
    def rr_replay(self):
        """Replay the most recent rr recording."""
        try:
            check_call(['rr', '--fatal-errors', 'replay'])
        except OSError as e:
            # errno 2 (ENOENT): rr is not installed / not on PATH.
            if e.errno == 2:
                print("rr binary can't be found!")
            else:
                raise e
    @Command('doc',
             description='Generate documentation',
             category='post-build')
    @CommandArgument(
        'params', nargs='...',
        help="Command-line arguments to be passed through to cargo doc")
    def doc(self, params):
        """Run `cargo doc`, first syncing the prebuilt Rust docs into the
        target doc directory when they changed."""
        self.ensure_bootstrapped()
        if not path.exists(path.join(self.config["tools"]["rust-root"], "doc")):
            Registrar.dispatch("bootstrap-rust-docs", context=self.context)
        rust_docs = path.join(self.config["tools"]["rust-root"], "doc")
        docs = path.join(self.get_target_dir(), "doc")
        if not path.exists(docs):
            os.makedirs(docs)
        # Only re-copy when the upstream docs differ (version_info.html changed).
        if read_file(path.join(docs, "version_info.html"), if_exists=True) != \
                read_file(path.join(rust_docs, "version_info.html")):
            print("Copying Rust documentation.")
            # copytree doesn't like the destination already existing.
            for name in os.listdir(rust_docs):
                if not name.startswith('.'):
                    full_name = path.join(rust_docs, name)
                    destination = path.join(docs, name)
                    if path.isdir(full_name):
                        if path.exists(destination):
                            rmtree(destination)
                        copytree(full_name, destination)
                    else:
                        copy2(full_name, destination)
        return call(["cargo", "doc"] + params,
                    env=self.build_env(), cwd=self.servo_crate())
    @Command('browse-doc',
             description='Generate documentation and open it in a web browser',
             category='post-build')
    def serve_docs(self):
        """Build the docs, then open the Servo crate index in the default browser."""
        self.doc([])
        import webbrowser
        webbrowser.open("file://" + path.abspath(path.join(
            self.get_target_dir(), "doc", "servo", "index.html")))
|
mpl-2.0
|
raphaelvalentin/Utils
|
functions/system.py
|
1
|
3297
|
import errno
import os
import re
import shutil
import string
import time
from random import randint, seed
from subprocess import Popen, PIPE, STDOUT
# Public API of this module. ('template' and 'template_dir' were previously
# fused into the single bogus entry 'template, template_dir', which broke
# `from ... import *` for both names.)
__all__ = ['find', 'removedirs', 'source', 'tempfile', 'copy', 'rm', 'template', 'template_dir']
def find(path='.', regex='*', ctime=0):
    """Return entries of *path* whose names match *regex*, filtered by age.

    regex  -- pattern passed to re.search; '*' means "match everything".
    ctime  -- age filter in whole days, based on modification time:
              > 0 keeps entries strictly older than *ctime* days,
              < 0 keeps entries strictly younger than -*ctime* days,
              == 0 keeps every match.
    Entries that cannot be examined (e.g. removed concurrently) are
    silently skipped, as in the original best-effort behaviour, but only
    OS and regex errors are swallowed now instead of a bare except.
    """
    matches = []
    regex = str(regex).strip()
    if regex == '*':
        regex = ''
    now = time.time()
    for filename in os.listdir(path):
        try:
            if not re.search(regex, filename):
                continue
            full = os.path.join(path, filename)
            tmtime = os.path.getmtime(full)
            age_days = int((now - tmtime) / 3600 / 24)
            if ctime > 0 and age_days > ctime:
                matches.append(full)
            elif ctime < 0 and age_days < ctime:
                matches.append(full)
            elif ctime == 0:
                matches.append(full)
        except (OSError, re.error):
            # best-effort listing: skip entries we cannot stat or match
            pass
    return matches
def rm(*files):
    """Best-effort removal of files and directory trees (like `rm -rf`).

    Directories are removed recursively with shutil.rmtree, everything
    else with os.remove. Failures are reported on stdout rather than
    raised. (The old commented-out shell-based implementation has been
    dropped.)
    """
    for src in files:
        try:
            if os.path.isdir(src):
                shutil.rmtree(src)
            else:
                os.remove(src)
        except OSError as e:
            print('%s not removed. Error: %s'%(src, e))
def removedirs(*args):
    """Deprecated alias kept for backward compatibility: forwards to rm().

    The single-argument print call below prints identically under
    Python 2 (parenthesised expression) and Python 3, replacing the old
    Python-2-only print statement.
    """
    print('Deprecated: use rm')
    rm(*args)
def source(filename):
    """Source a tcsh script and import the resulting environment.

    Runs `source <filename>; env` in /bin/tcsh and copies every VAR=value
    line of the output into os.environ. The format string previously read
    "source (unknown); env" with no placeholder, so *filename* was never
    substituted and the command always failed.
    """
    cmd = "source {filename}; env".format(filename=filename)
    p = Popen(cmd, executable='/bin/tcsh', stdout=PIPE, stderr=STDOUT, shell=True, env=os.environ)
    stdout = p.communicate()[0].splitlines()
    for line in stdout:
        if re.search(r'[0-9a-zA-Z_-]+=\S+', line):
            key, value = line.split("=", 1)
            os.environ[key] = value
def copy(src, dest, force=False):
    """Copy a file or a whole directory tree from *src* to *dest*.

    force=True first deletes an existing *dest* directory, because
    shutil.copytree refuses to copy onto an existing destination.
    When *src* turns out to be a plain file (ENOTDIR), falls back to a
    simple file copy. Other errors are reported on stdout, not raised.
    Fix: the ENOTDIR check referenced `errno` which was never imported,
    so the file-copy fallback raised NameError instead of working.
    """
    try:
        if force and os.path.isdir(dest):
            # not good for speed, but copytree needs a fresh destination
            rm(dest)
        shutil.copytree(src, dest)
    except OSError as e:
        # src is not a directory: copy it as a single file
        if e.errno == errno.ENOTDIR:
            shutil.copy(src, dest)
        else:
            print('%s not copied. Error: %s'%(src, e))
def template(src, dest, substitute={}):
    """Expand $-placeholders in *src* and write the result to *dest*.

    Uses string.Template.safe_substitute, so placeholders missing from
    *substitute* are left untouched rather than raising KeyError.
    Note: the mutable default is never modified here, so the shared-default
    pitfall does not apply.
    """
    with open(src) as infile:
        rendered = string.Template(infile.read()).safe_substitute(substitute)
    with open(dest, 'w') as outfile:
        outfile.write(rendered)
def template_dir(src, dest, substitute={}):
    """Copy the tree *src* to *dest* and template every file in place.

    Fixes two bugs in the original: the walk loop referenced an undefined
    name `filename` (it never iterated over `files`), and paths were
    joined against `dest` instead of the current `root`, so nested files
    were mis-addressed. Also replaces the Python-2-only `<>` operator
    with `!=`.
    """
    if src != dest:
        copy(src, dest, force=True)
    for root, subdirs, files in os.walk(dest):
        for filename in files:
            file_path = os.path.join(root, filename)
            # render each file in place
            template(file_path, file_path, substitute)
class tempfile:
    """Minimal temp-directory helper (loosely modelled on the stdlib module).

    Creates uniquely named directories under `tempdir`, lengthening the
    random part of the name when collisions keep occurring.
    Changes from the original: `xrange` -> `range` and `while 1` ->
    `while True`, which behave identically on Python 2 here and also work
    on Python 3.
    """
    # Alphabet the random directory names are drawn from.
    letters = "ABCDEFGHIJLKMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789_"
    tempdir = '/tmp/'
    seed()
    @staticmethod
    def randstr(n=10):
        """Return a random string of length *n* drawn from `letters`."""
        return "".join(tempfile.letters[randint(0, len(tempfile.letters) - 1)] for i in range(n))
    @staticmethod
    def mkdtemp(prefix='', suffix=''):
        """Create and return a fresh directory named prefix+<random>+suffix.

        On OSError (typically a name collision) it retries, growing the
        random part by one character every 10 failures, and gives up once
        the length would exceed 12.
        """
        n = 7
        i = 0
        while True:
            try:
                path = os.path.join(tempfile.tempdir, prefix + tempfile.randstr(n) + suffix)
                os.mkdir(path)
                return path
            except OSError:
                i = i + 1
                if i % 10 == 0:
                    n = n + 1
                if n > 12:
                    raise OSError('cannot create a temporary directory')
|
gpl-2.0
|
Just-D/panda3d
|
direct/src/distributed/TimeManager.py
|
11
|
6863
|
from direct.showbase.DirectObject import *
from pandac.PandaModules import *
from direct.task import Task
from direct.distributed import DistributedObject
from direct.directnotify import DirectNotifyGlobal
from direct.distributed.ClockDelta import globalClockDelta
class TimeManager(DistributedObject.DistributedObject):
    """
    This DistributedObject lives on the AI and on the client side, and
    serves to synchronize the time between them so they both agree, to
    within a few hundred milliseconds at least, what time it is.
    It uses a pull model where the client can request a
    synchronization check from time to time. It also employs a
    round-trip measurement to minimize the effect of latency.
    """
    notify = DirectNotifyGlobal.directNotify.newCategory("TimeManager")
    # The number of seconds to wait between automatic
    # synchronizations. Set to 0 to disable auto sync after
    # startup.
    updateFreq = base.config.GetFloat('time-manager-freq', 1800)
    # The minimum number of seconds to wait between two unrelated
    # synchronization attempts. Increasing this number cuts down
    # on frivolous synchronizations.
    minWait = base.config.GetFloat('time-manager-min-wait', 10)
    # The maximum number of seconds of uncertainty to tolerate in
    # the clock delta without trying again.
    maxUncertainty = base.config.GetFloat('time-manager-max-uncertainty', 1)
    # The maximum number of attempts to try to get a low-latency
    # time measurement before giving up and accepting whatever we
    # get.
    maxAttempts = base.config.GetInt('time-manager-max-attempts', 5)
    # A simulated clock skew for debugging, in seconds.
    extraSkew = base.config.GetInt('time-manager-extra-skew', 0)
    if extraSkew != 0:
        notify.info("Simulating clock skew of %0.3f s" % extraSkew)
    reportFrameRateInterval = base.config.GetDouble('report-frame-rate-interval', 300.0)
    def __init__(self, cr):
        # cr: the client repository this distributed object belongs to.
        DistributedObject.DistributedObject.__init__(self, cr)
        self.thisContext = -1    # context id of the request currently in flight
        self.nextContext = 0     # next context id to hand out (wraps at 256)
        self.attemptCount = 0    # round trips tried for the current context
        self.start = 0           # local real time when the current request was sent
        # Start far enough in the past that the very first synchronize()
        # call is not rejected by the minWait throttle.
        self.lastAttempt = -self.minWait*2
    ### DistributedObject methods ###
    def generate(self):
        """
        This method is called when the DistributedObject is reintroduced
        to the world, either for the first time or from the cache.
        """
        DistributedObject.DistributedObject.generate(self)
        self.accept('clock_error', self.handleClockError)
        if self.updateFreq > 0:
            self.startTask()
    def announceGenerate(self):
        """Register as the repository's time manager and do an initial sync."""
        DistributedObject.DistributedObject.announceGenerate(self)
        self.cr.timeManager = self
        self.synchronize("TimeManager.announceGenerate")
    def disable(self):
        """
        This method is called when the DistributedObject is removed from
        active duty and stored in a cache.
        """
        self.ignore('clock_error')
        self.stopTask()
        taskMgr.remove('frameRateMonitor')
        if self.cr.timeManager is self:
            self.cr.timeManager = None
        DistributedObject.DistributedObject.disable(self)
    def delete(self):
        """
        This method is called when the DistributedObject is permanently
        removed from the world and deleted from the cache.
        """
        DistributedObject.DistributedObject.delete(self)
    ### Task management methods ###
    def startTask(self):
        """(Re)schedule the periodic resynchronization task."""
        self.stopTask()
        taskMgr.doMethodLater(self.updateFreq, self.doUpdate, "timeMgrTask")
    def stopTask(self):
        """Cancel the periodic resynchronization task, if any."""
        taskMgr.remove("timeMgrTask")
    def doUpdate(self, task):
        """Periodic task body: resync and reschedule itself."""
        self.synchronize("timer")
        # Spawn the next one
        taskMgr.doMethodLater(self.updateFreq, self.doUpdate, "timeMgrTask")
        return Task.done
    ### Automatic clock error handling ###
    def handleClockError(self):
        """React to a 'clock_error' event by forcing a resynchronization."""
        self.synchronize("clock error")
    ### Synchronization methods ###
    def synchronize(self, description):
        """synchronize(self, string description)
        Call this function from time to time to synchronize watches
        with the server. This initiates a round-trip transaction;
        when the transaction completes, the time will be synced.
        The description is the string that will be written to the log
        file regarding the reason for this synchronization attempt.
        The return value is true if the attempt is made, or false if
        it is too soon since the last attempt.
        """
        now = globalClock.getRealTime()
        if now - self.lastAttempt < self.minWait:
            self.notify.debug("Not resyncing (too soon): %s" % (description))
            return 0
        self.talkResult = 0
        self.thisContext = self.nextContext
        self.attemptCount = 0
        # Context ids are a single byte on the wire, hence the wrap at 256.
        self.nextContext = (self.nextContext + 1) & 255
        self.notify.info("Clock sync: %s" % (description))
        self.start = now
        self.lastAttempt = now
        self.sendUpdate("requestServerTime", [self.thisContext])
        return 1
    def serverTime(self, context, timestamp):
        """serverTime(self, int8 context, int32 timestamp)
        This message is sent from the AI to the client in response to
        a previous requestServerTime. It contains the time as
        observed by the AI.
        The client should use this, in conjunction with the time
        measurement taken before calling requestServerTime (above), to
        determine the clock delta between the AI and the client
        machines.
        """
        end = globalClock.getRealTime()
        if context != self.thisContext:
            self.notify.info("Ignoring TimeManager response for old context %d" % (context))
            return
        elapsed = end - self.start
        self.attemptCount += 1
        self.notify.info("Clock sync roundtrip took %0.3f ms" % (elapsed * 1000.0))
        # Estimate the local time at which the server sampled its clock as
        # the midpoint of the round trip; the half round trip (plus any
        # simulated skew) bounds the measurement uncertainty.
        average = (self.start + end) / 2.0 - self.extraSkew
        uncertainty = (end - self.start) / 2.0 + abs(self.extraSkew)
        globalClockDelta.resynchronize(average, timestamp, uncertainty)
        self.notify.info("Local clock uncertainty +/- %.3f s" % (globalClockDelta.getUncertainty()))
        if globalClockDelta.getUncertainty() > self.maxUncertainty:
            if self.attemptCount < self.maxAttempts:
                self.notify.info("Uncertainty is too high, trying again.")
                self.start = globalClock.getRealTime()
                self.sendUpdate("requestServerTime", [self.thisContext])
                return
            self.notify.info("Giving up on uncertainty requirement.")
        messenger.send("gotTimeSync", taskChain = 'default')
        messenger.send(self.cr.uniqueName("gotTimeSync"), taskChain = 'default')
|
bsd-3-clause
|
myriadrf/pyLMS7002M
|
pyLMS7002M/LMS7002_RFE.py
|
2
|
19263
|
#***************************************************************
#* Name: LMS7002_RFE.py
#* Purpose: Class implementing LMS7002 RFE functions
#* Author: Lime Microsystems ()
#* Created: 2016-11-14
#* Copyright: Lime Microsystems (limemicro.com)
#* License:
#**************************************************************
from LMS7002_base import *
class LMS7002_RFE(LMS7002_base):
    """Register interface for the LMS7002 receive front-end (RFE) block.

    Every register field is exposed as a property. The fields fall into a
    few identical patterns (bounded unsigned values, ON/OFF power-down and
    enable switches, sign-magnitude DC offsets), so the properties are
    generated by small factory helpers instead of ~40 hand-copied
    property pairs. Public attribute names, accepted values and raised
    exceptions are unchanged; the only behavioural fix is the
    CDC_I/Q_RFE range error message, which previously claimed "[0,16]"
    for a field whose writable range is [0..15].
    """
    __slots__ = []  # Used to generate error on typos

    def __init__(self, chip, Channel):
        """Bind this front-end view to *chip*, channel 'A' or 'B'."""
        if Channel not in ['A', 'B']:
            raise ValueError("Parameter Channel must be 'A' or 'B'")
        self.chip = chip
        self.channel = Channel
        self.prefix = "RFE_"

    #
    # Property factories, evaluated once at class-creation time and
    # deleted from the namespace at the bottom of the class body.
    #
    def _uint_prop(reg, field, lo, hi):
        """Build a property for an unsigned field restricted to [lo, hi]."""
        def fget(self):
            return self._readReg(reg, field)
        def fset(self, value):
            if not(lo <= value <= hi):
                raise ValueError("Value must be [%d..%d]" % (lo, hi))
            self._writeReg(reg, field, value)
        return property(fget, fset, doc="Value of " + field)

    def _switch_prop(reg, field, zeroAlias):
        """Build an ON/OFF style property.

        zeroAlias is the string that maps to register value 0: 'ON' for
        power-down (PD_*) fields, 'OFF' for enable (EN_*) fields.
        """
        def fget(self):
            return self._readReg(reg, field)
        def fset(self, value):
            if value not in [0, 1, 'ON', 'OFF']:
                raise ValueError("Value must be [0,1,'ON', 'OFF']")
            if value == 0 or value == zeroAlias:
                val = 0
            else:
                val = 1
            self._writeReg(reg, field, val)
        return property(fget, fset, doc="Value of " + field)

    def _signmag_prop(reg, field, bits):
        """Build a property for a *bits*-wide sign-magnitude field."""
        lim = 2 ** (bits - 1) - 1  # e.g. 63 for a 7-bit field
        def fget(self):
            return self.signMagnitudeToInt(self._readReg(reg, field), bits)
        def fset(self, value):
            if not(-lim <= value <= lim):
                raise ValueError("Value must be [-%d..%d]" % (lim, lim))
            self._writeReg(reg, field, self.intToSignMagnitude(value, bits))
        return property(fget, fset, doc="Value of " + field)

    # EN_DIR lives in the shared TRX_EN_DIR register, which is addressed
    # without the "RFE_" register-name prefix, so the prefix is cleared
    # around the access and restored afterwards.
    @property
    def EN_DIR(self):
        """Get the value of EN_DIR"""
        prefix = self.prefix
        self.prefix = ""
        en_dir = self._readReg('TRX_EN_DIR', 'EN_DIR_RFE')
        self.prefix = prefix
        return en_dir

    @EN_DIR.setter
    def EN_DIR(self, value):
        """Set the value of EN_DIR"""
        if value not in [0, 1]:
            raise ValueError("Value must be [0,1]")
        prefix = self.prefix
        self.prefix = ""
        self._writeReg('TRX_EN_DIR', 'EN_DIR_RFE', value)
        self.prefix = prefix

    # SEL_PATH_RFE selects the active LNA input path.
    @property
    def SEL_PATH_RFE(self):
        """Get the value of SEL_PATH_RFE<1:0>"""
        return self._readReg('CFG1', 'SEL_PATH_RFE<1:0>')

    @SEL_PATH_RFE.setter
    def SEL_PATH_RFE(self, value):
        """Set the value of SEL_PATH_RFE<1:0>"""
        if value not in [0, 1, 2, 3, 'NONE', 'LNAH', 'LNAL', 'LNAW']:
            raise ValueError("Value must be [0, 1, 2, 3, 'NONE', 'LNAH', 'LNAL', 'LNAW']")
        names = {'NONE': 0, 'LNAH': 1, 'LNAL': 2, 'LNAW': 3}
        # Integers pass through unchanged; names are translated.
        self._writeReg('CFG1', 'SEL_PATH_RFE<1:0>', names.get(value, value))

    # EN_NEXTRX_RFE selects SISO (0) or MIMO (1) operation.
    @property
    def EN_NEXTRX_RFE(self):
        """Get the value of EN_NEXTRX_RFE"""
        return self._readReg('CFG1', 'EN_NEXTRX_RFE')

    @EN_NEXTRX_RFE.setter
    def EN_NEXTRX_RFE(self, value):
        """Set the value of EN_NEXTRX_RFE"""
        if value not in [0, 1, 'SISO', 'MIMO']:
            raise ValueError("Value must be [0,1,'SISO', 'MIMO']")
        if value == 0 or value == 'SISO':
            val = 0
        else:
            val = 1
        self._writeReg('CFG1', 'EN_NEXTRX_RFE', val)

    #
    # RFE_CFG0 (0x010C)
    #
    CDC_I_RFE = _uint_prop('CFG0', 'CDC_I_RFE<3:0>', 0, 15)
    CDC_Q_RFE = _uint_prop('CFG0', 'CDC_Q_RFE<3:0>', 0, 15)
    PD_LNA_RFE = _switch_prop('CFG0', 'PD_LNA_RFE', 'ON')
    PD_RLOOPB_1_RFE = _switch_prop('CFG0', 'PD_RLOOPB_1_RFE', 'ON')
    PD_RLOOPB_2_RFE = _switch_prop('CFG0', 'PD_RLOOPB_2_RFE', 'ON')
    PD_MXLOBUF_RFE = _switch_prop('CFG0', 'PD_MXLOBUF_RFE', 'ON')
    PD_QGEN_RFE = _switch_prop('CFG0', 'PD_QGEN_RFE', 'ON')
    PD_RSSI_RFE = _switch_prop('CFG0', 'PD_RSSI_RFE', 'ON')
    PD_TIA_RFE = _switch_prop('CFG0', 'PD_TIA_RFE', 'ON')
    EN_G_RFE = _switch_prop('CFG0', 'EN_G_RFE', 'OFF')
    #
    # RFE_CFG1 (0x010D) -- SEL_PATH_RFE and EN_NEXTRX_RFE defined above
    #
    EN_DCOFF_RXFE_RFE = _switch_prop('CFG1', 'EN_DCOFF_RXFE_RFE', 'OFF')
    EN_INSHSW_LB1_RFE = _switch_prop('CFG1', 'EN_INSHSW_LB1_RFE', 'OFF')
    EN_INSHSW_LB2_RFE = _switch_prop('CFG1', 'EN_INSHSW_LB2_RFE', 'OFF')
    EN_INSHSW_L_RFE = _switch_prop('CFG1', 'EN_INSHSW_L_RFE', 'OFF')
    EN_INSHSW_W_RFE = _switch_prop('CFG1', 'EN_INSHSW_W_RFE', 'OFF')
    #
    # RFE_DCOFF (0x010E) -- sign-magnitude DC offset correction
    #
    DCOFFI_RFE = _signmag_prop('DCOFF', 'DCOFFI_RFE<6:0>', 7)
    DCOFFQ_RFE = _signmag_prop('DCOFF', 'DCOFFQ_RFE<6:0>', 7)
    #
    # RFE_ICT0 (0x010F)
    #
    ICT_LOOPB_RFE = _uint_prop('ICT0', 'ICT_LOOPB_RFE<4:0>', 0, 31)
    ICT_TIAMAIN_RFE = _uint_prop('ICT0', 'ICT_TIAMAIN_RFE<4:0>', 0, 31)
    ICT_TIAOUT_RFE = _uint_prop('ICT0', 'ICT_TIAOUT_RFE<4:0>', 0, 31)
    #
    # RFE_ICT1 (0x0110)
    #
    ICT_LNACMO_RFE = _uint_prop('ICT1', 'ICT_LNACMO_RFE<4:0>', 0, 31)
    ICT_LNA_RFE = _uint_prop('ICT1', 'ICT_LNA_RFE<4:0>', 0, 31)
    ICT_LODC_RFE = _uint_prop('ICT1', 'ICT_LODC_RFE<4:0>', 0, 31)
    #
    # RFE_CAP0 (0x0111)
    #
    CAP_RXMXO_RFE = _uint_prop('CAP0', 'CAP_RXMXO_RFE<4:0>', 0, 31)
    CGSIN_LNA_RFE = _uint_prop('CAP0', 'CGSIN_LNA_RFE<4:0>', 0, 31)
    #
    # RFE_CAP1 (0x0112)
    #
    CCOMP_TIA_RFE = _uint_prop('CAP1', 'CCOMP_TIA_RFE<3:0>', 0, 15)
    CFB_TIA_RFE = _uint_prop('CAP1', 'CFB_TIA_RFE<11:0>', 0, 4095)
    #
    # RFE_GAIN (0x0113)
    #
    G_LNA_RFE = _uint_prop('GAIN', 'G_LNA_RFE<3:0>', 1, 15)
    G_RXLOOPB_RFE = _uint_prop('GAIN', 'G_RXLOOPB_RFE<3:0>', 0, 15)
    G_TIA_RFE = _uint_prop('GAIN', 'G_TIA_RFE<1:0>', 1, 3)
    #
    # RFE_TIA (0x0114)
    #
    RCOMP_TIA_RFE = _uint_prop('TIA', 'RCOMP_TIA_RFE<3:0>', 0, 15)
    RFB_TIA_RFE = _uint_prop('TIA', 'RFB_TIA_RFE<4:0>', 0, 31)

    # The factories are build-time helpers only; remove them so the class
    # namespace exposes exactly the same attributes as before.
    del _uint_prop
    del _switch_prop
    del _signmag_prop
|
apache-2.0
|
rlouf/patterns-of-segregation
|
bin/plot_gini.py
|
1
|
2527
|
"""plot_gini.py
Plot the Gini of the income distribution as a function of the number of
households in cities.
"""
from __future__ import division
import csv
import numpy as np
import itertools
from matplotlib import pylab as plt
#
# Parameters and functions
#
income_bins = [1000,12500,17500,22500,27500,32500,37500,42500,47500,55000,70000,90000,115000,135000,175000,300000]
# Puerto-rican cities are excluded from the analysis
PR_cities = ['7442','0060','6360','4840']
#
# Read data
#
## List of MSA
msa = {}
with open('data/names/msa.csv', 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
if rows[0] not in PR_cities:
msa[rows[0]] = rows[1]
#
# Compute gini for all msa
#
gini = []
households = []
for n, city in enumerate(msa):
print "Compute Gini index for %s (%s/%s)"%(msa[city], n+1, len(msa))
## Import households income
data = {}
with open('data/income/msa/%s/income.csv'%city, 'r') as source:
reader = csv.reader(source, delimiter='\t')
reader.next()
for rows in reader:
num_cat = len(rows[1:])
data[rows[0]] = {c:int(h) for c,h in enumerate(rows[1:])}
# Sum over all areal units
incomes = {cat:sum([data[au][cat] for au in data]) for cat in range(num_cat)}
## Compute the Gini index
# See Dixon, P. M.; Weiner, J.; Mitchell-Olds, T.; and Woodley, R.
# "Bootstrapping the Gini Coefficient of Inequality." Ecology 68, 1548-1551, 1987.
g = 0
pop = 0
for a,b in itertools.permutations(incomes, 2):
g += incomes[a]*incomes[b]*abs(income_bins[a]-income_bins[b])
pop = sum([incomes[a] for a in incomes])
average = sum([incomes[a]*income_bins[a] for a in incomes])/pop
gini.append((1/(2*pop**2*average))*g)
households.append(pop)
#
# Plot
#
fig = plt.figure(figsize=(12,8))
ax = fig.add_subplot(111)
ax.plot(households, gini, 'o', color='black', mec='black')
ax.set_xlabel(r'$H$', fontsize=30)
ax.set_ylabel(r'$Gini$', fontsize=30)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_position(('outward', 10)) # outward by 10 points
ax.spines['bottom'].set_position(('outward', 10)) # outward by 10 points
ax.spines['left'].set_smart_bounds(True)
ax.spines['bottom'].set_smart_bounds(True)
ax.yaxis.set_ticks_position('left')
ax.xaxis.set_ticks_position('bottom')
ax.set_xscale('log')
plt.savefig('figures/paper/si/gini_income.pdf', bbox_inches='tight')
plt.show()
|
bsd-3-clause
|
phamelin/ardupilot
|
Tools/autotest/param_metadata/ednemit.py
|
15
|
3501
|
#!/usr/bin/env python
"""
Emits parameters as an EDN file, does some small remapping of names
"""
from emit import Emit
import edn_format
import datetime
import pytz
import subprocess
class EDNEmit(Emit):
    """Emitter that serializes all parameter metadata into a single EDN
    map written to parameters.edn, keyed by parameter name."""
    def __init__(self, *args, **kwargs):
        """Open the EDN map and record the generation date and git hash."""
        Emit.__init__(self, *args, **kwargs)
        self.output = "{:date " + edn_format.dumps(datetime.datetime.now(pytz.utc)) + " "
        # NOTE(review): a list argument combined with shell=True means only
        # the first element is used as the shell command string.
        git = subprocess.Popen(["git log --pretty=format:'%h' -n 1"], shell=True, stdout=subprocess.PIPE).communicate()[0]
        self.output += ":git-hash \"" + git.decode("ascii") + "\" "
        # Keys stripped from every parameter before emission.
        self.remove_keys = ["real_path"]
        # [old-key, new-key] renames applied on output.
        self.explict_remap = [["displayname", "display-name"]]
        # Filled in from the "VEHICLE:PARAM" name prefix while emitting.
        self.vehicle_name = None
    def close(self):
        """Close the EDN map and write parameters.edn.

        Raises if no vehicle name was ever discovered during emit().
        """
        if self.vehicle_name is not None:
            self.output += ":vehicle \"" + self.vehicle_name + "\" "
        else:
            raise Exception('Vehicle name never found')
        self.output += "}"
        f = open("parameters.edn", mode='w')
        f.write(self.output)
        f.close()
    def start_libraries(self):
        # Libraries need no special handling for EDN output.
        pass
    def emit(self, g):
        """Serialize every parameter of group *g* into the EDN output."""
        for param in g.params:
            output_dict = dict()
            # lowercase all keywords
            for key in param.__dict__.keys():
                output_dict[key.lower()] = param.__dict__[key]
            # strip off any leading sillyness on the param name
            split_name = param.__dict__["name"].split(":")
            if len(split_name) == 2:
                # a "VEHICLE:NAME" prefix identifies the vehicle
                self.vehicle_name = split_name[0]
            name = param.__dict__["name"].split(":")[-1]
            output_dict["name"] = name
            # remove any keys we don't really care to share
            for key in self.remove_keys:
                output_dict.pop(key, None)
            # rearrange bitmasks to be a vector with nil's if the bit doesn't have meaning
            if "bitmask" in output_dict:
                highest_set_bit = 0
                bits = []
                # each entry is "bitnumber:meaning"
                for bit in output_dict["bitmask"].split(","):
                    bit_parts = bit.split(":")
                    bit_number = int(bit_parts[0])
                    bit_parts[0] = bit_number
                    bits.append(bit_parts)
                    if bit_number > highest_set_bit:
                        highest_set_bit = bit_number
                output_bits = (highest_set_bit+1)*[None]
                for bit in bits:
                    output_bits[bit[0]] = bit[1]
                output_dict["bitmask"] = output_bits
            # rearrange values into a float indexed map
            if "values" in output_dict:
                values = dict()
                for value in output_dict["values"].split(","):
                    index, description = value.split(":")
                    values[float(index)] = description
                output_dict["values"] = values
            # remap range to be a map of floats
            if "range" in output_dict:
                low, high = output_dict["range"].split()
                output_dict["range"] = {"low": float(low), "high": float(high)}
            # remap the string to a float
            if "increment" in output_dict:
                output_dict["increment"] = float(output_dict["increment"])
            # do any name changing desired
            for remap in self.explict_remap:
                output_dict[remap[1]] = output_dict.pop(remap[0])
            self.output += "\"" + name + "\" " + edn_format.dumps(output_dict, keyword_keys=True)
gpl-3.0
|
Jajcus/pyxmpp
|
pyxmpp/jabber/muccore.py
|
1
|
27807
|
#
# (C) Copyright 2003-2010 Jacek Konieczny <jajcus@jajcus.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""Jabber Multi-User Chat implementation.
Normative reference:
- `JEP 45 <http://www.jabber.org/jeps/jep-0045.html>`__
"""
__docformat__="restructuredtext en"
import libxml2
from pyxmpp.utils import to_utf8,from_utf8
from pyxmpp.xmlextra import common_doc, common_root, common_ns, get_node_ns_uri
from pyxmpp.presence import Presence
from pyxmpp.iq import Iq
from pyxmpp.jid import JID
from pyxmpp import xmlextra
from pyxmpp.objects import StanzaPayloadWrapperObject
from pyxmpp.xmlextra import xml_element_iter
# MUC namespace URIs as defined by JEP/XEP-0045.
MUC_NS="http://jabber.org/protocol/muc"
MUC_USER_NS=MUC_NS+"#user"
MUC_ADMIN_NS=MUC_NS+"#admin"
MUC_OWNER_NS=MUC_NS+"#owner"
# Legal values for the <item/> 'affiliation' and 'role' attributes.
affiliations=("admin","member","none","outcast","owner")
roles=("moderator","none","participant","visitor")
class MucXBase(StanzaPayloadWrapperObject):
    """
    Base class for MUC-specific stanza payload - wrapper around
    an XML element.
    :Ivariables:
        - `xmlnode`: the wrapped XML node
    """
    element="x"
    ns=None
    def __init__(self, xmlnode=None, copy=True, parent=None):
        """
        Copy MucXBase object or create a new one, possibly
        based on or wrapping an XML node.
        :Parameters:
            - `xmlnode`: is the object to copy or an XML node to wrap.
            - `copy`: when `True` a copy of the XML node provided will be included
              in `self`, the node will be copied otherwise.
            - `parent`: parent node for the created/copied XML element.
        :Types:
            - `xmlnode`: `MucXBase` or `libxml2.xmlNode`
            - `copy`: `bool`
            - `parent`: `libxml2.xmlNode`
        """
        if self.ns==None:
            raise RuntimeError,"Pure virtual class called"
        self.xmlnode=None
        self.borrowed=False
        if isinstance(xmlnode,libxml2.xmlNode):
            if copy:
                # Own a copy linked under the shared common_root document.
                self.xmlnode=xmlnode.docCopyNode(common_doc,1)
                common_root.addChild(self.xmlnode)
            else:
                # Borrow the caller's node; never free it ourselves.
                self.xmlnode=xmlnode
                self.borrowed=True
            if copy:
                ns=xmlnode.ns()
                xmlextra.replace_ns(self.xmlnode, ns, common_ns)
        elif isinstance(xmlnode,MucXBase):
            if not copy:
                raise TypeError, "MucXBase may only be copied"
            self.xmlnode=xmlnode.xmlnode.docCopyNode(common_doc,1)
            common_root.addChild(self.xmlnode)
        elif xmlnode is not None:
            raise TypeError, "Bad MucX constructor argument"
        else:
            # No source node: build a fresh element in the class namespace.
            if parent:
                self.xmlnode=parent.newChild(None,self.element,None)
                self.borrowed=True
            else:
                self.xmlnode=common_root.newChild(None,self.element,None)
            ns=self.xmlnode.newNs(self.ns,None)
            self.xmlnode.setNs(ns)
    def __del__(self):
        if self.xmlnode:
            self.free()
    def free(self):
        """
        Unlink and free the XML node owned by `self`.
        """
        if not self.borrowed:
            self.xmlnode.unlinkNode()
            self.xmlnode.freeNode()
        self.xmlnode=None
    def free_borrowed(self):
        """
        Detach the XML node borrowed by `self`.
        """
        self.xmlnode=None
    def xpath_eval(self,expr):
        """
        Evaluate XPath expression in context of `self.xmlnode`.
        :Parameters:
            - `expr`: the XPath expression
        :Types:
            - `expr`: `unicode`
        :return: the result of the expression evaluation.
        :returntype: list of `libxml2.xmlNode`
        """
        ctxt = common_doc.xpathNewContext()
        ctxt.setContextNode(self.xmlnode)
        # NOTE(review): `self.ns` is a class-level URI *string* here, yet
        # getContent() is an xmlNs-object method -- verify this call works
        # as intended on instances of the concrete subclasses.
        ctxt.xpathRegisterNs("muc",self.ns.getContent())
        ret=ctxt.xpathEval(to_utf8(expr))
        ctxt.xpathFreeContext()
        return ret
    def serialize(self):
        """
        Serialize `self` as XML.
        :return: serialized `self.xmlnode`.
        :returntype: `str`
        """
        return self.xmlnode.serialize()
class MucX(MucXBase):
    """
    Wrapper for http://www.jabber.org/protocol/muc namespaced
    stanza payload "x" elements.
    """
    ns=MUC_NS
    def __init__(self, xmlnode=None, copy=True, parent=None):
        MucXBase.__init__(self,xmlnode=xmlnode, copy=copy, parent=parent)
    def set_history(self, parameters):
        """
        Set history parameters, replacing any existing <history/> child.
        :Types:
            - `parameters`: `HistoryParameters`
        """
        # Drop the first pre-existing <history/> element, if any.
        for child in xml_element_iter(self.xmlnode.children):
            if get_node_ns_uri(child) == MUC_NS and child.name == "history":
                child.unlinkNode()
                child.freeNode()
                break
        if parameters.maxchars and parameters.maxchars < 0:
            raise ValueError, "History parameter maxchars must be positive"
        if parameters.maxstanzas and parameters.maxstanzas < 0:
            raise ValueError, "History parameter maxstanzas must be positive"
        if parameters.maxseconds and parameters.maxseconds < 0:
            raise ValueError, "History parameter maxseconds must be positive"
        hnode=self.xmlnode.newChild(self.xmlnode.ns(), "history", None)
        # Only explicitly provided limits become attributes.
        if parameters.maxchars is not None:
            hnode.setProp("maxchars", str(parameters.maxchars))
        if parameters.maxstanzas is not None:
            hnode.setProp("maxstanzas", str(parameters.maxstanzas))
        if parameters.maxseconds is not None:
            hnode.setProp("maxseconds", str(parameters.maxseconds))
        if parameters.since is not None:
            hnode.setProp("since", parameters.since.strftime("%Y-%m-%dT%H:%M:%SZ"))
    def get_history(self):
        """Return history parameters carried by the stanza.
        :returntype: `HistoryParameters` or `None` when no <history/> child exists."""
        for child in xml_element_iter(self.xmlnode.children):
            if get_node_ns_uri(child) == MUC_NS and child.name == "history":
                maxchars = from_utf8(child.prop("maxchars"))
                if maxchars is not None:
                    maxchars = int(maxchars)
                maxstanzas = from_utf8(child.prop("maxstanzas"))
                if maxstanzas is not None:
                    maxstanzas = int(maxstanzas)
                maxseconds = from_utf8(child.prop("maxseconds"))
                if maxseconds is not None:
                    maxseconds = int(maxseconds)
                # TODO: since -- requires parsing of Jabber dateTime profile
                since = None
                return HistoryParameters(maxchars, maxstanzas, maxseconds, since)
    def set_password(self, password):
        """Set password for the MUC request, replacing any existing one.
        :Parameters:
            - `password`: password
        :Types:
            - `password`: `unicode`"""
        for child in xml_element_iter(self.xmlnode.children):
            if get_node_ns_uri(child) == MUC_NS and child.name == "password":
                child.unlinkNode()
                child.freeNode()
                break
        if password is not None:
            self.xmlnode.newTextChild(self.xmlnode.ns(), "password", to_utf8(password))
    def get_password(self):
        """Get password from the MUC request.
        :returntype: `unicode` or `None` when absent.
        """
        for child in xml_element_iter(self.xmlnode.children):
            if get_node_ns_uri(child) == MUC_NS and child.name == "password":
                return from_utf8(child.getContent())
        return None
class HistoryParameters(object):
    """Plain container describing MUC history-request limits.

    :Ivariables:
        - `maxchars`: limit of the total number of characters in history.
        - `maxstanzas`: limit of the total number of messages in history.
        - `maxseconds`: send only messages received in the last `maxseconds`
          seconds.
        - `since`: send only the messages received since the dateTime (UTC)
          specified.
    :Types:
        - `maxchars`: `int`
        - `maxstanzas`: `int`
        - `maxseconds`: `int`
        - `since`: `datetime.datetime`
    """
    def __init__(self, maxchars = None, maxstanzas = None, maxseconds = None, since = None):
        """Record the given history limits on the instance.

        :Parameters:
            - `maxchars`: limit of the total number of characters in history.
            - `maxstanzas`: limit of the total number of messages in history.
            - `maxseconds`: send only messages received in the last
              `maxseconds` seconds.
            - `since`: send only the messages received since the dateTime
              specified.
        :Types:
            - `maxchars`: `int`
            - `maxstanzas`: `int`
            - `maxseconds`: `int`
            - `since`: `datetime.datetime`
        """
        # A value of None means "no constraint" for every field.
        (self.maxchars, self.maxstanzas,
            self.maxseconds, self.since) = (maxchars, maxstanzas,
                                            maxseconds, since)
class MucItemBase(object):
    """
    Base class for <status/> and <item/> element wrappers.
    """
    def __init__(self):
        # Abstract base: may only be instantiated through a subclass.
        if self.__class__ is MucItemBase:
            raise RuntimeError,"Abstract class called"
class MucItem(MucItemBase):
"""
MUC <item/> element -- describes a room occupant.
:Ivariables:
- `affiliation`: affiliation of the user.
- `role`: role of the user.
- `jid`: JID of the user.
- `nick`: nickname of the user.
- `actor`: actor modyfying the user data.
- `reason`: reason of change of the user data.
:Types:
- `affiliation`: `str`
- `role`: `str`
- `jid`: `JID`
- `nick`: `unicode`
- `actor`: `JID`
- `reason`: `unicode`
"""
def __init__(self,xmlnode_or_affiliation,role=None,jid=None,nick=None,actor=None,reason=None):
"""
Initialize a `MucItem` object.
:Parameters:
- `xmlnode_or_affiliation`: XML node to be pased or the affiliation of
the user being described.
- `role`: role of the user.
- `jid`: JID of the user.
- `nick`: nickname of the user.
- `actor`: actor modyfying the user data.
- `reason`: reason of change of the user data.
:Types:
- `xmlnode_or_affiliation`: `libxml2.xmlNode` or `str`
- `role`: `str`
- `jid`: `JID`
- `nick`: `unicode`
- `actor`: `JID`
- `reason`: `unicode`
"""
self.jid,self.nick,self.actor,self.affiliation,self.reason,self.role=(None,)*6
MucItemBase.__init__(self)
if isinstance(xmlnode_or_affiliation,libxml2.xmlNode):
self.__from_xmlnode(xmlnode_or_affiliation)
else:
self.__init(xmlnode_or_affiliation,role,jid,nick,actor,reason)
def __init(self,affiliation,role,jid=None,nick=None,actor=None,reason=None):
"""Initialize a `MucItem` object from a set of attributes.
:Parameters:
- `affiliation`: affiliation of the user.
- `role`: role of the user.
- `jid`: JID of the user.
- `nick`: nickname of the user.
- `actor`: actor modyfying the user data.
- `reason`: reason of change of the user data.
:Types:
- `affiliation`: `str`
- `role`: `str`
- `jid`: `JID`
- `nick`: `unicode`
- `actor`: `JID`
- `reason`: `unicode`
"""
if not affiliation:
affiliation=None
elif affiliation not in affiliations:
raise ValueError,"Bad affiliation"
self.affiliation=affiliation
if not role:
role=None
elif role not in roles:
raise ValueError,"Bad role"
self.role=role
if jid:
self.jid=JID(jid)
else:
self.jid=None
if actor:
self.actor=JID(actor)
else:
self.actor=None
self.nick=nick
self.reason=reason
def __from_xmlnode(self, xmlnode):
"""Initialize a `MucItem` object from an XML node.
:Parameters:
- `xmlnode`: the XML node.
:Types:
- `xmlnode`: `libxml2.xmlNode`
"""
actor=None
reason=None
n=xmlnode.children
while n:
ns=n.ns()
if ns and ns.getContent()!=MUC_USER_NS:
continue
if n.name=="actor":
actor=n.getContent()
if n.name=="reason":
reason=n.getContent()
n=n.next
self.__init(
from_utf8(xmlnode.prop("affiliation")),
from_utf8(xmlnode.prop("role")),
from_utf8(xmlnode.prop("jid")),
from_utf8(xmlnode.prop("nick")),
from_utf8(actor),
from_utf8(reason),
);
def as_xml(self,parent):
"""
Create XML representation of `self`.
:Parameters:
- `parent`: the element to which the created node should be linked to.
:Types:
- `parent`: `libxml2.xmlNode`
:return: an XML node.
:returntype: `libxml2.xmlNode`
"""
n=parent.newChild(None,"item",None)
if self.actor:
n.newTextChild(None,"actor",to_utf8(self.actor))
if self.reason:
n.newTextChild(None,"reason",to_utf8(self.reason))
n.setProp("affiliation",to_utf8(self.affiliation))
if self.role:
n.setProp("role",to_utf8(self.role))
if self.jid:
n.setProp("jid",to_utf8(self.jid.as_unicode()))
if self.nick:
n.setProp("nick",to_utf8(self.nick))
return n
class MucStatus(MucItemBase):
    """
    MUC <status/> element - describes special meaning of a stanza.
    :Ivariables:
        - `code`: status code, as defined in JEP 45
    :Types:
        - `code`: `int`
    """
    def __init__(self,xmlnode_or_code):
        """Initialize a `MucStatus` element.
        :Parameters:
            - `xmlnode_or_code`: XML node to parse or a status code.
        :Types:
            - `xmlnode_or_code`: `libxml2.xmlNode` or `int`
        """
        self.code=None
        MucItemBase.__init__(self)
        if isinstance(xmlnode_or_code,libxml2.xmlNode):
            self.__from_xmlnode(xmlnode_or_code)
        else:
            self.__init(xmlnode_or_code)
    def __init(self,code):
        """Initialize a `MucStatus` element from a status code.
        :Parameters:
            - `code`: the status code.
        :Types:
            - `code`: `int`
        """
        code=int(code)
        # Status codes are rendered as three digits, so only 0-999 is legal.
        if code<0 or code>999:
            raise ValueError,"Bad status code"
        self.code=code
    def __from_xmlnode(self, xmlnode):
        """Initialize a `MucStatus` element from an XML node.
        :Parameters:
            - `xmlnode`: XML node to parse.
        :Types:
            - `xmlnode`: `libxml2.xmlNode`
        """
        self.code=int(xmlnode.prop("code"))
    def as_xml(self,parent):
        """
        Create XML representation of `self`.
        :Parameters:
            - `parent`: the element to which the created node should be linked to.
        :Types:
            - `parent`: `libxml2.xmlNode`
        :return: an XML node.
        :returntype: `libxml2.xmlNode`
        """
        n=parent.newChild(None,"status",None)
        # Zero-padded to three digits per the JEP-45 wire format.
        n.setProp("code","%03i" % (self.code,))
        return n
class MucUserX(MucXBase):
    """
    Wrapper for http://www.jabber.org/protocol/muc#user namespaced
    stanza payload "x" elements and usually containing information
    about a room user.
    :Ivariables:
        - `xmlnode`: wrapped XML node
    :Types:
        - `xmlnode`: `libxml2.xmlNode`
    """
    ns=MUC_USER_NS
    def get_items(self):
        """Get a list of objects describing the content of `self`.
        :return: the list of objects.
        :returntype: `list` of `MucItemBase` (`MucItem` and/or `MucStatus`)
        """
        if not self.xmlnode.children:
            return []
        ret=[]
        n=self.xmlnode.children
        while n:
            ns=n.ns()
            if ns and ns.getContent()!=self.ns:
                # Foreign-namespace child: ignore.
                pass
            elif n.name=="item":
                ret.append(MucItem(n))
            elif n.name=="status":
                ret.append(MucStatus(n))
            # FIXME: alt,decline,invite,password
            n=n.next
        return ret
    def clear(self):
        """
        Clear the content of `self.xmlnode` removing all <item/>, <status/>, etc.
        """
        if not self.xmlnode.children:
            return
        n=self.xmlnode.children
        while n:
            ns=n.ns()
            if ns and ns.getContent()!=MUC_USER_NS:
                pass
            else:
                n.unlinkNode()
                n.freeNode()
            # NOTE(review): after freeNode() this reads `.next` from a freed
            # node -- verify libxml2 bindings tolerate this access pattern.
            n=n.next
    def add_item(self,item):
        """Add an item to `self`.
        :Parameters:
            - `item`: the item to add.
        :Types:
            - `item`: `MucItemBase`
        """
        if not isinstance(item,MucItemBase):
            raise TypeError,"Bad item type for muc#user"
        item.as_xml(self.xmlnode)
class MucOwnerX(MucXBase):
    """
    Wrapper for http://www.jabber.org/protocol/muc#owner namespaced
    stanza payload "x" elements and usually containing information
    about a room user.
    :Ivariables:
        - `xmlnode`: wrapped XML node.
    :Types:
        - `xmlnode`: `libxml2.xmlNode`
    """
    # FIXME: implement -- currently inherits all behavior from MucXBase.
    pass
class MucAdminQuery(MucUserX):
    """
    Wrapper for http://www.jabber.org/protocol/muc#admin namespaced
    IQ stanza payload "query" elements and usually describing
    administrative actions or their results.
    Not implemented yet.
    """
    # Same item/status handling as MucUserX, but a different element name
    # and namespace.
    ns=MUC_ADMIN_NS
    element="query"
class MucStanzaExt:
    """
    Base class for MUC specific stanza extensions. Used together
    with one of stanza classes (Iq, Message or Presence).
    """
    def __init__(self):
        """Initialize a `MucStanzaExt` derived object."""
        if self.__class__ is MucStanzaExt:
            raise RuntimeError,"Abstract class called"
        self.xmlnode=None
        # Cached wrapper for the MUC payload child, built lazily.
        self.muc_child=None
    def get_muc_child(self):
        """
        Get the MUC specific payload element.
        :return: the object describing the stanza payload in MUC namespace.
        :returntype: `MucX` or `MucUserX` or `MucAdminQuery` or `MucOwnerX`
        """
        if self.muc_child:
            return self.muc_child
        if not self.xmlnode.children:
            return None
        n=self.xmlnode.children
        while n:
            if n.name not in ("x","query"):
                n=n.next
                continue
            ns=n.ns()
            if not ns:
                n=n.next
                continue
            ns_uri=ns.getContent()
            # Dispatch on (element name, namespace) to the matching wrapper.
            if (n.name,ns_uri)==("x",MUC_NS):
                self.muc_child=MucX(n)
                return self.muc_child
            if (n.name,ns_uri)==("x",MUC_USER_NS):
                self.muc_child=MucUserX(n)
                return self.muc_child
            if (n.name,ns_uri)==("query",MUC_ADMIN_NS):
                self.muc_child=MucAdminQuery(n)
                return self.muc_child
            if (n.name,ns_uri)==("query",MUC_OWNER_NS):
                self.muc_child=MucOwnerX(n)
                return self.muc_child
            n=n.next
    def clear_muc_child(self):
        """
        Remove the MUC specific stanza payload element.
        """
        if self.muc_child:
            self.muc_child.free_borrowed()
            self.muc_child=None
        if not self.xmlnode.children:
            return
        n=self.xmlnode.children
        while n:
            if n.name not in ("x","query"):
                n=n.next
                continue
            ns=n.ns()
            if not ns:
                n=n.next
                continue
            ns_uri=ns.getContent()
            if ns_uri in (MUC_NS,MUC_USER_NS,MUC_ADMIN_NS,MUC_OWNER_NS):
                n.unlinkNode()
                n.freeNode()
            n=n.next
    def make_muc_userinfo(self):
        """
        Create <x xmlns="...muc#user"/> element in the stanza.
        :return: the element created.
        :returntype: `MucUserX`
        """
        self.clear_muc_child()
        self.muc_child=MucUserX(parent=self.xmlnode)
        return self.muc_child
    def make_muc_admin_quey(self):
        """
        Create <query xmlns="...muc#admin"/> element in the stanza.
        :return: the element created.
        :returntype: `MucAdminQuery`
        """
        self.clear_muc_child()
        self.muc_child=MucAdminQuery(parent=self.xmlnode)
        return self.muc_child
    def muc_free(self):
        """
        Free MUC specific data.
        """
        if self.muc_child:
            self.muc_child.free_borrowed()
class MucPresence(Presence,MucStanzaExt):
    """
    Extend `Presence` with MUC related interface.
    """
    def __init__(self, xmlnode=None,from_jid=None,to_jid=None,stanza_type=None,stanza_id=None,
            show=None,status=None,priority=0,error=None,error_cond=None):
        """Initialize a `MucPresence` object.
        :Parameters:
            - `xmlnode`: XML node to_jid be wrapped into the `MucPresence` object
              or other Presence object to be copied. If not given then new
              presence stanza is created using following parameters.
            - `from_jid`: sender JID.
            - `to_jid`: recipient JID.
            - `stanza_type`: staza type: one of: None, "available", "unavailable",
              "subscribe", "subscribed", "unsubscribe", "unsubscribed" or
              "error". "available" is automaticaly changed to_jid None.
            - `stanza_id`: stanza id -- value of stanza's "id" attribute
            - `show`: "show" field of presence stanza. One of: None, "away",
              "xa", "dnd", "chat".
            - `status`: descriptive text for the presence stanza.
            - `priority`: presence priority.
            - `error_cond`: error condition name. Ignored if `stanza_type` is not "error"
        :Types:
            - `xmlnode`: `unicode` or `libxml2.xmlNode` or `pyxmpp.stanza.Stanza`
            - `from_jid`: `JID`
            - `to_jid`: `JID`
            - `stanza_type`: `unicode`
            - `stanza_id`: `unicode`
            - `show`: `unicode`
            - `status`: `unicode`
            - `priority`: `unicode`
            - `error_cond`: `unicode`"""
        MucStanzaExt.__init__(self)
        Presence.__init__(self,xmlnode,from_jid=from_jid,to_jid=to_jid,
                stanza_type=stanza_type,stanza_id=stanza_id,
                show=show,status=status,priority=priority,
                error=error,error_cond=error_cond)
    def copy(self):
        """
        Return a copy of `self`.
        """
        return MucPresence(self)
    def make_join_request(self, password = None, history_maxchars = None,
            history_maxstanzas = None, history_seconds = None,
            history_since = None):
        """
        Make the presence stanza a MUC room join request.
        :Parameters:
            - `password`: password to the room.
            - `history_maxchars`: limit of the total number of characters in
              history.
            - `history_maxstanzas`: limit of the total number of messages in
              history.
            - `history_seconds`: send only messages received in the last
              `seconds` seconds.
            - `history_since`: Send only the messages received since the
              dateTime specified (UTC).
        :Types:
            - `password`: `unicode`
            - `history_maxchars`: `int`
            - `history_maxstanzas`: `int`
            - `history_seconds`: `int`
            - `history_since`: `datetime.datetime`
        """
        self.clear_muc_child()
        self.muc_child=MucX(parent=self.xmlnode)
        # Only attach a <history/> element when at least one limit was given.
        if (history_maxchars is not None or history_maxstanzas is not None
                or history_seconds is not None or history_since is not None):
            history = HistoryParameters(history_maxchars, history_maxstanzas,
                    history_seconds, history_since)
            self.muc_child.set_history(history)
        if password is not None:
            self.muc_child.set_password(password)
    def get_join_info(self):
        """If `self` is a MUC room join request return the information contained.
        :return: the join request details or `None`.
        :returntype: `MucX`
        """
        x=self.get_muc_child()
        if not x:
            return None
        if not isinstance(x,MucX):
            return None
        return x
    def free(self):
        """Free the data associated with this `MucPresence` object."""
        self.muc_free()
        Presence.free(self)
class MucIq(Iq,MucStanzaExt):
    """
    Extend `Iq` with MUC related interface.
    """
    def __init__(self,xmlnode=None,from_jid=None,to_jid=None,stanza_type=None,stanza_id=None,
            error=None,error_cond=None):
        """Initialize an `Iq` object.
        :Parameters:
            - `xmlnode`: XML node to_jid be wrapped into the `Iq` object
              or other Iq object to be copied. If not given then new
              presence stanza is created using following parameters.
            - `from_jid`: sender JID.
            - `to_jid`: recipient JID.
            - `stanza_type`: staza type: one of: "get", "set", "result" or "error".
            - `stanza_id`: stanza id -- value of stanza's "id" attribute. If not
              given, then unique for the session value is generated.
            - `error_cond`: error condition name. Ignored if `stanza_type` is not "error".
        :Types:
            - `xmlnode`: `unicode` or `libxml2.xmlNode` or `Iq`
            - `from_jid`: `JID`
            - `to_jid`: `JID`
            - `stanza_type`: `unicode`
            - `stanza_id`: `unicode`
            - `error_cond`: `unicode`"""
        MucStanzaExt.__init__(self)
        Iq.__init__(self,xmlnode,from_jid=from_jid,to_jid=to_jid,
                stanza_type=stanza_type,stanza_id=stanza_id,
                error=error,error_cond=error_cond)
    def copy(self):
        """ Return a copy of `self`. """
        return MucIq(self)
    def make_kick_request(self,nick,reason):
        """
        Make the iq stanza a MUC room participant kick request.
        :Parameters:
            - `nick`: nickname of user to kick.
            - `reason`: reason of the kick.
        :Types:
            - `nick`: `unicode`
            - `reason`: `unicode`
        :return: object describing the kick request details.
        :returntype: `MucItem`
        """
        self.clear_muc_child()
        self.muc_child=MucAdminQuery(parent=self.xmlnode)
        # Kicking is expressed as setting role="none" for the nick.
        item=MucItem("none","none",nick=nick,reason=reason)
        self.muc_child.add_item(item)
        return self.muc_child
    def free(self):
        """Free the data associated with this `MucIq` object."""
        self.muc_free()
        Iq.free(self)
# vi: sts=4 et sw=4
|
lgpl-2.1
|
mozilla/zamboni
|
mkt/site/monitors.py
|
1
|
8772
|
import os
import socket
import StringIO
import tempfile
import time
import traceback
from django.conf import settings
import commonware.log
import elasticsearch
import requests
from cache_nuggets.lib import memoize
from PIL import Image
from lib.crypto import packaged, receipt
from lib.crypto.packaged import SigningError as PackageSigningError
from lib.crypto.receipt import SigningError
from mkt.site.storage_utils import local_storage
monitor_log = commonware.log.getLogger('z.monitor')
def memcache():
    """Check connectivity to every configured memcached host.

    Returns a (status, results) tuple: status is '' on success or a
    human-readable problem description, and results is a list of
    (ip, port, connected) entries, one per configured host.
    """
    memcache = getattr(settings, 'CACHES', {}).get('default')
    memcache_results = []
    status = ''
    if memcache and 'memcache' in memcache['BACKEND']:
        hosts = memcache['LOCATION']
        using_twemproxy = False
        if not isinstance(hosts, (tuple, list)):
            hosts = [hosts]
        for host in hosts:
            ip, port = host.split(':')
            if ip == '127.0.0.1':
                # A local address implies a twemproxy front-end, so the
                # "2+ servers" requirement below is waived.
                using_twemproxy = True
            try:
                s = socket.socket()
                s.connect((ip, int(port)))
            except Exception, e:
                result = False
                status = 'Failed to connect to memcached (%s): %s' % (host, e)
                monitor_log.critical(status)
            else:
                result = True
            finally:
                # NOTE(review): if socket.socket() itself raised, 's' would be
                # unbound here and this close would raise NameError -- verify.
                s.close()
            memcache_results.append((ip, port, result))
        if (not using_twemproxy and len(hosts) > 1 and
                len(memcache_results) < 2):
            # If the number of requested hosts is greater than 1, but less
            # than 2 replied, raise an error.
            status = ('2+ memcache servers are required.'
                      '%s available') % len(memcache_results)
            monitor_log.warning(status)
    # If we are in debug mode, don't worry about checking for memcache.
    elif settings.DEBUG:
        return status, []
    if not memcache_results:
        status = 'Memcache is not configured'
        monitor_log.info(status)
    return status, memcache_results
def libraries():
    """Check that required native libraries are importable and usable.

    Returns (status, results): status is '' or a summary listing missing
    libraries; results is a list of (name, ok, message) tuples.
    """
    # Check Libraries and versions
    libraries_results = []
    status = ''
    try:
        # A JPEG round-trip exercises both PIL and its libjpeg binding.
        Image.new('RGB', (16, 16)).save(StringIO.StringIO(), 'JPEG')
        libraries_results.append(('PIL+JPEG', True, 'Got it!'))
    except Exception, e:
        msg = "Failed to create a jpeg image: %s" % e
        libraries_results.append(('PIL+JPEG', False, msg))
    try:
        import M2Crypto  # NOQA
        libraries_results.append(('M2Crypto', True, 'Got it!'))
    except ImportError:
        libraries_results.append(('M2Crypto', False, 'Failed to import'))
    if settings.SPIDERMONKEY:
        if os.access(settings.SPIDERMONKEY, os.R_OK):
            libraries_results.append(('Spidermonkey is ready!', True, None))
            # TODO: see if it works?
        else:
            msg = "You said spidermonkey was at (%s)" % settings.SPIDERMONKEY
            libraries_results.append(('Spidermonkey', False, msg))
    # If settings are debug and spidermonkey is empty,
    # throw this error.
    elif settings.DEBUG and not settings.SPIDERMONKEY:
        msg = 'SPIDERMONKEY is empty'
        libraries_results.append(('Spidermonkey', True, msg))
    else:
        msg = "Please set SPIDERMONKEY in your settings file."
        libraries_results.append(('Spidermonkey', False, msg))
    missing_libs = [l for l, s, m in libraries_results if not s]
    if missing_libs:
        status = 'missing libs: %s' % ",".join(missing_libs)
    return status, libraries_results
def elastic():
    """Report Elasticsearch cluster health.

    Returns (status, results): status is '' when reachable and not red,
    'ES is red' on a red cluster, or 'traceback' when the cluster cannot
    be contacted; results is the health dict or an {'error': ...} dict.
    """
    client = elasticsearch.Elasticsearch(hosts=settings.ES_HOSTS)
    try:
        health = client.cluster.health()
    except elasticsearch.ElasticsearchException:
        monitor_log.exception('Failed to communicate with ES')
        return 'traceback', {'error': traceback.format_exc()}
    # Anything but a red cluster counts as healthy for this check.
    status = 'ES is red' if health['status'] == 'red' else ''
    return status, health
def path():
    """Check that required storage paths exist with the expected permissions.

    Returns (status, results): status is '' when everything is accessible,
    otherwise a pointer to the detailed status page; results is a list of
    (path, exists, has_perms, notes) tuples.
    """
    # Check file paths / permissions
    # Paths that must be both readable and writable.
    rw = (settings.TMP_PATH,
          settings.NETAPP_STORAGE,
          settings.UPLOADS_PATH,
          settings.ADDONS_PATH,
          settings.GUARDED_ADDONS_PATH,
          settings.ADDON_ICONS_PATH,
          settings.WEBSITE_ICONS_PATH,
          settings.PREVIEWS_PATH,
          settings.REVIEWER_ATTACHMENTS_PATH,)
    # Paths that only need to be readable.
    r = [os.path.join(settings.ROOT, 'locale')]
    filepaths = [(path, os.R_OK | os.W_OK, "We want read + write")
                 for path in rw]
    filepaths += [(path, os.R_OK, "We want read") for path in r]
    filepath_results = []
    filepath_status = True
    for path, perms, notes in filepaths:
        path_exists = os.path.exists(path)
        path_perms = os.access(path, perms)
        filepath_status = filepath_status and path_exists and path_perms
        filepath_results.append((path, path_exists, path_perms, notes))
    # The receipt signing key must exist and be readable.
    key_exists = os.path.exists(settings.WEBAPPS_RECEIPT_KEY)
    key_perms = os.access(settings.WEBAPPS_RECEIPT_KEY, os.R_OK)
    filepath_status = filepath_status and key_exists and key_perms
    filepath_results.append(('settings.WEBAPPS_RECEIPT_KEY',
                             key_exists, key_perms, 'We want read'))
    # BUGFIX: the original assigned `status = filepath_status` and then
    # immediately overwrote it with '' -- the first assignment was dead code.
    status = ''
    if not filepath_status:
        status = 'check main status page for broken perms'
    return status, filepath_results
# The signer check actually asks the signing server to sign something. Do this
# once per nagios check, once per web head might be a bit much. The memoize
# slows it down a bit, by caching the result for 15 seconds.
@memoize('monitors-signer', time=15)
def receipt_signer():
    """Round-trip a test receipt through the signing server.

    Returns (status, message): both are the same error string on failure,
    or ('', success message) when the signer and its cert look healthy.
    """
    destination = getattr(settings, 'SIGNING_SERVER', None)
    if not destination:
        return '', 'Signer is not configured.'
    # Just send some test data into the signer.
    now = int(time.time())
    not_valid = (settings.SITE_URL + '/not-valid')
    data = {'detail': not_valid, 'exp': now + 3600, 'iat': now,
            'iss': settings.SITE_URL,
            'product': {'storedata': 'id=1', 'url': u'http://not-valid.com'},
            'nbf': now, 'typ': 'purchase-receipt',
            'reissue': not_valid,
            'user': {'type': 'directed-identifier',
                     'value': u'something-not-valid'},
            'verify': not_valid
            }
    try:
        result = receipt.sign(data)
    except SigningError as err:
        msg = 'Error on signing (%s): %s' % (destination, err)
        return msg, msg
    try:
        cert, rest = receipt.crack(result)
    except Exception as err:
        msg = 'Error on cracking receipt (%s): %s' % (destination, err)
        return msg, msg
    # Check that the certs used to sign the receipts are not about to expire.
    limit = now + (60 * 60 * 24)  # One day.
    if cert['exp'] < limit:
        msg = 'Cert will expire soon (%s)' % destination
        return msg, msg
    cert_err_msg = 'Error on checking public cert (%s): %s'
    location = cert['iss']
    try:
        resp = requests.get(location, timeout=5, stream=False)
    except Exception as err:
        msg = cert_err_msg % (location, err)
        return msg, msg
    if not resp.ok:
        msg = cert_err_msg % (location, resp.reason)
        return msg, msg
    cert_json = resp.json()
    if not cert_json or 'jwk' not in cert_json:
        msg = cert_err_msg % (location, 'Not valid JSON/JWK')
        return msg, msg
    return '', 'Signer working and up to date'
# Like the receipt signer above this asks the packaged app signing
# service to sign one for us.
@memoize('monitors-package-signer', time=60)
def package_signer():
    """Sign a known-good test package against the app signing service.

    Returns (status, message): both the same error string on failure,
    or ('', success message) when signing worked.
    """
    destination = getattr(settings, 'SIGNED_APPS_SERVER', None)
    if not destination:
        return '', 'Signer is not configured.'
    # A fixture zip shipped next to this module is used as the test app.
    app_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                            'nagios_check_packaged_app.zip')
    signed_path = tempfile.mktemp()
    try:
        packaged.sign_app(local_storage.open(app_path), signed_path, None,
                          False, local=True)
        return '', 'Package signer working'
    except PackageSigningError, e:
        msg = 'Error on package signing (%s): %s' % (destination, e)
        return msg, msg
    finally:
        # Always remove the temporary signed artifact.
        local_storage.delete(signed_path)
# Not called settings to avoid conflict with django.conf.settings.
def settings_check():
    """Verify that every in-app purchase setting has a truthy value.

    Returns (status, message): both the same error string when a value is
    missing, or ('', success message) when all required settings are set.
    """
    required = ('APP_PURCHASE_KEY', 'APP_PURCHASE_TYP', 'APP_PURCHASE_AUD',
                'APP_PURCHASE_SECRET')
    for name in required:
        if not getattr(settings, name):
            msg = 'Missing required value %s' % name
            return msg, msg
    return '', 'Required settings ok'
|
bsd-3-clause
|
sschiau/swift
|
utils/gyb_syntax_support/NodeSerializationCodes.py
|
1
|
7368
|
from Node import error
# Maps each syntax node kind to the stable integer code used when
# (de)serializing syntax trees.  The table is append-only: never renumber
# or reuse an existing value, since that would break previously serialized
# trees; add new nodes with the next free code.  Uniqueness and coverage
# are enforced by verify_syntax_node_serialization_codes() below.
SYNTAX_NODE_SERIALIZATION_CODES = {
# 0 is 'Token'. Needs to be defined manually
# 1 is 'Unknown'. Needs to be defined manually
'UnknownDecl': 2,
'TypealiasDecl': 3,
'AssociatedtypeDecl': 4,
'IfConfigDecl': 5,
'PoundErrorDecl': 6,
'PoundWarningDecl': 7,
'PoundSourceLocation': 8,
'ClassDecl': 9,
'StructDecl': 10,
'ProtocolDecl': 11,
'ExtensionDecl': 12,
'FunctionDecl': 13,
'InitializerDecl': 14,
'DeinitializerDecl': 15,
'SubscriptDecl': 16,
'ImportDecl': 17,
'AccessorDecl': 18,
'VariableDecl': 19,
'EnumCaseDecl': 20,
'EnumDecl': 21,
'OperatorDecl': 22,
'PrecedenceGroupDecl': 23,
'UnknownExpr': 24,
'InOutExpr': 25,
'PoundColumnExpr': 26,
'TryExpr': 27,
'IdentifierExpr': 28,
'SuperRefExpr': 29,
'NilLiteralExpr': 30,
'DiscardAssignmentExpr': 31,
'AssignmentExpr': 32,
'SequenceExpr': 33,
'PoundLineExpr': 34,
'PoundFileExpr': 35,
'PoundFunctionExpr': 36,
'PoundDsohandleExpr': 37,
'SymbolicReferenceExpr': 38,
'PrefixOperatorExpr': 39,
'BinaryOperatorExpr': 40,
'ArrowExpr': 41,
'FloatLiteralExpr': 42,
'TupleExpr': 43,
'ArrayExpr': 44,
'DictionaryExpr': 45,
'ImplicitMemberExpr': 46,
'IntegerLiteralExpr': 47,
'StringLiteralExpr': 48,
'BooleanLiteralExpr': 49,
'TernaryExpr': 50,
'MemberAccessExpr': 51,
'DotSelfExpr': 52,
'IsExpr': 53,
'AsExpr': 54,
'TypeExpr': 55,
'ClosureExpr': 56,
'UnresolvedPatternExpr': 57,
'FunctionCallExpr': 58,
'SubscriptExpr': 59,
'OptionalChainingExpr': 60,
'ForcedValueExpr': 61,
'PostfixUnaryExpr': 62,
'SpecializeExpr': 63,
# NOTE(review): 64 is unused (gap between SpecializeExpr and KeyPathExpr) —
# presumably a retired node kind; codes are never reused, so do not fill it
# in without confirming.
'KeyPathExpr': 65,
'KeyPathBaseExpr': 66,
'ObjcKeyPathExpr': 67,
'ObjcSelectorExpr': 68,
'EditorPlaceholderExpr': 69,
'ObjectLiteralExpr': 70,
'UnknownStmt': 71,
'ContinueStmt': 72,
'WhileStmt': 73,
'DeferStmt': 74,
'ExpressionStmt': 75,
'RepeatWhileStmt': 76,
'GuardStmt': 77,
'ForInStmt': 78,
'SwitchStmt': 79,
'DoStmt': 80,
'ReturnStmt': 81,
'FallthroughStmt': 82,
'BreakStmt': 83,
'DeclarationStmt': 84,
'ThrowStmt': 85,
'IfStmt': 86,
'Decl': 87,
'Expr': 88,
'Stmt': 89,
'Type': 90,
'Pattern': 91,
'CodeBlockItem': 92,
'CodeBlock': 93,
'DeclNameArgument': 94,
'DeclNameArguments': 95,
'FunctionCallArgument': 96,
'TupleElement': 97,
'ArrayElement': 98,
'DictionaryElement': 99,
'ClosureCaptureItem': 100,
'ClosureCaptureSignature': 101,
'ClosureParam': 102,
'ClosureSignature': 103,
'StringSegment': 104,
'ExpressionSegment': 105,
'ObjcNamePiece': 106,
'TypeInitializerClause': 107,
'ParameterClause': 108,
'ReturnClause': 109,
'FunctionSignature': 110,
'IfConfigClause': 111,
'PoundSourceLocationArgs': 112,
'DeclModifier': 113,
'InheritedType': 114,
'TypeInheritanceClause': 115,
'MemberDeclBlock': 116,
'MemberDeclListItem': 117,
'SourceFile': 118,
'InitializerClause': 119,
'FunctionParameter': 120,
'AccessLevelModifier': 121,
'AccessPathComponent': 122,
'AccessorParameter': 123,
'AccessorBlock': 124,
'PatternBinding': 125,
'EnumCaseElement': 126,
'OperatorPrecedenceAndTypes': 127,
'PrecedenceGroupRelation': 128,
'PrecedenceGroupNameElement': 129,
'PrecedenceGroupAssignment': 130,
'PrecedenceGroupAssociativity': 131,
'Attribute': 132,
'LabeledSpecializeEntry': 133,
'ImplementsAttributeArguments': 134,
'ObjCSelectorPiece': 135,
'WhereClause': 136,
'ConditionElement': 137,
'AvailabilityCondition': 138,
'MatchingPatternCondition': 139,
'OptionalBindingCondition': 140,
'ElseIfContinuation': 141,
'ElseBlock': 142,
'SwitchCase': 143,
'SwitchDefaultLabel': 144,
'CaseItem': 145,
'SwitchCaseLabel': 146,
'CatchClause': 147,
'GenericWhereClause': 148,
'SameTypeRequirement': 149,
'GenericParameter': 150,
'GenericParameterClause': 151,
'ConformanceRequirement': 152,
'CompositionTypeElement': 153,
'TupleTypeElement': 154,
'GenericArgument': 155,
'GenericArgumentClause': 156,
'TypeAnnotation': 157,
'TuplePatternElement': 158,
'AvailabilityArgument': 159,
'AvailabilityLabeledArgument': 160,
'AvailabilityVersionRestriction': 161,
'VersionTuple': 162,
'CodeBlockItemList': 163,
'FunctionCallArgumentList': 164,
'TupleElementList': 165,
'ArrayElementList': 166,
'DictionaryElementList': 167,
'StringLiteralSegments': 168,
'DeclNameArgumentList': 169,
'ExprList': 170,
'ClosureCaptureItemList': 171,
'ClosureParamList': 172,
'ObjcName': 173,
'FunctionParameterList': 174,
'IfConfigClauseList': 175,
'InheritedTypeList': 176,
'MemberDeclList': 177,
'ModifierList': 178,
'AccessPath': 179,
'AccessorList': 180,
'PatternBindingList': 181,
'EnumCaseElementList': 182,
'PrecedenceGroupAttributeList': 183,
'PrecedenceGroupNameList': 184,
'TokenList': 185,
'NonEmptyTokenList': 186,
'AttributeList': 187,
'SpecializeAttributeSpecList': 188,
'ObjCSelector': 189,
'SwitchCaseList': 190,
'CatchClauseList': 191,
'CaseItemList': 192,
'ConditionElementList': 193,
'GenericRequirementList': 194,
'GenericParameterList': 195,
'CompositionTypeElementList': 196,
'TupleTypeElementList': 197,
'GenericArgumentList': 198,
'TuplePatternElementList': 199,
'AvailabilitySpecList': 200,
'UnknownPattern': 201,
'EnumCasePattern': 202,
'IsTypePattern': 203,
'OptionalPattern': 204,
'IdentifierPattern': 205,
'AsTypePattern': 206,
'TuplePattern': 207,
'WildcardPattern': 208,
'ExpressionPattern': 209,
'ValueBindingPattern': 210,
'UnknownType': 211,
'SimpleTypeIdentifier': 212,
'MemberTypeIdentifier': 213,
'ClassRestrictionType': 214,
'ArrayType': 215,
'DictionaryType': 216,
'MetatypeType': 217,
'OptionalType': 218,
'ImplicitlyUnwrappedOptionalType': 219,
'CompositionType': 220,
'TupleType': 221,
'FunctionType': 222,
'AttributedType': 223,
'YieldStmt': 224,
'YieldList': 225,
'IdentifierList': 226,
'NamedAttributeStringArgument': 227,
'DeclName': 228,
'PoundAssertStmt': 229,
'SomeType': 230,
'CustomAttribute': 231,
'GenericRequirement': 232,
'LayoutRequirement': 233,
'LayoutConstraint': 234,
'OpaqueReturnTypeOfAttributeArguments': 235,
}
def verify_syntax_node_serialization_codes(nodes, serialization_codes):
    """Sanity-check the serialization code table against the node list.

    Reports (via ``error``) every non-base node missing from
    *serialization_codes*, and every code value that appears more than
    once in the table.
    """
    # Every concrete (non-base) node must have an entry in the table.
    for node in nodes:
        if node.is_base():
            continue
        if node.syntax_kind not in serialization_codes:
            error('Node %s has no serialization code' % node.syntax_kind)
    # No two nodes may share the same serialization code.
    seen = set()
    for code in serialization_codes.values():
        if code in seen:
            error("Serialization code %d used twice" % code)
        seen.add(code)
def get_serialization_code(syntax_kind):
    # Look up the stable serialization code for *syntax_kind*.  Raises
    # KeyError for kinds not in the table (including the manually defined
    # 'Token' and 'Unknown').
    return SYNTAX_NODE_SERIALIZATION_CODES[syntax_kind]
|
apache-2.0
|
amitjamadagni/sympy
|
sympy/external/tests/test_codegen.py
|
3
|
11845
|
# This tests the compilation and execution of the source code generated with
# utilities.codegen. The compilation takes place in a temporary directory that
# is removed after the test. By default the test directory is always removed,
# but this behavior can be changed by setting the environment variable
# SYMPY_TEST_CLEAN_TEMP to:
# export SYMPY_TEST_CLEAN_TEMP=always : the default behavior.
# export SYMPY_TEST_CLEAN_TEMP=success : only remove the directories of working tests.
# export SYMPY_TEST_CLEAN_TEMP=never : never remove the directories with the test code.
# When a directory is not removed, the necessary information is printed on
# screen to find the files that belong to the (failed) tests. If a test does
# not fail, py.test captures all the output and you will not see the directories
# corresponding to the successful tests. Use the --nocapture option to see all
# the output.
# All tests below have a counterpart in utilities/test/test_codegen.py. In the
# latter file, the resulting code is compared with predefined strings, without
# compilation or execution.
# All the generated Fortran code should conform with the Fortran 95 standard,
# and all the generated C code should be ANSI C, which facilitates the
# incorporation in various projects. The tests below assume that the binary cc
# is somewhere in the path and that it can compile ANSI C code.
from __future__ import with_statement
from sympy.abc import x, y, z
from sympy.utilities.pytest import skip
from sympy.utilities.codegen import(
codegen, Routine, InputArgument, Result, get_code_generator
)
import sys
import os
import tempfile
import subprocess
# templates for the main program that will test the generated code.
# Keyed by target language; %(statements)s is filled with the rendered
# numerical tests below.
main_template = {}
main_template['F95'] = """
program main
include "codegen.h"
integer :: result;
result = 0
%(statements)s
call exit(result)
end program
"""
main_template['C'] = """
#include "codegen.h"
#include <stdio.h>
#include <math.h>
int main() {
int result = 0;
%(statements)s
return result;
}
"""
# templates for the numerical tests
# Each instance compares one generated-function call against its expected
# value and flips `result` to -1 when the difference exceeds the threshold.
numerical_test_template = {}
numerical_test_template['C'] = """
if (fabs(%(call)s)>%(threshold)s) {
printf("Numerical validation failed: %(call)s=%%e threshold=%(threshold)s\\n", %(call)s);
result = -1;
}
"""
numerical_test_template['F95'] = """
if (abs(%(call)s)>%(threshold)s) then
write(6,"('Numerical validation failed:')")
write(6,"('%(call)s=',e15.5,'threshold=',e15.5)") %(call)s, %(threshold)s
result = -1;
end if
"""
# command sequences for supported compilers
# Executed in order via the shell by try_run(); all must exit 0.
compile_commands = {}
compile_commands['cc'] = [
"cc -c codegen.c -o codegen.o",
"cc -c main.c -o main.o",
"cc main.o codegen.o -lm -o test.exe"
]
compile_commands['gfortran'] = [
"gfortran -c codegen.f90 -o codegen.o",
"gfortran -ffree-line-length-none -c main.f90 -o main.o",
"gfortran main.o codegen.o -o test.exe"
]
compile_commands['g95'] = [
"g95 -c codegen.f90 -o codegen.o",
"g95 -ffree-line-length-huge -c main.f90 -o main.o",
"g95 main.o codegen.o -o test.exe"
]
compile_commands['ifort'] = [
"ifort -c codegen.f90 -o codegen.o",
"ifort -c main.f90 -o main.o",
"ifort main.o codegen.o -o test.exe"
]
# The language/compiler pairs probed at import time (see the loop after
# is_feasible below).
combinations_lang_compiler = [
('C', 'cc'),
('F95', 'ifort'),
('F95', 'gfortran'),
('F95', 'g95')
]
def try_run(commands):
    """Run a series of shell commands and return True only if all exit 0.

    Output is discarded.  The original opened os.devnull without ever
    closing it (leaking a file handle on every call, including the early
    return); the with-block guarantees it is closed.
    """
    with open(os.devnull, 'w') as null:
        for command in commands:
            retcode = subprocess.call(command, stdout=null, shell=True,
                                      stderr=subprocess.STDOUT)
            if retcode != 0:
                return False
    return True
def run_test(label, routines, numerical_tests, language, commands, friendly=True):
    """A driver for the codegen tests.

    Generates code for *routines* in *language* inside a temporary
    directory, writes a main program containing *numerical_tests*, then
    compiles and runs it with *commands*.  The test passes when both the
    compilation and the validation run correctly.  The SYMPY_TEST_CLEAN_TEMP
    environment variable ('always'/'success'/'never') controls whether the
    temporary directory is removed afterwards.
    """
    # Check input arguments before touching the file system
    language = language.upper()
    assert language in main_template
    assert language in numerical_test_template
    # Check that the environment variable makes sense
    clean = os.getenv('SYMPY_TEST_CLEAN_TEMP', 'always').lower()
    if clean not in ('always', 'success', 'never'):
        raise ValueError("SYMPY_TEST_CLEAN_TEMP must be one of the following: 'always', 'success' or 'never'.")
    # 1) prepare the temporary working directory, switch to that dir
    work = tempfile.mkdtemp("_sympy_%s_test" % language, "%s_" % label)
    oldwork = os.getcwd()
    os.chdir(work)
    # 2) write the generated code
    if friendly:
        # interpret the routines as a name_expr list and call the friendly
        # function codegen
        codegen(routines, language, "codegen", to_files=True)
    else:
        code_gen = get_code_generator(language, "codegen")
        code_gen.write(routines, "codegen", to_files=True)
    # 3) write a simple main program that links to the generated code, and
    #    that includes the numerical tests
    test_strings = []
    for fn_name, args, expected, threshold in numerical_tests:
        call_string = "%s(%s)-(%s)" % (
            fn_name, ",".join(str(arg) for arg in args), expected)
        if language == "F95":
            # Fortran needs double-precision literals (1.5d-3, 2.0d0).
            call_string = fortranize_double_constants(call_string)
            threshold = fortranize_double_constants(str(threshold))
        test_strings.append(numerical_test_template[language] % {
            "call": call_string,
            "threshold": threshold,
        })
    if language == "F95":
        f_name = "main.f90"
    elif language == "C":
        f_name = "main.c"
    else:
        # Was `raise NotImplemented(...)`: NotImplemented is a comparison
        # sentinel, not an exception class, so that line raised a TypeError
        # instead of the intended error.
        raise NotImplementedError(
            "FIXME: filename extension unknown for language: %s" % language)
    with open(f_name, "w") as f:
        f.write(
            main_template[language] % {'statements': "".join(test_strings)})
    # 4) Compile and link
    compiled = try_run(commands)
    # 5) Run if compiled
    if compiled:
        executed = try_run(["./test.exe"])
    else:
        executed = False
    # 6) Clean up stuff
    if clean == 'always' or (clean == 'success' and compiled and executed):
        def safe_remove(filename):
            # Tolerate files a failed stage never produced.
            if os.path.isfile(filename):
                os.remove(filename)
        safe_remove("codegen.f90")
        safe_remove("codegen.c")
        safe_remove("codegen.h")
        safe_remove("codegen.o")
        safe_remove("main.f90")
        safe_remove("main.c")
        safe_remove("main.o")
        safe_remove("test.exe")
        os.chdir(oldwork)
        os.rmdir(work)
    else:
        # Was `print >> sys.stderr, ...` — Python-2-only syntax that is a
        # SyntaxError on Python 3; write() works on both.
        sys.stderr.write("TEST NOT REMOVED: %s\n" % work)
        os.chdir(oldwork)
    # 7) Do the assertions in the end
    assert compiled, "failed to compile %s code with:\n%s" % (
        language, "\n".join(commands))
    assert executed, "failed to execute %s code from:\n%s" % (
        language, "\n".join(commands))
def fortranize_double_constants(code_string):
    """Replace every literal float in *code_string* with a Fortran
    double-precision literal (1.5e-3 -> 1.5d-3, 2.0 -> 2.0d0).

    Integers and literals already in d-form are left untouched.
    """
    import re
    # Raw strings: the originals used plain '\d', an invalid escape that
    # warns on Python 3.6+ and is slated to become an error.
    pattern_exp = re.compile(r'\d+(\.)?\d*[eE]-?\d+')
    pattern_float = re.compile(r'\d+\.\d*(?!\d*d)')

    def subs_exp(matchobj):
        # 1.5e-3 -> 1.5d-3
        return re.sub('[eE]', 'd', matchobj.group(0))

    def subs_float(matchobj):
        # 2.0 -> 2.0d0
        return "%sd0" % matchobj.group(0)
    # Exponent literals first, so the plain-float pass (whose lookahead
    # rejects anything followed by 'd') skips the already-converted ones.
    code_string = pattern_exp.sub(subs_exp, code_string)
    code_string = pattern_float.sub(subs_float, code_string)
    return code_string
def is_feasible(language, commands):
    """Return True when the toolchain behind *commands* can build and run a
    trivial generated routine — i.e. the compiler is actually present."""
    checks = [
        ("test", (1.0,), 1.0, 1e-15),
        ("test", (-1.0,), -1.0, 1e-15),
    ]
    try:
        run_test("is_feasible", [Routine("test", x)], checks, language,
                 commands, friendly=False)
    except AssertionError:
        return False
    return True
valid_lang_commands = []
invalid_lang_compilers = []
# Probe every language/compiler pair once at import time; the test
# functions below consult these lists so that missing toolchains are
# reported as skips rather than failures.
for lang, compiler in combinations_lang_compiler:
    commands = compile_commands[compiler]
    if is_feasible(lang, commands):
        valid_lang_commands.append((lang, commands))
    else:
        invalid_lang_compilers.append((lang, compiler))
# We test all language-compiler combinations, just to report what is skipped
def test_C_cc():
    # Report a skip (not a failure) when the import-time probe found no
    # working `cc' compiler.
    if ("C", 'cc') in invalid_lang_compilers:
        skip("`cc' command didn't work as expected")
def test_F95_ifort():
    # Report a skip (not a failure) when the import-time probe found no
    # working `ifort' compiler.
    if ("F95", 'ifort') in invalid_lang_compilers:
        skip("`ifort' command didn't work as expected")
def test_F95_gfortran():
    # Report a skip (not a failure) when the import-time probe found no
    # working `gfortran' compiler.
    if ("F95", 'gfortran') in invalid_lang_compilers:
        skip("`gfortran' command didn't work as expected")
def test_F95_g95():
    # Report a skip (not a failure) when the import-time probe found no
    # working `g95' compiler.
    if ("F95", 'g95') in invalid_lang_compilers:
        skip("`g95' command didn't work as expected")
# Here comes the actual tests
def test_basic_codegen():
    """Compile and run one simple expression in every working language."""
    cases = [
        ("test", (1.0, 6.0, 3.0), 21.0, 1e-15),
        ("test", (-1.0, 2.0, -2.5), -2.5, 1e-15),
    ]
    expressions = [("test", (x + y)*z)]
    for language, commands in valid_lang_commands:
        run_test("basic_codegen", expressions, cases, language, commands)
def test_intrinsic_math1_codegen():
    """Check the single-argument intrinsic math functions per language."""
    # not included: log10
    from sympy import (acos, asin, atan, ceiling, cos, cosh, floor, log, ln,
                       sin, sinh, sqrt, tan, tanh, N)
    name_expr = [
        ("test_fabs", abs(x)),
        ("test_acos", acos(x)),
        ("test_asin", asin(x)),
        ("test_atan", atan(x)),
        ("test_cos", cos(x)),
        ("test_cosh", cosh(x)),
        ("test_log", log(x)),
        ("test_ln", ln(x)),
        ("test_sin", sin(x)),
        ("test_sinh", sinh(x)),
        ("test_sqrt", sqrt(x)),
        ("test_tan", tan(x)),
        ("test_tanh", tanh(x)),
    ]
    # Expected values come from sympy's own numerical evaluation.
    numerical_tests = []
    for name, expr in name_expr:
        for xval in 0.2, 0.5, 0.8:
            numerical_tests.append(
                (name, (xval,), N(expr.subs(x, xval)), 1e-14))
    for lang, commands in valid_lang_commands:
        # floor/ceiling are only exercised for C.
        extra = ([("test_floor", floor(x)), ("test_ceil", ceiling(x))]
                 if lang == "C" else [])
        run_test("intrinsic_math1", name_expr + extra,
                 numerical_tests, lang, commands)
def test_instrinsic_math2_codegen():
    # (sic: "instrinsic" — the misspelled name is kept for compatibility.)
    """Check the two-argument intrinsics (atan2, pow) per language."""
    # not included: frexp, ldexp, modf, fmod
    from sympy import atan2, N
    name_expr = [
        ("test_atan2", atan2(x, y)),
        ("test_pow", x**y),
    ]
    samples = [(0.2, 1.3), (0.5, -0.2), (0.8, 0.8)]
    numerical_tests = [
        (name, (xval, yval), N(expr.subs(x, xval).subs(y, yval)), 1e-14)
        for name, expr in name_expr
        for xval, yval in samples
    ]
    for lang, commands in valid_lang_commands:
        run_test("intrinsic_math2", name_expr, numerical_tests, lang, commands)
def test_complicated_codegen():
    """Stress the generators with a large expanded polynomial and deeply
    nested function calls."""
    from sympy import sin, cos, tan, N
    name_expr = [
        ("test1", ((sin(x) + cos(y) + tan(z))**7).expand()),
        ("test2", cos(cos(cos(cos(cos(cos(cos(cos(x + y + z))))))))),
    ]
    samples = [(0.2, 1.3, -0.3), (0.5, -0.2, 0.0), (0.8, 2.1, 0.8)]
    numerical_tests = []
    for name, expr in name_expr:
        for xval, yval, zval in samples:
            value = N(expr.subs(x, xval).subs(y, yval).subs(z, zval))
            numerical_tests.append((name, (xval, yval, zval), value, 1e-12))
    for lang, commands in valid_lang_commands:
        run_test(
            "complicated_codegen", name_expr, numerical_tests, lang, commands)
|
bsd-3-clause
|
Azure/azure-sdk-for-python
|
sdk/apimanagement/azure-mgmt-apimanagement/azure/mgmt/apimanagement/aio/operations/_api_operation_operations.py
|
1
|
28513
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ApiOperationOperations:
"""ApiOperationOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.apimanagement.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
    # Stash the shared pipeline client, configuration, and the
    # (de)serialization helpers; every operation method below goes
    # through these.
    self._client = client
    self._config = config
    self._serialize = serializer
    self._deserialize = deserializer
def list_by_api(
self,
resource_group_name: str,
service_name: str,
api_id: str,
filter: Optional[str] = None,
top: Optional[int] = None,
skip: Optional[int] = None,
tags: Optional[str] = None,
**kwargs
) -> AsyncIterable["_models.OperationCollection"]:
"""Lists a collection of the operations for the specified API.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
:type api_id: str
:param filter: | Field | Usage | Supported operators | Supported
functions |</br>|-------------|-------------|-------------|-------------|</br>| name |
filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |</br>|
displayName | filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith
|</br>| method | filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith
|</br>| description | filter | ge, le, eq, ne, gt, lt | substringof, contains, startswith,
endswith |</br>| urlTemplate | filter | ge, le, eq, ne, gt, lt | substringof, contains,
startswith, endswith |</br>.
:type filter: str
:param top: Number of records to return.
:type top: int
:param skip: Number of records to skip.
:type skip: int
:param tags: Include tags in the response.
:type tags: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationCollection or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.apimanagement.models.OperationCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Builds the first-page request (full URL + query parameters) or, when
# next_link is given, a follow-up request to the server-supplied URL.
def prepare_request(next_link=None):
# Construct headers
header_parameters = {}  # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_api.metadata['url']  # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters (optional OData-style query arguments)
query_parameters = {}  # type: Dict[str, Any]
if filter is not None:
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int', minimum=1)
if skip is not None:
query_parameters['$skip'] = self._serialize.query("skip", skip, 'int', minimum=0)
if tags is not None:
query_parameters['tags'] = self._serialize.query("tags", tags, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
# next_link already encodes all query parameters.
url = next_link
query_parameters = {}  # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
# Deserializes one page into (next_link, items) for the async pager.
async def extract_data(pipeline_response):
deserialized = self._deserialize('OperationCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
# Fetches one page and raises an ARM-formatted error on non-200.
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(_models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_api.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations'}  # type: ignore
async def get_entity_tag(
self,
resource_group_name: str,
service_name: str,
api_id: str,
operation_id: str,
**kwargs
) -> bool:
"""Gets the entity state (Etag) version of the API operation specified by its identifier.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
:type api_id: str
:param operation_id: Operation identifier within an API. Must be unique in the current API
Management service instance.
:type operation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: bool, or the result of cls(response)
:rtype: bool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None)  # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.get_entity_tag.metadata['url']  # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}  # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}  # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
# HEAD request: only the status code and ETag header matter, no body.
request = self._client.head(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
if cls:
return cls(pipeline_response, None, response_headers)
# Only 200 reaches here, so this is effectively True; the expression
# mirrors the generated pattern of treating any 2xx as success.
return 200 <= response.status_code <= 299
get_entity_tag.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}'}  # type: ignore
async def get(
self,
resource_group_name: str,
service_name: str,
api_id: str,
operation_id: str,
**kwargs
) -> "_models.OperationContract":
"""Gets the details of the API Operation specified by its identifier.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
:type api_id: str
:param operation_id: Operation identifier within an API. Must be unique in the current API
Management service instance.
:type operation_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationContract, or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.OperationContract
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationContract"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url']  # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}  # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}  # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
# The ETag is surfaced alongside the body so callers can use it for
# optimistic-concurrency updates (If-Match).
response_headers = {}
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('OperationContract', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}'}  # type: ignore
async def create_or_update(
self,
resource_group_name: str,
service_name: str,
api_id: str,
operation_id: str,
parameters: "_models.OperationContract",
if_match: Optional[str] = None,
**kwargs
) -> "_models.OperationContract":
"""Creates a new operation in the API or updates an existing one.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param service_name: The name of the API Management service.
:type service_name: str
:param api_id: API revision identifier. Must be unique in the current API Management service
instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
:type api_id: str
:param operation_id: Operation identifier within an API. Must be unique in the current API
Management service instance.
:type operation_id: str
:param parameters: Create parameters.
:type parameters: ~azure.mgmt.apimanagement.models.OperationContract
:param if_match: ETag of the Entity. Not required when creating an entity, but required when
updating an entity.
:type if_match: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationContract, or the result of cls(response)
:rtype: ~azure.mgmt.apimanagement.models.OperationContract
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationContract"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-12-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url']  # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}  # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {}  # type: Dict[str, Any]
# If-Match is optional here: omitted on create, required for update.
if if_match is not None:
header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {}  # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'OperationContract')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
# 200 = updated existing operation, 201 = created a new one; both return
# the contract plus its new ETag.
response_headers = {}
if response.status_code == 200:
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('OperationContract', pipeline_response)
if response.status_code == 201:
response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
deserialized = self._deserialize('OperationContract', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, response_headers)
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}'}  # type: ignore
async def update(
    self,
    resource_group_name: str,
    service_name: str,
    api_id: str,
    operation_id: str,
    if_match: str,
    parameters: "_models.OperationUpdateContract",
    **kwargs
) -> "_models.OperationContract":
    """Updates the details of the operation in the API specified by its identifier.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param service_name: The name of the API Management service.
    :type service_name: str
    :param api_id: API revision identifier. Must be unique in the current API Management service
     instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
    :type api_id: str
    :param operation_id: Operation identifier within an API. Must be unique in the current API
     Management service instance.
    :type operation_id: str
    :param if_match: ETag of the Entity. ETag should match the current entity state from the header
     response of the GET request or it should be * for unconditional update.
    :type if_match: str
    :param parameters: API Operation Update parameters.
    :type parameters: ~azure.mgmt.apimanagement.models.OperationUpdateContract
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: OperationContract, or the result of cls(response)
    :rtype: ~azure.mgmt.apimanagement.models.OperationContract
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.OperationContract"]
    # Map HTTP status codes to azure-core exception types; callers may extend
    # the mapping via the 'error_map' keyword argument.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-12-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL from the route template stored on this method's metadata.
    url = self.update.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
        'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
        'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers; If-Match is mandatory for this operation (use "*"
    # for an unconditional update).
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    body_content = self._serialize.body(parameters, 'OperationUpdateContract')
    body_content_kwargs['content'] = body_content
    request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Only 200 is a success for PATCH here; anything else is mapped to an
    # azure-core / ARM error with the deserialized service error attached.
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    response_headers = {}
    # Surface the entity's ETag so callers can do optimistic-concurrency updates.
    response_headers['ETag']=self._deserialize('str', response.headers.get('ETag'))
    deserialized = self._deserialize('OperationContract', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, response_headers)

    return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}'}  # type: ignore
async def delete(
    self,
    resource_group_name: str,
    service_name: str,
    api_id: str,
    operation_id: str,
    if_match: str,
    **kwargs
) -> None:
    """Deletes the specified operation in the API.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param service_name: The name of the API Management service.
    :type service_name: str
    :param api_id: API revision identifier. Must be unique in the current API Management service
     instance. Non-current revision has ;rev=n as a suffix where n is the revision number.
    :type api_id: str
    :param operation_id: Operation identifier within an API. Must be unique in the current API
     Management service instance.
    :type operation_id: str
    :param if_match: ETag of the Entity. ETag should match the current entity state from the header
     response of the GET request or it should be * for unconditional update.
    :type if_match: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None, or the result of cls(response)
    :rtype: None
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    # Map HTTP status codes to azure-core exception types; callers may extend
    # the mapping via the 'error_map' keyword argument.
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-12-01"
    accept = "application/json"

    # Construct URL from the route template stored on this method's metadata.
    url = self.delete.metadata['url']  # type: ignore
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'serviceName': self._serialize.url("service_name", service_name, 'str', max_length=50, min_length=1, pattern=r'^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'),
        'apiId': self._serialize.url("api_id", api_id, 'str', max_length=256, min_length=1, pattern=r'^[^*#&+:<>?]+$'),
        'operationId': self._serialize.url("operation_id", operation_id, 'str', max_length=80, min_length=1),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct query parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers; If-Match is mandatory for delete (use "*" for an
    # unconditional delete).
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.delete(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    # Both 200 and 204 indicate a successful delete.
    if response.status_code not in [200, 204]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize(_models.ErrorResponse, response)
        raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

    # Successful delete carries no body; only invoke the custom callback.
    if cls:
        return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/apis/{apiId}/operations/{operationId}'}  # type: ignore
|
mit
|
qwefi/nova
|
nova/console/rpcapi.py
|
4
|
2739
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Client side of the console RPC API.
"""
from oslo.config import cfg
import nova.openstack.common.rpc.proxy
# Topic the console proxy service listens on; configurable so deployments can
# use a non-default topic name.
rpcapi_opts = [
    cfg.StrOpt('console_topic',
               default='console',
               help='the topic console proxy nodes listen on'),
]

CONF = cfg.CONF
CONF.register_opts(rpcapi_opts)

# Optional cap limiting the message version sent to console services during a
# rolling upgrade; registered under the [upgrade_levels] config group.
rpcapi_cap_opt = cfg.StrOpt('console',
        default=None,
        help='Set a version cap for messages sent to console services')
CONF.register_opt(rpcapi_cap_opt, 'upgrade_levels')
class ConsoleAPI(nova.openstack.common.rpc.proxy.RpcProxy):
    '''Client-side proxy for the console rpc API.

    API version history:

        1.0 - Initial version.
        1.1 - Added get_backdoor_port()

    ... Grizzly supports message version 1.1, so any change to an existing
    method in 2.x after that point must still work when the version_cap is
    pinned to 1.1.
    '''

    #
    # NOTE(russellb): Default minimum version that the server (manager) side
    # must implement unless a version argument is passed to
    # self.call()/cast()/etc. here.  Keep it at X.0 where X is the current
    # major API version (1.0, 2.0, ...); see the rpc API versioning docs in
    # openstack/common/rpc/dispatcher.py.
    #
    BASE_RPC_API_VERSION = '1.0'

    # Release-name aliases accepted for the [upgrade_levels] console option.
    VERSION_ALIASES = {
        'grizzly': '1.1',
    }

    def __init__(self, topic=None):
        # Resolve a release alias (e.g. "grizzly") to its message version;
        # an explicit version string is passed through unchanged.
        cap = CONF.upgrade_levels.console
        super(ConsoleAPI, self).__init__(
            topic=topic or CONF.console_topic,
            default_version=self.BASE_RPC_API_VERSION,
            version_cap=self.VERSION_ALIASES.get(cap, cap))

    def add_console(self, ctxt, instance_id):
        """Fire-and-forget request to attach a console to an instance."""
        msg = self.make_msg('add_console', instance_id=instance_id)
        self.cast(ctxt, msg)

    def remove_console(self, ctxt, console_id):
        """Fire-and-forget request to tear down a console."""
        msg = self.make_msg('remove_console', console_id=console_id)
        self.cast(ctxt, msg)
|
apache-2.0
|
Thielak/program-y
|
src/programy/__init__.py
|
49
|
1066
|
"""
Copyright (c) 2016 Keith Sterling
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
|
mit
|
leshchevds/ganeti
|
lib/utils/text.py
|
5
|
18938
|
#
#
# Copyright (C) 2006, 2007, 2010, 2011 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utility functions for manipulating or working with text.
"""
import re
import os
import time
import collections
from ganeti import errors
from ganeti import compat
#: Unit checker regexp: a number (possibly with a decimal point), optional
#: whitespace, and an optional alphabetic unit suffix; used by ParseUnit
_PARSEUNIT_REGEX = re.compile(r"^([.\d]+)\s*([a-zA-Z]+)?$")

#: Characters which don't need to be quoted for shell commands
_SHELL_UNQUOTED_RE = re.compile("^[-.,=:/_+@A-Za-z0-9]+$")

#: Shell param checker regexp
_SHELLPARAM_REGEX = re.compile(r"^[-a-zA-Z0-9._+/:%@]+$")

#: ASCII equivalent of unicode character 'HORIZONTAL ELLIPSIS' (U+2026)
_ASCII_ELLIPSIS = "..."

#: MAC address octet (two lower-case hexadecimal digits)
_MAC_ADDR_OCTET_RE = r"[0-9a-f]{2}"
def MatchNameComponent(key, name_list, case_sensitive=True):
    """Try to match a name against a list of names.

    Matches C{key} like I{test1} against a list such as
    C{['test1.example.com', 'test2.example.com', ...]}: both I{'test1'} and
    I{'test1.example'} match the first entry, but I{'test1.ex'} does not.
    Multiple matches count as no match at all (e.g. I{'test1'} against
    C{['test1.example.com', 'test1.example.org']}), unless the key matches an
    entry exactly (e.g. I{'test1'} against C{['test1', 'test1.example.com']}).

    @type key: str
    @param key: the name to be searched
    @type name_list: list
    @param name_list: the list of strings against which to search the key
    @type case_sensitive: boolean
    @param case_sensitive: whether to provide a case-sensitive match
    @rtype: None or str
    @return: None if there is no match I{or} if there are multiple matches,
        otherwise the element from the list which matches

    """
    # An exact hit wins immediately.
    if key in name_list:
        return key

    flags = 0
    if not case_sensitive:
        flags |= re.IGNORECASE
        key = key.upper()
    prefix_re = re.compile(r"^%s(\..*)?$" % re.escape(key), flags)

    # Names the key is a dot-boundary prefix of.
    prefix_matches = [name for name in name_list
                      if prefix_re.match(name) is not None]
    # Case-insensitive full-string matches take precedence when unique.
    exact_matches = [name for name in name_list
                     if not case_sensitive and key == name.upper()]

    if len(exact_matches) == 1:
        return exact_matches[0]
    if len(prefix_matches) == 1:
        return prefix_matches[0]
    return None
def _DnsNameGlobHelper(match):
"""Helper function for L{DnsNameGlobPattern}.
Returns regular expression pattern for parts of the pattern.
"""
text = match.group(0)
if text == "*":
return "[^.]*"
elif text == "?":
return "[^.]"
else:
return re.escape(text)
def DnsNameGlobPattern(pattern):
    """Generates a regular expression from a DNS name globbing pattern.

    A DNS name globbing pattern (e.g. C{*.site}) is converted to a regular
    expression.  Escape sequences or ranges (e.g. [a-z]) are not supported.
    Matching always starts at the leftmost part.  An asterisk (*) matches all
    characters except the dot (.) separating DNS name parts, a question mark
    (?) matches a single character except the dot (.).

    @type pattern: string
    @param pattern: DNS name globbing pattern
    @rtype: string
    @return: Regular expression

    """
    def _translate(match):
        # Same translation as L{_DnsNameGlobHelper}, kept local so this
        # conversion is self-contained.
        part = match.group(0)
        if part == "*":
            return "[^.]*"
        elif part == "?":
            return "[^.]"
        return re.escape(part)

    return r"^%s(\..*)?$" % re.sub(r"\*|\?|[^*?]*", _translate, pattern)
def FormatUnit(value, units, roman=False):
    """Formats an incoming number of MiB with the appropriate unit.

    @type value: int
    @param value: integer representing the value in MiB (1048576)
    @type units: char
    @param units: the type of formatting we should do:
        - 'h' for automatic scaling
        - 'm' for MiBs
        - 'g' for GiBs
        - 't' for TiBs
    @type roman: bool
    @param roman: whether to render the number via compat.RomanOrRounded
    @rtype: str
    @return: the formatted value (with suffix)
    @raise errors.ProgrammerError: for an unknown unit or a non-numeric value

    """
    if units not in ("m", "g", "t", "h"):
        raise errors.ProgrammerError("Invalid unit specified '%s'" % str(units))

    if not isinstance(value, (int, long, float)):
        raise errors.ProgrammerError("Invalid value specified '%s (%s)'" % (
                                     value, type(value)))

    # 'h' auto-scales based on the magnitude and carries a unit suffix; the
    # fixed units render without one.
    if units == "m" or (units == "h" and value < 1024):
        suffix = "M" if units == "h" else ""
        return "%s%s" % (compat.RomanOrRounded(value, 0, roman), suffix)

    if units == "g" or (units == "h" and value < (1024 * 1024)):
        suffix = "G" if units == "h" else ""
        return "%s%s" % (compat.RomanOrRounded(float(value) / 1024, 1, roman),
                         suffix)

    suffix = "T" if units == "h" else ""
    return "%s%s" % (compat.RomanOrRounded(float(value) / 1024 / 1024, 1, roman),
                     suffix)
def ParseUnit(input_string):
    """Tries to extract number and scale from the given string.

    Input must be in the format C{NUMBER+ [DOT NUMBER+] SPACE* [UNIT]}.  If no
    unit is specified, it defaults to MiB.  The return value is always an int
    in MiB, rounded up to the next multiple of four.

    @param input_string: the value to parse (converted with C{str} first)
    @rtype: int
    @raise errors.UnitParseError: when the input cannot be parsed

    """
    match = re.match(r"^([.\d]+)\s*([a-zA-Z]+)?$", str(input_string))
    if not match:
        raise errors.UnitParseError("Invalid format")

    value = float(match.group(1))

    unit = match.group(2)
    lcunit = unit.lower() if unit else "m"

    if lcunit in ("m", "mb", "mib"):
        pass  # value already in MiB
    elif lcunit in ("g", "gb", "gib"):
        value *= 1024
    elif lcunit in ("t", "tb", "tib"):
        value *= 1024 * 1024
    else:
        raise errors.UnitParseError("Unknown unit: %s" % unit)

    # Round up to a whole MiB ...
    if int(value) < value:
        value += 1

    # ... and then up to the next multiple of four
    value = int(value)
    if value % 4:
        value += 4 - value % 4

    return value
def ShellQuote(value):
    """Quotes shell argument according to POSIX.

    @type value: str
    @param value: the argument to be quoted
    @rtype: str
    @return: the quoted value

    """
    # Strings built solely from "safe" characters need no quoting at all.
    if re.match("^[-.,=:/_+@A-Za-z0-9]+$", value):
        return value

    # POSIX single-quoting: each embedded single quote closes the quote,
    # emits an escaped quote, and reopens it.
    return "'%s'" % value.replace("'", "'\\''")
def ShellQuoteArgs(args):
    """Quotes a list of shell arguments.

    @type args: list
    @param args: list of arguments to be quoted
    @rtype: str
    @return: the quoted arguments concatenated with spaces

    """
    return " ".join(ShellQuote(arg) for arg in args)
def ShellCombineCommands(cmdlist):
    """Out of a list of shell commands construct a single one.

    The commands are chained with C{&&}, so each one only runs if the
    previous ones succeeded.

    @param cmdlist: list of commands, each a list of arguments
    @return: a single C{/bin/sh -c} invocation running all commands

    """
    combined = " && ".join(ShellQuoteArgs(cmd) for cmd in cmdlist)
    return ["/bin/sh", "-c", combined]
class ShellWriter(object):
    """Helper class to write scripts with indentation.

    """
    # String emitted once per indentation level at the start of every
    # non-empty line.
    INDENT_STR = " "

    def __init__(self, fh, indent=True):
        """Initializes this class.

        @param fh: file-like object the script is written to
        @type indent: bool
        @param indent: whether indentation is emitted at all

        """
        self._fh = fh
        self._indent_enabled = indent
        self._indent = 0

    def IncIndent(self):
        """Increase indentation level by 1.

        """
        self._indent += 1

    def DecIndent(self):
        """Decrease indentation level by 1.

        """
        assert self._indent > 0
        self._indent -= 1

    def Write(self, txt, *args):
        """Write line to output file.

        @param txt: line content, optionally a %-format template
        @param args: values substituted into C{txt} when given

        """
        assert self._indent >= 0

        line = txt % args if args else txt

        # Empty lines are written without indentation
        if line and self._indent_enabled:
            self._fh.write(self._indent * self.INDENT_STR)

        self._fh.write(line)
        self._fh.write("\n")
def GenerateSecret(numbytes=20):
    """Generates a random secret.

    This will generate a pseudo-random secret returning an hex string
    (so that it can be used where an ASCII string is needed).

    @param numbytes: the number of bytes which will be represented by the returned
        string (defaulting to 20, the length of a SHA1 hash)
    @rtype: str
    @return: an hex representation of the pseudo-random sequence

    """
    # os.urandom provides OS-level randomness; str.encode("hex") is the
    # Python 2 hex codec, so the result is 2 * numbytes characters long.
    return os.urandom(numbytes).encode("hex")
def _MakeMacAddrRegexp(octets):
"""Builds a regular expression for verifying MAC addresses.
@type octets: integer
@param octets: How many octets to expect (1-6)
@return: Compiled regular expression
"""
assert octets > 0
assert octets <= 6
return re.compile("^%s$" % ":".join([_MAC_ADDR_OCTET_RE] * octets),
re.I)
#: Regular expression for a full MAC address (six octets)
_MAC_CHECK_RE = _MakeMacAddrRegexp(6)

#: Regular expression for half a MAC address (a three-octet prefix)
_MAC_PREFIX_CHECK_RE = _MakeMacAddrRegexp(3)
def _MacAddressCheck(check_re, mac, msg):
"""Checks a MAC address using a regular expression.
@param check_re: Compiled regular expression as returned by C{re.compile}
@type mac: string
@param mac: MAC address to be validated
@type msg: string
@param msg: Error message (%s will be replaced with MAC address)
"""
if check_re.match(mac):
return mac.lower()
raise errors.OpPrereqError(msg % mac, errors.ECODE_INVAL)
def NormalizeAndValidateMac(mac):
    """Normalizes and checks that a MAC address is valid (six octets).

    Only the colon-separated format is accepted; the result is normalized to
    all lower case.

    @type mac: string
    @param mac: MAC address to be validated
    @rtype: string
    @return: Normalized and validated MAC address
    @raise errors.OpPrereqError: If the MAC address isn't valid

    """
    return _MacAddressCheck(_MAC_CHECK_RE, mac, "Invalid MAC address '%s'")
def NormalizeAndValidateThreeOctetMacPrefix(mac):
    """Normalizes a potential MAC address prefix (three octets).

    Checks that the supplied string is a valid MAC address prefix of three
    colon-separated octets; the result is normalized to all lower case.

    @type mac: string
    @param mac: Prefix to be validated
    @rtype: string
    @return: Normalized and validated prefix
    @raise errors.OpPrereqError: If the MAC address prefix isn't valid

    """
    return _MacAddressCheck(_MAC_PREFIX_CHECK_RE, mac,
                            "Invalid MAC address prefix '%s'")
def SafeEncode(text):
    """Return a 'safe' version of a source string.

    This function mangles the input string and returns a version that
    should be safe to display/encode as ASCII.  To this end, we first
    convert it to ASCII using the 'backslashreplace' encoding which
    should get rid of any non-ASCII chars, and then we process it
    through a loop copied from the string repr sources in the python; we
    don't use string_escape anymore since that escapes single quotes and
    backslashes too, and that is too much; and that escaping is not
    stable, i.e. string_escape(string_escape(x)) != string_escape(x).

    @type text: str or unicode
    @param text: input data
    @rtype: str
    @return: a safe version of text

    """
    if isinstance(text, unicode):
        # only if unicode; if str already, we handle it below
        text = text.encode("ascii", "backslashreplace")
    resu = ""
    for char in text:
        c = ord(char)
        if char == "\t":
            resu += r"\t"
        elif char == "\n":
            resu += r"\n"
        elif char == "\r":
            # FIX: previously this emitted the three characters \'r (the
            # original used r'\'r'), inconsistent with the two-character
            # escapes used for \t and \n above.
            resu += r"\r"
        elif c < 32 or c >= 127: # non-printable
            resu += "\\x%02x" % (c & 0xff)
        else:
            resu += char
    return resu
def UnescapeAndSplit(text, sep=","):
    r"""Split and unescape a string based on a given separator.

    The separator itself can be escaped with a backslash in order to appear
    inside an element.  The escaping rules are (assuming comma being the
    separator):

      - a plain , separates the elements
      - a sequence \\\\, (double backslash plus comma) is handled as a
        backslash plus a separator comma
      - a sequence \, (backslash plus comma) is handled as a
        non-separator comma

    @type text: string
    @param text: the string to split
    @type sep: string
    @param sep: the separator
    @rtype: list
    @return: a list of strings

    """
    # Split naively first; escaping is dealt with afterwards.
    pending = text.split(sep)

    unescaped = []
    while pending:
        head = pending.pop(0)

        if head.endswith("\\"):
            trailing = len(head) - len(head.rstrip("\\"))
            if trailing % 2 == 1 and pending:
                # An odd number of trailing backslashes means the separator
                # was escaped; glue the next fragment back on and revisit the
                # result (it may itself end in backslashes).
                pending.insert(0, head + sep + pending.pop(0))
                continue

        # All backslashes are kept here; they are reduced below.
        unescaped.append(head)

    # Finally, replace backslash-something with something
    return [re.sub(r"\\(.)", r"\1", fragment) for fragment in unescaped]
def EscapeAndJoin(slist, sep=","):
    """Encode a list in a way parsable by UnescapeAndSplit.

    @type slist: list of strings
    @param slist: the strings to be encoded
    @rtype: string
    @return: the encoding of the list as a string

    """
    def _escape(value):
        # Double every backslash first, then protect the separator itself.
        value = re.sub(r"\\", r"\\\\", value)
        return re.sub("\\" + sep, "\\\\" + sep, value)

    return sep.join(map(_escape, slist))
def CommaJoin(names):
    """Nicely join a set of identifiers.

    @param names: set, list or tuple
    @return: a string with the formatted results

    """
    return ", ".join(map(str, names))
def FormatTime(val, usecs=None):
    """Formats a time value.

    @type val: float or None
    @param val: Timestamp as returned by time.time() (seconds since Epoch,
        1970-01-01 00:00:00 UTC)
    @type usecs: int or None
    @param usecs: microseconds appended as a fractional part when given
    @return: a string value or N/A if we don't have a valid timestamp

    """
    if val is None or not isinstance(val, (int, float)):
        return "N/A"

    # "%F %T" works on Linux, but is not guaranteed on all platforms.
    formatted = time.strftime("%F %T", time.localtime(val))

    if usecs is None:
        return formatted

    return "%s.%06d" % (formatted, usecs)
def FormatSeconds(secs):
    """Formats seconds for easier reading.

    @type secs: number
    @param secs: Number of seconds
    @rtype: string
    @return: Formatted seconds (e.g. "2d 9h 19m 49s")

    """
    parts = []

    secs = round(secs, 0)

    if secs > 0:
        # Negative values would be a bit tricky
        for unit, seconds_per_unit in (("d", 24 * 60 * 60),
                                       ("h", 60 * 60),
                                       ("m", 60)):
            (complete, secs) = divmod(secs, seconds_per_unit)
            # Once a unit was emitted, keep emitting the smaller ones even
            # when they are zero, so e.g. "1h 0m 5s" stays unambiguous.
            if complete or parts:
                parts.append("%d%s" % (complete, unit))

    parts.append("%ds" % secs)

    return " ".join(parts)
class LineSplitter(object):
    """Splits incoming data chunks into newline-separated lines.

    Instances provide a file-like interface (C{write}, C{flush}, C{close}).

    """
    def __init__(self, line_fn, *args):
        """Initializes this class.

        @type line_fn: callable
        @param line_fn: Function called for each complete line, the line being
            its first parameter
        @param args: Extra arguments passed to L{line_fn} on every call

        """
        assert callable(line_fn)

        if args:
            # Python 2.4 doesn't have functools.partial yet
            self._line_fn = lambda line: line_fn(line, *args)  # pylint: disable=W0142
        else:
            self._line_fn = line_fn

        self._lines = collections.deque()
        self._buffer = ""

    def write(self, data):
        """Accepts a chunk of data, queueing any complete lines it contains."""
        fragments = (self._buffer + data).split("\n")

        # The last fragment is either empty (the chunk ended with a newline)
        # or an incomplete line continued by the next write.
        self._buffer = fragments.pop()
        self._lines.extend(fragments)

    def flush(self):
        """Invokes the callback for every complete queued line."""
        while self._lines:
            self._line_fn(self._lines.popleft().rstrip("\r\n"))

    def close(self):
        """Flushes queued lines, then emits any unterminated trailing data."""
        self.flush()
        if self._buffer:
            self._line_fn(self._buffer)
def IsValidShellParam(word):
    """Verifies whether the given word is safe from the shell's p.o.v.

    A safe word can be passed to a command via the shell without altering the
    command line, i.e. it reaches the actual command as-is.  Note that the
    check is overly restrictive on purpose, in order to stay on the safe
    side.

    @type word: str
    @param word: the word to check
    @rtype: boolean
    @return: True if the word is 'safe'

    """
    return bool(re.match(r"^[-a-zA-Z0-9._+/:%@]+$", word))
def BuildShellCmd(template, *args):
    """Build a safe shell command line from the given arguments.

    Every argument is checked with L{IsValidShellParam} so that it contains
    no shell metacharacters; if all of them are safe, the result of
    C{template % args} is returned.

    @type template: str
    @param template: the string holding the template for the
        string formatting
    @rtype: str
    @return: the expanded command line
    @raise errors.ProgrammerError: if any argument is not shell-safe

    """
    for arg in args:
        if not IsValidShellParam(arg):
            raise errors.ProgrammerError("Shell argument '%s' contains"
                                         " invalid characters" % arg)
    return template % args
def FormatOrdinal(value):
    """Formats a number as an ordinal in the English language.

    E.g. the number 1 becomes "1st", 22 becomes "22nd", 111 becomes "111th".

    @type value: integer
    @param value: Number
    @rtype: string

    """
    tens = value % 10

    # Numbers ending in 11..19 always take "th" (11th, 112th, ...).  Testing
    # modulo 100 fixes values above 100, which the previous
    # "10 < value < 20" check got wrong (it produced e.g. "111st").
    if 10 < value % 100 < 20:
        suffix = "th"
    elif tens == 1:
        suffix = "st"
    elif tens == 2:
        suffix = "nd"
    elif tens == 3:
        suffix = "rd"
    else:
        suffix = "th"

    return "%s%s" % (value, suffix)
def Truncate(text, length):
    """Truncate string and add ellipsis if needed.

    @type text: string
    @param text: Text
    @type length: integer
    @param length: Desired length
    @rtype: string
    @return: Truncated text

    """
    assert length > len(_ASCII_ELLIPSIS)

    # Serialize if necessary
    if not isinstance(text, basestring):
        text = str(text)

    if len(text) <= length:
        return text

    # Reserve room for the ellipsis so the result is exactly 'length' long.
    return text[:length - len(_ASCII_ELLIPSIS)] + _ASCII_ELLIPSIS
def FilterEmptyLinesAndComments(text):
    """Filters empty lines and comments from a line-based string.

    Whitespace is also removed from the beginning and end of all lines.

    @type text: string
    @param text: Input string
    @rtype: list

    """
    result = []
    for raw_line in text.splitlines():
        stripped = raw_line.strip()
        # Ignore empty lines and comments
        if stripped and not stripped.startswith("#"):
            result.append(stripped)
    return result
def FormatKeyValue(data):
    """Formats a dictionary as "key=value" parameters.

    The keys are sorted to have a stable order.

    @type data: dict
    @rtype: list of string

    """
    return ["%s=%s" % item for item in sorted(data.items())]
|
bsd-2-clause
|
rizumu/django
|
tests/proxy_model_inheritance/tests.py
|
278
|
1764
|
from __future__ import absolute_import, unicode_literals
import os
from django.core.management import call_command
from django.test import TestCase, TransactionTestCase
from django.test.utils import extend_sys_path
from django.utils._os import upath
from .models import (
ConcreteModel, ConcreteModelSubclass, ConcreteModelSubclassProxy,
)
class ProxyModelInheritanceTests(TransactionTestCase):
    """
    Proxy model inheritance across apps can result in migrate not creating the table
    for the proxied model (as described in #12286). This test creates two dummy
    apps and calls migrate, then verifies that the table has been created.
    """
    # No apps are required up-front; app1/app2 are appended to INSTALLED_APPS
    # inside the test itself.
    available_apps = []

    def test_table_exists(self):
        # The dummy apps live next to this file, so the test directory must be
        # importable while they are installed.
        with extend_sys_path(os.path.dirname(os.path.abspath(upath(__file__)))):
            with self.modify_settings(INSTALLED_APPS={'append': ['app1', 'app2']}):
                call_command('migrate', verbosity=0, run_syncdb=True)
                # Import the models only after migrate, so their tables exist.
                from app1.models import ProxyModel
                from app2.models import NiceModel
                # Any query would fail if the table were missing; count()
                # succeeding proves both tables were created.
                self.assertEqual(NiceModel.objects.all().count(), 0)
                self.assertEqual(ProxyModel.objects.all().count(), 0)
class MultiTableInheritanceProxyTest(TestCase):

    def test_model_subclass_proxy(self):
        """
        Deleting an instance of a model proxying a multi-table inherited
        subclass should cascade delete down the whole inheritance chain (see
        #18083).
        """
        obj = ConcreteModelSubclassProxy.objects.create()
        obj.delete()
        # Nothing may survive at any level of the inheritance chain.
        for model in (ConcreteModelSubclassProxy, ConcreteModelSubclass,
                      ConcreteModel):
            self.assertEqual(0, model.objects.count())
|
bsd-3-clause
|
anandology/pyjamas
|
examples/misc/djangoweb/settings.py
|
12
|
2950
|
# Django settings for pyjsDemo project.

import os

# Filesystem path to the Pyjamas compiler output, served as static media.
STATIC = str(os.path.join(os.path.dirname(__file__), 'media/output').replace('\\','/'))

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)

MANAGERS = ADMINS

DATABASE_ENGINE = 'mysql' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'webpages' # Or path to database file if using sqlite3.
DATABASE_USER = 'webpages' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''

# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'

# Make this unique, and don't share it with anybody.
# NOTE(review): this key is committed to the repository; regenerate it for
# any non-demo deployment.
SECRET_KEY = '#*jv)6zbb15!9z8oru*3irida-24@_5+ib$k6$-&k&oy84ww87'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
#     'django.template.loaders.eggs.load_template_source',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

ROOT_URLCONF = 'djangoweb.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

INSTALLED_APPS = (
    # The stock contrib apps are intentionally disabled for this demo.
    #'django.contrib.auth',
    #'django.contrib.contenttypes',
    #'django.contrib.sessions',
    #'django.contrib.sites',
    'djangoweb.webpages',
    'django_evolution',
)
|
apache-2.0
|
nicememory/pie
|
pyglet/pyglet/extlibs/future/py2_3/future/builtins/iterators.py
|
70
|
1401
|
"""
This module is designed to be used as follows::
from future.builtins.iterators import *
And then, for example::
for i in range(10**15):
pass
for (a, b) in zip(range(10**15), range(-10**15, 0)):
pass
Note that this is standard Python 3 code, plus some imports that do
nothing on Python 3.
The iterators this brings in are::
- ``range``
- ``filter``
- ``map``
- ``zip``
On Python 2, ``range`` is a pure-Python backport of Python 3's ``range``
iterator with slicing support. The other iterators (``filter``, ``map``,
``zip``) are from the ``itertools`` module on Python 2. On Python 3 these
are available in the module namespace but not exported for * imports via
__all__ (zero no namespace pollution).
Note that these are also available in the standard library
``future_builtins`` module on Python 2 -- but not Python 3, so using
the standard library version is not portable, nor anywhere near complete.
"""
from __future__ import division, absolute_import, print_function
import itertools
from future import utils
if not utils.PY3:
filter = itertools.ifilter
map = itertools.imap
from future.types import newrange as range
zip = itertools.izip
__all__ = ['filter', 'map', 'range', 'zip']
else:
import builtins
filter = builtins.filter
map = builtins.map
range = builtins.range
zip = builtins.zip
__all__ = []
|
apache-2.0
|
jusdng/odoo
|
addons/sale_crm/__openerp__.py
|
260
|
2036
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Addon manifest: a single dict literal evaluated by the OpenERP/Odoo
# module loader; it declares metadata, dependencies and data files.
{
    'name': 'Opportunity to Quotation',
    'version': '1.0',
    'category': 'Hidden',
    'description': """
This module adds a shortcut on one or several opportunity cases in the CRM.
===========================================================================
This shortcut allows you to generate a sales order based on the selected case.
If different cases are open (a list), it generates one sale order by case.
The case is then closed and linked to the generated sales order.
We suggest you to install this module, if you installed both the sale and the crm
modules.
""",
    'author': 'OpenERP SA',
    'website': 'https://www.odoo.com/page/crm',
    'depends': ['sale', 'crm', 'web_kanban_gauge'],
    'data': [
        'wizard/crm_make_sale_view.xml',
        'sale_crm_view.xml',
        'security/sale_crm_security.xml',
        'security/ir.model.access.csv',
    ],
    'demo': [],
    'test': ['test/sale_crm.yml'],
    'installable': True,
    # Installed automatically once all modules in 'depends' are present.
    'auto_install': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ramezquitao/pyoptools
|
pyoptools/raytrace/_comp_lib/cylindrical_lens.py
|
2
|
2878
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# Copyright (c) 2007, Ricardo Amézquita Orozco
# All rights reserved.
#
# This software is provided without warranty under the terms of the GPLv3
# license included in LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license.
#
#
# Author: Ricardo Amézquita Orozco
# Description: Spherical lens definition module
# Symbols Defined: SphericalLens
# ------------------------------------------------------------------------------
#
"""
Definition of a spherical lens object and helper functions
"""
# from enthought.traits.api import Float, Instance, HasTraits, Trait
from pyoptools.raytrace.component import Component
from pyoptools.raytrace.surface import Cylindrical, Plane
from pyoptools.raytrace.shape import Rectangular
class CylindricalLens(Component):
    """Rectangular-shaped cylindrical lens.

    :param size: Size (sx, sy) of the lens in mm
    :type size: tuple(float, float)
    :param thickness: Thickness of the lens at the center
    :type thickness: float
    :param curvature_s1: Curvature of the anterior surface of the lens, in mm.
    :type curvature_s1: float
    :param curvature_s2: Curvature of the posterior surface of the lens, in mm.
    :type curvature_s2: float
    :param material: Material of the lens
    :type material: float or
        :class:`~pyoptools.raytrace.mat_lib.material.Material`'s subclass
        instance

    .. warning::
        This component is composed only by the front and back surfaces. The
        surfaces closing the edges are not implemented yet. User must take
        this in to account and avoid rays that can enter or leave the lense by
        the edges.
    """

    def __init__(self, size=(20, 20), thickness=10, curvature_s1=1. / 200, curvature_s2=1. / 200, *args, **kwargs):
        Component.__init__(self, *args, **kwargs)
        self.size = size
        self.thickness = thickness
        self.curvature_s1 = curvature_s1
        self.curvature_s2 = curvature_s2

        width, height = self.size

        def build_surface(curvature):
            # A zero curvature degenerates to a flat surface; each surface
            # gets its own Rectangular aperture instance.
            if curvature != 0.:
                return Cylindrical(shape=Rectangular(size=(width, height)),
                                   curvature=curvature)
            return Plane(shape=Rectangular(size=(width, height)))

        half_thickness = self.thickness / 2
        self.surflist["S1"] = (build_surface(self.curvature_s1),
                               (0, 0, -half_thickness), (0, 0, 0))
        self.surflist["S2"] = (build_surface(self.curvature_s2),
                               (0, 0, half_thickness), (0, 0, 0))
        # TODO: The lens is still open at the edges (side surfaces missing).
|
gpl-3.0
|
adamcik/mopidy
|
tests/core/test_playlists.py
|
4
|
15093
|
import unittest
from unittest import mock
from mopidy import backend, core
from mopidy.models import Playlist, Ref, Track
class BasePlaylistsTest(unittest.TestCase):
    """Shared fixture for core.playlists tests.

    Builds a Core with three backends:
    - backend1 ("dummy1") and backend2 ("dummy2"), each with a mocked
      PlaylistsProvider (``sp1`` / ``sp2``);
    - backend3 ("dummy3"), which has no playlists provider at all.
    """

    def setUp(self):  # noqa: N802
        # Playlist refs / playlists per backend; names intentionally overlap
        # across backends so merging behaviour can be checked.
        self.plr1a = Ref.playlist(name="A", uri="dummy1:pl:a")
        self.plr1b = Ref.playlist(name="B", uri="dummy1:pl:b")
        self.plr2a = Ref.playlist(name="A", uri="dummy2:pl:a")
        self.plr2b = Ref.playlist(name="B", uri="dummy2:pl:b")
        self.pl1a = Playlist(name="A", tracks=[Track(uri="dummy1:t:a")])
        self.pl1b = Playlist(name="B", tracks=[Track(uri="dummy1:t:b")])
        self.pl2a = Playlist(name="A", tracks=[Track(uri="dummy2:t:a")])
        self.pl2b = Playlist(name="B", tracks=[Track(uri="dummy2:t:b")])
        # Mocked providers: .as_list()/.lookup() return pykka-style futures,
        # hence the extra .return_value.get indirection.
        self.sp1 = mock.Mock(spec=backend.PlaylistsProvider)
        self.sp1.as_list.return_value.get.return_value = [
            self.plr1a,
            self.plr1b,
        ]
        self.sp1.lookup.return_value.get.side_effect = [self.pl1a, self.pl1b]
        self.sp2 = mock.Mock(spec=backend.PlaylistsProvider)
        self.sp2.as_list.return_value.get.return_value = [
            self.plr2a,
            self.plr2b,
        ]
        self.sp2.lookup.return_value.get.side_effect = [self.pl2a, self.pl2b]
        self.backend1 = mock.Mock()
        self.backend1.actor_ref.actor_class.__name__ = "Backend1"
        self.backend1.uri_schemes.get.return_value = ["dummy1"]
        self.backend1.playlists = self.sp1
        self.backend2 = mock.Mock()
        self.backend2.actor_ref.actor_class.__name__ = "Backend2"
        self.backend2.uri_schemes.get.return_value = ["dummy2"]
        self.backend2.playlists = self.sp2
        # A backend without the optional playlists provider
        self.backend3 = mock.Mock()
        self.backend3.uri_schemes.get.return_value = ["dummy3"]
        self.backend3.has_playlists().get.return_value = False
        self.backend3.playlists = None
        # backend3 is listed first on purpose: code must skip it gracefully.
        self.core = core.Core(
            mixer=None, backends=[self.backend3, self.backend1, self.backend2]
        )
class PlaylistTest(BasePlaylistsTest):
    """Routing behaviour of core.playlists across multiple backends:
    results are merged, operations are dispatched by URI scheme, and
    backends without playlist support are ignored."""

    def test_as_list_combines_result_from_backends(self):
        result = self.core.playlists.as_list()
        assert self.plr1a in result
        assert self.plr1b in result
        assert self.plr2a in result
        assert self.plr2b in result

    def test_as_list_ignores_backends_that_dont_support_it(self):
        self.sp2.as_list.return_value.get.side_effect = NotImplementedError
        result = self.core.playlists.as_list()
        assert len(result) == 2
        assert self.plr1a in result
        assert self.plr1b in result

    def test_get_items_selects_the_matching_backend(self):
        ref = Ref.track()
        self.sp2.get_items.return_value.get.return_value = [ref]
        result = self.core.playlists.get_items("dummy2:pl:a")
        assert [ref] == result
        assert not self.sp1.get_items.called
        self.sp2.get_items.assert_called_once_with("dummy2:pl:a")

    def test_get_items_with_unknown_uri_scheme_does_nothing(self):
        result = self.core.playlists.get_items("unknown:a")
        assert result is None
        assert not self.sp1.delete.called
        assert not self.sp2.delete.called

    def test_create_without_uri_scheme_uses_first_backend(self):
        playlist = Playlist()
        self.sp1.create.return_value.get.return_value = playlist
        result = self.core.playlists.create("foo")
        assert playlist == result
        self.sp1.create.assert_called_once_with("foo")
        assert not self.sp2.create.called

    def test_create_without_uri_scheme_ignores_none_result(self):
        # If the first backend yields None, creation falls through to the next.
        playlist = Playlist()
        self.sp1.create.return_value.get.return_value = None
        self.sp2.create.return_value.get.return_value = playlist
        result = self.core.playlists.create("foo")
        assert playlist == result
        self.sp1.create.assert_called_once_with("foo")
        self.sp2.create.assert_called_once_with("foo")

    def test_create_without_uri_scheme_ignores_exception(self):
        # A failing backend must not prevent the next one from creating.
        playlist = Playlist()
        self.sp1.create.return_value.get.side_effect = Exception
        self.sp2.create.return_value.get.return_value = playlist
        result = self.core.playlists.create("foo")
        assert playlist == result
        self.sp1.create.assert_called_once_with("foo")
        self.sp2.create.assert_called_once_with("foo")

    def test_create_with_uri_scheme_selects_the_matching_backend(self):
        playlist = Playlist()
        self.sp2.create.return_value.get.return_value = playlist
        result = self.core.playlists.create("foo", uri_scheme="dummy2")
        assert playlist == result
        assert not self.sp1.create.called
        self.sp2.create.assert_called_once_with("foo")

    def test_create_with_unsupported_uri_scheme_uses_first_backend(self):
        # dummy3 has no playlists provider, so creation falls back to sp1.
        playlist = Playlist()
        self.sp1.create.return_value.get.return_value = playlist
        result = self.core.playlists.create("foo", uri_scheme="dummy3")
        assert playlist == result
        self.sp1.create.assert_called_once_with("foo")
        assert not self.sp2.create.called

    def test_delete_selects_the_dummy1_backend(self):
        success = self.core.playlists.delete("dummy1:a")
        assert success
        self.sp1.delete.assert_called_once_with("dummy1:a")
        assert not self.sp2.delete.called

    def test_delete_selects_the_dummy2_backend(self):
        success = self.core.playlists.delete("dummy2:a")
        assert success
        assert not self.sp1.delete.called
        self.sp2.delete.assert_called_once_with("dummy2:a")

    def test_delete_with_unknown_uri_scheme_does_nothing(self):
        success = self.core.playlists.delete("unknown:a")
        assert not success
        assert not self.sp1.delete.called
        assert not self.sp2.delete.called

    def test_delete_ignores_backend_without_playlist_support(self):
        success = self.core.playlists.delete("dummy3:a")
        assert not success
        assert not self.sp1.delete.called
        assert not self.sp2.delete.called

    def test_lookup_selects_the_dummy1_backend(self):
        self.core.playlists.lookup("dummy1:a")
        self.sp1.lookup.assert_called_once_with("dummy1:a")
        assert not self.sp2.lookup.called

    def test_lookup_selects_the_dummy2_backend(self):
        self.core.playlists.lookup("dummy2:a")
        assert not self.sp1.lookup.called
        self.sp2.lookup.assert_called_once_with("dummy2:a")

    def test_lookup_track_in_backend_without_playlists_fails(self):
        result = self.core.playlists.lookup("dummy3:a")
        assert result is None
        assert not self.sp1.lookup.called
        assert not self.sp2.lookup.called

    def test_refresh_without_uri_scheme_refreshes_all_backends(self):
        self.core.playlists.refresh()
        self.sp1.refresh.assert_called_once_with()
        self.sp2.refresh.assert_called_once_with()

    def test_refresh_with_uri_scheme_refreshes_matching_backend(self):
        self.core.playlists.refresh(uri_scheme="dummy2")
        assert not self.sp1.refresh.called
        self.sp2.refresh.assert_called_once_with()

    def test_refresh_with_unknown_uri_scheme_refreshes_nothing(self):
        self.core.playlists.refresh(uri_scheme="foobar")
        assert not self.sp1.refresh.called
        assert not self.sp2.refresh.called

    def test_refresh_ignores_backend_without_playlist_support(self):
        self.core.playlists.refresh(uri_scheme="dummy3")
        assert not self.sp1.refresh.called
        assert not self.sp2.refresh.called

    def test_save_selects_the_dummy1_backend(self):
        playlist = Playlist(uri="dummy1:a")
        self.sp1.save.return_value.get.return_value = playlist
        result = self.core.playlists.save(playlist)
        assert playlist == result
        self.sp1.save.assert_called_once_with(playlist)
        assert not self.sp2.save.called

    def test_save_selects_the_dummy2_backend(self):
        playlist = Playlist(uri="dummy2:a")
        self.sp2.save.return_value.get.return_value = playlist
        result = self.core.playlists.save(playlist)
        assert playlist == result
        assert not self.sp1.save.called
        self.sp2.save.assert_called_once_with(playlist)

    def test_save_does_nothing_if_playlist_uri_is_unset(self):
        result = self.core.playlists.save(Playlist())
        assert result is None
        assert not self.sp1.save.called
        assert not self.sp2.save.called

    def test_save_does_nothing_if_playlist_uri_has_unknown_scheme(self):
        result = self.core.playlists.save(Playlist(uri="foobar:a"))
        assert result is None
        assert not self.sp1.save.called
        assert not self.sp2.save.called

    def test_save_ignores_backend_without_playlist_support(self):
        result = self.core.playlists.save(Playlist(uri="dummy3:a"))
        assert result is None
        assert not self.sp1.save.called
        assert not self.sp2.save.called

    def test_get_uri_schemes(self):
        # Only schemes with an actual playlists provider are reported.
        result = self.core.playlists.get_uri_schemes()
        assert result == ["dummy1", "dummy2"]
class MockBackendCorePlaylistsBase(unittest.TestCase):
    """Fixture with a single mocked backend ("dummy" scheme) for the
    misbehaving-backend tests below."""

    def setUp(self):  # noqa: N802
        self.playlists = mock.Mock(spec=backend.PlaylistsProvider)
        self.backend = mock.Mock()
        # Name used by core's error messages; asserted on in subclasses.
        self.backend.actor_ref.actor_class.__name__ = "DummyBackend"
        self.backend.uri_schemes.get.return_value = ["dummy"]
        self.backend.playlists = self.playlists
        self.core = core.Core(mixer=None, backends=[self.backend])
@mock.patch("mopidy.core.playlists.logger")
class AsListBadBackendsTest(MockBackendCorePlaylistsBase):
def test_backend_raises_exception(self, logger):
self.playlists.as_list.return_value.get.side_effect = Exception
assert [] == self.core.playlists.as_list()
logger.exception.assert_called_with(mock.ANY, "DummyBackend")
def test_backend_returns_none(self, logger):
self.playlists.as_list.return_value.get.return_value = None
assert [] == self.core.playlists.as_list()
assert not logger.error.called
def test_backend_returns_wrong_type(self, logger):
self.playlists.as_list.return_value.get.return_value = "abc"
assert [] == self.core.playlists.as_list()
logger.error.assert_called_with(mock.ANY, "DummyBackend", mock.ANY)
@mock.patch("mopidy.core.playlists.logger")
class GetItemsBadBackendsTest(MockBackendCorePlaylistsBase):
def test_backend_raises_exception(self, logger):
self.playlists.get_items.return_value.get.side_effect = Exception
assert self.core.playlists.get_items("dummy:/1") is None
logger.exception.assert_called_with(mock.ANY, "DummyBackend")
def test_backend_returns_none(self, logger):
self.playlists.get_items.return_value.get.return_value = None
assert self.core.playlists.get_items("dummy:/1") is None
assert not logger.error.called
def test_backend_returns_wrong_type(self, logger):
self.playlists.get_items.return_value.get.return_value = "abc"
assert self.core.playlists.get_items("dummy:/1") is None
logger.error.assert_called_with(mock.ANY, "DummyBackend", mock.ANY)
@mock.patch("mopidy.core.playlists.logger")
class CreateBadBackendsTest(MockBackendCorePlaylistsBase):
def test_backend_raises_exception(self, logger):
self.playlists.create.return_value.get.side_effect = Exception
assert self.core.playlists.create("foobar") is None
logger.exception.assert_called_with(mock.ANY, "DummyBackend")
def test_backend_returns_none(self, logger):
self.playlists.create.return_value.get.return_value = None
assert self.core.playlists.create("foobar") is None
assert not logger.error.called
def test_backend_returns_wrong_type(self, logger):
self.playlists.create.return_value.get.return_value = "abc"
assert self.core.playlists.create("foobar") is None
logger.error.assert_called_with(mock.ANY, "DummyBackend", mock.ANY)
@mock.patch("mopidy.core.playlists.logger")
class DeleteBadBackendsTest(MockBackendCorePlaylistsBase):
def test_backend_raises_exception(self, logger):
self.playlists.delete.return_value.get.side_effect = Exception
assert not self.core.playlists.delete("dummy:/1")
logger.exception.assert_called_with(mock.ANY, "DummyBackend")
@mock.patch("mopidy.core.playlists.logger")
class LookupBadBackendsTest(MockBackendCorePlaylistsBase):
def test_backend_raises_exception(self, logger):
self.playlists.lookup.return_value.get.side_effect = Exception
assert self.core.playlists.lookup("dummy:/1") is None
logger.exception.assert_called_with(mock.ANY, "DummyBackend")
def test_backend_returns_none(self, logger):
self.playlists.lookup.return_value.get.return_value = None
assert self.core.playlists.lookup("dummy:/1") is None
assert not logger.error.called
def test_backend_returns_wrong_type(self, logger):
self.playlists.lookup.return_value.get.return_value = "abc"
assert self.core.playlists.lookup("dummy:/1") is None
logger.error.assert_called_with(mock.ANY, "DummyBackend", mock.ANY)
@mock.patch("mopidy.core.playlists.logger")
class RefreshBadBackendsTest(MockBackendCorePlaylistsBase):
@mock.patch("mopidy.core.listener.CoreListener.send")
def test_backend_raises_exception(self, send, logger):
self.playlists.refresh.return_value.get.side_effect = Exception
self.core.playlists.refresh()
assert not send.called
logger.exception.assert_called_with(mock.ANY, "DummyBackend")
@mock.patch("mopidy.core.listener.CoreListener.send")
def test_backend_raises_exception_called_with_uri(self, send, logger):
self.playlists.refresh.return_value.get.side_effect = Exception
self.core.playlists.refresh("dummy")
assert not send.called
logger.exception.assert_called_with(mock.ANY, "DummyBackend")
@mock.patch("mopidy.core.playlists.logger")
class SaveBadBackendsTest(MockBackendCorePlaylistsBase):
def test_backend_raises_exception(self, logger):
playlist = Playlist(uri="dummy:/1")
self.playlists.save.return_value.get.side_effect = Exception
assert self.core.playlists.save(playlist) is None
logger.exception.assert_called_with(mock.ANY, "DummyBackend")
def test_backend_returns_none(self, logger):
playlist = Playlist(uri="dummy:/1")
self.playlists.save.return_value.get.return_value = None
assert self.core.playlists.save(playlist) is None
assert not logger.error.called
def test_backend_returns_wrong_type(self, logger):
playlist = Playlist(uri="dummy:/1")
self.playlists.save.return_value.get.return_value = "abc"
assert self.core.playlists.save(playlist) is None
logger.error.assert_called_with(mock.ANY, "DummyBackend", mock.ANY)
|
apache-2.0
|
lizardsystem/lizard-esf
|
doc/source/conf.py
|
1
|
7078
|
# -*- coding: utf-8 -*-
# Note that not all possible configuration values are present in this
# autogenerated file.
# All configuration values have a default; values that are commented out
# serve to show the default.
import datetime
import os
import sys
from pkg_resources import parse_version
import pkginfo
def _egg_info(path_to_egg='../../'):
    """Read project metadata from the egg-info next to this file.

    :param path_to_egg: path of the package root, relative to this file.
    :returns: ``(name, author, version, release)`` where *release* is the
        full version string and *version* the short ``major.minor`` form
        that Sphinx expects.
    """
    path_to_egg = os.path.join(
        os.path.dirname(__file__), path_to_egg)
    egg_info = pkginfo.Develop(path_to_egg)
    release = egg_info.version
    parsed_version = parse_version(release)
    try:
        # Modern setuptools/packaging return a Version object, which is not
        # indexable; its numeric components live in the .release tuple.
        major, minor = parsed_version.release[:2]
    except AttributeError:
        # Legacy setuptools returned an indexable tuple of padded strings.
        major, minor = (int(x) for x in parsed_version[0:2])
    version = '%s.%s' % (major, minor)
    return egg_info.name, egg_info.author, version, release
# Project metadata is derived from the package's own egg-info so the docs
# never drift from setup.py.
project, author, version, release = _egg_info()
this_year = datetime.date.today().year
copyright = '%s, %s' % (this_year, author)

# Comment out one of these if we're a django project.
#os.environ['DJANGO_SETTINGS_MODULE'] = 'lizard_esf.testsettings'
#os.environ['DJANGO_SETTINGS_MODULE'] = 'lizard_esf.settings'

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.viewcode']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%Y-%m-%d'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project

# -- Options for LaTeX output --------------------------------------------------

# The paper size ('letter' or 'a4').
latex_paper_size = 'a4'

# The font size ('10pt', '11pt' or '12pt').
latex_font_size = '11pt'

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', '%s.tex' % project, u'%s Documentation' % project,
     author, 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Additional stuff for the LaTeX preamble.
#latex_preamble = ''

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
|
gpl-3.0
|
richarddzh/markdown-latex-tools
|
md2tex/md2tex.py
|
1
|
6180
|
'''
md2tex.py
- author: Richard Dong
- description: Convert markdown to latex
'''
from __future__ import print_function
import re
import io
import sys
import argparse
import markdown
class State:
    """Parser state for :class:`Handler`."""
    NORMAL = 0  # regular markdown-to-LaTeX conversion
    RAW = 1     # inside a "<!-- latex" block: echo lines verbatim
class Handler:
    """Markdown-event handler that emits LaTeX to stdout.

    ``markdown.Parser`` invokes the ``on_*`` callbacks. Per-element options
    (caption, label, float, columns, ...) arrive in-band through
    ``<!-- set key="value" -->`` comments, accumulate in ``self.vars`` and
    are consumed (``pop``) by the next element that uses them.
    """

    def __init__(self):
        # Pending key/value settings from "<!-- set ... -->" comments.
        self.vars = dict()
        self.state = State.NORMAL
        # Opens a raw-LaTeX passthrough region (closed by a "-->" comment).
        self._begin_latex = re.compile(r'^<!-- latex\s*$')
        # "<!-- set key="value" ... -->" configuration comments.
        self._set_vars = re.compile(r'^<!-- set(\s+\w+="[^"]+")+\s*-->$')
        self._var_pair = re.compile(r'(\w+)="([^"]+)"')
        # Characters that must be escaped in LaTeX text mode.
        self._escape = re.compile(r'(&|%|\$|_|\{|\})')
        # $$...$$ spans become inline math.
        self._inline_math = re.compile(r'\$\$(.+?)\$\$')
        # [cite@k1,k2] / [ref@k] become \cite{k1,k2} / \ref{k}.
        self._cite = re.compile(r'\[(cite|ref)@\s*([A-Za-z0-9:]+(\s*,\s*[A-Za-z0-9:]+)*)\]')
        self._bold = re.compile(r'\*\*(?!\s)(.+?)\*\*')

    def convert_text(self, text):
        """Convert markdown *text* to LaTeX, honouring $$...$$ math spans."""
        if len(text) == 0 or text.isspace(): return ''
        # Odd indices of the split are the captured math bodies.
        m = self._inline_math.split(text)
        s = ''
        for i in range(len(m)):
            if len(m[i]) == 0 or m[i].isspace(): continue
            if i % 2 == 0:
                text = self.convert_text_no_math(m[i])
            else:
                text = '$' + m[i] + '$'
            s = s + text
        return s

    def convert_text_no_math(self, text):
        """Convert non-math text, turning **bold** spans into \\textbf."""
        if len(text) == 0 or text.isspace(): return ''
        m = self._bold.split(text)
        s = ''
        for i in range(len(m)):
            if len(m[i]) == 0 or m[i].isspace(): continue
            if i % 2 == 0:
                text = self.convert_text_no_bold(m[i])
            else:
                text = '\\textbf{' + self.convert_text_no_bold(m[i]) + '}'
            s = s + text
        return s

    def convert_text_no_bold(self, text):
        """Escape LaTeX special characters and expand cite/ref shorthands."""
        text = self._escape.sub(r'\\\1', text)
        # NOTE(review): r'\\' is a two-character backslash pair, so this only
        # rewrites doubled backslashes, and it runs after _escape has already
        # inserted single backslashes -- confirm this ordering is intended.
        text = text.replace(r'\\', r'\textbackslash{}')
        text = self._cite.sub(r'\\\1{\2}', text)
        return text

    def print_label(self):
        # Emit and consume a pending \label, if one was set.
        if 'label' in self.vars:
            print('\\label{%s}' % self.vars.pop('label', 'nolabel'))

    def get_float_style(self):
        # Float placement specifier; '!h'/'h!' are normalised to '!ht'.
        fl = self.vars.pop('float', '!ht')
        if fl == '!h' or fl == 'h!':
            fl = '!ht'
        return fl

    def on_begin_table(self):
        """Open a table environment using pending caption/label/columns vars."""
        caption = self.convert_text(self.vars.pop('caption', ''))
        print('\\begin{table}[%s]' % self.get_float_style())
        print('\\caption{%s}' % caption)
        self.print_label()
        print('\\centering\\begin{tabular}{%s}\\hline' % self.vars.pop('columns', 'c'))

    def on_end_table(self):
        print('\\hline\\end{tabular}')
        print('\\end{table}')

    def on_text(self, text):
        print(self.convert_text(text))

    def on_comment(self, comment):
        """Dispatch HTML comments: raw-LaTeX regions and 'set' directives."""
        if self._begin_latex.match(comment):
            self.state = State.RAW
        elif self.state == State.RAW and '-->' in comment:
            self.state = State.NORMAL
        elif self.state == State.RAW:
            # Inside a raw region every comment line is emitted verbatim.
            print(comment)
        elif self._set_vars.match(comment):
            for (k, v) in self._var_pair.findall(comment):
                self.vars[k] = v

    def on_title(self, **arg):
        """Map heading level 1 to \\chapter, deeper levels to (sub)sections."""
        level = arg['level']
        title = self.convert_text(arg['title'])
        if level == 1:
            print('\\chapter{%s}' % title)
        else:
            print('\\%ssection{%s}' % ('sub' * (level - 2), title))

    def on_image(self, **arg):
        """Emit an image as figure, subfloat or raw \\includegraphics,
        depending on the pending 'style' var."""
        url = arg['url']
        caption = self.convert_text(arg['caption'])
        style = self.vars.pop('style', 'figure')
        url = self.vars.pop('url', url)
        width = self.vars.pop('width', '0.5')
        endline = self.vars.pop('endline', '')
        if style == 'figure':
            print('\\begin{figure}[%s]' % self.get_float_style())
            print('\\centering\\includegraphics[width=%s\\linewidth]{%s}\\caption{%s}' % (width, url, caption))
            self.print_label()
            print('\\end{figure}')
        elif style == 'subfloat':
            print('\\subfloat[%s]{\\includegraphics[width=%s\\linewidth]{%s}' % (caption, width, url))
            self.print_label();
            print('}%s' % endline)
        elif style == 'raw':
            print('\\includegraphics[width=%s\\linewidth]{%s}%s' % (width, url, endline))

    def on_table_line(self):
        print('\\hline')

    def on_table_row(self, row):
        row = [self.convert_text(x) for x in row]
        print(' & '.join(row) + ' \\\\')

    def on_begin_equation(self):
        print('\\begin{equation}')
        self.print_label()

    def on_end_equation(self):
        print('\\end{equation}')

    def on_equation(self, equ):
        # Equation bodies are passed through untouched.
        print(equ)

    def on_begin_list(self, sym):
        # A numeric bullet means an ordered list.
        if sym[0].isdigit():
            print('\\begin{enumerate}')
        else:
            print('\\begin{itemize}')

    def on_end_list(self, sym):
        if sym[0].isdigit():
            print('\\end{enumerate}')
        else:
            print('\\end{itemize}')

    def on_list_item(self, sym):
        print('\\item ', end='')

    def on_include(self, filename):
        print('\\input{%s.tex}' % filename)

    def on_begin_code(self, lang):
        """Open a code block: algorithm2e for 'algorithm', lstlisting else."""
        params = list()
        if lang and not lang.isspace():
            params.append('language=%s' % lang)
        caption = self.convert_text(self.vars.pop('caption', ''))
        if caption and not caption.isspace():
            params.append('caption={%s}' % caption)
        params = ','.join(params)
        if params and not params.isspace():
            params = '[' + params + ']'
        if lang == 'algorithm':
            # Remember the flavour so on_end_code closes the right env.
            self.vars['lang'] = 'algorithm'
            print('\\begin{algorithm}[%s]' % self.get_float_style())
            print('\\caption{%s}' % caption)
            self.print_label()
            print('\\setstretch{1.3}')
            print('\\SetKwProg{Fn}{function}{}{end}')
        else:
            print('\\begin{lstlisting}' + params)

    def on_end_code(self):
        lang = self.vars.pop('lang', '')
        if lang == 'algorithm':
            print('\\end{algorithm}')
        else:
            print('\\end{lstlisting}')

    def on_code(self, code):
        print(code)
# ---- command-line driver -------------------------------------------------
parser = argparse.ArgumentParser(description='convert markdown to latex.')
parser.add_argument('-c', dest='encoding', help='file encoding', default='utf8')
parser.add_argument('-o', dest='output', help='output file')
parser.add_argument('file', nargs='*', help='input files')
args = parser.parse_args()

# Handler writes via print(); redirecting sys.stdout routes all LaTeX
# output to the requested file.
if args.output is not None:
    sys.stdout = io.open(args.output, mode='wt', encoding=args.encoding)

for f in args.file:
    # Fresh parser/handler per input file so state does not leak across files.
    p = markdown.Parser()
    p.handler = Handler()
    with io.open(f, mode='rt', encoding=args.encoding) as fi:
        for line in fi:
            p.parse_line(line)
        p.parse_line('')  # empty line flushes the parser's pending block

if not args.file:
    # No files given: read markdown from standard input instead.
    p = markdown.Parser()
    p.handler = Handler()
    for line in sys.stdin:
        p.parse_line(line)
    p.parse_line('')
|
gpl-2.0
|
drawks/ansible
|
lib/ansible/plugins/netconf/ce.py
|
10
|
7483
|
#
# (c) 2017 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import re
from ansible.module_utils._text import to_text, to_bytes, to_native
from ansible.errors import AnsibleConnectionFailure
from ansible.plugins.netconf import NetconfBase
from ansible.plugins.netconf import ensure_connected, ensure_ncclient
try:
from ncclient import manager
from ncclient.operations import RPCError
from ncclient.transport.errors import SSHUnknownHostError
from ncclient.xml_ import to_ele, to_xml, new_ele
HAS_NCCLIENT = True
except (ImportError, AttributeError): # paramiko and gssapi are incompatible and raise AttributeError not ImportError
HAS_NCCLIENT = False
class Netconf(NetconfBase):
    """NETCONF plugin for Huawei CloudEngine ('ce') devices.

    A thin delegation layer: most methods forward to the underlying
    ncclient manager (``self.m``), return the reply XML, and convert
    ncclient ``RPCError`` exceptions into plain ``Exception`` objects
    carrying the error reply XML.
    """
    @ensure_ncclient
    def get_text(self, ele, tag):
        """Return the stripped text of the first *tag* child of *ele*,
        or None (implicitly) when the tag is absent."""
        try:
            return to_text(ele.find(tag).text, errors='surrogate_then_replace').strip()
        except AttributeError:
            pass
    @ensure_ncclient
    def get_device_info(self):
        """Query the device and return a dict of network_os facts."""
        # NOTE(review): the RPC name 'get-software-information' and the
        # reply fields below ('software-information', 'ce-version',
        # 'host-name', 'product-model') look copied from the Juniper
        # junos plugin — confirm they match the CloudEngine data model.
        device_info = dict()
        device_info['network_os'] = 'ce'
        ele = new_ele('get-software-information')
        data = self.execute_rpc(to_xml(ele))
        reply = to_ele(to_bytes(data, errors='surrogate_or_strict'))
        sw_info = reply.find('.//software-information')
        device_info['network_os_version'] = self.get_text(sw_info, 'ce-version')
        device_info['network_os_hostname'] = self.get_text(sw_info, 'host-name')
        device_info['network_os_model'] = self.get_text(sw_info, 'product-model')
        return device_info
    @ensure_connected
    def execute_rpc(self, name):
        """Execute an RPC on the remote device.

        :name: name of the rpc in string format"""
        return self.rpc(name)
    @ensure_ncclient
    @ensure_connected
    def load_configuration(self, *args, **kwargs):
        """Load the given configuration onto the device.

        :format: format of the configuration (xml, text, set)
        :action: action to be performed (merge, replace, override, update)
        :target: name of the configuration datastore being edited
        :config: the configuration in string format."""
        if kwargs.get('config'):
            kwargs['config'] = to_bytes(kwargs['config'], errors='surrogate_or_strict')
            if kwargs.get('format', 'xml') == 'xml':
                # ncclient expects an XML element, not a raw string
                kwargs['config'] = to_ele(kwargs['config'])
        try:
            return self.m.load_configuration(*args, **kwargs).data_xml
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))
    def get_capabilities(self):
        """Return a JSON string describing available rpc methods, device
        facts and the NETCONF capabilities exchanged with the server."""
        result = dict()
        # NOTE(review): 'copy_copy' looks like a typo for 'copy_config';
        # left unchanged because consumers may match the advertised string.
        result['rpc'] = self.get_base_rpc() + ['commit', 'discard_changes', 'validate', 'lock', 'unlock', 'copy_copy',
                                               'execute_rpc', 'load_configuration', 'get_configuration', 'command',
                                               'reboot', 'halt']
        result['network_api'] = 'netconf'
        result['device_info'] = self.get_device_info()
        result['server_capabilities'] = [c for c in self.m.server_capabilities]
        result['client_capabilities'] = [c for c in self.m.client_capabilities]
        result['session_id'] = self.m.session_id
        return json.dumps(result)
    @staticmethod
    @ensure_ncclient
    def guess_network_os(obj):
        """Open a throwaway ncclient session and return 'ce' when any
        server capability string mentions 'huawei', else None."""
        try:
            m = manager.connect(
                host=obj._play_context.remote_addr,
                port=obj._play_context.port or 830,
                username=obj._play_context.remote_user,
                password=obj._play_context.password,
                key_filename=obj.key_filename,
                hostkey_verify=obj.get_option('host_key_checking'),
                look_for_keys=obj.get_option('look_for_keys'),
                allow_agent=obj._play_context.allow_agent,
                timeout=obj.get_option('persistent_connect_timeout')
            )
        except SSHUnknownHostError as exc:
            raise AnsibleConnectionFailure(to_native(exc))
        guessed_os = None
        for c in m.server_capabilities:
            if re.search('huawei', c):
                guessed_os = 'ce'
                break
        m.close_session()
        return guessed_os
    @ensure_connected
    def get_configuration(self, *args, **kwargs):
        """Retrieve all or part of a specified configuration.

        :format: format in which the configuration should be retrieved
        :filter: specifies the portion of the configuration to retrieve
                 (by default the entire configuration is retrieved)"""
        return self.m.get_configuration(*args, **kwargs).data_xml
    @ensure_connected
    def compare_configuration(self, *args, **kwargs):
        """Compare configuration against a rollback point.

        :rollback: rollback id"""
        return self.m.compare_configuration(*args, **kwargs).data_xml
    @ensure_ncclient
    @ensure_connected
    def execute_action(self, xml_str):
        """Run a huawei execute-action RPC; returns the reply XML."""
        con_obj = None
        try:
            con_obj = self.m.action(action=xml_str)
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))
        return con_obj.xml
    @ensure_connected
    def halt(self):
        """halt the device"""
        return self.m.halt().data_xml
    @ensure_connected
    def reboot(self):
        """reboot the device"""
        return self.m.reboot().data_xml
    @ensure_ncclient
    @ensure_connected
    def get(self, *args, **kwargs):
        """NETCONF <get> operation; RPC errors re-raised with reply XML."""
        try:
            return self.m.get(*args, **kwargs).data_xml
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))
    @ensure_ncclient
    @ensure_connected
    def get_config(self, *args, **kwargs):
        """NETCONF <get-config> operation."""
        try:
            return self.m.get_config(*args, **kwargs).data_xml
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))
    @ensure_ncclient
    @ensure_connected
    def edit_config(self, *args, **kwargs):
        """NETCONF <edit-config> operation."""
        try:
            return self.m.edit_config(*args, **kwargs).xml
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))
    @ensure_ncclient
    @ensure_connected
    def execute_nc_cli(self, *args, **kwargs):
        """Run a CLI command over the NETCONF session."""
        try:
            return self.m.cli(*args, **kwargs).xml
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))
    @ensure_ncclient
    @ensure_connected
    def commit(self, *args, **kwargs):
        """NETCONF <commit> operation."""
        try:
            return self.m.commit(*args, **kwargs).data_xml
        except RPCError as exc:
            raise Exception(to_xml(exc.xml))
    @ensure_connected
    def validate(self, *args, **kwargs):
        """NETCONF <validate> operation."""
        return self.m.validate(*args, **kwargs).data_xml
    @ensure_connected
    def discard_changes(self, *args, **kwargs):
        """Discard uncommitted changes in the candidate datastore."""
        return self.m.discard_changes(*args, **kwargs).data_xml
|
gpl-3.0
|
nck0405/MyOwn
|
modules/tests/volunteer/export_volunteers.py
|
25
|
1890
|
# -*- coding: utf-8 -*-
""" Sahana Eden Volunteer Module Automated Tests
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from tests.web2unittest import SeleniumUnitTest
class ExportVolunteers(SeleniumUnitTest):
def test_export_volunteers(self):
print "\n"
# Login, if not-already done so
self.login(account="admin", nexturl="vol/volunteer/search")
#@ToDo: 1) Perform some sort of check to test the export works
# 2) Integrate this with the search test helper function so that the export is working for EVERY search
# 3) Extend the export to include xls, csv, xml
browser = self.browser
browser.find_element_by_xpath("//img[@src='/eden/static/img/pdficon_small.gif']").click()
|
mit
|
yongshengwang/hue
|
build/env/lib/python2.7/site-packages/Paste-2.0.1-py2.7.egg/paste/util/quoting.py
|
50
|
2219
|
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
import cgi
import six
import re
from six.moves import html_entities
from six.moves.urllib.parse import quote, unquote
__all__ = ['html_quote', 'html_unquote', 'url_quote', 'url_unquote',
'strip_html']
default_encoding = 'UTF-8'
def html_quote(v, encoding=None):
    r"""
    Quote the value (turned to a string) as HTML: escapes <, >, &
    and double quotes via cgi.escape(..., 1).
    """
    encoding = encoding or default_encoding
    if v is None:
        return ''
    if isinstance(v, six.binary_type):
        return cgi.escape(v, 1)
    # Anything else is stringified first, then (on Python 2) encoded
    # to bytes before escaping.
    if isinstance(v, six.text_type):
        text = v
    else:
        text = six.text_type(v)
    if not six.PY3:
        text = text.encode(encoding)
    return cgi.escape(text, 1)
_unquote_re = re.compile(r'&([a-zA-Z]+);')
def _entity_subber(match, name2c=html_entities.name2codepoint):
    """Regex callback: replace a named HTML entity with its character,
    or leave the original text when the name is unknown."""
    code = name2c.get(match.group(1))
    return six.unichr(code) if code else match.group(0)
def html_unquote(s, encoding=None):
    r"""
    Decode named HTML entity references in *s* back to characters.
    """
    text = s
    if isinstance(text, six.binary_type):
        text = text.decode(encoding or default_encoding)
    return _unquote_re.sub(_entity_subber, text)
def strip_html(s):
    """Remove markup tags from *s*, then decode entity references."""
    # should this use html_unquote?
    without_tags = re.sub('<.*?>', '', s)
    return html_unquote(without_tags)
def no_quote(s):
    """Identity "quoter": return *s* unchanged (useful as a plug-in
    replacement where a quoting callable is expected)."""
    return s
_comment_quote_re = re.compile(r'\-\s*\>')
# Everything but \r, \n, \t:
_bad_chars_re = re.compile('[\x00-\x08\x0b-\x0c\x0e-\x1f]')
def comment_quote(s):
    """
    Quote that makes sure text can't escape a comment
    """
    text = str(s)
    # Control-character stripping via _bad_chars_re is intentionally
    # disabled upstream; only the '- >' normalisation is applied.
    return _comment_quote_re.sub('->', text)
# URL quoting is just urllib's quote/unquote, re-exported under this
# module's naming scheme.
url_quote = quote
url_unquote = unquote
if __name__ == '__main__':
    # Run the doctests embedded in this module's docstrings.
    import doctest
    doctest.testmod()
|
apache-2.0
|
tecknicaltom/xhtml2pdf
|
demo/tgpisa/setup.py
|
168
|
2452
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from turbogears.finddata import find_package_data
import os
# release.py is expected to define (at least) `version`, used below;
# execfile() pulls its names into this namespace (Python 2 idiom).
execfile(os.path.join("tgpisa", "release.py"))
packages=find_packages()
package_data = find_package_data(where='tgpisa',
    package='tgpisa')
# Ship translation catalogs when a locales/ directory is present.
# NOTE(review): 'locales' is appended to the package list as-is —
# confirm this matches what the TurboGears quickstart template intends.
if os.path.isdir('locales'):
    packages.append('locales')
    package_data.update(find_package_data(where='locales',
        exclude=('*.po',), only_in_packages=False))
setup(
    name="tgpisa",
    version=version,
    # uncomment the following lines if you fill them out in release.py
    #description=description,
    #author=author,
    #author_email=email,
    #url=url,
    #download_url=download_url,
    #license=license,
    install_requires=[
        "TurboGears >= 1.0.4.3",
        "SQLObject>=0.8,<=0.10.0"
    ],
    zip_safe=False,
    packages=packages,
    package_data=package_data,
    keywords=[
        # Use keywords if you'll be adding your package to the
        # Python Cheeseshop
        # if this has widgets, uncomment the next line
        # 'turbogears.widgets',
        # if this has a tg-admin command, uncomment the next line
        # 'turbogears.command',
        # if this has identity providers, uncomment the next line
        # 'turbogears.identity.provider',
        # If this is a template plugin, uncomment the next line
        # 'python.templating.engines',
        # If this is a full application, uncomment the next line
        # 'turbogears.app',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Framework :: TurboGears',
        # if this is an application that you'll distribute through
        # the Cheeseshop, uncomment the next line
        # 'Framework :: TurboGears :: Applications',
        # if this is a package that includes widgets that you'll distribute
        # through the Cheeseshop, uncomment the next line
        # 'Framework :: TurboGears :: Widgets',
    ],
    test_suite='nose.collector',
    entry_points = {
        'console_scripts': [
            'start-tgpisa = tgpisa.commands:start',
        ],
    },
    # Uncomment next line and create a default.cfg file in your project dir
    # if you want to package a default configuration in your egg.
    #data_files = [('config', ['default.cfg'])],
    )
|
apache-2.0
|
Benocs/core
|
src/daemon/core/misc/ipaddr.py
|
1
|
15121
|
#
# CORE
#
# Copyright (c)2010-2012 the Boeing Company.
# See the LICENSE.BOEING file included in this distribution.
#
# author: Tom Goff <thomas.goff@boeing.com>
#
# Copyright (c) 2014 Benocs GmbH
#
# author: Robert Wuttke <robert@benocs.com>
#
# See the LICENSE file included in this distribution.
#
'''
ipaddr.py: helper objects for dealing with IPv4/v6 addresses.
'''
import socket
import struct
import random
import ipaddress
from core.constants import *
from core.misc.netid import NetIDNodeMap
from core.misc.netid import NetIDSubnetMap
AF_INET = socket.AF_INET
AF_INET6 = socket.AF_INET6
class MacAddr(object):
    """A 48-bit MAC address held as 6 raw bytes in ``self.addr``."""
    def __init__(self, addr):
        # addr: the raw 6-byte address (a bytes object)
        self.addr = addr
    def __str__(self):
        return ":".join([("%02x" % x) for x in self.addr])
    def tolinklocal(self):
        ''' Convert the MAC address to a IPv6 link-local address, using EUI 48
        to EUI 64 conversion process per RFC 5342.
        '''
        if not self.addr:
            return IPAddr.fromstring("::")
        # bug fix: the two-byte pad must be bytes (b'\x00\x00'), not str,
        # or the concatenation / struct.unpack fail on Python 3
        tmp = struct.unpack("!Q", b'\x00\x00' + self.addr)[0]
        nic = int(tmp) & 0x000000FFFFFF
        oui = int(tmp) & 0xFFFFFF000000
        # toggle U/L bit
        oui ^= 0x020000000000
        # append EUI-48 octets
        oui = (oui << 16) | 0xFFFE000000
        return IPAddr(AF_INET6, struct.pack("!QQ", 0xfe80 << 48, oui | nic))
    @classmethod
    def fromstring(cls, s):
        """Parse a colon-separated hex string such as "00:16:3e:01:02:03"."""
        # bug fix: build a bytes object (not a str of chr() results) so
        # __str__'s "%02x" formatting and tolinklocal's unpack work on
        # Python 3
        addr = bytes(int(x, 16) for x in s.split(":"))
        return cls(addr)
    @classmethod
    def random(cls):
        """Return a random MAC address under the Xen OID 00:16:3e."""
        tmp = random.randint(0, 0xFFFFFF)
        tmp |= 0x00163E << 24  # use the Xen OID 00:16:3E
        tmpbytes = struct.pack("!Q", tmp)
        return cls(tmpbytes[2:])
class IPAddr(object):
    """A single IPv4/IPv6 address plus a prefix length.

    Wraps an ``ipaddress`` address object in ``self.addr``; ``af`` is the
    socket address family (AF_INET or AF_INET6).  Raises ValueError for
    any (af, addr) pair the ``ipaddress`` module rejects.
    """
    def __init__(self, af, addr):
        # validate (af, addr) by letting the ipaddress module parse it
        tmp = None
        try:
            if af == AF_INET:
                tmp = ipaddress.IPv4Address(addr)
            elif af == AF_INET6:
                tmp = ipaddress.IPv6Address(addr)
            else:
                raise ValueError("invalid af/addr")
        except Exception:
            raise ValueError("invalid af/addr: \"%s\", \"%s\"" % (str(af),
                    str(addr)))
        self.af = af
        self.addr = tmp
        if af == AF_INET:
            # assume a /32 as default prefix length
            self.prefixlen = 32
        else:
            # assume a /128 as default prefix length
            self.prefixlen = 128
    def set_prefixlen(self, prefixlen):
        """Set the prefix length; must be an int."""
        if not isinstance(prefixlen, int):
            raise ValueError('prefixlen needs to be a number')
        self.prefixlen = prefixlen
    def get_prefixlen(self):
        """Return the current prefix length."""
        return self.prefixlen
    def isIPv4(self):
        """True when this address is IPv4."""
        return self.af == AF_INET
    def isIPv6(self):
        """True when this address is IPv6."""
        return self.af == AF_INET6
    def __repr__(self):
        return '%s/%d' % (self.addr.compressed, self.prefixlen)
    def __str__(self):
        return self.addr.compressed
    def __eq__(self, other):
        try:
            return self.addr == other.addr
        except Exception:
            return False
    def __add__(self, other):
        """Add an IPAddr of the same class or a plain int offset."""
        if not self.__class__ == other.__class__ and not isinstance(other, int):
            raise ValueError
        # NOTE(review): because of the guard above, the two
        # isinstance(other, ipaddress.*Address) branches below are
        # unreachable; kept for fidelity with the original structure.
        # NOTE(review): ipaddress objects only support address+int, so
        # self.addr + other.addr appears to raise TypeError — confirm
        # the intended semantics of adding two IPAddr values.
        if isinstance(other, IPAddr):
            if self.addr.version == 4:
                return IPAddr(AF_INET,
                        str(ipaddress.IPv4Address(self.addr + other.addr)))
            elif self.addr.version == 6:
                return IPAddr(AF_INET6,
                        str(ipaddress.IPv6Address(self.addr + other.addr)))
        elif isinstance(other, ipaddress.IPv4Address):
            return IPAddr(AF_INET, str(ipaddress.IPv4Address(self.addr + other)))
        elif isinstance(other, ipaddress.IPv6Address):
            return IPAddr(AF_INET6, str(ipaddress.IPv6Address(self.addr + other)))
        elif isinstance(other, int):
            if type(self) is IPAddr:
                # bug fix: the base-class constructor takes (af, addr), so
                # self.__class__(self.addr + other) raised TypeError here
                return IPAddr(self.af, self.addr + other)
            # subclasses (IPv4Addr/IPv6Addr) take a single addr argument
            return self.__class__(self.addr + other)
        else:
            return NotImplemented
    def __sub__(self, other):
        """Subtract another address by negating it and reusing __add__."""
        try:
            tmp = -int(other.addr)
        except Exception:
            return NotImplemented
        return self.__add__(tmp)
    def __le__(self, other):
        return self.addr.__le__(other.addr)
    def __lt__(self, other):
        return self.addr.__lt__(other.addr)
    @classmethod
    def fromstring(cls, s):
        """Parse a textual IPv4 or IPv6 address; re-raises the last parse
        error when neither family accepts the string."""
        err = None
        for af in AF_INET, AF_INET6:
            try:
                return cls(af, socket.inet_pton(af, s))
            except Exception as e:
                # bug fix: keep a reference outside the except block; on
                # Python 3 `e` is unbound once the handler exits, so the
                # original `raise e` after the loop raised NameError
                # instead of the real parse error
                err = e
        raise err
    @staticmethod
    def toint(s):
        ''' convert IPv4 string to 32-bit integer
        '''
        # bug fix: staticmethods have no `self`; use the argument (an
        # IPAddr instance) instead
        return int(s.addr)
class IPv4Addr(IPAddr):
    """IPAddr specialised to IPv4; takes just the address value."""
    def __init__(self, addr):
        IPAddr.__init__(self, AF_INET, addr)
class IPv6Addr(IPAddr):
    """IPAddr specialised to IPv6; takes just the address value."""
    def __init__(self, addr):
        IPAddr.__init__(self, AF_INET6, addr)
class IPPrefix(object):
    """An IPv4/IPv6 network prefix wrapping an ``ipaddress`` network object.

    ``prefixstr`` format: address/prefixlen (a bare address gets the
    family's full host prefix length).
    """
    def __init__(self, af, prefixstr):
        "prefixstr format: address/prefixlen"
        self.af = af
        if self.af == AF_INET:
            self.addrlen = 32
            self.prefix = ipaddress.IPv4Network(prefixstr, strict = False)
        elif self.af == AF_INET6:
            self.prefix = ipaddress.IPv6Network(prefixstr, strict = False)
            self.addrlen = 128
        else:
            raise ValueError("invalid address family: '%s'" % self.af)
        # re-parse the string to record the prefix length explicitly
        tmp = prefixstr.split("/")
        if len(tmp) > 2:
            raise ValueError("invalid prefix: '%s'" % prefixstr)
        if len(tmp) == 2:
            self.prefixlen = int(tmp[1])
        else:
            self.prefixlen = self.addrlen
    def __str__(self):
        return str(self.prefix)
    def __eq__(self, other):
        # comparison with non-prefix objects yields False, never raises
        try:
            return other.af == self.af and \
                other.prefixlen == self.prefixlen and \
                other.prefix == self.prefix
        except:
            return False
    def addr(self, hostid):
        """Return the IPAddr for host number *hostid* inside this prefix.

        Raises ValueError for host ids outside the prefix (including the
        IPv4 broadcast address)."""
        tmp = int(hostid)
        if (tmp == 1 or tmp == 0 or tmp == -1) and self.addrlen == self.prefixlen:
            # NOTE(review): this passes the network object itself to
            # IPAddr, which the ipaddress module appears to reject;
            # possibly intended int(self.prefix.network_address) — confirm.
            return IPAddr(self.af, self.prefix)
        if tmp == 0 or \
            tmp > (1 << (self.addrlen - self.prefixlen)) - 1 or \
            (self.af == AF_INET and tmp == (1 << (self.addrlen - self.prefixlen)) - 1):
            raise ValueError("invalid hostid for prefix %s: %s" % (str(self), str(hostid)))
        addr = IPAddr(self.af, int(self.prefix.network_address) + int(hostid))
        return addr
    def minaddr(self):
        """First usable host address (network address + 1)."""
        if self.af == AF_INET:
            return IPv4Addr(self.prefix.network_address + 1)
        elif self.af == AF_INET6:
            return IPv6Addr(self.prefix.network_address + 1)
        else:
            raise ValueError("invalid address family: '%s'" % self.af)
    def maxaddr(self):
        """Last usable host address (broadcast/last address - 1)."""
        if self.af == AF_INET:
            return IPv4Addr(self.prefix.broadcast_address - 1)
        elif self.af == AF_INET6:
            return IPv6Addr(self.prefix.broadcast_address - 1)
        else:
            raise ValueError("invalid address family: '%s'" % self.af)
    def numaddr(self):
        """Number of usable host addresses (total minus network/broadcast)."""
        return self.prefix.num_addresses - 2
    def prefixstr(self):
        """The prefix as "address/prefixlen"."""
        return '%s' % self.prefix
    def netmaskstr(self):
        """The netmask in dotted/colon notation."""
        return '%s' % self.prefix.netmask
class IPv4Prefix(IPPrefix):
    """IPPrefix fixed to the IPv4 address family."""
    def __init__(self, prefixstr):
        super().__init__(AF_INET, prefixstr)
class IPv6Prefix(IPPrefix):
    """IPPrefix fixed to the IPv6 address family."""
    def __init__(self, prefixstr):
        super().__init__(AF_INET6, prefixstr)
def isIPAddress(af, addrstr):
    """Return True when *addrstr* is (or parses as) an address of family *af*."""
    if af == AF_INET and isinstance(addrstr, IPv4Addr):
        return True
    if af == AF_INET6 and isinstance(addrstr, IPv6Addr):
        return True
    if isinstance(addrstr, IPAddr):
        return True
    try:
        # accept "addr" or "addr/prefixlen"; only the address part is parsed
        host = addrstr.partition('/')[0]
        socket.inet_pton(af, host)
        return True
    except:
        return False
def isIPv4Address(addrstr):
    """Return True when *addrstr* is (or parses as) an IPv4 address."""
    if isinstance(addrstr, IPv4Addr):
        return True
    if isinstance(addrstr, IPAddr):
        return isIPAddress(AF_INET, str(addrstr))
    return isIPAddress(AF_INET, addrstr)
def isIPv6Address(addrstr):
    """Return True when *addrstr* is (or parses as) an IPv6 address."""
    if isinstance(addrstr, IPv6Addr):
        return True
    if isinstance(addrstr, IPAddr):
        return isIPAddress(AF_INET6, str(addrstr))
    return isIPAddress(AF_INET6, addrstr)
class Interface():
    """Derive per-network interface subnets from the 'ipaddrs' section
    of the CONFIGS dict (populated from ipaddrs.conf elsewhere)."""
    @staticmethod
    def cfg_sanitation_checks(ipversion):
        """Raise ValueError unless every interface-related key this class
        needs is present and well-formed in CONFIGS['ipaddrs']."""
        interface_net = 'ipv%d_interface_net' % ipversion
        interface_net_per_netid = 'ipv%d_interface_net_per_netid' % ipversion
        interface_net_per_ptp_link = 'ipv%d_interface_net_per_ptp_link' % \
            ipversion
        interface_net_per_brdcst_link = 'ipv%d_interface_net_per_brdcst_link' %\
            ipversion
        if not 'ipaddrs' in CONFIGS or \
                not interface_net in CONFIGS['ipaddrs'] or \
                not len(CONFIGS['ipaddrs'][interface_net].split('/')) == 2 or \
                not interface_net_per_netid in CONFIGS['ipaddrs'] or \
                not interface_net_per_ptp_link in CONFIGS['ipaddrs'] or \
                not interface_net_per_brdcst_link in CONFIGS['ipaddrs']:
            raise ValueError('Could not read ipaddrs.conf')
    @staticmethod
    def getInterfaceNet(ipversion):
        """Return the global interface prefix (IPv4Prefix/IPv6Prefix)
        configured for the given IP version."""
        Interface.cfg_sanitation_checks(ipversion=ipversion)
        interface_net = 'ipv%d_interface_net' % ipversion
        # NOTE(review): interface_net_per_netid is assigned but never
        # used in this method.
        interface_net_per_netid = 'ipv%d_interface_net_per_netid' % ipversion
        if ipversion == 4:
            ipprefix_cls = IPv4Prefix
        elif ipversion == 6:
            ipprefix_cls = IPv6Prefix
        else:
            raise ValueError('IP version is neither 4 nor 6: %s' % str(ipversion))
        global_interface_prefix_str = CONFIGS['ipaddrs'][interface_net]
        global_prefixbase, global_prefixlen = global_interface_prefix_str.split('/')
        try:
            global_prefixlen = int(global_prefixlen)
        except ValueError:
            raise ValueError('Could not parse %s from ipaddrs.conf' % interface_net)
        global_interface_prefix = ipprefix_cls(global_interface_prefix_str)
        return global_interface_prefix
    @staticmethod
    def getInterfaceNet_per_net(sessionid, netid, ipversion):
        """Carve the netid-specific interface subnet out of the global
        interface prefix; the subnet index comes from NetIDSubnetMap."""
        Interface.cfg_sanitation_checks(ipversion=ipversion)
        interface_net = 'ipv%d_interface_net' % ipversion
        interface_net_per_netid = 'ipv%d_interface_net_per_netid' % ipversion
        if ipversion == 4:
            ipprefix_cls = IPv4Prefix
        elif ipversion == 6:
            ipprefix_cls = IPv6Prefix
        else:
            raise ValueError('IP version is neither 4 nor 6: %s' % str(ipversion))
        # local means per netid (e.g., AS)
        try:
            local_prefixlen = int(CONFIGS['ipaddrs'][interface_net_per_netid])
        except ValueError:
            raise ValueError('Could not parse %s from ipaddrs.conf' % interface_net_per_netid)
        global_interface_prefix = Interface.getInterfaceNet(ipversion)
        global_prefixbase, global_prefixlen = str(global_interface_prefix).split('/')
        subnet_id = NetIDSubnetMap.register_netid(sessionid, netid, ipversion)
        baseprefix = ipprefix_cls('%s/%d' % (global_prefixbase, local_prefixlen))
        # stride by the subnet size; the +2 re-adds network/broadcast
        # addresses that numaddr() subtracts
        target_network_baseaddr = baseprefix.minaddr() + ((subnet_id) * (baseprefix.numaddr() + 2))
        target_network_prefix = ipprefix_cls('%s/%d' % (target_network_baseaddr, local_prefixlen))
        return target_network_prefix
class Loopback():
    """Derive per-network loopback subnets and per-node loopback
    addresses from the 'ipaddrs' section of CONFIGS."""
    @staticmethod
    def cfg_sanitation_checks(ipversion):
        """Raise ValueError unless the loopback-related keys this class
        needs are present and well-formed in CONFIGS['ipaddrs']."""
        loopback_net = 'ipv%d_loopback_net' % ipversion
        loopback_net_per_netid = 'ipv%d_loopback_net_per_netid' % ipversion
        if not 'ipaddrs' in CONFIGS or \
                not loopback_net in CONFIGS['ipaddrs'] or \
                not len(CONFIGS['ipaddrs'][loopback_net].split('/')) == 2 or \
                not loopback_net_per_netid in CONFIGS['ipaddrs']:
            raise ValueError('Could not read ipaddrs.conf')
    @staticmethod
    def getLoopbackNet(ipversion):
        """Return the global loopback prefix (IPv4Prefix/IPv6Prefix)
        configured for the given IP version."""
        Loopback.cfg_sanitation_checks(ipversion=ipversion)
        loopback_net = 'ipv%d_loopback_net' % ipversion
        loopback_net_per_netid = 'ipv%d_loopback_net_per_netid' % ipversion
        if ipversion == 4:
            ipprefix_cls = IPv4Prefix
        elif ipversion == 6:
            ipprefix_cls = IPv6Prefix
        else:
            raise ValueError('IP version is neither 4 nor 6: %s' % str(ipversion))
        global_loopback_prefix_str = CONFIGS['ipaddrs'][loopback_net]
        global_prefixbase, global_prefixlen = global_loopback_prefix_str.split('/')
        try:
            global_prefixlen = int(global_prefixlen)
        except ValueError:
            raise ValueError('Could not parse %s from ipaddrs.conf' % loopback_net)
        global_loopback_prefix = ipprefix_cls(global_loopback_prefix_str)
        return global_loopback_prefix
    @staticmethod
    def getLoopbackNet_per_net(sessionid, netid, ipversion):
        """Carve the netid-specific loopback subnet out of the global
        loopback prefix; the subnet index comes from NetIDSubnetMap."""
        Loopback.cfg_sanitation_checks(ipversion=ipversion)
        loopback_net = 'ipv%d_loopback_net' % ipversion
        loopback_net_per_netid = 'ipv%d_loopback_net_per_netid' % ipversion
        if ipversion == 4:
            ipprefix_cls = IPv4Prefix
        elif ipversion == 6:
            ipprefix_cls = IPv6Prefix
        else:
            raise ValueError('IP version is neither 4 nor 6: %s' % str(ipversion))
        # local means per netid (e.g., AS)
        try:
            local_prefixlen = int(CONFIGS['ipaddrs'][loopback_net_per_netid])
        except ValueError:
            raise ValueError('Could not parse %s from ipaddrs.conf' % loopback_net_per_netid)
        global_loopback_prefix = Loopback.getLoopbackNet(ipversion)
        global_prefixbase, global_prefixlen = str(global_loopback_prefix).split('/')
        subnet_id = NetIDSubnetMap.register_netid(sessionid, netid, ipversion)
        baseprefix = ipprefix_cls('%s/%d' % (global_prefixbase, local_prefixlen))
        # stride by the subnet size; the +2 re-adds network/broadcast
        # addresses that numaddr() subtracts
        target_network_baseaddr = baseprefix.minaddr() + ((subnet_id) * (baseprefix.numaddr() + 2))
        target_network_prefix = ipprefix_cls('%s/%d' % (target_network_baseaddr, local_prefixlen))
        return target_network_prefix
    @staticmethod
    def getLoopback(node, ipversion):
        """Return the loopback address for *node*, allocated from its
        netid's loopback subnet via NetIDNodeMap."""
        Loopback.cfg_sanitation_checks(ipversion=ipversion)
        if hasattr(node, 'netid') and not node.netid is None:
            netid = node.netid
        else:
            # TODO: netid 0 is invalid - instead use first unused ASN
            node.warn('[LOOPBACK] no ASN found. falling back to default (0)')
            netid = 0
        target_network_prefix = Loopback.getLoopbackNet_per_net(
                node.session.sessionid, netid, ipversion)
        nodeid = NetIDNodeMap.register_node(node.session.sessionid,
                node.nodeid(), netid)
        addr = target_network_prefix.addr(nodeid)
        #node.info('[LOOPBACK] generated addr for node: %s: %s' % (node.name, str(addr)))
        return addr
    @staticmethod
    def getLoopbackIPv4(node):
        """Convenience wrapper: IPv4 loopback for *node*."""
        return Loopback.getLoopback(node, ipversion=4)
    @staticmethod
    def getLoopbackIPv6(node):
        """Convenience wrapper: IPv6 loopback for *node*."""
        return Loopback.getLoopback(node, ipversion=6)
|
bsd-3-clause
|
mancoast/CPythonPyc_test
|
cpython/262_test_largefile.py
|
58
|
6626
|
"""Test largefile support on system where this makes sense.
"""
import os
import stat
import sys
import unittest
from test.test_support import run_unittest, TESTFN, verbose, requires, \
TestSkipped, unlink
try:
import signal
# The default handler for SIGXFSZ is to abort the process.
# By ignoring it, system calls exceeding the file size resource
# limit will raise IOError instead of crashing the interpreter.
oldhandler = signal.signal(signal.SIGXFSZ, signal.SIG_IGN)
except (ImportError, AttributeError):
pass
# create >2GB file (2GB = 2147483648 bytes)
size = 2500000000L
class TestCase(unittest.TestCase):
    """Test that each file function works as expected for a large
    (i.e. > 2GB, do we have to check > 4GB) files.
    NOTE: the order of execution of the test methods is important! test_seek
    must run first to create the test file. File cleanup must also be handled
    outside the test instances because of this.
    """
    def test_seek(self):
        """Create the >2GB test file (sparse where supported) via seek."""
        if verbose:
            print 'create large file via seek (may be sparse file) ...'
        with open(TESTFN, 'wb') as f:
            f.write('z')
            f.seek(0)
            # seek past 2GB, then write one byte so the file really has
            # size `size + 1`
            f.seek(size)
            f.write('a')
            f.flush()
            if verbose:
                print 'check file size with os.fstat'
            self.assertEqual(os.fstat(f.fileno())[stat.ST_SIZE], size+1)
    def test_osstat(self):
        """os.stat must report the full >2GB size."""
        if verbose:
            print 'check file size with os.stat'
        self.assertEqual(os.stat(TESTFN)[stat.ST_SIZE], size+1)
    def test_seek_read(self):
        """Exercise seek()/tell()/read() with all whence modes beyond 2GB."""
        if verbose:
            print 'play around with seek() and read() with the built largefile'
        with open(TESTFN, 'rb') as f:
            self.assertEqual(f.tell(), 0)
            self.assertEqual(f.read(1), 'z')
            self.assertEqual(f.tell(), 1)
            f.seek(0)
            self.assertEqual(f.tell(), 0)
            f.seek(0, 0)
            self.assertEqual(f.tell(), 0)
            f.seek(42)
            self.assertEqual(f.tell(), 42)
            f.seek(42, 0)
            self.assertEqual(f.tell(), 42)
            f.seek(42, 1)
            self.assertEqual(f.tell(), 84)
            f.seek(0, 1)
            self.assertEqual(f.tell(), 84)
            f.seek(0, 2)  # seek from the end
            self.assertEqual(f.tell(), size + 1 + 0)
            f.seek(-10, 2)
            self.assertEqual(f.tell(), size + 1 - 10)
            f.seek(-size-1, 2)
            self.assertEqual(f.tell(), 0)
            f.seek(size)
            self.assertEqual(f.tell(), size)
            # the 'a' that was written at the end of file above
            self.assertEqual(f.read(1), 'a')
            f.seek(-size-1, 1)
            self.assertEqual(f.read(1), 'z')
            self.assertEqual(f.tell(), 1)
    def test_lseek(self):
        """Same offsets as test_seek_read, through the raw os.lseek API."""
        if verbose:
            print 'play around with os.lseek() with the built largefile'
        with open(TESTFN, 'rb') as f:
            self.assertEqual(os.lseek(f.fileno(), 0, 0), 0)
            self.assertEqual(os.lseek(f.fileno(), 42, 0), 42)
            self.assertEqual(os.lseek(f.fileno(), 42, 1), 84)
            self.assertEqual(os.lseek(f.fileno(), 0, 1), 84)
            self.assertEqual(os.lseek(f.fileno(), 0, 2), size+1+0)
            self.assertEqual(os.lseek(f.fileno(), -10, 2), size+1-10)
            self.assertEqual(os.lseek(f.fileno(), -size-1, 2), 0)
            self.assertEqual(os.lseek(f.fileno(), size, 0), size)
            # the 'a' that was written at the end of file above
            self.assertEqual(f.read(1), 'a')
    def test_truncate(self):
        """truncate() must shrink the file correctly past the 2GB mark."""
        if verbose:
            print 'try truncate'
        with open(TESTFN, 'r+b') as f:
            # this is already decided before start running the test suite
            # but we do it anyway for extra protection
            if not hasattr(f, 'truncate'):
                raise TestSkipped, "open().truncate() not available on this system"
            f.seek(0, 2)
            # else we've lost track of the true size
            self.assertEqual(f.tell(), size+1)
            # Cut it back via seek + truncate with no argument.
            newsize = size - 10
            f.seek(newsize)
            f.truncate()
            self.assertEqual(f.tell(), newsize)  # else pointer moved
            f.seek(0, 2)
            self.assertEqual(f.tell(), newsize)  # else wasn't truncated
            # Ensure that truncate(smaller than true size) shrinks
            # the file.
            newsize -= 1
            f.seek(42)
            f.truncate(newsize)
            self.assertEqual(f.tell(), 42)  # else pointer moved
            f.seek(0, 2)
            self.assertEqual(f.tell(), newsize)  # else wasn't truncated
            # XXX truncate(larger than true size) is ill-defined
            # across platform; cut it waaaaay back
            f.seek(0)
            f.truncate(1)
            self.assertEqual(f.tell(), 0)  # else pointer moved
            self.assertEqual(len(f.read()), 1)  # else wasn't truncated
def test_main():
    """Gate on platform/resource support, then run the test cases in the
    required order (test_seek builds the file the others depend on)."""
    # On Windows and Mac OSX this test comsumes large resources; It
    # takes a long time to build the >2GB file and takes >2GB of disk
    # space therefore the resource must be enabled to run this test.
    # If not, nothing after this line stanza will be executed.
    if sys.platform[:3] == 'win' or sys.platform == 'darwin':
        requires('largefile',
                 'test requires %s bytes and a long time to run' % str(size))
    else:
        # Only run if the current filesystem supports large files.
        # (Skip this test on Windows, since we now always support
        # large files.)
        f = open(TESTFN, 'wb')
        try:
            # 2**31 == 2147483648
            f.seek(2147483649L)
            # Seeking is not enough of a test: you must write and
            # flush, too!
            f.write("x")
            f.flush()
        except (IOError, OverflowError):
            f.close()
            unlink(TESTFN)
            raise TestSkipped, "filesystem does not have largefile support"
        else:
            f.close()
    # Run in a fixed order: test_seek creates the file the rest rely on.
    suite = unittest.TestSuite()
    suite.addTest(TestCase('test_seek'))
    suite.addTest(TestCase('test_osstat'))
    suite.addTest(TestCase('test_seek_read'))
    suite.addTest(TestCase('test_lseek'))
    # test_truncate is only meaningful where truncate() exists.
    with open(TESTFN, 'w') as f:
        if hasattr(f, 'truncate'):
            suite.addTest(TestCase('test_truncate'))
    unlink(TESTFN)
    try:
        run_unittest(suite)
    finally:
        # always remove the multi-GB scratch file
        unlink(TESTFN)
# Allow running this test file directly from the command line.
if __name__ == '__main__':
    test_main()
|
gpl-3.0
|
kenshay/ImageScript
|
ProgramData/SystemFiles/Python/Lib/site-packages/nbconvert/filters/tests/test_latex.py
|
21
|
1550
|
"""
Module with tests for Latex
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from ...tests.base import TestsBase
from ..latex import escape_latex
#-----------------------------------------------------------------------------
# Class
#-----------------------------------------------------------------------------
class TestLatex(TestsBase):
    """Tests for the escape_latex filter."""
    def test_escape_latex(self):
        """escape_latex test"""
        cases = [
            (r'How are \you doing today?', r'How are \textbackslash{}you doing today?'),
            (r'\escapechar=`\A\catcode`\|=0 |string|foo', r'\textbackslash{}escapechar=`\textbackslash{}A\textbackslash{}catcode`\textbackslash{}|=0 |string|foo'),
            (r'# $ % & ~ _ ^ \ { }', r'\# \$ \% \& \textasciitilde{} \_ \^{} \textbackslash{} \{ \}'),
            ('...', r'{\ldots}'),
            ('', ''),
        ]
        for source, expected in cases:
            self._try_escape_latex(source, expected)
    def _try_escape_latex(self, test, result):
        """Try to remove latex from string"""
        self.assertEqual(escape_latex(test), result)
|
gpl-3.0
|
twestbrookunh/paladin-plugins
|
core/main.py
|
1
|
13912
|
#! /usr/bin/env python3
"""
The MIT License
Copyright (c) 2017 by Anthony Westbrook, University of New Hampshire <anthony.westbrook@unh.edu>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Core module is responsible for plugin interopability, as well as standard API
import pkgutil
import importlib
import plugins
import pprint
import re
import sys
from core.filestore import FileStore
class PluginDef:
    """ Plugin definition to be populated by each plugin via its plugin_init method at load time """
    def __init__(self, module):
        """ Wrap an imported plugin module with empty metadata and callbacks.

        :param module: imported module handle of the plugin
        """
        # Plugin fields
        self.module = module
        self.name = ""          # human-readable plugin name
        self.description = ""   # short description shown in listings
        self.version_major = 0
        self.version_minor = 0
        self.version_revision = 0
        self.dependencies = list()  # names of plugins that must initialize first
        # Callbacks (registered by the plugin itself)
        self.callback_args = None   # argument parsing entry point
        self.callback_init = None   # one-time initialization
        self.callback_main = None   # main execution entry point
class SamEntry:
    """ Store data per individual SAM entry """
    # Column indices of the 11 mandatory SAM fields
    FIELD_QNAME = 0
    FIELD_FLAG = 1
    FIELD_RNAME = 2
    FIELD_POS = 3
    FIELD_MAPQ = 4
    FIELD_CIGAR = 5
    FIELD_RNEXT = 6
    FIELD_PNEXT = 7
    FIELD_TLEN = 8
    FIELD_SEQ = 9
    FIELD_QUAL = 10

    def __init__(self):
        self.query = ""
        self.flag = 0
        self.reference = ""
        self.pos = 0
        self.mapqual = 0
        self.cigar = ""
        self.nextref = ""
        self.nextpos = 0
        self.length = 0
        self.sequence = ""
        self.readqual = 0
        self.frame = ""

    @staticmethod
    def get_entries(filename, quality):
        """ Public API for obtaining SAM data (will handle caching internally) """
        # Parse and cache on first request for this (file, quality) pair
        if not (filename, quality) in SamEntry._entries:
            cache = SamEntry.populate_entries(filename, quality)
            SamEntry._entries[(filename, quality)] = cache
        return SamEntry._entries[(filename, quality)]

    @staticmethod
    def populate_entries(filename, quality):
        """ Parse a SAM file into SamEntry objects filtered for the requested quality.

        :param filename: path of the SAM file to read
        :param quality: minimum MAPQ required to keep a record, or -1 to
                        keep everything (including unmapped records)
        :return: dict mapping (read name, hit index) to SamEntry
        """
        ret_entries = dict()
        # Open SAM file
        with open(filename, "r") as handle:
            for line in handle:
                fields = line.rstrip().split("\t")
                # Skip header and malformed lines
                if line.startswith("@"):
                    continue
                if len(fields) < 11:
                    continue
                # Filter for minimum quality
                if fields[SamEntry.FIELD_RNAME] == "*" and quality != -1:
                    continue
                if quality != -1 and int(fields[SamEntry.FIELD_MAPQ]) < quality:
                    continue
                # Remove PALADIN frame header since best scoring frame may change
                # between alignments (assumes "a:b:c:readname" query names —
                # TODO confirm this format holds for all PALADIN output)
                header_match = re.search("(.*?:.*?:.*?:)(.*)", fields[SamEntry.FIELD_QNAME])
                entry = SamEntry()
                entry.query = header_match.group(2)
                entry.flag = int(fields[SamEntry.FIELD_FLAG])
                # Fill in entry information if mapped
                if entry.is_mapped:
                    entry.reference = fields[SamEntry.FIELD_RNAME]
                    entry.pos = SamEntry.get_sam_int(fields[SamEntry.FIELD_POS])
                    entry.mapqual = SamEntry.get_sam_int(fields[SamEntry.FIELD_MAPQ])
                    entry.cigar = fields[SamEntry.FIELD_CIGAR]
                    entry.nextref = fields[SamEntry.FIELD_RNEXT]
                    # BUGFIX: previously stored as str, while __init__ declares
                    # an int default — normalize like the other numeric fields
                    entry.nextpos = SamEntry.get_sam_int(fields[SamEntry.FIELD_PNEXT])
                    entry.length = SamEntry.get_sam_int(fields[SamEntry.FIELD_TLEN])
                    entry.sequence = fields[SamEntry.FIELD_SEQ]
                    entry.readqual = SamEntry.get_sam_int(fields[SamEntry.FIELD_QUAL])
                    entry.frame = header_match.group(1)
                # Each read can have multiple non-linear/chimeric hits - store as tuple for ease of processing
                read_base = header_match.group(2)
                hit_idx = 0
                while (read_base, hit_idx) in ret_entries:
                    hit_idx += 1
                ret_entries[(read_base, hit_idx)] = entry
        return ret_entries

    @staticmethod
    def get_sam_int(val):
        """ Convert a SAM numeric field to int, treating non-digit values as 0 """
        if val.isdigit():
            return int(val)
        return 0

    @property
    def is_mapped(self):
        """ True when this segment is mapped (SAM FLAG bit 0x4 clear).

        BUGFIX: this was a plain method, so the call site tested the bound
        method object (always truthy); additionally the bit test was
        inverted — FLAG 0x4 means "segment unmapped" per the SAM spec.
        """
        return self.flag & 0x04 == 0

    # Class-level cache keyed by (filename, quality)
    _entries = dict()
class PaladinEntry:
    """ PALADIN UniProt entry """
    # Column indices within a PALADIN UniProt report row
    FIELD_COUNT = 0
    FIELD_ABUNDANCE = 1
    FIELD_QUALAVG = 2
    FIELD_QUALMAX = 3
    FIELD_KB = 4
    FIELD_ID = 5
    FIELD_SPECIES = 6
    FIELD_PROTEIN = 7
    FIELD_ONTOLOGY = 11
    # Entry classification
    TYPE_UNKNOWN = 0
    TYPE_UNIPROT_EXACT = 1
    TYPE_UNIPROT_GROUP = 2
    TYPE_CUSTOM = 3

    def __init__(self):
        self.type = PaladinEntry.TYPE_UNKNOWN
        self.id = "Unknown"
        self.kb = "Unknown"
        self.count = 0
        self.abundance = 0.0
        self.quality_avg = 0.0
        self.quality_max = 0
        self.species_id = "Unknown"
        self.species_full = "Unknown"
        self.protein = "Unknown"
        self.ontology = list()

    @staticmethod
    def get_entries(filename, quality, pattern=None):
        """ Public API for obtaining UniProt report data (will handle caching internally) """
        # Parse and cache on first request for this (file, quality, pattern)
        if not (filename, quality, pattern) in PaladinEntry._entries:
            cache = PaladinEntry.populate_entries(filename, quality, pattern)
            PaladinEntry._entries[(filename, quality, pattern)] = cache
        return PaladinEntry._entries[(filename, quality, pattern)]

    @staticmethod
    def populate_entries(filename, quality, pattern):
        """ Parse a UniProt report into entries meeting the requested quality.

        :param filename: path to the PALADIN UniProt report (TSV with header)
        :param quality: minimum maximum-quality for an entry to be kept
        :param pattern: optional regex with one capture group used to
                        classify non-UniProt (custom) reference entries
        :return: dict mapping KB identifier to PaladinEntry
        """
        ret_entries = dict()
        # Open UniProt report, skip header
        with open(filename, "r") as handle:
            handle.readline()
            for line in handle:
                fields = line.rstrip().split("\t")
                # Filter for minimum quality
                if float(fields[PaladinEntry.FIELD_QUALMAX]) < quality:
                    continue
                entry = PaladinEntry()
                entry.count = int(fields[PaladinEntry.FIELD_COUNT])
                entry.abundance = float(fields[PaladinEntry.FIELD_ABUNDANCE])
                # BUGFIX: these previously populated `qual_avg`/`qual_max`,
                # leaving the attributes declared in __init__
                # (`quality_avg`/`quality_max`) stuck at their defaults.
                # The old names are kept as aliases for backward compatibility.
                entry.quality_avg = entry.qual_avg = float(fields[PaladinEntry.FIELD_QUALAVG])
                entry.quality_max = entry.qual_max = int(fields[PaladinEntry.FIELD_QUALMAX])
                entry.kb = fields[PaladinEntry.FIELD_KB]
                if len(fields) > 10:
                    # Existence of fields indicates a successful UniProt parse by PALADIN
                    if "_9" in entry.kb:
                        entry.type = PaladinEntry.TYPE_UNIPROT_GROUP
                    else:
                        entry.type = PaladinEntry.TYPE_UNIPROT_EXACT
                    entry.species_id = entry.kb.split("_")[1]
                    entry.species_full = fields[PaladinEntry.FIELD_SPECIES]
                    entry.id = fields[PaladinEntry.FIELD_ID]
                    entry.protein = fields[PaladinEntry.FIELD_PROTEIN]
                    entry.ontology = [term.strip() for term in fields[PaladinEntry.FIELD_ONTOLOGY].split(";")]
                else:
                    # Check for custom match
                    if pattern:
                        match = re.search(pattern, entry.kb)
                        if match:
                            entry.type = PaladinEntry.TYPE_CUSTOM
                            entry.species_id = match.group(1)
                            entry.species_full = match.group(1)
                ret_entries[fields[PaladinEntry.FIELD_KB]] = entry
        return ret_entries

    # Class-level cache keyed by (filename, quality, pattern)
    _entries = dict()
# Plugins internal to core, and loaded external modules
internal_plugins = dict()   # name -> callable (core-provided pseudo plugins)
plugin_modules = dict()     # name -> PluginDef for discovered external plugins
# Standard output and error buffers
output_stdout = list()
output_stderr = list()
# When True, send_output prints immediately instead of buffering
console_stdout = False
console_stderr = True
def connect_plugins(debug):
    """ Search for all modules in the plugin package (directory), import each and run plugin_connect method.

    :param debug: when True, re-raise plugin import errors instead of
                  logging and skipping the offending plugin
    """
    # Initialize File Store
    FileStore.init("pp-", "~/.paladin-plugins", ".", 30)
    # Add internal core plugins
    internal_plugins["flush"] = render_output
    internal_plugins["write"] = render_output
    # Import all external plugin modules in package (using full path)
    for importer, module, package in pkgutil.iter_modules(plugins.__path__):
        try:
            module_handle = importlib.import_module("{0}.{1}".format(plugins.__name__, module))
            # Only modules exposing a plugin_connect hook are treated as plugins
            if "plugin_connect" in dir(module_handle):
                plugin_modules[module] = PluginDef(module_handle)
        except Exception as exception:
            if debug:
                raise exception
            else:
                send_output("Error loading \"{0}.py\", skipping...".format(module), "stderr")
    # Connect to all external plugins
    for plugin in plugin_modules:
        plugin_modules[plugin].module.plugin_connect(plugin_modules[plugin])
def args_plugins(plugins):
    """ Invoke the argument-parsing callback of each requested plugin """
    for name in plugins:
        plugin_modules[name].callback_args()
def init_plugins(plugins):
    """ Initialize the plugins being used in this session.

    :param plugins: iterable of plugin names requested for this session
    :return: True when every requested plugin (and its dependencies) is
             known and initialized, False otherwise
    """
    init_queue = set()
    init_history = set()
    # Scan for plugins and dependencies
    for plugin in plugins:
        if plugin not in plugin_modules and plugin not in internal_plugins:
            # BUGFIX: the parameter `plugins` shadows the package of the
            # same name, so `plugins.modules_disabled` raised an
            # AttributeError here.  Import the package under an alias to
            # reach its `modules_disabled` attribute.
            import plugins as plugins_package
            if plugin in plugins_package.modules_disabled:
                print("Disabled plugin: {0}".format(plugin))
            else:
                print("Unknown plugin: {0}".format(plugin))
            return False
        # Queue external plugins along with their declared dependencies
        if plugin in plugin_modules:
            init_queue.update(plugin_modules[plugin].dependencies)
            init_queue.add(plugin)
    # Initialize each queued plugin exactly once
    for plugin in init_queue:
        if plugin_modules[plugin].callback_init:
            if plugin not in init_history:
                plugin_modules[plugin].callback_init()
                init_history.add(plugin)
    return True
def exec_pipeline(pipeline):
    """ Execute the requested plugin pipeline.

    Each task is a (plugin name, argument string) pair.  Internal plugins
    receive the raw (unquoted) argument string; external plugins go
    through their registered argument/init/main callbacks.
    """
    for task in pipeline:
        if task[0] in internal_plugins:
            # Internal plugin
            internal_plugins[task[0]](task[1].strip("\""))
        elif task[0] in plugin_modules:
            # External plugin
            plugin = plugin_modules[task[0]]
            # Process arguments (this may sys.exit if help mode)
            # BUGFIX: `args` was unbound when a plugin registered no
            # argument callback, raising NameError at callback_main below.
            args = None
            if plugin.callback_args:
                args = plugin.callback_args(task[1])
            # Process dependencies and initialization
            for dependency in [plugin_modules[x] for x in plugin.dependencies]:
                if dependency.callback_init:
                    dependency.callback_init()
            if plugin.callback_init:
                plugin.callback_init()
            # Execute
            if plugin.callback_main:
                plugin.callback_main(args)
        else:
            # Invalid plugin
            send_output("Invalid plugin \"{0}\"".format(task[0]), "stderr")
            sys.exit(1)
def render_output(filename="", target="stdout"):
""" The flush internal plugin handles rendering output (to stdout or file) """
if target == "stdout":
render_text = "".join(output_stdout)
del output_stdout[:]
if target == "stderr":
render_text = "".join(output_stderr)
del output_stderr[:]
if not filename:
std_target = sys.stdout if target == "stdout" else sys.stderr
print(render_text, flush=True, file=std_target)
else:
with open(filename, "w") as handle:
handle.write(render_text)
def send_output(output_text, target="stdout", suffix="\n"):
    """ API - Record output into the appropriate buffer """
    new_content = "{0}{1}".format(output_text, suffix)
    if target == "stdout":
        # Console mode prints immediately; buffered mode defers to flush/write
        if console_stdout:
            print(new_content, end="", flush=True)
        else:
            output_stdout.append(new_content)
        return
    # Anything other than "stdout" is routed to the stderr channel
    if console_stderr:
        print(new_content, end="", flush=True, file=sys.stderr)
    else:
        output_stderr.append(new_content)
def getInteger(val):
    """ API - Return int(val) when val parses as an integer (allows
    negatives), otherwise None.
    """
    try:
        return int(val)
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures mean None
        return None
def debugPrint(obj):
    """ API - Pretty-print an object for debugging """
    pprint.pprint(obj, indent=4)
|
mit
|
sahiljain/catapult
|
third_party/google-endpoints/endpoints/message_parser.py
|
7
|
7890
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Describe ProtoRPC Messages in JSON Schema.
Add protorpc.message subclasses to MessageTypeToJsonSchema and get a JSON
Schema description of all the messages.
"""
# pylint: disable=g-bad-name
import re
from protorpc import message_types
from protorpc import messages
__all__ = ['MessageTypeToJsonSchema']
class MessageTypeToJsonSchema(object):
  """Describe ProtoRPC messages in JSON Schema.
  Add protorpc.message subclasses to MessageTypeToJsonSchema and get a JSON
  Schema description of all the messages. MessageTypeToJsonSchema handles
  all the types of fields that can appear in a message.
  """
  # Field to schema type and format. If the field maps to tuple, the
  # first entry is set as the type, the second the format (or left alone if
  # None). If the field maps to a dictionary, we'll grab the value from the
  # field's Variant in that dictionary.
  # The variant dictionary should include an element that None maps to,
  # to fall back on as a default.
  __FIELD_TO_SCHEMA_TYPE_MAP = {
      messages.IntegerField: {messages.Variant.INT32: ('integer', 'int32'),
                              messages.Variant.INT64: ('string', 'int64'),
                              messages.Variant.UINT32: ('integer', 'uint32'),
                              messages.Variant.UINT64: ('string', 'uint64'),
                              messages.Variant.SINT32: ('integer', 'int32'),
                              messages.Variant.SINT64: ('string', 'int64'),
                              None: ('integer', 'int64')},
      messages.FloatField: {messages.Variant.FLOAT: ('number', 'float'),
                            messages.Variant.DOUBLE: ('number', 'double'),
                            None: ('number', 'float')},
      messages.BooleanField: ('boolean', None),
      messages.BytesField: ('string', 'byte'),
      message_types.DateTimeField: ('string', 'date-time'),
      messages.StringField: ('string', None),
      messages.MessageField: ('object', None),
      messages.EnumField: ('string', None),
  }
  # Fallback (type, format) for field classes absent from the map above.
  __DEFAULT_SCHEMA_TYPE = ('string', None)
  def __init__(self):
    # A map of schema ids to schemas.
    self.__schemas = {}
    # A map from schema id to non-normalized definition name.
    self.__normalized_names = {}
  def add_message(self, message_type):
    """Add a new message.
    Args:
      message_type: protorpc.message.Message class to be parsed.
    Returns:
      string, The JSON Schema id.
    Raises:
      KeyError if the Schema id for this message_type would collide with the
      Schema id of a different message_type that was already added.
    """
    name = self.__normalized_name(message_type)
    if name not in self.__schemas:
      # Set a placeholder to prevent infinite recursion.
      self.__schemas[name] = None
      schema = self.__message_to_schema(message_type)
      self.__schemas[name] = schema
    return name
  def ref_for_message_type(self, message_type):
    """Returns the JSON Schema id for the given message.
    Args:
      message_type: protorpc.message.Message class to be parsed.
    Returns:
      string, The JSON Schema id.
    Raises:
      KeyError: if the message hasn't been parsed via add_message().
    """
    name = self.__normalized_name(message_type)
    if name not in self.__schemas:
      raise KeyError('Message has not been parsed: %s', name)
    return name
  def schemas(self):
    """Returns the JSON Schema of all the messages.
    Returns:
      object: JSON Schema description of all messages.
    """
    return self.__schemas.copy()
  def __normalized_name(self, message_type):
    """Normalized schema name.
    Generate a normalized schema name, taking the class name and stripping out
    everything but alphanumerics, and camel casing the remaining words.
    A normalized schema name is a name that matches [a-zA-Z][a-zA-Z0-9]*
    Args:
      message_type: protorpc.message.Message class being parsed.
    Returns:
      A string, the normalized schema name.
    Raises:
      KeyError: A collision was found between normalized names.
    """
    # Normalization is applied to match the constraints that Discovery applies
    # to Schema names.
    name = message_type.definition_name()
    split_name = re.split(r'[^0-9a-zA-Z]', name)
    normalized = ''.join(
        part[0].upper() + part[1:] for part in split_name if part)
    previous = self.__normalized_names.get(normalized)
    if previous:
      if previous != name:
        raise KeyError('Both %s and %s normalize to the same schema name: %s' %
                       (name, previous, normalized))
    else:
      self.__normalized_names[normalized] = name
    return normalized
  def __message_to_schema(self, message_type):
    """Parse a single message into JSON Schema.
    Will recursively descend the message structure
    and also parse other messages references via MessageFields.
    Args:
      message_type: protorpc.messages.Message class to parse.
    Returns:
      An object representation of the schema.
    """
    name = self.__normalized_name(message_type)
    schema = {
        'id': name,
        'type': 'object',
    }
    if message_type.__doc__:
      schema['description'] = message_type.__doc__
    properties = {}
    for field in message_type.all_fields():
      descriptor = {}
      # Info about the type of this field.  This is either merged with
      # the descriptor or it's placed within the descriptor's 'items'
      # property, depending on whether this is a repeated field or not.
      type_info = {}
      if type(field) == messages.MessageField:
        # Nested messages are registered recursively and referenced by $ref.
        field_type = field.type().__class__
        type_info['$ref'] = self.add_message(field_type)
        if field_type.__doc__:
          descriptor['description'] = field_type.__doc__
      else:
        schema_type = self.__FIELD_TO_SCHEMA_TYPE_MAP.get(
            type(field), self.__DEFAULT_SCHEMA_TYPE)
        # If the map pointed to a dictionary, check if the field's variant
        # is in that dictionary and use the type specified there.
        if isinstance(schema_type, dict):
          variant_map = schema_type
          variant = getattr(field, 'variant', None)
          if variant in variant_map:
            schema_type = variant_map[variant]
          else:
            # The variant map needs to specify a default value, mapped by None.
            schema_type = variant_map[None]
        type_info['type'] = schema_type[0]
        if schema_type[1]:
          type_info['format'] = schema_type[1]
      if type(field) == messages.EnumField:
        # Enum values are listed in ascending numeric order.
        sorted_enums = sorted([enum_info for enum_info in field.type],
                              key=lambda enum_info: enum_info.number)
        type_info['enum'] = [enum_info.name for enum_info in sorted_enums]
      if field.required:
        descriptor['required'] = True
      # NOTE(review): this only emits truthy defaults; a default of 0,
      # False or '' is silently dropped from the schema — confirm intended.
      if field.default:
        if type(field) == messages.EnumField:
          descriptor['default'] = str(field.default)
        else:
          descriptor['default'] = field.default
      if field.repeated:
        descriptor['items'] = type_info
        descriptor['type'] = 'array'
      else:
        descriptor.update(type_info)
      properties[field.name] = descriptor
    schema['properties'] = properties
    return schema
|
bsd-3-clause
|
jmartinm/invenio-master
|
modules/webjournal/lib/widgets/bfe_webjournal_widget_weather.py
|
25
|
8191
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
WebJournal widget - Display weather forecast
"""
import os
import time
import re
import socket
try:
# Try to load feedparser. Remember for later if it was installed
# or not. Note that feedparser is slow to load: if we don't load
# it in a 'global' way, it will be loaded for every call to this
# element.
global feedparser
import feedparser
feedparser_available = 1
except ImportError:
feedparser_available = 0
from invenio.config import \
CFG_CACHEDIR, \
CFG_ACCESS_CONTROL_LEVEL_SITE
from invenio.errorlib import register_exception
from invenio.webjournal_utils import \
parse_url_string, WEBJOURNAL_OPENER
from invenio.messages import gettext_set_language
# Matches the <img> tag inside Yahoo's weather summary HTML; the 'image'
# group captures the icon URL.
re_image_pattern = re.compile(r'<img\s*(class=["\']imageScale["\'])*?\s*src="(?P<image>\S*)"\s*/>',
                              re.DOTALL | re.IGNORECASE | re.VERBOSE)
# Yahoo Weather RSS endpoint, parameterized by location code and degree unit
yahoo_weather_rss_base_url = 'http://weather.yahooapis.com/forecastrss?w=%(location)s&u=%(degree_unit)s'
def format_element(bfo, location='782041', degree_unit='c' ,
                   display_weather_icon='false', weather_icon_only='false'):
    """
    Display the latest weather forecast from Yahoo Weather
    (See http://developer.yahoo.com/weather/)
    @param bfo: BibFormat object for the current formatting context
    @param location: Yahoo location code for the forecast
    @param degree_unit: Degree unit ('f'=Fahrenheit or 'c'=Celsius)
    @param display_weather_icon: if 'true', display weather icon inside the forecasts
    @param weather_icon_only: if 'true', display only the weather icon (without text)
    """
    # Without feedparser the RSS feed cannot be parsed at all
    if not feedparser_available:
        return ""
    args = parse_url_string(bfo.user_info['uri'])
    journal_name = args["journal_name"]
    # Per-journal cache files for the raw feed and its expiry timestamp
    cached_filename = "webjournal_widget_weather_%s.rss" % journal_name
    expire_time_filename = "webjournal_widget_weather_%s_RSS_expires" % \
                           journal_name
    out = get_widget_html(yahoo_weather_rss_base_url % \
                          {'location': location, 'degree_unit': degree_unit},
                          cached_filename,
                          expire_time_filename,
                          journal_name)
    if weather_icon_only == 'true':
        # Keep only the weather icon <img> tag from the summary HTML
        try:
            out = '<img alt="" src="%s" align="bottom" />' % \
                  re_image_pattern.findall(out)[0][1]
        except:
            register_exception(req=bfo.req)
            out = ''
    elif display_weather_icon == 'false':
        # Strip the icon, keeping only the textual forecast
        try:
            out = re.sub(re_image_pattern, "", out)
        except:
            register_exception(req=bfo.req)
            out = ''
    return out
def escape_values(bfo):
    """
    Called by BibFormat to decide whether this element's output must be
    escaped; 0 means "do not escape".
    """
    return 0
def get_widget_html(yahoo_weather_rss, cached_filename, expire_time_filename, journal_name):
    """
    weather forecast using Yahoo! Weather service
    we check and store the "expires" data from the rss feed to decide when
    an update is needed.
    there always resides a cached version in cds CFG_CACHEDIR along with a flat
    file that indicates the time when the feed expires.
    """
    # ('_' used below is the module-level gettext function assigned at
    # the bottom of this file)
    # Fast path: serve the rendered HTML cache when it is fresh enough
    cached_weather_box = _get_weather_from_cache(journal_name)
    if cached_weather_box:
        return cached_weather_box
    # No HTML cache? Then read locally saved feed data, and even
    # refresh it from Yahoo if it has expired.
    try:
        cached_rss_path = os.path.join(CFG_CACHEDIR, cached_filename)
        assert(os.path.exists(cached_rss_path))
        weather_feed = feedparser.parse(cached_rss_path)
        assert(not weather_feed.bozo_exception)
    except:
        # Missing/broken local feed: fetch a fresh copy from Yahoo
        try:
            _update_feed(yahoo_weather_rss, cached_filename, expire_time_filename)
            weather_feed = feedparser.parse('%s/%s' % \
                                            (CFG_CACHEDIR, cached_filename))
        except:
            return "<ul><li><i>" + _("No information available") + "</i></li></ul>"
    now_in_gmt = time.gmtime()
    try:
        # Compare the feed's advertised expiry time against "now" (GMT)
        expire_time = time.strptime(open(expire_time_filename).read(),
                                    "%a, %d %b %Y %H:%M:%S %Z")
        diff = time.mktime(expire_time) - time.mktime(now_in_gmt)
    except:
        diff = -1
    if diff < 0:
        # Feed expired (or expiry unreadable): refresh it from Yahoo
        try:
            _update_feed(yahoo_weather_rss, cached_filename, expire_time_filename)
            weather_feed = feedparser.parse('%s/%s' % \
                                            (CFG_CACHEDIR, cached_filename))
        except:
            return "<ul><li><i>" + _("No information available") + "</i></li></ul>"
    # Construct the HTML. Well, simply take the one provided by
    # Yahoo..
    html = weather_feed.entries[0]['summary']
    cache_weather(html, journal_name)
    return html
def _get_weather_from_cache(journal_name):
    """
    Return the cached weather HTML for the given journal, or False when
    no usable cache exists (missing, stale, or unreadable).
    """
    cache_path = os.path.abspath('%s/webjournal/%s/weather.html' %
                                 (CFG_CACHEDIR, journal_name))
    # Refuse paths escaping the cache tree (e.g. '../../' in the journal name)
    if not cache_path.startswith(CFG_CACHEDIR + '/webjournal'):
        return False
    try:
        cache_age = time.time() - os.path.getctime(cache_path)
    except:
        return False
    # A cache older than 15 minutes is considered stale
    if cache_age > 15 * 60:
        return False
    try:
        return open(cache_path).read()
    except:
        return False
def cache_weather(html, journal_name):
    """
    Cache the weather box HTML for this journal.  (The cache is actually
    refreshed every 15 minutes by _get_weather_from_cache, not 30 as the
    old docstring claimed.)
    """
    if CFG_ACCESS_CONTROL_LEVEL_SITE != 2:
        cache_path = os.path.abspath('%s/webjournal/%s/weather.html' % \
                                     (CFG_CACHEDIR,
                                      journal_name))
        if cache_path.startswith(CFG_CACHEDIR + '/webjournal'):
            # Do not try to cache if the journal name led us to some
            # other directory ('../../' inside journal name for
            # example)
            cache_dir = CFG_CACHEDIR + '/webjournal/' + journal_name
            if not os.path.isdir(cache_dir):
                os.makedirs(cache_dir)
            # BUGFIX: use open() in a context manager instead of the
            # Python-2-only builtin file(); also guarantees the handle
            # is closed even if write() fails.
            with open(cache_path, "w") as cache_file:
                cache_file.write(html)
def _update_feed(yahoo_weather_rss, cached_filename, expire_time_filename):
    """
    Retrieve the latest weather information from Yahoo and write it to
    'cached_filename'. Also write the supposed expiration date
    provided by Yahoo to 'expire_time_filename'.
    """
    # Keep the global socket timeout short while fetching, and always
    # restore the previous value afterwards.
    default_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(2.0)
    try:
        try:
            feed = WEBJOURNAL_OPENER.open(yahoo_weather_rss)
        except:
            # Network failure: leave the existing cache untouched
            return
    finally:
        socket.setdefaulttimeout(default_timeout)
    cached_file = open('%s/%s' % (CFG_CACHEDIR, cached_filename), 'w')
    cached_file.write(feed.read())
    cached_file.close()
    # NOTE(review): this fetches the URL a second time just to read the
    # 'expires' header — could reuse the response above; confirm intended.
    feed_data = feedparser.parse(yahoo_weather_rss)
    expire_time = feed_data.headers['expires']
    expire_file = open('%s/%s' % (CFG_CACHEDIR, expire_time_filename), 'w')
    expire_file.write(expire_time)
    expire_file.close()
# Module-level gettext function used by get_widget_html; the dummy call
# ensures the phrase is collected by the i18n message extractor.
_ = gettext_set_language('en')
dummy = _("Under the CERN sky")
|
gpl-2.0
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_10_01/operations/_load_balancer_outbound_rules_operations.py
|
1
|
8796
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerOutboundRulesOperations(object):
    """LoadBalancerOutboundRulesOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_10_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: AutoRest-generated operations class; manual edits will be
    # lost when the SDK is regenerated.
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name,  # type: str
        load_balancer_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.LoadBalancerOutboundRuleListResult"]
        """Gets all the outbound rules in a load balancer.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LoadBalancerOutboundRuleListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_10_01.models.LoadBalancerOutboundRuleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LoadBalancerOutboundRuleListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        accept = "application/json"
        # Builds either the first-page request or one that follows next_link.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the service-provided next_link already
                # encodes the query string.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Deserializes a page into (continuation token, item iterator).
        def extract_data(pipeline_response):
            deserialized = self._deserialize('LoadBalancerOutboundRuleListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetches one page, mapping non-200 responses to typed errors.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules'}  # type: ignore
    def get(
        self,
        resource_group_name,  # type: str
        load_balancer_name,  # type: str
        outbound_rule_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.OutboundRule"
        """Gets the specified load balancer outbound rule.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param outbound_rule_name: The name of the outbound rule.
        :type outbound_rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: OutboundRule, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2018_10_01.models.OutboundRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OutboundRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'outboundRuleName': self._serialize.url("outbound_rule_name", outbound_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('OutboundRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules/{outboundRuleName}'}  # type: ignore
|
mit
|
dgladkov/django
|
django/contrib/gis/gdal/prototypes/errcheck.py
|
586
|
4229
|
"""
This module houses the error-checking routines used by the GDAL
ctypes prototypes.
"""
from ctypes import c_void_p, string_at
from django.contrib.gis.gdal.error import (
GDALException, SRSException, check_err,
)
from django.contrib.gis.gdal.libgdal import lgdal
from django.utils import six
# Helper routines for retrieving pointers and/or values from
# arguments passed in by reference.
def arg_byref(args, offset=-1):
    "Returns the pointer argument's by-reference value."
    referenced = args[offset]._obj
    return referenced.value
def ptr_byref(args, offset=-1):
    "Returns the pointer argument passed in by-reference."
    byref_arg = args[offset]
    return byref_arg._obj
# ### String checking Routines ###
def check_const_string(result, func, cargs, offset=None, cpl=False):
    """
    Similar functionality to `check_string`, but does not free the pointer.
    """
    # Without an offset the result itself is the string value.
    if not offset:
        return result
    check_err(result, cpl=cpl)
    ptr = ptr_byref(cargs, offset)
    return ptr.value
def check_string(result, func, cargs, offset=-1, str_result=False):
    """Validate a string returned from an OGR routine and free its memory.

    When `str_result` is true, `result` is itself the string pointer;
    otherwise `result` is an OGR error code and the string pointer is
    fetched by reference from `cargs` at the given `offset`.  The
    C-allocated string is released with VSIFree before the value is
    returned.
    """
    if str_result:
        # `result` is the string pointer itself.
        ptr = result
        s = string_at(result) if ptr else None
    else:
        # `result` is an error code; the string comes back by reference.
        check_err(result)
        ptr = ptr_byref(cargs, offset)
        s = ptr.value
    # Free the memory GDAL allocated behind the pointer.
    if ptr:
        lgdal.VSIFree(ptr)
    return s
# ### DataSource, Layer error-checking ###
# ### Envelope checking ###
def check_envelope(result, func, cargs, offset=-1):
    """Return the OGR Envelope struct passed back by reference."""
    return ptr_byref(cargs, offset)
# ### Geometry error-checking routines ###
def check_geom(result, func, cargs):
    """Validate and return a geometry pointer.

    OGR_G_Clone may hand back a plain integer even though the restype is
    c_void_p, so integers are coerced into pointers first.
    """
    geom_ptr = c_void_p(result) if isinstance(result, six.integer_types) else result
    if geom_ptr:
        return geom_ptr
    raise GDALException('Invalid geometry pointer returned from "%s".' % func.__name__)
def check_geom_offset(result, func, cargs, offset=-1):
    """Check the geometry found at `offset` in the C argument list."""
    check_err(result)
    return check_geom(ptr_byref(cargs, offset=offset), func, cargs)
# ### Spatial Reference error-checking routines ###
def check_srs(result, func, cargs):
    """Validate and return a spatial reference pointer, coercing ints."""
    srs_ptr = c_void_p(result) if isinstance(result, six.integer_types) else result
    if srs_ptr:
        return srs_ptr
    raise SRSException('Invalid spatial reference pointer returned from "%s".' % func.__name__)
# ### Other error-checking routines ###
def check_arg_errcode(result, func, cargs, cpl=False):
    """Validate the by-reference error code in the final argument.

    The code is checked with `check_err`; on success the original
    `result` is passed through unchanged.
    """
    check_err(arg_byref(cargs), cpl=cpl)
    return result
def check_errcode(result, func, cargs, cpl=False):
    """Validate an error code (c_int) returned directly by the routine."""
    check_err(result, cpl=cpl)
def check_pointer(result, func, cargs):
    """Ensure a returned pointer is non-null, coercing ints to c_void_p."""
    ptr = c_void_p(result) if isinstance(result, six.integer_types) else result
    if not ptr:
        raise GDALException('Invalid pointer returned from "%s"' % func.__name__)
    return ptr
def check_str_arg(result, func, cargs):
    """Return (double, string) for OSRGet[Angular|Linear]Units.

    These routines return a double while also writing a string pointer
    into their final argument; that string must not be freed, so it is
    simply decoded and returned alongside the numeric result.
    """
    units_value = result
    name_ptr = cargs[-1]._obj
    return units_value, name_ptr.value.decode()
|
bsd-3-clause
|
dhhagan/ACT
|
ACT/thermo/visualize.py
|
1
|
13306
|
"""
Classes and functions used to visualize data for thermo scientific analyzers
"""
from pandas import Series, DataFrame
import pandas as pd
import datetime as dt
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import dates as d
import os
import math
import glob
import matplotlib
import warnings
import sys
__all__ = ['diurnal_plot','diurnal_plot_single', 'ThermoPlot']
def diurnal_plot(data, dates=None, shaded=False, title="Diurnal Profile of Trace Gases", xlabel="Local Time: East St. Louis, MO"):
    '''
    Plot diurnal (hour-of-day) profiles of NOx, SO2 and O3.

    Parameters
    ----------
    data : pandas.DataFrame
        Time-indexed frame with 'nox', 'so2' and 'o3' concentration columns.
    dates : list, optional
        Empty/None -> plot everything; [d] -> that single date;
        [start, end] -> the inclusive range.  Anything else aborts.
    shaded : bool
        When True, shade between the mean and the 25%/75% quantiles.
    title, xlabel : str
        Figure title and shared x-axis label.

    Returns
    -------
    (fig, (ax1, ax2, ax3))
    '''
    # Avoid a mutable default argument; treat None like "plot everything".
    if dates is None:
        dates = []
    # Abort on bad input.  BUGFIX: the original used a bare `exit`
    # statement here, which is a no-op, so execution fell through with
    # invalid data.  Mirrors the sys.exit() used by diurnal_plot_single.
    if not isinstance(data, pd.DataFrame):
        sys.exit("data is not a pandas DataFrame, thus this will not end well for you.")
    # Truncate the frame to the requested date window.
    if len(dates) == 0:
        # Plot everything, yo!
        pass
    elif len(dates) == 1:
        # Plot just this date
        data = data[dates[0]]
    elif len(dates) == 2:
        # Plot between these dates
        data = data[dates[0]:dates[1]]
    else:
        sys.exit("Dates are not properly configured.")
    # Add an HH:MM column so rows can be grouped by time of day.
    data['Time'] = data.index.map(lambda x: x.strftime("%H:%M"))
    # Per-time-of-day statistics (mean, quartiles, ...) for every column.
    grouped = data.groupby('Time').describe().unstack()
    # Re-cast the HH:MM index to datetimes so matplotlib can format it.
    grouped.index = pd.to_datetime(grouped.index.astype(str))
    fig, (ax1, ax2, ax3) = plt.subplots(3, figsize=(10, 9), sharex=True)
    # Titles and axis labels.
    ax1.set_title(title, fontsize=14)
    ax1.set_ylabel(r'$\ [NO_x] (ppb)$', fontsize=14, weight='bold')
    ax2.set_ylabel(r'$\ [SO_2] (ppb)$', fontsize=14)
    ax3.set_ylabel(r'$\ [O_3] (ppb)$', fontsize=14)
    ax3.set_xlabel(xlabel, fontsize=14)
    # Only the bottom subplot shows x tick labels.
    plt.setp(ax1.get_xticklabels(), visible=False)
    plt.setp(ax2.get_xticklabels(), visible=False)
    # Anchor y-axes at zero with 5% headroom above the mean.
    ax1.set_ylim(0, grouped['nox']['mean'].max() * 1.05)
    ax2.set_ylim(0, grouped['so2']['mean'].max() * 1.05)
    ax3.set_ylim(0, grouped['o3']['mean'].max() * 1.05)
    # Mean traces.
    ax1.plot(grouped.index, grouped['nox']['mean'], 'g', linewidth=2.0)
    ax2.plot(grouped.index, grouped['so2']['mean'], 'r', linewidth=2.0)
    ax3.plot(grouped.index, grouped['o3']['mean'], 'b', linewidth=2.0)
    if shaded:
        # Shade the interquartile band around each mean trace.
        for ax, col, color in ((ax1, 'nox', 'green'),
                               (ax2, 'so2', 'red'),
                               (ax3, 'o3', 'blue')):
            ax.plot(grouped.index, grouped[col]['75%'], color[0])
            ax.plot(grouped.index, grouped[col]['25%'], color[0])
            ax.set_ylim(0, grouped[col]['75%'].max() * 1.05)
            ax.fill_between(grouped.index, grouped[col]['mean'], grouped[col]['75%'], alpha=.5, facecolor=color)
            ax.fill_between(grouped.index, grouped[col]['mean'], grouped[col]['25%'], alpha=.5, facecolor=color)
    # Major ticks at 5 points across the span, minor ticks finer.
    ticks = ax1.get_xticks()
    end = d.date2num(d.num2date(ticks[-1]) + dt.timedelta(hours=3))
    ax3.set_xticks(np.linspace(ticks[0], end, 5))
    ax3.set_xticks(np.linspace(ticks[0], end, 25), minor=True)
    ax3.xaxis.set_major_formatter(matplotlib.dates.DateFormatter('%I:%M %p'))
    # Make the layout tight to get rid of some whitespace
    plt.tight_layout()
    plt.show()
    return (fig, (ax1, ax2, ax3))
def diurnal_plot_single(data, model='', dates=[], shaded=False, color1 = 'blue',
        title="Diurnal Profile of Trace Gases", xlabel="Local Time: East St. Louis, MO",
        ylabel=r'$\ [NO_x] (ppb)$'):
    '''
    Plot the diurnal (hour-of-day) profile for a single analyzer.

    `data` must be a time-indexed pandas DataFrame with the trace-gas
    concentration columns; `model` selects which analyzer column to plot
    ('nox', 'so2'/'sox', or 'o3').  `dates` behaves as in `diurnal_plot`:
    empty -> everything, one date -> that day, two -> the range.  When
    `shaded` is True the interquartile band is filled around the mean.
    Returns (fig, ax).

    >>> diurnal_plot_single(data, model='o3', ylabel='O3', shaded=True, color1='green')
    '''
    # Check to make sure the data is a valid dataframe
    if not isinstance(data, pd.DataFrame):
        sys.exit("data is not a pandas DataFrame, thus this will not end well for you.")
    # Check to make sure the model is valid
    if model.lower() not in ['nox','so2','o3','sox']:
        sys.exit("Model is not defined correctly: options are ['nox','so2','sox','o3']")
    # Map the requested model onto the column prefix used in `data`
    # ('so2' data lives under the 'sox' column).
    if model.lower() == 'nox':
        instr = 'nox'
    elif model.lower() == 'so2' or model.lower() == 'sox':
        instr = 'sox'
    else:
        instr = 'o3'
    # If not plotting all the data, truncate the dataframe to include only the needed data
    if len(dates) == 0:
        # plot everything
        pass
    elif len(dates) == 1:
        # plot just this date
        data = data[dates[0]]
    elif len(dates) == 2:
        # plot between these dates
        data = data[dates[0]:dates[1]]
    else:
        sys.exit("You have an error with how you defined your dates")
    # Add columns for time to enable simple diurnal trends to be found
    data['Time'] = data.index.map(lambda x: x.strftime("%H:%M"))
    # Group the data by time and grab the statistics (mean, quartiles, ...)
    grouped = data.groupby('Time').describe().unstack()
    # Re-cast the HH:MM index to datetimes so matplotlib can format it
    grouped.index = pd.to_datetime(grouped.index.astype(str))
    # Plot
    fig, ax = plt.subplots(1, figsize=(8,4))
    # Set plot titles and labels
    ax.set_title(title, fontsize=14)
    ax.set_ylabel(ylabel, fontsize=14, weight='bold')
    ax.set_xlabel(xlabel, fontsize=14)
    # Set y min to zero just in case (5% headroom above the mean):
    ax.set_ylim(0,grouped[instr]['mean'].max()*1.05)
    # Plot means
    ax.plot(grouped.index, grouped[instr]['mean'], color1,linewidth=2.0)
    # If shaded=true, fill the interquartile band around the mean
    if shaded == True:
        ax.plot(grouped.index, grouped[instr]['75%'],color1)
        ax.plot(grouped.index, grouped[instr]['25%'],color1)
        ax.set_ylim(0,grouped[instr]['75%'].max()*1.05)
        ax.fill_between(grouped.index, grouped[instr]['mean'], grouped[instr]['75%'], alpha=.5, facecolor=color1)
        ax.fill_between(grouped.index, grouped[instr]['mean'], grouped[instr]['25%'], alpha=.5, facecolor=color1)
    # Get/Set xticks: 5 major and 25 minor ticks across the span
    ticks = ax.get_xticks()
    ax.set_xticks(np.linspace(ticks[0], d.date2num(d.num2date(ticks[-1]) + dt.timedelta(hours=3)), 5))
    ax.set_xticks(np.linspace(ticks[0], d.date2num(d.num2date(ticks[-1]) + dt.timedelta(hours=3)), 25), minor=True)
    ax.xaxis.set_major_formatter(matplotlib.dates.DateFormatter('%I:%M %p'))
    # Make the layout tight to get rid of some whitespace
    plt.tight_layout()
    plt.show()
    return (fig, ax)
class ThermoPlot():
    '''
    Allows for easy plotting of internal instrument data. Currently supports the
    following models:
        - NO, NO2, NOx (42I)
        - O3 (49I)
        - SO2 (43I)
    '''
    def __init__(self, data):
        # Time-indexed pandas DataFrame of instrument columns.
        self.data = data
    def debug_plot(self, args=None):
        '''
        Plots thermo scientific instrument data for debugging purposes.  The
        top plot contains internal instrument data such as flow rates and
        temperatures; the bottom plot contains the trace-gas data.

        The instrument (nox, so2/sox, or o3) is auto-detected from the
        DataFrame's columns.  `args` is an optional dict that overrides any
        of the default plotting options (title, labels, colors, grid, ...).

        >>> nox = ThermoPlot(data)
        >>> f, (a1, a2, a3) = nox.debug_plot()
        '''
        # Avoid a mutable default argument; None means "no overrides".
        if args is None:
            args = {}
        default_args = {
            'xlabel':'Local Time, East St Louis, MO',
            'ylabpressure':'Flow (LPM)',
            'ylabgas':'Gas Conc. (ppb)',
            'ylabtemp':'Temperature (C)',
            'title_fontsize':'18',
            'labels_fontsize':'14',
            'grid':False
        }
        # Figure out what model we are trying to plot and set instrument
        # specific default args
        cols = [i.lower() for i in self.data.columns.values.tolist()]
        if 'o3' in cols:
            default_args['instrument'] = 'o3'
            default_args['title'] = "Debug Plot for " + r'$\ O_{3} $' + ": Model 49I"
            default_args['color_o3'] = 'blue'
        elif 'sox' in cols or 'so2' in cols:
            default_args['instrument'] = 'so2'
            default_args['title'] = "Debug Plot for " + r'$\ SO_{2} $' + ": Model 43I"
            default_args['color_so2'] = 'green'
        elif 'nox' in cols:
            default_args['instrument'] = 'nox'
            default_args['title'] = "Debug Plot for " + r'$\ NO_{x} $' + ": Model 42I"
            default_args['color_no'] = '#FAB923'
            default_args['color_nox'] = '#FC5603'
            default_args['color_no2'] = '#FAE823'
        else:
            # (typo "isntrument" fixed in this message)
            sys.exit("Could not figure out what instrument this is for")
        # Merge caller overrides into the defaults.  BUGFIX: `.items()` and
        # the `in` operator replace the Python-2-only `.iteritems()` and
        # `.has_key()`, which crash under Python 3.
        for key in default_args:
            if key in args:
                default_args[key] = args[key]
        # Set up Plot and all three axes (ax2 shares ax1's x axis).
        fig, (ax1, ax3) = plt.subplots(2, figsize=(10,6), sharex=True)
        ax2 = ax1.twinx()
        # set up axes labels and titles
        ax1.set_title(default_args['title'], fontsize=default_args['title_fontsize'])
        ax1.set_ylabel(default_args['ylabpressure'], fontsize=default_args['labels_fontsize'])
        ax2.set_ylabel(default_args['ylabtemp'], fontsize=default_args['labels_fontsize'])
        ax3.set_ylabel(default_args['ylabgas'], fontsize=default_args['labels_fontsize'])
        ax3.set_xlabel(default_args['xlabel'], fontsize=default_args['labels_fontsize'])
        # Make the ticks invisible on the first and second plots
        plt.setp( ax1.get_xticklabels(), visible=False )
        # Plot the internal (debug) data on the top graph and the trace-gas
        # data on the bottom graph, per instrument.
        if default_args['instrument'] == 'o3':
            self.data['bncht'].plot(ax=ax2, label=r'$\ T_{bench}$')
            self.data['lmpt'].plot(ax=ax2, label=r'$\ T_{lamp}$')
            self.data['flowa'].plot(ax=ax1, label=r'$\ Q_{A}$', style='--')
            self.data['flowb'].plot(ax=ax1, label=r'$\ Q_{B}$', style='--')
            self.data['o3'].plot(ax=ax3, color=default_args['color_o3'], label=r'$\ O_{3}$')
        elif default_args['instrument'] == 'so2':
            self.data['intt'].plot(ax=ax2, label=r'$\ T_{internal}$')
            self.data['rctt'].plot(ax=ax2, label=r'$\ T_{reactor}$')
            self.data['smplfl'].plot(ax=ax1, label=r'$\ Q_{sample}$', style='--')
            self.data['so2'].plot(ax=ax3, label=r'$\ SO_2 $', color=default_args['color_so2'], ylim=[0,self.data['so2'].max()*1.05])
        else:
            m = max(self.data['convt'].max(),self.data['intt'].max(),self.data['pmtt'].max())
            self.data['convt'].plot(ax=ax2, label=r'$\ T_{converter}$')
            self.data['intt'].plot(ax=ax2, label=r'$\ T_{internal}$')
            self.data['rctt'].plot(ax=ax2, label=r'$\ T_{reactor}$')
            self.data['pmtt'].plot(ax=ax2, label=r'$\ T_{PMT}$')
            self.data['smplf'].plot(ax=ax1, label=r'$\ Q_{sample}$', style='--')
            self.data['ozonf'].plot(ax=ax1, label=r'$\ Q_{ozone}$', style='--')
            self.data['no'].plot(ax=ax3, label=r'$\ NO $', color=default_args['color_no'])
            self.data['no2'].plot(ax=ax3, label=r'$\ NO_{2}$', color=default_args['color_no2'])
            self.data['nox'].plot(ax=ax3, label=r'$\ NO_{x}$', color=default_args['color_nox'], ylim=(0,math.ceil(self.data.nox.max()*1.05)))
        # Legends: merge the two top-axis legends into one box.
        lines, labels = ax1.get_legend_handles_labels()
        lines2, labels2 = ax2.get_legend_handles_labels()
        plt.legend(lines+lines2, labels+labels2, bbox_to_anchor=(1.10, 1), loc=2, borderaxespad=0.)
        ax3.legend(bbox_to_anchor=(1.10, 1.), loc=2, borderaxespad=0.)
        # Hide grids?
        ax1.grid(default_args['grid'])
        ax2.grid(default_args['grid'])
        ax3.grid(default_args['grid'])
        # More of the things..
        plt.tight_layout()
        plt.show()
        return fig, (ax1, ax2, ax3)
|
mit
|
MarkWh1te/xueqiu_predict
|
python3_env/lib/python3.4/site-packages/setuptools/package_index.py
|
10
|
39958
|
"""PyPI and direct package downloading"""
import sys
import os
import re
import shutil
import socket
import base64
import hashlib
import itertools
from functools import wraps
try:
from urllib.parse import splituser
except ImportError:
from urllib2 import splituser
from setuptools.extern import six
from setuptools.extern.six.moves import urllib, http_client, configparser, map
import setuptools
from pkg_resources import (
CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST,
require, Environment, find_distributions, safe_name, safe_version,
to_filename, Requirement, DEVELOP_DIST,
)
from setuptools import ssl_support
from distutils import log
from distutils.errors import DistutilsError
from fnmatch import translate
from setuptools.py26compat import strip_fragment
from setuptools.py27compat import get_all_headers
EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
# this is here to fix emacs' cruddy broken syntax highlighting
PYPI_MD5 = re.compile(
'<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)'
'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)'
)
URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):', re.I).match
EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
__all__ = [
'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
'interpret_distro_name',
]
_SOCKET_TIMEOUT = 15
_tmpl = "setuptools/{setuptools.__version__} Python-urllib/{py_major}"
user_agent = _tmpl.format(py_major=sys.version[:3], **globals())
def parse_requirement_arg(spec):
    """Parse `spec` as a Requirement, raising DistutilsError on failure."""
    try:
        return Requirement.parse(spec)
    except ValueError:
        msg = "Not a URL, existing file, or requirement spec: %r" % (spec,)
        raise DistutilsError(msg)
def parse_bdist_wininst(name):
    """Return (base, pyversion, platform) parsed from a bdist_wininst
    ``.exe`` filename, or (None, None, None) if it doesn't look like one.
    """
    lower = name.lower()
    base = py_ver = plat = None
    if lower.endswith('.win32.exe'):
        base, plat = name[:-10], 'win32'
    elif lower.endswith('.exe') and lower.startswith('.win32-py', -16):
        # e.g. "pkg-1.0.win32-py2.7.exe"
        base, py_ver, plat = name[:-16], name[-7:-4], 'win32'
    elif lower.endswith('.win-amd64.exe'):
        base, plat = name[:-14], 'win-amd64'
    elif lower.endswith('.exe') and lower.startswith('.win-amd64-py', -20):
        # e.g. "pkg-1.0.win-amd64-py3.5.exe"
        base, py_ver, plat = name[:-20], name[-7:-4], 'win-amd64'
    return base, py_ver, plat
def egg_info_for_url(url):
    """Split `url` into (basename, fragment) for egg-info purposes."""
    scheme, server, path, parameters, query, fragment = urllib.parse.urlparse(url)
    base = urllib.parse.unquote(path.split('/')[-1])
    # SourceForge hides the real filename one path component up.  XXX Yuck
    if server == 'sourceforge.net' and base == 'download':
        base = urllib.parse.unquote(path.split('/')[-2])
    # A '#' may survive in the basename when the URL wasn't fully parsed.
    if '#' in base:
        base, fragment = base.split('#', 1)
    return base, fragment
def distros_for_url(url, metadata=None):
    """Yield egg or source distribution objects that might be found at a URL"""
    base, fragment = egg_info_for_url(url)
    for dist in distros_for_location(url, base, metadata):
        yield dist
    if not fragment:
        return
    # An "#egg=name-version" fragment marks a checkout link.
    match = EGG_FRAGMENT.match(fragment)
    if not match:
        return
    for dist in interpret_distro_name(
            url, match.group(1), metadata, precedence=CHECKOUT_DIST):
        yield dist
def distros_for_location(location, basename, metadata=None):
    """Yield egg or source distribution objects based on basename"""
    if basename.endswith('.egg.zip'):
        basename = basename[:-4]  # strip the .zip
    if basename.endswith('.egg') and '-' in basename:
        # Unambiguous egg name -- a single interpretation.
        return [Distribution.from_location(location, basename, metadata)]
    if basename.endswith('.exe'):
        win_base, py_ver, platform = parse_bdist_wininst(basename)
        if win_base is not None:
            return interpret_distro_name(
                location, win_base, metadata, py_ver, BINARY_DIST, platform
            )
    # Fall back to the known source-distribution extensions.
    for ext in EXTENSIONS:
        if basename.endswith(ext):
            return interpret_distro_name(
                location, basename[:-len(ext)], metadata
            )
    return []  # no extension matched
def distros_for_filename(filename, metadata=None):
    """Yield possible egg or source distribution objects based on a filename"""
    basename = os.path.basename(filename)
    return distros_for_location(normalize_path(filename), basename, metadata)
def interpret_distro_name(
    location, basename, metadata, py_version=None, precedence=SOURCE_DIST,
    platform=None
):
    """Generate alternative interpretations of a source distro name

    Note: if `location` is a filesystem filename, you should call
    ``pkg_resources.normalize_path()`` on it before passing it to this
    routine!
    """
    # Distro names such as "adns-python-1.1.0" are ambiguous about where the
    # name stops and the version starts, so every possible name/version
    # split is yielded.  Spurious splits sort below real version numbers and
    # are effectively ignored during resolution, though PyPI moving to
    # "safe" names/versions would remove the ambiguity entirely.
    parts = basename.split('-')
    if not py_version and any(re.match('py\d\.\d$', p) for p in parts[2:]):
        # A bare "pyX.Y" component means bdist_dumb, not an sdist -- bail.
        return
    for split_at in range(1, len(parts) + 1):
        yield Distribution(
            location, metadata,
            '-'.join(parts[:split_at]), '-'.join(parts[split_at:]),
            py_version=py_version, precedence=precedence,
            platform=platform
        )
# From Python 2.7 docs
def unique_everseen(iterable, key=None):
    """List unique elements, preserving order. Remember all elements ever seen.

    unique_everseen('AAAABBBCCDAABBB') --> A B C D
    unique_everseen('ABBCcAD', str.lower) --> A B C D
    """
    seen = set()
    for element in iterable:
        # Deduplicate on key(element) when a key is given, else on the
        # element itself.
        marker = element if key is None else key(element)
        if marker not in seen:
            seen.add(marker)
            yield element
def unique_values(func):
    """
    Wrap a function returning an iterable such that the resulting iterable
    only ever yields unique items.
    """
    @wraps(func)
    def deduplicated(*args, **kwargs):
        return unique_everseen(func(*args, **kwargs))
    return deduplicated
REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I)
# this line is here to fix emacs' cruddy broken syntax highlighting
@unique_values
def find_external_links(url, page):
    """Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
    # rel="..." attributes may hold several comma-separated values.
    for match in REL.finditer(page):
        tag, rel = match.groups()
        rels = set(map(str.strip, rel.lower().split(',')))
        if not rels.isdisjoint({'homepage', 'download'}):
            for href in HREF.finditer(tag):
                yield urllib.parse.urljoin(url, htmldecode(href.group(1)))
    # Legacy PyPI pages expose the same links in table headers.
    for heading in ("<th>Home Page", "<th>Download URL"):
        pos = page.find(heading)
        if pos == -1:
            continue
        match = HREF.search(page, pos)
        if match:
            yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
class ContentChecker(object):
    """
    A null content checker that defines the interface for checking content
    """

    def feed(self, block):
        """Feed a block of data to the hash.  The null checker discards it."""
        return

    def is_valid(self):
        """Check the hash.  The null checker always validates."""
        return True

    def report(self, reporter, template):
        """Report checker details via `reporter`.  The null checker has
        nothing to report."""
        return
class HashChecker(ContentChecker):
    # Matches "<hashname>=<hexdigest>" URL fragments.
    pattern = re.compile(
        r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)='
        r'(?P<expected>[a-f0-9]+)'
    )

    def __init__(self, hash_name, expected):
        self.hash_name = hash_name
        self.hash = hashlib.new(hash_name)
        self.expected = expected

    @classmethod
    def from_url(cls, url):
        "Construct a (possibly null) ContentChecker from a URL"
        fragment = urllib.parse.urlparse(url)[-1]
        if fragment:
            match = cls.pattern.search(fragment)
            if match:
                return cls(**match.groupdict())
        # No usable hash fragment: fall back to the null checker.
        return ContentChecker()

    def feed(self, block):
        self.hash.update(block)

    def is_valid(self):
        return self.hash.hexdigest() == self.expected

    def report(self, reporter, template):
        return reporter(template % self.hash_name)
class PackageIndex(Environment):
"""A distribution index that scans web pages for download URLs"""
    def __init__(
            self, index_url="https://pypi.python.org/simple", hosts=('*',),
            ca_bundle=None, verify_ssl=True, *args, **kw
    ):
        """Create an index scanner rooted at `index_url`.

        `hosts` is a sequence of glob patterns naming hosts that may be
        downloaded from; `ca_bundle` and `verify_ssl` control HTTPS
        certificate verification.
        """
        Environment.__init__(self, *args, **kw)
        # Slice trick: "/"[:True] == "/" and "/"[:False] == "", so a
        # trailing slash is appended only when one is missing.
        self.index_url = index_url + "/" [:not index_url.endswith('/')]
        self.scanned_urls = {}  # URLs already evaluated for distros
        self.fetched_urls = {}  # URLs whose pages were actually retrieved
        self.package_pages = {}  # project key -> {page URL: True}
        # Predicate matching netlocs allowed by the `hosts` glob patterns.
        self.allows = re.compile('|'.join(map(translate, hosts))).match
        self.to_scan = []  # URLs deferred until prescan(); None once "online"
        use_ssl = (
            verify_ssl
            and ssl_support.is_available
            and (ca_bundle or ssl_support.find_ca_bundle())
        )
        if use_ssl:
            self.opener = ssl_support.opener_for(ca_bundle)
        else:
            self.opener = urllib.request.urlopen
    def process_url(self, url, retrieve=False):
        """Evaluate a URL as a possible download, and maybe retrieve it"""
        # Skip URLs we've already looked at unless retrieval is forced.
        if url in self.scanned_urls and not retrieve:
            return
        self.scanned_urls[url] = True
        # Schemeless "URLs" are local files/directories.
        if not URL_SCHEME(url):
            self.process_filename(url)
            return
        else:
            dists = list(distros_for_url(url))
            if dists:
                if not self.url_ok(url):
                    return
                self.debug("Found link: %s", url)
        # Direct package links (or no-retrieve mode) don't need the page body.
        if dists or not retrieve or url in self.fetched_urls:
            list(map(self.add, dists))
            return  # don't need the actual page
        if not self.url_ok(url):
            self.fetched_urls[url] = True
            return
        self.info("Reading %s", url)
        self.fetched_urls[url] = True  # prevent multiple fetch attempts
        tmpl = "Download error on %s: %%s -- Some packages may not be found!"
        f = self.open_url(url, tmpl % url)
        if f is None:
            return
        self.fetched_urls[f.url] = True
        if 'html' not in f.headers.get('content-type', '').lower():
            f.close()  # not html, we can't process it
            return
        base = f.url  # handle redirects
        page = f.read()
        if not isinstance(page, str):  # We are in Python 3 and got bytes. We want str.
            if isinstance(f, urllib.error.HTTPError):
                # Errors have no charset, assume latin1:
                charset = 'latin-1'
            else:
                charset = f.headers.get_param('charset') or 'latin-1'
            page = page.decode(charset, "ignore")
        f.close()
        # Recursively scan every link found on the page.
        for match in HREF.finditer(page):
            link = urllib.parse.urljoin(base, htmldecode(match.group(1)))
            self.process_url(link)
        # Index pages get extra processing (unless the page 404'd).
        if url.startswith(self.index_url) and getattr(f, 'code', None) != 404:
            page = self.process_index(url, page)
def process_filename(self, fn, nested=False):
# process filenames or directories
if not os.path.exists(fn):
self.warn("Not found: %s", fn)
return
if os.path.isdir(fn) and not nested:
path = os.path.realpath(fn)
for item in os.listdir(path):
self.process_filename(os.path.join(path, item), True)
dists = distros_for_filename(fn)
if dists:
self.debug("Found: %s", fn)
list(map(self.add, dists))
def url_ok(self, url, fatal=False):
s = URL_SCHEME(url)
is_file = s and s.group(1).lower() == 'file'
if is_file or self.allows(urllib.parse.urlparse(url)[1]):
return True
msg = ("\nNote: Bypassing %s (disallowed host; see "
"http://bit.ly/1dg9ijs for details).\n")
if fatal:
raise DistutilsError(msg % url)
else:
self.warn(msg, url)
def scan_egg_links(self, search_path):
dirs = filter(os.path.isdir, search_path)
egg_links = (
(path, entry)
for path in dirs
for entry in os.listdir(path)
if entry.endswith('.egg-link')
)
list(itertools.starmap(self.scan_egg_link, egg_links))
    def scan_egg_link(self, path, entry):
        # An .egg-link file holds two non-empty lines: the egg path and the
        # setup path.
        with open(os.path.join(path, entry)) as raw_lines:
            # filter non-empty lines
            lines = list(filter(None, map(str.strip, raw_lines)))
            if len(lines) != 2:
                # format is not recognized; punt
                return
            egg_path, setup_path = lines
            # Register every distribution under the linked egg path as a
            # development (source) distribution.
            for dist in find_distributions(os.path.join(path, egg_path)):
                dist.location = os.path.join(path, *lines)
                dist.precedence = SOURCE_DIST
                self.add(dist)
    def process_index(self, url, page):
        """Process the contents of a PyPI page"""
        def scan(link):
            # Process a URL to see if it's for a package page; if so,
            # record it in package_pages and return (pkg, ver) filenames.
            if link.startswith(self.index_url):
                parts = list(map(
                    urllib.parse.unquote, link[len(self.index_url):].split('/')
                ))
                if len(parts) == 2 and '#' not in parts[1]:
                    # it's a package page, sanitize and index it
                    pkg = safe_name(parts[0])
                    ver = safe_version(parts[1])
                    self.package_pages.setdefault(pkg.lower(), {})[link] = True
                    return to_filename(pkg), to_filename(ver)
            return None, None
        # process an index page into the package-page index
        for match in HREF.finditer(page):
            try:
                scan(urllib.parse.urljoin(url, htmldecode(match.group(1))))
            except ValueError:
                pass
        pkg, ver = scan(url)  # ensure this page is in the page index
        if pkg:
            # process individual package page
            for new_url in find_external_links(url, page):
                # Process the found URL; unversioned .py links get the
                # package's "#egg=" fragment attached.
                base, frag = egg_info_for_url(new_url)
                if base.endswith('.py') and not frag:
                    if ver:
                        new_url += '#egg=%s-%s' % (pkg, ver)
                    else:
                        self.need_version_info(url)
                self.scan_url(new_url)
            # Rewrite legacy "?:action=show_md5" links into "#md5=" form.
            return PYPI_MD5.sub(
                lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1, 3, 2), page
            )
        else:
            return ""  # no sense double-scanning non-package pages
def need_version_info(self, url):
self.scan_all(
"Page at %s links to .py file(s) without version info; an index "
"scan is required.", url
)
def scan_all(self, msg=None, *args):
if self.index_url not in self.fetched_urls:
if msg:
self.warn(msg, *args)
self.info(
"Scanning index of all packages (this may take a while)"
)
self.scan_url(self.index_url)
def find_packages(self, requirement):
self.scan_url(self.index_url + requirement.unsafe_name + '/')
if not self.package_pages.get(requirement.key):
# Fall back to safe version of the name
self.scan_url(self.index_url + requirement.project_name + '/')
if not self.package_pages.get(requirement.key):
# We couldn't find the target package, so search the index page too
self.not_found_in_index(requirement)
for url in list(self.package_pages.get(requirement.key, ())):
# scan each page that might be related to the desired package
self.scan_url(url)
def obtain(self, requirement, installer=None):
self.prescan()
self.find_packages(requirement)
for dist in self[requirement.key]:
if dist in requirement:
return dist
self.debug("%s does not match %s", requirement, dist)
return super(PackageIndex, self).obtain(requirement, installer)
def check_hash(self, checker, filename, tfp):
"""
checker is a ContentChecker
"""
checker.report(self.debug,
"Validating %%s checksum for %s" % filename)
if not checker.is_valid():
tfp.close()
os.unlink(filename)
raise DistutilsError(
"%s validation failed for %s; "
"possible download problem?" % (
checker.hash.name, os.path.basename(filename))
)
def add_find_links(self, urls):
"""Add `urls` to the list that will be prescanned for searches"""
for url in urls:
if (
self.to_scan is None # if we have already "gone online"
or not URL_SCHEME(url) # or it's a local file/directory
or url.startswith('file:')
or list(distros_for_url(url)) # or a direct package link
):
# then go ahead and process it now
self.scan_url(url)
else:
# otherwise, defer retrieval till later
self.to_scan.append(url)
def prescan(self):
"""Scan urls scheduled for prescanning (e.g. --find-links)"""
if self.to_scan:
list(map(self.scan_url, self.to_scan))
self.to_scan = None # from now on, go ahead and process immediately
def not_found_in_index(self, requirement):
if self[requirement.key]: # we've seen at least one distro
meth, msg = self.info, "Couldn't retrieve index page for %r"
else: # no distros seen for this name, might be misspelled
meth, msg = (self.warn,
"Couldn't find index page for %r (maybe misspelled?)")
meth(msg, requirement.unsafe_name)
self.scan_all()
def download(self, spec, tmpdir):
"""Locate and/or download `spec` to `tmpdir`, returning a local path
`spec` may be a ``Requirement`` object, or a string containing a URL,
an existing local filename, or a project/version requirement spec
(i.e. the string form of a ``Requirement`` object). If it is the URL
of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
automatically created alongside the downloaded file.
If `spec` is a ``Requirement`` object or a string containing a
project/version requirement spec, this method returns the location of
a matching distribution (possibly after downloading it to `tmpdir`).
If `spec` is a locally existing file or directory name, it is simply
returned unchanged. If `spec` is a URL, it is downloaded to a subpath
of `tmpdir`, and the local filename is returned. Various errors may be
raised if a problem occurs during downloading.
"""
if not isinstance(spec, Requirement):
scheme = URL_SCHEME(spec)
if scheme:
# It's a url, download it to tmpdir
found = self._download_url(scheme.group(1), spec, tmpdir)
base, fragment = egg_info_for_url(spec)
if base.endswith('.py'):
found = self.gen_setup(found, fragment, tmpdir)
return found
elif os.path.exists(spec):
# Existing file or directory, just return it
return spec
else:
spec = parse_requirement_arg(spec)
return getattr(self.fetch_distribution(spec, tmpdir), 'location', None)
    def fetch_distribution(
            self, requirement, tmpdir, force_scan=False, source=False,
            develop_ok=False, local_index=None
    ):
        """Obtain a distribution suitable for fulfilling `requirement`

        `requirement` must be a ``pkg_resources.Requirement`` instance.
        If necessary, or if the `force_scan` flag is set, the requirement is
        searched for in the (online) package index as well as the locally
        installed packages. If a distribution matching `requirement` is found,
        the returned distribution's ``location`` is the value you would have
        gotten from calling the ``download()`` method with the matching
        distribution's URL or filename. If no matching distribution is found,
        ``None`` is returned.

        If the `source` flag is set, only source distributions and source
        checkout links will be considered. Unless the `develop_ok` flag is
        set, development and system eggs (i.e., those using the ``.egg-info``
        format) will be ignored.
        """
        # process a Requirement
        self.info("Searching for %s", requirement)
        skipped = {}
        dist = None
        def find(req, env=None):
            if env is None:
                env = self
            # Find a matching distribution; may be called more than once
            # as additional scans widen the set of known distributions.
            for dist in env[req.key]:
                if dist.precedence == DEVELOP_DIST and not develop_ok:
                    # Warn only once per skipped development/system egg.
                    if dist not in skipped:
                        self.warn("Skipping development or system egg: %s", dist)
                        skipped[dist] = 1
                    continue
                if dist in req and (dist.precedence <= SOURCE_DIST or not source):
                    dist.download_location = self.download(dist.location, tmpdir)
                    if os.path.exists(dist.download_location):
                        return dist
        # Progressively widen the search: forced scan, local index,
        # prescanned find-links, then a package-page scan as a last resort.
        if force_scan:
            self.prescan()
            self.find_packages(requirement)
            dist = find(requirement)
        if not dist and local_index is not None:
            dist = find(requirement, local_index)
        if dist is None:
            if self.to_scan is not None:
                self.prescan()
            dist = find(requirement)
        if dist is None and not force_scan:
            self.find_packages(requirement)
            dist = find(requirement)
        if dist is None:
            self.warn(
                "No local packages or working download links found for %s%s",
                (source and "a source distribution of " or ""),
                requirement,
            )
        else:
            self.info("Best match: %s", dist)
            # Return a clone pointing at the downloaded copy.
            return dist.clone(location=dist.download_location)
def fetch(self, requirement, tmpdir, force_scan=False, source=False):
    """Obtain a file suitable for fulfilling `requirement`

    DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
    backward compatibility, this routine is identical but returns the
    ``location`` of the downloaded distribution instead of a distribution
    object.
    """
    dist = self.fetch_distribution(requirement, tmpdir, force_scan, source)
    if dist is not None:
        return dist.location
    return None
def gen_setup(self, filename, fragment, tmpdir):
    # Synthesize a minimal setup.py for a plain ``.py`` download, taking
    # the project name and version from the ``#egg=name-version`` URL
    # fragment.
    match = EGG_FRAGMENT.match(fragment)
    dists = match and [
        d for d in
        interpret_distro_name(filename, match.group(1), None) if d.version
    ] or []
    if len(dists) == 1:  # unambiguous ``#egg`` fragment
        basename = os.path.basename(filename)
        # Make sure the file has been downloaded to the temp dir.
        if os.path.dirname(filename) != tmpdir:
            dst = os.path.join(tmpdir, basename)
            from setuptools.command.easy_install import samefile
            if not samefile(filename, dst):
                shutil.copy2(filename, dst)
                filename = dst
        with open(os.path.join(tmpdir, 'setup.py'), 'w') as file:
            file.write(
                "from setuptools import setup\n"
                "setup(name=%r, version=%r, py_modules=[%r])\n"
                % (
                    dists[0].project_name, dists[0].version,
                    os.path.splitext(basename)[0]
                )
            )
        return filename
    elif match:
        raise DistutilsError(
            "Can't unambiguously interpret project/version identifier %r; "
            "any dashes in the name or version should be escaped using "
            "underscores. %r" % (fragment, dists)
        )
    else:
        raise DistutilsError(
            "Can't process plain .py files without an '#egg=name-version'"
            " suffix to enable automatic setup script generation."
        )
# Chunk size (bytes) used when reading a download stream in _download_to().
dl_blocksize = 8192
def _download_to(self, url, filename):
    """Stream `url` into local file `filename`, verifying any ``#md5=``-style
    hash embedded in the URL, and return the response headers."""
    self.info("Downloading %s", url)
    # Download the file
    fp, info = None, None
    try:
        checker = HashChecker.from_url(url)
        fp = self.open_url(strip_fragment(url))
        if isinstance(fp, urllib.error.HTTPError):
            # open_url() returns (rather than raises) HTTP errors
            raise DistutilsError(
                "Can't download %s: %s %s" % (url, fp.code, fp.msg)
            )
        headers = fp.info()
        blocknum = 0
        bs = self.dl_blocksize
        size = -1
        if "content-length" in headers:
            # Some servers return multiple Content-Length headers :(
            sizes = get_all_headers(headers, 'Content-Length')
            size = max(map(int, sizes))
            self.reporthook(url, filename, blocknum, bs, size)
        with open(filename, 'wb') as tfp:
            while True:
                block = fp.read(bs)
                if block:
                    checker.feed(block)
                    tfp.write(block)
                    blocknum += 1
                    self.reporthook(url, filename, blocknum, bs, size)
                else:
                    break
            # Raises if the declared fragment hash doesn't match the data
            self.check_hash(checker, filename, tfp)
        return headers
    finally:
        if fp:
            fp.close()
def reporthook(self, url, filename, blocknum, blksize, size):
    # Download-progress callback; intentionally a no-op here so that
    # subclasses can override it to display progress.
    pass  # no-op
def open_url(self, url, warning=None):
    """Open `url`, handling ``file:`` URLs locally and translating network
    failures into either a warning (if `warning` is given, used as the log
    format string) or a ``DistutilsError``.

    Note that HTTP error responses are *returned* (as the HTTPError
    object), not raised, so callers must check for them.
    """
    if url.startswith('file:'):
        return local_open(url)
    try:
        return open_with_auth(url, self.opener)
    except (ValueError, http_client.InvalidURL) as v:
        msg = ' '.join([str(arg) for arg in v.args])
        if warning:
            self.warn(warning, msg)
        else:
            raise DistutilsError('%s %s' % (url, msg))
    except urllib.error.HTTPError as v:
        return v
    except urllib.error.URLError as v:
        if warning:
            self.warn(warning, v.reason)
        else:
            raise DistutilsError("Download error for %s: %s"
                                 % (url, v.reason))
    except http_client.BadStatusLine as v:
        if warning:
            self.warn(warning, v.line)
        else:
            raise DistutilsError(
                '%s returned a bad status line. The server might be '
                'down, %s' %
                (url, v.line)
            )
    except http_client.HTTPException as v:
        if warning:
            self.warn(warning, v)
        else:
            raise DistutilsError("Download error for %s: %s"
                                 % (url, v))
def _download_url(self, scheme, url, tmpdir):
    """Dispatch a download by URL scheme (svn/git/hg/file/HTTP) and return
    the local filename or checkout directory."""
    # Determine download filename
    #
    name, fragment = egg_info_for_url(url)
    if name:
        # Sanitize the name: collapse '..' sequences and backslashes so the
        # result cannot escape `tmpdir`.
        while '..' in name:
            name = name.replace('..', '.').replace('\\', '_')
    else:
        name = "__downloaded__"  # default if URL has no path contents
    if name.endswith('.egg.zip'):
        name = name[:-4]  # strip the extra .zip before download
    filename = os.path.join(tmpdir, name)
    # Download the file
    #
    if scheme == 'svn' or scheme.startswith('svn+'):
        return self._download_svn(url, filename)
    elif scheme == 'git' or scheme.startswith('git+'):
        return self._download_git(url, filename)
    elif scheme.startswith('hg+'):
        return self._download_hg(url, filename)
    elif scheme == 'file':
        return urllib.request.url2pathname(urllib.parse.urlparse(url)[2])
    else:
        self.url_ok(url, True)  # raises error if not allowed
        return self._attempt_download(url, filename)
def scan_url(self, url):
    # Process `url` unconditionally (retrieve=True), regardless of whether
    # it was already queued.
    self.process_url(url, True)
def _attempt_download(self, url, filename):
    headers = self._download_to(url, filename)
    # An HTML response is either an error page or a Subversion index page;
    # let _download_html() decide which.
    if 'html' in headers.get('content-type', '').lower():
        return self._download_html(url, headers, filename)
    else:
        return filename
def _download_html(self, url, headers, filename):
    """Inspect a downloaded HTML page: if it is a Subversion index page,
    redo the download as an svn checkout; otherwise it is unexpected HTML
    and an error is raised."""
    file = open(filename)
    for line in file:
        # Skip leading blank lines; decide based on the first non-blank one
        if line.strip():
            # Check for a subversion index page
            if re.search(r'<title>([^- ]+ - )?Revision \d+:', line):
                # it's a subversion index page:
                file.close()
                os.unlink(filename)
                return self._download_svn(url, filename)
            break  # not an index page
    file.close()
    os.unlink(filename)
    raise DistutilsError("Unexpected HTML page found at " + url)
def _download_svn(self, url, filename):
    """Check out `url` with the ``svn`` command-line client into `filename`,
    extracting any ``user:password@`` credentials into --username/--password
    options for ``svn:`` URLs."""
    url = url.split('#', 1)[0]  # remove any fragment for svn's sake
    creds = ''
    if url.lower().startswith('svn:') and '@' in url:
        scheme, netloc, path, p, q, f = urllib.parse.urlparse(url)
        if not netloc and path.startswith('//') and '/' in path[2:]:
            netloc, path = path[2:].split('/', 1)
            auth, host = splituser(netloc)
            if auth:
                if ':' in auth:
                    user, pw = auth.split(':', 1)
                    creds = " --username=%s --password=%s" % (user, pw)
                else:
                    creds = " --username=" + auth
                netloc = host
                # NOTE(review): `url` is passed as the *path* component here,
                # so the rebuilt URL embeds the original full URL; this looks
                # like it should be `path` instead -- confirm against
                # upstream setuptools before changing.
                parts = scheme, netloc, url, p, q, f
                url = urllib.parse.urlunparse(parts)
    self.info("Doing subversion checkout from %s to %s", url, filename)
    # NOTE(review): os.system() with an interpolated URL is shell-injectable
    # if the URL is attacker-controlled; subprocess with an argument list
    # would be safer.
    os.system("svn checkout%s -q %s %s" % (creds, url, filename))
    return filename
@staticmethod
def _vcs_split_rev_from_url(url, pop_prefix=False):
scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
scheme = scheme.split('+', 1)[-1]
# Some fragment identification fails
path = path.split('#', 1)[0]
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
# Also, discard fragment
url = urllib.parse.urlunsplit((scheme, netloc, path, query, ''))
return url, rev
def _download_git(self, url, filename):
    """Clone a ``git+...`` URL into `filename`, checking out the revision
    given by a trailing ``@rev``, if any."""
    filename = filename.split('#', 1)[0]
    url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
    self.info("Doing git clone from %s to %s", url, filename)
    # NOTE(review): os.system() with interpolated values is shell-injectable
    # for attacker-controlled URLs.
    os.system("git clone --quiet %s %s" % (url, filename))
    if rev is not None:
        self.info("Checking out %s", rev)
        os.system("(cd %s && git checkout --quiet %s)" % (
            filename,
            rev,
        ))
    return filename
def _download_hg(self, url, filename):
    """Clone an ``hg+...`` URL into `filename`, updating to the revision
    given by a trailing ``@rev``, if any."""
    filename = filename.split('#', 1)[0]
    url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True)
    self.info("Doing hg clone from %s to %s", url, filename)
    # NOTE(review): os.system() with interpolated values is shell-injectable
    # for attacker-controlled URLs.
    os.system("hg clone --quiet %s %s" % (url, filename))
    if rev is not None:
        self.info("Updating to %s", rev)
        os.system("(cd %s && hg up -C -r %s >&-)" % (
            filename,
            rev,
        ))
    return filename
# Thin logging wrappers delegating to the distutils-style `log` object.
def debug(self, msg, *args):
    log.debug(msg, *args)

def info(self, msg, *args):
    log.info(msg, *args)

def warn(self, msg, *args):
    # NOTE(review): `log.warn` is the deprecated alias of `log.warning` on
    # standard loggers; kept as-is since `log` here is the distutils logger.
    log.warn(msg, *args)
# This pattern matches a character entity reference (a decimal numeric
# reference, a hexadecimal numeric reference, or a named reference) and
# binds its substitution function for use by htmldecode().
entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub
def uchr(c):
    """Return the character for the codepoint `c`.

    Non-integer arguments (already-decoded strings) are passed through
    unchanged; codepoints above 255 go through ``six.unichr`` for Python 2
    compatibility.
    """
    if not isinstance(c, int):
        return c
    return six.unichr(c) if c > 255 else chr(c)
def decode_entity(match):
    """Translate one regex match of an HTML character entity (as produced
    by `entity_sub`'s pattern) into the character it represents; unknown
    named entities are returned verbatim."""
    what = match.group(1)
    if what.startswith('#x'):
        what = int(what[2:], 16)
    elif what.startswith('#'):
        what = int(what[1:])
    else:
        # Named entity; fall back to the full original text when unknown
        what = six.moves.html_entities.name2codepoint.get(what, match.group(0))
    return uchr(what)
def htmldecode(text):
    """Decode HTML entities in the given text."""
    return entity_sub(decode_entity, text)
def socket_timeout(timeout=15):
    """Decorator factory: run the wrapped callable with the global socket
    default timeout set to `timeout` seconds, restoring the previous
    default afterwards (even on error)."""
    def decorator(func):
        def wrapper(*args, **kwargs):
            saved = socket.getdefaulttimeout()
            socket.setdefaulttimeout(timeout)
            try:
                return func(*args, **kwargs)
            finally:
                socket.setdefaulttimeout(saved)
        return wrapper
    return decorator
def _encode_auth(auth):
"""
A function compatible with Python 2.3-3.3 that will encode
auth from a URL suitable for an HTTP header.
>>> str(_encode_auth('username%3Apassword'))
'dXNlcm5hbWU6cGFzc3dvcmQ='
Long auth strings should not cause a newline to be inserted.
>>> long_auth = 'username:' + 'password'*10
>>> chr(10) in str(_encode_auth(long_auth))
False
"""
auth_s = urllib.parse.unquote(auth)
# convert to bytes
auth_bytes = auth_s.encode()
# use the legacy interface for Python 2.3 support
encoded_bytes = base64.encodestring(auth_bytes)
# convert back to a string
encoded = encoded_bytes.decode()
# strip the trailing carriage return
return encoded.replace('\n', '')
class Credential(object):
    """
    A simple username/password holder that unpacks like a 2-tuple
    and renders as ``"username:password"``.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __iter__(self):
        return iter((self.username, self.password))

    def __str__(self):
        return '{0}:{1}'.format(self.username, self.password)
class PyPIConfig(configparser.RawConfigParser):
    def __init__(self):
        """
        Load from ~/.pypirc
        """
        # Ensure every section exposes the three keys even when absent
        defaults = dict.fromkeys(['username', 'password', 'repository'], '')
        configparser.RawConfigParser.__init__(self, defaults)
        rc = os.path.join(os.path.expanduser('~'), '.pypirc')
        if os.path.exists(rc):
            self.read(rc)

    @property
    def creds_by_repository(self):
        # Map repository URL -> Credential for every section that names a
        # repository.
        sections_with_repositories = [
            section for section in self.sections()
            if self.get(section, 'repository').strip()
        ]
        return dict(map(self._get_repo_cred, sections_with_repositories))

    def _get_repo_cred(self, section):
        # Build one (repository, Credential) pair from a config section
        repo = self.get(section, 'repository').strip()
        return repo, Credential(
            self.get(section, 'username').strip(),
            self.get(section, 'password').strip(),
        )

    def find_credential(self, url):
        """
        If the URL indicated appears to be a repository defined in this
        config, return the credential for that repository.
        """
        for repository, cred in self.creds_by_repository.items():
            if url.startswith(repository):
                return cred
def open_with_auth(url, opener=urllib.request.urlopen):
    """Open a urllib2 request, handling HTTP authentication"""
    scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
    # Double scheme does not raise on Mac OS X as revealed by a
    # failing test. We would expect "nonnumeric port". Refs #20.
    if netloc.endswith(':'):
        raise http_client.InvalidURL("nonnumeric port: ''")
    if scheme in ('http', 'https'):
        auth, host = splituser(netloc)
    else:
        auth = None
    if not auth:
        # Fall back to credentials configured in ~/.pypirc, if any
        cred = PyPIConfig().find_credential(url)
        if cred:
            auth = str(cred)
            info = cred.username, url
            log.info('Authenticating as %s for %s (from .pypirc)', *info)
    if auth:
        auth = "Basic " + _encode_auth(auth)
        # NOTE(review): `host` is only assigned in the http/https branch; a
        # non-http(s) URL matching a .pypirc credential would hit an
        # unbound `host` here -- confirm whether that can occur in practice.
        parts = scheme, host, path, params, query, frag
        new_url = urllib.parse.urlunparse(parts)
        request = urllib.request.Request(new_url)
        request.add_header("Authorization", auth)
    else:
        request = urllib.request.Request(url)
    request.add_header('User-Agent', user_agent)
    fp = opener(request)
    if auth:
        # Put authentication info back into request URL if same host,
        # so that links found on the page will work
        s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
        if s2 == scheme and h2 == host:
            parts = s2, netloc, path2, param2, query2, frag2
            fp.url = urllib.parse.urlunparse(parts)
    return fp
# Wrap open_with_auth in a socket timeout so a hung server cannot freeze
# package_index indefinitely.
open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth)
def fix_sf_url(url):
    """Return `url` unchanged; retained only for backward compatibility."""
    return url
def local_open(url):
    """Read a local path, with special support for directories"""
    scheme, server, path, param, query, frag = urllib.parse.urlparse(url)
    filename = urllib.request.url2pathname(path)
    if os.path.isfile(filename):
        return urllib.request.urlopen(url)
    elif path.endswith('/') and os.path.isdir(filename):
        # Directory: serve index.html if present, otherwise synthesize a
        # simple HTML listing of the directory contents.
        files = []
        for f in os.listdir(filename):
            filepath = os.path.join(filename, f)
            if f == 'index.html':
                with open(filepath, 'r') as fp:
                    body = fp.read()
                break
            elif os.path.isdir(filepath):
                f += '/'
            files.append('<a href="{name}">{name}</a>'.format(name=f))
        else:
            # No index.html found: emit the generated listing
            tmpl = ("<html><head><title>{url}</title>"
                    "</head><body>{files}</body></html>")
            body = tmpl.format(url=url, files='\n'.join(files))
        status, message = 200, "OK"
    else:
        status, message, body = 404, "Path not found", "Not found"
    headers = {'content-type': 'text/html'}
    body_stream = six.StringIO(body)
    # Both success and failure are conveyed via an HTTPError carrying the
    # body, matching what callers expect from open_url().
    return urllib.error.HTTPError(url, status, message, headers, body_stream)
|
mit
|
BeATz-UnKNoWN/python-for-android
|
python3-alpha/extra_modules/pyxmpp2/streambase.py
|
46
|
24972
|
#
# (C) Copyright 2003-2011 Jacek Konieczny <jajcus@jajcus.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
# pylint: disable-msg=W0201
"""Core XMPP stream functionality.
Normative reference:
- `RFC 6120 <http://xmpp.org/rfcs/rfc6120.html>`__
"""
__docformat__ = "restructuredtext en"
import inspect
import logging
import uuid
import re
import threading
from .etree import ElementTree, element_to_unicode
from .xmppparser import XMLStreamHandler
from .error import StreamErrorElement
from .jid import JID
from .exceptions import StreamError
from .exceptions import FatalStreamError, StreamParseError
from .constants import STREAM_QNP, XML_LANG_QNAME, STREAM_ROOT_TAG
from .settings import XMPPSettings
from .xmppserializer import serialize
from .streamevents import StreamConnectedEvent, GotFeaturesEvent
from .streamevents import AuthenticatedEvent, StreamRestartedEvent
from .stanzaprocessor import stanza_factory
from .interfaces import StreamFeatureHandler
from .interfaces import StreamFeatureHandled, StreamFeatureNotHandled
logger = logging.getLogger("pyxmpp2.streambase")

# Captures a language tag with its last subtag stripped (group 1), used to
# progressively shorten a peer's xml:lang until a configured match is found.
LANG_SPLIT_RE = re.compile(r"(.*)(?:-[a-zA-Z0-9])?-[a-zA-Z0-9]+$")

ERROR_TAG = STREAM_QNP + "error"
FEATURES_TAG = STREAM_QNP + "features"

# just to distinguish those from a domain name
# NOTE(review): the '.' in (\d+.) is unescaped, so it matches any character,
# not just a dot -- presumably meant to be '\.'; confirm before tightening.
IP_RE = re.compile(r"^((\d+.){3}\d+)|([0-9a-f]*:[0-9a-f:]*:[0-9a-f]*)$")
class StreamBase(XMLStreamHandler):
    """Base class for a generic XMPP stream.

    Responsible for establishing connection, parsing the stream, handling
    stream elements and passing stanzas received to other object.

    This doesn't provide any authentication or encryption (both required by
    the XMPP specification) and is not usable on its own.

    Whenever we say "stream" here we actually mean two streams
    (incoming and outgoing) of one connection, as defined by the XMPP
    specification.

    :Ivariables:
        - `authenticated`: `True` if local entity has authenticated to peer
        - `features`: stream features as announced by the receiver.
        - `handlers`: handlers for stream elements
        - `initiator`: `True` if local stream endpoint is the initiating entity.
        - `lock`: RLock object used to synchronize access to Stream object.
        - `me`: local stream endpoint JID.
        - `peer_authenticated`: `True` if the peer has authenticated to us
        - `peer_language`: language of human-readable stream content selected
          by the peer
        - `peer`: remote stream endpoint JID.
        - `settings`: stream settings
        - `stanza_namespace`: default namespace of the stream
        - `tls_established`: `True` when the stream is protected by TLS
        - `transport`: transport used by this stream
        - `version`: Negotiated version of the XMPP protocol. (0,9) for the
          legacy (pre-XMPP) Jabber protocol.
        - `_element_handlers`: mapping from stream element names to lists of
          methods handling them
        - `_input_state`: `None`, "open" (<stream:stream> has been received)
          "restart" or "closed" (</stream:stream> or EOF has been received)
        - `_output_state`: `None`, "open" (<stream:stream> has been sent)
          "restart" or "closed" (</stream:stream> or EOF has been sent)
        - `_stanza_namespace_p`: qname prefix of the stanza namespace
        - `_stream_feature_handlers`: stream features handlers
    :Types:
        - `authenticated`: `bool`
        - `features`: :etree:`ElementTree.Element`
        - `handlers`: `list`
        - `initiator`: `bool`
        - `lock`: :std:`threading.RLock`
        - `me`: `JID`
        - `peer_authenticated`: `bool`
        - `peer_language`: `str`
        - `peer`: `JID`
        - `settings`: XMPPSettings
        - `stanza_namespace`: `str`
        - `tls_established`: `bool`
        - `transport`: `transport.XMPPTransport`
        - `version`: (`int`, `int`) tuple
        - `_element_handlers`: `dict`
        - `_input_state`: `str`
        - `_output_state`: `str`
        - `_stanza_namespace_p`: `str`
        - `_stream_feature_handlers`: `list` of `StreamFeatureHandler`
    """
    # pylint: disable-msg=R0902,R0904
def __init__(self, stanza_namespace, stanza_route, handlers,
                                                    settings = None):
    """Initialize StreamBase object

    :Parameters:
      - `stanza_namespace`: stream's default namespace URI ("jabber:client"
        for client, "jabber:server" for server, etc.)
      - `stanza_route`: object to handle received stanzas
      - `handlers`: objects to handle the stream events and elements
      - `settings`: extra settings
    :Types:
      - `stanza_namespace`: `str`
      - `stanza_route`: `StanzaRoute`
      - `settings`: XMPPSettings
      - `handlers`: `list` of objects
    """
    XMLStreamHandler.__init__(self)
    self.lock = threading.RLock()
    if settings is None:
        settings = XMPPSettings()
    self.settings = settings
    self.stanza_namespace = stanza_namespace
    # Pre-computed "{namespace}" qname prefix, used to recognize stanzas
    self._stanza_namespace_p = "{{{0}}}".format(stanza_namespace)
    self.stanza_route = stanza_route
    self.handlers = handlers
    # Cache the handlers that participate in stream feature negotiation
    self._stream_feature_handlers = []
    for handler in handlers:
        if isinstance(handler, StreamFeatureHandler):
            self._stream_feature_handlers.append(handler)
    # Connection/negotiation state; see the class docstring for meanings
    self.me = None
    self.peer = None
    self.stream_id = None
    self.initiator = None
    self.features = None
    self.authenticated = False
    self.peer_authenticated = False
    self.tls_established = False
    self.auth_method_used = None
    self.version = None
    self.language = None
    self.peer_language = None
    self.transport = None
    self._input_state = None
    self._output_state = None
    self._element_handlers = {}
def initiate(self, transport, to = None):
    """Initiate an XMPP connection over the `transport`.

    :Parameters:
        - `transport`: an XMPP transport instance
        - `to`: peer name
    """
    with self.lock:
        self.initiator = True
        self.transport = transport
        transport.set_target(self)
        self.peer = JID(to) if to else None
        # If the transport is already connected, send the stream header now;
        # otherwise transport_connected() will do it later.
        if transport.is_connected():
            self._initiate()
def _initiate(self):
    """Initiate an XMPP connection over a connected `transport`.

    [ called with `lock` acquired ]
    """
    self._setup_stream_element_handlers()
    self._send_stream_start()
def receive(self, transport, myname):
    """Accept (receive) an XMPP connection over the `transport`.

    :Parameters:
        - `transport`: an XMPP transport instance
        - `myname`: local stream endpoint name.
    """
    with self.lock:
        self.initiator = False
        self.me = JID(myname)
        self.transport = transport
        transport.set_target(self)
        self._setup_stream_element_handlers()
def _setup_stream_element_handlers(self):
    """Set up stream element handlers.

    Scans the `handlers` list for `StreamFeatureHandler`
    instances and updates `_element_handlers` mapping with their
    methods decorated with @`stream_element_handler`
    """
    # pylint: disable-msg=W0212
    if self.initiator:
        mode = "initiator"
    else:
        mode = "receiver"
    self._element_handlers = {}
    for handler in self.handlers:
        if not isinstance(handler, StreamFeatureHandler):
            continue
        for _unused, meth in inspect.getmembers(handler, callable):
            # The decorator marks handler methods with these attributes
            if not hasattr(meth, "_pyxmpp_stream_element_handled"):
                continue
            element_handled = meth._pyxmpp_stream_element_handled
            if element_handled in self._element_handlers:
                # use only the first matching handler
                continue
            # Only register handlers valid for our side of the stream
            if meth._pyxmpp_usage_restriction in (None, mode):
                self._element_handlers[element_handled] = meth
def disconnect(self):
    """Gracefully close the connection."""
    with self.lock:
        self.transport.disconnect()
        self._output_state = "closed"
def event(self, event):  # pylint: disable-msg=R0201
    """Handle a stream event.

    Called when connection state is changed.

    Should not be called with self.lock acquired!
    """
    event.stream = self
    logger.debug("Stream event: {0}".format(event))
    self.settings["event_queue"].put(event)
    # Always report "not handled" so queued consumers still see the event
    return False
def transport_connected(self):
    """Called by the transport once the connection is established.

    For an initiating stream that has not yet sent its header, send
    the stream head now.
    """
    with self.lock:
        if self.initiator and self._output_state is None:
            self._initiate()
def close(self):
    """Forcibly close the connection and clear the stream state."""
    self.transport.close()
def stream_start(self, element):
    """Process <stream:stream> (stream start) tag received from peer.

    Validates the stream root element, negotiates the protocol version,
    selects a content language and (on the receiving side) replies with
    our own stream header and features.

    `lock` is acquired when this method is called.

    :Parameters:
        - `element`: root element (empty) created by the parser"""
    with self.lock:
        logger.debug("input document: " + element_to_unicode(element))
        if not element.tag.startswith(STREAM_QNP):
            self._send_stream_error("invalid-namespace")
            raise FatalStreamError("Bad stream namespace")
        if element.tag != STREAM_ROOT_TAG:
            self._send_stream_error("bad-format")
            raise FatalStreamError("Bad root element")
        if self._input_state == "restart":
            event = StreamRestartedEvent(self.peer)
        else:
            event = StreamConnectedEvent(self.peer)
        self._input_state = "open"
        # Negotiate protocol version; absence means legacy (pre-XMPP) 0.9
        version = element.get("version")
        if version:
            try:
                major, minor = version.split(".", 1)
                major, minor = int(major), int(minor)
            except ValueError:
                self._send_stream_error("unsupported-version")
                raise FatalStreamError("Unsupported protocol version.")
            self.version = (major, minor)
        else:
            self.version = (0, 9)
        if self.version[0] != 1 and self.version != (0, 9):
            self._send_stream_error("unsupported-version")
            raise FatalStreamError("Unsupported protocol version.")
        peer_lang = element.get(XML_LANG_QNAME)
        self.peer_language = peer_lang
        if not self.initiator:
            # Pick the best matching configured language by stripping
            # subtags from the peer's xml:lang until one matches.
            lang = None
            languages = self.settings["languages"]
            while peer_lang:
                if peer_lang in languages:
                    lang = peer_lang
                    break
                match = LANG_SPLIT_RE.match(peer_lang)
                if not match:
                    break
                # BUG FIX: group(0) is the whole, unchanged tag, so the loop
                # could never terminate; group(1) drops the last subtag so
                # the search actually narrows (e.g. "en-us-x" -> "en-us").
                peer_lang = match.group(1)
            if lang:
                self.language = lang
        if self.initiator:
            self.stream_id = element.get("id")
            peer = element.get("from")
            if peer:
                peer = JID(peer)
            if self.peer:
                if peer and peer != self.peer:
                    logger.debug("peer hostname mismatch: {0!r} != {1!r}"
                                            .format(peer, self.peer))
            self.peer = peer
        else:
            to = element.get("to")
            if to:
                to = self.check_to(to)
                if not to:
                    self._send_stream_error("host-unknown")
                    raise FatalStreamError('Bad "to"')
                self.me = JID(to)
            peer = element.get("from")
            if peer:
                peer = JID(peer)
            # Reply with our own stream header and advertise features
            self._send_stream_start(self.generate_id(), stream_to = peer)
            self._send_stream_features()
        self.event(event)
def stream_end(self):
    """Process </stream:stream> (stream end) tag received from peer.
    """
    logger.debug("Stream ended")
    with self.lock:
        self._input_state = "closed"
        self.transport.disconnect()
        self._output_state = "closed"
def stream_eof(self):
    """Process stream EOF.
    """
    # EOF is handled exactly like a clean </stream:stream>
    self.stream_end()
def stream_element(self, element):
    """Process first level child element of the stream.

    :Parameters:
        - `element`: XML element received
    :Types:
        - `element`: :etree:`ElementTree.Element`
    """
    with self.lock:
        self._process_element(element)
def stream_parse_error(self, descr):
    """Called when an error is encountered in the stream.

    Sends a "not-well-formed" stream error to the peer, then raises.

    :Parameters:
        - `descr`: description of the error
    :Types:
        - `descr`: `str`"""
    self.send_stream_error("not-well-formed")
    raise StreamParseError(descr)
def _send_stream_start(self, stream_id = None, stream_to = None):
    """Send stream start tag."""
    if self._output_state in ("open", "closed"):
        raise StreamError("Stream start already sent")
    if not self.language:
        self.language = self.settings["language"]
    if stream_to:
        stream_to = str(stream_to)
    elif self.peer and self.initiator:
        stream_to = str(self.peer)
    stream_from = None
    # Reveal our own JID only when safe: after TLS, or as the receiver
    if self.me and (self.tls_established or not self.initiator):
        stream_from = str(self.me)
    if stream_id:
        self.stream_id = stream_id
    else:
        self.stream_id = None
    self.transport.send_stream_head(self.stanza_namespace,
                            stream_from, stream_to,
                            self.stream_id, language = self.language)
    self._output_state = "open"
def send_stream_error(self, condition):
    """Send stream error element.

    :Parameters:
        - `condition`: stream error condition name, as defined in the
          XMPP specification.
    """
    with self.lock:
        self._send_stream_error(condition)
def _send_stream_error(self, condition):
    """Same as `send_stream_error`, but expects `lock` acquired.

    Sends our stream header first if it has not been sent yet, then the
    <stream:error/> element, and closes the stream.
    """
    # BUG FIX: the original used `is "closed"`, comparing string identity
    # instead of equality -- it only worked by accident of CPython string
    # interning (and is a SyntaxWarning on modern Pythons).
    if self._output_state == "closed":
        return
    if self._output_state in (None, "restart"):
        self._send_stream_start()
    element = StreamErrorElement(condition).as_xml()
    self.transport.send_element(element)
    self.transport.disconnect()
    self._output_state = "closed"
def _restart_stream(self):
    """Restart the stream as needed after SASL and StartTLS negotiation."""
    self._input_state = "restart"
    self._output_state = "restart"
    self.features = None
    self.transport.restart()
    # The initiator opens the new stream; the receiver waits for the peer
    if self.initiator:
        self._send_stream_start(self.stream_id)
def _make_stream_features(self):
    """Create the <features/> element for the stream.

    [receiving entity only]

    :returns: new <features/> element
    :returntype: :etree:`ElementTree.Element`"""
    features = ElementTree.Element(FEATURES_TAG)
    # Each feature handler contributes its own child elements
    for handler in self._stream_feature_handlers:
        handler.make_stream_features(self, features)
    return features
def _send_stream_features(self):
    """Send stream <features/>.

    [receiving entity only]"""
    self.features = self._make_stream_features()
    self._write_element(self.features)
def write_element(self, element):
    """Write XML `element` to the stream.

    :Parameters:
        - `element`: Element node to send.
    :Types:
        - `element`: :etree:`ElementTree.Element`
    """
    with self.lock:
        self._write_element(element)
def _write_element(self, element):
    """Same as `write_element` but with `lock` already acquired.
    """
    self.transport.send_element(element)
def send(self, stanza):
    """Write stanza to the stream.

    :Parameters:
        - `stanza`: XMPP stanza to send.
    :Types:
        - `stanza`: `pyxmpp2.stanza.Stanza`
    """
    with self.lock:
        return self._send(stanza)
def _send(self, stanza):
    """Same as `send` but assume `lock` is acquired."""
    # Let subclasses adjust implicit fields (e.g. clear 'from') first
    self.fix_out_stanza(stanza)
    element = stanza.as_xml()
    self._write_element(element)
def _process_element(self, element):
    """Process first level element of the stream.

    The element may be stream error or features, StartTLS
    request/response, SASL request/response or a stanza.

    :Parameters:
        - `element`: XML element
    :Types:
        - `element`: :etree:`ElementTree.Element`
    """
    tag = element.tag
    # Registered feature handlers get first chance at the element
    if tag in self._element_handlers:
        handler = self._element_handlers[tag]
        logger.debug("Passing element {0!r} to method {1!r}"
                                        .format(element, handler))
        handled = handler(self, element)
        if handled:
            return
    if tag.startswith(self._stanza_namespace_p):
        # A stanza in the stream's default namespace
        stanza = stanza_factory(element, self, self.language)
        self.uplink_receive(stanza)
    elif tag == ERROR_TAG:
        error = StreamErrorElement(element)
        self.process_stream_error(error)
    elif tag == FEATURES_TAG:
        logger.debug("Got features element: {0}".format(serialize(element)))
        self._got_features(element)
    else:
        logger.debug("Unhandled element: {0}".format(serialize(element)))
        logger.debug(" known handlers: {0!r}".format(
                                        self._element_handlers))
def uplink_receive(self, stanza):
    """Handle stanza received from the stream."""
    with self.lock:
        route = self.stanza_route
        if route:
            route.uplink_receive(stanza)
        else:
            # No route configured: drop the stanza, but leave a trace
            logger.debug("Stanza dropped (no route): {0!r}".format(stanza))
def process_stream_error(self, error):
    """Process stream error element received.

    Default implementation only logs the error; subclasses may override.

    :Parameters:
        - `error`: error received
    :Types:
        - `error`: `StreamErrorElement`
    """
    # pylint: disable-msg=R0201
    logger.debug("Unhandled stream error: condition: {0} {1!r}"
                            .format(error.condition_name, error.serialize()))
def check_to(self, to):
    """Check "to" attribute of received stream header.

    :return: `to` if it is equal to `me`, None otherwise.

    Should be overridden in derived classes which require other logic
    for handling that attribute."""
    return to if to == self.me else None
def generate_id(self):
    """Generate a random and unique stream ID.

    :return: the id string generated."""
    # pylint: disable-msg=R0201
    return "{0}".format(uuid.uuid4())
def _got_features(self, features):
    """Process incoming <stream:features/> element.

    [initiating entity only]

    The received features node is available in `features`."""
    self.features = features
    logger.debug("got features, passing to event handlers...")
    handled = self.event(GotFeaturesEvent(self.features))
    logger.debug(" handled: {0}".format(handled))
    if not handled:
        mandatory_handled = []
        mandatory_not_handled = []
        logger.debug(" passing to stream features handlers: {0}"
                                .format(self._stream_feature_handlers))
        for handler in self._stream_feature_handlers:
            ret = handler.handle_stream_features(self, self.features)
            if ret is None:
                continue
            elif isinstance(ret, StreamFeatureHandled):
                if ret.mandatory:
                    mandatory_handled.append(str(ret))
                    break
                # NOTE(review): scanning stops after the first handled
                # feature, mandatory or not -- confirm this matches the
                # intended negotiation order.
                break
            elif isinstance(ret, StreamFeatureNotHandled):
                if ret.mandatory:
                    mandatory_not_handled.append(str(ret))
                    break
            else:
                raise ValueError("Wrong value returned from a stream"
                                    " feature handler: {0!r}".format(ret))
        if mandatory_not_handled and not mandatory_handled:
            self.send_stream_error("unsupported-feature")
            raise FatalStreamError(
                    "Unsupported mandatory-to-implement features: "
                    + " ".join(mandatory_not_handled))
def is_connected(self):
    """Check if the stream is connected and stanzas may be sent.

    :return: True if stream connection is active."""
    return self.transport.is_connected() and self._output_state == "open"
def set_peer_authenticated(self, peer, restart_stream = False):
    """Mark the other side of the stream authenticated as `peer`

    :Parameters:
        - `peer`: local JID just authenticated
        - `restart_stream`: `True` when stream should be restarted (needed
          after SASL authentication)
    :Types:
        - `peer`: `JID`
        - `restart_stream`: `bool`
    """
    with self.lock:
        self.peer_authenticated = True
        self.peer = peer
        if restart_stream:
            self._restart_stream()
    # Emitted outside the lock: event() must not be called with the lock
    # held (see its docstring).
    self.event(AuthenticatedEvent(self.peer))
def set_authenticated(self, me, restart_stream = False):
    """Mark stream authenticated as `me`.

    :Parameters:
        - `me`: local JID just authenticated
        - `restart_stream`: `True` when stream should be restarted (needed
          after SASL authentication)
    :Types:
        - `me`: `JID`
        - `restart_stream`: `bool`
    """
    with self.lock:
        self.authenticated = True
        self.me = me
        if restart_stream:
            self._restart_stream()
    # Emitted outside the lock: event() must not be called with the lock
    # held (see its docstring).
    self.event(AuthenticatedEvent(self.me))
def fix_in_stanza(self, stanza):
    """Fix incoming stanza, setting the implicit fields.

    Used for the server side of a client stream to set proper stanza from.
    Default implementation is a no-op.
    """
    # pylint: disable-msg=R0201
    return stanza
def fix_out_stanza(self, stanza):
    """Fix outgoing stanza, setting or clearing the implicit fields.

    Used for the client side of a client stream to clear the 'from'
    attribute.  Default implementation is a no-op.
    """
    # pylint: disable-msg=R0201
    return stanza
def _languages_factory(settings):
"""Make the default value of the :r:`languages setting`."""
return [settings["language"]]
XMPPSettings.add_setting("language", type = str, default = "en",
cmdline_help = "Preferred language of the XMPP stream",
doc = """The preferred language of the XMPP stream."""
)
XMPPSettings.add_setting("languages", type = "list of ``unicode``",
validator = XMPPSettings.validate_string_list,
factory = _languages_factory,
cmdline_help = "Accepted languages of the XMPP stream",
doc = """When the remote entity selects one of these languages
on their stream, the same language will be sent in our stream declaration."""
)
XMPPSettings.add_setting("extra_ns_prefixes", type = "prefix -> uri mapping",
default = {},
doc = """Extra namespace prefix declarations to use at the stream root
element."""
)
# vi: sts=4 et sw=4
|
apache-2.0
|
brummer-simon/RIOT
|
tests/cond_order/tests/01-run.py
|
7
|
1506
|
#!/usr/bin/env python3
# Copyright (C) 2016 Kaspar Schleiser <kaspar@schleiser.de>
# Copyright (C) 2016 Oliver Hahm <oliver.hahm@inria.fr>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
NUM_THREADS = 5  # number of "waiting" threads the firmware under test reports
def testfunc(child):
    """Check that threads blocked on the condition variable wake in
    strictly increasing priority order, batch by batch."""
    # Collect one "waiting" line per created thread, of the form:
    #   T4 (prio 6): waiting on condition variable now
    priority_of = {}
    for _ in range(NUM_THREADS):
        child.expect(r"T(\d+) \(prio (\d+)\): waiting on condition variable now")
        tid = int(child.match.group(1))
        prio = int(child.match.group(2))
        priority_of[tid] = prio
    child.expect_exact("First batch was signaled")
    previous_prio = -1
    for woken in range(1, len(priority_of) + 1):
        child.expect(r"T(\d+) \(prio (\d+)\): condition variable was signaled now")
        tid = int(child.match.group(1))
        prio = int(child.match.group(2))
        # priority reported at wake-up must match the one reported at creation
        assert priority_of[tid] == prio
        # wake-ups must come in strictly increasing priority order
        assert prio > previous_prio
        previous_prio = prio
        if woken == 3:
            # after the third wake-up the first batch is complete
            child.expect_exact("First batch has woken up")
            child.expect_exact("Second batch was signaled")
    child.expect_exact("Second batch has woken up")
if __name__ == "__main__":
    # Hand control to the RIOT test runner and exit with its status code.
    sys.exit(run(testfunc))
|
lgpl-2.1
|
punktniklas/NiKom
|
Extras/Botcheck/striptelnetcmds.py
|
2
|
1118
|
#!/usr/bin/env python
# This script strips telnet command data from stdin.
import sys
state = "Normal"
for line in sys.stdin:
chars = []
for c in line:
if "Normal" == state:
if "\xff" == c:
state = "Command"
else:
chars.append(c)
elif "Command" == state:
command = c
if "\xff" == command:
# Command followed by command should output command
chars.append("\xff")
state = "Normal"
elif "\xfa" == command:
state = "SubOption"
elif command in ("\xfb", "\xfc", "\xfd", "\xfe"):
# WILL, WON'T, DO and DON'T is followed by a byte of option code
state = "OptionCode"
else:
state = "Normal"
elif "SubOption" == state:
# Bytes following a sub-option til next command are part of it
if "\xff" == c:
state = "Command"
elif "OptionCode" == state:
state = "Normal"
sys.stdout.write("".join(chars))
|
mit
|
TheNite/namebench
|
nb_third_party/jinja2/ext.py
|
199
|
21800
|
# -*- coding: utf-8 -*-
"""
jinja2.ext
~~~~~~~~~~
    Jinja extensions allow you to add custom tags, similar to the way Django
    custom tags work. By default two example extensions exist: an i18n and a
    cache extension.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD.
"""
from collections import deque
from jinja2 import nodes
from jinja2.defaults import *
from jinja2.environment import get_spontaneous_environment
from jinja2.runtime import Undefined, concat
from jinja2.exceptions import TemplateAssertionError, TemplateSyntaxError
from jinja2.utils import contextfunction, import_string, Markup, next
# the only real useful gettext functions for a Jinja template. Note
# that ugettext must be assigned to gettext as Jinja doesn't support
# non unicode strings.
GETTEXT_FUNCTIONS = ('_', 'gettext', 'ngettext')
class ExtensionRegistry(type):
    """Metaclass that stamps every extension class with a unique
    ``identifier`` built from its module and class name."""
    def __new__(cls, name, bases, d):
        new_cls = type.__new__(cls, name, bases, d)
        new_cls.identifier = '%s.%s' % (new_cls.__module__, new_cls.__name__)
        return new_cls
class Extension(object):
    """Base class for Jinja extensions.

    Extensions hook into the template system at the parser level. An
    instance is bound to exactly one environment and must not keep
    environment-specific state on `self`, because a copy may be re-bound
    to a different environment (for overlays) via `bind`.

    Extensions take no constructor arguments: they are created by the
    environment itself and identified by import name. Configuration
    therefore lives on the environment, under attribute names specific
    enough not to clash with other extensions (``fragment_cache_prefix``
    rather than ``prefix``, for example).
    """
    __metaclass__ = ExtensionRegistry
    #: set of tag names this extension's `parse` wants to handle.
    tags = set()
    #: priority of the extension; a lower value means higher priority.
    #: Mostly useful for extensions that preprocess source text.
    #:
    #: .. versionadded:: 2.4
    priority = 100
    def __init__(self, environment):
        self.environment = environment
    def bind(self, environment):
        """Return a copy of this extension bound to `environment`."""
        bound = object.__new__(self.__class__)
        bound.__dict__.update(self.__dict__)
        bound.environment = environment
        return bound
    def preprocess(self, source, name, filename=None):
        """Hook called before lexing; must return the (possibly rewritten)
        template source. `filename` is optional.
        """
        return source
    def filter_stream(self, stream):
        """Hook receiving a :class:`~jinja2.lexer.TokenStream`; must return
        an iterable of token objects, not necessarily a ``TokenStream``.

        See `inlinegettext.py` in the `ext` folder of the Jinja2 source
        distribution for an example filter built on this hook.
        """
        return stream
    def parse(self, parser):
        """Called when the parser hits one of the names in :attr:`tags`;
        the current stream token is the matching name token. Must return
        one node or a list of nodes.
        """
        raise NotImplementedError()
    def attr(self, name, lineno=None):
        """Return an attribute node referring to `name` on this extension,
        useful to pass constants into generated template code::

            self.attr('_my_attribute', lineno=lineno)
        """
        return nodes.ExtensionAttribute(self.identifier, name, lineno=lineno)
    def call_method(self, name, args=None, kwargs=None, dyn_args=None,
                    dyn_kwargs=None, lineno=None):
        """Shortcut for :meth:`attr` wrapped in :class:`jinja2.nodes.Call`."""
        call_args = [] if args is None else args
        call_kwargs = [] if kwargs is None else kwargs
        return nodes.Call(self.attr(name, lineno=lineno), call_args,
                          call_kwargs, dyn_args, dyn_kwargs, lineno=lineno)
@contextfunction
def _gettext_alias(context, string):
    """Resolve ``gettext`` from the template context and apply it."""
    gettext = context.resolve('gettext')
    return gettext(string)
class InternationalizationExtension(Extension):
    """This extension adds gettext support to Jinja2 via a ``{% trans %}``
    tag with an optional ``{% pluralize %}`` section."""
    tags = set(['trans'])
    # TODO: the i18n extension is currently reevaluating values in a few
    # situations. Take this example:
    # {% trans count=something() %}{{ count }} foo{% pluralize
    # %}{{ count }} fooss{% endtrans %}
    # something is called twice here. One time for the gettext value and
    # the other time for the n-parameter of the ngettext function.
    def __init__(self, environment):
        # Registers the '_' gettext alias and the install/uninstall/extract
        # helpers on the environment.
        Extension.__init__(self, environment)
        environment.globals['_'] = _gettext_alias
        environment.extend(
            install_gettext_translations=self._install,
            install_null_translations=self._install_null,
            uninstall_gettext_translations=self._uninstall,
            extract_translations=self._extract
        )
    def _install(self, translations):
        # Install (u)gettext/(u)ngettext from a translations object into
        # the environment globals; the unicode variants are preferred.
        gettext = getattr(translations, 'ugettext', None)
        if gettext is None:
            gettext = translations.gettext
        ngettext = getattr(translations, 'ungettext', None)
        if ngettext is None:
            ngettext = translations.ngettext
        self.environment.globals.update(gettext=gettext, ngettext=ngettext)
    def _install_null(self):
        # Install identity translations (no-op gettext, English-style
        # plural selection for ngettext).
        self.environment.globals.update(
            gettext=lambda x: x,
            ngettext=lambda s, p, n: (n != 1 and (p,) or (s,))[0]
        )
    def _uninstall(self, translations):
        # Remove the installed translation callables, if present.
        for key in 'gettext', 'ngettext':
            self.environment.globals.pop(key, None)
    def _extract(self, source, gettext_functions=GETTEXT_FUNCTIONS):
        # Accepts either template source text or an already-parsed AST.
        if isinstance(source, basestring):
            source = self.environment.parse(source)
        return extract_from_ast(source, gettext_functions)
    def parse(self, parser):
        """Parse a translatable tag."""
        lineno = next(parser.stream).lineno
        # find all the variables referenced. Additionally a variable can be
        # defined in the body of the trans block too, but this is checked at
        # a later state.
        plural_expr = None
        variables = {}
        while parser.stream.current.type != 'block_end':
            if variables:
                parser.stream.expect('comma')
            # skip colon for python compatibility
            if parser.stream.skip_if('colon'):
                break
            name = parser.stream.expect('name')
            if name.value in variables:
                parser.fail('translatable variable %r defined twice.' %
                            name.value, name.lineno,
                            exc=TemplateAssertionError)
            # expressions
            if parser.stream.current.type == 'assign':
                next(parser.stream)
                variables[name.value] = var = parser.parse_expression()
            else:
                variables[name.value] = var = nodes.Name(name.value, 'load')
            # the first variable becomes the default pluralization expression
            if plural_expr is None:
                plural_expr = var
        parser.stream.expect('block_end')
        plural = plural_names = None
        have_plural = False
        referenced = set()
        # now parse until endtrans or pluralize
        singular_names, singular = self._parse_block(parser, True)
        if singular_names:
            referenced.update(singular_names)
            if plural_expr is None:
                plural_expr = nodes.Name(singular_names[0], 'load')
        # if we have a pluralize block, we parse that too
        if parser.stream.current.test('name:pluralize'):
            have_plural = True
            next(parser.stream)
            if parser.stream.current.type != 'block_end':
                name = parser.stream.expect('name')
                if name.value not in variables:
                    parser.fail('unknown variable %r for pluralization' %
                                name.value, name.lineno,
                                exc=TemplateAssertionError)
                plural_expr = variables[name.value]
            parser.stream.expect('block_end')
            plural_names, plural = self._parse_block(parser, False)
            next(parser.stream)
            referenced.update(plural_names)
        else:
            next(parser.stream)
        # register free names as simple name expressions
        for var in referenced:
            if var not in variables:
                variables[var] = nodes.Name(var, 'load')
        # no variables referenced? no need to escape
        if not referenced:
            singular = singular.replace('%%', '%')
            if plural:
                plural = plural.replace('%%', '%')
        if not have_plural:
            plural_expr = None
        elif plural_expr is None:
            parser.fail('pluralize without variables', lineno)
        if variables:
            variables = nodes.Dict([nodes.Pair(nodes.Const(x, lineno=lineno), y)
                                    for x, y in variables.items()])
        else:
            variables = None
        node = self._make_node(singular, plural, variables, plural_expr)
        node.set_lineno(lineno)
        return node
    def _parse_block(self, parser, allow_pluralize):
        """Parse until the next block tag with a given name."""
        referenced = []
        buf = []
        while 1:
            if parser.stream.current.type == 'data':
                # escape literal percent signs for the later %-formatting
                buf.append(parser.stream.current.value.replace('%', '%%'))
                next(parser.stream)
            elif parser.stream.current.type == 'variable_begin':
                next(parser.stream)
                name = parser.stream.expect('name').value
                referenced.append(name)
                buf.append('%%(%s)s' % name)
                parser.stream.expect('variable_end')
            elif parser.stream.current.type == 'block_begin':
                next(parser.stream)
                if parser.stream.current.test('name:endtrans'):
                    break
                elif parser.stream.current.test('name:pluralize'):
                    if allow_pluralize:
                        break
                    parser.fail('a translatable section can have only one '
                                'pluralize section')
                parser.fail('control structures in translatable sections are '
                            'not allowed')
            elif parser.stream.eos:
                parser.fail('unclosed translation block')
            else:
                assert False, 'internal parser error'
        return referenced, concat(buf)
    def _make_node(self, singular, plural, variables, plural_expr):
        """Generates a useful node from the data provided."""
        # singular only:
        if plural_expr is None:
            gettext = nodes.Name('gettext', 'load')
            node = nodes.Call(gettext, [nodes.Const(singular)],
                              [], None, None)
        # singular and plural
        else:
            ngettext = nodes.Name('ngettext', 'load')
            node = nodes.Call(ngettext, [
                nodes.Const(singular),
                nodes.Const(plural),
                plural_expr
            ], [], None, None)
        # mark the return value as safe if we are in an
        # environment with autoescaping turned on
        if self.environment.autoescape:
            node = nodes.MarkSafe(node)
        if variables:
            node = nodes.Mod(node, variables)
        return nodes.Output([node])
class ExprStmtExtension(Extension):
    """Adds a ``do`` tag that evaluates an expression like the print
    statement would, but discards the result instead of emitting it.
    """
    tags = set(['do'])
    def parse(self, parser):
        lineno = next(parser.stream).lineno
        node = nodes.ExprStmt(lineno=lineno)
        node.node = parser.parse_tuple()
        return node
class LoopControlExtension(Extension):
    """Makes ``{% break %}`` and ``{% continue %}`` available in loops."""
    tags = set(['break', 'continue'])
    def parse(self, parser):
        token = next(parser.stream)
        # the tag name itself decides which node type to emit
        node_cls = nodes.Break if token.value == 'break' else nodes.Continue
        return node_cls(lineno=token.lineno)
class WithExtension(Extension):
    """Adds a django-style ``{% with %}`` block that opens a new scope."""
    tags = set(['with'])
    def parse(self, parser):
        node = nodes.Scope(lineno=next(parser.stream).lineno)
        assignments = []
        while parser.stream.current.type != 'block_end':
            # note: for every assignment after the first this is the
            # lineno of the separating comma token, as in the original
            lineno = parser.stream.current.lineno
            if assignments:
                parser.stream.expect('comma')
            target = parser.parse_assign_target()
            parser.stream.expect('assign')
            expr = parser.parse_expression()
            assignments.append(nodes.Assign(target, expr, lineno=lineno))
        body = parser.parse_statements(('name:endwith',), drop_needle=True)
        node.body = assignments + list(body)
        return node
class AutoEscapeExtension(Extension):
    """Implements ``{% autoescape %}`` blocks that change the escaping
    rules for the enclosed scope only.
    """
    tags = set(['autoescape'])
    def parse(self, parser):
        lineno = next(parser.stream).lineno
        node = nodes.ScopedEvalContextModifier(lineno=lineno)
        node.options = [nodes.Keyword('autoescape', parser.parse_expression())]
        node.body = parser.parse_statements(('name:endautoescape',),
                                            drop_needle=True)
        return nodes.Scope([node])
def extract_from_ast(node, gettext_functions=GETTEXT_FUNCTIONS,
                     babel_style=True):
    """Extract localizable strings from the given template node.

    By default matches are returned in babel style: non-string parameters
    as well as keyword arguments become `None`, which lets Babel figure
    out what was meant for gettext functions that take keyword arguments
    for placeholder expansion. With `babel_style=False` only strings are
    returned and parameters are always stored in tuples; invalid gettext
    calls (without any string parameter, or with string parameters after
    non-string ones) are skipped.

    >>> from jinja2 import Environment
    >>> env = Environment()
    >>> node = env.parse('{{ (_("foo"), _(), ngettext("foo", "bar", 42)) }}')
    >>> list(extract_from_ast(node))
    [(1, '_', 'foo'), (1, '_', ()), (1, 'ngettext', ('foo', 'bar', None))]
    >>> list(extract_from_ast(node, babel_style=False))
    [(1, '_', ('foo',)), (1, 'ngettext', ('foo', 'bar'))]

    For every string found, a ``(lineno, function, message)`` tuple is
    yielded, where ``lineno`` is the line the string was found on,
    ``function`` is the gettext function name used and ``message`` is the
    string itself (a ``unicode`` object, or a tuple of them for functions
    with several string arguments).

    This function operates on the AST and therefore cannot extract any
    comments; use the babel extraction interface (or extract comments
    yourself) if you need them.
    """
    for call in node.find_all(nodes.Call):
        callee = call.node
        if not isinstance(callee, nodes.Name) or \
           callee.name not in gettext_functions:
            continue
        strings = []
        for arg in call.args:
            if isinstance(arg, nodes.Const) and \
               isinstance(arg.value, basestring):
                strings.append(arg.value)
            else:
                strings.append(None)
        # keyword and dynamic arguments never contribute extractable text
        for arg in call.kwargs:
            strings.append(None)
        if call.dyn_args is not None:
            strings.append(None)
        if call.dyn_kwargs is not None:
            strings.append(None)
        if not babel_style:
            strings = tuple(x for x in strings if x is not None)
            if not strings:
                continue
        elif len(strings) == 1:
            strings = strings[0]
        else:
            strings = tuple(strings)
        yield call.lineno, callee.name, strings
class _CommentFinder(object):
"""Helper class to find comments in a token stream. Can only
find comments for gettext calls forwards. Once the comment
from line 4 is found, a comment for line 1 will not return a
usable value.
"""
def __init__(self, tokens, comment_tags):
self.tokens = tokens
self.comment_tags = comment_tags
self.offset = 0
self.last_lineno = 0
def find_backwards(self, offset):
try:
for _, token_type, token_value in \
reversed(self.tokens[self.offset:offset]):
if token_type in ('comment', 'linecomment'):
try:
prefix, comment = token_value.split(None, 1)
except ValueError:
continue
if prefix in self.comment_tags:
return [comment.rstrip()]
return []
finally:
self.offset = offset
def find_comments(self, lineno):
if not self.comment_tags or self.last_lineno > lineno:
return []
for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset:]):
if token_lineno > lineno:
return self.find_backwards(self.offset + idx)
return self.find_backwards(len(self.tokens))
def babel_extract(fileobj, keywords, comment_tags, options):
    """Babel extraction method for Jinja templates.

    .. versionchanged:: 2.3
       Basic support for translation comments was added. If `comment_tags`
       is now set to a list of keywords for extraction, the extractor will
       try to find the best preceeding comment that begins with one of the
       keywords. For best results, make sure to not have more than one
       gettext call in one line of code and the matching comment in the
       same line or the line before.

    :param fileobj: the file-like object the messages should be extracted from
    :param keywords: a list of keywords (i.e. function names) that should be
                     recognized as translation functions
    :param comment_tags: a list of translator tags to search for and include
                         in the results.
    :param options: a dictionary of additional options (optional)
    :return: an iterator over ``(lineno, funcname, message, comments)`` tuples.
             (comments will be empty currently)
    """
    # Build the extension set from the comma-separated 'extensions' option,
    # always ensuring the i18n extension itself is present.
    extensions = set()
    for extension in options.get('extensions', '').split(','):
        extension = extension.strip()
        if not extension:
            continue
        extensions.add(import_string(extension))
    if InternationalizationExtension not in extensions:
        extensions.add(InternationalizationExtension)
    # Construct a throwaway environment configured from the Babel options.
    environment = get_spontaneous_environment(
        options.get('block_start_string', BLOCK_START_STRING),
        options.get('block_end_string', BLOCK_END_STRING),
        options.get('variable_start_string', VARIABLE_START_STRING),
        options.get('variable_end_string', VARIABLE_END_STRING),
        options.get('comment_start_string', COMMENT_START_STRING),
        options.get('comment_end_string', COMMENT_END_STRING),
        options.get('line_statement_prefix') or LINE_STATEMENT_PREFIX,
        options.get('line_comment_prefix') or LINE_COMMENT_PREFIX,
        str(options.get('trim_blocks', TRIM_BLOCKS)).lower() in \
            ('1', 'on', 'yes', 'true'),
        NEWLINE_SEQUENCE, frozenset(extensions),
        # fill with defaults so that environments are shared
        # with other spontaneus environments. The rest of the
        # arguments are optimizer, undefined, finalize, autoescape,
        # loader, cache size, auto reloading setting and the
        # bytecode cache
        True, Undefined, None, False, None, 0, False, None
    )
    source = fileobj.read().decode(options.get('encoding', 'utf-8'))
    try:
        node = environment.parse(source)
        tokens = list(environment.lex(environment.preprocess(source)))
    except TemplateSyntaxError, e:
        # skip templates with syntax errors
        return
    finder = _CommentFinder(tokens, comment_tags)
    for lineno, func, message in extract_from_ast(node, keywords):
        yield lineno, func, message, finder.find_comments(lineno)
#: nicer import names, so the extensions can be referred to as e.g.
#: "jinja2.ext.i18n" in an environment's extension list
i18n = InternationalizationExtension
do = ExprStmtExtension
loopcontrols = LoopControlExtension
with_ = WithExtension
autoescape = AutoEscapeExtension
|
apache-2.0
|
isaac-s/cloudify-plugins-common
|
cloudify/constants.py
|
2
|
1540
|
########
# Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Environment-variable keys used to pass manager/runtime configuration.
MANAGER_FILE_SERVER_URL_KEY = 'MANAGER_FILE_SERVER_URL'
MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL_KEY = \
    'MANAGER_FILE_SERVER_BLUEPRINTS_ROOT_URL'
# NOTE: the variable name deliberately differs from the constant name here.
MANAGER_IP_KEY = 'MANAGEMENT_IP'
MANAGER_REST_PORT_KEY = 'MANAGER_REST_PORT'
CELERY_BROKER_URL_KEY = 'CELERY_BROKER_URL'
VIRTUALENV_PATH_KEY = 'VIRTUALENV_PATH'
CELERY_WORK_DIR_PATH_KEY = 'CELERY_WORK_DIR_PATH'
# Supported agent installation methods.
AGENT_INSTALL_METHOD_NONE = 'none'
AGENT_INSTALL_METHOD_REMOTE = 'remote'
AGENT_INSTALL_METHOD_INIT_SCRIPT = 'init_script'
AGENT_INSTALL_METHOD_PROVIDED = 'provided'
AGENT_INSTALL_METHODS = [
    AGENT_INSTALL_METHOD_NONE,
    AGENT_INSTALL_METHOD_REMOTE,
    AGENT_INSTALL_METHOD_INIT_SCRIPT,
    AGENT_INSTALL_METHOD_PROVIDED
]
# Subset of methods that install the agent via a generated script.
AGENT_INSTALL_METHODS_SCRIPTS = [
    AGENT_INSTALL_METHOD_INIT_SCRIPT,
    AGENT_INSTALL_METHOD_PROVIDED
]
COMPUTE_NODE_TYPE = 'cloudify.nodes.Compute'
# RabbitMQ broker ports, with and without TLS.
BROKER_PORT_NO_SSL = 5672
BROKER_PORT_SSL = 5671
|
apache-2.0
|
timm/timmnix
|
pypy3-v5.5.0-linux64/lib-python/3/ctypes/util.py
|
1
|
8948
|
import sys, os
import contextlib
import subprocess
# find_library(name) returns the pathname of a library, or None.
if os.name == "nt":
    def _get_build_version():
        """Return the version of MSVC that was used to build Python.

        For Python 2.3 and up, the version number is included in
        sys.version. For earlier versions, assume the compiler is MSVC 6.
        """
        # This function was copied from Lib/distutils/msvccompiler.py
        prefix = "MSC v."
        i = sys.version.find(prefix)
        if i == -1:
            return 6
        i = i + len(prefix)
        s, rest = sys.version[i:].split(" ", 1)
        majorVersion = int(s[:-2]) - 6
        minorVersion = int(s[2:3]) / 10.0
        # I don't think paths are affected by minor version in version 6
        if majorVersion == 6:
            minorVersion = 0
        if majorVersion >= 6:
            return majorVersion + minorVersion
        # else we don't know what version of the compiler this is
        return None
    def find_msvcrt():
        """Return the name of the VC runtime dll"""
        version = _get_build_version()
        if version is None:
            # better be safe than sorry
            return None
        if version <= 6:
            clibname = 'msvcrt'
        else:
            clibname = 'msvcr%d' % (version * 10)
        # If python was built with in debug mode
        import importlib.machinery
        if '_d.pyd' in importlib.machinery.EXTENSION_SUFFIXES:
            clibname += 'd'
        return clibname+'.dll'
    def find_library(name):
        # 'c' and 'm' map to the VC runtime on Windows.
        if name in ('c', 'm'):
            return find_msvcrt()
        # See MSDN for the REAL search order.
        for directory in os.environ['PATH'].split(os.pathsep):
            fname = os.path.join(directory, name)
            if os.path.isfile(fname):
                return fname
            if fname.lower().endswith(".dll"):
                continue
            # retry with an explicit .dll suffix appended
            fname = fname + ".dll"
            if os.path.isfile(fname):
                return fname
        return None
if os.name == "ce":
    # search path according to MSDN:
    # - absolute path specified by filename
    # - The .exe launch directory
    # - the Windows directory
    # - ROM dll files (where are they?)
    # - OEM specified search path: HKLM\Loader\SystemPath
    def find_library(name):
        # Windows CE: no search is performed here; the bare name is
        # returned and the loader resolves it per the MSDN order above.
        return name
if os.name == "posix" and sys.platform == "darwin":
    from ctypes.macholib.dyld import dyld_find as _dyld_find
    def find_library(name):
        # Try the common Darwin naming conventions for `name` and let
        # dyld_find resolve each candidate; the first hit wins.
        possible = ['lib%s.dylib' % name,
                    '%s.dylib' % name,
                    '%s.framework/%s' % (name, name)]
        for name in possible:
            try:
                return _dyld_find(name)
            except ValueError:
                continue
        return None
elif os.name == "posix":
    # Andreas Degert's find functions, using gcc, /sbin/ldconfig, objdump
    import re, errno
    def _findLib_gcc(name):
        """Ask gcc (or cc) to link -l<name> with -Wl,-t tracing and scan
        the trace output for the resolved library path."""
        import tempfile
        expr = r'[^\(\)\s]*lib%s\.[^\(\)\s]*' % re.escape(name)
        fdout, ccout = tempfile.mkstemp()
        os.close(fdout)
        # exit code 10 signals that neither gcc nor cc was found
        cmd = 'if type gcc >/dev/null 2>&1; then CC=gcc; elif type cc >/dev/null 2>&1; then CC=cc;else exit 10; fi;' \
              'LANG=C LC_ALL=C $CC -Wl,-t -o ' + ccout + ' 2>&1 -l' + name
        try:
            f = os.popen(cmd)
            try:
                trace = f.read()
            finally:
                rv = f.close()
        finally:
            # always remove the temporary output file; ignore it being gone
            try:
                os.unlink(ccout)
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
        if rv == 10:
            raise OSError('gcc or cc command not found')
        res = re.search(expr, trace)
        if not res:
            return None
        return res.group(0)
    if sys.platform == "sunos5":
        # use /usr/ccs/bin/dump on solaris
        def _get_soname(f):
            """Return the SONAME recorded in library file `f`, or None."""
            if not f:
                return None
            cmd = "/usr/ccs/bin/dump -Lpv 2>/dev/null " + f
            with contextlib.closing(os.popen(cmd)) as f:
                data = f.read()
            res = re.search(r'\[.*\]\sSONAME\s+([^\s]+)', data)
            if not res:
                return None
            return res.group(1)
    else:
        def _get_soname(f):
            """Return the SONAME from `f`'s .dynamic section, or None."""
            # assuming GNU binutils / ELF
            if not f:
                return None
            # exit code 10 signals that objdump is not installed
            cmd = 'if ! type objdump >/dev/null 2>&1; then exit 10; fi;' \
                  "objdump -p -j .dynamic 2>/dev/null " + f
            f = os.popen(cmd)
            dump = f.read()
            rv = f.close()
            if rv == 10:
                raise OSError('objdump command not found')
            res = re.search(r'\sSONAME\s+([^\s]+)', dump)
            if not res:
                return None
            return res.group(1)
    if sys.platform.startswith(("freebsd", "openbsd", "dragonfly")):
        def _num_version(libname):
            # "libxyz.so.MAJOR.MINOR" => [ MAJOR, MINOR ]
            parts = libname.split(".")
            nums = []
            try:
                while parts:
                    nums.insert(0, int(parts.pop()))
            except ValueError:
                pass
            # no numeric suffix sorts last (treated as highest version)
            return nums or [ sys.maxsize ]
        def find_library(name):
            # Scan the ldconfig hints and pick the highest-versioned match;
            # fall back to the gcc/objdump based search.
            ename = re.escape(name)
            expr = r':-l%s\.\S+ => \S*/(lib%s\.\S+)' % (ename, ename)
            with contextlib.closing(os.popen('/sbin/ldconfig -r 2>/dev/null')) as f:
                data = f.read()
            res = re.findall(expr, data)
            if not res:
                return _get_soname(_findLib_gcc(name))
            res.sort(key=_num_version)
            return res[-1]
elif sys.platform == "sunos5":
def _findLib_crle(name, is64):
if not os.path.exists('/usr/bin/crle'):
return None
if is64:
cmd = 'env LC_ALL=C /usr/bin/crle -64 2>/dev/null'
else:
cmd = 'env LC_ALL=C /usr/bin/crle 2>/dev/null'
for line in os.popen(cmd).readlines():
line = line.strip()
if line.startswith('Default Library Path (ELF):'):
paths = line.split()[4]
if not paths:
return None
for dir in paths.split(":"):
libfile = os.path.join(dir, "lib%s.so" % name)
if os.path.exists(libfile):
return libfile
return None
def find_library(name, is64 = False):
return _get_soname(_findLib_crle(name, is64) or _findLib_gcc(name))
else:
def _findSoname_ldconfig(name):
import struct
if struct.calcsize('l') == 4:
machine = os.uname().machine + '-32'
else:
machine = os.uname().machine + '-64'
mach_map = {
'x86_64-64': 'libc6,x86-64',
'ppc64-64': 'libc6,64bit',
'sparc64-64': 'libc6,64bit',
's390x-64': 'libc6,64bit',
'ia64-64': 'libc6,IA-64',
}
abi_type = mach_map.get(machine, 'libc6')
# XXX assuming GLIBC's ldconfig (with option -p)
regex = os.fsencode(
'\s+(lib%s\.[^\s]+)\s+\(%s' % (re.escape(name), abi_type))
try:
with subprocess.Popen(['/sbin/ldconfig', '-p'],
stdin=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
stdout=subprocess.PIPE,
env={'LC_ALL': 'C', 'LANG': 'C'}) as p:
res = re.search(regex, p.stdout.read())
if res:
return os.fsdecode(res.group(1))
except OSError:
pass
def find_library(name):
return _findSoname_ldconfig(name) or _get_soname(_findLib_gcc(name))
################################################################
# test code
def test():
    """Ad-hoc smoke test: print the runtime libraries found on this
    platform. Output order is part of the expected behavior."""
    from ctypes import cdll
    if os.name == "nt":
        print(cdll.msvcrt)
        print(cdll.load("msvcrt"))
        print(find_library("msvcrt"))
    if os.name == "posix":
        # find and load_version
        print(find_library("m"))
        print(find_library("c"))
        print(find_library("bz2"))
        # getattr
##        print cdll.m
##        print cdll.bz2
        # load
        if sys.platform == "darwin":
            print(cdll.LoadLibrary("libm.dylib"))
            print(cdll.LoadLibrary("libcrypto.dylib"))
            print(cdll.LoadLibrary("libSystem.dylib"))
            print(cdll.LoadLibrary("System.framework/System"))
        else:
            print(cdll.LoadLibrary("libm.so"))
            print(cdll.LoadLibrary("libcrypt.so"))
            print(find_library("crypt"))
if __name__ == "__main__":
    test()
|
mit
|
ForeverWintr/ImageClassipy
|
clouds/tests/util/util.py
|
1
|
1283
|
"""
Test utils
"""
import tempfile
import PIL
import numpy as np
from clouds.util.constants import HealthStatus
def createXors(tgt):
    """Create XOR test images in directory `tgt`.

    Each of the four 2x2 grayscale patterns is written 20 times (pybrain
    needs a lot of test input). Returns a list of (path, expected
    HealthStatus) tuples.
    """
    #create test xor images
    xorIn = [
        ((255, 255, 255, 255), HealthStatus.GOOD),
        ((255, 255, 0, 0), HealthStatus.CLOUDY),
        ((0, 0, 0, 0), HealthStatus.GOOD),
        ((0, 0, 255, 255), HealthStatus.CLOUDY),
    ]
    xorImages = []
    for ar, expected in xorIn:
        npar = np.array(ar, dtype=np.uint8).reshape(2, 2)
        image = PIL.Image.fromarray(npar)
        #pybrain needs a lot of test input. We'll make 20 of each image
        for i in range(20):
            # FIX: tempfile.mktemp is deprecated and race-prone (the name
            # may be claimed between creation and use); NamedTemporaryFile
            # creates the file atomically, and PIL then rewrites the path.
            with tempfile.NamedTemporaryFile(suffix=".png", prefix='xor_',
                                             dir=tgt, delete=False) as tmp:
                path = tmp.name
            image.save(path)
            xorImages.append((path, expected))
    return xorImages
class MockStream(object):
    """Stand-in for a stream object, backed by a queue.

    Items put on `inputQueue` become available through `readline` as
    they arrive.
    """
    def __init__(self, inputQueue):
        self.q = inputQueue
    def read(self):
        # One queue item, exploded into a list of its characters.
        return list(self.readline())
    def readline(self):
        """Block until an item appears in the queue, then return it."""
        return self.q.get()
    def close(self):
        # Nothing to release; exists only to satisfy the stream interface.
        pass
|
mit
|
jmehnle/ansible
|
lib/ansible/modules/cloud/google/gcdns_record.py
|
49
|
28445
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 CallFire Inc.
#
# This file is part of Ansible.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
# Documentation
################################################################################
# Ansible module metadata (schema version 1.0): community-supported,
# "preview" stability status.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcdns_record
short_description: Creates or removes resource records in Google Cloud DNS
description:
- Creates or removes resource records in Google Cloud DNS.
version_added: "2.2"
author: "William Albert (@walbert947)"
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.19.0"
options:
state:
description:
- Whether the given resource record should or should not be present.
required: false
choices: ["present", "absent"]
default: "present"
record:
description:
- The fully-qualified domain name of the resource record.
required: true
aliases: ['name']
zone:
description:
- The DNS domain name of the zone (e.g., example.com).
- One of either I(zone) or I(zone_id) must be specified as an
option, or the module will fail.
- If both I(zone) and I(zone_id) are specifed, I(zone_id) will be
used.
required: false
zone_id:
description:
- The Google Cloud ID of the zone (e.g., example-com).
- One of either I(zone) or I(zone_id) must be specified as an
option, or the module will fail.
- These usually take the form of domain names with the dots replaced
with dashes. A zone ID will never have any dots in it.
- I(zone_id) can be faster than I(zone) in projects with a large
number of zones.
- If both I(zone) and I(zone_id) are specifed, I(zone_id) will be
used.
required: false
type:
description:
- The type of resource record to add.
required: true
choices: [ 'A', 'AAAA', 'CNAME', 'SRV', 'TXT', 'SOA', 'NS', 'MX', 'SPF', 'PTR' ]
record_data:
description:
- The record_data to use for the resource record.
- I(record_data) must be specified if I(state) is C(present) or
I(overwrite) is C(True), or the module will fail.
- Valid record_data vary based on the record's I(type). In addition,
resource records that contain a DNS domain name in the value
field (e.g., CNAME, PTR, SRV, .etc) MUST include a trailing dot
in the value.
- Individual string record_data for TXT records must be enclosed in
double quotes.
- For resource records that have the same name but different
record_data (e.g., multiple A records), they must be defined as
multiple list entries in a single record.
required: false
aliases: ['value']
ttl:
description:
- The amount of time in seconds that a resource record will remain
cached by a caching resolver.
required: false
default: 300
overwrite:
description:
- Whether an attempt to overwrite an existing record should succeed
or fail. The behavior of this option depends on I(state).
- If I(state) is C(present) and I(overwrite) is C(True), this
module will replace an existing resource record of the same name
with the provided I(record_data). If I(state) is C(present) and
I(overwrite) is C(False), this module will fail if there is an
existing resource record with the same name and type, but
different resource data.
- If I(state) is C(absent) and I(overwrite) is C(True), this
module will remove the given resource record unconditionally.
If I(state) is C(absent) and I(overwrite) is C(False), this
module will fail if the provided record_data do not match exactly
with the existing resource record's record_data.
required: false
choices: [True, False]
default: False
service_account_email:
description:
- The e-mail address for a service account with access to Google
Cloud DNS.
required: false
default: null
pem_file:
description:
- The path to the PEM file associated with the service account
email.
- This option is deprecated and may be removed in a future release.
Use I(credentials_file) instead.
required: false
default: null
credentials_file:
description:
- The path to the JSON file associated with the service account
email.
required: false
default: null
project_id:
description:
- The Google Cloud Platform project ID to use.
required: false
default: null
notes:
- See also M(gcdns_zone).
- This module's underlying library does not support in-place updates for
DNS resource records. Instead, resource records are quickly deleted and
recreated.
- SOA records are technically supported, but their functionality is limited
to verifying that a zone's existing SOA record matches a pre-determined
value. The SOA record cannot be updated.
- Root NS records cannot be updated.
- NAPTR records are not supported.
'''
EXAMPLES = '''
# Create an A record.
- gcdns_record:
record: 'www1.example.com'
zone: 'example.com'
type: A
value: '1.2.3.4'
# Update an existing record.
- gcdns_record:
record: 'www1.example.com'
zone: 'example.com'
type: A
overwrite: true
value: '5.6.7.8'
# Remove an A record.
- gcdns_record:
record: 'www1.example.com'
zone_id: 'example-com'
state: absent
type: A
value: '5.6.7.8'
# Create a CNAME record.
- gcdns_record:
record: 'www.example.com'
zone_id: 'example-com'
type: CNAME
value: 'www.example.com.' # Note the trailing dot
# Create an MX record with a custom TTL.
- gcdns_record:
record: 'example.com'
zone: 'example.com'
type: MX
ttl: 3600
value: '10 mail.example.com.' # Note the trailing dot
# Create multiple A records with the same name.
- gcdns_record:
record: 'api.example.com'
zone_id: 'example-com'
type: A
record_data:
- '192.0.2.23'
- '10.4.5.6'
- '198.51.100.5'
- '203.0.113.10'
# Change the value of an existing record with multiple record_data.
- gcdns_record:
record: 'api.example.com'
zone: 'example.com'
type: A
overwrite: true
record_data: # WARNING: All values in a record will be replaced
- '192.0.2.23'
- '192.0.2.42' # The changed record
- '198.51.100.5'
- '203.0.113.10'
# Safely remove a multi-line record.
- gcdns_record:
record: 'api.example.com'
zone_id: 'example-com'
state: absent
type: A
record_data: # NOTE: All of the values must match exactly
- '192.0.2.23'
- '192.0.2.42'
- '198.51.100.5'
- '203.0.113.10'
# Unconditionally remove a record.
- gcdns_record:
record: 'api.example.com'
zone_id: 'example-com'
state: absent
overwrite: true # overwrite is true, so no values are needed
type: A
# Create an AAAA record
- gcdns_record:
record: 'www1.example.com'
zone: 'example.com'
type: AAAA
value: 'fd00:db8::1'
# Create a PTR record
- gcdns_record:
record: '10.5.168.192.in-addr.arpa'
zone: '5.168.192.in-addr.arpa'
type: PTR
value: 'api.example.com.' # Note the trailing dot.
# Create an NS record
- gcdns_record:
record: 'subdomain.example.com'
zone: 'example.com'
type: NS
ttl: 21600
record_data:
- 'ns-cloud-d1.googledomains.com.' # Note the trailing dots on values
- 'ns-cloud-d2.googledomains.com.'
- 'ns-cloud-d3.googledomains.com.'
- 'ns-cloud-d4.googledomains.com.'
# Create a TXT record
- gcdns_record:
record: 'example.com'
zone_id: 'example-com'
type: TXT
record_data:
- '"v=spf1 include:_spf.google.com -all"' # A single-string TXT value
- '"hello " "world"' # A multi-string TXT value
'''
RETURN = '''
overwrite:
  description: Whether the module was allowed to overwrite the record
returned: success
type: boolean
sample: True
record:
description: Fully-qualified domain name of the resource record
returned: success
type: string
sample: mail.example.com.
state:
description: Whether the record is present or absent
returned: success
type: string
sample: present
ttl:
description: The time-to-live of the resource record
returned: success
type: int
sample: 300
type:
description: The type of the resource record
returned: success
type: string
sample: A
record_data:
description: The resource record values
returned: success
type: list
sample: ['5.6.7.8', '9.10.11.12']
zone:
description: The dns name of the zone
returned: success
type: string
sample: example.com.
zone_id:
description: The Google Cloud DNS ID of the zone
returned: success
type: string
sample: example-com
'''
################################################################################
# Imports
################################################################################
import socket
from distutils.version import LooseVersion
try:
from libcloud import __version__ as LIBCLOUD_VERSION
from libcloud.common.google import InvalidRequestError
from libcloud.common.types import LibcloudError
from libcloud.dns.types import Provider
from libcloud.dns.types import RecordDoesNotExistError
from libcloud.dns.types import ZoneDoesNotExistError
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
################################################################################
# Constants
################################################################################
# Apache libcloud 0.19.0 was the first to contain the non-beta Google Cloud DNS
# v1 API. Earlier versions contained the beta v1 API, which has since been
# deprecated and decommissioned.
MINIMUM_LIBCLOUD_VERSION = '0.19.0'
# The libcloud Google Cloud DNS provider.
PROVIDER = Provider.GOOGLE
# The records that libcloud's Google Cloud DNS provider supports.
#
# Libcloud has a RECORD_TYPE_MAP dictionary in the provider that also contains
# this information and is the authoritative source on which records are
# supported, but accessing the dictionary requires creating a Google Cloud DNS
# driver object, which is done in a helper module.
#
# I'm hard-coding the supported record types here, because they (hopefully!)
# shouldn't change much, and it allows me to use it as a "choices" parameter
# in an AnsibleModule argument_spec.
SUPPORTED_RECORD_TYPES = [ 'A', 'AAAA', 'CNAME', 'SRV', 'TXT', 'SOA', 'NS', 'MX', 'SPF', 'PTR' ]
################################################################################
# Functions
################################################################################
def create_record(module, gcdns, zone, record):
    """Create or overwrite a resource record.

    Args:
        module: the AnsibleModule instance (provides params, check_mode,
            and the fail_json exit helper).
        gcdns: a libcloud Google Cloud DNS driver connection.
        zone: the libcloud Zone object that the record belongs to.
        record: the existing libcloud Record object, or None if absent.

    Returns:
        False when the existing record already matches (no change needed),
        True when the record was created or replaced (or would have been,
        in check mode). Calls module.fail_json() — which does not return —
        on unrecoverable errors.
    """
    overwrite = module.boolean(module.params['overwrite'])
    record_name = module.params['record']
    record_type = module.params['type']
    ttl = module.params['ttl']
    record_data = module.params['record_data']
    data = dict(ttl=ttl, rrdatas=record_data)

    # Google Cloud DNS wants the trailing dot on all DNS names.
    if record_name[-1] != '.':
        record_name = record_name + '.'

    # If we found a record, we need to check if the values match.
    if record is not None:
        # If the record matches, we obviously don't have to change anything.
        if _records_match(record.data['ttl'], record.data['rrdatas'], ttl, record_data):
            return False

        # The record doesn't match, so we need to check if we can overwrite it.
        if not overwrite:
            module.fail_json(
                msg = 'cannot overwrite existing record, overwrite protection enabled',
                changed = False
            )

    # The record either doesn't exist, or it exists and we can overwrite it.
    if record is None and not module.check_mode:
        # There's no existing record, so we'll just create it.
        try:
            gcdns.create_record(record_name, zone, record_type, data)
        except InvalidRequestError as error:
            if error.code == 'invalid':
                # The resource record name and type are valid by themselves, but
                # not when combined (e.g., an 'A' record with "www.example.com"
                # as its value).
                module.fail_json(
                    msg = 'value is invalid for the given type: ' +
                        "%s, got value: %s" % (record_type, record_data),
                    changed = False
                )
            elif error.code == 'cnameResourceRecordSetConflict':
                # We're attempting to create a CNAME resource record when we
                # already have another type of resource record with the same
                # domain name.
                module.fail_json(
                    msg = "non-CNAME resource record already exists: %s" % record_name,
                    changed = False
                )
            else:
                # The error is something else that we don't know how to handle,
                # so we'll just re-raise the exception.
                raise
    elif record is not None and not module.check_mode:
        # The Google provider in libcloud doesn't support updating a record in
        # place, so if the record already exists, we need to delete it and
        # recreate it using the new information.
        gcdns.delete_record(record)

        try:
            gcdns.create_record(record_name, zone, record_type, data)
        except InvalidRequestError:
            # Something blew up when creating the record. This will usually be a
            # result of invalid value data in the new record. Unfortunately, we
            # already changed the state of the record by deleting the old one,
            # so we'll try to roll back before failing out.
            try:
                gcdns.create_record(record.name, record.zone, record.type, record.data)
                module.fail_json(
                    msg = 'error updating record, the original record was restored',
                    changed = False
                )
            except LibcloudError:
                # We deleted the old record, couldn't create the new record, and
                # couldn't roll back. That really sucks. We'll dump the original
                # record to the failure output so the user can restore it if
                # necessary.
                module.fail_json(
                    msg = 'error updating record, and could not restore original record, ' +
                        "original name: %s " % record.name +
                        "original zone: %s " % record.zone +
                        "original type: %s " % record.type +
                        "original data: %s" % record.data,
                    changed = True)

    return True
def remove_record(module, gcdns, record):
    """Delete a resource record, honoring overwrite protection.

    Returns False when there is nothing to delete, and True once the
    record has been removed (or would have been, in check mode).
    """
    allow_mismatch = module.boolean(module.params['overwrite'])
    wanted_ttl = module.params['ttl']
    wanted_data = module.params['record_data']

    # Nothing to do if the record is already absent.
    if record is None:
        return False

    # Without overwrite, refuse to delete unless the caller-supplied TTL
    # and record data exactly match what is currently in the zone.
    if not allow_mismatch:
        current_ttl = record.data['ttl']
        current_data = record.data['rrdatas']
        if not _records_match(current_ttl, current_data, wanted_ttl, wanted_data):
            module.fail_json(
                msg=('cannot delete due to non-matching ttl or record_data: ' +
                     "ttl: %d, record_data: %s " % (wanted_ttl, wanted_data) +
                     "original ttl: %d, original record_data: %s" % (current_ttl, current_data)),
                changed=False
            )

    # Safe to delete; respect check mode.
    if not module.check_mode:
        gcdns.delete_record(record)
    return True
def _get_record(gcdns, zone, record_type, record_name):
    """Look up a single record by type and FQDN, returning None if absent."""
    # Google Cloud DNS identifies a record by "<TYPE>:<fqdn>", e.g. an A
    # record for www.example.com has the ID 'A:www.example.com.'.
    record_id = "%s:%s" % (record_type, record_name)
    try:
        return gcdns.get_record(zone.id, record_id)
    except RecordDoesNotExistError:
        return None
def _get_zone(gcdns, zone_name, zone_id):
    """Resolve a zone object either by its Cloud DNS ID or its domain name."""
    # The zone ID is authoritative, so prefer it whenever it was supplied.
    if zone_id is not None:
        try:
            return gcdns.get_zone(zone_id)
        except ZoneDoesNotExistError:
            return None

    # Zone IDs are usually derived from the domain name, but that is not
    # guaranteed, so scan the project's zones for a matching domain instead.
    for candidate in gcdns.iterate_zones():
        if candidate.domain == zone_name:
            return candidate
    return None
def _records_match(old_ttl, old_record_data, new_ttl, new_record_data):
"""Checks to see if original and new TTL and values match."""
matches = True
if old_ttl != new_ttl:
matches = False
if old_record_data != new_record_data:
matches = False
return matches
def _sanity_check(module):
    """Run sanity checks that don't depend on info from the zone/record.

    Checks, in order: libcloud availability and version, a non-negative
    TTL, SOA create/delete restrictions, and per-type validation of the
    supplied record_data. The first violation calls module.fail_json(),
    which does not return.
    """
    overwrite = module.params['overwrite']
    record_name = module.params['record']
    record_type = module.params['type']
    state = module.params['state']
    ttl = module.params['ttl']
    record_data = module.params['record_data']

    # Apache libcloud needs to be installed and at least the minimum version.
    if not HAS_LIBCLOUD:
        module.fail_json(
            msg = 'This module requires Apache libcloud %s or greater' % MINIMUM_LIBCLOUD_VERSION,
            changed = False
        )
    # Compare two LooseVersion objects rather than a LooseVersion against a
    # bare string, so the comparison semantics are unambiguous.
    elif LooseVersion(LIBCLOUD_VERSION) < LooseVersion(MINIMUM_LIBCLOUD_VERSION):
        module.fail_json(
            msg = 'This module requires Apache libcloud %s or greater' % MINIMUM_LIBCLOUD_VERSION,
            changed = False
        )

    # A negative TTL is not permitted (how would they even work?!).
    if ttl < 0:
        module.fail_json(
            msg = 'TTL cannot be less than zero, got: %d' % ttl,
            changed = False
        )

    # Deleting SOA records is not permitted.
    if record_type == 'SOA' and state == 'absent':
        module.fail_json(msg='cannot delete SOA records', changed=False)

    # Updating SOA records is not permitted.
    if record_type == 'SOA' and state == 'present' and overwrite:
        module.fail_json(msg='cannot update SOA records', changed=False)

    # Some sanity checks depend on what value was supplied.
    if record_data is not None and (state == 'present' or not overwrite):
        # A records must contain valid IPv4 addresses.
        if record_type == 'A':
            for value in record_data:
                try:
                    socket.inet_aton(value)
                except socket.error:
                    module.fail_json(
                        msg = 'invalid A record value, got: %s' % value,
                        changed = False
                    )

        # AAAA records must contain valid IPv6 addresses.
        if record_type == 'AAAA':
            for value in record_data:
                try:
                    socket.inet_pton(socket.AF_INET6, value)
                except socket.error:
                    module.fail_json(
                        msg = 'invalid AAAA record value, got: %s' % value,
                        changed = False
                    )

        # CNAME and SOA records can't have multiple values.
        if record_type in ['CNAME', 'SOA'] and len(record_data) > 1:
            module.fail_json(
                msg = 'CNAME or SOA records cannot have more than one value, ' +
                    "got: %s" % record_data,
                changed = False
            )

        # Google Cloud DNS does not support wildcard NS records.
        if record_type == 'NS' and record_name[0] == '*':
            module.fail_json(
                msg = "wildcard NS records not allowed, got: %s" % record_name,
                changed = False
            )

        # Values for TXT records must begin AND end with a double quote.
        if record_type == 'TXT':
            for value in record_data:
                # BUG FIX: the original joined these conditions with 'and',
                # which only rejected values missing BOTH quotes, letting
                # half-quoted values such as '"foo' or 'bar"' slip through.
                if value[0] != '"' or value[-1] != '"':
                    module.fail_json(
                        msg = 'TXT record_data must be enclosed in double quotes, ' +
                            'got: %s' % value,
                        changed = False
                    )
def _additional_sanity_checks(module, zone):
    """Run input sanity checks that depend on info from the zone/record."""
    overwrite = module.params['overwrite']
    record_name = module.params['record']
    record_type = module.params['type']
    state = module.params['state']

    # Is the record at the zone apex (same name as the root domain)?
    at_apex = record_name == zone.domain

    # CNAME records are not allowed to have the same name as the root domain.
    if record_type == 'CNAME' and at_apex:
        module.fail_json(
            msg='CNAME records cannot match the zone name',
            changed=False
        )

    if record_type == 'NS' and at_apex:
        # The root domain must always have an NS record, so deletion is
        # refused...
        if state == 'absent':
            module.fail_json(
                msg='cannot delete root NS records',
                changed=False
            )
        # ...and because libcloud cannot update records in place (it deletes
        # and recreates), overwriting a root NS record is refused too.
        if overwrite:
            module.fail_json(
                msg='cannot update existing root NS records',
                changed=False
            )

    # SOA records only make sense at the zone apex.
    if record_type == 'SOA' and not at_apex:
        module.fail_json(
            msg='non-root SOA records are not permitted, got: %s' % record_name,
            changed=False
        )
################################################################################
# Main
################################################################################
def main():
    """Create, update, or remove a single Google Cloud DNS resource record."""
    module = AnsibleModule(
        argument_spec = dict(
            state = dict(default='present', choices=['present', 'absent'], type='str'),
            record = dict(required=True, aliases=['name'], type='str'),
            zone = dict(type='str'),
            zone_id = dict(type='str'),
            type = dict(required=True, choices=SUPPORTED_RECORD_TYPES, type='str'),
            record_data = dict(aliases=['value'], type='list'),
            ttl = dict(default=300, type='int'),
            overwrite = dict(default=False, type='bool'),
            service_account_email = dict(type='str'),
            pem_file = dict(type='path'),
            credentials_file = dict(type='path'),
            project_id = dict(type='str')
        ),
        # record_data is mandatory when creating, and also when deleting
        # without overwrite (the supplied values must then match exactly).
        required_if = [
            ('state', 'present', ['record_data']),
            ('overwrite', False, ['record_data'])
        ],
        required_one_of = [['zone', 'zone_id']],
        supports_check_mode = True
    )

    # Fail fast on parameter problems that don't need a connection.
    _sanity_check(module)

    record_name = module.params['record']
    record_type = module.params['type']
    state = module.params['state']
    ttl = module.params['ttl']
    zone_name = module.params['zone']
    zone_id = module.params['zone_id']

    # Echo the inputs back in the module result (see RETURN docs).
    json_output = dict(
        state = state,
        record = record_name,
        zone = zone_name,
        zone_id = zone_id,
        type = record_type,
        record_data = module.params['record_data'],
        ttl = ttl,
        overwrite = module.boolean(module.params['overwrite'])
    )

    # Google Cloud DNS wants the trailing dot on all DNS names.
    if zone_name is not None and zone_name[-1] != '.':
        zone_name = zone_name + '.'
    if record_name[-1] != '.':
        record_name = record_name + '.'

    # Build a connection object that we can use to connect with Google Cloud
    # DNS.
    gcdns = gcdns_connect(module, provider=PROVIDER)

    # We need to check that the zone we're creating a record for actually
    # exists.
    zone = _get_zone(gcdns, zone_name, zone_id)
    if zone is None and zone_name is not None:
        module.fail_json(
            msg = 'zone name was not found: %s' % zone_name,
            changed = False
        )
    elif zone is None and zone_id is not None:
        module.fail_json(
            msg = 'zone id was not found: %s' % zone_id,
            changed = False
        )

    # Populate the returns with the actual zone information.
    json_output['zone'] = zone.domain
    json_output['zone_id'] = zone.id

    # We also need to check if the record we want to create or remove actually
    # exists.
    try:
        record = _get_record(gcdns, zone, record_type, record_name)
    except InvalidRequestError:
        # We gave Google Cloud DNS an invalid DNS record name.
        module.fail_json(
            msg = 'record name is invalid: %s' % record_name,
            changed = False
        )

    # These checks need the resolved zone object.
    _additional_sanity_checks(module, zone)

    diff = dict()

    # Build the 'before' diff
    if record is None:
        diff['before'] = ''
        diff['before_header'] = '<absent>'
    else:
        diff['before'] = dict(
            record = record.data['name'],
            type = record.data['type'],
            record_data = record.data['rrdatas'],
            ttl = record.data['ttl']
        )
        diff['before_header'] = "%s:%s" % (record_type, record_name)

    # Create, remove, or modify the record.
    if state == 'present':
        diff['after'] = dict(
            record = record_name,
            type = record_type,
            record_data = module.params['record_data'],
            ttl = ttl
        )
        diff['after_header'] = "%s:%s" % (record_type, record_name)
        changed = create_record(module, gcdns, zone, record)

    elif state == 'absent':
        diff['after'] = ''
        diff['after_header'] = '<absent>'
        changed = remove_record(module, gcdns, record)

    module.exit_json(changed=changed, diff=diff, **json_output)
from ansible.module_utils.basic import *
from ansible.module_utils.gcdns import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
frishberg/django
|
tests/admin_changelist/models.py
|
276
|
2890
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
class Event(models.Model):
    """Event identified solely by its date."""
    # Oracle can have problems with a column named "date"
    date = models.DateField(db_column="event_date")
class Parent(models.Model):
    """Parent side of the Parent/Child foreign-key pair."""
    name = models.CharField(max_length=128)
class Child(models.Model):
    """Child with an optional, non-editable link to a Parent.

    The foreign key is nulled out (SET_NULL) when the parent is deleted.
    """
    parent = models.ForeignKey(Parent, models.SET_NULL, editable=False, null=True)
    name = models.CharField(max_length=30, blank=True)
    age = models.IntegerField(null=True, blank=True)
class Genre(models.Model):
    """Music genre, referenced by Band via a many-to-many."""
    name = models.CharField(max_length=20)
class Band(models.Model):
    """Band with a member count and a many-to-many link to Genre."""
    name = models.CharField(max_length=20)
    nr_of_members = models.PositiveIntegerField()
    genres = models.ManyToManyField(Genre)
@python_2_unicode_compatible
class Musician(models.Model):
    """Musician whose string representation is its name."""
    name = models.CharField(max_length=30)

    def __str__(self):
        return self.name
@python_2_unicode_compatible
class Group(models.Model):
    """Group of musicians, linked through the Membership model."""
    name = models.CharField(max_length=30)
    members = models.ManyToManyField(Musician, through='Membership')

    def __str__(self):
        return self.name
class Concert(models.Model):
    """Concert performed by a Group."""
    name = models.CharField(max_length=30)
    group = models.ForeignKey(Group, models.CASCADE)
class Membership(models.Model):
    """Through model joining Musician and Group, with a role per membership."""
    music = models.ForeignKey(Musician, models.CASCADE)
    group = models.ForeignKey(Group, models.CASCADE)
    role = models.CharField(max_length=15)
class Quartet(Group):
    """Multi-table-inheritance child of Group (adds no fields)."""
    pass
class ChordsMusician(Musician):
    """Multi-table-inheritance child of Musician (adds no fields)."""
    pass
class ChordsBand(models.Model):
    """Band whose members are ChordsMusicians, joined through Invitation."""
    name = models.CharField(max_length=30)
    members = models.ManyToManyField(ChordsMusician, through='Invitation')
class Invitation(models.Model):
    """Through model joining ChordsMusician and ChordsBand."""
    player = models.ForeignKey(ChordsMusician, models.CASCADE)
    band = models.ForeignKey(ChordsBand, models.CASCADE)
    instrument = models.CharField(max_length=15)
class Swallow(models.Model):
    """Model with a two-field default ordering (speed, then load)."""
    origin = models.CharField(max_length=255)
    load = models.FloatField()
    speed = models.FloatField()

    class Meta:
        ordering = ('speed', 'load')
class SwallowOneToOne(models.Model):
    """One-to-one companion of Swallow."""
    swallow = models.OneToOneField(Swallow, models.CASCADE)
class UnorderedObject(models.Model):
    """
    Model without any defined `Meta.ordering`.
    Refs #17198.
    """
    # NOTE: field name deliberately(?) shadows the builtin `bool` — confirm
    # before renaming, as callers may reference it by this name.
    bool = models.BooleanField(default=True)
class OrderedObjectManager(models.Manager):
    """Manager whose querysets are ordered by `number` by default."""

    def get_queryset(self):
        return super(OrderedObjectManager, self).get_queryset().order_by('number')
class OrderedObject(models.Model):
    """
    Model with Manager that defines a default order.
    Refs #17198.
    """
    name = models.CharField(max_length=255)
    # NOTE: field name shadows the builtin `bool`; mirrors UnorderedObject.
    bool = models.BooleanField(default=True)
    # Stored under a column name that differs from the field name.
    number = models.IntegerField(default=0, db_column='number_val')
    objects = OrderedObjectManager()
class CustomIdUser(models.Model):
    """Model whose primary key is a custom AutoField named `uuid`."""
    uuid = models.AutoField(primary_key=True)
|
bsd-3-clause
|
vitorio/bite-project
|
deps/gdata-python-client/src/gdata/health/__init__.py
|
263
|
7090
|
#!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains extensions to Atom objects used with Google Health."""
__author__ = 'api.eric@google.com (Eric Bidelman)'
import atom
import gdata
CCR_NAMESPACE = 'urn:astm-org:CCR'
METADATA_NAMESPACE = 'http://schemas.google.com/health/metadata'
class Ccr(atom.AtomBase):
    """Represents a Google Health <ContinuityOfCareRecord>.

    The Get* helpers all extract repeated item elements nested under a
    section of the CCR <Body>; the shared lookup lives in
    _FindBodyChildren().
    """

    _tag = 'ContinuityOfCareRecord'
    _namespace = CCR_NAMESPACE
    _children = atom.AtomBase._children.copy()

    def __init__(self, extension_elements=None,
                 extension_attributes=None, text=None):
        atom.AtomBase.__init__(self, extension_elements=extension_elements,
                               extension_attributes=extension_attributes, text=text)

    def _FindBodyChildren(self, container_tag, child_tag):
        """Extracts <child_tag> elements under Body/<container_tag>.

        Args:
            container_tag: string Name of the section element under <Body>
                (e.g., 'Alerts').
            child_tag: string Name of the repeated item element inside the
                section (e.g., 'Alert').

        Returns:
            A list of ExtensionElements (one per item found) or None if the
            section is absent from this CCR.
        """
        # IndexError: the <Body> or section element is missing ([0] on an
        # empty list). AttributeError: an intermediate lookup yielded a
        # non-element. The original code used a bare `except:`, which also
        # masked unrelated bugs; only the expected failures are caught now.
        try:
            body = self.FindExtensions('Body')[0]
            return body.FindChildren(container_tag)[0].FindChildren(child_tag)
        except (IndexError, AttributeError):
            return None

    def GetAlerts(self):
        """Helper for extracting Alert/Allergy data from the CCR.

        Returns:
            A list of ExtensionElements (one for each allergy found) or None
            if no allergies were found in this CCR.
        """
        return self._FindBodyChildren('Alerts', 'Alert')

    def GetAllergies(self):
        """Alias for GetAlerts()."""
        return self.GetAlerts()

    def GetProblems(self):
        """Helper for extracting Problem/Condition data from the CCR.

        Returns:
            A list of ExtensionElements (one for each problem found) or None
            if no problems were found in this CCR.
        """
        return self._FindBodyChildren('Problems', 'Problem')

    def GetConditions(self):
        """Alias for GetProblems()."""
        return self.GetProblems()

    def GetProcedures(self):
        """Helper for extracting Procedure data from the CCR.

        Returns:
            A list of ExtensionElements (one for each procedure found) or
            None if no procedures were found in this CCR.
        """
        return self._FindBodyChildren('Procedures', 'Procedure')

    def GetImmunizations(self):
        """Helper for extracting Immunization data from the CCR.

        Returns:
            A list of ExtensionElements (one for each immunization found) or
            None if no immunizations were found in this CCR.
        """
        return self._FindBodyChildren('Immunizations', 'Immunization')

    def GetMedications(self):
        """Helper for extracting Medication data from the CCR.

        Returns:
            A list of ExtensionElements (one for each medication found) or
            None if no medications were found in this CCR.
        """
        return self._FindBodyChildren('Medications', 'Medication')

    def GetResults(self):
        """Helper for extracting Results/Labresults data from the CCR.

        Returns:
            A list of ExtensionElements (one for each result found) or None
            if no results were found in this CCR.
        """
        return self._FindBodyChildren('Results', 'Result')
class ProfileEntry(gdata.GDataEntry):
    """The Google Health version of an Atom Entry."""
    _tag = gdata.GDataEntry._tag
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    # Each profile entry embeds a CCR document under the CCR namespace;
    # it parses into the `ccr` attribute as a Ccr object.
    _children['{%s}ContinuityOfCareRecord' % CCR_NAMESPACE] = ('ccr', Ccr)

    def __init__(self, ccr=None, author=None, category=None, content=None,
                 atom_id=None, link=None, published=None, title=None,
                 updated=None, text=None, extension_elements=None,
                 extension_attributes=None):
        # `ccr` is the parsed Ccr object (or None); every other argument is
        # forwarded unchanged to gdata.GDataEntry.
        self.ccr = ccr
        gdata.GDataEntry.__init__(
            self, author=author, category=category, content=content,
            atom_id=atom_id, link=link, published=published, title=title,
            updated=updated, extension_elements=extension_elements,
            extension_attributes=extension_attributes, text=text)
class ProfileFeed(gdata.GDataFeed):
    """A feed containing a list of Google Health profile entries."""
    _tag = gdata.GDataFeed._tag
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    # Feed entries parse into ProfileEntry objects instead of plain entries.
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [ProfileEntry])
class ProfileListEntry(gdata.GDataEntry):
    """The Atom Entry in the Google Health profile list feed."""
    _tag = gdata.GDataEntry._tag
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()

    def GetProfileId(self):
        # The profile ID is carried in the entry's <content> element.
        return self.content.text

    def GetProfileName(self):
        # The human-readable profile name is the entry's <title> element.
        return self.title.text
class ProfileListFeed(gdata.GDataFeed):
    """A feed containing a list of Google Health profile list entries."""
    _tag = gdata.GDataFeed._tag
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    # Feed entries parse into ProfileListEntry objects.
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [ProfileListEntry])
def ProfileEntryFromString(xml_string):
    """Converts an XML string into a ProfileEntry object.

    Args:
        xml_string: string The XML describing a Health profile feed entry.

    Returns:
        A ProfileEntry object corresponding to the given XML.
    """
    # Parsing is delegated entirely to the atom library.
    return atom.CreateClassFromXMLString(ProfileEntry, xml_string)
def ProfileListEntryFromString(xml_string):
    """Converts an XML string into a ProfileListEntry object.

    Args:
        xml_string: string The XML describing a Health profile list feed entry.

    Returns:
        A ProfileListEntry object corresponding to the given XML.
    """
    # Parsing is delegated entirely to the atom library.
    return atom.CreateClassFromXMLString(ProfileListEntry, xml_string)
def ProfileFeedFromString(xml_string):
    """Converts an XML string into a ProfileFeed object.

    Args:
        xml_string: string The XML describing a ProfileFeed feed.

    Returns:
        A ProfileFeed object corresponding to the given XML.
    """
    # Parsing is delegated entirely to the atom library.
    return atom.CreateClassFromXMLString(ProfileFeed, xml_string)
def ProfileListFeedFromString(xml_string):
    """Converts an XML string into a ProfileListFeed object.

    Args:
        xml_string: string The XML describing a ProfileListFeed feed.

    Returns:
        A ProfileListFeed object corresponding to the given XML.
    """
    # Parsing is delegated entirely to the atom library.
    return atom.CreateClassFromXMLString(ProfileListFeed, xml_string)
|
apache-2.0
|
klahnakoski/jx-sqlite
|
vendor/mo_logs/__init__.py
|
1
|
15837
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Contact: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import, division, unicode_literals
import os
import platform
import sys
from datetime import datetime
from mo_dots import Data, FlatList, coalesce, is_data, is_list, listwrap, unwraplist, wrap
from mo_future import PY3, is_text, text
from mo_logs import constants, exceptions, strings
from mo_logs.exceptions import Except, LogItem, suppress_exception
from mo_logs.strings import CR, indent
_Thread = None
# Select a bytes-capable stdout target: on Python 3 the text wrapper's
# underlying binary buffer is used so loggers can write encoded bytes;
# Python 2's sys.stdout already accepts bytes directly.
if PY3:
    STDOUT = sys.stdout.buffer
else:
    STDOUT = sys.stdout
class Log(object):
"""
FOR STRUCTURED LOGGING AND EXCEPTION CHAINING
"""
trace = False
main_log = None
logging_multi = None
profiler = None # simple pypy-friendly profiler
error_mode = False # prevent error loops
    @classmethod
    def start(cls, settings=None):
        """
        RUN ME FIRST TO SETUP THE THREADED LOGGING
        http://victorlin.me/2012/08/good-logging-practice-in-python/
        log - LIST OF PARAMETERS FOR LOGGER(S)
        trace - SHOW MORE DETAILS IN EVERY LOG LINE (default False)
        cprofile - True==ENABLE THE C-PROFILER THAT COMES WITH PYTHON (default False)
        USE THE LONG FORM TO SET THE FILENAME {"enabled": True, "filename": "cprofile.tab"}
        profile - True==ENABLE pyLibrary SIMPLE PROFILING (default False) (eg with Profiler("some description"):)
        USE THE LONG FORM TO SET FILENAME {"enabled": True, "filename": "profile.tab"}
        constants - UPDATE MODULE CONSTANTS AT STARTUP (PRIMARILY INTENDED TO CHANGE DEBUG STATE)
        """
        global _Thread
        if not settings:
            return
        settings = wrap(settings)
        # Tear down any previous configuration before applying the new one.
        Log.stop()
        cls.settings = settings
        cls.trace = coalesce(settings.trace, False)
        if cls.trace:
            # Thread is only needed when trace lines include thread info.
            from mo_threads import Thread as _Thread
            _ = _Thread
        # ENABLE CPROFILE
        if settings.cprofile is False:
            settings.cprofile = {"enabled": False}
        elif settings.cprofile is True:
            # NOTE(review): this isinstance check is redundant -- cprofile is
            # known to be the bool True inside this branch; harmless.
            if isinstance(settings.cprofile, bool):
                settings.cprofile = {"enabled": True, "filename": "cprofile.tab"}
        if settings.cprofile.enabled:
            from mo_threads import profiles
            profiles.enable_profilers(settings.cprofile.filename)
        if settings.profile is True or (is_data(settings.profile) and settings.profile.enabled):
            # The simple profiler was removed; fail loudly if it is requested.
            Log.error("REMOVED 2018-09-02, Activedata revision 3f30ff46f5971776f8ba18")
            # from mo_logs import profiles
            #
            # if isinstance(settings.profile, bool):
            #     profiles.ON = True
            #     settings.profile = {"enabled": True, "filename": "profile.tab"}
            #
            # if settings.profile.enabled:
            #     profiles.ON = True
        if settings.constants:
            constants.set(settings.constants)
        logs = coalesce(settings.log, settings.logs)
        if logs:
            # Fan out to every configured logger, behind a worker thread so
            # callers never block on slow sinks.
            cls.logging_multi = StructuredLogger_usingMulti()
            for log in listwrap(logs):
                Log.add_log(Log.new_instance(log))
            from mo_logs.log_usingThread import StructuredLogger_usingThread
            cls.main_log = StructuredLogger_usingThread(cls.logging_multi)
@classmethod
def stop(cls):
"""
DECONSTRUCTS ANY LOGGING, AND RETURNS TO DIRECT-TO-stdout LOGGING
EXECUTING MULUTIPLE TIMES IN A ROW IS SAFE, IT HAS NO NET EFFECT, IT STILL LOGS TO stdout
:return: NOTHING
"""
main_log, cls.main_log = cls.main_log, StructuredLogger_usingStream(STDOUT)
main_log.stop()
@classmethod
def new_instance(cls, settings):
settings = wrap(settings)
if settings["class"]:
if settings["class"].startswith("logging.handlers."):
from mo_logs.log_usingHandler import StructuredLogger_usingHandler
return StructuredLogger_usingHandler(settings)
else:
with suppress_exception:
from mo_logs.log_usingLogger import make_log_from_settings
return make_log_from_settings(settings)
# OH WELL :(
if settings.log_type == "logger":
from mo_logs.log_usingLogger import StructuredLogger_usingLogger
return StructuredLogger_usingLogger(settings)
if settings.log_type == "file" or settings.file:
return StructuredLogger_usingFile(settings.file)
if settings.log_type == "file" or settings.filename:
return StructuredLogger_usingFile(settings.filename)
if settings.log_type == "console":
from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream
return StructuredLogger_usingThreadedStream(STDOUT)
if settings.log_type == "mozlog":
from mo_logs.log_usingMozLog import StructuredLogger_usingMozLog
return StructuredLogger_usingMozLog(STDOUT, coalesce(settings.app_name, settings.appname))
if settings.log_type == "stream" or settings.stream:
from mo_logs.log_usingThreadedStream import StructuredLogger_usingThreadedStream
return StructuredLogger_usingThreadedStream(settings.stream)
if settings.log_type == "elasticsearch" or settings.stream:
from mo_logs.log_usingElasticSearch import StructuredLogger_usingElasticSearch
return StructuredLogger_usingElasticSearch(settings)
if settings.log_type == "email":
from mo_logs.log_usingEmail import StructuredLogger_usingEmail
return StructuredLogger_usingEmail(settings)
if settings.log_type == "ses":
from mo_logs.log_usingSES import StructuredLogger_usingSES
return StructuredLogger_usingSES(settings)
if settings.log_type.lower() in ["nothing", "none", "null"]:
from mo_logs.log_usingNothing import StructuredLogger
return StructuredLogger()
Log.error("Log type of {{log_type|quote}} is not recognized", log_type=settings.log_type)
    @classmethod
    def add_log(cls, log):
        # Register an additional sink on the shared multi-logger
        # (assumes Log.start() already created cls.logging_multi).
        cls.logging_multi.add_log(log)
    @classmethod
    def note(
        cls,
        template,
        default_params={},
        stack_depth=0,
        log_context=None,
        **more_params
    ):
        """
        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: *any more parameters (which will overwrite default_params)
        :return:
        """
        # Capture the timestamp immediately so queue/annotation overhead is
        # not included in the reported event time.
        timestamp = datetime.utcnow()
        if not is_text(template):
            Log.error("Log.note was expecting a unicode template")
        # more_params take precedence over default_params.
        Log._annotate(
            LogItem(
                context=exceptions.NOTE,
                format=template,
                template=template,
                params=dict(default_params, **more_params)
            ),
            timestamp,
            stack_depth+1
        )
@classmethod
def unexpected(
cls,
template,
default_params={},
cause=None,
stack_depth=0,
log_context=None,
**more_params
):
"""
:param template: *string* human readable string with placeholders for parameters
:param default_params: *dict* parameters to fill in template
:param cause: *Exception* for chaining
:param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
:param log_context: *dict* extra key:value pairs for your convenience
:param more_params: *any more parameters (which will overwrite default_params)
:return:
"""
timestamp = datetime.utcnow()
if not is_text(template):
Log.error("Log.warning was expecting a unicode template")
if isinstance(default_params, BaseException):
cause = default_params
default_params = {}
if "values" in more_params.keys():
Log.error("Can not handle a logging parameter by name `values`")
params = Data(dict(default_params, **more_params))
cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
trace = exceptions.get_stacktrace(stack_depth + 1)
e = Except(exceptions.UNEXPECTED, template=template, params=params, cause=cause, trace=trace)
Log._annotate(
e,
timestamp,
stack_depth+1
)
    @classmethod
    def alarm(
        cls,
        template,
        default_params={},
        stack_depth=0,
        log_context=None,
        **more_params
    ):
        """
        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: more parameters (which will overwrite default_params)
        :return:
        """
        timestamp = datetime.utcnow()
        # Frame the message in an 80-column asterisk banner so it stands out.
        format = ("*" * 80) + CR + indent(template, prefix="** ").strip() + CR + ("*" * 80)
        Log._annotate(
            LogItem(
                context=exceptions.ALARM,
                format=format,
                template=template,
                params=dict(default_params, **more_params)
            ),
            timestamp,
            stack_depth + 1
        )

    # `alert` is a synonym for `alarm`
    alert = alarm
    @classmethod
    def warning(
        cls,
        template,
        default_params={},
        cause=None,
        stack_depth=0,
        log_context=None,
        **more_params
    ):
        """
        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: *any more parameters (which will overwrite default_params)
        :return:
        """
        timestamp = datetime.utcnow()
        if not is_text(template):
            Log.error("Log.warning was expecting a unicode template")
        if isinstance(default_params, BaseException):
            # Allow the Log.warning(template, exception) shorthand.
            cause = default_params
            default_params = {}
        if "values" in more_params.keys():
            Log.error("Can not handle a logging parameter by name `values`")
        params = Data(dict(default_params, **more_params))
        # Normalize cause(s) into Except instances for chaining.
        cause = unwraplist([Except.wrap(c) for c in listwrap(cause)])
        trace = exceptions.get_stacktrace(stack_depth + 1)
        e = Except(exceptions.WARNING, template=template, params=params, cause=cause, trace=trace)
        Log._annotate(
            e,
            timestamp,
            stack_depth+1
        )
    @classmethod
    def error(
        cls,
        template,  # human readable template
        default_params={},  # parameters for template
        cause=None,  # plausible cause (Exception, or list of Exceptions)
        stack_depth=0,
        **more_params
    ):
        """
        raise an exception with a trace for the cause too

        :param template: *string* human readable string with placeholders for parameters
        :param default_params: *dict* parameters to fill in template
        :param cause: *Exception* for chaining
        :param stack_depth: *int* how many calls you want popped off the stack to report the *true* caller
        :param log_context: *dict* extra key:value pairs for your convenience
        :param more_params: *any more parameters (which will overwrite default_params)
        :return:
        """
        if not is_text(template):
            # Write to stderr directly too, in case the recursive Log.error fails.
            sys.stderr.write(str("Log.error was expecting a unicode template"))
            Log.error("Log.error was expecting a unicode template")
        if default_params and isinstance(listwrap(default_params)[0], BaseException):
            # Allow the Log.error(template, exception) shorthand.
            cause = default_params
            default_params = {}
        params = Data(dict(default_params, **more_params))
        add_to_trace = False
        if cause == None:
            causes = None
        elif is_list(cause):
            causes = []
            for c in listwrap(cause):  # CAN NOT USE LIST-COMPREHENSION IN PYTHON3 (EXTRA STACK DEPTH FROM THE IN-LINED GENERATOR)
                causes.append(Except.wrap(c, stack_depth=1))
            causes = FlatList(causes)
        elif isinstance(cause, BaseException):
            causes = Except.wrap(cause, stack_depth=1)
        else:
            causes = None
            Log.error("can only accept Exception, or list of exceptions")
        trace = exceptions.get_stacktrace(stack_depth + 1)
        if add_to_trace:
            # NOTE(review): add_to_trace is never set True above, so this is
            # currently dead code -- confirm before removing.
            cause[0].trace.extend(trace[1:])
        e = Except(context=exceptions.ERROR, template=template, params=params, cause=causes, trace=trace)
        # Raise without implicit context chaining (see raise_from_none below).
        raise_from_none(e)
    @classmethod
    def _annotate(
        cls,
        item,
        timestamp,
        stack_depth
    ):
        """
        Stamp *item* with timestamp/machine/location metadata and hand it to
        the active logger.

        :param item: the LogItem (or Except) to emit
        :param timestamp: datetime when the event was recorded
        :param stack_depth: FOR TRACKING WHAT LINE THIS CAME FROM
        :return:
        """
        item.timestamp = timestamp
        item.machine = machine_metadata
        # Guard against runaway template/format strings.
        item.template = strings.limit(item.template, 10000)
        item.format = strings.limit(item.format, 10000)
        if item.format == None:
            format = text(item)
        else:
            # Redirect {{var}} placeholders at the `params` sub-document.
            format = item.format.replace("{{", "{{params.")
        if not format.startswith(CR) and format.find(CR) > -1:
            # Multi-line messages start on their own line.
            format = CR + format
        if cls.trace:
            log_format = item.format = "{{machine.name}} (pid {{machine.pid}}) - {{timestamp|datetime}} - {{thread.name}} - \"{{location.file}}:{{location.line}}\" - ({{location.method}}) - " + format
            # Inspect the caller's frame for the file/line/method of origin.
            f = sys._getframe(stack_depth + 1)
            item.location = {
                "line": f.f_lineno,
                "file": text(f.f_code.co_filename),
                "method": text(f.f_code.co_name)
            }
            thread = _Thread.current()
            item.thread = {"name": thread.name, "id": thread.id}
        else:
            log_format = item.format = "{{timestamp|datetime}} - " + format
        cls.main_log.write(log_format, item.__data__())
    def write(self):
        """Abstract sink method; concrete StructuredLogger classes override this."""
        raise NotImplementedError
def _same_frame(frameA, frameB):
    """Return True when two stack-frame records point at the same file and line."""
    same_line = frameA.line == frameB.line
    same_file = frameA.file == frameB.file
    return same_line and same_file
# GET THE MACHINE METADATA (attached to every annotated log item)
machine_metadata = wrap({
    "pid": os.getpid(),  # process id, distinguishes workers sharing a log
    "python": text(platform.python_implementation()),  # e.g. "CPython"
    "os": text(platform.system() + platform.release()).strip(),
    "name": text(platform.node())  # network hostname
})
def raise_from_none(e):
    # Python 2 fallback: no `raise ... from None` syntax, so plain raise.
    raise e

if PY3:
    # On Python 3, redefine via exec() so the `from None` syntax (which
    # suppresses implicit exception-context chaining) does not break
    # Python 2 parsing of this module.
    exec("def raise_from_none(e):\n raise e from None\n", globals(), locals())
# Imports at the bottom to avoid circular dependencies: these modules import
# Log from this module.
from mo_logs.log_usingFile import StructuredLogger_usingFile
from mo_logs.log_usingMulti import StructuredLogger_usingMulti
from mo_logs.log_usingStream import StructuredLogger_usingStream

# Default sink: plain stdout until Log.start() configures something better.
if not Log.main_log:
    Log.main_log = StructuredLogger_usingStream(STDOUT)
|
mpl-2.0
|
inside-track/pemi
|
pemi/fields.py
|
1
|
7300
|
import decimal
import datetime
import json
from functools import wraps
import dateutil
import pemi.transforms
__all__ = [
'StringField',
'IntegerField',
'FloatField',
'DateField',
'DateTimeField',
'BooleanField',
'DecimalField',
'JsonField'
]
BLANK_DATE_VALUES = ['null', 'none', 'nan', 'nat']
# Raised when a Field cannot coerce a value to its declared type.
class CoercionError(ValueError): pass

# Raised when a Decimal violates the declared precision/scale.
class DecimalCoercionError(ValueError): pass
def convert_exception(fun):
    '''
    Decorator for Field.coerce implementations: any failure is re-raised as a
    CoercionError whose message records the offending value, the field class,
    and the underlying exception.
    '''
    @wraps(fun)
    def wrapper(self, value):
        try:
            return fun(self, value)
        except Exception as err:
            msg = 'Unable to coerce value "{}" to {}: {}: {}'.format(
                value,
                self.__class__.__name__,
                err.__class__.__name__,
                err
            )
            raise CoercionError(msg)
    return wrapper
#pylint: disable=too-few-public-methods
class Field:
    '''
    Base class for all schema fields.

    A field has an optional name plus arbitrary metadata; the ``null``
    metadata entry (default ``None``) is the value substituted for blank
    input by subclasses' ``coerce`` implementations.
    '''
    def __init__(self, name=None, **metadata):
        self.name = name
        # BUG FIX: self.metadata was assigned twice (the first assignment was
        # dead code); merge user metadata over the defaults in one step.
        default_metadata = {'null': None}
        self.metadata = {**default_metadata, **metadata}
        self.null = self.metadata['null']

    @convert_exception
    def coerce(self, value):
        '''Convert *value* to this field's type; subclasses must override.'''
        raise NotImplementedError

    def __str__(self):
        return '<{} {}>'.format(self.__class__.__name__, self.__dict__.__str__())

    def __eq__(self, other):
        # Fields compare equal when they are the same concrete type with the
        # same name and metadata.
        return type(self) is type(other) \
            and self.metadata == other.metadata \
            and self.name == other.name
class StringField(Field):
    '''Field coercing values to stripped strings.'''
    def __init__(self, name=None, **metadata):
        # Unlike other fields, the default null value is '' rather than None.
        metadata.setdefault('null', '')
        super().__init__(name=name, **metadata)

    @convert_exception
    def coerce(self, value):
        '''Return *value* as a whitespace-stripped string, or null when blank.'''
        if pemi.transforms.isblank(value):
            return self.null
        return str(value).strip()
class IntegerField(Field):
    '''Field coercing values to int; optionally routes through float first.'''
    def __init__(self, name=None, **metadata):
        super().__init__(name=name, **metadata)
        # When coerce_float is set, values like "3.0" become 3 instead of raising.
        self.coerce_float = self.metadata.get('coerce_float', False)

    @convert_exception
    def coerce(self, value):
        '''Return *value* as an int, or null when blank.'''
        if pemi.transforms.isblank(value):
            return self.null
        return int(float(value)) if self.coerce_float else int(value)
class FloatField(Field):
    '''Field coercing values to float.'''
    @convert_exception
    def coerce(self, value):
        '''Return *value* as a float, or null when blank.'''
        return self.null if pemi.transforms.isblank(value) else float(value)
class DateField(Field):
    '''Field coercing values to datetime.date.

    Metadata:
      format - strptime format used when infer_format is False
               (default '%Y-%m-%d')
      infer_format - when True, let dateutil guess the format
    '''
    def __init__(self, name=None, **metadata):
        super().__init__(name=name, **metadata)
        self.format = self.metadata.get('format', '%Y-%m-%d')
        self.infer_format = self.metadata.get('infer_format', False)

    @convert_exception
    def coerce(self, value):
        '''Return *value* as a date; blank or named-blank input maps to null.'''
        if hasattr(value, 'strip'):
            value = value.strip()
        is_named_blank = isinstance(value, str) and value.lower() in BLANK_DATE_VALUES
        if pemi.transforms.isblank(value) or is_named_blank:
            return self.null
        return self.parse(value)

    def parse(self, value):
        # NOTE: datetime must be tested first -- it is a subclass of date.
        if isinstance(value, datetime.datetime):
            return value.date()
        if isinstance(value, datetime.date):
            return value
        if self.infer_format:
            return dateutil.parser.parse(value).date()
        return datetime.datetime.strptime(value, self.format).date()
class DateTimeField(Field):
    '''Field coercing values to datetime.datetime.

    Metadata:
      format - strptime format used when infer_format is False
               (default '%Y-%m-%d %H:%M:%S')
      infer_format - when True, let dateutil guess the format
    '''
    def __init__(self, name=None, **metadata):
        super().__init__(name=name, **metadata)
        self.format = self.metadata.get('format', '%Y-%m-%d %H:%M:%S')
        self.infer_format = self.metadata.get('infer_format', False)

    @convert_exception
    def coerce(self, value):
        '''Return *value* as a datetime; blank or named-blank input maps to null.'''
        if hasattr(value, 'strip'):
            value = value.strip()
        is_named_blank = isinstance(value, str) and value.lower() in BLANK_DATE_VALUES
        if pemi.transforms.isblank(value) or is_named_blank:
            return self.null
        return self.parse(value)

    def parse(self, value):
        # NOTE: datetime must be tested first -- it is a subclass of date.
        if isinstance(value, datetime.datetime):
            return value
        if isinstance(value, datetime.date):
            # Promote bare dates to midnight of that day.
            return datetime.datetime.combine(value, datetime.time.min)
        if self.infer_format:
            return dateutil.parser.parse(value)
        return datetime.datetime.strptime(value, self.format)
class BooleanField(Field):
    '''Field coercing values to bool via configurable truthy/falsey string sets.

    If the metadata key ``unknown_truthiness`` is defined, its value is used
    when the input matches neither set; otherwise an error is raised.
    '''
    def __init__(self, name=None, **metadata):
        super().__init__(name=name, **metadata)
        self.true_values = self.metadata.get(
            'true_values',
            ['t', 'true', 'y', 'yes', 'on', '1']
        )
        self.false_values = self.metadata.get(
            'false_values',
            ['f', 'false', 'n', 'no', 'off', '0']
        )

    @convert_exception
    def coerce(self, value):
        '''Return *value* as a bool; blank input maps to null.'''
        if hasattr(value, 'strip'):
            value = value.strip()
        if isinstance(value, bool):
            return value
        if pemi.transforms.isblank(value):
            return self.null
        return self.parse(value)

    def parse(self, value):
        normalized = str(value).lower()
        if normalized in self.true_values:
            return True
        if normalized in self.false_values:
            return False
        if 'unknown_truthiness' in self.metadata:
            return self.metadata['unknown_truthiness']
        raise ValueError('Not a boolean value')
class DecimalField(Field):
    '''Field coercing values to decimal.Decimal with precision/scale checks.

    Metadata:
      precision - max total digits allowed (default 16)
      scale - max fractional digits allowed (default 2)
      truncate_decimal - round to ``scale`` digits before validating (default False)
      enforce_decimal - raise DecimalCoercionError on overflow (default True)
    '''
    def __init__(self, name=None, **metadata):
        super().__init__(name=name, **metadata)
        self.precision = self.metadata.get('precision', 16)
        self.scale = self.metadata.get('scale', 2)
        self.truncate_decimal = self.metadata.get('truncate_decimal', False)
        self.enforce_decimal = self.metadata.get('enforce_decimal', True)

    @convert_exception
    def coerce(self, value):
        '''Return *value* as a Decimal; blank input maps to null.'''
        if pemi.transforms.isblank(value):
            return self.null
        return self.parse(value)

    def parse(self, value):
        dec = decimal.Decimal(str(value))
        if dec != dec:  #pylint: disable=comparison-with-itself
            # NaN is the only value not equal to itself; pass it through.
            return dec
        if self.truncate_decimal:
            dec = round(dec, self.scale)
        if self.enforce_decimal:
            detected_precision = len(dec.as_tuple().digits)
            detected_scale = -dec.as_tuple().exponent
            if detected_precision > self.precision:
                raise DecimalCoercionError(
                    ('Decimal coercion error for "{}". ' \
                     + 'Expected precision: {}, Actual precision: {}').format(
                         dec, self.precision, detected_precision
                     )
                )
            if detected_scale > self.scale:
                raise DecimalCoercionError(
                    ('Decimal coercion error for "{}". ' \
                     + 'Expected scale: {}, Actual scale: {}').format(
                         dec, self.scale, detected_scale
                     )
                )
        return dec
class JsonField(Field):
    '''Field parsing JSON strings into Python objects.'''
    @convert_exception
    def coerce(self, value):
        '''Return *value* parsed as JSON; blank input maps to null.'''
        if pemi.transforms.isblank(value):
            return self.null
        try:
            return json.loads(value)
        except TypeError:
            # Already-deserialized objects (dict/list/...) raise TypeError
            # from json.loads; pass them through unchanged.
            return value
#pylint: enable=too-few-public-methods
|
mit
|
leeon/annotated-django
|
django/contrib/formtools/tests/tests.py
|
32
|
7417
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import unittest
import warnings
from django import http
from django.contrib.formtools import preview, utils
from django.test import TestCase, override_settings
from django.utils._os import upath
from django.contrib.formtools.tests.forms import (
HashTestBlankForm, HashTestForm, TestForm,
)
success_string = "Done was called!"
success_string_encoded = success_string.encode()
class TestFormPreview(preview.FormPreview):
    """FormPreview subclass used as a fixture: marks its context and reports done()."""
    def get_context(self, request, form):
        base_context = super(TestFormPreview, self).get_context(request, form)
        base_context.update({'custom_context': True})
        return base_context

    def get_initial(self, request):
        return {'field1': 'Works!'}

    def done(self, request, cleaned_data):
        return http.HttpResponse(success_string)
@override_settings(
    TEMPLATE_DIRS=(
        os.path.join(os.path.dirname(upath(__file__)), 'templates'),
    ),
    ROOT_URLCONF='django.contrib.formtools.tests.urls',
)
class PreviewTests(TestCase):
    """End-to-end tests of formtools' two-stage preview/confirm workflow."""

    def setUp(self):
        super(PreviewTests, self).setUp()
        # Create a FormPreview instance to share between tests
        self.preview = preview.FormPreview(TestForm)
        input_template = '<input type="hidden" name="%s" value="%s" />'
        self.input = input_template % (self.preview.unused_name('stage'), "%d")
        self.test_data = {'field1': 'foo', 'field1_': 'asdf'}

    def test_unused_name(self):
        """
        Verifies name mangling to get unique field name.
        """
        self.assertEqual(self.preview.unused_name('field1'), 'field1__')

    def test_form_get(self):
        """
        Test contrib.formtools.preview form retrieval.
        Use the client library to see if we can successfully retrieve
        the form (mostly testing the setup ROOT_URLCONF
        process). Verify that an additional hidden input field
        is created to manage the stage.
        """
        response = self.client.get('/preview/')
        stage = self.input % 1
        self.assertContains(response, stage, 1)
        self.assertEqual(response.context['custom_context'], True)
        self.assertEqual(response.context['form'].initial, {'field1': 'Works!'})

    def test_form_preview(self):
        """
        Test contrib.formtools.preview form preview rendering.
        Use the client library to POST to the form to see if a preview
        is returned. If we do get a form back check that the hidden
        value is correctly managing the state of the form.
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage': 1, 'date1': datetime.date(2006, 10, 25)})
        response = self.client.post('/preview/', self.test_data)
        # Check to confirm stage is set to 2 in output form.
        stage = self.input % 2
        self.assertContains(response, stage, 1)

    def test_form_submit(self):
        """
        Test contrib.formtools.preview form submittal.
        Use the client library to POST to the form with stage set to 3
        to see if our forms done() method is called. Check first
        without the security hash, verify failure, retry with security
        hash and verify success.
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage': 2, 'date1': datetime.date(2006, 10, 25)})
        response = self.client.post('/preview/', self.test_data)
        self.assertNotEqual(response.content, success_string_encoded)
        hash = self.preview.security_hash(None, TestForm(self.test_data))
        self.test_data.update({'hash': hash})
        response = self.client.post('/preview/', self.test_data)
        self.assertEqual(response.content, success_string_encoded)

    def test_bool_submit(self):
        """
        Test contrib.formtools.preview form submittal when form contains:
        BooleanField(required=False)
        Ticket: #6209 - When an unchecked BooleanField is previewed, the preview
        form's hash would be computed with no value for ``bool1``. However, when
        the preview form is rendered, the unchecked hidden BooleanField would be
        rendered with the string value 'False'. So when the preview form is
        resubmitted, the hash would be computed with the value 'False' for
        ``bool1``. We need to make sure the hashes are the same in both cases.
        """
        self.test_data.update({'stage': 2})
        hash = self.preview.security_hash(None, TestForm(self.test_data))
        self.test_data.update({'hash': hash, 'bool1': 'False'})
        with warnings.catch_warnings(record=True):
            response = self.client.post('/preview/', self.test_data)
            self.assertEqual(response.content, success_string_encoded)

    def test_form_submit_good_hash(self):
        """
        Test contrib.formtools.preview form submittal, using a correct
        hash
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage': 2})
        response = self.client.post('/preview/', self.test_data)
        self.assertNotEqual(response.content, success_string_encoded)
        hash = utils.form_hmac(TestForm(self.test_data))
        self.test_data.update({'hash': hash})
        response = self.client.post('/preview/', self.test_data)
        self.assertEqual(response.content, success_string_encoded)

    def test_form_submit_bad_hash(self):
        """
        Test contrib.formtools.preview form submittal does not proceed
        if the hash is incorrect.
        """
        # Pass strings for form submittal and add stage variable to
        # show we previously saw first stage of the form.
        self.test_data.update({'stage': 2})
        response = self.client.post('/preview/', self.test_data)
        self.assertEqual(response.status_code, 200)
        self.assertNotEqual(response.content, success_string_encoded)
        hash = utils.form_hmac(TestForm(self.test_data)) + "bad"
        self.test_data.update({'hash': hash})
        # BUG FIX: this previously posted to '/previewpreview/', a URL that is
        # not routed, so the "bad hash rejected" assertion passed vacuously.
        # Post to the real preview view so the bad hash is actually exercised.
        response = self.client.post('/preview/', self.test_data)
        self.assertNotEqual(response.content, success_string_encoded)
class FormHmacTests(unittest.TestCase):
    """Tests for utils.form_hmac, the security hash over form contents."""

    def test_textfield_hash(self):
        """
        Regression test for #10034: the hash generation function should ignore
        leading/trailing whitespace so as to be friendly to broken browsers that
        submit it (usually in textareas).
        """
        f1 = HashTestForm({'name': 'joe', 'bio': 'Speaking español.'})
        f2 = HashTestForm({'name': ' joe', 'bio': 'Speaking español. '})
        hash1 = utils.form_hmac(f1)
        hash2 = utils.form_hmac(f2)
        self.assertEqual(hash1, hash2)

    def test_empty_permitted(self):
        """
        Regression test for #10643: the security hash should allow forms with
        empty_permitted = True, or forms where data has not changed.
        """
        f1 = HashTestBlankForm({})
        f2 = HashTestForm({}, empty_permitted=True)
        hash1 = utils.form_hmac(f1)
        hash2 = utils.form_hmac(f2)
        self.assertEqual(hash1, hash2)
|
bsd-3-clause
|
Robpol86/FlashAirMusic
|
tests/test_convert_transcode.py
|
1
|
13863
|
"""Test functions in module."""
import asyncio
import itertools
import re
import signal
from textwrap import dedent
import pytest
from flash_air_music.configuration import FFMPEG_DEFAULT_BINARY
from flash_air_music.convert import transcode
from flash_air_music.convert.discover import get_songs, Song
from tests import HERE
# String-form skipif: evaluates "True"/"False"; skips when ffmpeg is absent.
@pytest.mark.skipif(str(FFMPEG_DEFAULT_BINARY is None))
def test_convert_file_success(monkeypatch, tmpdir, caplog):
    """Test convert_file() with no errors.
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    """
    # Point the module at the real ffmpeg binary and shorten status polling.
    monkeypatch.setattr(transcode, 'GLOBAL_MUTABLE_CONFIG', {'--ffmpeg-bin': FFMPEG_DEFAULT_BINARY})
    monkeypatch.setattr(transcode, 'SLEEP_FOR', 0.1)
    source_dir = tmpdir.ensure_dir('source')
    target_dir = tmpdir.ensure_dir('target')
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song1.mp3'))
    song = Song(str(source_dir.join('song1.mp3')), str(source_dir), str(target_dir))
    assert song.needs_action is True
    # Run.
    loop = asyncio.get_event_loop()
    command, exit_status = loop.run_until_complete(transcode.convert_file(song))[1:]
    messages = [r.message for r in caplog.records if r.name.startswith('flash_air_music')]
    # Verify: conversion succeeded and a re-discovered Song needs no action.
    assert exit_status == 0
    assert target_dir.join('song1.mp3').check(file=True)
    assert Song(str(source_dir.join('song1.mp3')), str(source_dir), str(target_dir)).needs_action is False
    # Verify log.
    command_str = str(command)
    assert 'Converting song1.mp3' in messages
    assert 'Storing metadata in song1.mp3' in messages
    assert any(command_str in m for m in messages)
    assert any(re.match(r'^Process \d+ exited 0$', m) for m in messages)
@pytest.mark.skipif(str(FFMPEG_DEFAULT_BINARY is None))
@pytest.mark.parametrize('delete', [False, True])
def test_convert_file_failure(monkeypatch, tmpdir, caplog, delete):
    """Test convert_file() with errors.
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param bool delete: Test removing bad target file.
    """
    # Stub ffmpeg that always fails.
    ffmpeg = tmpdir.join('ffmpeg')
    ffmpeg.write(dedent("""\
        #!/bin/bash
        exit 1
        """))
    ffmpeg.chmod(0o0755)
    monkeypatch.setattr(transcode, 'GLOBAL_MUTABLE_CONFIG', {'--ffmpeg-bin': str(ffmpeg)})
    monkeypatch.setattr(transcode, 'SLEEP_FOR', 0.1)
    source_dir = tmpdir.ensure_dir('source')
    target_dir = tmpdir.ensure_dir('target')
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song1.mp3'))
    if delete:
        # Pre-seed a stale target file; it should be removed after the failure.
        HERE.join('1khz_sine_2.mp3').copy(target_dir.join('song1.mp3'))
    song = Song(str(source_dir.join('song1.mp3')), str(source_dir), str(target_dir))
    assert song.needs_action is True
    # Run.
    loop = asyncio.get_event_loop()
    command, exit_status = loop.run_until_complete(transcode.convert_file(song))[1:]
    messages = [r.message for r in caplog.records if r.name.startswith('flash_air_music')]
    # Verify: failure leaves no target file and the song still needs action.
    assert exit_status == 1
    assert not target_dir.join('song1.mp3').check(file=True)
    assert Song(str(source_dir.join('song1.mp3')), str(source_dir), str(target_dir)).needs_action is True
    # Verify log.
    command_str = str(command)
    assert 'Converting song1.mp3' in messages
    assert 'Storing metadata in song1.mp3' not in messages
    assert 'Failed to convert song1.mp3! ffmpeg exited 1.' in messages
    assert any(command_str in m for m in messages)
    assert any(re.match(r'^Process \d+ exited 1$', m) for m in messages)
    if delete:
        assert 'Removing {}'.format(target_dir.join('song1.mp3')) in messages
    else:
        assert 'Removing {}'.format(target_dir.join('song1.mp3')) not in messages
@pytest.mark.skipif(str(FFMPEG_DEFAULT_BINARY is None))
def test_convert_file_deadlock(monkeypatch, tmpdir, caplog):
    """Test convert_file() with ffmpeg outputting a lot of data, filling up buffers.
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    """
    # Wrapper script floods stdout/stderr after the real conversion to prove
    # the pipe readers drain output and avoid a deadlock on full buffers.
    ffmpeg = tmpdir.join('ffmpeg')
    ffmpeg.write(dedent("""\
        #!/bin/bash
        ffmpeg $@
        for i in {1..10240}; do echo -n test_stdout$i; done
        for i in {1..10240}; do echo -n test_stderr$i >&2; done
        """))
    ffmpeg.chmod(0o0755)
    monkeypatch.setattr(transcode, 'GLOBAL_MUTABLE_CONFIG', {'--ffmpeg-bin': str(ffmpeg)})
    source_dir = tmpdir.ensure_dir('source')
    target_dir = tmpdir.ensure_dir('target')
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song1.mp3'))
    song = Song(str(source_dir.join('song1.mp3')), str(source_dir), str(target_dir))
    # Run.
    loop = asyncio.get_event_loop()
    command, exit_status = loop.run_until_complete(transcode.convert_file(song))[1:]
    messages = [r.message for r in caplog.records if r.name.startswith('flash_air_music')]
    # Verify.
    assert exit_status == 0
    assert target_dir.join('song1.mp3').check(file=True)
    assert Song(str(source_dir.join('song1.mp3')), str(source_dir), str(target_dir)).needs_action is False
    # Verify log: the final flood lines must have been captured in full.
    command_str = str(command)
    assert 'Converting song1.mp3' in messages
    assert 'Storing metadata in song1.mp3' in messages
    assert any(command_str in m for m in messages)
    assert any(re.match(r'^Process \d+ exited 0$', m) for m in messages)
    assert any(re.match(r'^Process \d+ still running\.\.\.$', m) for m in messages)
    assert any(m.endswith('test_stdout10240') for m in messages)
    assert any(m.endswith('test_stderr10240') for m in messages)
@pytest.mark.parametrize('exit_signal', [signal.SIGINT, signal.SIGTERM, signal.SIGKILL])
def test_convert_file_timeout(monkeypatch, tmpdir, caplog, exit_signal):
    """Test convert_file() with a stalled process.
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param int exit_signal: Script exits on this signal.
    """
    # Stub "ffmpeg" ignores signals until EXIT_SIGNAL is delivered, forcing
    # convert_file() through its SIGINT -> SIGTERM -> SIGKILL escalation.
    ffmpeg = tmpdir.join('ffmpeg')
    ffmpeg.write(dedent("""\
        #!/usr/bin/env python
        import os, signal, sys, time
        def exit(signum, _):
            if int(os.environ['EXIT_SIGNAL']) == signum:
                print('Catching {}'.format(os.environ['EXIT_SIGNAL']))
                sys.exit(2)
            print('Ignoring {}'.format(signum))
        signal.signal(signal.SIGINT, exit)
        signal.signal(signal.SIGTERM, exit)
        for i in range(10):
            print(i)
            time.sleep(1)
        sys.exit(1)
        """))
    ffmpeg.chmod(0o0755)
    monkeypatch.setattr(transcode, 'GLOBAL_MUTABLE_CONFIG', {'--ffmpeg-bin': str(ffmpeg)})
    monkeypatch.setattr(transcode, 'SLEEP_FOR', 0.1)
    monkeypatch.setattr(transcode, 'TIMEOUT', 0.5)
    monkeypatch.setenv('EXIT_SIGNAL', exit_signal)
    source_dir = tmpdir.ensure_dir('source')
    target_dir = tmpdir.ensure_dir('target')
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song1.mp3'))
    song = Song(str(source_dir.join('song1.mp3')), str(source_dir), str(target_dir))
    # Run.
    loop = asyncio.get_event_loop()
    exit_status = loop.run_until_complete(transcode.convert_file(song))[-1]
    messages = [r.message for r in caplog.records if r.name.startswith('flash_air_music')]
    # Verify: SIGKILL cannot be caught, so its exit status is -9.
    assert exit_status == (2 if exit_signal != signal.SIGKILL else -9)
    assert 'Converting song1.mp3' in messages
    assert 'Storing metadata in song1.mp3' not in messages
    assert any(re.match(r'^Process \d+ exited {}$'.format(exit_status), m) for m in messages)
    assert any(re.match(r'^Process \d+ still running\.\.\.$', m) for m in messages)
    # Verify based on exit_signal: escalation stops once the process dies.
    sent_signals = [m for m in messages if m.startswith('Timeout exceeded')]
    assert sent_signals[0].startswith('Timeout exceeded, sending signal 2')
    if exit_signal in (signal.SIGTERM, signal.SIGKILL):
        assert sent_signals[1].startswith('Timeout exceeded, sending signal 15')
    if exit_signal == signal.SIGKILL:
        assert sent_signals[2].startswith('Timeout exceeded, sending signal 9')
@pytest.mark.skipif(str(FFMPEG_DEFAULT_BINARY is None))
@pytest.mark.parametrize('mode', ['failure', 'exception'])
def test_convert_songs_errors(monkeypatch, tmpdir, caplog, mode):
    """Test convert_songs()'s error handling.
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    :param str mode: Scenario to test for.
    """
    # 'exception' mode points at a non-existent ffmpeg path (raises inside
    # convert_songs); 'failure' mode makes ffmpeg exit 2 for song1 only.
    ffmpeg = tmpdir.join('ffmpeg')
    if mode != 'exception':
        ffmpeg.write(dedent("""\
            #!/bin/bash
            [ "$ERROR_ON" == "$(basename $2)" ] && exit 2
            ffmpeg $@
            """))
        ffmpeg.chmod(0o0755)
    monkeypatch.setattr(transcode, 'GLOBAL_MUTABLE_CONFIG', {'--ffmpeg-bin': str(ffmpeg), '--threads': '2'})
    monkeypatch.setenv('ERROR_ON', 'song1.mp3' if mode == 'failure' else '')
    source_dir = tmpdir.ensure_dir('source')
    target_dir = tmpdir.ensure_dir('target')
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song1.mp3'))
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song2.mp3'))
    songs = get_songs(str(source_dir), str(target_dir))[0]
    # Run.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(transcode.convert_songs(songs))
    messages = [r.message for r in caplog.records if r.name.startswith('flash_air_music')]
    # Verify files: song1 always fails; song2 succeeds only in 'failure' mode.
    assert not target_dir.join('song1.mp3').check()
    if mode == 'exception':
        assert not target_dir.join('song2.mp3').check()
    else:
        assert target_dir.join('song2.mp3').check(file=True)
    # Verify log.
    assert 'Storing metadata in song1.mp3' not in messages
    if mode == 'exception':
        assert 'Storing metadata in song2.mp3' not in messages
        assert len([True for m in messages if m.startswith('BUG!')]) == 2
        assert any(re.match(r'Beginning to convert 2 file\(s\) up to 2 at a time\.$', m) for m in messages)
        assert any(re.match(r'Done converting 2 file\(s\) \(2 failed\)\.$', m) for m in messages)
    elif mode == 'failure':
        assert 'Storing metadata in song2.mp3' in messages
        assert len([True for m in messages if m.startswith('BUG!')]) == 0
        assert any(re.match(r'Beginning to convert 2 file\(s\) up to 2 at a time\.$', m) for m in messages)
        assert any(re.match(r'Done converting 2 file\(s\) \(1 failed\)\.$', m) for m in messages)
@pytest.mark.skipif(str(FFMPEG_DEFAULT_BINARY is None))
def test_convert_songs_single(monkeypatch, tmpdir, caplog):
    """Test convert_songs() with one file.
    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    """
    monkeypatch.setattr(transcode, 'GLOBAL_MUTABLE_CONFIG', {'--ffmpeg-bin': FFMPEG_DEFAULT_BINARY, '--threads': '2'})
    src = tmpdir.ensure_dir('source')
    dst = tmpdir.ensure_dir('target')
    HERE.join('1khz_sine_2.mp3').copy(src.join('song1.mp3'))
    discovered = get_songs(str(src), str(dst))[0]
    # Drive the conversion through the event loop.
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(transcode.convert_songs(discovered))
    log_messages = [rec.message for rec in caplog.records if rec.name.startswith('flash_air_music')]
    # The target file must exist and the expected log lines must be present.
    assert dst.join('song1.mp3').check(file=True)
    assert 'Storing metadata in song1.mp3' in log_messages
    assert any(re.match(r'Beginning to convert 1 file\(s\) up to 2 at a time\.$', m) for m in log_messages)
    assert any(re.match(r'Done converting 1 file\(s\) \(0 failed\)\.$', m) for m in log_messages)
@pytest.mark.skipif(FFMPEG_DEFAULT_BINARY is None, reason='ffmpeg binary not available')
def test_convert_songs_semaphore(monkeypatch, tmpdir, caplog):
    """Test convert_songs() concurrency limit.

    Wraps ffmpeg in a script that prints per-song START/END timestamps, then
    counts how many of the resulting time intervals overlapped.  The previous
    ``skipif(str(FFMPEG_DEFAULT_BINARY is None))`` relied on pytest's
    deprecated string-condition evaluation; a boolean condition with an
    explicit reason is the supported form.

    :param monkeypatch: pytest fixture.
    :param tmpdir: pytest fixture.
    :param caplog: pytest extension fixture.
    """
    ffmpeg = tmpdir.join('ffmpeg')
    ffmpeg.write(dedent("""\
        #!/bin/bash
        python3 -c "import time; print('$(basename $2) START_TIME:', time.time())"
        ffmpeg $@
        python3 -c "import time; print('$(basename $2) END_TIME:', time.time())"
        """))
    ffmpeg.chmod(0o0755)
    monkeypatch.setattr(transcode, 'GLOBAL_MUTABLE_CONFIG', {'--ffmpeg-bin': str(ffmpeg), '--threads': '2'})
    source_dir = tmpdir.ensure_dir('source')
    target_dir = tmpdir.ensure_dir('target')
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song1.mp3'))
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song2.mp3'))
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song3.mp3'))
    HERE.join('1khz_sine_2.mp3').copy(source_dir.join('song4.mp3'))
    songs = get_songs(str(source_dir), str(target_dir))[0]
    # Run.
    loop = asyncio.get_event_loop()
    loop.run_until_complete(transcode.convert_songs(songs))
    messages = [r.message for r in caplog.records if r.name.startswith('flash_air_music')]
    # Verify all four songs were converted and logged.
    assert target_dir.join('song1.mp3').check(file=True)
    assert 'Storing metadata in song1.mp3' in messages
    assert 'Storing metadata in song2.mp3' in messages
    assert 'Storing metadata in song3.mp3' in messages
    assert 'Storing metadata in song4.mp3' in messages
    assert any(re.match(r'Beginning to convert 4 file\(s\) up to 2 at a time\.$', m) for m in messages)
    assert any(re.match(r'Done converting 4 file\(s\) \(0 failed\)\.$', m) for m in messages)
    # Verify overlaps: extract (start, end) per song and count concurrent
    # pairs among the 6 combinations; the test allows at most 3.
    regex = re.compile(r'(song\d\.mp3) START_TIME: ([\d\.]+)\n\1 END_TIME: ([\d\.]+)')
    intervals = [(float(p[0]), float(p[1])) for p in (g.groups()[1:] for g in (regex.search(m) for m in messages) if g)]
    intervals.sort()
    assert len(intervals) == 4
    overlaps = 0
    for a, b in itertools.combinations(range(4), 2):
        if intervals[b][0] < intervals[a][0] < intervals[b][1] or intervals[a][0] < intervals[b][0] < intervals[a][1]:
            overlaps += 1
    assert overlaps <= 3
|
mit
|
joonro/PyTables
|
tables/tests/test_earray.py
|
6
|
102218
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import sys
import numpy
import tables
from tables import Int16Atom, Int32Atom, Float64Atom, StringAtom
from tables.utils import byteorders
from tables.tests import common
from tables.tests.common import allequal
from tables.tests.common import unittest
from tables.tests.common import PyTablesTestCase as TestCase
class BasicTestCase(common.TempFileMixin, TestCase):
    """Base class exercising EArray creation, iteration, read and write.

    Subclasses parameterize the tests by overriding the class attributes
    below.
    """

    # Default values
    obj = None            # optional initial data handed to create_earray()
    flavor = "numpy"
    type = 'int32'        # atom type name; "string" selects StringAtom
    dtype = 'int32'
    shape = (2, 0)        # None means "derive shape from obj"
    start = 0             # slice parameters used by the read/iterate tests
    stop = 10
    step = 1
    length = 1            # itemsize for string atoms
    chunksize = 5         # rows per append() call (not the HDF5 chunkshape)
    nappends = 10         # number of append() calls made by populateFile()
    compress = 0
    complib = "zlib"  # Default compression library
    shuffle = 0
    fletcher32 = 0
    reopen = 1  # Tells whether the file has to be reopened on each test or not
    def setUp(self):
        """Create and populate the HDF5 file; optionally close it again.

        When ``self.reopen`` is true the file is closed here so each test
        method reopens it, exercising the on-disk representation.
        """
        super(BasicTestCase, self).setUp()
        # Create an instance of an HDF5 Table
        self.rootgroup = self.h5file.root
        self.populateFile()
        if self.reopen:
            # Close the file
            self.h5file.close()
def populateFile(self):
group = self.rootgroup
obj = self.obj
if obj is None:
if self.type == "string":
atom = StringAtom(itemsize=self.length)
else:
atom = tables.Atom.from_type(self.type)
else:
atom = None
title = self.__class__.__name__
filters = tables.Filters(complevel=self.compress,
complib=self.complib,
shuffle=self.shuffle,
fletcher32=self.fletcher32)
earray = self.h5file.create_earray(group, 'earray1',
atom=atom, shape=self.shape,
title=title, filters=filters,
expectedrows=1, obj=obj)
earray.flavor = self.flavor
# Fill it with rows
self.rowshape = list(earray.shape)
if obj is not None:
self.rowshape[0] = 0
self.objsize = self.length
for i in self.rowshape:
if i != 0:
self.objsize *= i
self.extdim = earray.extdim
self.objsize *= self.chunksize
self.rowshape[earray.extdim] = self.chunksize
if self.type == "string":
object = numpy.ndarray(buffer=b"a"*self.objsize,
shape=self.rowshape,
dtype="S%s" % earray.atom.itemsize)
else:
object = numpy.arange(self.objsize, dtype=earray.atom.dtype.base)
object.shape = self.rowshape
if common.verbose:
if self.flavor == "numpy":
print("Object to append -->", object)
else:
print("Object to append -->", repr(object))
for i in range(self.nappends):
if self.type == "string":
earray.append(object)
else:
earray.append(object * i)
def _get_shape(self):
if self.shape is not None:
shape = self.shape
else:
shape = numpy.asarray(self.obj).shape
return shape
    def test00_attributes(self):
        """Check flavor/shape/ndim/nrows/atom metadata of the stored EArray."""
        if self.reopen:
            self._reopen()
        obj = self.h5file.get_node("/earray1")
        # Expected final shape: the extendable dimension grew by
        # chunksize * nappends, plus the initial rows when obj was supplied.
        shape = self._get_shape()
        shape = list(shape)
        shape[self.extdim] = self.chunksize * self.nappends
        if self.obj is not None:
            shape[self.extdim] += len(self.obj)
        shape = tuple(shape)
        self.assertEqual(obj.flavor, self.flavor)
        self.assertEqual(obj.shape, shape)
        self.assertEqual(obj.ndim, len(shape))
        self.assertEqual(obj.nrows, shape[self.extdim])
        self.assertEqual(obj.atom.type, self.type)
    def test01_iterEArray(self):
        """Checking enlargeable array iterator."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_iterEArray..." % self.__class__.__name__)
        # Create an instance of an HDF5 Table
        if self.reopen:
            self._reopen()
        earray = self.h5file.get_node("/earray1")
        # Choose a small value for buffer size
        earray.nrowsinbuf = 3
        if common.verbose:
            print("EArray descr:", repr(earray))
            print("shape of read array ==>", earray.shape)
            print("reopening?:", self.reopen)
        # Build the array to do comparisons
        if self.type == "string":
            object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                    shape=self.rowshape,
                                    dtype="S%s" % earray.atom.itemsize)
        else:
            object_ = numpy.arange(self.objsize, dtype=earray.atom.dtype.base)
            object_.shape = self.rowshape
        # Move the extendable dimension to axis 0 so rows index directly.
        object_ = object_.swapaxes(earray.extdim, 0)
        if self.obj is not None:
            initialrows = len(self.obj)
        else:
            initialrows = 0
        shape = self._get_shape()
        # Read all the array
        for idx, row in enumerate(earray):
            # Rows that came from the initial ``obj`` are compared verbatim.
            if idx < initialrows:
                self.assertTrue(
                    allequal(row, numpy.asarray(self.obj[idx]), self.flavor))
                continue
            # Position of this row inside its chunk; at each chunk boundary
            # rebuild the expected chunk (appended data was ``object_ * i``).
            chunk = int((earray.nrow - initialrows) % self.chunksize)
            if chunk == 0:
                if self.type == "string":
                    object__ = object_
                else:
                    i = int(earray.nrow - initialrows)
                    object__ = object_ * (i // self.chunksize)
            object = object__[chunk]
            # The next adds much more verbosity
            if common.verbose and 0:
                print("number of row ==>", earray.nrow)
                if hasattr(object, "shape"):
                    print("shape should look as:", object.shape)
                print("row in earray ==>", repr(row))
                print("Should look like ==>", repr(object))
            self.assertEqual(initialrows + self.nappends * self.chunksize,
                             earray.nrows)
            self.assertTrue(allequal(row, object, self.flavor))
            if hasattr(row, "shape"):
                self.assertEqual(len(row.shape), len(shape) - 1)
            else:
                # Scalar case
                self.assertEqual(len(shape), 1)
        # Check filters:
        if self.compress != earray.filters.complevel and common.verbose:
            print("Error in compress. Class:", self.__class__.__name__)
            print("self, earray:", self.compress, earray.filters.complevel)
        self.assertEqual(earray.filters.complevel, self.compress)
        if self.compress > 0 and tables.which_lib_version(self.complib):
            self.assertEqual(earray.filters.complib, self.complib)
        if self.shuffle != earray.filters.shuffle and common.verbose:
            print("Error in shuffle. Class:", self.__class__.__name__)
            print("self, earray:", self.shuffle, earray.filters.shuffle)
        self.assertEqual(self.shuffle, earray.filters.shuffle)
        if self.fletcher32 != earray.filters.fletcher32 and common.verbose:
            print("Error in fletcher32. Class:", self.__class__.__name__)
            print("self, earray:", self.fletcher32,
                  earray.filters.fletcher32)
        self.assertEqual(self.fletcher32, earray.filters.fletcher32)
    def test02_sssEArray(self):
        """Checking enlargeable array iterator with (start, stop, step)"""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_sssEArray..." % self.__class__.__name__)
        # Create an instance of an HDF5 Table
        if self.reopen:
            self._reopen()
        earray = self.h5file.get_node("/earray1")
        # Choose a small value for buffer size
        earray.nrowsinbuf = 3
        if common.verbose:
            print("EArray descr:", repr(earray))
            print("shape of read array ==>", earray.shape)
            print("reopening?:", self.reopen)
        # Build the array to do comparisons
        if self.type == "string":
            object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                    shape=self.rowshape,
                                    dtype="S%s" % earray.atom.itemsize)
        else:
            object_ = numpy.arange(self.objsize, dtype=earray.atom.dtype.base)
            object_.shape = self.rowshape
        # Move the extendable dimension to axis 0 so rows index directly.
        object_ = object_.swapaxes(earray.extdim, 0)
        if self.obj is not None:
            initialrows = len(self.obj)
        else:
            initialrows = 0
        shape = self._get_shape()
        # Read all the array
        for idx, row in enumerate(earray.iterrows(start=self.start,
                                                  stop=self.stop,
                                                  step=self.step)):
            if idx < initialrows:
                self.assertTrue(
                    allequal(row, numpy.asarray(self.obj[idx]), self.flavor))
                continue
            # Position of this row inside its chunk, then rebuild the
            # expected chunk for comparison (appended data was object_ * i).
            if self.chunksize == 1:
                index = 0
            else:
                index = int((earray.nrow - initialrows) % self.chunksize)
            if self.type == "string":
                object__ = object_
            else:
                i = int(earray.nrow - initialrows)
                object__ = object_ * (i // self.chunksize)
            object = object__[index]
            # The next adds much more verbosity
            if common.verbose and 0:
                print("number of row ==>", earray.nrow)
                if hasattr(object, "shape"):
                    print("shape should look as:", object.shape)
                print("row in earray ==>", repr(row))
                print("Should look like ==>", repr(object))
            self.assertEqual(initialrows + self.nappends * self.chunksize,
                             earray.nrows)
            self.assertTrue(allequal(row, object, self.flavor))
            if hasattr(row, "shape"):
                self.assertEqual(len(row.shape), len(shape) - 1)
            else:
                # Scalar case
                self.assertEqual(len(shape), 1)
    def test03_readEArray(self):
        """Checking read() of enlargeable arrays."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03_readEArray..." % self.__class__.__name__)
        # This conversion made just in case indices are numpy scalars
        # NOTE(review): ``long`` is not defined in this module; presumably it
        # is provided by a Python 2/3 compatibility shim -- confirm.
        if self.start is not None:
            self.start = long(self.start)
        if self.stop is not None:
            self.stop = long(self.stop)
        if self.step is not None:
            self.step = long(self.step)
        # Create an instance of an HDF5 Table
        if self.reopen:
            self._reopen()
        earray = self.h5file.get_node("/earray1")
        # Choose a small value for buffer size
        earray.nrowsinbuf = 3
        if common.verbose:
            print("EArray descr:", repr(earray))
            print("shape of read array ==>", earray.shape)
            print("reopening?:", self.reopen)
        # Build the array to do comparisons
        if self.type == "string":
            object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                    shape=self.rowshape,
                                    dtype="S%s" % earray.atom.itemsize)
        else:
            object_ = numpy.arange(self.objsize, dtype=earray.atom.dtype.base)
            object_.shape = self.rowshape
        object_ = object_.swapaxes(earray.extdim, 0)
        if self.obj is not None:
            initialrows = len(self.obj)
        else:
            initialrows = 0
        # Build the complete expected array: initial rows + appended chunks.
        rowshape = self.rowshape
        rowshape[self.extdim] *= (self.nappends + initialrows)
        if self.type == "string":
            object__ = numpy.empty(
                shape=rowshape, dtype="S%s" % earray.atom.itemsize)
        else:
            object__ = numpy.empty(shape=rowshape, dtype=self.dtype)
        object__ = object__.swapaxes(0, self.extdim)
        if initialrows:
            object__[0:initialrows] = self.obj
        for i in range(self.nappends):
            j = initialrows + i * self.chunksize
            if self.type == "string":
                object__[j:j + self.chunksize] = object_
            else:
                object__[j:j + self.chunksize] = object_ * i
        stop = self.stop
        if self.nappends:
            # stop == None means read only the element designed by start
            # (in read() contexts)
            if self.stop is None:
                if self.start == -1:  # corner case
                    stop = earray.nrows
                else:
                    stop = self.start + 1
            # Protection against number of elements less than existing
            # if rowshape[self.extdim] < self.stop or self.stop == 0:
            if rowshape[self.extdim] < stop:
                # self.stop == 0 means last row only in read()
                # and not in [::] slicing notation
                stop = rowshape[self.extdim]
            # do a copy() in order to ensure that len(object._data)
            # actually do a measure of its length
            #object = object__[self.start:stop:self.step].copy()
            object = object__[self.start:self.stop:self.step].copy()
            # Swap the axes again to have normal ordering
            if self.flavor == "numpy":
                object = object.swapaxes(0, self.extdim)
        else:
            object = numpy.empty(shape=self.shape, dtype=self.dtype)
        # Read all the array
        try:
            row = earray.read(self.start, self.stop, self.step)
        except IndexError:
            row = numpy.empty(shape=self.shape, dtype=self.dtype)
        if common.verbose:
            if hasattr(object, "shape"):
                print("shape should look as:", object.shape)
            print("Object read ==>", repr(row))
            print("Should look like ==>", repr(object))
        self.assertEqual(initialrows + self.nappends * self.chunksize,
                         earray.nrows)
        self.assertTrue(allequal(row, object, self.flavor))
        shape = self._get_shape()
        if hasattr(row, "shape"):
            self.assertEqual(len(row.shape), len(shape))
            if self.flavor == "numpy":
                self.assertEqual(row.itemsize, earray.atom.itemsize)
        else:
            # Scalar case
            self.assertEqual(len(shape), 1)
    def test03_readEArray_out_argument(self):
        """Checking read() of enlargeable arrays with a pre-allocated ``out``."""
        # This conversion made just in case indices are numpy scalars
        # NOTE(review): ``long`` presumably comes from a py2/py3 compat shim.
        if self.start is not None:
            self.start = long(self.start)
        if self.stop is not None:
            self.stop = long(self.stop)
        if self.step is not None:
            self.step = long(self.step)
        # Create an instance of an HDF5 Table
        if self.reopen:
            self._reopen()
        earray = self.h5file.get_node("/earray1")
        # Choose a small value for buffer size
        earray.nrowsinbuf = 3
        # Build the array to do comparisons
        if self.type == "string":
            object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                    shape=self.rowshape,
                                    dtype="S%s" % earray.atom.itemsize)
        else:
            object_ = numpy.arange(self.objsize, dtype=earray.atom.dtype.base)
            object_.shape = self.rowshape
        object_ = object_.swapaxes(earray.extdim, 0)
        if self.obj is not None:
            initialrows = len(self.obj)
        else:
            initialrows = 0
        # Build the complete expected array: initial rows + appended chunks.
        rowshape = self.rowshape
        rowshape[self.extdim] *= (self.nappends + initialrows)
        if self.type == "string":
            object__ = numpy.empty(
                shape=rowshape, dtype="S%s" % earray.atom.itemsize)
        else:
            object__ = numpy.empty(shape=rowshape, dtype=self.dtype)
        object__ = object__.swapaxes(0, self.extdim)
        if initialrows:
            object__[0:initialrows] = self.obj
        for i in range(self.nappends):
            j = initialrows + i * self.chunksize
            if self.type == "string":
                object__[j:j + self.chunksize] = object_
            else:
                object__[j:j + self.chunksize] = object_ * i
        stop = self.stop
        if self.nappends:
            # stop == None means read only the element designed by start
            # (in read() contexts)
            if self.stop is None:
                if self.start == -1:  # corner case
                    stop = earray.nrows
                else:
                    stop = self.start + 1
            # Protection against number of elements less than existing
            # if rowshape[self.extdim] < self.stop or self.stop == 0:
            if rowshape[self.extdim] < stop:
                # self.stop == 0 means last row only in read()
                # and not in [::] slicing notation
                stop = rowshape[self.extdim]
            # do a copy() in order to ensure that len(object._data)
            # actually do a measure of its length
            #object = object__[self.start:stop:self.step].copy()
            object = object__[self.start:self.stop:self.step].copy()
            # Swap the axes again to have normal ordering
            if self.flavor == "numpy":
                object = object.swapaxes(0, self.extdim)
        else:
            object = numpy.empty(shape=self.shape, dtype=self.dtype)
        # Read all the array
        try:
            # Pre-allocate a destination buffer of the right shape and let
            # read() fill it through the ``out`` argument.
            row = numpy.empty(earray.shape, dtype=earray.atom.dtype)
            slice_obj = [slice(None)] * len(earray.shape)
            #slice_obj[earray.maindim] = slice(self.start, stop, self.step)
            slice_obj[earray.maindim] = slice(self.start, self.stop, self.step)
            row = row[slice_obj].copy()
            earray.read(self.start, self.stop, self.step, out=row)
        except IndexError:
            row = numpy.empty(shape=self.shape, dtype=self.dtype)
        if common.verbose:
            if hasattr(object, "shape"):
                print("shape should look as:", object.shape)
            print("Object read ==>", repr(row))
            print("Should look like ==>", repr(object))
        self.assertEqual(initialrows + self.nappends * self.chunksize,
                         earray.nrows)
        self.assertTrue(allequal(row, object, self.flavor))
        shape = self._get_shape()
        if hasattr(row, "shape"):
            self.assertEqual(len(row.shape), len(shape))
            if self.flavor == "numpy":
                self.assertEqual(row.itemsize, earray.atom.itemsize)
        else:
            # Scalar case
            self.assertEqual(len(shape), 1)
    def test04_getitemEArray(self):
        """Checking enlargeable array __getitem__ special method."""
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test04_getitemEArray..." %
                  self.__class__.__name__)
        if not hasattr(self, "slices"):
            # If there is not a slices attribute, create it
            # This conversion made just in case indices are numpy scalars
            if self.start is not None:
                self.start = long(self.start)
            if self.stop is not None:
                self.stop = long(self.stop)
            if self.step is not None:
                self.step = long(self.step)
            self.slices = (slice(self.start, self.stop, self.step),)
        # Create an instance of an HDF5 Table
        if self.reopen:
            self._reopen()
        earray = self.h5file.get_node("/earray1")
        # Choose a small value for buffer size
        # earray.nrowsinbuf = 3   # this does not really change the chunksize
        if common.verbose:
            print("EArray descr:", repr(earray))
            print("shape of read array ==>", earray.shape)
            print("reopening?:", self.reopen)
        # Build the array to do comparisons
        if self.type == "string":
            object_ = numpy.ndarray(buffer=b"a"*self.objsize,
                                    shape=self.rowshape,
                                    dtype="S%s" % earray.atom.itemsize)
        else:
            object_ = numpy.arange(self.objsize, dtype=earray.atom.dtype.base)
            object_.shape = self.rowshape
        object_ = object_.swapaxes(earray.extdim, 0)
        if self.obj is not None:
            initialrows = len(self.obj)
        else:
            initialrows = 0
        # Build the complete expected array: initial rows + appended chunks.
        rowshape = self.rowshape
        rowshape[self.extdim] *= (self.nappends + initialrows)
        if self.type == "string":
            object__ = numpy.empty(
                shape=rowshape, dtype="S%s" % earray.atom.itemsize)
        else:
            object__ = numpy.empty(shape=rowshape, dtype=self.dtype)
        # Additional conversion for the numpy case
        object__ = object__.swapaxes(0, earray.extdim)
        if initialrows:
            object__[0:initialrows] = self.obj
        for i in range(self.nappends):
            j = initialrows + i * self.chunksize
            if self.type == "string":
                object__[j:j + self.chunksize] = object_
            else:
                object__[j:j + self.chunksize] = object_ * i
        if self.nappends:
            # Swap the axes again to have normal ordering
            if self.flavor == "numpy":
                object__ = object__.swapaxes(0, self.extdim)
            else:
                object__.swapaxes(0, self.extdim)
            # do a copy() in order to ensure that len(object._data)
            # actually do a measure of its length
            object = object__.__getitem__(self.slices).copy()
        else:
            object = numpy.empty(shape=self.shape, dtype=self.dtype)
        # Read all the array
        try:
            row = earray.__getitem__(self.slices)
        except IndexError:
            row = numpy.empty(shape=self.shape, dtype=self.dtype)
        if common.verbose:
            print("Object read:\n", repr(row))
            print("Should look like:\n", repr(object))
            if hasattr(object, "shape"):
                print("Original object shape:", self.shape)
                print("Shape read:", row.shape)
                print("shape should look as:", object.shape)
        self.assertEqual(initialrows + self.nappends * self.chunksize,
                         earray.nrows)
        self.assertTrue(allequal(row, object, self.flavor))
        if not hasattr(row, "shape"):
            # Scalar case
            self.assertEqual(len(self.shape), 1)
def test05_setitemEArray(self):
"""Checking enlargeable array __setitem__ special method."""
if self.__class__.__name__ == "Ellipsis6EArrayTestCase":
# We have a problem with test design here, but I think
# it is not worth the effort to solve it
# F.Alted 2004-10-27
return
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test05_setitemEArray..." %
self.__class__.__name__)
if not hasattr(self, "slices"):
# If there is not a slices attribute, create it
# This conversion made just in case indices are numpy scalars
if self.start is not None:
self.start = long(self.start)
if self.stop is not None:
self.stop = long(self.stop)
if self.step is not None:
self.step = long(self.step)
self.slices = (slice(self.start, self.stop, self.step),)
# Create an instance of an HDF5 Table
if self.reopen:
self._reopen(mode="a")
earray = self.h5file.get_node("/earray1")
# Choose a small value for buffer size
# earray.nrowsinbuf = 3 # this does not really changes the chunksize
if common.verbose:
print("EArray descr:", repr(earray))
print("shape of read array ==>", earray.shape)
print("reopening?:", self.reopen)
# Build the array to do comparisons
if self.type == "string":
object_ = numpy.ndarray(buffer=b"a"*self.objsize,
shape=self.rowshape,
dtype="S%s" % earray.atom.itemsize)
else:
object_ = numpy.arange(self.objsize, dtype=earray.atom.dtype.base)
object_.shape = self.rowshape
object_ = object_.swapaxes(earray.extdim, 0)
if self.obj is not None:
initialrows = len(self.obj)
else:
initialrows = 0
rowshape = self.rowshape
rowshape[self.extdim] *= (self.nappends + initialrows)
if self.type == "string":
object__ = numpy.empty(
shape=rowshape, dtype="S%s" % earray.atom.itemsize)
else:
object__ = numpy.empty(shape=rowshape, dtype=self.dtype)
# Additional conversion for the numpy case
object__ = object__.swapaxes(0, earray.extdim)
for i in range(self.nappends):
j = initialrows + i * self.chunksize
if self.type == "string":
object__[j:j + self.chunksize] = object_
else:
object__[j:j + self.chunksize] = object_ * i
# Modify the earray
# earray[j:j + self.chunksize] = object_ * i
# earray[self.slices] = 1
if initialrows:
object__[0:initialrows] = self.obj
if self.nappends:
# Swap the axes again to have normal ordering
if self.flavor == "numpy":
object__ = object__.swapaxes(0, self.extdim)
else:
object__.swapaxes(0, self.extdim)
# do a copy() in order to ensure that len(object._data)
# actually do a measure of its length
object = object__.__getitem__(self.slices).copy()
else:
object = numpy.empty(shape=self.shape, dtype=self.dtype)
if self.flavor == "numpy":
object = numpy.asarray(object)
if self.type == "string":
if hasattr(self, "wslice"):
# These sentences should be equivalent
# object[self.wslize] = object[self.wslice].pad("xXx")
# earray[self.wslice] = earray[self.wslice].pad("xXx")
object[self.wslize] = "xXx"
earray[self.wslice] = "xXx"
elif sum(object[self.slices].shape) != 0:
# object[:] = object.pad("xXx")
object[:] = "xXx"
if object.size > 0:
earray[self.slices] = object
else:
if hasattr(self, "wslice"):
object[self.wslice] = object[self.wslice] * 2 + 3
earray[self.wslice] = earray[self.wslice] * 2 + 3
elif sum(object[self.slices].shape) != 0:
object = object * 2 + 3
if numpy.prod(object.shape) > 0:
earray[self.slices] = earray[self.slices] * 2 + 3
# Read all the array
row = earray.__getitem__(self.slices)
try:
row = earray.__getitem__(self.slices)
except IndexError:
print("IndexError!")
row = numpy.empty(shape=self.shape, dtype=self.dtype)
if common.verbose:
print("Object read:\n", repr(row))
print("Should look like:\n", repr(object))
if hasattr(object, "shape"):
print("Original object shape:", self.shape)
print("Shape read:", row.shape)
print("shape should look as:", object.shape)
self.assertEqual(initialrows + self.nappends * self.chunksize,
earray.nrows)
self.assertTrue(allequal(row, object, self.flavor))
if not hasattr(row, "shape"):
# Scalar case
self.assertEqual(len(self.shape), 1)
class BasicWriteTestCase(BasicTestCase):
    """1-D int32 EArray; write-back of a single element (``wslice = 1``)."""
    type = 'int32'
    shape = (0,)
    chunksize = 5
    nappends = 10
    step = 1
    # wslice = slice(1,nappends,2)
    wslice = 1 # single element case
class Basic2WriteTestCase(BasicTestCase):
    """1-D int32 EArray; write-back of a slice; no reopen between tests."""
    type = 'int32'
    dtype = 'i4'
    shape = (0,)
    chunksize = 5
    nappends = 10
    step = 1
    wslice = slice(chunksize-2, nappends, 2) # range of elements
    reopen = 0 # This case does not reopen files
class Basic3WriteTestCase(BasicTestCase):
    """EArray created from an initial Python list ``obj``; no reopen.

    NOTE(review): ``chunkshape`` is not read by BasicTestCase (it uses
    ``chunksize``) -- confirm whether this attribute is intentional.
    """
    obj = [1, 2]
    type = numpy.asarray(obj).dtype.name
    dtype = numpy.asarray(obj).dtype.str
    shape = (0,)
    chunkshape = (5,)
    step = 1
    reopen = 0 # This case does not reopen files
class Basic4WriteTestCase(BasicTestCase):
    """EArray created from a NumPy array; shape derived from ``obj``; no reopen."""
    obj = numpy.array([1, 2])
    type = obj.dtype.name
    dtype = obj.dtype.str
    shape = None
    chunkshape = (5,)
    step = 1
    reopen = 0 # This case does not reopen files
class Basic5WriteTestCase(BasicTestCase):
    """Like Basic3WriteTestCase (initial list), but reopening the file."""
    obj = [1, 2]
    type = numpy.asarray(obj).dtype.name
    dtype = numpy.asarray(obj).dtype.str
    shape = (0,)
    chunkshape = (5,)
    step = 1
    reopen = 1 # This case does reopen files
class Basic6WriteTestCase(BasicTestCase):
    """Like Basic4WriteTestCase (NumPy obj, derived shape), but reopening."""
    obj = numpy.array([1, 2])
    type = obj.dtype.name
    dtype = obj.dtype.str
    shape = None
    chunkshape = (5,)
    step = 1
    reopen = 1 # This case does reopen files
class Basic7WriteTestCase(BasicTestCase):
    """2-D EArray created from a nested list; no reopen."""
    obj = [[1, 2], [3, 4]]
    type = numpy.asarray(obj).dtype.name
    dtype = numpy.asarray(obj).dtype.str
    shape = (0, 2)
    chunkshape = (5,)
    step = 1
    reopen = 0 # This case does not reopen files
class Basic8WriteTestCase(BasicTestCase):
    """2-D EArray created from a nested list; reopening the file."""
    obj = [[1, 2], [3, 4]]
    type = numpy.asarray(obj).dtype.name
    dtype = numpy.asarray(obj).dtype.str
    shape = (0, 2)
    chunkshape = (5,)
    step = 1
    reopen = 1 # This case does reopen files
class EmptyEArrayTestCase(BasicTestCase):
    """EArray with no appended data (``nappends = 0``)."""
    type = 'int32'
    dtype = numpy.dtype('int32')
    shape = (2, 0)
    chunksize = 5
    nappends = 0
    start = 0
    stop = 10
    step = 1
class NP_EmptyEArrayTestCase(BasicTestCase):
    """Empty EArray using a NumPy '()' scalar-style dtype specification."""
    type = 'int32'
    dtype = numpy.dtype('()int32')
    shape = (2, 0)
    chunksize = 5
    nappends = 0
class Empty2EArrayTestCase(BasicTestCase):
    """Empty EArray; same as EmptyEArrayTestCase but without reopening."""
    type = 'int32'
    dtype = 'int32'
    shape = (2, 0)
    chunksize = 5
    nappends = 0
    start = 0
    stop = 10
    step = 1
    reopen = 0 # This case does not reopen files
@unittest.skipIf(not common.lzo_avail, 'LZO compression library not available')
class SlicesEArrayTestCase(BasicTestCase):
    """LZO-compressed 2-D EArray accessed through a 2-D slice tuple."""
    compress = 1
    complib = "lzo"
    type = 'int32'
    shape = (2, 0)
    chunksize = 5
    nappends = 2
    slices = (slice(1, 2, 1), slice(1, 3, 1))
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
class Slices2EArrayTestCase(BasicTestCase):
    """Blosc-compressed 3-D EArray accessed through a 3-D slice tuple."""
    compress = 1
    complib = "blosc"
    type = 'int32'
    shape = (2, 0, 4)
    chunksize = 5
    nappends = 20
    slices = (slice(1, 2, 1), slice(None, None, None), slice(1, 4, 2))
class EllipsisEArrayTestCase(BasicTestCase):
    """Slicing with a leading Ellipsis."""
    type = 'int32'
    shape = (2, 0)
    chunksize = 5
    nappends = 2
    # slices = (slice(1,2,1), Ellipsis)
    slices = (Ellipsis, slice(1, 2, 1))
class Ellipsis2EArrayTestCase(BasicTestCase):
    """Slicing with an Ellipsis in the middle of the slice tuple."""
    type = 'int32'
    shape = (2, 0, 4)
    chunksize = 5
    nappends = 20
    slices = (slice(1, 2, 1), Ellipsis, slice(1, 4, 2))
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
class Slices3EArrayTestCase(BasicTestCase):
    """4-D blosc-compressed EArray; only the LAST ``slices`` assignment is
    effective -- the earlier ones are kept as a record of known-failing
    cases (see the W/N/Y annotations)."""
    compress = 1  # To show the chunks id DEBUG is on
    complib = "blosc"
    type = 'int32'
    shape = (2, 3, 4, 0)
    chunksize = 5
    nappends = 20
    slices = (slice(1, 2, 1), slice(0, None, None),
              slice(1, 4, 2))  # Don't work
    # slices = (slice(None, None, None), slice(0, None, None),
    #           slice(1,4,1)) # W
    # slices = (slice(None, None, None), slice(None, None, None),
    #           slice(1,4,2)) # N
    # slices = (slice(1,2,1), slice(None, None, None), slice(1,4,2)) # N
    # Disable the failing test temporarily with a working test case
    slices = (slice(1, 2, 1), slice(1, 4, None), slice(1, 4, 2))  # Y
    # slices = (slice(1,2,1), slice(0, 4, None), slice(1,4,1)) # Y
    slices = (slice(1, 2, 1), slice(0, 4, None), slice(1, 4, 2))  # N
    # slices = (slice(1,2,1), slice(0, 4, None), slice(1,4,2),
    #           slice(0,100,1)) # N
class Slices4EArrayTestCase(BasicTestCase):
    """6-D EArray sliced on every axis."""
    type = 'int32'
    shape = (2, 3, 4, 0, 5, 6)
    chunksize = 5
    nappends = 20
    slices = (slice(1, 2, 1), slice(0, None, None), slice(1, 4, 2),
              slice(0, 4, 2), slice(3, 5, 2), slice(2, 7, 1))
class Ellipsis3EArrayTestCase(BasicTestCase):
    """Slicing with a trailing Ellipsis.

    NOTE(review): the second ``slices`` assignment overrides the first,
    which is dead code -- confirm whether it should be removed.
    """
    type = 'int32'
    shape = (2, 3, 4, 0)
    chunksize = 5
    nappends = 20
    slices = (Ellipsis, slice(0, 4, None), slice(1, 4, 2))
    slices = (slice(1, 2, 1), slice(0, 4, None), slice(1, 4, 2), Ellipsis)
class Ellipsis4EArrayTestCase(BasicTestCase):
    """Slicing with an inner Ellipsis.

    NOTE(review): the second ``slices`` assignment overrides the first,
    which is dead code -- confirm whether it should be removed.
    """
    type = 'int32'
    shape = (2, 3, 4, 0)
    chunksize = 5
    nappends = 20
    slices = (Ellipsis, slice(0, 4, None), slice(1, 4, 2))
    slices = (slice(1, 2, 1), Ellipsis, slice(1, 4, 2))
class Ellipsis5EArrayTestCase(BasicTestCase):
    """Slicing with a trailing Ellipsis only."""
    type = 'int32'
    shape = (2, 3, 4, 0)
    chunksize = 5
    nappends = 20
    slices = (slice(1, 2, 1), slice(0, 4, None), Ellipsis)
class Ellipsis6EArrayTestCase(BasicTestCase):
    """Slicing mixing an integer index with an Ellipsis.

    test05_setitemEArray explicitly skips this class by name.
    """
    type = 'int32'
    shape = (2, 3, 4, 0)
    chunksize = 5
    nappends = 2
    # The next slices gives problems with setting values (test05)
    # This is a problem on the test design, not the Array.__setitem__
    # code, though.
    slices = (slice(1, 2, 1), slice(0, 4, None), 2, Ellipsis)
class Ellipsis7EArrayTestCase(BasicTestCase):
    """Slicing with a step-less inner slice plus a trailing Ellipsis."""
    type = 'int32'
    shape = (2, 3, 4, 0)
    chunksize = 5
    nappends = 2
    slices = (slice(1, 2, 1), slice(0, 4, None), slice(2, 3), Ellipsis)
class MD3WriteTestCase(BasicTestCase):
    """3-D EArray with the extendable dimension in the middle."""
    type = 'int32'
    shape = (2, 0, 3)
    chunksize = 4
    step = 2
class MD5WriteTestCase(BasicTestCase):
    """5-D EArray; the commented shapes record historical HDF5 1.6.1 bugs."""
    type = 'int32'
    shape = (2, 0, 3, 4, 5)  # ok
    # shape = (1, 1, 0, 1)  # Minimum shape that shows problems with HDF5 1.6.1
    # shape = (2, 3, 0, 4, 5)  # Floating point exception (HDF5 1.6.1)
    # shape = (2, 3, 3, 0, 5, 6)  # Segmentation fault (HDF5 1.6.1)
    chunksize = 1
    nappends = 1
    start = 1
    stop = 10
    step = 10
class MD6WriteTestCase(BasicTestCase):
    """6-D EArray with a deep extendable dimension."""
    type = 'int32'
    shape = (2, 3, 3, 0, 5, 6)
    chunksize = 1
    nappends = 10
    start = 1
    stop = 10
    step = 3
class NP_MD6WriteTestCase(BasicTestCase):
    """Testing NumPy scalars as indexes (6-D EArray)."""
    type = 'int32'
    shape = (2, 3, 3, 0, 5, 6)
    chunksize = 1
    nappends = 10
class MD6WriteTestCase__(BasicTestCase):
    """Small 2-D variant with chunksize 1."""
    type = 'int32'
    shape = (2, 0)
    chunksize = 1
    nappends = 3
    start = 1
    stop = 3
    step = 1
class MD7WriteTestCase(BasicTestCase):
    """7-D EArray with a late extendable dimension."""
    type = 'int32'
    shape = (2, 3, 3, 4, 5, 0, 3)
    chunksize = 10
    nappends = 1
    start = 1
    stop = 10
    step = 2
class MD10WriteTestCase(BasicTestCase):
    """10-D EArray; start == stop == -1 exercises the corner-case handling."""
    type = 'int32'
    shape = (1, 2, 3, 4, 5, 5, 4, 3, 2, 0)
    chunksize = 5
    nappends = 10
    start = -1
    stop = -1
    step = 10
class NP_MD10WriteTestCase(BasicTestCase):
    """10-D EArray with the default (start, stop, step) parameters."""
    type = 'int32'
    shape = (1, 2, 3, 4, 5, 5, 4, 3, 2, 0)
    chunksize = 5
    nappends = 10
class ZlibComprTestCase(BasicTestCase):
    """zlib compression with stop=None (read single element in read())."""
    compress = 1
    complib = "zlib"
    start = 3
    # stop = 0   # means last row
    stop = None  # means last row from 0.8 on
    step = 10
class ZlibShuffleTestCase(BasicTestCase):
    """zlib compression with shuffle enabled."""
    shuffle = 1
    compress = 1
    complib = "zlib"
    # case start > stop, i.e. no rows read
    start = 3
    stop = 1
    step = 10
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
class BloscComprTestCase(BasicTestCase):
    """Blosc compression without shuffle."""
    compress = 1  # sss
    complib = "blosc"
    chunksize = 10
    nappends = 100
    start = 3
    stop = 10
    step = 3
@unittest.skipIf(not common.blosc_avail,
                 'BLOSC compression library not available')
class BloscShuffleTestCase(BasicTestCase):
    """Blosc compression with shuffle enabled."""
    compress = 1
    shuffle = 1
    complib = "blosc"
    chunksize = 100
    nappends = 10
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.lzo_avail, 'LZO compression library not available')
class LZOComprTestCase(BasicTestCase):
    """LZO compression without shuffle."""
    compress = 1  # sss
    complib = "lzo"
    chunksize = 10
    nappends = 100
    start = 3
    stop = 10
    step = 3
@unittest.skipIf(not common.lzo_avail, 'LZO compression library not available')
class LZOShuffleTestCase(BasicTestCase):
    """LZO compression with shuffle enabled."""
    compress = 1
    shuffle = 1
    complib = "lzo"
    chunksize = 100
    nappends = 10
    start = 3
    stop = 10
    step = 7
@unittest.skipIf(not common.bzip2_avail,
                 'BZIP2 compression library not available')
class Bzip2ComprTestCase(BasicTestCase):
    """bzip2 compression without shuffle."""
    compress = 1
    complib = "bzip2"
    chunksize = 100
    nappends = 10
    start = 3
    stop = 10
    step = 8
@unittest.skipIf(not common.bzip2_avail,
                 'BZIP2 compression library not available')
class Bzip2ShuffleTestCase(BasicTestCase):
    """bzip2 compression with shuffle enabled."""
    compress = 1
    shuffle = 1
    complib = "bzip2"
    chunksize = 100
    nappends = 10
    start = 3
    stop = 10
    step = 6
class Fletcher32TestCase(BasicTestCase):
    """BasicTestCase variant: fletcher32 checksum filter, no compression."""

    compress = 0
    fletcher32 = 1
    chunksize = 50
    nappends = 20
    start = 4
    stop = 20
    step = 7
class AllFiltersTestCase(BasicTestCase):
    """BasicTestCase variant: zlib compression, shuffle and fletcher32
    checksum all enabled at once."""

    compress = 1
    shuffle = 1
    fletcher32 = 1
    complib = "zlib"
    chunksize = 20  # sss
    nappends = 50
    start = 2
    stop = 99
    step = 6
    # chunksize = 3
    # nappends = 2
    # start = 1
    # stop = 10
    # step = 2
class FloatTypeTestCase(BasicTestCase):
    """BasicTestCase variant: float64 atoms."""

    type = 'float64'
    dtype = 'float64'
    shape = (2, 0)
    chunksize = 5
    nappends = 10
    start = 3
    stop = 10
    step = 20
class ComplexTypeTestCase(BasicTestCase):
    """BasicTestCase variant: complex128 atoms."""

    type = 'complex128'
    dtype = 'complex128'
    shape = (2, 0)
    chunksize = 5
    nappends = 10
    start = 3
    stop = 10
    step = 20
class StringTestCase(BasicTestCase):
    """BasicTestCase variant: fixed-width (20-char) string atoms, with a
    custom slice selection."""

    type = "string"
    length = 20
    shape = (2, 0)
    # shape = (2,0,20)
    chunksize = 5
    nappends = 10
    start = 3
    stop = 10
    step = 20
    slices = (slice(0, 1), slice(1, 2))
class String2TestCase(BasicTestCase):
    """BasicTestCase variant: string atoms on a one-dimensional array."""

    type = "string"
    length = 20
    shape = (0,)
    # shape = (0, 20)
    chunksize = 5
    nappends = 10
    start = 1
    stop = 10
    step = 2
class StringComprTestCase(BasicTestCase):
    """BasicTestCase variant: multidimensional string atoms with zlib
    compression enabled."""

    type = "string"
    length = 20
    shape = (20, 0, 10)
    # shape = (20,0,10,20)
    # Fixed: this used to read ``compr = 1``, an attribute BasicTestCase never
    # looks at (every other *ComprTestCase sets ``compress``), so compression
    # was silently never exercised by this test case.
    compress = 1
    # shuffle = 1  # this shouldn't do nothing on chars
    chunksize = 50
    nappends = 10
    start = -1
    stop = 100
    step = 20
class SizeOnDiskInMemoryPropertyTestCase(common.TempFileMixin, TestCase):
    """Check the EArray ``size_on_disk`` and ``size_in_memory`` properties
    against known data volumes, with and without blosc compression."""

    def setUp(self):
        super(SizeOnDiskInMemoryPropertyTestCase, self).setUp()
        # Zero rows to start with; the array grows along the first axis.
        self.array_size = (0, 10)
        # set chunkshape so it divides evenly into array_size, to avoid
        # partially filled chunks
        self.chunkshape = (1000, 10)
        # approximate size (in bytes) of non-data portion of hdf5 file
        self.hdf_overhead = 6000

    def create_array(self, complevel):
        # Helper: create /earray of int32 with the given blosc compression
        # level; the result is stored on self.array for the tests to use.
        filters = tables.Filters(complevel=complevel, complib='blosc')
        self.array = self.h5file.create_earray('/', 'earray', atom=Int32Atom(),
                                               shape=self.array_size,
                                               filters=filters,
                                               chunkshape=self.chunkshape)

    def test_zero_length(self):
        # An empty array should report zero bytes both on disk and in memory.
        complevel = 0
        self.create_array(complevel)
        self.assertEqual(self.array.size_on_disk, 0)
        self.assertEqual(self.array.size_in_memory, 0)

    # add 10 chunks of data in one append
    def test_no_compression_one_append(self):
        complevel = 0
        self.create_array(complevel)
        self.array.append([tuple(range(10))] * self.chunkshape[0] * 10)
        # 10 chunks * 1000 rows * 10 columns * 4 bytes per int32
        self.assertEqual(self.array.size_on_disk, 10 * 1000 * 10 * 4)
        self.assertEqual(self.array.size_in_memory, 10 * 1000 * 10 * 4)

    # add 10 chunks of data in two appends
    def test_no_compression_multiple_appends(self):
        complevel = 0
        self.create_array(complevel)
        self.array.append([tuple(range(10))] * self.chunkshape[0] * 5)
        self.array.append([tuple(range(10))] * self.chunkshape[0] * 5)
        # Split appends must yield the same totals as a single append.
        self.assertEqual(self.array.size_on_disk, 10 * 1000 * 10 * 4)
        self.assertEqual(self.array.size_in_memory, 10 * 1000 * 10 * 4)

    def test_with_compression(self):
        complevel = 1
        self.create_array(complevel)
        self.array.append([tuple(range(10))] * self.chunkshape[0] * 10)
        file_size = os.stat(self.h5fname).st_size
        # size_on_disk should match the real file size up to HDF5 metadata
        # overhead, and compression must make it smaller than the raw size.
        self.assertTrue(
            abs(self.array.size_on_disk - file_size) <= self.hdf_overhead)
        self.assertEqual(self.array.size_in_memory, 10 * 1000 * 10 * 4)
        self.assertTrue(self.array.size_on_disk < self.array.size_in_memory)
class OffsetStrideTestCase(common.TempFileMixin, TestCase):
    """Check EArray.append() with non-contiguous and byteswapped inputs.

    Appending a slice of a numpy array (``a[2:]``, ``a[::3]``) hands the
    HDF5 layer an offset or strided buffer; appending ``a.byteswap()``
    views exercises the non-native byte order path.  The tests verify the
    data round-trips correctly in all these cases.

    Fixed relative to the original: the numarray-era capitalized dtype
    aliases (``'Int32'``, ``'Float64'``) are invalid in modern NumPy and
    have been replaced by their lowercase equivalents; the removed
    ``ndarray.newbyteorder()`` method (gone in NumPy 2.0) is replaced by
    its documented equivalent ``a.view(a.dtype.newbyteorder())``.
    """

    mode = "w"
    compress = 0
    complib = "zlib"  # Default compression library

    def setUp(self):
        super(OffsetStrideTestCase, self).setUp()
        self.rootgroup = self.h5file.root

    def test01a_String(self):
        """Checking earray with offseted numpy strings appends."""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01a_String..." % self.__class__.__name__)

        earray = self.h5file.create_earray(root, 'strings',
                                           atom=StringAtom(itemsize=3),
                                           shape=(0, 2, 2),
                                           title="Array of strings")
        a = numpy.array([[["a", "b"],
                          ["123", "45"],
                          ["45", "123"]]], dtype="S3")
        earray.append(a[:, 1:])  # offset buffer: skip the first 2x2 block
        a = numpy.array([[["s", "a"],
                          ["ab", "f"],
                          ["s", "abc"],
                          ["abc", "f"]]])
        earray.append(a[:, 2:])  # offset buffer again

        # Read all the rows:
        row = earray.read()
        if common.verbose:
            print("Object read:", row)
            print("Nrows in", earray._v_pathname, ":", earray.nrows)
            print("Second row in earray ==>", row[1].tolist())

        self.assertEqual(earray.nrows, 2)
        self.assertEqual(row[0].tolist(), [[b"123", b"45"], [b"45", b"123"]])
        self.assertEqual(row[1].tolist(), [[b"s", b"abc"], [b"abc", b"f"]])
        self.assertEqual(len(row[0]), 2)
        self.assertEqual(len(row[1]), 2)

    def test01b_String(self):
        """Checking earray with strided numpy strings appends."""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01b_String..." % self.__class__.__name__)

        earray = self.h5file.create_earray(root, 'strings',
                                           atom=StringAtom(itemsize=3),
                                           shape=(0, 2, 2),
                                           title="Array of strings")
        a = numpy.array([[["a", "b"],
                          ["123", "45"],
                          ["45", "123"]]], dtype="S3")
        earray.append(a[:, ::2])  # strided buffer: every other 2-row block
        a = numpy.array([[["s", "a"],
                          ["ab", "f"],
                          ["s", "abc"],
                          ["abc", "f"]]])
        earray.append(a[:, ::2])  # strided buffer again

        # Read all the rows:
        row = earray.read()
        if common.verbose:
            print("Object read:", row)
            print("Nrows in", earray._v_pathname, ":", earray.nrows)
            print("Second row in earray ==>", row[1].tolist())

        self.assertEqual(earray.nrows, 2)
        self.assertEqual(row[0].tolist(), [[b"a", b"b"], [b"45", b"123"]])
        self.assertEqual(row[1].tolist(), [[b"s", b"a"], [b"s", b"abc"]])
        self.assertEqual(len(row[0]), 2)
        self.assertEqual(len(row[1]), 2)

    def test02a_int(self):
        """Checking earray with offseted NumPy ints appends."""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02a_int..." % self.__class__.__name__)

        earray = self.h5file.create_earray(root, 'EAtom',
                                           atom=Int32Atom(), shape=(0, 3),
                                           title="array of ints")
        a = numpy.array([(0, 0, 0), (1, 0, 3),
                         (1, 1, 1), (0, 0, 0)], dtype='int32')
        earray.append(a[2:])  # Create an offset
        a = numpy.array([(1, 1, 1), (-1, 0, 0)], dtype='int32')
        earray.append(a[1:])  # Create an offset

        # Read all the rows:
        row = earray.read()
        if common.verbose:
            print("Object read:", row)
            print("Nrows in", earray._v_pathname, ":", earray.nrows)
            print("Third row in vlarray ==>", row[2])

        self.assertEqual(earray.nrows, 3)
        self.assertTrue(allequal(row[0],
                                 numpy.array([1, 1, 1], dtype='int32')))
        self.assertTrue(allequal(row[1],
                                 numpy.array([0, 0, 0], dtype='int32')))
        self.assertTrue(allequal(row[2],
                                 numpy.array([-1, 0, 0], dtype='int32')))

    def test02b_int(self):
        """Checking earray with strided NumPy ints appends."""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02b_int..." % self.__class__.__name__)

        earray = self.h5file.create_earray(root, 'EAtom',
                                           atom=Int32Atom(), shape=(0, 3),
                                           title="array of ints")
        a = numpy.array([(0, 0, 0), (1, 0, 3),
                         (1, 1, 1), (3, 3, 3)], dtype='int32')
        earray.append(a[::3])  # Create a stride
        a = numpy.array([(1, 1, 1), (-1, 0, 0)], dtype='int32')
        earray.append(a[::2])  # Create a stride

        # Read all the rows:
        row = earray.read()
        if common.verbose:
            print("Object read:", row)
            print("Nrows in", earray._v_pathname, ":", earray.nrows)
            print("Third row in vlarray ==>", row[2])

        self.assertEqual(earray.nrows, 3)
        self.assertTrue(allequal(row[0],
                                 numpy.array([0, 0, 0], dtype='int32')))
        self.assertTrue(allequal(row[1],
                                 numpy.array([3, 3, 3], dtype='int32')))
        self.assertTrue(allequal(row[2],
                                 numpy.array([1, 1, 1], dtype='int32')))

    def test03a_int(self):
        """Checking earray with byteswapped appends (ints)"""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03a_int..." % self.__class__.__name__)

        earray = self.h5file.create_earray(root, 'EAtom',
                                           atom=Int32Atom(), shape=(0, 3),
                                           title="array of ints")
        # Add a native ordered array
        a = numpy.array([(0, 0, 0), (1, 0, 3),
                         (1, 1, 1), (3, 3, 3)], dtype='int32')
        earray.append(a)
        # Change the byteorder of the array.
        # ndarray.newbyteorder() was removed in NumPy 2.0; the documented
        # equivalent is a view with a byteorder-swapped dtype.
        a = a.byteswap()
        a = a.view(a.dtype.newbyteorder())
        # Add a byteswapped array
        earray.append(a)

        # Read all the rows:
        native = earray[:4, :]
        swapped = earray[4:, :]
        if common.verbose:
            print("Native rows:", native)
            print("Byteorder native rows:", native.dtype.byteorder)
            print("Swapped rows:", swapped)
            print("Byteorder swapped rows:", swapped.dtype.byteorder)

        self.assertTrue(allequal(native, swapped))

    def test03b_float(self):
        """Checking earray with byteswapped appends (floats)"""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03b_float..." % self.__class__.__name__)

        earray = self.h5file.create_earray(root, 'EAtom',
                                           atom=Float64Atom(), shape=(0, 3),
                                           title="array of floats")
        # Add a native ordered array
        a = numpy.array([(0, 0, 0), (1, 0, 3),
                         (1, 1, 1), (3, 3, 3)], dtype='float64')
        earray.append(a)
        # Change the byteorder of the array (see note in test03a_int)
        a = a.byteswap()
        a = a.view(a.dtype.newbyteorder())
        # Add a byteswapped array
        earray.append(a)

        # Read all the rows:
        native = earray[:4, :]
        swapped = earray[4:, :]
        if common.verbose:
            print("Native rows:", native)
            print("Byteorder native rows:", native.dtype.byteorder)
            print("Swapped rows:", swapped)
            print("Byteorder swapped rows:", swapped.dtype.byteorder)

        self.assertTrue(allequal(native, swapped))

    def test04a_int(self):
        """Checking earray with byteswapped appends (2, ints)"""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test04a_int..." % self.__class__.__name__)

        # Store the array with the byte order opposite to the native one.
        byteorder = {'little': 'big', 'big': 'little'}[sys.byteorder]
        earray = self.h5file.create_earray(root, 'EAtom',
                                           atom=Int32Atom(), shape=(0, 3),
                                           title="array of ints",
                                           byteorder=byteorder)
        # Add a native ordered array
        a = numpy.array([(0, 0, 0), (1, 0, 3),
                         (1, 1, 1), (3, 3, 3)], dtype='int32')
        earray.append(a)
        # Change the byteorder of the array (see note in test03a_int)
        a = a.byteswap()
        a = a.view(a.dtype.newbyteorder())
        # Add a byteswapped array
        earray.append(a)

        # Read all the rows:
        native = earray[:4, :]
        swapped = earray[4:, :]
        if common.verbose:
            print("Byteorder native rows:", byteorders[native.dtype.byteorder])
            print("Byteorder earray on-disk:", earray.byteorder)

        # Data always comes back in native order, regardless of on-disk order.
        self.assertEqual(byteorders[native.dtype.byteorder], sys.byteorder)
        self.assertEqual(earray.byteorder, byteorder)
        self.assertTrue(allequal(native, swapped))

    def test04b_int(self):
        """Checking earray with byteswapped appends (2, ints, reopen)"""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test04b_int..." % self.__class__.__name__)

        byteorder = {'little': 'big', 'big': 'little'}[sys.byteorder]
        earray = self.h5file.create_earray(root, 'EAtom',
                                           atom=Int32Atom(), shape=(0, 3),
                                           title="array of ints",
                                           byteorder=byteorder)
        # Re-open the file before appending, to make sure the non-native
        # byteorder survives a close/open cycle.
        self._reopen(mode="a")
        earray = self.h5file.get_node("/EAtom")
        # Add a native ordered array
        a = numpy.array([(0, 0, 0), (1, 0, 3),
                         (1, 1, 1), (3, 3, 3)], dtype='int32')
        earray.append(a)
        # Change the byteorder of the array (see note in test03a_int)
        a = a.byteswap()
        a = a.view(a.dtype.newbyteorder())
        # Add a byteswapped array
        earray.append(a)

        # Read all the rows:
        native = earray[:4, :]
        swapped = earray[4:, :]
        if common.verbose:
            print("Byteorder native rows:", byteorders[native.dtype.byteorder])
            print("Byteorder earray on-disk:", earray.byteorder)

        self.assertEqual(byteorders[native.dtype.byteorder], sys.byteorder)
        self.assertEqual(earray.byteorder, byteorder)
        self.assertTrue(allequal(native, swapped))

    def test04c_float(self):
        """Checking earray with byteswapped appends (2, floats)"""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test04c_float..." % self.__class__.__name__)

        byteorder = {'little': 'big', 'big': 'little'}[sys.byteorder]
        earray = self.h5file.create_earray(root, 'EAtom',
                                           atom=Float64Atom(), shape=(0, 3),
                                           title="array of floats",
                                           byteorder=byteorder)
        # Add a native ordered array
        a = numpy.array([(0, 0, 0), (1, 0, 3),
                         (1, 1, 1), (3, 3, 3)], dtype='float64')
        earray.append(a)
        # Change the byteorder of the array (see note in test03a_int)
        a = a.byteswap()
        a = a.view(a.dtype.newbyteorder())
        # Add a byteswapped array
        earray.append(a)

        # Read all the rows:
        native = earray[:4, :]
        swapped = earray[4:, :]
        if common.verbose:
            print("Byteorder native rows:", byteorders[native.dtype.byteorder])
            print("Byteorder earray on-disk:", earray.byteorder)

        self.assertEqual(byteorders[native.dtype.byteorder], sys.byteorder)
        self.assertEqual(earray.byteorder, byteorder)
        self.assertTrue(allequal(native, swapped))

    def test04d_float(self):
        """Checking earray with byteswapped appends (2, floats, reopen)"""

        root = self.rootgroup
        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test04d_float..." % self.__class__.__name__)

        byteorder = {'little': 'big', 'big': 'little'}[sys.byteorder]
        earray = self.h5file.create_earray(root, 'EAtom',
                                           atom=Float64Atom(), shape=(0, 3),
                                           title="array of floats",
                                           byteorder=byteorder)
        # Re-open before appending (same rationale as test04b_int).
        self._reopen(mode='a')
        earray = self.h5file.get_node("/EAtom")
        # Add a native ordered array
        a = numpy.array([(0, 0, 0), (1, 0, 3),
                         (1, 1, 1), (3, 3, 3)], dtype='float64')
        earray.append(a)
        # Change the byteorder of the array (see note in test03a_int)
        a = a.byteswap()
        a = a.view(a.dtype.newbyteorder())
        # Add a byteswapped array
        earray.append(a)

        # Read all the rows:
        native = earray[:4, :]
        swapped = earray[4:, :]
        if common.verbose:
            print("Byteorder native rows:", byteorders[native.dtype.byteorder])
            print("Byteorder earray on-disk:", earray.byteorder)

        self.assertEqual(byteorders[native.dtype.byteorder], sys.byteorder)
        self.assertEqual(earray.byteorder, byteorder)
        self.assertTrue(allequal(native, swapped))
class CopyTestCase(common.TempFileMixin, TestCase):
    """Check EArray.copy(): data, shape, atom, flavor, title and user
    attributes of the copy, for several flavors and destinations.

    Subclasses set ``close`` to exercise the open-file and reopened-file
    paths.  Fixed relative to the original: the numarray-era ``'Int16'``
    dtype alias (invalid in modern NumPy) is now lowercase ``'int16'``,
    and the verbose "Running ..." messages of test03a/b/c match the actual
    method names.
    """

    def test01_copy(self):
        """Checking EArray.copy() method."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_copy..." % self.__class__.__name__)

        # Create an EArray
        atom = Int16Atom()
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        array1.append(numpy.array([[456, 2], [3, 457]], dtype='int16'))

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode='a')
            array1 = self.h5file.root.array1

        # Copy it to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            # print("dirs-->", dir(array1), dir(array2))
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.atom.itemsize, array2.atom.itemsize)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))

    def test02_copy(self):
        """Checking EArray.copy() method (where specified)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_copy..." % self.__class__.__name__)

        # Create an EArray
        atom = Int16Atom()
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        array1.append(numpy.array([[456, 2], [3, 457]], dtype='int16'))

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode='a')
            array1 = self.h5file.root.array1

        # Copy to another location, inside a subgroup this time
        group1 = self.h5file.create_group("/", "group1")
        array2 = array1.copy(group1, 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.group1.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            # print("dirs-->", dir(array1), dir(array2))
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.atom.itemsize, array2.atom.itemsize)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))

    def test03a_copy(self):
        """Checking EArray.copy() method (python flavor)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03a_copy..." % self.__class__.__name__)

        atom = Int16Atom()
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        array1.flavor = "python"
        array1.append(((456, 2), (3, 457)))

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode='a')
            array1 = self.h5file.root.array1

        # Copy to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all elements are equal (python flavor reads as lists,
        # so plain == comparison is enough here)
        self.assertEqual(array1.read(), array2.read())
        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)  # Very important here!
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.atom.itemsize, array2.atom.itemsize)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))

    def test03b_copy(self):
        """Checking EArray.copy() method (python string flavor)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03b_copy..." % self.__class__.__name__)

        atom = StringAtom(itemsize=3)
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        array1.flavor = "python"
        array1.append([["456", "2"], ["3", "457"]])

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode='a')
            array1 = self.h5file.root.array1

        # Copy to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all elements are equal
        self.assertEqual(array1.read(), array2.read())

        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)  # Very important here!
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.atom.itemsize, array2.atom.itemsize)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))

    def test03c_copy(self):
        """Checking EArray.copy() method (String flavor)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test03c_copy..." % self.__class__.__name__)

        atom = StringAtom(itemsize=4)
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        array1.flavor = "numpy"
        array1.append(numpy.array([["456", "2"], ["3", "457"]], dtype="S4"))

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode='a')
            array1 = self.h5file.root.array1

        # Copy to another location
        array2 = array1.copy('/', 'array2')

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all elements are equal
        self.assertTrue(allequal(array1.read(), array2.read()))
        # Assert other properties in array
        self.assertEqual(array1.nrows, array2.nrows)
        self.assertEqual(array1.shape, array2.shape)
        self.assertEqual(array1.extdim, array2.extdim)
        self.assertEqual(array1.flavor, array2.flavor)  # Very important here!
        self.assertEqual(array1.atom.dtype, array2.atom.dtype)
        self.assertEqual(array1.atom.type, array2.atom.type)
        self.assertEqual(array1.atom.itemsize, array2.atom.itemsize)
        self.assertEqual(array1.title, array2.title)
        self.assertEqual(str(array1.atom), str(array2.atom))

    def test04_copy(self):
        """Checking EArray.copy() method (checking title copying)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test04_copy..." % self.__class__.__name__)

        # Create an EArray
        atom = Int16Atom()
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        array1.append(numpy.array([[456, 2], [3, 457]], dtype='int16'))
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode='a')
            array1 = self.h5file.root.array1

        # Copy it to another Array, overriding the title
        array2 = array1.copy('/', 'array2', title="title array2")

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        # Assert the title was taken from the copy() argument
        if common.verbose:
            print("title of destination array-->", array2.title)
        self.assertEqual(array2.title, "title array2")

    def test05_copy(self):
        """Checking EArray.copy() method (user attributes copied)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test05_copy..." % self.__class__.__name__)

        # Create an EArray
        atom = Int16Atom()
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        array1.append(numpy.array([[456, 2], [3, 457]], dtype='int16'))
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode='a')
            array1 = self.h5file.root.array1

        # Copy it to another Array, requesting user attribute copying
        array2 = array1.copy('/', 'array2', copyuserattrs=1)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Assert user attributes
        self.assertEqual(array2.attrs.attr1, "attr1")
        self.assertEqual(array2.attrs.attr2, 2)

    def test05b_copy(self):
        """Checking EArray.copy() method (user attributes not copied)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test05b_copy..." % self.__class__.__name__)

        # Create an Array
        atom = Int16Atom()
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        array1.append(numpy.array([[456, 2], [3, 457]], dtype='int16'))
        # Append some user attrs
        array1.attrs.attr1 = "attr1"
        array1.attrs.attr2 = 2

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen(mode='a')
            array1 = self.h5file.root.array1

        # Copy it to another Array, explicitly disabling attribute copying
        array2 = array1.copy('/', 'array2', copyuserattrs=0)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1
            array2 = self.h5file.root.array2

        if common.verbose:
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Assert user attributes were not propagated
        self.assertEqual(hasattr(array2.attrs, "attr1"), 0)
        self.assertEqual(hasattr(array2.attrs, "attr2"), 0)
class CloseCopyTestCase(CopyTestCase):
    """Run the copy tests closing/reopening the file around each copy."""

    close = 1
class OpenCopyTestCase(CopyTestCase):
    """Run the copy tests with the file kept open throughout."""

    close = 0
class CopyIndexTestCase(common.TempFileMixin, TestCase):
    """Check EArray.copy() with start/stop/step selections.

    Subclasses provide ``start``, ``stop``, ``step`` and ``nrowsinbuf``;
    the copied data must match the equivalent numpy slice
    ``r[start:stop:step]``.
    """

    nrowsinbuf = 2

    def test01_index(self):
        """Checking EArray.copy() method with indexes."""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test01_index..." % self.__class__.__name__)

        # Create an EArray with 100 rows of 2 int32 values each
        atom = Int32Atom()
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        r = numpy.arange(200, dtype='int32')
        r.shape = (100, 2)
        array1.append(r)

        # Select a different buffer size:
        array1.nrowsinbuf = self.nrowsinbuf

        # Copy to another array
        array2 = array1.copy("/", 'array2',
                             start=self.start,
                             stop=self.stop,
                             step=self.step)
        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal to the reference slice
        r2 = r[self.start:self.stop:self.step]
        self.assertTrue(allequal(r2, array2.read()))

        # Assert the number of rows in array
        if common.verbose:
            print("nrows in array2-->", array2.nrows)
            print("and it should be-->", r2.shape[0])
        self.assertEqual(r2.shape[0], array2.nrows)

    def test02_indexclosef(self):
        """Checking EArray.copy() method with indexes (close file version)"""

        if common.verbose:
            print('\n', '-=' * 30)
            print("Running %s.test02_indexclosef..." % self.__class__.__name__)

        # Create an EArray with 100 rows of 2 int32 values each
        atom = Int32Atom()
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        r = numpy.arange(200, dtype='int32')
        r.shape = (100, 2)
        array1.append(r)

        # Select a different buffer size:
        array1.nrowsinbuf = self.nrowsinbuf

        # Copy to another array
        array2 = array1.copy("/", 'array2',
                             start=self.start,
                             stop=self.stop,
                             step=self.step)
        # Close and reopen the file, then verify the copy persisted
        self._reopen()
        array1 = self.h5file.root.array1
        array2 = self.h5file.root.array2

        if common.verbose:
            print("array1-->", array1.read())
            print("array2-->", array2.read())
            print("attrs array1-->", repr(array1.attrs))
            print("attrs array2-->", repr(array2.attrs))

        # Check that all the elements are equal to the reference slice
        r2 = r[self.start:self.stop:self.step]
        self.assertTrue(allequal(r2, array2.read()))

        # Assert the number of rows in array
        if common.verbose:
            print("nrows in array2-->", array2.nrows)
            print("and it should be-->", r2.shape[0])
        self.assertEqual(r2.shape[0], array2.nrows)
class CopyIndex1TestCase(CopyIndexTestCase):
    # Single-row buffer; copy rows [0:7:1].
    nrowsinbuf = 1
    start = 0
    stop = 7
    step = 1
class CopyIndex2TestCase(CopyIndexTestCase):
    # Copy everything up to (but not including) the last row.
    nrowsinbuf = 2
    start = 0
    stop = -1
    step = 1
class CopyIndex3TestCase(CopyIndexTestCase):
    # Non-zero start; copy rows [1:7:1].
    nrowsinbuf = 3
    start = 1
    stop = 7
    step = 1
class CopyIndex4TestCase(CopyIndexTestCase):
    # Copy rows [0:6:1] with a 4-row buffer.
    nrowsinbuf = 4
    start = 0
    stop = 6
    step = 1
class CopyIndex5TestCase(CopyIndexTestCase):
    # Copy rows [3:7:1].
    nrowsinbuf = 2
    start = 3
    stop = 7
    step = 1
class CopyIndex6TestCase(CopyIndexTestCase):
    # Copy rows [3:6:2] (step larger than one).
    nrowsinbuf = 2
    start = 3
    stop = 6
    step = 2
class CopyIndex7TestCase(CopyIndexTestCase):
    # Step larger than the whole range: only the first row is copied.
    start = 0
    stop = 7
    step = 10
class CopyIndex8TestCase(CopyIndexTestCase):
    start = 6
    stop = -1  # Negative values means starting from the end
    step = 1
class CopyIndex9TestCase(CopyIndexTestCase):
    # Single-row range [3:4:1].
    start = 3
    stop = 4
    step = 1
class CopyIndex10TestCase(CopyIndexTestCase):
    # Single-row range with step 2 and a one-row buffer.
    nrowsinbuf = 1
    start = 3
    stop = 4
    step = 2
class CopyIndex11TestCase(CopyIndexTestCase):
    # Negative start and stop (counted from the end).
    start = -3
    stop = -1
    step = 2
class CopyIndex12TestCase(CopyIndexTestCase):
    start = -1  # Should point to the last element
    stop = None  # None should mean the last element (including it)
    step = 1
class TruncateTestCase(common.TempFileMixin, TestCase):
    """Check EArray.truncate() for sizes below, equal to and above the
    current number of rows (growing must fill with the atom's default, 3).

    Fixed relative to the original: the numarray-era ``'Int16'`` dtype
    alias (invalid in modern NumPy) is now lowercase ``'int16'``.
    """

    def setUp(self):
        super(TruncateTestCase, self).setUp()

        # Create an EArray whose atom defaults to 3 (checked by
        # test03_truncate when the array is grown).
        atom = Int16Atom(dflt=3)
        array1 = self.h5file.create_earray(self.h5file.root, 'array1',
                                           atom=atom, shape=(0, 2),
                                           title="title array1")
        # Add a couple of rows
        array1.append(numpy.array([[456, 2], [3, 457]], dtype='int16'))

    def test00_truncate(self):
        """Checking EArray.truncate() method (truncating to 0 rows)"""

        array1 = self.h5file.root.array1

        # Truncate to 0 elements
        array1.truncate(0)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1

        if common.verbose:
            print("array1-->", array1.read())

        self.assertTrue(allequal(
            array1[:], numpy.array([], dtype='int16').reshape(0, 2)))

    def test01_truncate(self):
        """Checking EArray.truncate() method (truncating to 1 rows)"""

        array1 = self.h5file.root.array1

        # Truncate to 1 element
        array1.truncate(1)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1

        if common.verbose:
            print("array1-->", array1.read())

        self.assertTrue(allequal(
            array1.read(), numpy.array([[456, 2]], dtype='int16')))

    def test02_truncate(self):
        """Checking EArray.truncate() method (truncating to == self.nrows)"""

        array1 = self.h5file.root.array1

        # Truncate to 2 elements (a no-op data-wise)
        array1.truncate(2)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1

        if common.verbose:
            print("array1-->", array1.read())

        self.assertTrue(
            allequal(array1.read(),
                     numpy.array([[456, 2], [3, 457]], dtype='int16')))

    def test03_truncate(self):
        """Checking EArray.truncate() method (truncating to > self.nrows)"""

        array1 = self.h5file.root.array1

        # "Truncate" beyond the current size: the array must grow
        array1.truncate(4)

        if self.close:
            if common.verbose:
                print("(closing file version)")
            self._reopen()
            array1 = self.h5file.root.array1

        if common.verbose:
            print("array1-->", array1.read())

        self.assertEqual(array1.nrows, 4)
        # Check the original values
        self.assertTrue(allequal(array1[:2], numpy.array([[456, 2], [3, 457]],
                                                         dtype='int16')))
        # Check that the added rows have the default values (dflt=3)
        self.assertTrue(allequal(array1[2:], numpy.array([[3, 3], [3, 3]],
                                                         dtype='int16')))
class TruncateOpenTestCase(TruncateTestCase):
    """Truncate tests with the file kept open."""

    close = 0
class TruncateCloseTestCase(TruncateTestCase):
    """Truncate tests closing/reopening the file after truncation."""

    close = 1
# The next test should be run only in **common.heavy** mode
class Rows64bitsTestCase(common.TempFileMixin, TestCase):
    """Basic checks on EArrays with more than 2**31-1 rows (64-bit row
    counts).  Heavy: appends 3 billion int8 values.

    Fixed relative to the original: the numarray-era ``'Int8'`` dtype
    alias (invalid in modern NumPy) is now lowercase ``'int8'``.
    """

    open_mode = 'a'
    narows = 1000 * 1000   # each numpy object will have 1 million entries
    # narows = 1000      # for testing only
    nanumber = 1000 * 3    # That should account for more than 2**31-1

    def setUp(self):
        super(Rows64bitsTestCase, self).setUp()

        # Create an EArray
        array = self.h5file.create_earray(
            self.h5file.root, 'array',
            atom=tables.Int8Atom(), shape=(0,),
            filters=tables.Filters(complib='lzo', complevel=1),
            # Specifying expectedrows takes more
            # CPU, but less disk
            expectedrows=self.narows * self.nanumber)

        # Fill the array by appending the same 1M-element block repeatedly
        na = numpy.arange(self.narows, dtype='int8')
        for _ in range(self.nanumber):
            array.append(na)

    def test01_basiccheck(self):
        """Some basic checks for earrays exceeding 2**31 rows"""

        array = self.h5file.root.array

        if self.close:
            if common.verbose:
                # Check how many entries there are in the array
                print("Before closing")
                print("Entries:", array.nrows, type(array.nrows))
                print("Entries:", array.nrows / (1000 * 1000), "Millions")
                print("Shape:", array.shape)

            # Close the file
            self._reopen()

            array = self.h5file.root.array
            if common.verbose:
                print("After re-open")

        # Check how many entries there are in the array
        if common.verbose:
            print("Entries:", array.nrows, type(array.nrows))
            print("Entries:", array.nrows / (1000 * 1000), "Millions")
            print("Shape:", array.shape)
            print("Last 10 elements-->", array[-10:])
            # int8 wraps around every 256 values; reproduce the expected
            # tail of the last appended block.
            stop = self.narows % 256
            if stop > 127:
                stop -= 256
            start = stop - 10
            print("Should look like-->", numpy.arange(start, stop,
                                                      dtype='int8'))

        nrows = self.narows * self.nanumber
        # check nrows
        self.assertEqual(array.nrows, nrows)
        # Check shape
        self.assertEqual(array.shape, (nrows,))
        # check the 10 first elements
        self.assertTrue(allequal(array[:10], numpy.arange(10, dtype='int8')))
        # check the 10 last elements (accounting for int8 wrap-around)
        stop = self.narows % 256
        if stop > 127:
            stop -= 256
        start = stop - 10
        self.assertTrue(allequal(array[-10:],
                                 numpy.arange(start, stop, dtype='int8')))
class Rows64bitsTestCase1(Rows64bitsTestCase):
    """64-bit row count checks with the file kept open."""

    close = 0
class Rows64bitsTestCase2(Rows64bitsTestCase):
    """64-bit row count checks closing/reopening the file first."""

    close = 1
# Test for appending zero-sized arrays
class ZeroSizedTestCase(common.TempFileMixin, TestCase):
    """Appending zero-length arrays to an EArray.

    ``setUp`` creates ``/test`` with shape ``(3, 0)`` — extendable along
    the second axis — and appends one row, so every test starts from a
    single-row array.
    """

    open_mode = 'a'

    def setUp(self):
        super(ZeroSizedTestCase, self).setUp()
        # Create the extendable array and give it one initial row.
        earr = self.h5file.create_earray('/', 'test',
                                         atom=Int32Atom(), shape=(3, 0))
        earr.append([[1], [2], [3]])

    def test01_canAppend(self):
        """Appending zero length array."""

        earr = self.h5file.root.test
        empty = numpy.empty(shape=(3, 0), dtype='int32')
        # A zero-length append must succeed and leave the row count alone.
        earr.append(empty)
        self.assertEqual(earr.nrows, 1, "The number of rows should be 1.")

    def test02_appendWithWrongShape(self):
        """Appending zero length array with wrong dimension."""

        earr = self.h5file.root.test
        bad = numpy.empty(shape=(3, 0, 3), dtype='int32')
        # Even when empty, a rank mismatch must be rejected.
        self.assertRaises(ValueError, earr.append, bad)
# Test for dealing with multidimensional atoms
class MDAtomTestCase(common.TempFileMixin, TestCase):
    """Appending to EArrays whose atoms are multidimensional.

    Subclasses set ``reopen`` so the checks run both on the freshly
    created node and on the node as read back from disk.
    """

    def test01a_append(self):
        """Append a row to a (unidimensional) EArray with a MD tables.Atom."""
        # Create an EArray
        ea = self.h5file.create_earray('/', 'test',
                                       atom=Int32Atom((2, 2)), shape=(0,))
        if self.reopen:
            self._reopen('a')
            ea = self.h5file.root.test
        # Append one row
        ea.append([[[1, 3], [4, 5]]])
        self.assertEqual(ea.nrows, 1)
        if common.verbose:
            print("First row-->", ea[0])
        self.assertTrue(allequal(ea[0], numpy.array([[1, 3], [4, 5]], 'i4')))

    def test01b_append(self):
        """Append several rows to a (unidimensional) EArray with a MD
        tables.Atom."""
        # Create an EArray
        ea = self.h5file.create_earray('/', 'test',
                                       atom=Int32Atom((2, 2)), shape=(0,))
        if self.reopen:
            self._reopen('a')
            ea = self.h5file.root.test
        # Append three rows; each scalar broadcasts to the (2, 2) atom shape.
        ea.append([[[1]], [[2]], [[3]]])  # Simple broadcast
        self.assertEqual(ea.nrows, 3)
        if common.verbose:
            print("Third row-->", ea[2])
        self.assertTrue(allequal(ea[2], numpy.array([[3, 3], [3, 3]], 'i4')))

    def test02a_append(self):
        """Append a row to a (multidimensional) EArray with a
        MD tables.Atom."""
        # Create an EArray
        ea = self.h5file.create_earray('/', 'test',
                                       atom=Int32Atom((2,)), shape=(0, 3))
        if self.reopen:
            self._reopen('a')
            ea = self.h5file.root.test
        # Append one row
        ea.append([[[1, 3], [4, 5], [7, 9]]])
        self.assertEqual(ea.nrows, 1)
        if common.verbose:
            print("First row-->", ea[0])
        self.assertTrue(allequal(ea[0], numpy.array(
            [[1, 3], [4, 5], [7, 9]], 'i4')))

    def test02b_append(self):
        """Append several rows to a (multidimensional) EArray with a MD
        tables.Atom."""
        # Create an EArray
        ea = self.h5file.create_earray('/', 'test',
                                       atom=Int32Atom((2,)), shape=(0, 3))
        if self.reopen:
            self._reopen('a')
            ea = self.h5file.root.test
        # Append three rows
        ea.append([[[1, -3], [4, -5], [-7, 9]],
                   [[-1, 3], [-4, 5], [7, -8]],
                   [[-2, 3], [-5, 5], [7, -9]]])
        self.assertEqual(ea.nrows, 3)
        if common.verbose:
            print("Third row-->", ea[2])
        self.assertTrue(allequal(
            ea[2], numpy.array([[-2, 3], [-5, 5], [7, -9]], 'i4')))

    def test03a_MDMDMD(self):
        """Complex append of a MD array in a MD EArray with a
        MD tables.Atom."""
        # Create an EArray
        ea = self.h5file.create_earray('/', 'test', atom=Int32Atom((2, 4)),
                                       shape=(0, 2, 3))
        if self.reopen:
            self._reopen('a')
            ea = self.h5file.root.test
        # Append three rows
        # The shape of the atom should be added at the end of the arrays
        a = numpy.arange(2 * 3*2*4, dtype='i4').reshape((2, 3, 2, 4))
        ea.append([a * 1, a*2, a*3])
        self.assertEqual(ea.nrows, 3)
        if common.verbose:
            print("Third row-->", ea[2])
        self.assertTrue(allequal(ea[2], a * 3))

    def test03b_MDMDMD(self):
        "Complex append of a MD array in a MD EArray with a MD atom (II)."
        # Create an EArray; the extendable axis is now the second one.
        ea = self.h5file.create_earray('/', 'test', atom=Int32Atom((2, 4)),
                                       shape=(2, 0, 3))
        if self.reopen:
            self._reopen('a')
            ea = self.h5file.root.test
        # Append three rows
        # The shape of the atom should be added at the end of the arrays
        a = numpy.arange(2 * 3*2*4, dtype='i4').reshape((2, 1, 3, 2, 4))
        ea.append(a * 1)
        ea.append(a * 2)
        ea.append(a * 3)
        self.assertEqual(ea.nrows, 3)
        if common.verbose:
            print("Third row-->", ea[:, 2, ...])
        self.assertTrue(allequal(ea[:, 2, ...], a.reshape((2, 3, 2, 4))*3))

    def test03c_MDMDMD(self):
        "Complex append of a MD array in a MD EArray with a MD atom (III)."
        # Create an EArray; the extendable axis is now the third one.
        ea = self.h5file.create_earray('/', 'test', atom=Int32Atom((2, 4)),
                                       shape=(2, 3, 0))
        if self.reopen:
            self._reopen('a')
            ea = self.h5file.root.test
        # Append three rows
        # The shape of the atom should be added at the end of the arrays
        a = numpy.arange(2 * 3*2*4, dtype='i4').reshape((2, 3, 1, 2, 4))
        ea.append(a * 1)
        ea.append(a * 2)
        ea.append(a * 3)
        self.assertEqual(ea.nrows, 3)
        if common.verbose:
            print("Third row-->", ea[:, :, 2, ...])
        self.assertTrue(allequal(ea[:, :, 2, ...], a.reshape((2, 3, 2, 4))*3))
class MDAtomNoReopen(MDAtomTestCase):
    # Check the EArray right after creation, without reopening the file.
    reopen = False
class MDAtomReopen(MDAtomTestCase):
    # Reopen the file in append mode before appending and checking.
    reopen = True
class AccessClosedTestCase(common.TempFileMixin, TestCase):
    """Every access to an EArray of a closed file must raise
    ``tables.ClosedNodeError``."""

    def setUp(self):
        super(AccessClosedTestCase, self).setUp()
        root = self.h5file.root
        self.array = self.h5file.create_earray(
            root, 'array', atom=Int32Atom(), shape=(0, 10))
        self.array.append(numpy.zeros((10, 10)))

    def test_read(self):
        self.h5file.close()
        with self.assertRaises(tables.ClosedNodeError):
            self.array.read()

    def test_getitem(self):
        self.h5file.close()
        with self.assertRaises(tables.ClosedNodeError):
            self.array[0]

    def test_setitem(self):
        self.h5file.close()
        with self.assertRaises(tables.ClosedNodeError):
            self.array[0] = 0

    def test_append(self):
        self.h5file.close()
        with self.assertRaises(tables.ClosedNodeError):
            self.array.append(numpy.zeros((10, 10)))
class TestCreateEArrayArgs(common.TempFileMixin, TestCase):
    """Check the argument-passing conventions of ``File.create_earray``.

    Each test creates the array through a different positional/keyword
    combination and then verifies the resulting node metadata after
    reopening the file.  The ``*_error`` tests check that mutually
    inconsistent ``obj``/``atom``/``shape`` combinations raise TypeError.

    The assertion groups were previously copy/pasted across every test
    method; they are factored into the ``_check_*`` helpers below.
    """

    obj = numpy.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    where = '/'
    name = 'earray'
    atom = tables.Atom.from_dtype(obj.dtype)
    shape = (0,) + obj.shape[1:]
    title = 'title'
    filters = None
    expectedrows = 1000
    chunkshape = (1, 2)
    byteorder = None
    createparents = False

    def _check_metadata(self, ptarr, shape, nrows):
        """Assert the metadata shared by every creation variant."""
        self.assertEqual(ptarr.title, self.title)
        self.assertEqual(ptarr.shape, shape)
        self.assertEqual(ptarr.nrows, nrows)
        self.assertEqual(ptarr.atom, self.atom)
        self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
        self.assertEqual(ptarr.chunkshape, self.chunkshape)

    def _check_filled(self):
        """Reopen the file and assert the node contains ``self.obj``."""
        self._reopen()
        ptarr = self.h5file.get_node(self.where, self.name)
        nparr = ptarr.read()
        self._check_metadata(ptarr, self.obj.shape, self.obj.shape[0])
        self.assertTrue(allequal(self.obj, nparr))

    def _check_empty(self):
        """Reopen the file and assert the node is still empty."""
        self._reopen()
        ptarr = self.h5file.get_node(self.where, self.name)
        self._check_metadata(ptarr, self.shape, 0)

    def _check_create_raises(self, **kwargs):
        """Assert that ``create_earray`` rejects ``obj`` plus ``kwargs``."""
        self.assertRaises(TypeError,
                          self.h5file.create_earray,
                          self.where,
                          self.name,
                          title=self.title,
                          obj=self.obj,
                          **kwargs)

    def test_positional_args_01(self):
        # All creation arguments passed positionally; no data appended.
        self.h5file.create_earray(self.where, self.name,
                                  self.atom, self.shape,
                                  self.title, self.filters,
                                  self.expectedrows, self.chunkshape)
        self._check_empty()

    def test_positional_args_02(self):
        # Positional arguments, then fill the array with ``obj``.
        ptarr = self.h5file.create_earray(self.where, self.name,
                                          self.atom, self.shape,
                                          self.title,
                                          self.filters,
                                          self.expectedrows,
                                          self.chunkshape)
        ptarr.append(self.obj)
        self._check_filled()

    def test_positional_args_obj(self):
        # ``obj`` passed positionally; atom and shape derived from it.
        self.h5file.create_earray(self.where, self.name,
                                  None, None,
                                  self.title,
                                  self.filters,
                                  self.expectedrows,
                                  self.chunkshape,
                                  self.byteorder,
                                  self.createparents,
                                  self.obj)
        self._check_filled()

    def test_kwargs_obj(self):
        self.h5file.create_earray(self.where, self.name, title=self.title,
                                  chunkshape=self.chunkshape,
                                  obj=self.obj)
        self._check_filled()

    def test_kwargs_atom_shape_01(self):
        ptarr = self.h5file.create_earray(self.where, self.name,
                                          title=self.title,
                                          chunkshape=self.chunkshape,
                                          atom=self.atom, shape=self.shape)
        ptarr.append(self.obj)
        self._check_filled()

    def test_kwargs_atom_shape_02(self):
        self.h5file.create_earray(self.where, self.name,
                                  title=self.title,
                                  chunkshape=self.chunkshape,
                                  atom=self.atom, shape=self.shape)
        # No data appended: the array must remain empty.
        self._check_empty()

    def test_kwargs_obj_atom(self):
        # A redundant but consistent ``atom`` next to ``obj`` is accepted.
        self.h5file.create_earray(self.where, self.name,
                                  title=self.title,
                                  chunkshape=self.chunkshape,
                                  obj=self.obj,
                                  atom=self.atom)
        self._check_filled()

    def test_kwargs_obj_shape(self):
        # A redundant but consistent ``shape`` next to ``obj`` is accepted.
        self.h5file.create_earray(self.where, self.name,
                                  title=self.title,
                                  chunkshape=self.chunkshape,
                                  obj=self.obj,
                                  shape=self.shape)
        self._check_filled()

    def test_kwargs_obj_atom_shape(self):
        self.h5file.create_earray(self.where, self.name,
                                  title=self.title,
                                  chunkshape=self.chunkshape,
                                  obj=self.obj,
                                  atom=self.atom,
                                  shape=self.shape)
        self._check_filled()

    def test_kwargs_obj_atom_error(self):
        # An atom whose dtype disagrees with ``obj`` must be rejected.
        atom = tables.Atom.from_dtype(numpy.dtype('complex'))
        self._check_create_raises(atom=atom)

    def test_kwargs_obj_shape_error(self):
        # A shape that disagrees with ``obj`` must be rejected.
        self._check_create_raises(shape=self.shape + self.shape)

    def test_kwargs_obj_atom_shape_error_01(self):
        # Inconsistent atom, consistent shape.
        atom = tables.Atom.from_dtype(numpy.dtype('complex'))
        self._check_create_raises(atom=atom, shape=self.shape)

    def test_kwargs_obj_atom_shape_error_02(self):
        # Consistent atom, inconsistent shape.
        self._check_create_raises(atom=self.atom,
                                  shape=self.shape + self.shape)

    def test_kwargs_obj_atom_shape_error_03(self):
        # Both atom and shape inconsistent with ``obj``.
        atom = tables.Atom.from_dtype(numpy.dtype('complex'))
        self._check_create_raises(atom=atom, shape=self.shape + self.shape)
def suite():
    """Build and return the unittest suite for this module."""
    theSuite = unittest.TestSuite()
    niter = 1
    # common.heavy = 1  # uncomment this only for testing purposes

    # Cases always exercised, in registration order.
    standard_cases = (
        BasicWriteTestCase, Basic2WriteTestCase, Basic3WriteTestCase,
        Basic4WriteTestCase, Basic5WriteTestCase, Basic6WriteTestCase,
        Basic7WriteTestCase, Basic8WriteTestCase,
        EmptyEArrayTestCase, Empty2EArrayTestCase,
        SlicesEArrayTestCase, Slices2EArrayTestCase,
        EllipsisEArrayTestCase, Ellipsis2EArrayTestCase,
        Ellipsis3EArrayTestCase,
        ZlibComprTestCase, ZlibShuffleTestCase,
        BloscComprTestCase, BloscShuffleTestCase,
        LZOComprTestCase, LZOShuffleTestCase,
        Bzip2ComprTestCase, Bzip2ShuffleTestCase,
        FloatTypeTestCase, ComplexTypeTestCase,
        StringTestCase, String2TestCase, StringComprTestCase,
        SizeOnDiskInMemoryPropertyTestCase,
        OffsetStrideTestCase, Fletcher32TestCase, AllFiltersTestCase,
        CloseCopyTestCase, OpenCopyTestCase,
        CopyIndex1TestCase, CopyIndex2TestCase, CopyIndex3TestCase,
        CopyIndex4TestCase, CopyIndex5TestCase,
        TruncateOpenTestCase, TruncateCloseTestCase,
        ZeroSizedTestCase, MDAtomNoReopen, MDAtomReopen,
        AccessClosedTestCase, TestCreateEArrayArgs,
    )
    # Slow cases, only exercised when common.heavy is enabled.
    heavy_cases = (
        Slices3EArrayTestCase, Slices4EArrayTestCase,
        Ellipsis4EArrayTestCase, Ellipsis5EArrayTestCase,
        Ellipsis6EArrayTestCase, Ellipsis7EArrayTestCase,
        MD3WriteTestCase, MD5WriteTestCase, MD6WriteTestCase,
        MD7WriteTestCase, MD10WriteTestCase,
        CopyIndex6TestCase, CopyIndex7TestCase, CopyIndex8TestCase,
        CopyIndex9TestCase, CopyIndex10TestCase, CopyIndex11TestCase,
        CopyIndex12TestCase,
        Rows64bitsTestCase1, Rows64bitsTestCase2,
    )

    for n in range(niter):
        for case in standard_cases:
            theSuite.addTest(unittest.makeSuite(case))
        if common.heavy:
            for case in heavy_cases:
                theSuite.addTest(unittest.makeSuite(case))
    return theSuite
if __name__ == '__main__':
    # Allow running this test module standalone from the command line.
    common.parse_argv(sys.argv)
    common.print_versions()
    unittest.main(defaultTest='suite')
## Local Variables:
## mode: python
## py-indent-offset: 4
## tab-width: 4
## End:
|
bsd-3-clause
|
crowning-/dash
|
qa/rpc-tests/blockchain.py
|
2
|
3904
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test RPC calls related to blockchain state. Tests correspond to code in
# rpc/blockchain.cpp.
#
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.authproxy import JSONRPCException
from test_framework.util import (
assert_equal,
assert_raises,
assert_is_hex_string,
assert_is_hash_string,
start_nodes,
connect_nodes_bi,
)
class BlockchainTest(BitcoinTestFramework):
    """
    Test blockchain-related RPC calls:

        - gettxoutsetinfo
        - verifychain

    """

    def __init__(self):
        super().__init__()
        # Reuse the cached 200-block regtest chain instead of a clean one.
        self.setup_clean_chain = False
        self.num_nodes = 1

    def setup_network(self, split=False):
        # A single node is sufficient for these read-only checks.
        self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        self._test_gettxoutsetinfo()
        self._test_getblockheader()
        # NOTE(review): presumably verifychain(checklevel, numblocks) with
        # numblocks=0 meaning "all blocks" -- confirm against the RPC help.
        self.nodes[0].verifychain(4, 0)

    def _test_gettxoutsetinfo(self):
        """Check gettxoutsetinfo against the known 200-block chain state,
        including after invalidating and reconsidering block 1."""
        node = self.nodes[0]
        res = node.gettxoutsetinfo()
        assert_equal(res['total_amount'], Decimal('98214.28571450'))
        assert_equal(res['transactions'], 200)
        assert_equal(res['height'], 200)
        assert_equal(res['txouts'], 200)
        size = res['disk_size']
        # Exact on-disk size varies; only sanity-bound it.
        assert size > 6400
        assert size < 64000
        assert_equal(len(res['bestblock']), 64)
        assert_equal(len(res['hash_serialized_2']), 64)
        print("Test that gettxoutsetinfo() works for blockchain with just the genesis block")
        b1hash = node.getblockhash(1)
        node.invalidateblock(b1hash)
        # With only the genesis block active, the UTXO set must be empty.
        res2 = node.gettxoutsetinfo()
        assert_equal(res2['transactions'], 0)
        assert_equal(res2['total_amount'], Decimal('0'))
        assert_equal(res2['height'], 0)
        assert_equal(res2['txouts'], 0)
        assert_equal(res2['bestblock'], node.getblockhash(0))
        assert_equal(len(res2['hash_serialized_2']), 64)
        print("Test that gettxoutsetinfo() returns the same result after invalidate/reconsider block")
        node.reconsiderblock(b1hash)
        # Reconsidering must restore exactly the original result.
        res3 = node.gettxoutsetinfo()
        assert_equal(res['total_amount'], res3['total_amount'])
        assert_equal(res['transactions'], res3['transactions'])
        assert_equal(res['height'], res3['height'])
        assert_equal(res['txouts'], res3['txouts'])
        assert_equal(res['bestblock'], res3['bestblock'])
        assert_equal(res['hash_serialized_2'], res3['hash_serialized_2'])

    def _test_getblockheader(self):
        """Check the field values and types returned by getblockheader."""
        node = self.nodes[0]
        # An invalid hash must be rejected.
        assert_raises(
            JSONRPCException, lambda: node.getblockheader('nonsense'))
        besthash = node.getbestblockhash()
        secondbesthash = node.getblockhash(199)
        header = node.getblockheader(besthash)
        assert_equal(header['hash'], besthash)
        assert_equal(header['height'], 200)
        assert_equal(header['confirmations'], 1)
        assert_equal(header['previousblockhash'], secondbesthash)
        assert_is_hex_string(header['chainwork'])
        assert_is_hash_string(header['hash'])
        assert_is_hash_string(header['previousblockhash'])
        assert_is_hash_string(header['merkleroot'])
        # 'bits' is hex but not a 64-char hash, hence length=None.
        assert_is_hash_string(header['bits'], length=None)
        assert isinstance(header['time'], int)
        assert isinstance(header['mediantime'], int)
        assert isinstance(header['nonce'], int)
        assert isinstance(header['version'], int)
        assert isinstance(int(header['versionHex'], 16), int)
        assert isinstance(header['difficulty'], Decimal)
if __name__ == '__main__':
    # Entry point when run directly by the test runner.
    BlockchainTest().main()
|
mit
|
peiyuwang/pants
|
tests/python/pants_test/backend/jvm/subsystems/test_custom_scala.py
|
8
|
4374
|
# coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from textwrap import dedent
from pants.backend.jvm.subsystems.scala_platform import ScalaPlatform
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.scala_library import ScalaLibrary
from pants.backend.jvm.tasks.scalastyle import Scalastyle
from pants.java.jar.jar_dependency import JarDependency
from pants_test.jvm.nailgun_task_test_base import NailgunTaskTestBase
from pants_test.subsystem.subsystem_util import init_subsystem
class CustomScalaTest(NailgunTaskTestBase):
    """Run Scalastyle against a 'custom' scala platform definition."""

    @classmethod
    def task_type(cls):
        # The task under test.
        return Scalastyle

    def setUp(self):
        super(CustomScalaTest, self).setUp()
        self.context()  # We don't need the context, but this ensures subsystem option registration.
        # A trivially valid scala source used as the lint target.
        self.create_file(
            relpath='a/scala/pass.scala',
            contents=dedent("""
              import java.util
              object HelloWorld {
                def main(args: Array[String]) {
                  println("Hello, world!")
                }
              }
            """))

    def _create_context(self, scalastyle_config=None, excludes=None, target_roots=None):
        # If config is not specified, then we override pants.ini scalastyle such that
        # we have a default scalastyle config xml but with empty excludes.
        self.set_options(skip=False, config=scalastyle_config, excludes=excludes)
        return self.context(target_roots=target_roots)

    def _create_scalastyle_config_file(self, rules=None):
        """Write a scalastyle XML config enabling the given rules and
        return its path."""
        # put a default rule there if rules are not specified.
        rules = rules or ['org.scalastyle.scalariform.ImportGroupingChecker']
        rule_section_xml = ''
        for rule in rules:
            rule_section_xml += dedent("""
              <check level="error" class="{rule}" enabled="true"></check>
            """.format(rule=rule))
        return self.create_file(
            relpath='scalastyle_config.xml',
            contents=dedent("""
              <scalastyle commentFilter="enabled">
                <name>Test Scalastyle configuration</name>
                {rule_section_xml}
              </scalastyle>
            """.format(rule_section_xml=rule_section_xml)))

    def scala_platform_setup(self):
        """Configure a 'custom' scala platform and create the jar targets
        it resolves (scalastyle, scala-repl, scalac)."""
        options = {
            ScalaPlatform.options_scope: {
                'version': 'custom',
                'suffix_version': '2.10',
            }
        }
        init_subsystem(ScalaPlatform, options)
        self.make_target('//:scalastyle',
                         JarLibrary,
                         jars=[JarDependency('org.scalastyle', 'scalastyle_2.10', '0.3.2')]
                         )
        self.make_target('//:scala-repl',
                         JarLibrary,
                         jars=[
                             JarDependency(org='org.scala-lang',
                                           name='jline',
                                           rev='2.10.5'),
                             JarDependency(org='org.scala-lang',
                                           name='scala-compiler',
                                           rev='2.10.5')])
        self.make_target('//:scalac',
                         JarLibrary,
                         jars=[JarDependency('org.scala-lang', 'scala-compiler', '2.10.5')])

    def test_custom_lib_spec(self):
        # With an explicit //:scala-library target the run must succeed.
        self.scala_platform_setup()
        self.make_target('//:scala-library',
                         JarLibrary,
                         jars=[JarDependency('org.scala-lang', 'scala-library', '2.10.5')])
        scala_target = self.make_target('a/scala:pass', ScalaLibrary, sources=['pass.scala'])
        context = self._create_context(
            scalastyle_config=self._create_scalastyle_config_file(),
            target_roots=[scala_target]
        )
        self.execute(context)

    def test_no_custom_target(self):
        with self.assertRaises(ValueError):
            # This should raise:
            # ValueError: Tests must make targets for traversable dependency specs
            # ahead of them being traversed, ScalaLibrary(a/scala:pass) tried to traverse
            # //:scala-library-custom which does not exist.
            self.scala_platform_setup()
            scala_target = self.make_target('a/scala:pass', ScalaLibrary, sources=['pass.scala'])
            context = self._create_context(
                scalastyle_config=self._create_scalastyle_config_file(),
                target_roots=[scala_target]
            )
            self.execute(context)
|
apache-2.0
|
richardfergie/googleads-python-lib
|
examples/adxbuyer/v201502/error_handling/handle_partial_failures.py
|
4
|
3576
|
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example demonstrates how to handle partial failures.
To get ad groups, run get_ad_groups.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
import re
from googleads import adwords
AD_GROUP_ID = 'INSERT_AD_GROUP_ID_HERE'
def main(client, ad_group_id):
  """Adds placement criteria to an ad group, handling partial failures.

  Two of the four placement URLs are intentionally invalid so the mutate
  call succeeds for some operations and reports errors for the others.

  Args:
    client: an initialized adwords.AdWordsClient instance.
    ad_group_id: the id of the ad group the placements are added to.

  NOTE: this file is Python 2 only (it uses the print statement below).
  """
  # Enable partial failure.
  client.partial_failure = True

  # Initialize appropriate service.
  ad_group_criterion_service = client.GetService(
      'AdGroupCriterionService', version='v201502')

  # Construct placement ad group criteria objects.
  placements = [
      {
          'xsi_type': 'BiddableAdGroupCriterion',
          'adGroupId': ad_group_id,
          'criterion': {
              'xsi_type': 'Placement',
              'url': 'www.example.com/something'
          }
      },
      {
          'xsi_type': 'BiddableAdGroupCriterion',
          'adGroupId': ad_group_id,
          'criterion': {
              'xsi_type': 'Placement',
              'url': 'INVALID!!_URL'
          }
      },
      {
          'xsi_type': 'BiddableAdGroupCriterion',
          'adGroupId': ad_group_id,
          'criterion': {
              'xsi_type': 'Placement',
              'url': 'www.example.com/somethingelse'
          }
      },
      {
          'xsi_type': 'BiddableAdGroupCriterion',
          'adGroupId': ad_group_id,
          'criterion': {
              'xsi_type': 'Placement',
              'url': 'BAD!!_URL'
          }
      }
  ]

  # Construct operations and add ad group criteria.
  operations = []
  for placement in placements:
    operations.append(
        {
            'operator': 'ADD',
            'operand': placement
        })
  result = ad_group_criterion_service.mutate(operations)

  # Display results.
  for criterion in result['value']:
    if criterion['AdGroupCriterion.Type'] == 'BiddableAdGroupCriterion':
      print ('Added placement ad group criterion with ad group id \'%s\', '
             'criterion id \'%s\' and url \'%s\''
             % (criterion['adGroupId'], criterion['criterion']['id'],
                criterion['criterion']['url']))
  # Map each failure back to its originating operation via the index
  # embedded in fieldPath, e.g. "operations[1].operand".
  for error in result['partialFailureErrors']:
    index = re.findall(r'operations\[(.*)\]\.', error['fieldPath'])
    if index:
      print ('Placement ad group criterion with ad group id \'%s\' and url '
             '\'%s\' triggered a failure for the following reason: \'%s\'.'
             % (placements[int(index[0])]['adGroupId'],
                placements[int(index[0])]['criterion']['url'],
                error['errorString']))
    else:
      print 'The following failure has occurred: \'%s\'.' % error['errorString']
if __name__ == '__main__':
  # Initialize client object from the googleads.yaml credentials file.
  adwords_client = adwords.AdWordsClient.LoadFromStorage()
  main(adwords_client, AD_GROUP_ID)
|
apache-2.0
|
Peddle/hue
|
desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/email_notifications.py
|
37
|
5300
|
import sys
import traceback
from optparse import make_option
from django.conf import settings
from django.core.mail import send_mail
from django.core.management.base import BaseCommand
class EmailNotificationCommand(BaseCommand):
    """A BaseCommand subclass which adds sending email fuctionality.

    Subclasses will have an extra command line option ``--email-notification``
    and will be able to send emails by calling ``send_email_notification()``
    if SMTP host and port are specified in settings. The handling of the
    command line option is left to the management command implementation.
    Configuration is done in settings.EMAIL_NOTIFICATIONS dict.

    Configuration example::

        EMAIL_NOTIFICATIONS = {
            'scripts.my_script': {
                'subject': 'my_script subject',
                'body': 'my_script body',
                'from_email': 'from_email@example.com',
                'recipients': ('recipient0@example.com',),
                'no_admins': False,
                'no_traceback': False,
                'notification_level': 0,
                'fail_silently': False
            },
            'scripts.another_script': {
                ...
            },
            ...
        }

    Configuration explained:
        subject: Email subject.
        body: Email body.
        from_email: Email from address.
        recipients: Sequence of email recipient addresses.
        no_admins: When True do not include ADMINS to recipients.
        no_traceback: When True do not include traceback to email body.
        notification_level: 0: send email on fail, 1: send email always.
        fail_silently: Parameter passed to django's send_mail().
    """

    # Extra command line options controlling the notification behaviour.
    option_list = BaseCommand.option_list + (
        make_option('--email-notifications',
                    action='store_true',
                    dest='email_notifications',
                    help='Send email notifications for command.'),
        make_option('--email-exception',
                    action='store_true',
                    dest='email_exception',
                    help='Send email for command exceptions.'),
    )

    def run_from_argv(self, argv):
        """Overriden in order to access the command line arguments."""
        # Remember the full invocation so it can be echoed in the email body.
        self.argv_string = ' '.join(argv)
        super(EmailNotificationCommand, self).run_from_argv(argv)

    def execute(self, *args, **options):
        """Overriden in order to send emails on unhandled exception.

        If an unhandled exception in ``def handle(self, *args, **options)``
        occurs and `--email-exception` is set or `self.email_exception` is
        set to True send an email to ADMINS with the traceback and then
        reraise the exception.
        """
        try:
            super(EmailNotificationCommand, self).execute(*args, **options)
        except Exception as e:
            # Notify only when requested via option or class attribute.
            if (options.get('email_exception', False) or getattr(self, 'email_exception', False)):
                self.send_email_notification(include_traceback=True)
            raise e

    def send_email_notification(self, notification_id=None,
                                include_traceback=False, verbosity=1):
        """Send email notifications.

        Reads settings from settings.EMAIL_NOTIFICATIONS dict, if available,
        using ``notification_id`` as a key or else provides reasonable
        defaults.
        """
        # Load email notification settings if available
        if notification_id is not None:
            try:
                email_settings = settings.EMAIL_NOTIFICATIONS.get(notification_id, {})
            except AttributeError:
                email_settings = {}
        else:
            email_settings = {}
        # Exit if no traceback found and not in 'notify always' mode
        if (not include_traceback and not email_settings.get('notification_level', 0)):
            print(self.style.ERROR("Exiting, not in 'notify always' mode."))
            return
        # Set email fields.
        subject = email_settings.get('subject', "Django extensions email notification.")
        body = email_settings.get(
            'body',
            "Reporting execution of command: '%s'" % self.argv_string
        )
        # Include traceback
        if (include_traceback and not email_settings.get('no_traceback', False)):
            try:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                trb = ''.join(traceback.format_tb(exc_traceback))
                body += "\n\nTraceback:\n\n%s\n" % trb
            finally:
                # Avoid a reference cycle through the held traceback object.
                del exc_traceback
        # Set from address
        from_email = email_settings.get('from_email', settings.DEFAULT_FROM_EMAIL)
        # Calculate recipients
        recipients = list(email_settings.get('recipients', []))
        if not email_settings.get('no_admins', False):
            recipients.extend([a[1] for a in settings.ADMINS])
        if not recipients:
            if verbosity > 0:
                print(self.style.ERROR("No email recipients available."))
            return
        # Send email...
        send_mail(subject, body, from_email, recipients,
                  fail_silently=email_settings.get('fail_silently', True))
|
apache-2.0
|
gonuke/cyclus
|
getdecay.py
|
6
|
2050
|
#! /usr/bin/env python
from __future__ import print_function, unicode_literals
import os
import sys
import io
import shutil
import tarfile
import argparse
if sys.version_info[0] < 3:
from urllib import urlopen
else:
from urllib.request import urlopen
DECAY_H = os.path.join('src', 'pyne_decay.h')
DECAY_CPP = os.path.join('src', 'pyne_decay.cc')
DECAY_H_REP = os.path.join('src', '_pyne_decay.h')
DECAY_CPP_REP = os.path.join('src', '_pyne_decay.cc')
DECAY_URL = 'http://data.pyne.io/decay.tar.gz'
def ensure_include():
    """Prepend the pyne headers to the decay .cc file if they are missing."""
    with io.open(DECAY_CPP, 'r') as srcfile:
        contents = srcfile.read()
    if contents.startswith('#include "pyne.h"'):
        # Headers already present; nothing to do.
        return
    header_lines = ('#include "pyne.h"\n'
                    '#include "pyne_decay.h"\n')
    with io.open(DECAY_CPP, 'w') as srcfile:
        srcfile.write(header_lines + contents)
def download():
    """Download and unpack the pre-generated decay source files.

    Fetches DECAY_URL, extracts the gzipped tarball into the current
    directory and moves the sources into place under ``src``.

    Returns:
        True on success, False if the download failed.
    """
    print('Downloading ' + DECAY_URL)
    try:
        durl = urlopen(DECAY_URL)
        try:
            d = durl.read()
        finally:
            # Close exactly once; the original code called close() twice,
            # the second time on an already-closed handle.
            durl.close()
    except IOError:
        print('...failed!')
        return False
    f = io.BytesIO(d)
    tar = tarfile.open(fileobj=f, mode='r:gz')
    try:
        tar.extractall()
    finally:
        tar.close()
    shutil.move('decay.h', DECAY_H)
    shutil.move('decay.cpp', DECAY_CPP)
    return True
def ensure_decay():
    """Make sure usable decay source files exist.

    If both files are already present (and the .cc is bigger than 1 MB,
    i.e. not a stub), do nothing. Otherwise try to download them, and as
    a last resort fall back to the surrogate sources in the repository.
    """
    one_mb = 1024 ** 2
    have_real_files = (os.path.isfile(DECAY_H) and
                       os.path.isfile(DECAY_CPP) and
                       os.stat(DECAY_CPP).st_size > one_mb)
    if have_real_files:
        return
    if download():
        ensure_include()
        return
    # Download failed: install the surrogate sources instead.
    banner = '!' * 42
    print(banner)
    print('Decay files could not be downloaded or generated, using surrogates instead.')
    print(banner + '\n')
    shutil.copy(DECAY_H_REP, DECAY_H)
    shutil.copy(DECAY_CPP_REP, DECAY_CPP)
    ensure_include()
if __name__ == '__main__':
    # Command-line entry point: change into the project root, then ensure
    # the decay source files are present (download them, or fall back to
    # the surrogate sources shipped with the repo).
    desc = 'Downloads pre-generated decay source code for Cyclus.'
    parser = argparse.ArgumentParser(description=desc)
    desc = 'Root directory for Cyclus project code.'
    parser.add_argument('--root', help=desc, default='.')
    args = parser.parse_args()
    os.chdir(args.root)
    ensure_decay()
|
bsd-3-clause
|
ijat/Hotspot-PUTRA-Auto-login
|
PyInstaller-3.2/PyInstaller/hooks/hook-PyQt5.QtWebEngineWidgets.py
|
1
|
2207
|
#-----------------------------------------------------------------------------
# Copyright (c) 2014-2016, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
from PyInstaller.utils.hooks import get_qmake_path
import PyInstaller.compat as compat
# Modules that PyInstaller's static import analysis misses for
# PyQt5.QtWebEngineWidgets; listed here so they get bundled.
hiddenimports = ["sip",
                 "PyQt5.QtCore",
                 "PyQt5.QtGui",
                 "PyQt5.QtNetwork",
                 "PyQt5.QtWebChannel",
                 ]
# Find the additional files necessary for QtWebEngine.
# Currently only implemented for OSX.
# Note that for QtWebEngineProcess to be able to find icudtl.dat the bundle_identifier
# must be set to 'org.qt-project.Qt.QtWebEngineCore'. This can be done by passing
# bundle_identifier='org.qt-project.Qt.QtWebEngineCore' to the BUNDLE command in
# the .spec file. FIXME: This is not ideal and a better solution is required.
qmake = get_qmake_path('5')
if qmake:
    # Ask qmake where the Qt libraries/frameworks are installed.
    libdir = compat.exec_command(qmake, "-query", "QT_INSTALL_LIBS").strip()
    if compat.is_darwin:
        # Helper executable that QtWebEngine spawns for rendering; it must
        # be shipped inside the bundle at the path Qt expects.
        binaries = [
            (os.path.join(libdir, 'QtWebEngineCore.framework', 'Versions', '5',\
                'Helpers', 'QtWebEngineProcess.app', 'Contents', 'MacOS', 'QtWebEngineProcess'),
            os.path.join('QtWebEngineProcess.app', 'Contents', 'MacOS'))
        ]
        resources_dir = os.path.join(libdir, 'QtWebEngineCore.framework', 'Versions', '5', 'Resources')
        # NOTE(review): binaries/datas are only assigned on darwin; on other
        # platforms this hook contributes hiddenimports only (per the
        # "only implemented for OSX" comment above).
        datas = [
            (os.path.join(resources_dir, 'icudtl.dat'),''),
            (os.path.join(resources_dir, 'qtwebengine_resources.pak'), ''),
            # The distributed Info.plist has LSUIElement set to true, which prevents the
            # icon from appearing in the dock.
            (os.path.join(libdir, 'QtWebEngineCore.framework', 'Versions', '5',\
                'Helpers', 'QtWebEngineProcess.app', 'Contents', 'Info.plist'),
            os.path.join('QtWebEngineProcess.app', 'Contents'))
        ]
|
gpl-3.0
|
AlexanderFabisch/scikit-learn
|
sklearn/manifold/t_sne.py
|
13
|
34618
|
# Author: Alexander Fabisch -- <afabisch@informatik.uni-bremen.de>
# Author: Christopher Moody <chrisemoody@gmail.com>
# Author: Nick Travers <nickt@squareup.com>
# License: BSD 3 clause (C) 2014
# This is the exact and Barnes-Hut t-SNE implementation. There are other
# modifications of the algorithm:
# * Fast Optimization for t-SNE:
# http://cseweb.ucsd.edu/~lvdmaaten/workshops/nips2010/papers/vandermaaten.pdf
import numpy as np
from scipy import linalg
import scipy.sparse as sp
from scipy.spatial.distance import pdist
from scipy.spatial.distance import squareform
from ..neighbors import BallTree
from ..base import BaseEstimator
from ..utils import check_array
from ..utils import check_random_state
from ..utils.extmath import _ravel
from ..decomposition import RandomizedPCA
from ..metrics.pairwise import pairwise_distances
from . import _utils
from . import _barnes_hut_tsne
from ..utils.fixes import astype
MACHINE_EPSILON = np.finfo(np.double).eps
def _joint_probabilities(distances, desired_perplexity, verbose):
    """Compute joint probabilities p_ij from distances.

    Parameters
    ----------
    distances : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed pairwise distance matrix: the diagonal and duplicate
        entries are omitted and everything is stored in one dimension.

    desired_perplexity : float
        Desired perplexity of the joint probability distributions.

    verbose : int
        Verbosity level.

    Returns
    -------
    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix.
    """
    # Binary-search per-sample Gaussian bandwidths so that each conditional
    # distribution approximately matches the requested perplexity.
    distances = astype(distances, np.float32, copy=False)
    cond_probs = _utils._binary_search_perplexity(
        distances, None, desired_perplexity, verbose)
    # Symmetrize to joint probabilities, normalize, and clamp away zeros so
    # later log/division operations stay finite.
    joint = cond_probs + cond_probs.T
    normalizer = np.maximum(np.sum(joint), MACHINE_EPSILON)
    return np.maximum(squareform(joint) / normalizer, MACHINE_EPSILON)
def _joint_probabilities_nn(distances, neighbors, desired_perplexity, verbose):
    """Compute joint probabilities p_ij from distances using just nearest
    neighbors.

    This method is approximately equal to _joint_probabilities. The latter
    is O(N), but limiting the joint probability to nearest neighbors improves
    this substantially to O(uN).

    Parameters
    ----------
    distances : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed pairwise distance matrix (diagonal and duplicate entries
        omitted, stored in one dimension).

    neighbors : array, shape (n_samples, K)
        Indices of each sample's K nearest neighbors.

    desired_perplexity : float
        Desired perplexity of the joint probability distributions.

    verbose : int
        Verbosity level.

    Returns
    -------
    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix.
    """
    # Restrict the perplexity binary search to the supplied neighbors.
    distances = astype(distances, np.float32, copy=False)
    neighbors = astype(neighbors, np.int64, copy=False)
    cond_probs = _utils._binary_search_perplexity(
        distances, neighbors, desired_perplexity, verbose)
    assert np.all(np.isfinite(cond_probs)), \
        "All probabilities should be finite"
    # Symmetrize, normalize, and clamp exactly as in _joint_probabilities.
    joint = cond_probs + cond_probs.T
    normalizer = np.maximum(np.sum(joint), MACHINE_EPSILON)
    P = np.maximum(squareform(joint) / normalizer, MACHINE_EPSILON)
    assert np.all(np.abs(P) <= 1.0)
    return P
def _kl_divergence(params, P, degrees_of_freedom, n_samples, n_components,
                   skip_num_points=0):
    """t-SNE objective function: gradient of the KL divergence
    of p_ijs and q_ijs and the absolute error.

    Parameters
    ----------
    params : array, shape (n_params,)
        Unraveled embedding.

    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix.

    degrees_of_freedom : float
        Degrees of freedom of the Student's-t distribution.

    n_samples : int
        Number of samples.

    n_components : int
        Dimension of the embedded space.

    skip_num_points : int (optional, default:0)
        This does not compute the gradient for points with indices below
        `skip_num_points`. This is useful when computing transforms of new
        data where you'd like to keep the old data fixed.

    Returns
    -------
    kl_divergence : float
        Kullback-Leibler divergence of p_ij and q_ij.

    grad : array, shape (n_params,)
        Unraveled gradient of the Kullback-Leibler divergence with respect to
        the embedding.
    """
    X_embedded = params.reshape(n_samples, n_components)

    # Q is heavy-tailed: a Student's t kernel over condensed squared
    # Euclidean distances in the embedding, normalized and clamped.
    t_kernel = pdist(X_embedded, "sqeuclidean")
    t_kernel += 1.
    t_kernel /= degrees_of_freedom
    t_kernel **= (degrees_of_freedom + 1.0) / -2.0
    Q = np.maximum(t_kernel / (2.0 * np.sum(t_kernel)), MACHINE_EPSILON)

    # KL(P || Q); np.dot is used instead of np.sum(x * y) because it
    # dispatches to BLAS.
    kl_divergence = 2.0 * np.dot(P, np.log(P / Q))

    # Gradient dC/dY, computed one embedded point at a time.
    grad = np.ndarray((n_samples, n_components))
    PQd = squareform((P - Q) * t_kernel)
    for i in range(skip_num_points, n_samples):
        np.dot(_ravel(PQd[i]), X_embedded[i] - X_embedded, out=grad[i])
    grad = grad.ravel()
    grad *= 2.0 * (degrees_of_freedom + 1.0) / degrees_of_freedom
    return kl_divergence, grad
def _kl_divergence_error(params, P, neighbors, degrees_of_freedom, n_samples,
                         n_components):
    """t-SNE objective function: the absolute error of the
    KL divergence of p_ijs and q_ijs.

    Unlike _kl_divergence, this computes only the cost (no gradient); it is
    used as the cheap `objective_error` callback in gradient descent.

    Parameters
    ----------
    params : array, shape (n_params,)
        Unraveled embedding.

    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix.

    neighbors : array (n_samples, K)
        The neighbors is not actually required to calculate the
        divergence, but is here to match the signature of the
        gradient function

    degrees_of_freedom : float
        Degrees of freedom of the Student's-t distribution.

    n_samples : int
        Number of samples.

    n_components : int
        Dimension of the embedded space.

    Returns
    -------
    kl_divergence : float
        Kullback-Leibler divergence of p_ij and q_ij.
    """
    X_embedded = params.reshape(n_samples, n_components)
    # Q is a heavy-tailed distribution: Student's t-distribution
    n = pdist(X_embedded, "sqeuclidean")
    n += 1.
    n /= degrees_of_freedom
    n **= (degrees_of_freedom + 1.0) / -2.0
    Q = np.maximum(n / (2.0 * np.sum(n)), MACHINE_EPSILON)
    # Optimization trick below: np.dot(x, y) is faster than
    # np.sum(x * y) because it calls BLAS
    # Objective: C (Kullback-Leibler divergence of P and Q)
    # Accept either a square or an already-condensed P.
    if len(P.shape) == 2:
        P = squareform(P)
    kl_divergence = 2.0 * np.dot(P, np.log(P / Q))
    return kl_divergence
def _kl_divergence_bh(params, P, neighbors, degrees_of_freedom, n_samples,
                      n_components, angle=0.5, skip_num_points=0,
                      verbose=False):
    """t-SNE objective function: KL divergence of p_ijs and q_ijs.

    Uses Barnes-Hut tree methods to calculate the gradient that
    runs in O(NlogN) instead of O(N^2)

    Parameters
    ----------
    params : array, shape (n_params,)
        Unraveled embedding.

    P : array, shape (n_samples * (n_samples-1) / 2,)
        Condensed joint probability matrix.

    neighbors: int64 array, shape (n_samples, K)
        Array with element [i, j] giving the index for the jth
        closest neighbor to point i.

    degrees_of_freedom : float
        Degrees of freedom of the Student's-t distribution.

    n_samples : int
        Number of samples.

    n_components : int
        Dimension of the embedded space.

    angle : float (default: 0.5)
        This is the trade-off between speed and accuracy for Barnes-Hut T-SNE.
        'angle' is the angular size (referred to as theta in [3]) of a distant
        node as measured from a point. If this size is below 'angle' then it is
        used as a summary node of all points contained within it.
        This method is not very sensitive to changes in this parameter
        in the range of 0.2 - 0.8. Angle less than 0.2 has quickly increasing
        computation time and angle greater 0.8 has quickly increasing error.

    skip_num_points : int (optional, default:0)
        This does not compute the gradient for points with indices below
        `skip_num_points`. This is useful when computing transforms of new
        data where you'd like to keep the old data fixed.

    verbose : int
        Verbosity level.

    Returns
    -------
    kl_divergence : float
        Kullback-Leibler divergence of p_ij and q_ij.

    grad : array, shape (n_params,)
        Unraveled gradient of the Kullback-Leibler divergence with respect to
        the embedding.
    """
    # The Cython routine works on float32/int64 inputs.
    params = astype(params, np.float32, copy=False)
    X_embedded = params.reshape(n_samples, n_components)
    neighbors = astype(neighbors, np.int64, copy=False)
    # Pass a square float32 affinity matrix regardless of input layout.
    if len(P.shape) == 1:
        sP = squareform(P).astype(np.float32)
    else:
        sP = P.astype(np.float32)
    # `grad` is written in place by the extension; it returns the KL error.
    grad = np.zeros(X_embedded.shape, dtype=np.float32)
    error = _barnes_hut_tsne.gradient(sP, X_embedded, neighbors,
                                      grad, angle, n_components, verbose,
                                      dof=degrees_of_freedom)
    c = 2.0 * (degrees_of_freedom + 1.0) / degrees_of_freedom
    grad = grad.ravel()
    grad *= c
    return error, grad
def _gradient_descent(objective, p0, it, n_iter, objective_error=None,
n_iter_check=1, n_iter_without_progress=50,
momentum=0.5, learning_rate=1000.0, min_gain=0.01,
min_grad_norm=1e-7, min_error_diff=1e-7, verbose=0,
args=None, kwargs=None):
"""Batch gradient descent with momentum and individual gains.
Parameters
----------
objective : function or callable
Should return a tuple of cost and gradient for a given parameter
vector. When expensive to compute, the cost can optionally
be None and can be computed every n_iter_check steps using
the objective_error function.
p0 : array-like, shape (n_params,)
Initial parameter vector.
it : int
Current number of iterations (this function will be called more than
once during the optimization).
n_iter : int
Maximum number of gradient descent iterations.
n_iter_check : int
Number of iterations before evaluating the global error. If the error
is sufficiently low, we abort the optimization.
objective_error : function or callable
Should return a tuple of cost and gradient for a given parameter
vector.
n_iter_without_progress : int, optional (default: 30)
Maximum number of iterations without progress before we abort the
optimization.
momentum : float, within (0.0, 1.0), optional (default: 0.5)
The momentum generates a weight for previous gradients that decays
exponentially.
learning_rate : float, optional (default: 1000.0)
The learning rate should be extremely high for t-SNE! Values in the
range [100.0, 1000.0] are common.
min_gain : float, optional (default: 0.01)
Minimum individual gain for each parameter.
min_grad_norm : float, optional (default: 1e-7)
If the gradient norm is below this threshold, the optimization will
be aborted.
min_error_diff : float, optional (default: 1e-7)
If the absolute difference of two successive cost function values
is below this threshold, the optimization will be aborted.
verbose : int, optional (default: 0)
Verbosity level.
args : sequence
Arguments to pass to objective function.
kwargs : dict
Keyword arguments to pass to objective function.
Returns
-------
p : array, shape (n_params,)
Optimum parameters.
error : float
Optimum.
i : int
Last iteration.
"""
if args is None:
args = []
if kwargs is None:
kwargs = {}
p = p0.copy().ravel()
update = np.zeros_like(p)
gains = np.ones_like(p)
error = np.finfo(np.float).max
best_error = np.finfo(np.float).max
best_iter = 0
for i in range(it, n_iter):
new_error, grad = objective(p, *args, **kwargs)
grad_norm = linalg.norm(grad)
inc = update * grad >= 0.0
dec = np.invert(inc)
gains[inc] += 0.05
gains[dec] *= 0.95
np.clip(gains, min_gain, np.inf)
grad *= gains
update = momentum * update - learning_rate * grad
p += update
if (i + 1) % n_iter_check == 0:
if new_error is None:
new_error = objective_error(p, *args)
error_diff = np.abs(new_error - error)
error = new_error
if verbose >= 2:
m = "[t-SNE] Iteration %d: error = %.7f, gradient norm = %.7f"
print(m % (i + 1, error, grad_norm))
if error < best_error:
best_error = error
best_iter = i
elif i - best_iter > n_iter_without_progress:
if verbose >= 2:
print("[t-SNE] Iteration %d: did not make any progress "
"during the last %d episodes. Finished."
% (i + 1, n_iter_without_progress))
break
if grad_norm <= min_grad_norm:
if verbose >= 2:
print("[t-SNE] Iteration %d: gradient norm %f. Finished."
% (i + 1, grad_norm))
break
if error_diff <= min_error_diff:
if verbose >= 2:
m = "[t-SNE] Iteration %d: error difference %f. Finished."
print(m % (i + 1, error_diff))
break
if new_error is not None:
error = new_error
return p, error, i
def trustworthiness(X, X_embedded, n_neighbors=5, precomputed=False):
    """Expresses to what extent the local structure is retained.

    The trustworthiness is within [0, 1]. It penalizes points that are
    among the ``n_neighbors`` nearest neighbors in the embedded space but
    not in the original space, weighted by how far down the original-space
    ranking they sit.

    * "Neighborhood Preservation in Nonlinear Projection Methods: An
      Experimental Study"
      J. Venna, S. Kaski

    * "Learning a Parametric Embedding by Preserving Local Structure"
      L.J.P. van der Maaten

    Parameters
    ----------
    X : array, shape (n_samples, n_features) or (n_samples, n_samples)
        If the metric is 'precomputed' X must be a square distance
        matrix. Otherwise it contains a sample per row.

    X_embedded : array, shape (n_samples, n_components)
        Embedding of the training data in low-dimensional space.

    n_neighbors : int, optional (default: 5)
        Number of neighbors k that will be considered.

    precomputed : bool, optional (default: False)
        Set this flag if X is a precomputed square distance matrix.

    Returns
    -------
    trustworthiness : float
        Trustworthiness of the low-dimensional embedding.
    """
    dist_X = X if precomputed else pairwise_distances(X, squared=True)
    dist_embedded = pairwise_distances(X_embedded, squared=True)

    # Full neighbor ranking in the input space; k nearest neighbors in the
    # embedding (column 0 is each point itself, so it is dropped).
    ind_X = np.argsort(dist_X, axis=1)
    ind_embedded = np.argsort(dist_embedded, axis=1)[:, 1:n_neighbors + 1]

    n_samples = X.shape[0]
    penalty = 0.0
    for i in range(n_samples):
        for j in range(n_neighbors):
            # Original-space rank of the j-th embedded neighbor, shifted so
            # that only neighbors beyond the first k contribute.
            rank = np.where(ind_X[i] == ind_embedded[i, j])[0][0] - n_neighbors
            if rank > 0:
                penalty += rank
    return 1.0 - penalty * (2.0 / (n_samples * n_neighbors *
                                   (2.0 * n_samples - 3.0 * n_neighbors - 1.0)))
class TSNE(BaseEstimator):
    """t-distributed Stochastic Neighbor Embedding.

    t-SNE [1] is a tool to visualize high-dimensional data. It converts
    similarities between data points to joint probabilities and tries
    to minimize the Kullback-Leibler divergence between the joint
    probabilities of the low-dimensional embedding and the
    high-dimensional data. t-SNE has a cost function that is not convex,
    i.e. with different initializations we can get different results.

    It is highly recommended to use another dimensionality reduction
    method (e.g. PCA for dense data or TruncatedSVD for sparse data)
    to reduce the number of dimensions to a reasonable amount (e.g. 50)
    if the number of features is very high. This will suppress some
    noise and speed up the computation of pairwise distances between
    samples. For more tips see Laurens van der Maaten's FAQ [2].

    Read more in the :ref:`User Guide <t_sne>`.

    Parameters
    ----------
    n_components : int, optional (default: 2)
        Dimension of the embedded space.

    perplexity : float, optional (default: 30)
        The perplexity is related to the number of nearest neighbors that
        is used in other manifold learning algorithms. Larger datasets
        usually require a larger perplexity. Consider selecting a value
        between 5 and 50. The choice is not extremely critical since t-SNE
        is quite insensitive to this parameter.

    early_exaggeration : float, optional (default: 4.0)
        Controls how tight natural clusters in the original space are in
        the embedded space and how much space will be between them. For
        larger values, the space between natural clusters will be larger
        in the embedded space. Again, the choice of this parameter is not
        very critical. If the cost function increases during initial
        optimization, the early exaggeration factor or the learning rate
        might be too high.

    learning_rate : float, optional (default: 1000)
        The learning rate can be a critical parameter. It should be
        between 100 and 1000. If the cost function increases during initial
        optimization, the early exaggeration factor or the learning rate
        might be too high. If the cost function gets stuck in a bad local
        minimum increasing the learning rate helps sometimes.

    n_iter : int, optional (default: 1000)
        Maximum number of iterations for the optimization. Should be at
        least 200.

    n_iter_without_progress : int, optional (default: 30)
        Maximum number of iterations without progress before we abort the
        optimization.

        .. versionadded:: 0.17
           parameter *n_iter_without_progress* to control stopping criteria.

    min_grad_norm : float, optional (default: 1E-7)
        If the gradient norm is below this threshold, the optimization will
        be aborted.

    metric : string or callable, optional
        The metric to use when calculating distance between instances in a
        feature array. If metric is a string, it must be one of the options
        allowed by scipy.spatial.distance.pdist for its metric parameter, or
        a metric listed in pairwise.PAIRWISE_DISTANCE_FUNCTIONS.
        If metric is "precomputed", X is assumed to be a distance matrix.
        Alternatively, if metric is a callable function, it is called on each
        pair of instances (rows) and the resulting value recorded. The callable
        should take two arrays from X as input and return a value indicating
        the distance between them. The default is "euclidean" which is
        interpreted as squared euclidean distance.

    init : string or numpy array, optional (default: "random")
        Initialization of embedding. Possible options are 'random', 'pca',
        and a numpy array of shape (n_samples, n_components).
        PCA initialization cannot be used with precomputed distances and is
        usually more globally stable than random initialization.

    verbose : int, optional (default: 0)
        Verbosity level.

    random_state : int or RandomState instance or None (default)
        Pseudo Random Number generator seed control. If None, use the
        numpy.random singleton. Note that different initializations
        might result in different local minima of the cost function.

    method : string (default: 'barnes_hut')
        By default the gradient calculation algorithm uses Barnes-Hut
        approximation running in O(NlogN) time. method='exact'
        will run on the slower, but exact, algorithm in O(N^2) time. The
        exact algorithm should be used when nearest-neighbor errors need
        to be better than 3%. However, the exact method cannot scale to
        millions of examples.

        .. versionadded:: 0.17
           Approximate optimization *method* via the Barnes-Hut.

    angle : float (default: 0.5)
        Only used if method='barnes_hut'
        This is the trade-off between speed and accuracy for Barnes-Hut T-SNE.
        'angle' is the angular size (referred to as theta in [3]) of a distant
        node as measured from a point. If this size is below 'angle' then it is
        used as a summary node of all points contained within it.
        This method is not very sensitive to changes in this parameter
        in the range of 0.2 - 0.8. Angle less than 0.2 has quickly increasing
        computation time and angle greater 0.8 has quickly increasing error.

    Attributes
    ----------
    embedding_ : array-like, shape (n_samples, n_components)
        Stores the embedding vectors.

    Examples
    --------

    >>> import numpy as np
    >>> from sklearn.manifold import TSNE
    >>> X = np.array([[0, 0, 0], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
    >>> model = TSNE(n_components=2, random_state=0)
    >>> np.set_printoptions(suppress=True)
    >>> model.fit_transform(X) # doctest: +ELLIPSIS, +NORMALIZE_WHITESPACE
    array([[ 0.00017599,  0.00003993],
           [ 0.00009891,  0.00021913],
           [ 0.00018554, -0.00009357],
           [ 0.00009528, -0.00001407]])

    References
    ----------

    [1] van der Maaten, L.J.P.; Hinton, G.E. Visualizing High-Dimensional Data
        Using t-SNE. Journal of Machine Learning Research 9:2579-2605, 2008.

    [2] van der Maaten, L.J.P. t-Distributed Stochastic Neighbor Embedding
        http://homepage.tudelft.nl/19j49/t-SNE.html

    [3] L.J.P. van der Maaten. Accelerating t-SNE using Tree-Based Algorithms.
        Journal of Machine Learning Research 15(Oct):3221-3245, 2014.
        http://lvdmaaten.github.io/publications/papers/JMLR_2014.pdf
    """

    def __init__(self, n_components=2, perplexity=30.0,
                 early_exaggeration=4.0, learning_rate=1000.0, n_iter=1000,
                 n_iter_without_progress=30, min_grad_norm=1e-7,
                 metric="euclidean", init="random", verbose=0,
                 random_state=None, method='barnes_hut', angle=0.5):
        # BUG FIX: the original condition
        #   ``init not in ["pca", "random"] or isinstance(init, np.ndarray)``
        # rejected every ndarray init, although an array is a supported
        # initialization (see the ndarray branch in _fit). Accept 'pca',
        # 'random', or any NumPy array.
        if not isinstance(init, np.ndarray) and init not in ["pca", "random"]:
            msg = "'init' must be 'pca', 'random' or a NumPy array"
            raise ValueError(msg)
        self.n_components = n_components
        self.perplexity = perplexity
        self.early_exaggeration = early_exaggeration
        self.learning_rate = learning_rate
        self.n_iter = n_iter
        self.n_iter_without_progress = n_iter_without_progress
        self.min_grad_norm = min_grad_norm
        self.metric = metric
        self.init = init
        self.verbose = verbose
        self.random_state = random_state
        self.method = method
        self.angle = angle
        # Populated by fit/fit_transform.
        self.embedding_ = None

    def _fit(self, X, skip_num_points=0):
        """Fit the model using X as training data.

        Note that sparse arrays can only be handled by method='exact'.
        It is recommended that you convert your sparse array to dense
        (e.g. `X.toarray()`) if it fits in memory, or otherwise using a
        dimensionality reduction technique (e.g. TruncatedSVD).

        Parameters
        ----------
        X : array, shape (n_samples, n_features) or (n_samples, n_samples)
            If the metric is 'precomputed' X must be a square distance
            matrix. Otherwise it contains a sample per row. Note that this
            when method='barnes_hut', X cannot be a sparse array and if need be
            will be converted to a 32 bit float array. Method='exact' allows
            sparse arrays and 64bit floating point inputs.

        skip_num_points : int (optional, default:0)
            This does not compute the gradient for points with indices below
            `skip_num_points`. This is useful when computing transforms of new
            data where you'd like to keep the old data fixed.
        """
        if self.method not in ['barnes_hut', 'exact']:
            raise ValueError("'method' must be 'barnes_hut' or 'exact'")
        if self.angle < 0.0 or self.angle > 1.0:
            raise ValueError("'angle' must be between 0.0 - 1.0")
        if self.method == 'barnes_hut':
            if sp.issparse(X):
                raise TypeError('A sparse matrix was passed, but dense '
                                'data is required for method="barnes_hut". Use '
                                'X.toarray() to convert to a dense numpy array if '
                                'the array is small enough for it to fit in '
                                'memory. Otherwise consider dimensionality '
                                'reduction techniques (e.g. TruncatedSVD)')
            # BUG FIX: in the original this conversion sat after the raise in
            # the same branch and was unreachable, so dense Barnes-Hut input
            # was never validated or converted to float32.
            X = check_array(X, dtype=np.float32)
        else:
            X = check_array(X, accept_sparse=['csr', 'csc', 'coo'], dtype=np.float64)
        random_state = check_random_state(self.random_state)

        if self.early_exaggeration < 1.0:
            raise ValueError("early_exaggeration must be at least 1, but is "
                             "%f" % self.early_exaggeration)

        if self.n_iter < 200:
            raise ValueError("n_iter should be at least 200")

        if self.metric == "precomputed":
            if self.init == 'pca':
                raise ValueError("The parameter init=\"pca\" cannot be used "
                                 "with metric=\"precomputed\".")
            if X.shape[0] != X.shape[1]:
                raise ValueError("X should be a square distance matrix")
            distances = X
        else:
            if self.verbose:
                print("[t-SNE] Computing pairwise distances...")

            if self.metric == "euclidean":
                distances = pairwise_distances(X, metric=self.metric,
                                               squared=True)
            else:
                distances = pairwise_distances(X, metric=self.metric)

        if not np.all(distances >= 0):
            raise ValueError("All distances should be positive, either "
                             "the metric or precomputed distances given "
                             "as X are not correct")

        # Degrees of freedom of the Student's t-distribution. The suggestion
        # degrees_of_freedom = n_components - 1 comes from
        # "Learning a Parametric Embedding by Preserving Local Structure"
        # Laurens van der Maaten, 2009.
        degrees_of_freedom = max(self.n_components - 1.0, 1)
        n_samples = X.shape[0]
        # the number of nearest neighbors to find
        k = min(n_samples - 1, int(3. * self.perplexity + 1))

        neighbors_nn = None
        if self.method == 'barnes_hut':
            if self.verbose:
                print("[t-SNE] Computing %i nearest neighbors..." % k)
            if self.metric == 'precomputed':
                # Use the precomputed distances to find
                # the k nearest neighbors and their distances
                neighbors_nn = np.argsort(distances, axis=1)[:, :k]
            else:
                # Find the nearest neighbors for every point
                bt = BallTree(X)
                # LvdM uses 3 * perplexity as the number of neighbors
                # And we add one to not count the data point itself
                # In the event that we have very small # of points
                # set the neighbors to n - 1
                distances_nn, neighbors_nn = bt.query(X, k=k + 1)
                neighbors_nn = neighbors_nn[:, 1:]
            P = _joint_probabilities_nn(distances, neighbors_nn,
                                        self.perplexity, self.verbose)
        else:
            P = _joint_probabilities(distances, self.perplexity, self.verbose)

        assert np.all(np.isfinite(P)), "All probabilities should be finite"
        assert np.all(P >= 0), "All probabilities should be zero or positive"
        assert np.all(P <= 1), ("All probabilities should be less "
                                "or then equal to one")

        if self.init == 'pca':
            pca = RandomizedPCA(n_components=self.n_components,
                                random_state=random_state)
            X_embedded = pca.fit_transform(X)
        elif isinstance(self.init, np.ndarray):
            X_embedded = self.init
        elif self.init == 'random':
            X_embedded = None
        else:
            raise ValueError("Unsupported initialization scheme: %s"
                             % self.init)

        return self._tsne(P, degrees_of_freedom, n_samples, random_state,
                          X_embedded=X_embedded,
                          neighbors=neighbors_nn,
                          skip_num_points=skip_num_points)

    def _tsne(self, P, degrees_of_freedom, n_samples, random_state,
              X_embedded=None, neighbors=None, skip_num_points=0):
        """Runs t-SNE."""
        # t-SNE minimizes the Kullback-Leiber divergence of the Gaussians P
        # and the Student's t-distributions Q. The optimization algorithm that
        # we use is batch gradient descent with three stages:
        # * early exaggeration with momentum 0.5
        # * early exaggeration with momentum 0.8
        # * final optimization with momentum 0.8
        # The embedding is initialized with iid samples from Gaussians with
        # standard deviation 1e-4.

        if X_embedded is None:
            # Initialize embedding randomly
            X_embedded = 1e-4 * random_state.randn(n_samples,
                                                   self.n_components)
        params = X_embedded.ravel()

        opt_args = {"n_iter": 50, "momentum": 0.5, "it": 0,
                    "learning_rate": self.learning_rate,
                    "verbose": self.verbose, "n_iter_check": 25,
                    "kwargs": dict(skip_num_points=skip_num_points)}
        if self.method == 'barnes_hut':
            m = "Must provide an array of neighbors to use Barnes-Hut"
            assert neighbors is not None, m
            obj_func = _kl_divergence_bh
            objective_error = _kl_divergence_error
            sP = squareform(P).astype(np.float32)
            neighbors = neighbors.astype(np.int64)
            args = [sP, neighbors, degrees_of_freedom, n_samples,
                    self.n_components]
            opt_args['args'] = args
            # NOTE(review): these hard-coded values override the user-supplied
            # min_grad_norm / n_iter_without_progress for the Barnes-Hut path.
            opt_args['min_grad_norm'] = 1e-3
            opt_args['n_iter_without_progress'] = 30
            # Don't always calculate the cost since that calculation
            # can be nearly as expensive as the gradient
            opt_args['objective_error'] = objective_error
            opt_args['kwargs']['angle'] = self.angle
            opt_args['kwargs']['verbose'] = self.verbose
        else:
            obj_func = _kl_divergence
            opt_args['args'] = [P, degrees_of_freedom, n_samples,
                                self.n_components]
            opt_args['min_error_diff'] = 0.0
            opt_args['min_grad_norm'] = 0.0

        # Early exaggeration
        P *= self.early_exaggeration

        params, error, it = _gradient_descent(obj_func, params, **opt_args)
        opt_args['n_iter'] = 100
        opt_args['momentum'] = 0.8
        opt_args['it'] = it + 1
        params, error, it = _gradient_descent(obj_func, params, **opt_args)
        if self.verbose:
            print("[t-SNE] Error after %d iterations with early "
                  "exaggeration: %f" % (it + 1, error))
        # Save the final number of iterations
        self.n_iter_final = it

        # Final optimization
        P /= self.early_exaggeration
        opt_args['n_iter'] = self.n_iter
        opt_args['it'] = it + 1
        params, error, it = _gradient_descent(obj_func, params, **opt_args)

        if self.verbose:
            print("[t-SNE] Error after %d iterations: %f" % (it + 1, error))

        X_embedded = params.reshape(n_samples, self.n_components)
        return X_embedded

    def fit_transform(self, X, y=None):
        """Fit X into an embedded space and return that transformed
        output.

        Parameters
        ----------
        X : array, shape (n_samples, n_features) or (n_samples, n_samples)
            If the metric is 'precomputed' X must be a square distance
            matrix. Otherwise it contains a sample per row.

        Returns
        -------
        X_new : array, shape (n_samples, n_components)
            Embedding of the training data in low-dimensional space.
        """
        embedding = self._fit(X)
        self.embedding_ = embedding
        return self.embedding_

    def fit(self, X, y=None):
        """Fit X into an embedded space.

        Parameters
        ----------
        X : array, shape (n_samples, n_features) or (n_samples, n_samples)
            If the metric is 'precomputed' X must be a square distance
            matrix. Otherwise it contains a sample per row. If the method
            is 'exact', X may be a sparse matrix of type 'csr', 'csc'
            or 'coo'.
        """
        self.fit_transform(X)
        return self
|
bsd-3-clause
|
arbrandes/edx-configuration
|
util/docker_images.py
|
13
|
1144
|
import yaml
import os
import pathlib2
import itertools
import sys
# Travis CI exposes the clone directory; default to '' so local runs resolve
# the config path relative to the current working directory.
TRAVIS_BUILD_DIR = os.environ.get("TRAVIS_BUILD_DIR", "")
# YAML file holding per-container build weights.
CONFIG_FILE_PATH = pathlib2.Path(TRAVIS_BUILD_DIR, "util", "parsefiles_config.yml")
def get_used_images(images):
    """
    Returns the images and their ranks that are scheduled to be built and
    that exist in the configuration file.

    Input:
        images: A set of Docker images

    Output:
        A list of (image, weight) tuples for every weighted image that is
        also present in ``images``.
    """
    # open config file containing container weights
    config_file_path = pathlib2.Path(CONFIG_FILE_PATH)
    # Renamed from ``file`` to avoid shadowing the builtin.
    with config_file_path.open(mode='r') as config_file:
        try:
            # NOTE(review): yaml.load without an explicit Loader can execute
            # arbitrary tags; switch to yaml.safe_load if the config file
            # uses only plain YAML.
            config = yaml.load(config_file)
        except yaml.YAMLError as exc:
            # ``except E as exc`` replaces the Python-2-only ``except E, exc``
            # syntax and works on both Python 2.6+ and Python 3.
            # NOTE(review): LOGGER is not defined in this module, so this
            # error path would raise NameError -- TODO define/import LOGGER.
            LOGGER.error("error in configuration file: %s" % str(exc))
            sys.exit(1)
    # get container weights
    weights = config.get("weights")
    # convert all images in config file to a list of tuples (<image>, <weight>)
    weights_list = list(
        itertools.chain.from_iterable(x.items() for x in weights))
    # performs intersection between weighted images and input images
    return [x for x in weights_list if x[0] in images]
|
agpl-3.0
|
yatinkumbhare/openstack-nova
|
nova/api/openstack/compute/plugins/v3/extended_server_attributes.py
|
36
|
3357
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Extended Server Attributes API extension."""
from nova.api.openstack import api_version_request
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
# Alias under which this API extension is registered.
ALIAS = "os-extended-server-attributes"
# Soft policy check scoped to this extension's alias.
authorize = extensions.os_compute_soft_authorizer(ALIAS)
class ExtendedServerAttributesController(wsgi.Controller):
    """Injects OS-EXT-SRV-ATTR:* fields into server API representations."""

    def __init__(self, *args, **kwargs):
        super(ExtendedServerAttributesController, self).__init__(*args,
                                                                 **kwargs)
        # Microversion threshold past which the extra attributes appear.
        self.api_version_2_3 = api_version_request.APIVersionRequest('2.3')

    def _extend_server(self, context, server, instance, requested_version):
        prefix = "OS-EXT-SRV-ATTR:"
        server[prefix + "hypervisor_hostname"] = instance.node
        attrs = ['host', 'name']
        if requested_version >= self.api_version_2_3:
            attrs.extend(['reservation_id', 'launch_index',
                          'hostname', 'kernel_id', 'ramdisk_id',
                          'root_device_name', 'user_data'])
        for attr in attrs:
            # 'name' is exposed as "instance_name" rather than plain "name".
            suffix = 'instance_name' if attr == 'name' else attr
            server[prefix + suffix] = instance[attr]

    @wsgi.extends
    def show(self, req, resp_obj, id):
        context = req.environ['nova.context']
        if not authorize(context):
            return
        server = resp_obj.obj['server']
        # server['id'] is guaranteed to be in the cache due to
        # the core API adding it in its 'show' method.
        self._extend_server(context, server,
                            req.get_db_instance(server['id']),
                            req.api_version_request)

    @wsgi.extends
    def detail(self, req, resp_obj):
        context = req.environ['nova.context']
        if not authorize(context):
            return
        for server in list(resp_obj.obj['servers']):
            # server['id'] is guaranteed to be in the cache due to
            # the core API adding it in its 'detail' method.
            self._extend_server(context, server,
                                req.get_db_instance(server['id']),
                                req.api_version_request)
class ExtendedServerAttributes(extensions.V3APIExtensionBase):
    """Extended Server Attributes support."""

    name = "ExtendedServerAttributes"
    alias = ALIAS
    version = 1

    def get_controller_extensions(self):
        """Attach the attribute-extending controller to 'servers'."""
        return [extensions.ControllerExtension(
            self, 'servers', ExtendedServerAttributesController())]

    def get_resources(self):
        """This extension contributes no standalone resources."""
        return []
|
apache-2.0
|
tsaitsai/jasper-client
|
client/modules/Birthday.py
|
35
|
2013
|
# -*- coding: utf-8-*-
import datetime
import re
import facebook
from client.app_utils import getTimezone
WORDS = ["BIRTHDAY"]
def handle(text, mic, profile):
    """
    Responds to user-input, typically speech text, by listing the user's
    Facebook friends with birthdays today.

    Arguments:
        text -- user-input, typically transcribed speech
        mic -- used to interact with the user (for both input and output)
        profile -- contains information related to the user (e.g., phone
                   number)
    """
    oauth_access_token = profile['keys']["FB_TOKEN"]
    graph = facebook.GraphAPI(oauth_access_token)
    try:
        results = graph.request("me/friends",
                                args={'fields': 'id,name,birthday'})
    except facebook.GraphAPIError:
        mic.say("I have not been authorized to query your Facebook. If you " +
                "would like to check birthdays in the future, please visit " +
                "the Jasper dashboard.")
        return
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; any other API failure gets the apology below.
        mic.say(
            "I apologize, there's a problem with that service at the moment.")
        return
    # Today's date in the MM/DD form used by Facebook birthday strings.
    needle = datetime.datetime.now(tz=getTimezone(profile)).strftime("%m/%d")
    people = []
    for person in results['data']:
        try:
            if needle in person['birthday']:
                people.append(person['name'])
        except (KeyError, TypeError):
            # Friend has no (or a non-string) birthday field -- skip them.
            continue
    if people:
        if len(people) == 1:
            output = people[0] + " has a birthday today."
        else:
            output = "Your friends with birthdays today are " + \
                ", ".join(people[:-1]) + " and " + people[-1] + "."
    else:
        output = "None of your friends have birthdays today."
    mic.say(output)
def isValid(text):
    """
    Returns True if the input is related to birthdays.

    Arguments:
        text -- user-input, typically transcribed speech
    """
    return re.search(r'birthday', text, re.IGNORECASE) is not None
|
mit
|
fengbeihong/tempest_automate_ironic
|
tempest/services/volume/json/extensions_client.py
|
12
|
1107
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from tempest.common import service_client
class BaseExtensionsClientJSON(service_client.ServiceClient):
    """Shared JSON client for listing volume API extensions."""

    def list_extensions(self):
        """GET /extensions and return the parsed extension list."""
        resp, raw_body = self.get('extensions')
        parsed = json.loads(raw_body)
        self.expected_success(200, resp.status)
        return service_client.ResponseBodyList(resp, parsed['extensions'])
class ExtensionsClientJSON(BaseExtensionsClientJSON):
    """
    Volume V1 extensions client.
    """
    # Inherits list_extensions unchanged from the base client.
|
apache-2.0
|
Tesi-Luca-Davide/ryu
|
ryu/services/protocols/bgp/operator/views/fields.py
|
38
|
1875
|
import importlib
import inspect
class Field(object):
    """Describes a single attribute to expose on an operator view."""

    def __init__(self, field_name):
        # Name of the attribute this field reads off the wrapped object.
        self.field_name = field_name

    def get(self, obj):
        """Return the value of ``field_name`` on ``obj``."""
        return getattr(obj, self.field_name)
class RelatedViewField(Field):
    """A Field whose value gets wrapped in another operator view class.

    The view class may be supplied directly or as a dotted-path string
    that is resolved lazily on first access.
    """

    def __init__(self, field_name, operator_view_class):
        super(RelatedViewField, self).__init__(field_name)
        # Either a class object or a dotted "module.Class" string.
        self.__operator_view_class = operator_view_class

    @property
    def _operator_view_class(self):
        """Resolve and return the configured view class."""
        if inspect.isclass(self.__operator_view_class):
            return self.__operator_view_class
        elif isinstance(self.__operator_view_class, basestring):
            # NOTE: ``basestring`` exists only on Python 2; this module
            # predates a Python 3 port.
            try:
                module_name, class_name =\
                    self.__operator_view_class.rsplit('.', 1)
                return class_for_name(module_name, class_name)
            except (AttributeError, ValueError, ImportError):
                raise WrongOperatorViewClassError(
                    'There is no "%s" class' % self.__operator_view_class
                )
        # NOTE(review): any other type falls through and implicitly returns
        # None -- presumably unintended; confirm before relying on it.

    def retrieve_and_wrap(self, obj):
        """Fetch the related object from ``obj`` and wrap it in the view."""
        related_obj = self.get(obj)
        return self.wrap(related_obj)

    def wrap(self, obj):
        """Instantiate the configured view class around ``obj``."""
        return self._operator_view_class(obj)
class RelatedListViewField(RelatedViewField):
    """Marker subclass for related fields rendered as list views."""
    pass
class RelatedDictViewField(RelatedViewField):
    """Marker subclass for related fields rendered as dict views."""
    pass
class DataField(Field):
    """A Field whose value is returned as plain data (no view wrapping)."""
    pass
class OptionalDataField(DataField):
    """A DataField whose attribute may be absent; missing yields None."""

    def get(self, obj):
        # Same lookup as the base class, but tolerate a missing attribute.
        if not hasattr(obj, self.field_name):
            return None
        return getattr(obj, self.field_name)
class WrongOperatorViewClassError(Exception):
    """Raised when a dotted view-class path cannot be resolved."""
    pass
def class_for_name(module_name, class_name):
    """Resolve and return attribute ``class_name`` from ``module_name``.

    Raises ImportError when the module cannot be loaded and AttributeError
    when the attribute does not exist in it.
    """
    module = importlib.import_module(module_name)
    return getattr(module, class_name)
|
apache-2.0
|
zero-os/0-orchestrator
|
tests/0_orchestrator/test_suite/testcases/basic_tests/test04_storageclusters_apis.py
|
2
|
7791
|
from random import randint
from testcases.testcases_base import TestcasesBase
from nose.tools import with_setup
import time
class TestStorageclustersAPI(TestcasesBase):
    """End-to-end API tests for the /storageclusters endpoints.

    Every test except test003 gets a single-node storage cluster deployed
    in setUp (details kept in ``self.data``) and torn down afterwards;
    test003 provisions and removes its own multi-node cluster.
    """

    def setUp(self):
        super().setUp()
        # test003 deploys its own cluster, so skip the shared fixture.
        if self._testID != 'test003_deploy_new_storagecluster':
            nodes = [self.nodeid]
            self.number_of_free_disks, disk_type = self.get_max_available_free_disks(nodes)
            if not self.number_of_free_disks:
                self.skipTest(' [*] No free disks to create storagecluster')
            self.lg.info(' [*] Create storage cluster')
            self.response, self.data = self.storageclusters_api.post_storageclusters(
                nodes=nodes,
                driveType=disk_type,
                servers=randint(1, self.number_of_free_disks)
            )
            self.assertEqual(self.response.status_code, 201, " [*] Can't create new storagecluster %s." % self.response.content)

    def tearDown(self):
        if self._testID != 'test003_deploy_new_storagecluster':
            self.lg.info(' [*] Kill storage cluster (SC0)')
            self.storageclusters_api.delete_storageclusters_label(self.data['label'])
        # NOTE(review): py3-style super() in setUp vs the explicit form
        # here -- harmless but inconsistent.
        super(TestStorageclustersAPI, self).tearDown()

    def test001_get_storageclusters_label(self):
        """ GAT-041
        **Test Scenario:**
        #. Deploy new storage cluster (SC0)
        #. Get storage cluster (SC0), should succeed with 200
        #. Get nonexisting storage cluster (SC0), should fail with 404
        """
        self.lg.info(' [*] Get storage cluster (SC0), should succeed with 200')
        response = self.storageclusters_api.get_storageclusters_label(self.data['label'])
        self.assertEqual(response.status_code, 200)
        # Response must echo back the parameters the cluster was created with.
        for key in ['label', 'driveType', 'nodes', 'clusterType']:
            self.assertEqual(response.json()[key], self.data[key])
        self.assertEqual(response.json()['status'], 'ready')
        self.lg.info(' [*] Get nonexisting storage cluster (SC0), should fail with 404')
        response = self.storageclusters_api.get_storageclusters_label(self.rand_str())
        self.assertEqual(response.status_code, 404)

    def test002_list_storageclusters(self):
        """ GAT-042
        **Test Scenario:**
        #. Deploy new storage cluster (SC0)
        #. List storage clusters, should succeed with 200
        """
        self.lg.info(' [*] Get storage cluster (SC0), should succeed with 200')
        response = self.storageclusters_api.get_storageclusters()
        self.assertEqual(response.status_code, 200)
        self.assertIn(self.data['label'], response.json())

    def test003_deploy_new_storagecluster(self):
        """ GAT-043
        **Test Scenario:**
        #. Deploy new storage cluster (SC1), should succeed with 201
        #. List storage clusters, (SC1) should be listed
        #. Kill storage cluster (SC0), should succeed with 204
        """
        nodes = [x['id'] for x in self.nodes_info]
        number_of_free_disks, disk_type = self.get_max_available_free_disks(nodes)
        if not number_of_free_disks:
            self.skipTest('[*] No free disks to create storage cluster')
        if number_of_free_disks < len(nodes):
            # Fewer disks than nodes: one server per available disk.
            servers = number_of_free_disks
            nodes = nodes[:servers]
        else:
            # Round down to a multiple of the node count so servers
            # distribute evenly across nodes.
            servers = number_of_free_disks - (number_of_free_disks % len(nodes))
        self.lg.info(' [*] Deploy storagecluster with {} servers on {} nodes'.format(servers, len(nodes)))
        response, data = self.storageclusters_api.post_storageclusters(nodes=nodes, driveType=disk_type, servers=servers)
        self.assertEqual(response.status_code, 201)
        response = self.storageclusters_api.get_storageclusters_label(data['label'])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()['status'], 'ready')
        self.lg.info(' [*] Kill storage cluster (SC1), should succeed with 204')
        response = self.storageclusters_api.delete_storageclusters_label(data['label'])
        self.assertEqual(response.status_code, 204)

    def test004_kill_storagecluster_label(self):
        """ GAT-044
        **Test Scenario:**
        #. Kill storage cluster (SC0), should succeed with 204
        #. List storage clusters, (SC0) should be gone
        #. Kill nonexisting storage cluster, should fail with 204
        """
        self.lg.info(' [*] Kill storage cluster (SC0), should succeed with 204')
        response = self.storageclusters_api.delete_storageclusters_label(self.data['label'])
        self.assertEqual(response.status_code, 204)
        self.lg.info(' [*] List storage clusters, (SC0) should be gone')
        response = self.storageclusters_api.get_storageclusters()
        self.assertEqual(response.status_code, 200)
        self.assertNotIn(self.data['label'], response.json())
        # Deleting a nonexistent cluster is expected to be idempotent (204).
        self.lg.info(' [*] Kill nonexisting storage cluster, should fail with 204')
        response = self.storageclusters_api.delete_storageclusters_label(self.rand_str())
        self.assertEqual(response.status_code, 204)

    def test005_check_disks_wiped(self):
        """ GAT-147
        **Test Scenario:**
        #. Deploy new storage cluster (SC1), should succeed with 201
        #. Check the disks, should be mounted
        #. Kill storage cluster (SC1), should succeed with 204
        #. Make sure the disks are wiped, should succeed
        """
        self.lg.info(' [*] Check the disks, should be mounted')
        response = self.nodes_api.get_nodes_mounts(self.nodeid)
        # One mountpoint per deployed server is expected.
        mounted_disks_num = sum([1 for x in response.json() if self.data['label'] in x['mountpoint']])
        self.assertEqual(self.data['servers'], mounted_disks_num)
        self.lg.info(' [*] Kill storage cluster (SC1), should succeed with 204')
        response = self.storageclusters_api.delete_storageclusters_label(self.data['label'])
        self.assertEqual(response.status_code, 204)
        self.lg.info(' [*] Make sure the disks are wiped, should succeed')
        free_disks_num , disk_type = self.get_max_available_free_disks([self.nodeid])
        self.assertEqual(free_disks_num, self.number_of_free_disks)
        response = self.nodes_api.get_nodes_mounts(self.nodeid)
        mounted_disks_num = sum([1 for x in response.json() if self.data['label'] in x['mountpoint']])
        self.assertEqual(mounted_disks_num, 0)

    def test006_delete_storagecluster_with_vdiskstorage(self):
        """ GAT-154
        **Test Scenario:**
        #. Deploy new storage cluster (SC1), should succeed with 201.
        #. Create vdiskstorage (VS1) on storage cluster (SC1), should succeed.
        #. Kill storage cluster (SC0), should fail with 400 as it has vdiskstorage.
        #. Delete vdiskstorage (VS1), should succeed.
        #. Kill storage cluster (SC0), should succeed.
        """
        self.lg.info(' [*] Create vdiskstorage (VS1) on storage cluster (SC1)')
        response, vdiskstorage = self.vdisks_api.post_vdiskstorage(storagecluster=self.data['label'])
        self.assertEqual(response.status_code, 201)
        self.lg.info(' [*] Kill storage cluster (SC1), should fail with 400')
        response = self.storageclusters_api.delete_storageclusters_label(self.data['label'])
        self.assertEqual(response.status_code, 400, response.content)
        self.lg.info(' [*] Delete vdiskstorage (VS1), should succeed.')
        response = self.vdisks_api.delete_vdiskstorage(vdiskstorageid=vdiskstorage['id'])
        self.assertEqual(response.status_code, 204)
        self.lg.info(' [*] Kill storage cluster (SC0), should succeed')
        response = self.storageclusters_api.delete_storageclusters_label(self.data['label'])
        self.assertEqual(response.status_code, 204)
|
apache-2.0
|
marqueedev/django
|
tests/middleware_exceptions/tests.py
|
25
|
43526
|
import sys
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed
from django.core.signals import got_request_exception
from django.http import HttpResponse
from django.template import engines
from django.template.response import TemplateResponse
from django.test import RequestFactory, TestCase, override_settings
from django.test.utils import patch_logger
class TestException(Exception):
    """Marker exception raised by the deliberately-misbehaving middlewares."""
    pass
# A middleware base class that tracks which methods have been called
class TestMiddleware(object):
    """Middleware that records which of its hooks have been invoked."""

    def __init__(self):
        # One ``process_<hook>_called`` flag per hook, all initially False.
        for hook in ('request', 'view', 'response', 'template_response',
                     'exception'):
            setattr(self, 'process_%s_called' % hook, False)

    def process_request(self, request):
        self.process_request_called = True

    def process_view(self, request, view_func, view_args, view_kwargs):
        self.process_view_called = True

    def process_template_response(self, request, response):
        self.process_template_response_called = True
        return response

    def process_response(self, request, response):
        self.process_response_called = True
        return response

    def process_exception(self, request, exception):
        self.process_exception_called = True
# Middleware examples that do the right thing
class RequestMiddleware(TestMiddleware):
    """Well-behaved: short-circuits the pipeline with a request-phase response."""

    def process_request(self, request):
        super(RequestMiddleware, self).process_request(request)
        return HttpResponse('Request Middleware')
class ViewMiddleware(TestMiddleware):
    """Well-behaved: short-circuits the pipeline with a view-phase response."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        super(ViewMiddleware, self).process_view(request, view_func, view_args, view_kwargs)
        return HttpResponse('View Middleware')
class ResponseMiddleware(TestMiddleware):
    """Well-behaved: replaces the outgoing response with its own."""

    def process_response(self, request, response):
        super(ResponseMiddleware, self).process_response(request, response)
        return HttpResponse('Response Middleware')
class TemplateResponseMiddleware(TestMiddleware):
    """Well-behaved: swaps in a fresh TemplateResponse of its own."""

    def process_template_response(self, request, response):
        super(TemplateResponseMiddleware, self).process_template_response(request, response)
        template = engines['django'].from_string('Template Response Middleware')
        return TemplateResponse(request, template)
class ExceptionMiddleware(TestMiddleware):
    """Well-behaved: converts a raised exception into a response."""

    def process_exception(self, request, exception):
        super(ExceptionMiddleware, self).process_exception(request, exception)
        return HttpResponse('Exception Middleware')
# Sample middlewares that raise exceptions
class BadRequestMiddleware(TestMiddleware):
    """Misbehaving: raises TestException from process_request."""

    def process_request(self, request):
        super(BadRequestMiddleware, self).process_request(request)
        raise TestException('Test Request Exception')
class BadViewMiddleware(TestMiddleware):
    """Misbehaving: raises TestException from process_view."""

    def process_view(self, request, view_func, view_args, view_kwargs):
        super(BadViewMiddleware, self).process_view(request, view_func, view_args, view_kwargs)
        raise TestException('Test View Exception')
class BadTemplateResponseMiddleware(TestMiddleware):
    """Misbehaving: raises TestException from process_template_response."""

    def process_template_response(self, request, response):
        super(BadTemplateResponseMiddleware, self).process_template_response(request, response)
        raise TestException('Test Template Response Exception')
class BadResponseMiddleware(TestMiddleware):
    """Misbehaving: raises TestException from process_response."""

    def process_response(self, request, response):
        super(BadResponseMiddleware, self).process_response(request, response)
        raise TestException('Test Response Exception')
class BadExceptionMiddleware(TestMiddleware):
    """Misbehaving: raises TestException from process_exception."""

    def process_exception(self, request, exception):
        super(BadExceptionMiddleware, self).process_exception(request, exception)
        raise TestException('Test Exception Exception')
# Sample middlewares that fail to return an HttpResponse
class NoTemplateResponseMiddleware(TestMiddleware):
    """Contract violation: process_template_response implicitly returns None."""

    def process_template_response(self, request, response):
        super(NoTemplateResponseMiddleware, self).process_template_response(request, response)
class NoResponseMiddleware(TestMiddleware):
    """Contract violation: process_response implicitly returns None."""

    def process_response(self, request, response):
        super(NoResponseMiddleware, self).process_response(request, response)
@override_settings(ROOT_URLCONF='middleware_exceptions.urls')
class BaseMiddlewareExceptionTest(TestCase):
    """Shared plumbing for the middleware-exception tests.

    Collects exceptions reported via the ``got_request_exception`` signal
    and provides helpers to install middleware instances and assert which
    of their hooks ran.
    """

    def setUp(self):
        self.exceptions = []
        got_request_exception.connect(self._on_request_exception)
        self.client.handler.load_middleware()

    def tearDown(self):
        got_request_exception.disconnect(self._on_request_exception)
        self.exceptions = []

    def _on_request_exception(self, sender, request, **kwargs):
        # Record the full exc_info triple so assertions can inspect args.
        self.exceptions.append(sys.exc_info())

    def _add_middleware(self, middleware):
        # Request/view hooks are inserted at the front of their lists;
        # response-phase hooks are appended at the end.
        self.client.handler._request_middleware.insert(0, middleware.process_request)
        self.client.handler._view_middleware.insert(0, middleware.process_view)
        self.client.handler._template_response_middleware.append(middleware.process_template_response)
        self.client.handler._response_middleware.append(middleware.process_response)
        self.client.handler._exception_middleware.append(middleware.process_exception)

    def assert_exceptions_handled(self, url, errors, extra_error=None):
        """GET ``url`` and verify the signalled exceptions match ``errors``."""
        try:
            self.client.get(url)
        except TestException:
            # Test client intentionally re-raises any exceptions being raised
            # during request handling. Hence actual testing that exception was
            # properly handled is done by relying on got_request_exception
            # signal being sent.
            pass
        except Exception as e:
            # Only the exception type named by ``extra_error`` may escape.
            if type(extra_error) != type(e):
                self.fail("Unexpected exception: %s" % e)
        self.assertEqual(len(self.exceptions), len(errors))
        for i, error in enumerate(errors):
            exception, value, tb = self.exceptions[i]
            self.assertEqual(value.args, (error, ))

    def assert_middleware_usage(self, middleware, request, view, template_response, response, exception):
        """Each boolean states whether the corresponding hook should have run."""
        self.assertEqual(middleware.process_request_called, request)
        self.assertEqual(middleware.process_view_called, view)
        self.assertEqual(middleware.process_template_response_called, template_response)
        self.assertEqual(middleware.process_response_called, response)
        self.assertEqual(middleware.process_exception_called, exception)
class MiddlewareTests(BaseMiddlewareExceptionTest):
# --- /view/ URL: well-behaved middlewares against a normal view. ---
def test_process_request_middleware(self):
    """A response from process_request skips later request and all view hooks."""
    pre_middleware = TestMiddleware()
    middleware = RequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/view/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)

def test_process_view_middleware(self):
    """A response from process_view skips later view hooks."""
    pre_middleware = TestMiddleware()
    middleware = ViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/view/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)

def test_process_response_middleware(self):
    """Replacing the response in process_response still runs every response hook."""
    pre_middleware = TestMiddleware()
    middleware = ResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/view/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, True, False, True, False)

def test_process_template_response_middleware(self):
    """Template-response hooks run for a TemplateResponse-returning view."""
    pre_middleware = TestMiddleware()
    middleware = TemplateResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/template_response/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, True, True, False)
    self.assert_middleware_usage(middleware, True, True, True, True, False)
    self.assert_middleware_usage(post_middleware, True, True, True, True, False)

def test_process_exception_middleware(self):
    """With no exception raised, no process_exception hook fires."""
    pre_middleware = TestMiddleware()
    middleware = ExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/view/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, True, False, True, False)
# --- /not_found/ URL: same middlewares when the view raises Http404. ---
def test_process_request_middleware_not_found(self):
    """The request-phase response prevents the 404 view from running at all."""
    pre_middleware = TestMiddleware()
    middleware = RequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)

def test_process_view_middleware_not_found(self):
    """The view-phase response prevents the 404 view from running."""
    pre_middleware = TestMiddleware()
    middleware = ViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)

def test_process_template_response_middleware_not_found(self):
    """The 404 triggers every exception hook; template hooks never run."""
    pre_middleware = TestMiddleware()
    middleware = TemplateResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
    self.assert_middleware_usage(middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)

def test_process_response_middleware_not_found(self):
    """The 404 triggers every exception hook; response hooks still run."""
    pre_middleware = TestMiddleware()
    middleware = ResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
    self.assert_middleware_usage(middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)

def test_process_exception_middleware_not_found(self):
    """ExceptionMiddleware handles the 404; pre_middleware's hook is skipped."""
    pre_middleware = TestMiddleware()
    middleware = ExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/not_found/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
# --- /error/ URL: same middlewares when the view raises an exception. ---
def test_process_request_middleware_exception(self):
    """The request-phase response prevents the erroring view from running."""
    pre_middleware = TestMiddleware()
    middleware = RequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/error/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)

def test_process_view_middleware_exception(self):
    """The view-phase response prevents the erroring view from running."""
    pre_middleware = TestMiddleware()
    middleware = ViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/error/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)

def test_process_response_middleware_exception(self):
    """The unhandled view error is signalled and all exception hooks run."""
    pre_middleware = TestMiddleware()
    middleware = ResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/error/', ['Error in view'], Exception())
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, True)
    self.assert_middleware_usage(middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)

def test_process_exception_middleware_exception(self):
    """ExceptionMiddleware converts the view error; pre_middleware's hook is skipped."""
    pre_middleware = TestMiddleware()
    middleware = ExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/error/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
# --- /null_view/ URL: the view returns None instead of an HttpResponse. ---
def test_process_request_middleware_null_view(self):
    """The request-phase response prevents the broken view from running."""
    pre_middleware = TestMiddleware()
    middleware = RequestMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/null_view/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)

def test_process_view_middleware_null_view(self):
    """The view-phase response prevents the broken view from running."""
    pre_middleware = TestMiddleware()
    middleware = ViewMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/null_view/', [])
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)

def test_process_response_middleware_null_view(self):
    """The None-returning view surfaces as a ValueError about the missing response."""
    pre_middleware = TestMiddleware()
    middleware = ResponseMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
            "The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead.",
        ],
        ValueError())
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, True, False, True, False)

def test_process_exception_middleware_null_view(self):
    """Exception middleware does not intercept the None-return ValueError."""
    pre_middleware = TestMiddleware()
    middleware = ExceptionMiddleware()
    post_middleware = TestMiddleware()
    self._add_middleware(post_middleware)
    self._add_middleware(middleware)
    self._add_middleware(pre_middleware)
    self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
            "The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead."
        ],
        ValueError())
    # Check that the right middleware methods have been invoked
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, True, False, True, False)
def test_process_request_middleware_permission_denied(self):
    """RequestMiddleware combined with a PermissionDenied view."""
    pre_middleware = TestMiddleware()
    request_middleware = RequestMiddleware()
    post_middleware = TestMiddleware()
    # _add_middleware() prepends, so install in reverse of the desired order.
    for mw in (post_middleware, request_middleware, pre_middleware):
        self._add_middleware(mw)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
    # Check which hooks ran for each middleware.
    self.assert_middleware_usage(pre_middleware, True, False, False, True, False)
    self.assert_middleware_usage(request_middleware, True, False, False, True, False)
    self.assert_middleware_usage(post_middleware, False, False, False, True, False)
def test_process_view_middleware_permission_denied(self):
    """ViewMiddleware combined with a PermissionDenied view."""
    pre_middleware = TestMiddleware()
    view_middleware = ViewMiddleware()
    post_middleware = TestMiddleware()
    # _add_middleware() prepends, so install in reverse of the desired order.
    for mw in (post_middleware, view_middleware, pre_middleware):
        self._add_middleware(mw)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
    # Check which hooks ran for each middleware.
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(view_middleware, True, True, False, True, False)
    self.assert_middleware_usage(post_middleware, True, False, False, True, False)
def test_process_response_middleware_permission_denied(self):
    """ResponseMiddleware combined with a PermissionDenied view."""
    pre_middleware = TestMiddleware()
    response_middleware = ResponseMiddleware()
    post_middleware = TestMiddleware()
    # _add_middleware() prepends, so install in reverse of the desired order.
    for mw in (post_middleware, response_middleware, pre_middleware):
        self._add_middleware(mw)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
    # All three middleware report identical hook usage here.
    for mw in (pre_middleware, response_middleware, post_middleware):
        self.assert_middleware_usage(mw, True, True, False, True, True)
def test_process_exception_middleware_permission_denied(self):
    """ExceptionMiddleware combined with a PermissionDenied view."""
    pre_middleware = TestMiddleware()
    exception_middleware = ExceptionMiddleware()
    post_middleware = TestMiddleware()
    # _add_middleware() prepends, so install in reverse of the desired order.
    for mw in (post_middleware, exception_middleware, pre_middleware):
        self._add_middleware(mw)
    self.assert_exceptions_handled('/middleware_exceptions/permission_denied/', [])
    # Check which hooks ran for each middleware.
    self.assert_middleware_usage(pre_middleware, True, True, False, True, False)
    self.assert_middleware_usage(exception_middleware, True, True, False, True, True)
    self.assert_middleware_usage(post_middleware, True, True, False, True, True)
def test_process_template_response_error(self):
    """An error raised while rendering a TemplateResponse is handled."""
    tracking_middleware = TestMiddleware()
    self._add_middleware(tracking_middleware)
    self.assert_exceptions_handled(
        '/middleware_exceptions/template_response_error/', [])
    # Note the third flag: process_template_response was invoked too.
    self.assert_middleware_usage(
        tracking_middleware, True, True, True, True, False)
class BadMiddlewareTests(BaseMiddlewareExceptionTest):
    """
    Tests for middleware whose own hooks raise exceptions ("bad" middleware).

    Each test sandwiches the bad middleware between two well-behaved
    TestMiddleware instances so the hook usage of the middleware before and
    after it can be asserted independently. The repeated install/run/check
    boilerplate lives in _assert_bad_middleware().
    """

    def _assert_bad_middleware(self, bad_middleware, url, errors,
                               exceptions=None):
        """Install pre/bad/post middleware, request `url`, check `errors`.

        _add_middleware() prepends, so middleware are added in reverse of
        the desired order. Returns (pre_middleware, post_middleware) so the
        caller can assert which hooks ran on each middleware.
        """
        pre_middleware = TestMiddleware()
        post_middleware = TestMiddleware()
        self._add_middleware(post_middleware)
        self._add_middleware(bad_middleware)
        self._add_middleware(pre_middleware)
        # Only forward `exceptions` when the caller supplied one, to keep
        # the base helper's default-argument behaviour untouched.
        if exceptions is None:
            self.assert_exceptions_handled(url, errors)
        else:
            self.assert_exceptions_handled(url, errors, exceptions)
        return pre_middleware, post_middleware

    def test_process_request_bad_middleware(self):
        bad = BadRequestMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/view/', ['Test Request Exception'])
        # Check that the right middleware methods have been invoked
        self.assert_middleware_usage(pre, True, False, False, True, False)
        self.assert_middleware_usage(bad, True, False, False, True, False)
        self.assert_middleware_usage(post, False, False, False, True, False)

    def test_process_view_bad_middleware(self):
        bad = BadViewMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/view/', ['Test View Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, False, False, True, False)

    def test_process_template_response_bad_middleware(self):
        bad = BadTemplateResponseMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/template_response/',
            ['Test Template Response Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, True, True, False)
        self.assert_middleware_usage(post, True, True, True, True, False)

    def test_process_response_bad_middleware(self):
        bad = BadResponseMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/view/', ['Test Response Exception'])
        self.assert_middleware_usage(pre, True, True, False, False, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, True, False, True, False)

    def test_process_exception_bad_middleware(self):
        bad = BadExceptionMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/view/', [])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, True, False, True, False)

    def test_process_request_bad_middleware_not_found(self):
        bad = BadRequestMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/not_found/',
            ['Test Request Exception'])
        self.assert_middleware_usage(pre, True, False, False, True, False)
        self.assert_middleware_usage(bad, True, False, False, True, False)
        self.assert_middleware_usage(post, False, False, False, True, False)

    def test_process_view_bad_middleware_not_found(self):
        bad = BadViewMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/not_found/', ['Test View Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, False, False, True, False)

    def test_process_response_bad_middleware_not_found(self):
        bad = BadResponseMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/not_found/',
            ['Test Response Exception'])
        self.assert_middleware_usage(pre, True, True, False, False, True)
        self.assert_middleware_usage(bad, True, True, False, True, True)
        self.assert_middleware_usage(post, True, True, False, True, True)

    def test_process_exception_bad_middleware_not_found(self):
        bad = BadExceptionMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/not_found/',
            ['Test Exception Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, True)
        self.assert_middleware_usage(post, True, True, False, True, True)

    def test_process_request_bad_middleware_exception(self):
        bad = BadRequestMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/error/', ['Test Request Exception'])
        self.assert_middleware_usage(pre, True, False, False, True, False)
        self.assert_middleware_usage(bad, True, False, False, True, False)
        self.assert_middleware_usage(post, False, False, False, True, False)

    def test_process_view_bad_middleware_exception(self):
        bad = BadViewMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/error/', ['Test View Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, False, False, True, False)

    def test_process_response_bad_middleware_exception(self):
        bad = BadResponseMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/error/',
            ['Error in view', 'Test Response Exception'])
        self.assert_middleware_usage(pre, True, True, False, False, True)
        self.assert_middleware_usage(bad, True, True, False, True, True)
        self.assert_middleware_usage(post, True, True, False, True, True)

    def test_process_exception_bad_middleware_exception(self):
        bad = BadExceptionMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/error/', ['Test Exception Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, True)
        self.assert_middleware_usage(post, True, True, False, True, True)

    def test_process_request_bad_middleware_null_view(self):
        bad = BadRequestMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/null_view/',
            ['Test Request Exception'])
        self.assert_middleware_usage(pre, True, False, False, True, False)
        self.assert_middleware_usage(bad, True, False, False, True, False)
        self.assert_middleware_usage(post, False, False, False, True, False)

    def test_process_view_bad_middleware_null_view(self):
        bad = BadViewMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/null_view/', ['Test View Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, False, False, True, False)

    def test_process_response_bad_middleware_null_view(self):
        bad = BadResponseMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/null_view/', [
                "The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead.",
                'Test Response Exception'
            ])
        self.assert_middleware_usage(pre, True, True, False, False, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, True, False, True, False)

    def test_process_exception_bad_middleware_null_view(self):
        bad = BadExceptionMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/null_view/', [
                "The view middleware_exceptions.views.null_view didn't return an HttpResponse object. It returned None instead."
            ],
            ValueError())
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, True, False, True, False)

    def test_process_request_bad_middleware_permission_denied(self):
        bad = BadRequestMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/permission_denied/',
            ['Test Request Exception'])
        self.assert_middleware_usage(pre, True, False, False, True, False)
        self.assert_middleware_usage(bad, True, False, False, True, False)
        self.assert_middleware_usage(post, False, False, False, True, False)

    def test_process_view_bad_middleware_permission_denied(self):
        bad = BadViewMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/permission_denied/',
            ['Test View Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, False, False, True, False)

    def test_process_response_bad_middleware_permission_denied(self):
        bad = BadResponseMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/permission_denied/',
            ['Test Response Exception'])
        self.assert_middleware_usage(pre, True, True, False, False, True)
        self.assert_middleware_usage(bad, True, True, False, True, True)
        self.assert_middleware_usage(post, True, True, False, True, True)

    def test_process_exception_bad_middleware_permission_denied(self):
        bad = BadExceptionMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/permission_denied/',
            ['Test Exception Exception'])
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, False, True, True)
        self.assert_middleware_usage(post, True, True, False, True, True)

    def test_process_response_no_response_middleware(self):
        bad = NoResponseMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/view/', [
                "NoResponseMiddleware.process_response didn't return an HttpResponse object. It returned None instead."
            ],
            ValueError())
        self.assert_middleware_usage(pre, True, True, False, False, False)
        self.assert_middleware_usage(bad, True, True, False, True, False)
        self.assert_middleware_usage(post, True, True, False, True, False)

    def test_process_template_response_no_response_middleware(self):
        bad = NoTemplateResponseMiddleware()
        pre, post = self._assert_bad_middleware(
            bad, '/middleware_exceptions/template_response/', [
                "NoTemplateResponseMiddleware.process_template_response didn't return an HttpResponse object. It returned None instead."
            ],
            ValueError())
        self.assert_middleware_usage(pre, True, True, False, True, False)
        self.assert_middleware_usage(bad, True, True, True, True, False)
        self.assert_middleware_usage(post, True, True, True, True, False)
# Module-level sentinel; presumably used to distinguish "value not set" from
# an explicit None — confirm against its uses further down the file.
_missing = object()
@override_settings(ROOT_URLCONF='middleware_exceptions.urls')
class RootUrlconfTests(TestCase):
    """Behaviour of request handling when ROOT_URLCONF is absent."""

    @override_settings(ROOT_URLCONF=None)
    def test_missing_root_urlconf(self):
        # override_settings restores the previous value afterwards, so
        # deleting the setting here is safe.
        del settings.ROOT_URLCONF
        with self.assertRaises(AttributeError):
            self.client.get("/middleware_exceptions/view/")
class MyMiddleware(object):
    """Middleware that opts out of the stack by raising MiddlewareNotUsed."""

    def __init__(self):
        raise MiddlewareNotUsed()

    def process_request(self, request):
        return None
class MyMiddlewareWithExceptionMessage(object):
    """Like MyMiddleware, but raises MiddlewareNotUsed with a message."""

    def __init__(self):
        raise MiddlewareNotUsed('spam eggs')

    def process_request(self, request):
        return None
@override_settings(
    DEBUG=True,
    ROOT_URLCONF='middleware_exceptions.urls',
)
class MiddlewareNotUsedTests(TestCase):
    """Logging behaviour when a middleware raises MiddlewareNotUsed."""

    rf = RequestFactory()

    def test_raise_exception(self):
        fake_request = self.rf.get('middleware_exceptions/view/')
        with self.assertRaises(MiddlewareNotUsed):
            MyMiddleware().process_request(fake_request)

    @override_settings(MIDDLEWARE_CLASSES=[
        'middleware_exceptions.tests.MyMiddleware',
    ])
    def test_log(self):
        # With DEBUG=True a single debug message is logged for the
        # unused middleware.
        with patch_logger('django.request', 'debug') as debug_calls:
            self.client.get('/middleware_exceptions/view/')
            self.assertEqual(
                debug_calls,
                ["MiddlewareNotUsed: 'middleware_exceptions.tests.MyMiddleware'"],
            )

    @override_settings(MIDDLEWARE_CLASSES=[
        'middleware_exceptions.tests.MyMiddlewareWithExceptionMessage',
    ])
    def test_log_custom_message(self):
        # The exception's message is appended to the logged line.
        with patch_logger('django.request', 'debug') as debug_calls:
            self.client.get('/middleware_exceptions/view/')
            self.assertEqual(
                debug_calls,
                ["MiddlewareNotUsed('middleware_exceptions.tests.MyMiddlewareWithExceptionMessage'): spam eggs"],
            )

    @override_settings(DEBUG=False)
    def test_do_not_log_when_debug_is_false(self):
        with patch_logger('django.request', 'debug') as debug_calls:
            self.client.get('/middleware_exceptions/view/')
            self.assertEqual(debug_calls, [])
|
bsd-3-clause
|
heenbo/mosquitto-heenbo
|
test/broker/08-ssl-connect-no-identity.py
|
7
|
1376
|
#!/usr/bin/env python
# Client connects without a certificate to a server that has use_identity_as_username=true. Should be rejected.
import subprocess
import socket
import ssl
import sys
import time

# Python 2.6's ssl module lacks features this test relies on, so skip there.
if sys.version < '2.7':
    print("WARNING: SSL not supported on Python 2.6")
    exit(0)

import inspect, os, sys

# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
# Make the parent test directory importable so mosq_test can be found.
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
    sys.path.insert(0, cmd_subfolder)

import mosq_test

rc = 1  # Exit status; cleared to 0 only when the expected CONNACK arrives.
keepalive = 10
connect_packet = mosq_test.gen_connect("connect-no-identity-test", keepalive=keepalive)
# CONNACK return code 4 is "bad user name or password" in MQTT — the expected
# rejection for a client that presents no certificate/identity.
connack_packet = mosq_test.gen_connack(rc=4)

broker = mosq_test.start_broker(filename=os.path.basename(__file__), port=1889)

try:
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # TLS with server verification only — deliberately no client certificate.
    ssock = ssl.wrap_socket(sock, ca_certs="../ssl/test-root-ca.crt", cert_reqs=ssl.CERT_REQUIRED)
    ssock.settimeout(20)
    ssock.connect(("localhost", 1888))
    ssock.send(connect_packet)

    if mosq_test.expect_packet(ssock, "connack", connack_packet):
        rc = 0

    ssock.close()
finally:
    # Give the broker a moment, then shut it down; dump stderr on failure.
    time.sleep(2)
    broker.terminate()
    broker.wait()
    if rc:
        (stdo, stde) = broker.communicate()
        print(stde)

exit(rc)
|
gpl-3.0
|
cloudpbl-senrigan/combinator
|
tools/gyp/pylib/gyp/msvs_emulation.py
|
3
|
46622
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
# Matches a (possibly empty) run of backslashes followed by a double quote.
windows_quoter_regex = re.compile(r'(\\*)"')


def QuoteForRspFile(arg):
  """Quote a command line argument so that it appears as one argument when
  processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
  Windows programs)."""
  # These are CommandLineToArgvW's rules, not the shell's: a literal quote
  # needs 2n+1 preceding backslashes to yield n backslashes plus the quote,
  # so double the matched run of backslashes and add a backslash-quote.
  # (See http://goo.gl/cuFbX and http://goo.gl/dhPnp for the gory details.)
  quoted = windows_quoter_regex.sub(lambda match: match.group(1) * 2 + '\\"',
                                    arg)
  # Double %'s so they aren't taken as batch positional arguments; since
  # these strings go into rsp files, no shell (^) escaping is needed.
  quoted = quoted.replace('%', '%%')
  # Wrap the result in quotes so the rule above applies and whitespace
  # isn't a word break.
  return '"%s"' % quoted
def EncodeRspFileList(args):
  """Process a list of arguments using QuoteCmdExeArgument."""
  # The first argument is assumed to be the command; it is left unquoted so
  # built-ins like 'echo' keep working. For a 'call ../x.bat' prefix, only
  # the path part is normpath'd — otherwise the whole string would be
  # misinterpreted as a single path and normalized incorrectly.
  if not args:
    return ''
  first = args[0]
  if first.startswith('call '):
    call_word, script_path = first.split(' ', 1)
    program = call_word + ' ' + os.path.normpath(script_path)
  else:
    program = os.path.normpath(first)
  quoted_rest = [QuoteForRspFile(arg) for arg in args[1:]]
  return program + ' ' + ' '.join(quoted_rest)
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
def _FindDirectXInstallation():
  """Try to find an installation location for the DirectX SDK. Check for the
  standard environment variable, and if that doesn't exist, try to find
  via the registry. May return None if not found in either location."""
  # Return previously calculated value, if there is one
  # (the result — including None — is memoized as an attribute on the
  # function object itself).
  if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
    return _FindDirectXInstallation.dxsdk_dir

  dxsdk_dir = os.environ.get('DXSDK_DIR')
  if not dxsdk_dir:
    # Setup params to pass to and attempt to launch reg.exe.
    cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    for line in p.communicate()[0].splitlines():
      if 'InstallPath' in line:
        # Takes the 4th space-separated token; assumes reg.exe's
        # "InstallPath    REG_SZ    <path>" output layout — confirm the
        # column index if reg.exe's formatting ever changes.
        dxsdk_dir = line.split(' ')[3] + "\\"

  # Cache return value
  _FindDirectXInstallation.dxsdk_dir = dxsdk_dir
  return dxsdk_dir
def GetGlobalVSMacroEnv(vs_version):
  """Get a dict of variables mapping internal VS macro names to their gyp
  equivalents. Returns all variables that are independent of the target."""
  env = {}
  # '$(VSInstallDir)' and '$(VCInstallDir)' exist when and only when Visual
  # Studio is actually installed.
  install_path = vs_version.Path()
  if install_path:
    env['$(VSInstallDir)'] = install_path
    env['$(VCInstallDir)'] = os.path.join(install_path, 'VC') + '\\'
  # DXSDK_DIR may or may not be set (e.g. when the SDK is sync'd via
  # src-internal rather than installed); strip the macro rather than leave
  # it unexpanded in paths.
  dxsdk_dir = _FindDirectXInstallation()
  env['$(DXSDK_DIR)'] = dxsdk_dir or ''
  # Windows DDK location from the WDK_DIR environment variable, if set.
  env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
  return env
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
  """Finds msvs_system_include_dirs that are common to all targets, removes
  them from all targets, and returns an OrderedSet containing them."""
  shared_includes = OrderedSet(configs[0].get('msvs_system_include_dirs', []))
  for cfg in configs[1:]:
    cfg_includes = cfg.get('msvs_system_include_dirs', [])
    shared_includes = shared_includes & OrderedSet(cfg_includes)
  if not shared_includes:
    return None
  # Expand macros in the shared entries.
  macro_env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
  expanded = OrderedSet([ExpandMacros(include, macro_env)
                         for include in shared_includes])
  if any('$' in include for include in expanded):
    # Some path relies on target-specific variables, bail.
    return None
  # Strip the shared entries from each target, matching on the unexpanded
  # form (that is what the configs actually contain).
  for cfg in configs:
    includes = cfg.get('msvs_system_include_dirs', [])
    if includes:  # Don't insert a msvs_system_include_dirs key if not needed.
      cfg['msvs_system_include_dirs'] = [
          i for i in includes if i not in shared_includes]
  return expanded
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
msvs_settings field). They largely correpond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
  """Build per-configuration views of the target's msvs_* settings.

  Args:
    spec: the gyp target dict.
    generator_flags: generator flags dict; used to select the VS version.
  Raises:
    Exception: if the target uses msvs_prebuild/msvs_postbuild, which are
      not supported here.
  """
  self.spec = spec
  self.vs_version = GetVSVersion(generator_flags)

  # Each supported field becomes an attribute mapping configuration name to
  # that configuration's value (or the field type's empty default).
  supported_fields = [
      ('msvs_configuration_attributes', dict),
      ('msvs_settings', dict),
      ('msvs_system_include_dirs', list),
      ('msvs_disabled_warnings', list),
      ('msvs_precompiled_header', str),
      ('msvs_precompiled_source', str),
      ('msvs_configuration_platform', str),
      ('msvs_target_platform', str),
  ]
  configs = spec['configurations']
  for field, default in supported_fields:
    setattr(self, field, {})
    # .items() instead of .iteritems(): identical under Python 2 and also
    # works under Python 3.
    for configname, config in configs.items():
      getattr(self, field)[configname] = config.get(field, default())

  self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])

  # Fail loudly on fields this emulation layer does not implement.
  unsupported_fields = [
      'msvs_prebuild',
      'msvs_postbuild',
  ]
  unsupported = []
  for field in unsupported_fields:
    for config in configs.values():
      if field in config:
        unsupported += ["%s not supported (target %s)." %
                        (field, spec['target_name'])]
  if unsupported:
    raise Exception('\n'.join(unsupported))
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get('product_extension', None)
if ext:
return ext
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
def GetVSMacroEnv(self, base_to_build=None, config=None):
  """Get a dict of variables mapping internal VS macro names to their gyp
  equivalents."""
  target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
  product_name = self.spec.get('product_name', self.spec['target_name'])
  target_name = self.spec.get('product_prefix', '') + product_name
  target_dir = base_to_build + '\\' if base_to_build else ''
  target_ext = '.' + self.GetExtension()
  target_file_name = target_name + target_ext

  replacements = {
      '$(InputName)': '${root}',
      '$(InputPath)': '${source}',
      '$(IntDir)': '$!INTERMEDIATE_DIR',
      '$(OutDir)\\': target_dir,
      '$(PlatformName)': target_platform,
      '$(ProjectDir)\\': '',
      '$(ProjectName)': self.spec['target_name'],
      '$(TargetDir)\\': target_dir,
      '$(TargetExt)': target_ext,
      '$(TargetFileName)': target_file_name,
      '$(TargetName)': target_name,
      '$(TargetPath)': os.path.join(target_dir, target_file_name),
  }
  # Add the target-independent macros ($(VSInstallDir), $(DXSDK_DIR), ...).
  replacements.update(GetGlobalVSMacroEnv(self.vs_version))
  return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
  """Convert from VS macro names to something equivalent."""
  return ExpandMacros(s, self.GetVSMacroEnv(base_to_build, config=config))
def AdjustLibraries(self, libraries):
  """Strip -l from library if it's specified with that."""
  adjusted = []
  for lib in libraries:
    name = lib[2:] if lib.startswith('-l') else lib
    # Ensure every entry carries the .lib suffix exactly once.
    if not name.endswith('.lib'):
      name += '.lib'
    adjusted.append(name)
  return adjusted
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper(object):
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix='', default=None):
return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info == 'true':
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
else:
return None
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
asmflags = []
safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
if safeseh == 'true':
asmflags.append('/safeseh')
return asmflags
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
cflags = []
cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
cl('DisableSpecificWarnings', prefix='/wd')
cl('StringPooling', map={'true': '/GF'})
cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
cl('WholeProgramOptimization', map={'true': '/GL'})
cl('WarningLevel', prefix='/W')
cl('WarnAsError', map={'true': '/WX'})
cl('CallingConvention',
map={'0': 'd', '1': 'r', '2': 'z'}, prefix='/G')
cl('DebugInformationFormat',
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
cl('MinimalRebuild', map={'true': '/Gm'})
cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
cl('RuntimeLibrary',
map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
cl('DefaultCharIsUnsigned', map={'true': '/J'})
cl('TreatWChar_tAsBuiltInType',
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
cl('EnableEnhancedInstructionSet',
map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32'}, prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
  def _GetPchFlags(self, config, extension):
    """Get the flags to be added to the cflags for precompiled header support.
    Returns ['/Yu...', '/FI...', '/Fp...'] when a precompiled header is
    configured for this config and |extension| matches the pch source's
    language; otherwise returns [].
    """
    config = self._TargetConfig(config)
    # The PCH is only built once by a particular source file. Usage of PCH must
    # only be for the same language (i.e. C vs. C++), so only include the pch
    # flags when the language matches.
    if self.msvs_precompiled_header[config]:
      source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
      if _LanguageMatchesForPch(source_ext, extension):
        pch = os.path.split(self.msvs_precompiled_header[config])[1]
        return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
    return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[])
libpaths = [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
  def GetLibFlags(self, config, gyp_to_build_path):
    """Returns the flags that need to be added to lib commands.
    Translates VCLibrarianTool settings; the call order below fixes the
    order of the flags on the lib.exe command line.
    """
    config = self._TargetConfig(config)
    libflags = []
    lib = self._GetWrapper(self, self.msvs_settings[config],
                           'VCLibrarianTool', append=libflags)
    # Library search paths come first.
    libflags.extend(self._GetAdditionalLibraryDirectories(
        'VCLibrarianTool', config, gyp_to_build_path))
    lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
    lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
        prefix='/MACHINE:')
    # Raw user-specified options are appended last so they can override.
    lib('AdditionalOptions')
    return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, output_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
ld('TreatLinkerWarningAsErrors', prefix='/WX',
map={'true': '', 'false': ':NO'})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
if pdb:
ldflags.append('/PDB:' + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append('/PGD:' + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
minimum_required_version = self._Setting(
('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
if minimum_required_version:
minimum_required_version = ',' + minimum_required_version
ld('SubSystem',
map={'1': 'CONSOLE%s' % minimum_required_version,
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
ld('RandomizedBaseAddress',
map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
ld('DataExecutionPrevention',
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('ForceSymbolReferences', prefix='/INCLUDE:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
'4': ':PGUPDATE'},
prefix='/LTCG')
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:')
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
ld('ImageHasSafeExceptionHandlers', map={'true': '/SAFESEH'})
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
ldflags)
if not base_flags:
ldflags.append('/DYNAMICBASE')
# If the NXCOMPAT flag has not been specified, default to on. Despite the
# documentation that says this only defaults to on when the subsystem is
# Vista or greater (which applies to the linker), the IDE defaults it on
# unless it's explicitly off.
if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, intermediate_manifest, manifest_files = \
self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags)
return ldflags, intermediate_manifest, manifest_files
  def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
                          allow_isolation, build_dir):
    """Returns a 3-tuple:
    - the set of flags that need to be added to the link to generate
      a default manifest
    - the intermediate manifest that the linker will generate that should be
      used to assert it doesn't add anything to the merged one.
    - the list of all the manifest files to be merged by the manifest tool and
      included into the link."""
    # NOTE(review): this first lookup uses the untranslated |config|; the
    # _TargetConfig remap happens further down — confirm that is intentional.
    generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
                                      config,
                                      default='true')
    if generate_manifest != 'true':
      # This means not only that the linker should not generate the intermediate
      # manifest but also that the manifest tool should do nothing even when
      # additional manifests are specified.
      return ['/MANIFEST:NO'], [], []
    output_name = name + '.intermediate.manifest'
    flags = [
      '/MANIFEST',
      '/ManifestFile:' + output_name,
    ]
    # Instead of using the MANIFESTUAC flags, we generate a .manifest to
    # include into the list of manifests. This allows us to avoid the need to
    # do two passes during linking. The /MANIFEST flag and /ManifestFile are
    # still used, and the intermediate manifest is used to assert that the
    # final manifest we get from merging all the additional manifest files
    # (plus the one we generate here) isn't modified by merging the
    # intermediate into it.
    # Always NO, because we generate a manifest file that has what we want.
    flags.append('/MANIFESTUAC:NO')
    config = self._TargetConfig(config)
    enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
                               default='true')
    manifest_files = []
    # Skeleton of the generated manifest; %s is filled with the optional
    # trustInfo block below.
    generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
    if enable_uac == 'true':
      execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
                                      config, default='0')
      execution_level_map = {
        '0': 'asInvoker',
        '1': 'highestAvailable',
        '2': 'requireAdministrator'
      }
      ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
                                default='false')
      inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
  <security>
    <requestedPrivileges>
      <requestedExecutionLevel level='%s' uiAccess='%s' />
    </requestedPrivileges>
  </security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
    else:
      inner = ''
    generated_manifest_contents = generated_manifest_outer % inner
    generated_name = name + '.generated.manifest'
    # Need to join with the build_dir here as we're writing it during
    # generation time, but we return the un-joined version because the build
    # will occur in that directory. We only write the file if the contents
    # have changed so that simply regenerating the project files doesn't
    # cause a relink.
    build_dir_generated_name = os.path.join(build_dir, generated_name)
    gyp.common.EnsureDirExists(build_dir_generated_name)
    f = gyp.common.WriteOnDiff(build_dir_generated_name)
    f.write(generated_manifest_contents)
    f.close()
    manifest_files = [generated_name]
    if allow_isolation:
      flags.append('/ALLOWISOLATION')
    manifest_files += self._GetAdditionalManifestFiles(config,
                                                       gyp_to_build_path)
    return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
  def GetRcflags(self, config, gyp_to_ninja_path):
    """Returns the flags that need to be added to invocations of the resource
    compiler (rc.exe)."""
    config = self._TargetConfig(config)
    rcflags = []
    rc = self._GetWrapper(self, self.msvs_settings[config],
        'VCResourceCompilerTool', append=rcflags)
    rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
    # The gyp base dir is always searched for includes.
    rcflags.append('/I' + gyp_to_ninja_path('.'))
    rc('PreprocessorDefinitions', prefix='/d')
    # /l arg must be in hex without leading '0x'
    rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
    return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
cd = ('cd %s' % path_to_base).replace('\\', '/')
args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = ' '.join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
'bash -c "%s ; %s"' % (cd, bash_cmd))
return cmd
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
unset, or set to 1 we use cygwin."""
return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def _HasExplicitIdlActions(self, spec):
"""Determine if an action should not run midl for .idl files."""
return any([action.get('explicit_idl_action', 0)
for action in spec.get('actions', [])])
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
there isn't we need to generate implicit rules to build MIDL .idl files."""
return (self._HasExplicitRuleForExtension(spec, 'idl') or
self._HasExplicitIdlActions(spec))
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
  def GetIdlBuildData(self, source, config):
    """Determine the implicit outputs for an idl file. Returns output
    directory, outputs, and variables and flags that are required."""
    config = self._TargetConfig(config)
    midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
    def midl(name, default=None):
      # Fetch a MIDL setting and expand any VS macros it contains.
      return self.ConvertVSMacros(midl_get(name, default=default),
                                  config=config)
    tlb = midl('TypeLibraryName', default='${root}.tlb')
    header = midl('HeaderFileName', default='${root}.h')
    dlldata = midl('DLLDataFileName', default='dlldata.c')
    iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
    proxy = midl('ProxyFileName', default='${root}_p.c')
    # Note that .tlb is not included in the outputs as it is not always
    # generated depending on the content of the input idl file.
    outdir = midl('OutputDirectory', default='')
    output = [header, dlldata, iid, proxy]
    variables = [('tlb', tlb),
                 ('h', header),
                 ('dlldata', dlldata),
                 ('iid', iid),
                 ('proxy', proxy)]
    # TODO(scottmg): Are there configuration settings to set these flags?
    target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
    flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
    return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
  """Return True when |source_ext| and |pch_source_ext| belong to the same
  language family (both C, or both C++)."""
  c_exts = ('.c',)
  cc_exts = ('.cc', '.cxx', '.cpp')
  if source_ext in c_exts:
    return pch_source_ext in c_exts
  if source_ext in cc_exts:
    return pch_source_ext in cc_exts
  return False
class PrecompiledHeader(object):
  """Helper to generate dependencies and build rules to handle generation of
  precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
  """
  def __init__(
      self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
    """|settings| supplies msvs_precompiled_source/header maps keyed by
    |config|; the two path callbacks translate gyp paths into build paths
    and unique output paths respectively."""
    self.settings = settings
    self.config = config
    pch_source = self.settings.msvs_precompiled_source[self.config]
    self.pch_source = gyp_to_build_path(pch_source)
    filename, _ = os.path.splitext(pch_source)
    # The object file the pch compilation produces (lower-cased for
    # case-insensitive matching against other outputs).
    self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
  def _PchHeader(self):
    """Get the header that will appear in an #include line for all source
    files."""
    return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
  def GetObjDependencies(self, sources, objs, arch):
    """Given a list of sources files and the corresponding object files,
    returns a list of the pch files that should be depended upon. The
    additional wrapping in the return value is for interface compatibility
    with make.py on Mac, and xcode_emulation.py."""
    assert arch is None
    if not self._PchHeader():
      return []
    pch_ext = os.path.splitext(self.pch_source)[1]
    # One matching-language source is enough: all of them share the pch obj.
    for source in sources:
      if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
        return [(None, None, self.output_obj)]
    return []
  def GetPchBuildCommands(self, arch):
    """Not used on Windows as there are no additional build steps required
    (instead, existing steps are modified in GetFlagsModifications below)."""
    return []
  def GetFlagsModifications(self, input, output, implicit, command,
                            cflags_c, cflags_cc, expand_special):
    """Get the modified cflags and implicit dependencies that should be used
    for the pch compilation step. Returns (var_updates, output, implicit)."""
    if input == self.pch_source:
      pch_output = ['/Yc' + self._PchHeader()]
      # NOTE: list comprehensions (not map()) so callers receive real lists
      # on Python 3 as well; behavior is identical on Python 2.
      if command == 'cxx':
        return ([('cflags_cc',
                  [expand_special(f) for f in cflags_cc + pch_output])],
                self.output_obj, [])
      elif command == 'cc':
        return ([('cflags_c',
                  [expand_special(f) for f in cflags_c + pch_output])],
                self.output_obj, [])
    return [], output, implicit
# Module-level cache for the selected Visual Studio version; filled lazily
# by GetVSVersion().
vs_version = None
def GetVSVersion(generator_flags):
  """Return the (cached) VSVersion selected from generator_flags
  ('msvs_version', defaulting to 'auto'); raises if none can be found
  because fallback is disallowed."""
  global vs_version
  if not vs_version:
    vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
        generator_flags.get('msvs_version', 'auto'),
        allow_fallback=False)
  return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
  """Return the command (as an argv list) that sets up the VS environment.
  NOTE(review): |arch| is currently unused; the default setup script is
  always returned — confirm whether callers expect per-arch scripts."""
  return GetVSVersion(generator_flags).SetupScript()
def ExpandMacros(string, expansions):
  """Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
  for the canonical way to retrieve a suitable dict.

  Returns |string| unchanged when it contains no '$'.
  """
  if '$' in string:
    # dict.items() (not the Py2-only iteritems()) keeps this working on both
    # Python 2 and Python 3 with identical behavior.
    for old, new in expansions.items():
      # A replacement must never itself contain a macro, or the result would
      # depend on dict iteration order.
      assert '$(' not in new, new
      string = string.replace(old, new)
  return string
def _ExtractImportantEnvironment(output_of_set):
  """Extracts environment variables required for the toolchain to run from
  a textual dump output by the cmd.exe 'set' command.
  Raises when SYSTEMROOT, TEMP or TMP are absent from the dump.
  """
  envvars_to_save = (
      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
      'include',
      'lib',
      'libpath',
      'path',
      'pathext',
      'systemroot',
      'temp',
      'tmp',
      )
  env = {}
  for line in output_of_set.splitlines():
    for envvar in envvars_to_save:
      # Entries are regex prefixes ('goma_.*'), matched case-insensitively;
      # note 'lib' therefore also matches 'libpath', and the break below
      # keeps only the first matching entry per line.
      if re.match(envvar + '=', line.lower()):
        var, setting = line.split('=', 1)
        if envvar == 'path':
          # Our own rules (for running gyp-win-tool) and other actions in
          # Chromium rely on python being in the path. Add the path to this
          # python here so that if it's not in the path when ninja is run
          # later, python will still be found.
          setting = os.path.dirname(sys.executable) + os.pathsep + setting
        env[var.upper()] = setting
        break
  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
    if required not in env:
      raise Exception('Environment variable "%s" '
                      'required to be set to valid path' % required)
  return env
def _FormatAsEnvironmentBlock(envvar_dict):
  """Format as an 'environment block' directly suitable for CreateProcess.
  Briefly this is a list of key=value\0, terminated by an additional \0. See
  CreateProcess documentation for more details."""
  block = ''
  nul = '\0'
  # dict.items() (not the Py2-only iteritems()) keeps this working on both
  # Python 2 and Python 3 with identical behavior.
  for key, value in envvar_dict.items():
    block += key + '=' + value + nul
  block += nul
  return block
def _ExtractCLPath(output_of_where):
  """Gets the path to cl.exe based on the output of calling the environment
  setup batch file, followed by the equivalent of `where`. Returns None when
  no 'LOC:' line is present."""
  prefix = 'LOC:'
  # Take the first line, as that's the first found in the PATH.
  for line in output_of_where.strip().splitlines():
    if line.startswith(prefix):
      return line[len(prefix):].strip()
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
                             system_includes, open_out):
  """It's not sufficient to have the absolute path to the compiler, linker,
  etc. on Windows, as those tools rely on .dlls being in the PATH. We also
  need to support both x86 and x64 compilers within the same build (to support
  msvs_target_platform hackery). Different architectures require a different
  compiler binary, and different supporting environment variables (INCLUDE,
  LIB, LIBPATH). So, we extract the environment here, wrap all invocations
  of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
  sets up the environment, and then we do not prefix the compiler with
  an absolute path, instead preferring something like "cl.exe" in the rule
  which will then run whichever the environment setup has put in the path.
  When the following procedure to generate environment files does not
  meet your requirement (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to the gyp to suppress file
  generation and use custom environment files prepared by yourself.
  Returns a dict mapping arch ('x86'/'x64') to the found cl.exe path.
  """
  archs = ('x86', 'x64')
  if generator_flags.get('ninja_use_custom_environment_files', 0):
    # Custom environment files supplied externally; just report plain
    # 'cl.exe' so the rules rely on PATH.
    cl_paths = {}
    for arch in archs:
      cl_paths[arch] = 'cl.exe'
    return cl_paths
  vs = GetVSVersion(generator_flags)
  cl_paths = {}
  for arch in archs:
    # Extract environment variables for subprocesses.
    args = vs.SetupScript(arch)
    args.extend(('&&', 'set'))
    popen = subprocess.Popen(
        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    variables, _ = popen.communicate()
    env = _ExtractImportantEnvironment(variables)
    # Inject system includes from gyp files into INCLUDE.
    if system_includes:
      system_includes = system_includes | OrderedSet(
          env.get('INCLUDE', '').split(';'))
      env['INCLUDE'] = ';'.join(system_includes)
    env_block = _FormatAsEnvironmentBlock(env)
    f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
    f.write(env_block)
    f.close()
    # Find cl.exe location for this architecture.
    args = vs.SetupScript(arch)
    args.extend(('&&',
      'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
    popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
    output, _ = popen.communicate()
    cl_paths[arch] = _ExtractCLPath(output)
  return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
  """Emulate behavior of msvs_error_on_missing_sources present in the msvs
  generator: Check that all regular source files, i.e. not created at run
  time, exist on disk. Missing files cause needless recompilation when
  building via VS, and we want this check to match for people/bots that build
  using ninja, so they're not surprised when the VS build fails.

  Raises an Exception listing the missing files when the check is enabled
  (generator flag 'msvs_error_on_missing_sources') and any file is absent.
  """
  if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
    # Sources containing '$' are generated at build time; skip them.
    # NOTE: list comprehensions (not filter()) are required here: on
    # Python 3 an empty filter object is still truthy, which made the
    # 'if missing:' check below raise spuriously.
    no_specials = [x for x in sources if '$' not in x]
    relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
    missing = [x for x in relative if not os.path.exists(x)]
    if missing:
      # They'll look like out\Release\..\..\stuff\things.cc, so normalize the
      # path for a slightly less crazy looking output.
      cleaned_up = [os.path.normpath(x) for x in missing]
      raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
  """Populate MSVS_VERSION and MSVS_OS_BITS in |default_variables| so gyp
  conditions can depend on them."""
  generator_flags = params.get('generator_flags', {})
  # Set a variable so conditions can be based on msvs_version.
  msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
  default_variables['MSVS_VERSION'] = msvs_version.ShortName()
  # To determine processor word size on Windows, in addition to checking
  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running thru WOW64).
  if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
      '64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
    default_variables['MSVS_OS_BITS'] = 64
  else:
    default_variables['MSVS_OS_BITS'] = 32
|
mit
|
phenoxim/nova
|
nova/api/openstack/compute/quota_classes.py
|
3
|
5453
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import webob
from nova.api.openstack.compute.schemas import quota_classes
from nova.api.openstack import wsgi
from nova.api import validation
from nova import exception
from nova import objects
from nova.policies import quota_class_sets as qcs_policies
from nova import quota
from nova import utils
QUOTAS = quota.QUOTAS
# NOTE(gmann): Quotas which were returned in v2 but omitted from v2.1
# responses; restored in microversion 2.50. Bug#1693168.
EXTENDED_QUOTAS = ['server_groups', 'server_group_members']
# NOTE(gmann): Network related quotas are filtered out starting with
# microversion 2.50. Bug#1701211.
FILTERED_QUOTAS_2_50 = ["fixed_ips", "floating_ips", "networks",
                        "security_group_rules", "security_groups"]
# Microversion 2.57 additionally removes personality (injected) files
# from the API, so its filter set is a superset of the 2.50 one.
FILTERED_QUOTAS_2_57 = FILTERED_QUOTAS_2_50 + [
    'injected_files', 'injected_file_content_bytes',
    'injected_file_path_bytes']
class QuotaClassSetsController(wsgi.Controller):
    """API controller for showing and updating quota class sets.

    ``show`` and ``update`` are each registered three times under different
    microversion ranges via ``wsgi.Controller.api_version``; the duplicated
    names (marked ``# noqa``) are intentional -- the decorator dispatches by
    request microversion.  All variants delegate to the shared ``_show`` /
    ``_update`` helpers with version-appropriate resource filters.
    """
    # Class-level default; replaced per-instance in __init__ with the
    # registered quota resources.
    supported_quotas = []
    def __init__(self, **kwargs):
        # NOTE(review): **kwargs is accepted but ignored and
        # super().__init__ is not invoked -- presumably wsgi.Controller
        # tolerates this; confirm before relying on controller kwargs.
        self.supported_quotas = QUOTAS.resources
    def _format_quota_set(self, quota_class, quota_set, filtered_quotas=None,
                          exclude_server_groups=False):
        """Convert the quota object to a result dict.

        :param quota_class: class name used as the ``id`` of the result;
            a falsy value omits the ``id`` key (used by ``_update``).
        :param quota_set: mapping of resource name to quota value.
        :param filtered_quotas: resource names to drop from the response.
        :param exclude_server_groups: when True, also drop EXTENDED_QUOTAS
            (pre-2.50 behavior).
        :returns: dict with a single ``quota_class_set`` key.
        """
        if quota_class:
            result = dict(id=str(quota_class))
        else:
            result = {}
        # Deep-copy so the filtering below never mutates supported_quotas.
        original_quotas = copy.deepcopy(self.supported_quotas)
        if filtered_quotas:
            original_quotas = [resource for resource in original_quotas
                               if resource not in filtered_quotas]
        # NOTE(gmann): Before microversion v2.50, v2.1 API does not return the
        # 'server_groups' & 'server_group_members' key in quota class API
        # response.
        if exclude_server_groups:
            for resource in EXTENDED_QUOTAS:
                original_quotas.remove(resource)
        for resource in original_quotas:
            if resource in quota_set:
                result[resource] = quota_set[resource]
        return dict(quota_class_set=result)
    @wsgi.Controller.api_version('2.1', '2.49')
    @wsgi.expected_errors(())
    def show(self, req, id):
        """Show quota class (v2.1-2.49): hides server-group quotas."""
        return self._show(req, id, exclude_server_groups=True)
    @wsgi.Controller.api_version('2.50', '2.56') # noqa
    @wsgi.expected_errors(())
    def show(self, req, id):
        """Show quota class (v2.50-2.56): filters network quotas."""
        return self._show(req, id, FILTERED_QUOTAS_2_50)
    @wsgi.Controller.api_version('2.57') # noqa
    @wsgi.expected_errors(())
    def show(self, req, id):
        """Show quota class (v2.57+): also filters injected-file quotas."""
        return self._show(req, id, FILTERED_QUOTAS_2_57)
    def _show(self, req, id, filtered_quotas=None,
              exclude_server_groups=False):
        """Fetch the class quotas and format them for the response.

        Enforces the 'show' policy before reading the values.
        """
        context = req.environ['nova.context']
        context.can(qcs_policies.POLICY_ROOT % 'show', {'quota_class': id})
        values = QUOTAS.get_class_quotas(context, id)
        return self._format_quota_set(id, values, filtered_quotas,
                                      exclude_server_groups)
    @wsgi.Controller.api_version("2.1", "2.49") # noqa
    @wsgi.expected_errors(400)
    @validation.schema(quota_classes.update)
    def update(self, req, id, body):
        """Update quota class (v2.1-2.49): hides server-group quotas."""
        return self._update(req, id, body, exclude_server_groups=True)
    @wsgi.Controller.api_version("2.50", "2.56") # noqa
    @wsgi.expected_errors(400)
    @validation.schema(quota_classes.update_v250)
    def update(self, req, id, body):
        """Update quota class (v2.50-2.56): filters network quotas."""
        return self._update(req, id, body, FILTERED_QUOTAS_2_50)
    @wsgi.Controller.api_version("2.57") # noqa
    @wsgi.expected_errors(400)
    @validation.schema(quota_classes.update_v257)
    def update(self, req, id, body):
        """Update quota class (v2.57+): also filters injected-file quotas."""
        return self._update(req, id, body, FILTERED_QUOTAS_2_57)
    def _update(self, req, id, body, filtered_quotas=None,
                exclude_server_groups=False):
        """Validate the class name, upsert each quota value, return the set.

        :raises webob.exc.HTTPBadRequest: if the class name fails the
            length check (1-255 characters).
        """
        context = req.environ['nova.context']
        context.can(qcs_policies.POLICY_ROOT % 'update', {'quota_class': id})
        try:
            utils.check_string_length(id, 'quota_class_name',
                                      min_length=1, max_length=255)
        except exception.InvalidInput as e:
            raise webob.exc.HTTPBadRequest(
                explanation=e.format_message())
        quota_class = id
        for key, value in body['quota_class_set'].items():
            # EAFP upsert: try to update the existing class quota and fall
            # back to creating it when the class is not found.
            try:
                objects.Quotas.update_class(context, quota_class, key, value)
            except exception.QuotaClassNotFound:
                objects.Quotas.create_class(context, quota_class, key, value)
        values = QUOTAS.get_class_quotas(context, quota_class)
        return self._format_quota_set(None, values, filtered_quotas,
                                      exclude_server_groups)
|
apache-2.0
|
jamesblunt/edx-platform
|
scripts/hotfix.py
|
154
|
1898
|
#!/usr/bin/env python
"""
Script to generate alton and git commands for executing hotfixes
Commands for:
- cutting amis
- creating hotfix tag
The script should be run with the hotfix's git hash as a command-line argument.
i.e. `python scripts/hotfix.py <hotfix hash>`
"""
from __future__ import print_function
from datetime import date
import sys
import argparse
import textwrap
def generate_alton_commands(hotfix_hash):
    """
    Return the alton chat commands that cut AMIs for the given hotfix hash,
    one command per line, covering stage, edge and prod environments.
    """
    environments = (
        ('stage-edx-edxapp', 'prod-edx-edxapp'),
        ('prod-edge-edxapp', 'prod-edge-edxapp'),
        ('prod-edx-edxapp', 'prod-edx-edxapp'),
    )
    command_lines = [
        '@alton cut ami for {target} from {source} '
        'with edx_platform_version={hotfix_hash}'.format(
            target=target, source=source, hotfix_hash=hotfix_hash)
        for target, source in environments
    ]
    return '\n'.join(command_lines)
def generate_git_command(hotfix_hash):
    """
    Return the git command that annotates the hotfix commit with a
    date-stamped ``hotfix-YYYY-MM-DD`` tag.
    """
    # Read the clock once so the ISO tag name and the human-readable
    # message always agree on the date.
    today = date.today()
    return 'git tag -a hotfix-{iso_date} -m "Hotfix for {msg_date}" {hotfix_hash}'.format(
        iso_date=today.isoformat(),
        msg_date=today.strftime("%b %d, %Y"),
        hotfix_hash=hotfix_hash,
    )
def main():
    """Parse the hotfix hash from argv and print the alton/git instructions."""
    parser = argparse.ArgumentParser(
        description="Generate alton and git commands for hotfixes")
    parser.add_argument("hash", help="git hash for hotfix")
    hotfix_hash = parser.parse_args().hash
    print("\nHere are the alton commands to cut the hotfix amis:")
    print(generate_alton_commands(hotfix_hash))
    print("\nHere is the git command to generate the hotfix tag:")
    print(generate_git_command(hotfix_hash))
    print("\nOnce you create the git tag, push the tag by running:")
    print("git push --tags\n")
if __name__ == '__main__':
main()
|
agpl-3.0
|
shaftoe/home-assistant
|
tests/components/automation/test_template.py
|
8
|
13520
|
"""The tests for the Template automation."""
import unittest
from homeassistant.core import callback
from homeassistant.setup import setup_component
import homeassistant.components.automation as automation
from tests.common import (
get_test_home_assistant, assert_setup_component, mock_component)
# pylint: disable=invalid-name
class TestAutomationTemplate(unittest.TestCase):
    """Test the template-trigger automation platform.

    Each test sets up an automation whose trigger is a rendered Jinja
    template; the trigger fires when the template result becomes truthy
    after a tracked state change.  Fired actions are recorded in
    ``self.calls`` by the ``test.automation`` service stub.
    """
    def setUp(self):
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        mock_component(self.hass, 'group')
        # Baseline state the templates below react to.
        self.hass.states.set('test.entity', 'hello')
        self.calls = []
        @callback
        def record_call(service):
            """Helper to record calls."""
            self.calls.append(service)
        # Stub out test.automation so actions land in self.calls.
        self.hass.services.register('test', 'automation', record_call)
    def tearDown(self):
        """Stop everything that was started."""
        self.hass.stop()
    def test_if_fires_on_change_bool(self):
        """Test for firing on boolean change."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '{{ true }}',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
        # After turning the automation off, further state changes must
        # not fire the action again.
        automation.turn_off(self.hass)
        self.hass.block_till_done()
        self.hass.states.set('test.entity', 'planet')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
    def test_if_fires_on_change_str(self):
        """Test for firing on change."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': 'true',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
    def test_if_fires_on_change_str_crazy(self):
        """Test for firing on change with a mixed-case 'true' string."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': 'TrUE',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
    def test_if_not_fires_on_change_bool(self):
        """Test for not firing on boolean change."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '{{ false }}',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
    def test_if_not_fires_on_change_str(self):
        """Test for not firing on string change ('False' is not 'true')."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': 'False',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
    def test_if_not_fires_on_change_str_crazy(self):
        """Test for not firing on an arbitrary non-'true' string."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': 'Anything other than "true" is false.',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
    def test_if_fires_on_no_change(self):
        """Test for firing on no change.

        NOTE(review): despite the name, this asserts the automation does
        NOT fire when the entity is set to its existing value.
        """
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '{{ true }}',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.block_till_done()
        # Discard any call fired during setup; only the no-op state
        # write below is under test.
        self.calls = []
        self.hass.states.set('test.entity', 'hello')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
    def test_if_fires_on_two_change(self):
        """Test that a second identical change does not fire again."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '{{ true }}',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        # Trigger once
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
        # Trigger again
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
    def test_if_fires_on_change_with_template(self):
        """Test for firing on change with template."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '{{ is_state("test.entity", "world") }}',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
    def test_if_not_fires_on_change_with_template(self):
        """Test for not firing on change with template."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '{{ is_state("test.entity", "hello") }}',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.block_till_done()
        self.calls = []
        # Template flips from true to false here, which must not fire.
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        assert len(self.calls) == 0
    def test_if_fires_on_change_with_template_advanced(self):
        """Test for firing on change with template advanced."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '''{%- if is_state("test.entity", "world") -%}
                                         true
                                         {%- else -%}
                                         false
                                         {%- endif -%}''',
                },
                'action': {
                    'service': 'test.automation',
                    'data_template': {
                        # Builds '{{ trigger.platform }} - {{ trigger.entity_id }}
                        # - {{ trigger.from_state.state }} - {{ trigger.to_state.state }}'
                        # by joining the field names into one template string.
                        'some':
                        '{{ trigger.%s }}' % '}} - {{ trigger.'.join((
                            'platform', 'entity_id', 'from_state.state',
                            'to_state.state'))
                    },
                }
            }
        })
        self.hass.block_till_done()
        self.calls = []
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
        self.assertEqual(
            'template - test.entity - hello - world',
            self.calls[0].data['some'])
    def test_if_fires_on_no_change_with_template_advanced(self):
        """Test for firing on no change with template advanced.

        NOTE(review): despite the name, this asserts the automation does
        NOT fire while the template never becomes true.
        """
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '''{%- if is_state("test.entity", "world") -%}
                                         true
                                         {%- else -%}
                                         false
                                         {%- endif -%}''',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        # Different state
        self.hass.states.set('test.entity', 'worldz')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
        # Different state
        self.hass.states.set('test.entity', 'hello')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
    def test_if_fires_on_change_with_template_2(self):
        """Test firing only on false-to-true transitions of the template."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template':
                    '{{ not is_state("test.entity", "world") }}',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.block_till_done()
        self.calls = []
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        assert len(self.calls) == 0
        self.hass.states.set('test.entity', 'home')
        self.hass.block_till_done()
        assert len(self.calls) == 1
        # Template stays true through these changes, so no extra firings.
        self.hass.states.set('test.entity', 'work')
        self.hass.block_till_done()
        assert len(self.calls) == 1
        self.hass.states.set('test.entity', 'not_home')
        self.hass.block_till_done()
        assert len(self.calls) == 1
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        assert len(self.calls) == 1
        # Second false-to-true transition fires again.
        self.hass.states.set('test.entity', 'home')
        self.hass.block_till_done()
        assert len(self.calls) == 2
    def test_if_action(self):
        """Test template used as a condition on an event trigger."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'event',
                    'event_type': 'test_event',
                },
                'condition': [{
                    'condition': 'template',
                    'value_template': '{{ is_state("test.entity", "world") }}'
                }],
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        # Condition is not true yet
        self.hass.bus.fire('test_event')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
        # Change condition to true, but it shouldn't be triggered yet
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
        # Condition is true and event is triggered
        self.hass.bus.fire('test_event')
        self.hass.block_till_done()
        self.assertEqual(1, len(self.calls))
    def test_if_fires_on_change_with_bad_template(self):
        """Test that a syntactically invalid template fails setup."""
        # assert_setup_component(0) expects zero automations to load.
        with assert_setup_component(0):
            assert setup_component(self.hass, automation.DOMAIN, {
                automation.DOMAIN: {
                    'trigger': {
                        'platform': 'template',
                        'value_template': '{{ ',
                    },
                    'action': {
                        'service': 'test.automation'
                    }
                }
            })
    def test_if_fires_on_change_with_bad_template_2(self):
        """Test that a template raising at render time never fires."""
        assert setup_component(self.hass, automation.DOMAIN, {
            automation.DOMAIN: {
                'trigger': {
                    'platform': 'template',
                    'value_template': '{{ xyz | round(0) }}',
                },
                'action': {
                    'service': 'test.automation'
                }
            }
        })
        self.hass.states.set('test.entity', 'world')
        self.hass.block_till_done()
        self.assertEqual(0, len(self.calls))
|
apache-2.0
|
libre-informatique/SymfonyLibrinfoCRMBundle
|
src/Resources/doc/conf.py
|
18
|
1918
|
# Sphinx build configuration for the Blast Project documentation.
import sys, os
# NOTE(review): `lexers` and `PhpLexer` are imported but not referenced in
# this visible configuration -- possibly intended for registering a PHP
# code-block lexer; confirm before removing.
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
import sphinx_rtd_theme
# Sphinx extension modules enabled for this build.
extensions = [
    'sphinx.ext.intersphinx',
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.ifconfig'
]
templates_path = ['_templates']
# Only reStructuredText sources are built.
source_suffix = ['.rst']
master_doc = 'index'
project = u'Blast Project'
copyright = u'2017, Libre-Informatique'
# Version strings are intentionally left empty.
version = ''
release = ''
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = 'sphinx'
# Render todo:: directives in the output.
todo_include_todos = True
# Use the Read the Docs theme shipped by the sphinx_rtd_theme package.
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_static_path = ['_static']
htmlhelp_basename = 'doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# man_pages = [
#     (master_doc, 'blast', u'Blast Documentation',
#      [author], 1)
# ]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# texinfo_documents = [
#     (master_doc, 'Blast', u'Blast Documentation',
#      author, 'Blast', 'One line description of project.',
#      'Miscellaneous'),
# ]
# Example configuration for intersphinx: refer to the Python standard library.
# Empty mapping: no cross-project references are configured.
intersphinx_mapping = {}
|
lgpl-3.0
|
gudcjfdldu/volatility
|
volatility/plugins/overlays/windows/win7_sp0_x86_vtypes.py
|
58
|
471672
|
ntkrnlmp_types = {
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'_GENERAL_LOOKASIDE_POOL' : [ 0x48, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x8, ['unsigned short']],
'MaximumDepth' : [ 0xa, ['unsigned short']],
'TotalAllocates' : [ 0xc, ['unsigned long']],
'AllocateMisses' : [ 0x10, ['unsigned long']],
'AllocateHits' : [ 0x10, ['unsigned long']],
'TotalFrees' : [ 0x14, ['unsigned long']],
'FreeMisses' : [ 0x18, ['unsigned long']],
'FreeHits' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x20, ['unsigned long']],
'Size' : [ 0x24, ['unsigned long']],
'AllocateEx' : [ 0x28, ['pointer', ['void']]],
'Allocate' : [ 0x28, ['pointer', ['void']]],
'FreeEx' : [ 0x2c, ['pointer', ['void']]],
'Free' : [ 0x2c, ['pointer', ['void']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x38, ['unsigned long']],
'LastAllocateMisses' : [ 0x3c, ['unsigned long']],
'LastAllocateHits' : [ 0x3c, ['unsigned long']],
'Future' : [ 0x40, ['array', 2, ['unsigned long']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENTRY' : [ 0xc, {
'Linkage' : [ 0x0, ['_LIST_ENTRY']],
'Signature' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_2008' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_PERF_STATES' : [ 0x80, {
'Count' : [ 0x0, ['unsigned long']],
'MaxFrequency' : [ 0x4, ['unsigned long']],
'PStateCap' : [ 0x8, ['unsigned long']],
'TStateCap' : [ 0xc, ['unsigned long']],
'MaxPerfState' : [ 0x10, ['unsigned long']],
'MinPerfState' : [ 0x14, ['unsigned long']],
'LowestPState' : [ 0x18, ['unsigned long']],
'IncreaseTime' : [ 0x1c, ['unsigned long']],
'DecreaseTime' : [ 0x20, ['unsigned long']],
'BusyAdjThreshold' : [ 0x24, ['unsigned char']],
'Reserved' : [ 0x25, ['unsigned char']],
'ThrottleStatesOnly' : [ 0x26, ['unsigned char']],
'PolicyType' : [ 0x27, ['unsigned char']],
'TimerInterval' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['__unnamed_2008']],
'TargetProcessors' : [ 0x30, ['_KAFFINITY_EX']],
'PStateHandler' : [ 0x3c, ['pointer', ['void']]],
'PStateContext' : [ 0x40, ['unsigned long']],
'TStateHandler' : [ 0x44, ['pointer', ['void']]],
'TStateContext' : [ 0x48, ['unsigned long']],
'FeedbackHandler' : [ 0x4c, ['pointer', ['void']]],
'GetFFHThrottleState' : [ 0x50, ['pointer', ['void']]],
'State' : [ 0x58, ['array', 1, ['_PPM_PERF_STATE']]],
} ],
'_M128A' : [ 0x10, {
'Low' : [ 0x0, ['unsigned long long']],
'High' : [ 0x8, ['long long']],
} ],
'_HEAP_LOOKASIDE' : [ 0x30, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x8, ['unsigned short']],
'MaximumDepth' : [ 0xa, ['unsigned short']],
'TotalAllocates' : [ 0xc, ['unsigned long']],
'AllocateMisses' : [ 0x10, ['unsigned long']],
'TotalFrees' : [ 0x14, ['unsigned long']],
'FreeMisses' : [ 0x18, ['unsigned long']],
'LastTotalAllocates' : [ 0x1c, ['unsigned long']],
'LastAllocateMisses' : [ 0x20, ['unsigned long']],
'Counters' : [ 0x24, ['array', 2, ['unsigned long']]],
} ],
'_WMI_TRACE_PACKET' : [ 0x4, {
'Size' : [ 0x0, ['unsigned short']],
'HookId' : [ 0x2, ['unsigned short']],
'Type' : [ 0x2, ['unsigned char']],
'Group' : [ 0x3, ['unsigned char']],
} ],
'_KTIMER' : [ 0x28, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x10, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x18, ['_LIST_ENTRY']],
'Dpc' : [ 0x20, ['pointer', ['_KDPC']]],
'Period' : [ 0x24, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE' : [ 0x44, {
'Signature' : [ 0x0, ['unsigned long']],
'CriticalSection' : [ 0x4, ['_RTL_CRITICAL_SECTION']],
'RtlHandleTable' : [ 0x1c, ['_RTL_HANDLE_TABLE']],
'NumberOfBuckets' : [ 0x3c, ['unsigned long']],
'Buckets' : [ 0x40, ['array', 1, ['pointer', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'_POP_POWER_ACTION' : [ 0xb0, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'DeviceType' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'DeviceTypeFlags' : [ 0x18, ['unsigned long']],
'IrpMinor' : [ 0x1c, ['unsigned char']],
'Waking' : [ 0x1d, ['unsigned char']],
'SystemState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'EffectiveSystemState' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentSystemState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x30, ['pointer', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x34, ['pointer', ['_POP_DEVICE_SYS_STATE']]],
'HiberContext' : [ 0x38, ['pointer', ['_POP_HIBER_CONTEXT']]],
'WakeTime' : [ 0x40, ['unsigned long long']],
'SleepTime' : [ 0x48, ['unsigned long long']],
'ProgrammedRTCTime' : [ 0x50, ['unsigned long long']],
'WakeOnRTC' : [ 0x58, ['unsigned char']],
'WakeTimerInfo' : [ 0x5c, ['pointer', ['_DIAGNOSTIC_BUFFER']]],
'FilteredCapabilities' : [ 0x60, ['SYSTEM_POWER_CAPABILITIES']],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['wchar']]],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x3c, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'PowerChildren' : [ 0x8, ['_LIST_ENTRY']],
'PowerParents' : [ 0x10, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x18, ['pointer', ['_DEVICE_OBJECT']]],
'OrderLevel' : [ 0x1c, ['unsigned char']],
'DeviceObject' : [ 0x20, ['pointer', ['_DEVICE_OBJECT']]],
'DeviceName' : [ 0x24, ['pointer', ['unsigned short']]],
'DriverName' : [ 0x28, ['pointer', ['unsigned short']]],
'ChildCount' : [ 0x2c, ['unsigned long']],
'ActiveChild' : [ 0x30, ['unsigned long']],
'ParentCount' : [ 0x34, ['unsigned long']],
'ActiveParent' : [ 0x38, ['unsigned long']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x8, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x4, ['pointer', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x24, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0xc, ['pointer', ['_FILE_OBJECT']]],
'Parameters' : [ 0x10, ['_FS_FILTER_PARAMETERS']],
} ],
'_GDI_TEB_BATCH32' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'_PROC_IDLE_STATE_ACCOUNTING' : [ 0x228, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'IdleTransitions' : [ 0x8, ['unsigned long']],
'FailedTransitions' : [ 0xc, ['unsigned long']],
'InvalidBucketIndex' : [ 0x10, ['unsigned long']],
'MinTime' : [ 0x18, ['unsigned long long']],
'MaxTime' : [ 0x20, ['unsigned long long']],
'IdleTimeBuckets' : [ 0x28, ['array', 16, ['_PROC_IDLE_STATE_BUCKET']]],
} ],
'_IMAGE_SECURITY_CONTEXT' : [ 0x4, {
'PageHashes' : [ 0x0, ['pointer', ['void']]],
'Value' : [ 0x0, ['unsigned long']],
'SecurityBeingCreated' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'SecurityMandatory' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'PageHashPointer' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_204b' : [ 0x4, {
'Level' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_204d' : [ 0x4, {
'Type' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0x10, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyDeviceMemory', 4: 'PolicyInitiatePowerActionAPI', 5: 'PolicySetPowerStateAPI', 6: 'PolicyImmediateDozeS4', 7: 'PolicySystemIdle', 8: 'PolicyDeviceMax'})]],
'Flags' : [ 0x4, ['unsigned long']],
'Wait' : [ 0x8, ['pointer', ['_POP_TRIGGER_WAIT']]],
'Battery' : [ 0xc, ['__unnamed_204b']],
'Button' : [ 0xc, ['__unnamed_204d']],
} ],
'_KENLISTMENT_HISTORY' : [ 0x8, {
'Notification' : [ 0x0, ['unsigned long']],
'NewState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
} ],
'_FAST_IO_DISPATCH' : [ 0x70, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x4, ['pointer', ['void']]],
'FastIoRead' : [ 0x8, ['pointer', ['void']]],
'FastIoWrite' : [ 0xc, ['pointer', ['void']]],
'FastIoQueryBasicInfo' : [ 0x10, ['pointer', ['void']]],
'FastIoQueryStandardInfo' : [ 0x14, ['pointer', ['void']]],
'FastIoLock' : [ 0x18, ['pointer', ['void']]],
'FastIoUnlockSingle' : [ 0x1c, ['pointer', ['void']]],
'FastIoUnlockAll' : [ 0x20, ['pointer', ['void']]],
'FastIoUnlockAllByKey' : [ 0x24, ['pointer', ['void']]],
'FastIoDeviceControl' : [ 0x28, ['pointer', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x2c, ['pointer', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x30, ['pointer', ['void']]],
'FastIoDetachDevice' : [ 0x34, ['pointer', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x38, ['pointer', ['void']]],
'AcquireForModWrite' : [ 0x3c, ['pointer', ['void']]],
'MdlRead' : [ 0x40, ['pointer', ['void']]],
'MdlReadComplete' : [ 0x44, ['pointer', ['void']]],
'PrepareMdlWrite' : [ 0x48, ['pointer', ['void']]],
'MdlWriteComplete' : [ 0x4c, ['pointer', ['void']]],
'FastIoReadCompressed' : [ 0x50, ['pointer', ['void']]],
'FastIoWriteCompressed' : [ 0x54, ['pointer', ['void']]],
'MdlReadCompleteCompressed' : [ 0x58, ['pointer', ['void']]],
'MdlWriteCompleteCompressed' : [ 0x5c, ['pointer', ['void']]],
'FastIoQueryOpen' : [ 0x60, ['pointer', ['void']]],
'ReleaseForModWrite' : [ 0x64, ['pointer', ['void']]],
'AcquireForCcFlush' : [ 0x68, ['pointer', ['void']]],
'ReleaseForCcFlush' : [ 0x6c, ['pointer', ['void']]],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0xc, {
'ChainLink' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x4, ['pointer', ['void']]],
'HashValue' : [ 0x8, ['unsigned long']],
} ],
'_LOADER_PARAMETER_EXTENSION' : [ 0xe8, {
'Size' : [ 0x0, ['unsigned long']],
'Profile' : [ 0x4, ['_PROFILE_PARAMETER_BLOCK']],
'EmInfFileImage' : [ 0x14, ['pointer', ['void']]],
'EmInfFileSize' : [ 0x18, ['unsigned long']],
'TriageDumpBlock' : [ 0x1c, ['pointer', ['void']]],
'LoaderPagesSpanned' : [ 0x20, ['unsigned long']],
'HeadlessLoaderBlock' : [ 0x24, ['pointer', ['_HEADLESS_LOADER_BLOCK']]],
'SMBiosEPSHeader' : [ 0x28, ['pointer', ['_SMBIOS_TABLE_HEADER']]],
'DrvDBImage' : [ 0x2c, ['pointer', ['void']]],
'DrvDBSize' : [ 0x30, ['unsigned long']],
'NetworkLoaderBlock' : [ 0x34, ['pointer', ['_NETWORK_LOADER_BLOCK']]],
'HalpIRQLToTPR' : [ 0x38, ['pointer', ['unsigned char']]],
'HalpVectorToIRQL' : [ 0x3c, ['pointer', ['unsigned char']]],
'FirmwareDescriptorListHead' : [ 0x40, ['_LIST_ENTRY']],
'AcpiTable' : [ 0x48, ['pointer', ['void']]],
'AcpiTableSize' : [ 0x4c, ['unsigned long']],
'LastBootSucceeded' : [ 0x50, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'LastBootShutdown' : [ 0x50, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'IoPortAccessSupported' : [ 0x50, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x50, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'LoaderPerformanceData' : [ 0x54, ['pointer', ['_LOADER_PERFORMANCE_DATA']]],
'BootApplicationPersistentData' : [ 0x58, ['_LIST_ENTRY']],
'WmdTestResult' : [ 0x60, ['pointer', ['void']]],
'BootIdentifier' : [ 0x64, ['_GUID']],
'ResumePages' : [ 0x74, ['unsigned long']],
'DumpHeader' : [ 0x78, ['pointer', ['void']]],
'BgContext' : [ 0x7c, ['pointer', ['void']]],
'NumaLocalityInfo' : [ 0x80, ['pointer', ['void']]],
'NumaGroupAssignment' : [ 0x84, ['pointer', ['void']]],
'AttachedHives' : [ 0x88, ['_LIST_ENTRY']],
'MemoryCachingRequirementsCount' : [ 0x90, ['unsigned long']],
'MemoryCachingRequirements' : [ 0x94, ['pointer', ['void']]],
'TpmBootEntropyResult' : [ 0x98, ['_TPM_BOOT_ENTROPY_LDR_RESULT']],
'ProcessorCounterFrequency' : [ 0xe0, ['unsigned long long']],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x38, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x8, ['unsigned char']],
'ArbiterInterface' : [ 0xc, ['pointer', ['_ARBITER_INTERFACE']]],
'DeviceNode' : [ 0x10, ['pointer', ['_DEVICE_NODE']]],
'ResourceList' : [ 0x14, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x1c, ['_LIST_ENTRY']],
'BestConfig' : [ 0x24, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x2c, ['_LIST_ENTRY']],
'State' : [ 0x34, ['unsigned char']],
'ResourcesChanged' : [ 0x35, ['unsigned char']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['pointer', ['void']]],
'Group' : [ 0x8, ['pointer', ['void']]],
'Sacl' : [ 0xc, ['pointer', ['_ACL']]],
'Dacl' : [ 0x10, ['pointer', ['_ACL']]],
} ],
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x298, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer', ['void']]],
'ConsoleFlags' : [ 0x14, ['unsigned long']],
'StandardInput' : [ 0x18, ['pointer', ['void']]],
'StandardOutput' : [ 0x1c, ['pointer', ['void']]],
'StandardError' : [ 0x20, ['pointer', ['void']]],
'CurrentDirectory' : [ 0x24, ['_CURDIR']],
'DllPath' : [ 0x30, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x38, ['_UNICODE_STRING']],
'CommandLine' : [ 0x40, ['_UNICODE_STRING']],
'Environment' : [ 0x48, ['pointer', ['void']]],
'StartingX' : [ 0x4c, ['unsigned long']],
'StartingY' : [ 0x50, ['unsigned long']],
'CountX' : [ 0x54, ['unsigned long']],
'CountY' : [ 0x58, ['unsigned long']],
'CountCharsX' : [ 0x5c, ['unsigned long']],
'CountCharsY' : [ 0x60, ['unsigned long']],
'FillAttribute' : [ 0x64, ['unsigned long']],
'WindowFlags' : [ 0x68, ['unsigned long']],
'ShowWindowFlags' : [ 0x6c, ['unsigned long']],
'WindowTitle' : [ 0x70, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0x78, ['_UNICODE_STRING']],
'ShellInfo' : [ 0x80, ['_UNICODE_STRING']],
'RuntimeData' : [ 0x88, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0x90, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
'EnvironmentSize' : [ 0x290, ['unsigned long']],
'EnvironmentVersion' : [ 0x294, ['unsigned long']],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x8, {
'BasePage' : [ 0x0, ['unsigned long']],
'PageCount' : [ 0x4, ['unsigned long']],
} ],
'_RTL_SRWLOCK' : [ 0x4, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
'Ptr' : [ 0x0, ['pointer', ['void']]],
} ],
'_ALPC_MESSAGE_ZONE' : [ 0x18, {
'Mdl' : [ 0x0, ['pointer', ['_MDL']]],
'UserVa' : [ 0x4, ['pointer', ['void']]],
'UserLimit' : [ 0x8, ['pointer', ['void']]],
'SystemVa' : [ 0xc, ['pointer', ['void']]],
'SystemLimit' : [ 0x10, ['pointer', ['void']]],
'Size' : [ 0x14, ['unsigned long']],
} ],
'_KTMOBJECT_NAMESPACE_LINK' : [ 0x14, {
'Links' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'Expired' : [ 0x10, ['unsigned char']],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x10, {
'AcquireForLazyWrite' : [ 0x0, ['pointer', ['void']]],
'ReleaseFromLazyWrite' : [ 0x4, ['pointer', ['void']]],
'AcquireForReadAhead' : [ 0x8, ['pointer', ['void']]],
'ReleaseFromReadAhead' : [ 0xc, ['pointer', ['void']]],
} ],
'_PROC_PERF_LOAD' : [ 0x2, {
'BusyPercentage' : [ 0x0, ['unsigned char']],
'FrequencyPercentage' : [ 0x1, ['unsigned char']],
} ],
'_PROC_HISTORY_ENTRY' : [ 0x4, {
'Utility' : [ 0x0, ['unsigned short']],
'Frequency' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_RTL_RANGE' : [ 0x20, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer', ['void']]],
'Owner' : [ 0x14, ['pointer', ['void']]],
'Attributes' : [ 0x18, ['unsigned char']],
'Flags' : [ 0x19, ['unsigned char']],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_POOL_HEADER' : [ 0x8, {
'PreviousSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned short')]],
'PoolIndex' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 16, native_type='unsigned short')]],
'BlockSize' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned short')]],
'PoolType' : [ 0x2, ['BitField', dict(start_bit = 9, end_bit = 16, native_type='unsigned short')]],
'Ulong1' : [ 0x0, ['unsigned long']],
'PoolTag' : [ 0x4, ['unsigned long']],
'AllocatorBackTraceIndex' : [ 0x4, ['unsigned short']],
'PoolTagHash' : [ 0x6, ['unsigned short']],
} ],
'_ETW_PROVIDER_TABLE_ENTRY' : [ 0x10, {
'RefCount' : [ 0x0, ['long']],
'State' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'EtwProviderStateFree', 1: 'EtwProviderStateTransition', 2: 'EtwProviderStateActive', 3: 'EtwProviderStateMax'})]],
'RegEntry' : [ 0x8, ['pointer', ['_ETW_REG_ENTRY']]],
'Caller' : [ 0xc, ['pointer', ['void']]],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x4, {
'ImageFileName' : [ 0x0, ['pointer', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x8, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x4, ['unsigned long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
} ],
'_VF_POOL_TRACE' : [ 0x40, {
'Address' : [ 0x0, ['pointer', ['void']]],
'Size' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer', ['_ETHREAD']]],
'StackTrace' : [ 0xc, ['array', 13, ['pointer', ['void']]]],
} ],
'__unnamed_20df' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x2000, {
'ReferenceCount' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_20df']],
'SessionId' : [ 0x8, ['unsigned long']],
'ProcessReferenceToSession' : [ 0xc, ['long']],
'ProcessList' : [ 0x10, ['_LIST_ENTRY']],
'LastProcessSwappedOutTime' : [ 0x18, ['_LARGE_INTEGER']],
'SessionPageDirectoryIndex' : [ 0x20, ['unsigned long']],
'NonPagablePages' : [ 0x24, ['unsigned long']],
'CommittedPages' : [ 0x28, ['unsigned long']],
'PagedPoolStart' : [ 0x2c, ['pointer', ['void']]],
'PagedPoolEnd' : [ 0x30, ['pointer', ['void']]],
'SessionObject' : [ 0x34, ['pointer', ['void']]],
'SessionObjectHandle' : [ 0x38, ['pointer', ['void']]],
'ResidentProcessCount' : [ 0x3c, ['long']],
'SessionPoolAllocationFailures' : [ 0x40, ['array', 4, ['unsigned long']]],
'ImageList' : [ 0x50, ['_LIST_ENTRY']],
'LocaleId' : [ 0x58, ['unsigned long']],
'AttachCount' : [ 0x5c, ['unsigned long']],
'AttachGate' : [ 0x60, ['_KGATE']],
'WsListEntry' : [ 0x70, ['_LIST_ENTRY']],
'Lookaside' : [ 0x80, ['array', 25, ['_GENERAL_LOOKASIDE']]],
'Session' : [ 0xd00, ['_MMSESSION']],
'PagedPoolInfo' : [ 0xd38, ['_MM_PAGED_POOL_INFO']],
'Vm' : [ 0xd70, ['_MMSUPPORT']],
'Wsle' : [ 0xddc, ['pointer', ['_MMWSLE']]],
'DriverUnload' : [ 0xde0, ['pointer', ['void']]],
'PagedPool' : [ 0xe00, ['_POOL_DESCRIPTOR']],
'PageTables' : [ 0x1f40, ['pointer', ['_MMPTE']]],
'SpecialPool' : [ 0x1f44, ['_MI_SPECIAL_POOL']],
'SessionPteLock' : [ 0x1f68, ['_KGUARDED_MUTEX']],
'PoolBigEntriesInUse' : [ 0x1f88, ['long']],
'PagedPoolPdeCount' : [ 0x1f8c, ['unsigned long']],
'SpecialPoolPdeCount' : [ 0x1f90, ['unsigned long']],
'DynamicSessionPdeCount' : [ 0x1f94, ['unsigned long']],
'SystemPteInfo' : [ 0x1f98, ['_MI_SYSTEM_PTE_TYPE']],
'PoolTrackTableExpansion' : [ 0x1fc8, ['pointer', ['void']]],
'PoolTrackTableExpansionSize' : [ 0x1fcc, ['unsigned long']],
'PoolTrackBigPages' : [ 0x1fd0, ['pointer', ['void']]],
'PoolTrackBigPagesSize' : [ 0x1fd4, ['unsigned long']],
'IoState' : [ 0x1fd8, ['Enumeration', dict(target = 'long', choices = {1: 'IoSessionStateCreated', 2: 'IoSessionStateInitialized', 3: 'IoSessionStateConnected', 4: 'IoSessionStateDisconnected', 5: 'IoSessionStateDisconnectedLoggedOn', 6: 'IoSessionStateLoggedOn', 7: 'IoSessionStateLoggedOff', 8: 'IoSessionStateTerminated', 9: 'IoSessionStateMax'})]],
'IoStateSequence' : [ 0x1fdc, ['unsigned long']],
'IoNotificationEvent' : [ 0x1fe0, ['_KEVENT']],
'SessionPoolPdes' : [ 0x1ff0, ['_RTL_BITMAP']],
'CpuQuotaBlock' : [ 0x1ff8, ['pointer', ['_PS_CPU_QUOTA_BLOCK']]],
} ],
'_OBJECT_HANDLE_COUNT_ENTRY' : [ 0x8, {
'Process' : [ 0x0, ['pointer', ['_EPROCESS']]],
'HandleCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'LockCount' : [ 0x4, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_CLIENT_ID' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['pointer', ['void']]],
'UniqueThread' : [ 0x4, ['pointer', ['void']]],
} ],
'_WHEA_MEMORY_ERROR_SECTION' : [ 0x49, {
'ValidBits' : [ 0x0, ['_WHEA_MEMORY_ERROR_SECTION_VALIDBITS']],
'ErrorStatus' : [ 0x8, ['_WHEA_ERROR_STATUS']],
'PhysicalAddress' : [ 0x10, ['unsigned long long']],
'PhysicalAddressMask' : [ 0x18, ['unsigned long long']],
'Node' : [ 0x20, ['unsigned short']],
'Card' : [ 0x22, ['unsigned short']],
'Module' : [ 0x24, ['unsigned short']],
'Bank' : [ 0x26, ['unsigned short']],
'Device' : [ 0x28, ['unsigned short']],
'Row' : [ 0x2a, ['unsigned short']],
'Column' : [ 0x2c, ['unsigned short']],
'BitPosition' : [ 0x2e, ['unsigned short']],
'RequesterId' : [ 0x30, ['unsigned long long']],
'ResponderId' : [ 0x38, ['unsigned long long']],
'TargetId' : [ 0x40, ['unsigned long long']],
'ErrorType' : [ 0x48, ['unsigned char']],
} ],
'_KWAIT_STATUS_REGISTER' : [ 0x1, {
'Flags' : [ 0x0, ['unsigned char']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned char')]],
'Affinity' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Priority' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Apc' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'UserApc' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Alert' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0x80, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockMutexAbandoned', 3: 'VfDeadlockFastMutex', 4: 'VfDeadlockFastMutexUnsafe', 5: 'VfDeadlockSpinLock', 6: 'VfDeadlockInStackQueuedSpinLock', 7: 'VfDeadlockUnusedSpinLock', 8: 'VfDeadlockEresource', 9: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer', ['void']]],
'ThreadOwner' : [ 0xc, ['pointer', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x10, ['_LIST_ENTRY']],
'HashChainList' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'StackTrace' : [ 0x20, ['array', 8, ['pointer', ['void']]]],
'LastAcquireTrace' : [ 0x40, ['array', 8, ['pointer', ['void']]]],
'LastReleaseTrace' : [ 0x60, ['array', 8, ['pointer', ['void']]]],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'SetMappedFileIoComplete' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CollidedFlush' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 32, native_type='unsigned long')]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x3c, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0xc, ['pointer', ['void']]],
'DirectlyAccessClientToken' : [ 0x10, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x11, ['unsigned char']],
'ServerIsRemote' : [ 0x12, ['unsigned char']],
'ClientTokenControl' : [ 0x14, ['_TOKEN_CONTROL']],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x38, {
'Mutex' : [ 0x0, ['_KGUARDED_MUTEX']],
'PagedPoolAllocationMap' : [ 0x20, ['_RTL_BITMAP']],
'FirstPteForPagedPool' : [ 0x28, ['pointer', ['_MMPTE']]],
'PagedPoolHint' : [ 0x2c, ['unsigned long']],
'PagedPoolCommit' : [ 0x30, ['unsigned long']],
'AllocatedPagedPool' : [ 0x34, ['unsigned long']],
} ],
'_BITMAP_RANGE' : [ 0x20, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x8, ['long long']],
'FirstDirtyPage' : [ 0x10, ['unsigned long']],
'LastDirtyPage' : [ 0x14, ['unsigned long']],
'DirtyPages' : [ 0x18, ['unsigned long']],
'Bitmap' : [ 0x1c, ['pointer', ['unsigned long']]],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x10, {
'SecurityQos' : [ 0x0, ['pointer', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x4, ['pointer', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x8, ['unsigned long']],
'FullCreateOptions' : [ 0xc, ['unsigned long']],
} ],
'_PROC_PERF_DOMAIN' : [ 0x78, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'Master' : [ 0x8, ['pointer', ['_KPRCB']]],
'Members' : [ 0xc, ['_KAFFINITY_EX']],
'FeedbackHandler' : [ 0x18, ['pointer', ['void']]],
'GetFFHThrottleState' : [ 0x1c, ['pointer', ['void']]],
'BoostPolicyHandler' : [ 0x20, ['pointer', ['void']]],
'PerfSelectionHandler' : [ 0x24, ['pointer', ['void']]],
'PerfHandler' : [ 0x28, ['pointer', ['void']]],
'Processors' : [ 0x2c, ['pointer', ['_PROC_PERF_CONSTRAINT']]],
'PerfChangeTime' : [ 0x30, ['unsigned long long']],
'ProcessorCount' : [ 0x38, ['unsigned long']],
'PreviousFrequencyMhz' : [ 0x3c, ['unsigned long']],
'CurrentFrequencyMhz' : [ 0x40, ['unsigned long']],
'PreviousFrequency' : [ 0x44, ['unsigned long']],
'CurrentFrequency' : [ 0x48, ['unsigned long']],
'CurrentPerfContext' : [ 0x4c, ['unsigned long']],
'DesiredFrequency' : [ 0x50, ['unsigned long']],
'MaxFrequency' : [ 0x54, ['unsigned long']],
'MinPerfPercent' : [ 0x58, ['unsigned long']],
'MinThrottlePercent' : [ 0x5c, ['unsigned long']],
'MaxPercent' : [ 0x60, ['unsigned long']],
'MinPercent' : [ 0x64, ['unsigned long']],
'ConstrainedMaxPercent' : [ 0x68, ['unsigned long']],
'ConstrainedMinPercent' : [ 0x6c, ['unsigned long']],
'Coordination' : [ 0x70, ['unsigned char']],
'PerfChangeIntervalCount' : [ 0x74, ['long']],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0x50, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x8, ['pointer', ['void']]],
'Type' : [ 0xc, ['unsigned long']],
'StackTrace' : [ 0x10, ['array', 16, ['pointer', ['void']]]],
} ],
'_TP_NBQ_GUARD' : [ 0x10, {
'GuardLinks' : [ 0x0, ['_LIST_ENTRY']],
'Guards' : [ 0x8, ['array', 2, ['pointer', ['void']]]],
} ],
'_DUMMY_FILE_OBJECT' : [ 0xa0, {
'ObjectHeader' : [ 0x0, ['_OBJECT_HEADER']],
'FileObjectBody' : [ 0x20, ['array', 128, ['unsigned char']]],
} ],
'_POP_TRIGGER_WAIT' : [ 0x20, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x10, ['long']],
'Link' : [ 0x14, ['_LIST_ENTRY']],
'Trigger' : [ 0x1c, ['pointer', ['_POP_ACTION_TRIGGER']]],
} ],
'_RELATION_LIST' : [ 0x14, {
'Count' : [ 0x0, ['unsigned long']],
'TagCount' : [ 0x4, ['unsigned long']],
'FirstLevel' : [ 0x8, ['unsigned long']],
'MaxLevel' : [ 0xc, ['unsigned long']],
'Entries' : [ 0x10, ['array', 1, ['pointer', ['_RELATION_LIST_ENTRY']]]],
} ],
'_IO_TIMER' : [ 0x18, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x4, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0xc, ['pointer', ['void']]],
'Context' : [ 0x10, ['pointer', ['void']]],
'DeviceObject' : [ 0x14, ['pointer', ['_DEVICE_OBJECT']]],
} ],
'_ARBITER_TEST_ALLOCATION_PARAMETERS' : [ 0xc, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x4, ['unsigned long']],
'AllocateFrom' : [ 0x8, ['pointer', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_MI_SPECIAL_POOL' : [ 0x24, {
'PteBase' : [ 0x0, ['pointer', ['_MMPTE']]],
'Lock' : [ 0x4, ['unsigned long']],
'Paged' : [ 0x8, ['_MI_SPECIAL_POOL_PTE_LIST']],
'NonPaged' : [ 0x10, ['_MI_SPECIAL_POOL_PTE_LIST']],
'PagesInUse' : [ 0x18, ['long']],
'SpecialPoolPdes' : [ 0x1c, ['_RTL_BITMAP']],
} ],
'_ARBITER_QUERY_CONFLICT_PARAMETERS' : [ 0x10, {
'PhysicalDeviceObject' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x4, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x8, ['pointer', ['unsigned long']]],
'Conflicts' : [ 0xc, ['pointer', ['pointer', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x10, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x4, ['unsigned long']],
'Run' : [ 0x8, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x4c, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x4, ['_KMUTANT']],
'Lock' : [ 0x24, ['_KGUARDED_MUTEX']],
'List' : [ 0x44, ['_LIST_ENTRY']],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_PO_IRP_MANAGER' : [ 0x10, {
'DeviceIrpQueue' : [ 0x0, ['_PO_IRP_QUEUE']],
'SystemIrpQueue' : [ 0x8, ['_PO_IRP_QUEUE']],
} ],
'_PPM_PERF_STATE' : [ 0x28, {
'Frequency' : [ 0x0, ['unsigned long']],
'Power' : [ 0x4, ['unsigned long']],
'PercentFrequency' : [ 0x8, ['unsigned char']],
'IncreaseLevel' : [ 0x9, ['unsigned char']],
'DecreaseLevel' : [ 0xa, ['unsigned char']],
'Type' : [ 0xb, ['unsigned char']],
'Control' : [ 0x10, ['unsigned long long']],
'Status' : [ 0x18, ['unsigned long long']],
'TotalHitCount' : [ 0x20, ['unsigned long']],
'DesiredCount' : [ 0x24, ['unsigned long']],
} ],
'_PPM_FFH_THROTTLE_STATE_INFO' : [ 0x20, {
'EnableLogging' : [ 0x0, ['unsigned char']],
'MismatchCount' : [ 0x4, ['unsigned long']],
'Initialized' : [ 0x8, ['unsigned char']],
'LastValue' : [ 0x10, ['unsigned long long']],
'LastLogTickCount' : [ 0x18, ['_LARGE_INTEGER']],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_CLIENT_ID64' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['unsigned long long']],
'UniqueThread' : [ 0x8, ['unsigned long long']],
} ],
'_KDPC_DATA' : [ 0x14, {
'DpcListHead' : [ 0x0, ['_LIST_ENTRY']],
'DpcLock' : [ 0x8, ['unsigned long']],
'DpcQueueDepth' : [ 0xc, ['long']],
'DpcCount' : [ 0x10, ['unsigned long']],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_216f' : [ 0x8, {
'UserData' : [ 0x0, ['pointer', ['void']]],
'Owner' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_2171' : [ 0x8, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_216f']],
'Merged' : [ 0x10, ['__unnamed_2171']],
'Attributes' : [ 0x18, ['unsigned char']],
'PublicFlags' : [ 0x19, ['unsigned char']],
'PrivateFlags' : [ 0x1a, ['unsigned short']],
'ListEntry' : [ 0x1c, ['_LIST_ENTRY']],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY' : [ 0xc, {
'ListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Packet' : [ 0x4, ['pointer', ['_IO_MINI_COMPLETION_PACKET_USER']]],
'Lookaside' : [ 0x8, ['pointer', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
} ],
'__unnamed_2179' : [ 0x2, {
'AsUSHORT' : [ 0x0, ['unsigned short']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'PROCESSOR_IDLESTATE_POLICY' : [ 0x20, {
'Revision' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['__unnamed_2179']],
'PolicyCount' : [ 0x4, ['unsigned long']],
'Policy' : [ 0x8, ['array', 3, ['PROCESSOR_IDLESTATE_INFO']]],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x18, {
'ActiveFrame' : [ 0x0, ['pointer', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'FrameListCache' : [ 0x4, ['_LIST_ENTRY']],
'Flags' : [ 0xc, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x10, ['unsigned long']],
'StackId' : [ 0x14, ['unsigned long']],
} ],
'_MSUBSECTION' : [ 0x38, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x4, ['pointer', ['_MMPTE']]],
'NextSubsection' : [ 0x8, ['pointer', ['_SUBSECTION']]],
'NextMappedSubsection' : [ 0x8, ['pointer', ['_MSUBSECTION']]],
'PtesInSubsection' : [ 0xc, ['unsigned long']],
'UnusedPtes' : [ 0x10, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x10, ['pointer', ['_MM_AVL_TABLE']]],
'u' : [ 0x14, ['__unnamed_1ef2']],
'StartingSector' : [ 0x18, ['unsigned long']],
'NumberOfFullSectors' : [ 0x1c, ['unsigned long']],
'u1' : [ 0x20, ['__unnamed_1f80']],
'LeftChild' : [ 0x24, ['pointer', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x28, ['pointer', ['_MMSUBSECTION_NODE']]],
'DereferenceList' : [ 0x2c, ['_LIST_ENTRY']],
'NumberOfMappedViews' : [ 0x34, ['unsigned long']],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'_VIRTUAL_EFI_RUNTIME_SERVICES' : [ 0x38, {
'GetTime' : [ 0x0, ['unsigned long']],
'SetTime' : [ 0x4, ['unsigned long']],
'GetWakeupTime' : [ 0x8, ['unsigned long']],
'SetWakeupTime' : [ 0xc, ['unsigned long']],
'SetVirtualAddressMap' : [ 0x10, ['unsigned long']],
'ConvertPointer' : [ 0x14, ['unsigned long']],
'GetVariable' : [ 0x18, ['unsigned long']],
'GetNextVariableName' : [ 0x1c, ['unsigned long']],
'SetVariable' : [ 0x20, ['unsigned long']],
'GetNextHighMonotonicCount' : [ 0x24, ['unsigned long']],
'ResetSystem' : [ 0x28, ['unsigned long']],
'UpdateCapsule' : [ 0x2c, ['unsigned long']],
'QueryCapsuleCapabilities' : [ 0x30, ['unsigned long']],
'QueryVariableInfo' : [ 0x34, ['unsigned long']],
} ],
'_MI_SPECIAL_POOL_PTE_LIST' : [ 0x8, {
'FreePteHead' : [ 0x0, ['_MMPTE']],
'FreePteTail' : [ 0x4, ['_MMPTE']],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'FastSystemS4' : [ 0x11, ['unsigned char']],
'spare2' : [ 0x12, ['array', 3, ['unsigned char']]],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'spare3' : [ 0x16, ['array', 8, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_218f' : [ 0x4, {
'ImageCommitment' : [ 0x0, ['unsigned long']],
'CreatingProcess' : [ 0x0, ['pointer', ['_EPROCESS']]],
} ],
'__unnamed_2193' : [ 0x4, {
'ImageInformation' : [ 0x0, ['pointer', ['_MI_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer', ['void']]],
} ],
'_SEGMENT' : [ 0x30, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x4, ['unsigned long']],
'SegmentFlags' : [ 0x8, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0xc, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['unsigned long long']],
'ExtendInfo' : [ 0x18, ['pointer', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x18, ['pointer', ['void']]],
'SegmentLock' : [ 0x1c, ['_EX_PUSH_LOCK']],
'u1' : [ 0x20, ['__unnamed_218f']],
'u2' : [ 0x24, ['__unnamed_2193']],
'PrototypePte' : [ 0x28, ['pointer', ['_MMPTE']]],
'ThePtes' : [ 0x2c, ['array', 1, ['_MMPTE']]],
} ],
'_DIAGNOSTIC_CONTEXT' : [ 0x10, {
'CallerType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'Process' : [ 0x4, ['pointer', ['_EPROCESS']]],
'ServiceTag' : [ 0x8, ['unsigned long']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'ReasonSize' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_219c' : [ 0x4, {
'MissedEtwRegistration' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_219e' : [ 0x4, {
'Flags' : [ 0x0, ['__unnamed_219c']],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VF_TARGET_VERIFIED_DRIVER_DATA' : [ 0x90, {
'SuspectDriverEntry' : [ 0x0, ['pointer', ['_VF_SUSPECT_DRIVER_ENTRY']]],
'WMICallback' : [ 0x4, ['pointer', ['void']]],
'EtwHandlesListHead' : [ 0x8, ['_LIST_ENTRY']],
'u1' : [ 0x10, ['__unnamed_219e']],
'Signature' : [ 0x14, ['unsigned long']],
'PoolPageHeaders' : [ 0x18, ['_SLIST_HEADER']],
'PoolTrackers' : [ 0x20, ['_SLIST_HEADER']],
'CurrentPagedPoolAllocations' : [ 0x28, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x2c, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x30, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x34, ['unsigned long']],
'PagedBytes' : [ 0x38, ['unsigned long']],
'NonPagedBytes' : [ 0x3c, ['unsigned long']],
'PeakPagedBytes' : [ 0x40, ['unsigned long']],
'PeakNonPagedBytes' : [ 0x44, ['unsigned long']],
'RaiseIrqls' : [ 0x48, ['unsigned long']],
'AcquireSpinLocks' : [ 0x4c, ['unsigned long']],
'SynchronizeExecutions' : [ 0x50, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x54, ['unsigned long']],
'AllocationsFailed' : [ 0x58, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x5c, ['unsigned long']],
'LockedBytes' : [ 0x60, ['unsigned long']],
'PeakLockedBytes' : [ 0x64, ['unsigned long']],
'MappedLockedBytes' : [ 0x68, ['unsigned long']],
'PeakMappedLockedBytes' : [ 0x6c, ['unsigned long']],
'MappedIoSpaceBytes' : [ 0x70, ['unsigned long']],
'PeakMappedIoSpaceBytes' : [ 0x74, ['unsigned long']],
'PagesForMdlBytes' : [ 0x78, ['unsigned long']],
'PeakPagesForMdlBytes' : [ 0x7c, ['unsigned long']],
'ContiguousMemoryBytes' : [ 0x80, ['unsigned long']],
'PeakContiguousMemoryBytes' : [ 0x84, ['unsigned long']],
'ContiguousMemoryListHead' : [ 0x88, ['_LIST_ENTRY']],
} ],
'_PCAT_FIRMWARE_INFORMATION' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x58, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'UlongFlags' : [ 0x0, ['unsigned long']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'SequentialReadCount' : [ 0x30, ['unsigned long']],
'ReadAheadLength' : [ 0x34, ['unsigned long']],
'ReadAheadOffset' : [ 0x38, ['_LARGE_INTEGER']],
'ReadAheadBeyondLastByte' : [ 0x40, ['_LARGE_INTEGER']],
'ReadAheadSpinLock' : [ 0x48, ['unsigned long']],
'PrivateLinks' : [ 0x4c, ['_LIST_ENTRY']],
'ReadAheadWorkItem' : [ 0x54, ['pointer', ['void']]],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'Spare' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'UserFlags' : [ 0x34, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'VirtControlFlags' : [ 0x34, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'Debug' : [ 0x34, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['wchar']]],
} ],
'_TPM_BOOT_ENTROPY_LDR_RESULT' : [ 0x48, {
'Policy' : [ 0x0, ['unsigned long long']],
'ResultCode' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'TpmBootEntropyStructureUninitialized', 1: 'TpmBootEntropyDisabledByPolicy', 2: 'TpmBootEntropyNoTpmFound', 3: 'TpmBootEntropyTpmError', 4: 'TpmBootEntropySuccess'})]],
'ResultStatus' : [ 0xc, ['long']],
'Time' : [ 0x10, ['unsigned long long']],
'EntropyLength' : [ 0x18, ['unsigned long']],
'EntropyData' : [ 0x1c, ['array', 40, ['unsigned char']]],
} ],
'_RTL_HANDLE_TABLE' : [ 0x20, {
'MaximumNumberOfHandles' : [ 0x0, ['unsigned long']],
'SizeOfHandleTableEntry' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['array', 2, ['unsigned long']]],
'FreeHandles' : [ 0x10, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'CommittedHandles' : [ 0x14, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'UnCommittedHandles' : [ 0x18, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
'MaxReservedHandles' : [ 0x1c, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_PTE_TRACKER' : [ 0x30, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Mdl' : [ 0x8, ['pointer', ['_MDL']]],
'Count' : [ 0xc, ['unsigned long']],
'SystemVa' : [ 0x10, ['pointer', ['void']]],
'StartVa' : [ 0x14, ['pointer', ['void']]],
'Offset' : [ 0x18, ['unsigned long']],
'Length' : [ 0x1c, ['unsigned long']],
'Page' : [ 0x20, ['unsigned long']],
'IoMapping' : [ 0x24, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Matched' : [ 0x24, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'CacheAttribute' : [ 0x24, ['BitField', dict(start_bit = 2, end_bit = 4, native_type='unsigned long')]],
'Spare' : [ 0x24, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'CallingAddress' : [ 0x28, ['pointer', ['void']]],
'CallersCaller' : [ 0x2c, ['pointer', ['void']]],
} ],
'_KTHREAD_COUNTERS' : [ 0x1a8, {
'WaitReasonBitMap' : [ 0x0, ['unsigned long long']],
'UserData' : [ 0x8, ['pointer', ['_THREAD_PERFORMANCE_DATA']]],
'Flags' : [ 0xc, ['unsigned long']],
'ContextSwitches' : [ 0x10, ['unsigned long']],
'CycleTimeBias' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'HwCounter' : [ 0x28, ['array', 16, ['_COUNTER_READING']]],
} ],
'_SHARED_CACHE_MAP_LIST_CURSOR' : [ 0xc, {
'SharedCacheMapLinks' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned char']],
'KdSecondaryVersion' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_HMAP_ENTRY' : [ 0x10, {
'BlockAddress' : [ 0x0, ['unsigned long']],
'BinAddress' : [ 0x4, ['unsigned long']],
'CmView' : [ 0x8, ['pointer', ['_CM_VIEW_OF_FILE']]],
'MemAlloc' : [ 0xc, ['unsigned long']],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x10, {
'HashLink' : [ 0x0, ['pointer', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x4, ['unsigned short']],
'Atom' : [ 0x6, ['unsigned short']],
'ReferenceCount' : [ 0x8, ['unsigned short']],
'Flags' : [ 0xa, ['unsigned char']],
'NameLength' : [ 0xb, ['unsigned char']],
'Name' : [ 0xc, ['array', 1, ['wchar']]],
} ],
'_TXN_PARAMETER_BLOCK' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'TxFsContext' : [ 0x2, ['unsigned short']],
'TransactionObject' : [ 0x4, ['pointer', ['void']]],
} ],
'_LOADER_PERFORMANCE_DATA' : [ 0x10, {
'StartTime' : [ 0x0, ['unsigned long long']],
'EndTime' : [ 0x8, ['unsigned long long']],
} ],
'_PNP_DEVICE_ACTION_ENTRY' : [ 0x20, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'RequestType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'AssignResources', 1: 'ClearDeviceProblem', 2: 'ClearProblem', 3: 'ClearEjectProblem', 4: 'HaltDevice', 5: 'QueryPowerRelations', 6: 'Rebalance', 7: 'ReenumerateBootDevices', 8: 'ReenumerateDeviceOnly', 9: 'ReenumerateDeviceTree', 10: 'ReenumerateRootDevices', 11: 'RequeryDeviceState', 12: 'ResetDevice', 13: 'ResourceRequirementsChanged', 14: 'RestartEnumeration', 15: 'SetDeviceProblem', 16: 'StartDevice', 17: 'StartSystemDevicesPass0', 18: 'StartSystemDevicesPass1'})]],
'ReorderingBarrier' : [ 0x10, ['unsigned char']],
'RequestArgument' : [ 0x14, ['unsigned long']],
'CompletionEvent' : [ 0x18, ['pointer', ['_KEVENT']]],
'CompletionStatus' : [ 0x1c, ['pointer', ['long']]],
} ],
'_COUNTER_READING' : [ 0x18, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PMCCounter', 1: 'MaxHardwareCounterType'})]],
'Index' : [ 0x4, ['unsigned long']],
'Start' : [ 0x8, ['unsigned long long']],
'Total' : [ 0x10, ['unsigned long long']],
} ],
'_MMSESSION' : [ 0x38, {
'SystemSpaceViewLock' : [ 0x0, ['_KGUARDED_MUTEX']],
'SystemSpaceViewLockPointer' : [ 0x20, ['pointer', ['_KGUARDED_MUTEX']]],
'SystemSpaceViewTable' : [ 0x24, ['pointer', ['_MMVIEW']]],
'SystemSpaceHashSize' : [ 0x28, ['unsigned long']],
'SystemSpaceHashEntries' : [ 0x2c, ['unsigned long']],
'SystemSpaceHashKey' : [ 0x30, ['unsigned long']],
'BitmapFailures' : [ 0x34, ['unsigned long']],
} ],
'_ETW_REG_ENTRY' : [ 0x2c, {
'RegList' : [ 0x0, ['_LIST_ENTRY']],
'GuidEntry' : [ 0x8, ['pointer', ['_ETW_GUID_ENTRY']]],
'Index' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned short']],
'EnableMask' : [ 0x10, ['unsigned char']],
'SessionId' : [ 0x14, ['unsigned long']],
'ReplyQueue' : [ 0x14, ['pointer', ['_ETW_REPLY_QUEUE']]],
'ReplySlot' : [ 0x14, ['array', 4, ['pointer', ['_ETW_REG_ENTRY']]]],
'Process' : [ 0x24, ['pointer', ['_EPROCESS']]],
'Callback' : [ 0x24, ['pointer', ['void']]],
'CallbackContext' : [ 0x28, ['pointer', ['void']]],
} ],
'_LPCP_PORT_OBJECT' : [ 0xa4, {
'ConnectionPort' : [ 0x0, ['pointer', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x4, ['pointer', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x8, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x18, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x20, ['pointer', ['void']]],
'ServerSectionBase' : [ 0x24, ['pointer', ['void']]],
'PortContext' : [ 0x28, ['pointer', ['void']]],
'ClientThread' : [ 0x2c, ['pointer', ['_ETHREAD']]],
'SecurityQos' : [ 0x30, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x3c, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0x78, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0x80, ['_LIST_ENTRY']],
'ServerProcess' : [ 0x88, ['pointer', ['_EPROCESS']]],
'MappingProcess' : [ 0x88, ['pointer', ['_EPROCESS']]],
'MaxMessageLength' : [ 0x8c, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0x8e, ['unsigned short']],
'Flags' : [ 0x90, ['unsigned long']],
'WaitEvent' : [ 0x94, ['_KEVENT']],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x8, ['unsigned long']],
'Alternatives' : [ 0xc, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x18, ['unsigned long']],
'WorkSpace' : [ 0x1c, ['long']],
'InterfaceType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x24, ['unsigned long']],
'BusNumber' : [ 0x28, ['unsigned long']],
'Assignment' : [ 0x2c, ['pointer', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x30, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0x1a8, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SpinLock' : [ 0x8, ['unsigned long']],
'Thread' : [ 0xc, ['pointer', ['_KTHREAD']]],
'AbortEvent' : [ 0x10, ['pointer', ['_KEVENT']]],
'ReadySemaphore' : [ 0x14, ['pointer', ['_KSEMAPHORE']]],
'FinishedSemaphore' : [ 0x18, ['pointer', ['_KSEMAPHORE']]],
'GetNewDeviceList' : [ 0x1c, ['unsigned char']],
'Order' : [ 0x20, ['_PO_DEVICE_NOTIFY_ORDER']],
'Pending' : [ 0x190, ['_LIST_ENTRY']],
'Status' : [ 0x198, ['long']],
'FailedDevice' : [ 0x19c, ['pointer', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x1a0, ['unsigned char']],
'Cancelled' : [ 0x1a1, ['unsigned char']],
'IgnoreErrors' : [ 0x1a2, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x1a3, ['unsigned char']],
'TimeRefreshLockAcquired' : [ 0x1a4, ['unsigned char']],
} ],
'_SEGMENT_FLAGS' : [ 0x4, {
'TotalNumberOfPtes4132' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long')]],
'ExtraSharedWowSubsections' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WatchProto' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'WriteCombined' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'FloppyMedia' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'DefaultProtectionMask' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 22, native_type='unsigned long')]],
'Binary32' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'ContainsDebug' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_VF_KE_CRITICAL_REGION_TRACE' : [ 0x20, {
'Thread' : [ 0x0, ['pointer', ['_ETHREAD']]],
'StackTrace' : [ 0x4, ['array', 7, ['pointer', ['void']]]],
} ],
'_DIAGNOSTIC_BUFFER' : [ 0x18, {
'Size' : [ 0x0, ['unsigned long']],
'CallerType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'KernelRequester', 1: 'UserProcessRequester', 2: 'UserSharedServiceRequester'})]],
'ProcessImageNameOffset' : [ 0x8, ['unsigned long']],
'ProcessId' : [ 0xc, ['unsigned long']],
'ServiceTag' : [ 0x10, ['unsigned long']],
'DeviceDescriptionOffset' : [ 0x8, ['unsigned long']],
'DevicePathOffset' : [ 0xc, ['unsigned long']],
'ReasonOffset' : [ 0x14, ['unsigned long']],
} ],
'_EX_WORK_QUEUE' : [ 0x3c, {
'WorkerQueue' : [ 0x0, ['_KQUEUE']],
'DynamicThreadCount' : [ 0x28, ['unsigned long']],
'WorkItemsProcessed' : [ 0x2c, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x30, ['unsigned long']],
'QueueDepthLastPass' : [ 0x34, ['unsigned long']],
'Info' : [ 0x38, ['EX_QUEUE_WORKER_INFO']],
} ],
'_CLIENT_ID32' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['unsigned long']],
'UniqueThread' : [ 0x4, ['unsigned long']],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x20, {
'Thread' : [ 0x0, ['pointer', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x4, ['pointer', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x8, ['pointer', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0xc, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0xc, ['_LIST_ENTRY']],
'NodeCount' : [ 0x14, ['unsigned long']],
'PagingCount' : [ 0x18, ['unsigned long']],
'ThreadUsesEresources' : [ 0x1c, ['unsigned char']],
} ],
'_PPM_IDLE_STATE' : [ 0x40, {
'DomainMembers' : [ 0x0, ['_KAFFINITY_EX']],
'IdleCheck' : [ 0xc, ['pointer', ['void']]],
'IdleHandler' : [ 0x10, ['pointer', ['void']]],
'HvConfig' : [ 0x18, ['unsigned long long']],
'Context' : [ 0x20, ['pointer', ['void']]],
'Latency' : [ 0x24, ['unsigned long']],
'Power' : [ 0x28, ['unsigned long']],
'TimeCheck' : [ 0x2c, ['unsigned long']],
'StateFlags' : [ 0x30, ['unsigned long']],
'PromotePercent' : [ 0x34, ['unsigned char']],
'DemotePercent' : [ 0x35, ['unsigned char']],
'PromotePercentBase' : [ 0x36, ['unsigned char']],
'DemotePercentBase' : [ 0x37, ['unsigned char']],
'StateType' : [ 0x38, ['unsigned char']],
} ],
'_KRESOURCEMANAGER' : [ 0x154, {
'NotificationAvailable' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x10, ['unsigned long']],
'State' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'KResourceManagerUninitialized', 1: 'KResourceManagerOffline', 2: 'KResourceManagerOnline'})]],
'Flags' : [ 0x18, ['unsigned long']],
'Mutex' : [ 0x1c, ['_KMUTANT']],
'NamespaceLink' : [ 0x3c, ['_KTMOBJECT_NAMESPACE_LINK']],
'RmId' : [ 0x50, ['_GUID']],
'NotificationQueue' : [ 0x60, ['_KQUEUE']],
'NotificationMutex' : [ 0x88, ['_KMUTANT']],
'EnlistmentHead' : [ 0xa8, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0xb0, ['unsigned long']],
'NotificationRoutine' : [ 0xb4, ['pointer', ['void']]],
'Key' : [ 0xb8, ['pointer', ['void']]],
'ProtocolListHead' : [ 0xbc, ['_LIST_ENTRY']],
'PendingPropReqListHead' : [ 0xc4, ['_LIST_ENTRY']],
'CRMListEntry' : [ 0xcc, ['_LIST_ENTRY']],
'Tm' : [ 0xd4, ['pointer', ['_KTM']]],
'Description' : [ 0xd8, ['_UNICODE_STRING']],
'Enlistments' : [ 0xe0, ['_KTMOBJECT_NAMESPACE']],
'CompletionBinding' : [ 0x140, ['_KRESOURCEMANAGER_COMPLETION_BINDING']],
} ],
'_GDI_TEB_BATCH64' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'__unnamed_2215' : [ 0x4, {
'NodeSize' : [ 0x0, ['unsigned long']],
'UseLookaside' : [ 0x0, ['unsigned long']],
} ],
'_VF_AVL_TREE' : [ 0x40, {
'Lock' : [ 0x0, ['long']],
'NodeToFree' : [ 0x4, ['pointer', ['void']]],
'NodeRangeSize' : [ 0x8, ['unsigned long']],
'NodeCount' : [ 0xc, ['unsigned long']],
'Tables' : [ 0x10, ['pointer', ['_VF_AVL_TABLE']]],
'TablesNo' : [ 0x14, ['unsigned long']],
'u1' : [ 0x18, ['__unnamed_2215']],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_WHEA_MEMORY_ERROR_SECTION_VALIDBITS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PhysicalAddress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'PhysicalAddressMask' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Node' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Card' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Module' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Bank' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'Device' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Row' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Column' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'BitPosition' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'RequesterId' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'ResponderId' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'TargetId' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 64, native_type='unsigned long long')]],
'ValidBits' : [ 0x0, ['unsigned long long']],
} ],
'_RELATION_LIST_ENTRY' : [ 0xc, {
'Count' : [ 0x0, ['unsigned long']],
'MaxCount' : [ 0x4, ['unsigned long']],
'Devices' : [ 0x8, ['array', 1, ['pointer', ['_DEVICE_OBJECT']]]],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x40e0, {
'TimeAcquire' : [ 0x0, ['long long']],
'TimeRelease' : [ 0x8, ['long long']],
'ResourceDatabase' : [ 0x10, ['pointer', ['_LIST_ENTRY']]],
'ResourceDatabaseCount' : [ 0x14, ['unsigned long']],
'ResourceAddressRange' : [ 0x18, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'ThreadDatabase' : [ 0x2010, ['pointer', ['_LIST_ENTRY']]],
'ThreadDatabaseCount' : [ 0x2014, ['unsigned long']],
'ThreadAddressRange' : [ 0x2018, ['array', 1023, ['_VF_ADDRESS_RANGE']]],
'AllocationFailures' : [ 0x4010, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x4014, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x4018, ['unsigned long']],
'NodesSearched' : [ 0x401c, ['unsigned long']],
'MaxNodesSearched' : [ 0x4020, ['unsigned long']],
'SequenceNumber' : [ 0x4024, ['unsigned long']],
'RecursionDepthLimit' : [ 0x4028, ['unsigned long']],
'SearchedNodesLimit' : [ 0x402c, ['unsigned long']],
'DepthLimitHits' : [ 0x4030, ['unsigned long']],
'SearchLimitHits' : [ 0x4034, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x4038, ['unsigned long']],
'OutOfOrderReleases' : [ 0x403c, ['unsigned long']],
'NodesReleasedOutOfOrder' : [ 0x4040, ['unsigned long']],
'TotalReleases' : [ 0x4044, ['unsigned long']],
'RootNodesDeleted' : [ 0x4048, ['unsigned long']],
'ForgetHistoryCounter' : [ 0x404c, ['unsigned long']],
'Instigator' : [ 0x4050, ['pointer', ['void']]],
'NumberOfParticipants' : [ 0x4054, ['unsigned long']],
'Participant' : [ 0x4058, ['array', 32, ['pointer', ['_VI_DEADLOCK_NODE']]]],
'ChildrenCountWatermark' : [ 0x40d8, ['long']],
} ],
'_KTM' : [ 0x238, {
'cookie' : [ 0x0, ['unsigned long']],
'Mutex' : [ 0x4, ['_KMUTANT']],
'State' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'KKtmUninitialized', 1: 'KKtmInitialized', 2: 'KKtmRecovering', 3: 'KKtmOnline', 4: 'KKtmRecoveryFailed', 5: 'KKtmOffline'})]],
'NamespaceLink' : [ 0x28, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmIdentity' : [ 0x3c, ['_GUID']],
'Flags' : [ 0x4c, ['unsigned long']],
'VolatileFlags' : [ 0x50, ['unsigned long']],
'LogFileName' : [ 0x54, ['_UNICODE_STRING']],
'LogFileObject' : [ 0x5c, ['pointer', ['_FILE_OBJECT']]],
'MarshallingContext' : [ 0x60, ['pointer', ['void']]],
'LogManagementContext' : [ 0x64, ['pointer', ['void']]],
'Transactions' : [ 0x68, ['_KTMOBJECT_NAMESPACE']],
'ResourceManagers' : [ 0xc8, ['_KTMOBJECT_NAMESPACE']],
'LsnOrderedMutex' : [ 0x128, ['_KMUTANT']],
'LsnOrderedList' : [ 0x148, ['_LIST_ENTRY']],
'CommitVirtualClock' : [ 0x150, ['_LARGE_INTEGER']],
'CommitVirtualClockMutex' : [ 0x158, ['_FAST_MUTEX']],
'BaseLsn' : [ 0x178, ['_CLS_LSN']],
'CurrentReadLsn' : [ 0x180, ['_CLS_LSN']],
'LastRecoveredLsn' : [ 0x188, ['_CLS_LSN']],
'TmRmHandle' : [ 0x190, ['pointer', ['void']]],
'TmRm' : [ 0x194, ['pointer', ['_KRESOURCEMANAGER']]],
'LogFullNotifyEvent' : [ 0x198, ['_KEVENT']],
'CheckpointWorkItem' : [ 0x1a8, ['_WORK_QUEUE_ITEM']],
'CheckpointTargetLsn' : [ 0x1b8, ['_CLS_LSN']],
'LogFullCompletedWorkItem' : [ 0x1c0, ['_WORK_QUEUE_ITEM']],
'LogWriteResource' : [ 0x1d0, ['_ERESOURCE']],
'LogFlags' : [ 0x208, ['unsigned long']],
'LogFullStatus' : [ 0x20c, ['long']],
'RecoveryStatus' : [ 0x210, ['long']],
'LastCheckBaseLsn' : [ 0x218, ['_CLS_LSN']],
'RestartOrderedList' : [ 0x220, ['_LIST_ENTRY']],
'OfflineWorkItem' : [ 0x228, ['_WORK_QUEUE_ITEM']],
} ],
'_CONFIGURATION_COMPONENT' : [ 0x24, {
'Class' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SystemClass', 1: 'ProcessorClass', 2: 'CacheClass', 3: 'AdapterClass', 4: 'ControllerClass', 5: 'PeripheralClass', 6: 'MemoryClass', 7: 'MaximumClass'})]],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ArcSystem', 1: 'CentralProcessor', 2: 'FloatingPointProcessor', 3: 'PrimaryIcache', 4: 'PrimaryDcache', 5: 'SecondaryIcache', 6: 'SecondaryDcache', 7: 'SecondaryCache', 8: 'EisaAdapter', 9: 'TcAdapter', 10: 'ScsiAdapter', 11: 'DtiAdapter', 12: 'MultiFunctionAdapter', 13: 'DiskController', 14: 'TapeController', 15: 'CdromController', 16: 'WormController', 17: 'SerialController', 18: 'NetworkController', 19: 'DisplayController', 20: 'ParallelController', 21: 'PointerController', 22: 'KeyboardController', 23: 'AudioController', 24: 'OtherController', 25: 'DiskPeripheral', 26: 'FloppyDiskPeripheral', 27: 'TapePeripheral', 28: 'ModemPeripheral', 29: 'MonitorPeripheral', 30: 'PrinterPeripheral', 31: 'PointerPeripheral', 32: 'KeyboardPeripheral', 33: 'TerminalPeripheral', 34: 'OtherPeripheral', 35: 'LinePeripheral', 36: 'NetworkPeripheral', 37: 'SystemMemory', 38: 'DockingInformation', 39: 'RealModeIrqRoutingTable', 40: 'RealModePCIEnumeration', 41: 'MaximumType'})]],
'Flags' : [ 0x8, ['_DEVICE_FLAGS']],
'Version' : [ 0xc, ['unsigned short']],
'Revision' : [ 0xe, ['unsigned short']],
'Key' : [ 0x10, ['unsigned long']],
'AffinityMask' : [ 0x14, ['unsigned long']],
'Group' : [ 0x14, ['unsigned short']],
'GroupIndex' : [ 0x16, ['unsigned short']],
'ConfigurationDataLength' : [ 0x18, ['unsigned long']],
'IdentifierLength' : [ 0x1c, ['unsigned long']],
'Identifier' : [ 0x20, ['pointer', ['unsigned char']]],
} ],
'_VF_BTS_RECORD' : [ 0xc, {
'JumpedFrom' : [ 0x0, ['pointer', ['void']]],
'JumpedTo' : [ 0x4, ['pointer', ['void']]],
'Unused1' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Predicted' : [ 0x8, ['BitField', dict(start_bit = 3, end_bit = 7, native_type='unsigned long')]],
'Unused2' : [ 0x8, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
} ],
'_KTRANSACTION' : [ 0x1e0, {
'OutcomeEvent' : [ 0x0, ['_KEVENT']],
'cookie' : [ 0x10, ['unsigned long']],
'Mutex' : [ 0x14, ['_KMUTANT']],
'TreeTx' : [ 0x34, ['pointer', ['_KTRANSACTION']]],
'GlobalNamespaceLink' : [ 0x38, ['_KTMOBJECT_NAMESPACE_LINK']],
'TmNamespaceLink' : [ 0x4c, ['_KTMOBJECT_NAMESPACE_LINK']],
'UOW' : [ 0x60, ['_GUID']],
'State' : [ 0x70, ['Enumeration', dict(target = 'long', choices = {0: 'KTransactionUninitialized', 1: 'KTransactionActive', 2: 'KTransactionPreparing', 3: 'KTransactionPrepared', 4: 'KTransactionInDoubt', 5: 'KTransactionCommitted', 6: 'KTransactionAborted', 7: 'KTransactionDelegated', 8: 'KTransactionPrePreparing', 9: 'KTransactionForgotten', 10: 'KTransactionRecovering', 11: 'KTransactionPrePrepared'})]],
'Flags' : [ 0x74, ['unsigned long']],
'EnlistmentHead' : [ 0x78, ['_LIST_ENTRY']],
'EnlistmentCount' : [ 0x80, ['unsigned long']],
'RecoverableEnlistmentCount' : [ 0x84, ['unsigned long']],
'PrePrepareRequiredEnlistmentCount' : [ 0x88, ['unsigned long']],
'PrepareRequiredEnlistmentCount' : [ 0x8c, ['unsigned long']],
'OutcomeRequiredEnlistmentCount' : [ 0x90, ['unsigned long']],
'PendingResponses' : [ 0x94, ['unsigned long']],
'SuperiorEnlistment' : [ 0x98, ['pointer', ['_KENLISTMENT']]],
'LastLsn' : [ 0xa0, ['_CLS_LSN']],
'PromotedEntry' : [ 0xa8, ['_LIST_ENTRY']],
'PromoterTransaction' : [ 0xb0, ['pointer', ['_KTRANSACTION']]],
'PromotePropagation' : [ 0xb4, ['pointer', ['void']]],
'IsolationLevel' : [ 0xb8, ['unsigned long']],
'IsolationFlags' : [ 0xbc, ['unsigned long']],
'Timeout' : [ 0xc0, ['_LARGE_INTEGER']],
'Description' : [ 0xc8, ['_UNICODE_STRING']],
'RollbackThread' : [ 0xd0, ['pointer', ['_KTHREAD']]],
'RollbackWorkItem' : [ 0xd4, ['_WORK_QUEUE_ITEM']],
'RollbackDpc' : [ 0xe4, ['_KDPC']],
'RollbackTimer' : [ 0x108, ['_KTIMER']],
'LsnOrderedEntry' : [ 0x130, ['_LIST_ENTRY']],
'Outcome' : [ 0x138, ['Enumeration', dict(target = 'long', choices = {0: 'KTxOutcomeUninitialized', 1: 'KTxOutcomeUndetermined', 2: 'KTxOutcomeCommitted', 3: 'KTxOutcomeAborted', 4: 'KTxOutcomeUnavailable'})]],
'Tm' : [ 0x13c, ['pointer', ['_KTM']]],
'CommitReservation' : [ 0x140, ['long long']],
'TransactionHistory' : [ 0x148, ['array', 10, ['_KTRANSACTION_HISTORY']]],
'TransactionHistoryCount' : [ 0x198, ['unsigned long']],
'DTCPrivateInformation' : [ 0x19c, ['pointer', ['void']]],
'DTCPrivateInformationLength' : [ 0x1a0, ['unsigned long']],
'DTCPrivateInformationMutex' : [ 0x1a4, ['_KMUTANT']],
'PromotedTxSelfHandle' : [ 0x1c4, ['pointer', ['void']]],
'PendingPromotionCount' : [ 0x1c8, ['unsigned long']],
'PromotionCompletedEvent' : [ 0x1cc, ['_KEVENT']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'PagePriority' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 21, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_KCB_UOW' : [ 0x38, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBLock' : [ 0x8, ['pointer', ['_CM_INTENT_LOCK']]],
'KeyLock' : [ 0xc, ['pointer', ['_CM_INTENT_LOCK']]],
'KCBListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x18, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'Transaction' : [ 0x1c, ['pointer', ['_CM_TRANS']]],
'UoWState' : [ 0x20, ['unsigned long']],
'ActionType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'UoWAddThisKey', 1: 'UoWAddChildKey', 2: 'UoWDeleteThisKey', 3: 'UoWDeleteChildKey', 4: 'UoWSetValueNew', 5: 'UoWSetValueExisting', 6: 'UoWDeleteValue', 7: 'UoWSetKeyUserFlags', 8: 'UoWSetLastWriteTime', 9: 'UoWSetSecurityDescriptor', 10: 'UoWRenameSubKey', 11: 'UoWRenameOldSubKey', 12: 'UoWRenameNewSubKey', 13: 'UoWIsolation', 14: 'UoWInvalid'})]],
'StorageType' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'Stable', 1: 'Volatile', 2: 'InvalidStorage'})]],
'ChildKCB' : [ 0x30, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'VolatileKeyCell' : [ 0x30, ['unsigned long']],
'OldValueCell' : [ 0x30, ['unsigned long']],
'NewValueCell' : [ 0x34, ['unsigned long']],
'UserFlags' : [ 0x30, ['unsigned long']],
'LastWriteTime' : [ 0x30, ['_LARGE_INTEGER']],
'TxSecurityCell' : [ 0x30, ['unsigned long']],
'OldChildKCB' : [ 0x30, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'NewChildKCB' : [ 0x34, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'OtherChildKCB' : [ 0x30, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'ThisVolatileKeyCell' : [ 0x34, ['unsigned long']],
} ],
'_KPROCESSOR_STATE' : [ 0x320, {
'ContextFrame' : [ 0x0, ['_CONTEXT']],
'SpecialRegisters' : [ 0x2cc, ['_KSPECIAL_REGISTERS']],
} ],
'_MMPTE_TRANSITION' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_VF_WATCHDOG_IRP' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Irp' : [ 0x8, ['pointer', ['_IRP']]],
'DueTickCount' : [ 0xc, ['unsigned long']],
'Inserted' : [ 0x10, ['unsigned char']],
'TrackedStackLocation' : [ 0x11, ['unsigned char']],
'CancelTimeoutTicks' : [ 0x12, ['unsigned short']],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'GroupAssigned' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'GroupCommitted' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'GroupAssignmentFixed' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Fill' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_2270' : [ 0x8, {
'Head' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long long')]],
'Tail' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 48, native_type='unsigned long long')]],
'ActiveThreadCount' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_2272' : [ 0x8, {
's1' : [ 0x0, ['__unnamed_2270']],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_ALPC_COMPLETION_LIST_STATE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_2272']],
} ],
'_PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA' : [ 0x8, {
'CapturedCpuShareWeight' : [ 0x0, ['unsigned long']],
'CapturedTotalWeight' : [ 0x4, ['unsigned long']],
'CombinedData' : [ 0x0, ['long long']],
} ],
'_CM_NAME_HASH' : [ 0xc, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x4, ['pointer', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x8, ['unsigned short']],
'Name' : [ 0xa, ['array', 1, ['wchar']]],
} ],
'_PROC_IDLE_STATE_BUCKET' : [ 0x20, {
'TotalTime' : [ 0x0, ['unsigned long long']],
'MinTime' : [ 0x8, ['unsigned long long']],
'MaxTime' : [ 0x10, ['unsigned long long']],
'Count' : [ 0x18, ['unsigned long']],
} ],
'_MMSECURE_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 12, native_type='unsigned long')]],
} ],
'_PO_IRP_QUEUE' : [ 0x8, {
'CurrentIrp' : [ 0x0, ['pointer', ['_IRP']]],
'PendingIrpList' : [ 0x4, ['pointer', ['_IRP']]],
} ],
'__unnamed_2285' : [ 0x4, {
'Active' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ReleasedOutOfOrder' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SequenceNumber' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'Whole' : [ 0x0, ['unsigned long']],
} ],
'_VI_DEADLOCK_NODE' : [ 0x6c, {
'Parent' : [ 0x0, ['pointer', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x4, ['_LIST_ENTRY']],
'SiblingsList' : [ 0xc, ['_LIST_ENTRY']],
'ResourceList' : [ 0x14, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x14, ['_LIST_ENTRY']],
'Root' : [ 0x1c, ['pointer', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x20, ['pointer', ['_VI_DEADLOCK_THREAD']]],
'u1' : [ 0x24, ['__unnamed_2285']],
'ChildrenCount' : [ 0x28, ['long']],
'StackTrace' : [ 0x2c, ['array', 8, ['pointer', ['void']]]],
'ParentStackTrace' : [ 0x4c, ['array', 8, ['pointer', ['void']]]],
} ],
'PROCESSOR_IDLESTATE_INFO' : [ 0x8, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemotePercent' : [ 0x4, ['unsigned char']],
'PromotePercent' : [ 0x5, ['unsigned char']],
'Spare' : [ 0x6, ['array', 2, ['unsigned char']]],
} ],
'_KTMOBJECT_NAMESPACE' : [ 0x60, {
'Table' : [ 0x0, ['_RTL_AVL_TABLE']],
'Mutex' : [ 0x38, ['_KMUTANT']],
'LinksOffset' : [ 0x58, ['unsigned short']],
'GuidOffset' : [ 0x5a, ['unsigned short']],
'Expired' : [ 0x5c, ['unsigned char']],
} ],
'_LPCP_PORT_QUEUE' : [ 0x10, {
'NonPagedPortQueue' : [ 0x0, ['pointer', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x4, ['pointer', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_CM_KEY_REFERENCE' : [ 0x8, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x4, ['pointer', ['_HHIVE']]],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x8, {
'Stream' : [ 0x0, ['pointer', ['void']]],
'Detail' : [ 0x4, ['unsigned long']],
} ],
'_VF_ADDRESS_RANGE' : [ 0x8, {
'Start' : [ 0x0, ['pointer', ['unsigned char']]],
'End' : [ 0x4, ['pointer', ['unsigned char']]],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x18, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'DosDeviceDriveIndex' : [ 0x10, ['unsigned long']],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x18, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x14, ['pointer', ['_LPCP_PORT_OBJECT']]],
} ],
'_KRESOURCEMANAGER_COMPLETION_BINDING' : [ 0x14, {
'NotificationListHead' : [ 0x0, ['_LIST_ENTRY']],
'Port' : [ 0x8, ['pointer', ['void']]],
'Key' : [ 0xc, ['unsigned long']],
'BindingProcess' : [ 0x10, ['pointer', ['_EPROCESS']]],
} ],
'_VF_TRACKER' : [ 0x10, {
'TrackerFlags' : [ 0x0, ['unsigned long']],
'TrackerSize' : [ 0x4, ['unsigned long']],
'TrackerIndex' : [ 0x8, ['unsigned long']],
'TraceDepth' : [ 0xc, ['unsigned long']],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x204, {
'SpinLock' : [ 0x0, ['unsigned long']],
'HashTable' : [ 0x4, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x38, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long long']],
'Alignment' : [ 0x18, ['unsigned long long']],
'Priority' : [ 0x20, ['long']],
'Flags' : [ 0x24, ['unsigned long']],
'Descriptor' : [ 0x28, ['pointer', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x2c, ['array', 3, ['unsigned long']]],
} ],
'_WHEA_ERROR_STATUS' : [ 0x8, {
'ErrorStatus' : [ 0x0, ['unsigned long long']],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'ErrorType' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Address' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long long')]],
'Control' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long long')]],
'Data' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long long')]],
'Responder' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long long')]],
'Requester' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long long')]],
'FirstError' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long long')]],
'Overflow' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WHEA_PERSISTENCE_INFO' : [ 0x8, {
'Signature' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long long')]],
'Length' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 40, native_type='unsigned long long')]],
'Identifier' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 56, native_type='unsigned long long')]],
'Attributes' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 58, native_type='unsigned long long')]],
'DoNotLog' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 59, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 64, native_type='unsigned long long')]],
'AsULONGLONG' : [ 0x0, ['unsigned long long']],
} ],
'_MI_SECTION_IMAGE_INFORMATION' : [ 0x38, {
'ExportedImageInformation' : [ 0x0, ['_SECTION_IMAGE_INFORMATION']],
'InternalImageInformation' : [ 0x30, ['_MI_EXTRA_IMAGE_INFORMATION']],
} ],
'_HEAP_USERDATA_HEADER' : [ 0x10, {
'SFreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'SubSegment' : [ 0x0, ['pointer', ['_HEAP_SUBSEGMENT']]],
'Reserved' : [ 0x4, ['pointer', ['void']]],
'SizeIndex' : [ 0x8, ['unsigned long']],
'Signature' : [ 0xc, ['unsigned long']],
} ],
'_STACK_TABLE' : [ 0x8040, {
'NumStackTraces' : [ 0x0, ['unsigned short']],
'TraceCapacity' : [ 0x2, ['unsigned short']],
'StackTrace' : [ 0x4, ['array', 16, ['pointer', ['_OBJECT_REF_TRACE']]]],
'StackTableHash' : [ 0x44, ['array', 16381, ['unsigned short']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'_DEFERRED_WRITE' : [ 0x24, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x4, ['pointer', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x8, ['unsigned long']],
'DeferredWriteLinks' : [ 0xc, ['_LIST_ENTRY']],
'Event' : [ 0x14, ['pointer', ['_KEVENT']]],
'PostRoutine' : [ 0x18, ['pointer', ['void']]],
'Context1' : [ 0x1c, ['pointer', ['void']]],
'Context2' : [ 0x20, ['pointer', ['void']]],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x4, ['pointer', ['_ARBITER_ORDERING']]],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x30, {
'TransferAddress' : [ 0x0, ['pointer', ['void']]],
'ZeroBits' : [ 0x4, ['unsigned long']],
'MaximumStackSize' : [ 0x8, ['unsigned long']],
'CommittedStackSize' : [ 0xc, ['unsigned long']],
'SubSystemType' : [ 0x10, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x14, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x16, ['unsigned short']],
'SubSystemVersion' : [ 0x14, ['unsigned long']],
'GpValue' : [ 0x18, ['unsigned long']],
'ImageCharacteristics' : [ 0x1c, ['unsigned short']],
'DllCharacteristics' : [ 0x1e, ['unsigned short']],
'Machine' : [ 0x20, ['unsigned short']],
'ImageContainsCode' : [ 0x22, ['unsigned char']],
'ImageFlags' : [ 0x23, ['unsigned char']],
'ComPlusNativeReady' : [ 0x23, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ComPlusILOnly' : [ 0x23, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ImageDynamicallyRelocated' : [ 0x23, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'ImageMappedFlat' : [ 0x23, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Reserved' : [ 0x23, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'LoaderFlags' : [ 0x24, ['unsigned long']],
'ImageFileSize' : [ 0x28, ['unsigned long']],
'CheckSum' : [ 0x2c, ['unsigned long']],
} ],
'_VF_AVL_TABLE' : [ 0x3c, {
'RtlTable' : [ 0x0, ['_RTL_AVL_TABLE']],
'ReservedNode' : [ 0x38, ['pointer', ['_VF_AVL_TREE_NODE']]],
} ],
'_TOKEN_AUDIT_POLICY' : [ 0x1b, {
'PerUserPolicy' : [ 0x0, ['array', 27, ['unsigned char']]],
} ],
'__unnamed_22db' : [ 0x8, {
'EndingOffset' : [ 0x0, ['pointer', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x4, ['pointer', ['pointer', ['_ERESOURCE']]]],
} ],
'__unnamed_22dd' : [ 0x4, {
'ResourceToRelease' : [ 0x0, ['pointer', ['_ERESOURCE']]],
} ],
'__unnamed_22e1' : [ 0x8, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_22e5' : [ 0x8, {
'NotificationType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NotifyTypeCreate', 1: 'NotifyTypeRetired'})]],
'SafeToRecurse' : [ 0x4, ['unsigned char']],
} ],
'__unnamed_22e7' : [ 0x14, {
'Argument1' : [ 0x0, ['pointer', ['void']]],
'Argument2' : [ 0x4, ['pointer', ['void']]],
'Argument3' : [ 0x8, ['pointer', ['void']]],
'Argument4' : [ 0xc, ['pointer', ['void']]],
'Argument5' : [ 0x10, ['pointer', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x14, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_22db']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_22dd']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_22e1']],
'NotifyStreamFileObject' : [ 0x0, ['__unnamed_22e5']],
'Others' : [ 0x0, ['__unnamed_22e7']],
} ],
'_PROFILE_PARAMETER_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'DockingState' : [ 0x4, ['unsigned short']],
'Capabilities' : [ 0x6, ['unsigned short']],
'DockID' : [ 0x8, ['unsigned long']],
'SerialNumber' : [ 0xc, ['unsigned long']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_POP_HIBER_CONTEXT' : [ 0xa0, {
'WriteToFile' : [ 0x0, ['unsigned char']],
'ReserveLoaderMemory' : [ 0x1, ['unsigned char']],
'ReserveFreeMemory' : [ 0x2, ['unsigned char']],
'Reset' : [ 0x3, ['unsigned char']],
'HiberFlags' : [ 0x4, ['unsigned char']],
'WroteHiberFile' : [ 0x5, ['unsigned char']],
'MapFrozen' : [ 0x6, ['unsigned char']],
'MemoryMap' : [ 0x8, ['_RTL_BITMAP']],
'DiscardedMemoryPages' : [ 0x10, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x18, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x20, ['unsigned long']],
'NextCloneRange' : [ 0x24, ['pointer', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x28, ['unsigned long']],
'LoaderMdl' : [ 0x2c, ['pointer', ['_MDL']]],
'AllocatedMdl' : [ 0x30, ['pointer', ['_MDL']]],
'PagesOut' : [ 0x38, ['unsigned long long']],
'IoPages' : [ 0x40, ['pointer', ['void']]],
'IoPagesCount' : [ 0x44, ['unsigned long']],
'CurrentMcb' : [ 0x48, ['pointer', ['void']]],
'DumpStack' : [ 0x4c, ['pointer', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0x50, ['pointer', ['_KPROCESSOR_STATE']]],
'PreferredIoWriteSize' : [ 0x54, ['unsigned long']],
'IoProgress' : [ 0x58, ['unsigned long']],
'HiberVa' : [ 0x5c, ['unsigned long']],
'HiberPte' : [ 0x60, ['_LARGE_INTEGER']],
'Status' : [ 0x68, ['long']],
'MemoryImage' : [ 0x6c, ['pointer', ['PO_MEMORY_IMAGE']]],
'CompressionWorkspace' : [ 0x70, ['pointer', ['void']]],
'CompressedWriteBuffer' : [ 0x74, ['pointer', ['unsigned char']]],
'CompressedWriteBufferSize' : [ 0x78, ['unsigned long']],
'MaxCompressedOutputSize' : [ 0x7c, ['unsigned long']],
'PerformanceStats' : [ 0x80, ['pointer', ['unsigned long']]],
'CompressionBlock' : [ 0x84, ['pointer', ['void']]],
'DmaIO' : [ 0x88, ['pointer', ['void']]],
'TemporaryHeap' : [ 0x8c, ['pointer', ['void']]],
'BootLoaderLogMdl' : [ 0x90, ['pointer', ['_MDL']]],
'FirmwareRuntimeInformationMdl' : [ 0x94, ['pointer', ['_MDL']]],
'ResumeContext' : [ 0x98, ['pointer', ['void']]],
'ResumeContextPages' : [ 0x9c, ['unsigned long']],
} ],
'_OBJECT_REF_TRACE' : [ 0x40, {
'StackTrace' : [ 0x0, ['array', 16, ['pointer', ['void']]]],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x8, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_PCW_COUNTER_INFORMATION' : [ 0x10, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer', ['_UNICODE_STRING']]],
} ],
'_DUMP_STACK_CONTEXT' : [ 0xb0, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0x70, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0x78, ['pointer', ['void']]],
'PointersLength' : [ 0x7c, ['unsigned long']],
'ModulePrefix' : [ 0x80, ['pointer', ['unsigned short']]],
'DriverList' : [ 0x84, ['_LIST_ENTRY']],
'InitMsg' : [ 0x8c, ['_STRING']],
'ProgMsg' : [ 0x94, ['_STRING']],
'DoneMsg' : [ 0x9c, ['_STRING']],
'FileObject' : [ 0xa4, ['pointer', ['void']]],
'UsageType' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x20, {
'ThreadHandle' : [ 0x0, ['pointer', ['void']]],
'ThreadId' : [ 0x4, ['pointer', ['void']]],
'ProcessId' : [ 0x8, ['pointer', ['void']]],
'Code' : [ 0xc, ['unsigned long']],
'Parameter1' : [ 0x10, ['unsigned long']],
'Parameter2' : [ 0x14, ['unsigned long']],
'Parameter3' : [ 0x18, ['unsigned long']],
'Parameter4' : [ 0x1c, ['unsigned long']],
} ],
'_MI_EXTRA_IMAGE_INFORMATION' : [ 0x8, {
'SizeOfHeaders' : [ 0x0, ['unsigned long']],
'SizeOfImage' : [ 0x4, ['unsigned long']],
} ],
'_PCW_MASK_INFORMATION' : [ 0x20, {
'CounterMask' : [ 0x0, ['unsigned long long']],
'InstanceMask' : [ 0x8, ['pointer', ['_UNICODE_STRING']]],
'InstanceId' : [ 0xc, ['unsigned long']],
'CollectMultiple' : [ 0x10, ['unsigned char']],
'Buffer' : [ 0x14, ['pointer', ['_PCW_BUFFER']]],
'CancelEvent' : [ 0x18, ['pointer', ['_KEVENT']]],
} ],
'_RTL_HANDLE_TABLE_ENTRY' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
'NextFree' : [ 0x0, ['pointer', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'__unnamed_230b' : [ 0x10, {
'TestAllocation' : [ 0x0, ['_ARBITER_TEST_ALLOCATION_PARAMETERS']],
'RetestAllocation' : [ 0x0, ['_ARBITER_RETEST_ALLOCATION_PARAMETERS']],
'BootAllocation' : [ 0x0, ['_ARBITER_BOOT_ALLOCATION_PARAMETERS']],
'QueryAllocatedResources' : [ 0x0, ['_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS']],
'QueryConflict' : [ 0x0, ['_ARBITER_QUERY_CONFLICT_PARAMETERS']],
'QueryArbitrate' : [ 0x0, ['_ARBITER_QUERY_ARBITRATE_PARAMETERS']],
'AddReserved' : [ 0x0, ['_ARBITER_ADD_RESERVED_PARAMETERS']],
} ],
'_ARBITER_PARAMETERS' : [ 0x10, {
'Parameters' : [ 0x0, ['__unnamed_230b']],
} ],
'__unnamed_230f' : [ 0x8, {
'idxRecord' : [ 0x0, ['unsigned long']],
'cidContainer' : [ 0x4, ['unsigned long']],
} ],
'_CLS_LSN' : [ 0x8, {
'offset' : [ 0x0, ['__unnamed_230f']],
'ullOffset' : [ 0x0, ['unsigned long long']],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'PO_MEMORY_IMAGE' : [ 0xe0, {
'Signature' : [ 0x0, ['unsigned long']],
'ImageType' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long']],
'PageSize' : [ 0x14, ['unsigned long']],
'SystemTime' : [ 0x18, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x20, ['unsigned long long']],
'FeatureFlags' : [ 0x28, ['unsigned long']],
'HiberFlags' : [ 0x2c, ['unsigned char']],
'spare' : [ 0x2d, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x30, ['unsigned long']],
'HiberVa' : [ 0x34, ['unsigned long']],
'HiberPte' : [ 0x38, ['_LARGE_INTEGER']],
'NoFreePages' : [ 0x40, ['unsigned long']],
'FreeMapCheck' : [ 0x44, ['unsigned long']],
'WakeCheck' : [ 0x48, ['unsigned long']],
'FirstTablePage' : [ 0x4c, ['unsigned long']],
'PerfInfo' : [ 0x50, ['_PO_HIBER_PERF']],
'FirmwareRuntimeInformationPages' : [ 0xa8, ['unsigned long']],
'FirmwareRuntimeInformation' : [ 0xac, ['array', 1, ['unsigned long']]],
'NoBootLoaderLogPages' : [ 0xb0, ['unsigned long']],
'BootLoaderLogPages' : [ 0xb4, ['array', 8, ['unsigned long']]],
'NotUsed' : [ 0xd4, ['unsigned long']],
'ResumeContextCheck' : [ 0xd8, ['unsigned long']],
'ResumeContextPages' : [ 0xdc, ['unsigned long']],
} ],
'EX_QUEUE_WORKER_INFO' : [ 0x4, {
'QueueDisabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MakeThreadsAsNecessary' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitMode' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WorkerCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'QueueWorkerInfo' : [ 0x0, ['long']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_CURDIR' : [ 0xc, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x8, ['pointer', ['void']]],
} ],
'_PO_HIBER_PERF' : [ 0x58, {
'IoTicks' : [ 0x0, ['unsigned long long']],
'InitTicks' : [ 0x8, ['unsigned long long']],
'CopyTicks' : [ 0x10, ['unsigned long long']],
'ElapsedTicks' : [ 0x18, ['unsigned long long']],
'CompressTicks' : [ 0x20, ['unsigned long long']],
'ResumeAppTime' : [ 0x28, ['unsigned long long']],
'HiberFileResumeTime' : [ 0x30, ['unsigned long long']],
'BytesCopied' : [ 0x38, ['unsigned long long']],
'PagesProcessed' : [ 0x40, ['unsigned long long']],
'PagesWritten' : [ 0x48, ['unsigned long']],
'DumpCount' : [ 0x4c, ['unsigned long']],
'FileRuns' : [ 0x50, ['unsigned long']],
} ],
'_DEVICE_FLAGS' : [ 0x4, {
'Failed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Removable' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ConsoleIn' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConsoleOut' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Input' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Output' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
} ],
'_RTL_BALANCED_LINKS' : [ 0x10, {
'Parent' : [ 0x0, ['pointer', ['_RTL_BALANCED_LINKS']]],
'LeftChild' : [ 0x4, ['pointer', ['_RTL_BALANCED_LINKS']]],
'RightChild' : [ 0x8, ['pointer', ['_RTL_BALANCED_LINKS']]],
'Balance' : [ 0xc, ['unsigned char']],
'Reserved' : [ 0xd, ['array', 3, ['unsigned char']]],
} ],
'_MMVIEW' : [ 0x18, {
'Entry' : [ 0x0, ['unsigned long']],
'Writable' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ControlArea' : [ 0x4, ['pointer', ['_CONTROL_AREA']]],
'ViewLinks' : [ 0x8, ['_LIST_ENTRY']],
'SessionViewVa' : [ 0x10, ['pointer', ['void']]],
'SessionId' : [ 0x14, ['unsigned long']],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PoolInitialized' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DynamicVaInitialized' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'WsInitialized' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PoolDestroyed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ObjectInitialized' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
} ],
'_HEADLESS_LOADER_BLOCK' : [ 0x34, {
'UsedBiosSettings' : [ 0x0, ['unsigned char']],
'DataBits' : [ 0x1, ['unsigned char']],
'StopBits' : [ 0x2, ['unsigned char']],
'Parity' : [ 0x3, ['unsigned char']],
'BaudRate' : [ 0x4, ['unsigned long']],
'PortNumber' : [ 0x8, ['unsigned long']],
'PortAddress' : [ 0xc, ['pointer', ['unsigned char']]],
'PciDeviceId' : [ 0x10, ['unsigned short']],
'PciVendorId' : [ 0x12, ['unsigned short']],
'PciBusNumber' : [ 0x14, ['unsigned char']],
'PciBusSegment' : [ 0x16, ['unsigned short']],
'PciSlotNumber' : [ 0x18, ['unsigned char']],
'PciFunctionNumber' : [ 0x19, ['unsigned char']],
'PciFlags' : [ 0x1c, ['unsigned long']],
'SystemGUID' : [ 0x20, ['_GUID']],
'IsMMIODevice' : [ 0x30, ['unsigned char']],
'TerminalType' : [ 0x31, ['unsigned char']],
} ],
'__unnamed_2337' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_2339' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_233b' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_2337']],
'Gpt' : [ 0x0, ['__unnamed_2339']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0x70, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer', ['void']]],
'CommonBuffer' : [ 0xc, ['array', 2, ['pointer', ['void']]]],
'PhysicalAddress' : [ 0x18, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x28, ['pointer', ['void']]],
'OpenRoutine' : [ 0x2c, ['pointer', ['void']]],
'WriteRoutine' : [ 0x30, ['pointer', ['void']]],
'FinishRoutine' : [ 0x34, ['pointer', ['void']]],
'AdapterObject' : [ 0x38, ['pointer', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x3c, ['pointer', ['void']]],
'PortConfiguration' : [ 0x40, ['pointer', ['void']]],
'CrashDump' : [ 0x44, ['unsigned char']],
'MaximumTransferSize' : [ 0x48, ['unsigned long']],
'CommonBufferSize' : [ 0x4c, ['unsigned long']],
'TargetAddress' : [ 0x50, ['pointer', ['void']]],
'WritePendingRoutine' : [ 0x54, ['pointer', ['void']]],
'PartitionStyle' : [ 0x58, ['unsigned long']],
'DiskInfo' : [ 0x5c, ['__unnamed_233b']],
} ],
'_MI_SYSTEM_PTE_TYPE' : [ 0x30, {
'Bitmap' : [ 0x0, ['_RTL_BITMAP']],
'Flags' : [ 0x8, ['unsigned long']],
'Hint' : [ 0xc, ['unsigned long']],
'BasePte' : [ 0x10, ['pointer', ['_MMPTE']]],
'FailureCount' : [ 0x14, ['pointer', ['unsigned long']]],
'Vm' : [ 0x18, ['pointer', ['_MMSUPPORT']]],
'TotalSystemPtes' : [ 0x1c, ['long']],
'TotalFreeSystemPtes' : [ 0x20, ['long']],
'CachedPteCount' : [ 0x24, ['long']],
'PteFailures' : [ 0x28, ['unsigned long']],
'SpinLock' : [ 0x2c, ['unsigned long']],
'GlobalMutex' : [ 0x2c, ['pointer', ['_KGUARDED_MUTEX']]],
} ],
'_NETWORK_LOADER_BLOCK' : [ 0x10, {
'DHCPServerACK' : [ 0x0, ['pointer', ['unsigned char']]],
'DHCPServerACKLength' : [ 0x4, ['unsigned long']],
'BootServerReplyPacket' : [ 0x8, ['pointer', ['unsigned char']]],
'BootServerReplyPacketLength' : [ 0xc, ['unsigned long']],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x170, {
'Locked' : [ 0x0, ['unsigned char']],
'WarmEjectPdoPointer' : [ 0x4, ['pointer', ['pointer', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x8, ['array', 9, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x28, {
'DeviceCount' : [ 0x0, ['unsigned long']],
'ActiveCount' : [ 0x4, ['unsigned long']],
'WaitSleep' : [ 0x8, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x10, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x18, ['_LIST_ENTRY']],
'WaitS0' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_THREAD_PERFORMANCE_DATA' : [ 0x1c0, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'ProcessorNumber' : [ 0x4, ['_PROCESSOR_NUMBER']],
'ContextSwitches' : [ 0x8, ['unsigned long']],
'HwCountersCount' : [ 0xc, ['unsigned long']],
'UpdateCount' : [ 0x10, ['unsigned long long']],
'WaitReasonBitMap' : [ 0x18, ['unsigned long long']],
'HardwareCounters' : [ 0x20, ['unsigned long long']],
'CycleTime' : [ 0x28, ['_COUNTER_READING']],
'HwCounters' : [ 0x40, ['array', 16, ['_COUNTER_READING']]],
} ],
'_ETW_REPLY_QUEUE' : [ 0x2c, {
'Queue' : [ 0x0, ['_KQUEUE']],
'EventsLost' : [ 0x28, ['long']],
} ],
'_ARBITER_QUERY_ALLOCATED_RESOURCES_PARAMETERS' : [ 0x4, {
'AllocatedResources' : [ 0x0, ['pointer', ['pointer', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'_KSPECIAL_REGISTERS' : [ 0x54, {
'Cr0' : [ 0x0, ['unsigned long']],
'Cr2' : [ 0x4, ['unsigned long']],
'Cr3' : [ 0x8, ['unsigned long']],
'Cr4' : [ 0xc, ['unsigned long']],
'KernelDr0' : [ 0x10, ['unsigned long']],
'KernelDr1' : [ 0x14, ['unsigned long']],
'KernelDr2' : [ 0x18, ['unsigned long']],
'KernelDr3' : [ 0x1c, ['unsigned long']],
'KernelDr6' : [ 0x20, ['unsigned long']],
'KernelDr7' : [ 0x24, ['unsigned long']],
'Gdtr' : [ 0x28, ['_DESCRIPTOR']],
'Idtr' : [ 0x30, ['_DESCRIPTOR']],
'Tr' : [ 0x38, ['unsigned short']],
'Ldtr' : [ 0x3a, ['unsigned short']],
'Reserved' : [ 0x3c, ['array', 6, ['unsigned long']]],
} ],
'_RTL_ACTIVATION_CONTEXT_STACK_FRAME' : [ 0xc, {
'Previous' : [ 0x0, ['pointer', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'ActivationContext' : [ 0x4, ['pointer', ['_ACTIVATION_CONTEXT']]],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_RTL_AVL_TABLE' : [ 0x38, {
'BalancedRoot' : [ 0x0, ['_RTL_BALANCED_LINKS']],
'OrderedPointer' : [ 0x10, ['pointer', ['void']]],
'WhichOrderedElement' : [ 0x14, ['unsigned long']],
'NumberGenericTableElements' : [ 0x18, ['unsigned long']],
'DepthOfTree' : [ 0x1c, ['unsigned long']],
'RestartKey' : [ 0x20, ['pointer', ['_RTL_BALANCED_LINKS']]],
'DeleteCount' : [ 0x24, ['unsigned long']],
'CompareRoutine' : [ 0x28, ['pointer', ['void']]],
'AllocateRoutine' : [ 0x2c, ['pointer', ['void']]],
'FreeRoutine' : [ 0x30, ['pointer', ['void']]],
'TableContext' : [ 0x34, ['pointer', ['void']]],
} ],
'_KTRANSACTION_HISTORY' : [ 0x8, {
'RecordType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {1: 'KTMOH_CommitTransaction_Result', 2: 'KTMOH_RollbackTransaction_Result'})]],
'Payload' : [ 0x4, ['unsigned long']],
} ],
'_DESCRIPTOR' : [ 0x8, {
'Pad' : [ 0x0, ['unsigned short']],
'Limit' : [ 0x2, ['unsigned short']],
'Base' : [ 0x4, ['unsigned long']],
} ],
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
'_KUSER_SHARED_DATA' : [ 0x5f0, {
'TickCountLowDeprecated' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['wchar']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'LargePageMinimum' : [ 0x244, ['unsigned long']],
'Reserved2' : [ 0x248, ['array', 7, ['unsigned long']]],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'AltArchitecturePad' : [ 0x2c4, ['array', 1, ['unsigned long']]],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['unsigned char']],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'TscQpcData' : [ 0x2ed, ['unsigned char']],
'TscQpcEnabled' : [ 0x2ed, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TscQpcSpareFlag' : [ 0x2ed, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'TscQpcShift' : [ 0x2ed, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'TscQpcPad' : [ 0x2ee, ['array', 2, ['unsigned char']]],
'SharedDataFlags' : [ 0x2f0, ['unsigned long']],
'DbgErrorPortPresent' : [ 0x2f0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DbgElevationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'DbgVirtEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'DbgInstallerDetectEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'DbgSystemDllRelocated' : [ 0x2f0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DbgDynProcessorEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'DbgSEHValidationEnabled' : [ 0x2f0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SpareBits' : [ 0x2f0, ['BitField', dict(start_bit = 7, end_bit = 32, native_type='unsigned long')]],
'DataFlagsPad' : [ 0x2f4, ['array', 1, ['unsigned long']]],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'SystemCall' : [ 0x300, ['unsigned long']],
'SystemCallReturn' : [ 0x304, ['unsigned long']],
'SystemCallPad' : [ 0x308, ['array', 3, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'ReservedTickCountOverlay' : [ 0x320, ['array', 3, ['unsigned long']]],
'TickCountPad' : [ 0x32c, ['array', 1, ['unsigned long']]],
'Cookie' : [ 0x330, ['unsigned long']],
'CookiePad' : [ 0x334, ['array', 1, ['unsigned long']]],
'ConsoleSessionForegroundProcessId' : [ 0x338, ['long long']],
'Wow64SharedInformation' : [ 0x340, ['array', 16, ['unsigned long']]],
'UserModeGlobalLogger' : [ 0x380, ['array', 16, ['unsigned short']]],
'ImageFileExecutionOptions' : [ 0x3a0, ['unsigned long']],
'LangGenerationCount' : [ 0x3a4, ['unsigned long']],
'Reserved5' : [ 0x3a8, ['unsigned long long']],
'InterruptTimeBias' : [ 0x3b0, ['unsigned long long']],
'TscQpcBias' : [ 0x3b8, ['unsigned long long']],
'ActiveProcessorCount' : [ 0x3c0, ['unsigned long']],
'ActiveGroupCount' : [ 0x3c4, ['unsigned short']],
'Reserved4' : [ 0x3c6, ['unsigned short']],
'AitSamplingValue' : [ 0x3c8, ['unsigned long']],
'AppCompatFlag' : [ 0x3cc, ['unsigned long']],
'SystemDllNativeRelocation' : [ 0x3d0, ['unsigned long long']],
'SystemDllWowRelocation' : [ 0x3d8, ['unsigned long']],
'XStatePad' : [ 0x3dc, ['array', 1, ['unsigned long']]],
'XState' : [ 0x3e0, ['_XSTATE_CONFIGURATION']],
} ],
'__unnamed_1041' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_1041']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_1045' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1045']],
'QuadPart' : [ 0x0, ['long long']],
} ],
'__unnamed_105e' : [ 0x4, {
'LongFunction' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Persistent' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Private' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1060' : [ 0x4, {
'Flags' : [ 0x0, ['unsigned long']],
's' : [ 0x0, ['__unnamed_105e']],
} ],
'_TP_CALLBACK_ENVIRON_V3' : [ 0x28, {
'Version' : [ 0x0, ['unsigned long']],
'Pool' : [ 0x4, ['pointer', ['_TP_POOL']]],
'CleanupGroup' : [ 0x8, ['pointer', ['_TP_CLEANUP_GROUP']]],
'CleanupGroupCancelCallback' : [ 0xc, ['pointer', ['void']]],
'RaceDll' : [ 0x10, ['pointer', ['void']]],
'ActivationContext' : [ 0x14, ['pointer', ['_ACTIVATION_CONTEXT']]],
'FinalizationCallback' : [ 0x18, ['pointer', ['void']]],
'u' : [ 0x1c, ['__unnamed_1060']],
'CallbackPriority' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'TP_CALLBACK_PRIORITY_HIGH', 1: 'TP_CALLBACK_PRIORITY_NORMAL', 2: 'TP_CALLBACK_PRIORITY_LOW', 3: 'TP_CALLBACK_PRIORITY_INVALID'})]],
'Size' : [ 0x24, ['unsigned long']],
} ],
'_TP_TASK' : [ 0x20, {
'Callbacks' : [ 0x0, ['pointer', ['_TP_TASK_CALLBACKS']]],
'NumaNode' : [ 0x4, ['unsigned long']],
'IdealProcessor' : [ 0x8, ['unsigned char']],
'PostGuard' : [ 0xc, ['_TP_NBQ_GUARD']],
'NBQNode' : [ 0x1c, ['pointer', ['void']]],
} ],
'_TP_TASK_CALLBACKS' : [ 0x8, {
'ExecuteCallback' : [ 0x0, ['pointer', ['void']]],
'Unposted' : [ 0x4, ['pointer', ['void']]],
} ],
'_TP_DIRECT' : [ 0xc, {
'Callback' : [ 0x0, ['pointer', ['void']]],
'NumaNode' : [ 0x4, ['unsigned long']],
'IdealProcessor' : [ 0x8, ['unsigned char']],
} ],
'_TEB' : [ 0xfe4, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x1c, ['pointer', ['void']]],
'ClientId' : [ 0x20, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x28, ['pointer', ['void']]],
'ThreadLocalStoragePointer' : [ 0x2c, ['pointer', ['void']]],
'ProcessEnvironmentBlock' : [ 0x30, ['pointer', ['_PEB']]],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['pointer', ['void']]],
'Win32ThreadInfo' : [ 0x40, ['pointer', ['void']]],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['pointer', ['void']]],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['pointer', ['void']]]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['pointer', ['_ACTIVATION_CONTEXT_STACK']]],
'SpareBytes' : [ 0x1ac, ['array', 36, ['unsigned char']]],
'TxFsContext' : [ 0x1d0, ['unsigned long']],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x6bc, ['pointer', ['void']]],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['pointer', ['void']]],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['pointer', ['void']]]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['pointer', ['void']]],
'glSectionInfo' : [ 0xbe0, ['pointer', ['void']]],
'glSection' : [ 0xbe4, ['pointer', ['void']]],
'glTable' : [ 0xbe8, ['pointer', ['void']]],
'glCurrentRC' : [ 0xbec, ['pointer', ['void']]],
'glContext' : [ 0xbf0, ['pointer', ['void']]],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0xe0c, ['pointer', ['void']]],
'TlsSlots' : [ 0xe10, ['array', 64, ['pointer', ['void']]]],
'TlsLinks' : [ 0xf10, ['_LIST_ENTRY']],
'Vdm' : [ 0xf18, ['pointer', ['void']]],
'ReservedForNtRpc' : [ 0xf1c, ['pointer', ['void']]],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['pointer', ['void']]]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 9, ['pointer', ['void']]]],
'ActivityId' : [ 0xf50, ['_GUID']],
'SubProcessTag' : [ 0xf60, ['pointer', ['void']]],
'EtwLocalData' : [ 0xf64, ['pointer', ['void']]],
'EtwTraceData' : [ 0xf68, ['pointer', ['void']]],
'WinSockData' : [ 0xf6c, ['pointer', ['void']]],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'CurrentIdealProcessor' : [ 0xf74, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0xf74, ['unsigned long']],
'ReservedPad0' : [ 0xf74, ['unsigned char']],
'ReservedPad1' : [ 0xf75, ['unsigned char']],
'ReservedPad2' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['pointer', ['void']]],
'ReservedForOle' : [ 0xf80, ['pointer', ['void']]],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SavedPriorityState' : [ 0xf88, ['pointer', ['void']]],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'ThreadPoolData' : [ 0xf90, ['pointer', ['void']]],
'TlsExpansionSlots' : [ 0xf94, ['pointer', ['pointer', ['void']]]],
'MuiGeneration' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['pointer', ['void']]],
'pShimData' : [ 0xfa4, ['pointer', ['void']]],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['pointer', ['void']]],
'ActiveFrame' : [ 0xfb0, ['pointer', ['_TEB_ACTIVE_FRAME']]],
'FlsData' : [ 0xfb4, ['pointer', ['void']]],
'PreferredLanguages' : [ 0xfb8, ['pointer', ['void']]],
'UserPrefLanguages' : [ 0xfbc, ['pointer', ['void']]],
'MergedPrefLanguages' : [ 0xfc0, ['pointer', ['void']]],
'MuiImpersonation' : [ 0xfc4, ['unsigned long']],
'CrossTebFlags' : [ 0xfc8, ['unsigned short']],
'SpareCrossTebBits' : [ 0xfc8, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0xfca, ['unsigned short']],
'SafeThunkCall' : [ 0xfca, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0xfca, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0xfca, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0xfca, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0xfca, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0xfca, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0xfca, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0xfca, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0xfca, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0xfca, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0xfca, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0xfca, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0xfcc, ['pointer', ['void']]],
'TxnScopeExitCallback' : [ 0xfd0, ['pointer', ['void']]],
'TxnScopeContext' : [ 0xfd4, ['pointer', ['void']]],
'LockCount' : [ 0xfd8, ['unsigned long']],
'SpareUlong0' : [ 0xfdc, ['unsigned long']],
'ResourceRetValue' : [ 0xfe0, ['pointer', ['void']]],
} ],
'_LIST_ENTRY' : [ 0x8, {
'Flink' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'Blink' : [ 0x4, ['pointer', ['_LIST_ENTRY']]],
} ],
'_SINGLE_LIST_ENTRY' : [ 0x4, {
'Next' : [ 0x0, ['pointer', ['_SINGLE_LIST_ENTRY']]],
} ],
'_RTL_DYNAMIC_HASH_TABLE_CONTEXT' : [ 0xc, {
'ChainHead' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'PrevLinkage' : [ 0x4, ['pointer', ['_LIST_ENTRY']]],
'Signature' : [ 0x8, ['unsigned long']],
} ],
'_RTL_DYNAMIC_HASH_TABLE_ENUMERATOR' : [ 0x14, {
'HashEntry' : [ 0x0, ['_RTL_DYNAMIC_HASH_TABLE_ENTRY']],
'ChainHead' : [ 0xc, ['pointer', ['_LIST_ENTRY']]],
'BucketIndex' : [ 0x10, ['unsigned long']],
} ],
'_RTL_DYNAMIC_HASH_TABLE' : [ 0x24, {
'Flags' : [ 0x0, ['unsigned long']],
'Shift' : [ 0x4, ['unsigned long']],
'TableSize' : [ 0x8, ['unsigned long']],
'Pivot' : [ 0xc, ['unsigned long']],
'DivisorMask' : [ 0x10, ['unsigned long']],
'NumEntries' : [ 0x14, ['unsigned long']],
'NonEmptyBuckets' : [ 0x18, ['unsigned long']],
'NumEnumerators' : [ 0x1c, ['unsigned long']],
'Directory' : [ 0x20, ['pointer', ['void']]],
} ],
'_UNICODE_STRING' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['pointer', ['unsigned short']]],
} ],
'_STRING' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['pointer', ['unsigned char']]],
} ],
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_IMAGE_NT_HEADERS' : [ 0xf8, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER']],
} ],
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_KPCR' : [ 0x3748, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'Used_ExceptionList' : [ 0x0, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Used_StackBase' : [ 0x4, ['pointer', ['void']]],
'Spare2' : [ 0x8, ['pointer', ['void']]],
'TssCopy' : [ 0xc, ['pointer', ['void']]],
'ContextSwitches' : [ 0x10, ['unsigned long']],
'SetMemberCopy' : [ 0x14, ['unsigned long']],
'Used_Self' : [ 0x18, ['pointer', ['void']]],
'SelfPcr' : [ 0x1c, ['pointer', ['_KPCR']]],
'Prcb' : [ 0x20, ['pointer', ['_KPRCB']]],
'Irql' : [ 0x24, ['unsigned char']],
'IRR' : [ 0x28, ['unsigned long']],
'IrrActive' : [ 0x2c, ['unsigned long']],
'IDR' : [ 0x30, ['unsigned long']],
'KdVersionBlock' : [ 0x34, ['pointer', ['void']]],
'IDT' : [ 0x38, ['pointer', ['_KIDTENTRY']]],
'GDT' : [ 0x3c, ['pointer', ['_KGDTENTRY']]],
'TSS' : [ 0x40, ['pointer', ['_KTSS']]],
'MajorVersion' : [ 0x44, ['unsigned short']],
'MinorVersion' : [ 0x46, ['unsigned short']],
'SetMember' : [ 0x48, ['unsigned long']],
'StallScaleFactor' : [ 0x4c, ['unsigned long']],
'SpareUnused' : [ 0x50, ['unsigned char']],
'Number' : [ 0x51, ['unsigned char']],
'Spare0' : [ 0x52, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x53, ['unsigned char']],
'VdmAlert' : [ 0x54, ['unsigned long']],
'KernelReserved' : [ 0x58, ['array', 14, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0x90, ['unsigned long']],
'HalReserved' : [ 0x94, ['array', 16, ['unsigned long']]],
'InterruptMode' : [ 0xd4, ['unsigned long']],
'Spare1' : [ 0xd8, ['unsigned char']],
'KernelReserved2' : [ 0xdc, ['array', 17, ['unsigned long']]],
'PrcbData' : [ 0x120, ['_KPRCB']],
} ],
'_KPRCB' : [ 0x3628, {
'MinorVersion' : [ 0x0, ['unsigned short']],
'MajorVersion' : [ 0x2, ['unsigned short']],
'CurrentThread' : [ 0x4, ['pointer', ['_KTHREAD']]],
'NextThread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'IdleThread' : [ 0xc, ['pointer', ['_KTHREAD']]],
'LegacyNumber' : [ 0x10, ['unsigned char']],
'NestingLevel' : [ 0x11, ['unsigned char']],
'BuildType' : [ 0x12, ['unsigned short']],
'CpuType' : [ 0x14, ['unsigned char']],
'CpuID' : [ 0x15, ['unsigned char']],
'CpuStep' : [ 0x16, ['unsigned short']],
'CpuStepping' : [ 0x16, ['unsigned char']],
'CpuModel' : [ 0x17, ['unsigned char']],
'ProcessorState' : [ 0x18, ['_KPROCESSOR_STATE']],
'KernelReserved' : [ 0x338, ['array', 16, ['unsigned long']]],
'HalReserved' : [ 0x378, ['array', 16, ['unsigned long']]],
'CFlushSize' : [ 0x3b8, ['unsigned long']],
'CoresPerPhysicalProcessor' : [ 0x3bc, ['unsigned char']],
'LogicalProcessorsPerCore' : [ 0x3bd, ['unsigned char']],
'PrcbPad0' : [ 0x3be, ['array', 2, ['unsigned char']]],
'MHz' : [ 0x3c0, ['unsigned long']],
'CpuVendor' : [ 0x3c4, ['unsigned char']],
'GroupIndex' : [ 0x3c5, ['unsigned char']],
'Group' : [ 0x3c6, ['unsigned short']],
'GroupSetMember' : [ 0x3c8, ['unsigned long']],
'Number' : [ 0x3cc, ['unsigned long']],
'PrcbPad1' : [ 0x3d0, ['array', 72, ['unsigned char']]],
'LockQueue' : [ 0x418, ['array', 17, ['_KSPIN_LOCK_QUEUE']]],
'NpxThread' : [ 0x4a0, ['pointer', ['_KTHREAD']]],
'InterruptCount' : [ 0x4a4, ['unsigned long']],
'KernelTime' : [ 0x4a8, ['unsigned long']],
'UserTime' : [ 0x4ac, ['unsigned long']],
'DpcTime' : [ 0x4b0, ['unsigned long']],
'DpcTimeCount' : [ 0x4b4, ['unsigned long']],
'InterruptTime' : [ 0x4b8, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x4bc, ['unsigned long']],
'PageColor' : [ 0x4c0, ['unsigned long']],
'DebuggerSavedIRQL' : [ 0x4c4, ['unsigned char']],
'NodeColor' : [ 0x4c5, ['unsigned char']],
'PrcbPad20' : [ 0x4c6, ['array', 2, ['unsigned char']]],
'NodeShiftedColor' : [ 0x4c8, ['unsigned long']],
'ParentNode' : [ 0x4cc, ['pointer', ['_KNODE']]],
'SecondaryColorMask' : [ 0x4d0, ['unsigned long']],
'DpcTimeLimit' : [ 0x4d4, ['unsigned long']],
'PrcbPad21' : [ 0x4d8, ['array', 2, ['unsigned long']]],
'CcFastReadNoWait' : [ 0x4e0, ['unsigned long']],
'CcFastReadWait' : [ 0x4e4, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x4e8, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x4ec, ['unsigned long']],
'CcCopyReadWait' : [ 0x4f0, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x4f4, ['unsigned long']],
'MmSpinLockOrdering' : [ 0x4f8, ['long']],
'IoReadOperationCount' : [ 0x4fc, ['long']],
'IoWriteOperationCount' : [ 0x500, ['long']],
'IoOtherOperationCount' : [ 0x504, ['long']],
'IoReadTransferCount' : [ 0x508, ['_LARGE_INTEGER']],
'IoWriteTransferCount' : [ 0x510, ['_LARGE_INTEGER']],
'IoOtherTransferCount' : [ 0x518, ['_LARGE_INTEGER']],
'CcFastMdlReadNoWait' : [ 0x520, ['unsigned long']],
'CcFastMdlReadWait' : [ 0x524, ['unsigned long']],
'CcFastMdlReadNotPossible' : [ 0x528, ['unsigned long']],
'CcMapDataNoWait' : [ 0x52c, ['unsigned long']],
'CcMapDataWait' : [ 0x530, ['unsigned long']],
'CcPinMappedDataCount' : [ 0x534, ['unsigned long']],
'CcPinReadNoWait' : [ 0x538, ['unsigned long']],
'CcPinReadWait' : [ 0x53c, ['unsigned long']],
'CcMdlReadNoWait' : [ 0x540, ['unsigned long']],
'CcMdlReadWait' : [ 0x544, ['unsigned long']],
'CcLazyWriteHotSpots' : [ 0x548, ['unsigned long']],
'CcLazyWriteIos' : [ 0x54c, ['unsigned long']],
'CcLazyWritePages' : [ 0x550, ['unsigned long']],
'CcDataFlushes' : [ 0x554, ['unsigned long']],
'CcDataPages' : [ 0x558, ['unsigned long']],
'CcLostDelayedWrites' : [ 0x55c, ['unsigned long']],
'CcFastReadResourceMiss' : [ 0x560, ['unsigned long']],
'CcCopyReadWaitMiss' : [ 0x564, ['unsigned long']],
'CcFastMdlReadResourceMiss' : [ 0x568, ['unsigned long']],
'CcMapDataNoWaitMiss' : [ 0x56c, ['unsigned long']],
'CcMapDataWaitMiss' : [ 0x570, ['unsigned long']],
'CcPinReadNoWaitMiss' : [ 0x574, ['unsigned long']],
'CcPinReadWaitMiss' : [ 0x578, ['unsigned long']],
'CcMdlReadNoWaitMiss' : [ 0x57c, ['unsigned long']],
'CcMdlReadWaitMiss' : [ 0x580, ['unsigned long']],
'CcReadAheadIos' : [ 0x584, ['unsigned long']],
'KeAlignmentFixupCount' : [ 0x588, ['unsigned long']],
'KeExceptionDispatchCount' : [ 0x58c, ['unsigned long']],
'KeSystemCalls' : [ 0x590, ['unsigned long']],
'AvailableTime' : [ 0x594, ['unsigned long']],
'PrcbPad22' : [ 0x598, ['array', 2, ['unsigned long']]],
'PPLookasideList' : [ 0x5a0, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNPagedLookasideList' : [ 0x620, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PPPagedLookasideList' : [ 0xf20, ['array', 32, ['_GENERAL_LOOKASIDE_POOL']]],
'PacketBarrier' : [ 0x1820, ['unsigned long']],
'ReverseStall' : [ 0x1824, ['long']],
'IpiFrame' : [ 0x1828, ['pointer', ['void']]],
'PrcbPad3' : [ 0x182c, ['array', 52, ['unsigned char']]],
'CurrentPacket' : [ 0x1860, ['array', 3, ['pointer', ['void']]]],
'TargetSet' : [ 0x186c, ['unsigned long']],
'WorkerRoutine' : [ 0x1870, ['pointer', ['void']]],
'IpiFrozen' : [ 0x1874, ['unsigned long']],
'PrcbPad4' : [ 0x1878, ['array', 40, ['unsigned char']]],
'RequestSummary' : [ 0x18a0, ['unsigned long']],
'SignalDone' : [ 0x18a4, ['pointer', ['_KPRCB']]],
'PrcbPad50' : [ 0x18a8, ['array', 56, ['unsigned char']]],
'DpcData' : [ 0x18e0, ['array', 2, ['_KDPC_DATA']]],
'DpcStack' : [ 0x1908, ['pointer', ['void']]],
'MaximumDpcQueueDepth' : [ 0x190c, ['long']],
'DpcRequestRate' : [ 0x1910, ['unsigned long']],
'MinimumDpcRate' : [ 0x1914, ['unsigned long']],
'DpcLastCount' : [ 0x1918, ['unsigned long']],
'PrcbLock' : [ 0x191c, ['unsigned long']],
'DpcGate' : [ 0x1920, ['_KGATE']],
'ThreadDpcEnable' : [ 0x1930, ['unsigned char']],
'QuantumEnd' : [ 0x1931, ['unsigned char']],
'DpcRoutineActive' : [ 0x1932, ['unsigned char']],
'IdleSchedule' : [ 0x1933, ['unsigned char']],
'DpcRequestSummary' : [ 0x1934, ['long']],
'DpcRequestSlot' : [ 0x1934, ['array', 2, ['short']]],
'NormalDpcState' : [ 0x1934, ['short']],
'DpcThreadActive' : [ 0x1936, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'ThreadDpcState' : [ 0x1936, ['short']],
'TimerHand' : [ 0x1938, ['unsigned long']],
'LastTick' : [ 0x193c, ['unsigned long']],
'MasterOffset' : [ 0x1940, ['long']],
'PrcbPad41' : [ 0x1944, ['array', 2, ['unsigned long']]],
'PeriodicCount' : [ 0x194c, ['unsigned long']],
'PeriodicBias' : [ 0x1950, ['unsigned long']],
'TickOffset' : [ 0x1958, ['unsigned long long']],
'TimerTable' : [ 0x1960, ['_KTIMER_TABLE']],
'CallDpc' : [ 0x31a0, ['_KDPC']],
'ClockKeepAlive' : [ 0x31c0, ['long']],
'ClockCheckSlot' : [ 0x31c4, ['unsigned char']],
'ClockPollCycle' : [ 0x31c5, ['unsigned char']],
'PrcbPad6' : [ 0x31c6, ['array', 2, ['unsigned char']]],
'DpcWatchdogPeriod' : [ 0x31c8, ['long']],
'DpcWatchdogCount' : [ 0x31cc, ['long']],
'ThreadWatchdogPeriod' : [ 0x31d0, ['long']],
'ThreadWatchdogCount' : [ 0x31d4, ['long']],
'KeSpinLockOrdering' : [ 0x31d8, ['long']],
'PrcbPad70' : [ 0x31dc, ['array', 1, ['unsigned long']]],
'WaitListHead' : [ 0x31e0, ['_LIST_ENTRY']],
'WaitLock' : [ 0x31e8, ['unsigned long']],
'ReadySummary' : [ 0x31ec, ['unsigned long']],
'QueueIndex' : [ 0x31f0, ['unsigned long']],
'DeferredReadyListHead' : [ 0x31f4, ['_SINGLE_LIST_ENTRY']],
'StartCycles' : [ 0x31f8, ['unsigned long long']],
'CycleTime' : [ 0x3200, ['unsigned long long']],
'HighCycleTime' : [ 0x3208, ['unsigned long']],
'PrcbPad71' : [ 0x320c, ['unsigned long']],
'PrcbPad72' : [ 0x3210, ['array', 2, ['unsigned long long']]],
'DispatcherReadyListHead' : [ 0x3220, ['array', 32, ['_LIST_ENTRY']]],
'ChainedInterruptList' : [ 0x3320, ['pointer', ['void']]],
'LookasideIrpFloat' : [ 0x3324, ['long']],
'MmPageFaultCount' : [ 0x3328, ['long']],
'MmCopyOnWriteCount' : [ 0x332c, ['long']],
'MmTransitionCount' : [ 0x3330, ['long']],
'MmCacheTransitionCount' : [ 0x3334, ['long']],
'MmDemandZeroCount' : [ 0x3338, ['long']],
'MmPageReadCount' : [ 0x333c, ['long']],
'MmPageReadIoCount' : [ 0x3340, ['long']],
'MmCacheReadCount' : [ 0x3344, ['long']],
'MmCacheIoCount' : [ 0x3348, ['long']],
'MmDirtyPagesWriteCount' : [ 0x334c, ['long']],
'MmDirtyWriteIoCount' : [ 0x3350, ['long']],
'MmMappedPagesWriteCount' : [ 0x3354, ['long']],
'MmMappedWriteIoCount' : [ 0x3358, ['long']],
'CachedCommit' : [ 0x335c, ['unsigned long']],
'CachedResidentAvailable' : [ 0x3360, ['unsigned long']],
'HyperPte' : [ 0x3364, ['pointer', ['void']]],
'PrcbPad8' : [ 0x3368, ['array', 4, ['unsigned char']]],
'VendorString' : [ 0x336c, ['array', 13, ['unsigned char']]],
'InitialApicId' : [ 0x3379, ['unsigned char']],
'LogicalProcessorsPerPhysicalProcessor' : [ 0x337a, ['unsigned char']],
'PrcbPad9' : [ 0x337b, ['array', 5, ['unsigned char']]],
'FeatureBits' : [ 0x3380, ['unsigned long']],
'UpdateSignature' : [ 0x3388, ['_LARGE_INTEGER']],
'IsrTime' : [ 0x3390, ['unsigned long long']],
'RuntimeAccumulation' : [ 0x3398, ['unsigned long long']],
'PowerState' : [ 0x33a0, ['_PROCESSOR_POWER_STATE']],
'DpcWatchdogDpc' : [ 0x3468, ['_KDPC']],
'DpcWatchdogTimer' : [ 0x3488, ['_KTIMER']],
'WheaInfo' : [ 0x34b0, ['pointer', ['void']]],
'EtwSupport' : [ 0x34b4, ['pointer', ['void']]],
'InterruptObjectPool' : [ 0x34b8, ['_SLIST_HEADER']],
'HypercallPageList' : [ 0x34c0, ['_SLIST_HEADER']],
'HypercallPageVirtual' : [ 0x34c8, ['pointer', ['void']]],
'VirtualApicAssist' : [ 0x34cc, ['pointer', ['void']]],
'StatisticsPage' : [ 0x34d0, ['pointer', ['unsigned long long']]],
'RateControl' : [ 0x34d4, ['pointer', ['void']]],
'Cache' : [ 0x34d8, ['array', 5, ['_CACHE_DESCRIPTOR']]],
'CacheCount' : [ 0x3514, ['unsigned long']],
'CacheProcessorMask' : [ 0x3518, ['array', 5, ['unsigned long']]],
'PackageProcessorSet' : [ 0x352c, ['_KAFFINITY_EX']],
'PrcbPad91' : [ 0x3538, ['array', 1, ['unsigned long']]],
'CoreProcessorSet' : [ 0x353c, ['unsigned long']],
'TimerExpirationDpc' : [ 0x3540, ['_KDPC']],
'SpinLockAcquireCount' : [ 0x3560, ['unsigned long']],
'SpinLockContentionCount' : [ 0x3564, ['unsigned long']],
'SpinLockSpinCount' : [ 0x3568, ['unsigned long']],
'IpiSendRequestBroadcastCount' : [ 0x356c, ['unsigned long']],
'IpiSendRequestRoutineCount' : [ 0x3570, ['unsigned long']],
'IpiSendSoftwareInterruptCount' : [ 0x3574, ['unsigned long']],
'ExInitializeResourceCount' : [ 0x3578, ['unsigned long']],
'ExReInitializeResourceCount' : [ 0x357c, ['unsigned long']],
'ExDeleteResourceCount' : [ 0x3580, ['unsigned long']],
'ExecutiveResourceAcquiresCount' : [ 0x3584, ['unsigned long']],
'ExecutiveResourceContentionsCount' : [ 0x3588, ['unsigned long']],
'ExecutiveResourceReleaseExclusiveCount' : [ 0x358c, ['unsigned long']],
'ExecutiveResourceReleaseSharedCount' : [ 0x3590, ['unsigned long']],
'ExecutiveResourceConvertsCount' : [ 0x3594, ['unsigned long']],
'ExAcqResExclusiveAttempts' : [ 0x3598, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusive' : [ 0x359c, ['unsigned long']],
'ExAcqResExclusiveAcquiresExclusiveRecursive' : [ 0x35a0, ['unsigned long']],
'ExAcqResExclusiveWaits' : [ 0x35a4, ['unsigned long']],
'ExAcqResExclusiveNotAcquires' : [ 0x35a8, ['unsigned long']],
'ExAcqResSharedAttempts' : [ 0x35ac, ['unsigned long']],
'ExAcqResSharedAcquiresExclusive' : [ 0x35b0, ['unsigned long']],
'ExAcqResSharedAcquiresShared' : [ 0x35b4, ['unsigned long']],
'ExAcqResSharedAcquiresSharedRecursive' : [ 0x35b8, ['unsigned long']],
'ExAcqResSharedWaits' : [ 0x35bc, ['unsigned long']],
'ExAcqResSharedNotAcquires' : [ 0x35c0, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAttempts' : [ 0x35c4, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresExclusive' : [ 0x35c8, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresShared' : [ 0x35cc, ['unsigned long']],
'ExAcqResSharedStarveExclusiveAcquiresSharedRecursive' : [ 0x35d0, ['unsigned long']],
'ExAcqResSharedStarveExclusiveWaits' : [ 0x35d4, ['unsigned long']],
'ExAcqResSharedStarveExclusiveNotAcquires' : [ 0x35d8, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAttempts' : [ 0x35dc, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresExclusive' : [ 0x35e0, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresShared' : [ 0x35e4, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveAcquiresSharedRecursive' : [ 0x35e8, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveWaits' : [ 0x35ec, ['unsigned long']],
'ExAcqResSharedWaitForExclusiveNotAcquires' : [ 0x35f0, ['unsigned long']],
'ExSetResOwnerPointerExclusive' : [ 0x35f4, ['unsigned long']],
'ExSetResOwnerPointerSharedNew' : [ 0x35f8, ['unsigned long']],
'ExSetResOwnerPointerSharedOld' : [ 0x35fc, ['unsigned long']],
'ExTryToAcqExclusiveAttempts' : [ 0x3600, ['unsigned long']],
'ExTryToAcqExclusiveAcquires' : [ 0x3604, ['unsigned long']],
'ExBoostExclusiveOwner' : [ 0x3608, ['unsigned long']],
'ExBoostSharedOwners' : [ 0x360c, ['unsigned long']],
'ExEtwSynchTrackingNotificationsCount' : [ 0x3610, ['unsigned long']],
'ExEtwSynchTrackingNotificationsAccountedCount' : [ 0x3614, ['unsigned long']],
'Context' : [ 0x3618, ['pointer', ['_CONTEXT']]],
'ContextFlags' : [ 0x361c, ['unsigned long']],
'ExtendedState' : [ 0x3620, ['pointer', ['_XSAVE_AREA']]],
} ],
'_KAPC' : [ 0x30, {
'Type' : [ 0x0, ['unsigned char']],
'SpareByte0' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'SpareByte1' : [ 0x3, ['unsigned char']],
'SpareLong0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'ApcListEntry' : [ 0xc, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x14, ['pointer', ['void']]],
'RundownRoutine' : [ 0x18, ['pointer', ['void']]],
'NormalRoutine' : [ 0x1c, ['pointer', ['void']]],
'NormalContext' : [ 0x20, ['pointer', ['void']]],
'SystemArgument1' : [ 0x24, ['pointer', ['void']]],
'SystemArgument2' : [ 0x28, ['pointer', ['void']]],
'ApcStateIndex' : [ 0x2c, ['unsigned char']],
'ApcMode' : [ 0x2d, ['unsigned char']],
'Inserted' : [ 0x2e, ['unsigned char']],
} ],
'_KTHREAD' : [ 0x200, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'CycleTime' : [ 0x10, ['unsigned long long']],
'HighCycleTime' : [ 0x18, ['unsigned long']],
'QuantumTarget' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['pointer', ['void']]],
'StackLimit' : [ 0x2c, ['pointer', ['void']]],
'KernelStack' : [ 0x30, ['pointer', ['void']]],
'ThreadLock' : [ 0x34, ['unsigned long']],
'WaitRegister' : [ 0x38, ['_KWAIT_STATUS_REGISTER']],
'Running' : [ 0x39, ['unsigned char']],
'Alerted' : [ 0x3a, ['array', 2, ['unsigned char']]],
'KernelStackResident' : [ 0x3c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadyTransition' : [ 0x3c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessReadyQueue' : [ 0x3c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WaitNext' : [ 0x3c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemAffinityActive' : [ 0x3c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Alertable' : [ 0x3c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'GdiFlushActive' : [ 0x3c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'UserStackWalkActive' : [ 0x3c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ApcInterruptRequest' : [ 0x3c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ForceDeferSchedule' : [ 0x3c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'QuantumEndMigrate' : [ 0x3c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'UmsDirectedSwitchEnable' : [ 0x3c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'TimerActive' : [ 0x3c, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Reserved' : [ 0x3c, ['BitField', dict(start_bit = 13, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x3c, ['long']],
'ApcState' : [ 0x40, ['_KAPC_STATE']],
'ApcStateFill' : [ 0x40, ['array', 23, ['unsigned char']]],
'Priority' : [ 0x57, ['unsigned char']],
'NextProcessor' : [ 0x58, ['unsigned long']],
'DeferredProcessor' : [ 0x5c, ['unsigned long']],
'ApcQueueLock' : [ 0x60, ['unsigned long']],
'ContextSwitches' : [ 0x64, ['unsigned long']],
'State' : [ 0x68, ['unsigned char']],
'NpxState' : [ 0x69, ['unsigned char']],
'WaitIrql' : [ 0x6a, ['unsigned char']],
'WaitMode' : [ 0x6b, ['unsigned char']],
'WaitStatus' : [ 0x6c, ['long']],
'WaitBlockList' : [ 0x70, ['pointer', ['_KWAIT_BLOCK']]],
'WaitListEntry' : [ 0x74, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x74, ['_SINGLE_LIST_ENTRY']],
'Queue' : [ 0x7c, ['pointer', ['_KQUEUE']]],
'WaitTime' : [ 0x80, ['unsigned long']],
'KernelApcDisable' : [ 0x84, ['short']],
'SpecialApcDisable' : [ 0x86, ['short']],
'CombinedApcDisable' : [ 0x84, ['unsigned long']],
'Teb' : [ 0x88, ['pointer', ['void']]],
'Timer' : [ 0x90, ['_KTIMER']],
'AutoAlignment' : [ 0xb8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DisableBoost' : [ 0xb8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'EtwStackTraceApc1Inserted' : [ 0xb8, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EtwStackTraceApc2Inserted' : [ 0xb8, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CalloutActive' : [ 0xb8, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ApcQueueable' : [ 0xb8, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'EnableStackSwap' : [ 0xb8, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'GuiThread' : [ 0xb8, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'UmsPerformingSyscall' : [ 0xb8, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'ReservedFlags' : [ 0xb8, ['BitField', dict(start_bit = 9, end_bit = 32, native_type='unsigned long')]],
'ThreadFlags' : [ 0xb8, ['long']],
'ServiceTable' : [ 0xbc, ['pointer', ['void']]],
'WaitBlock' : [ 0xc0, ['array', 4, ['_KWAIT_BLOCK']]],
'QueueListEntry' : [ 0x120, ['_LIST_ENTRY']],
'TrapFrame' : [ 0x128, ['pointer', ['_KTRAP_FRAME']]],
'FirstArgument' : [ 0x12c, ['pointer', ['void']]],
'CallbackStack' : [ 0x130, ['pointer', ['void']]],
'CallbackDepth' : [ 0x130, ['unsigned long']],
'ApcStateIndex' : [ 0x134, ['unsigned char']],
'BasePriority' : [ 0x135, ['unsigned char']],
'PriorityDecrement' : [ 0x136, ['unsigned char']],
'ForegroundBoost' : [ 0x136, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'UnusualBoost' : [ 0x136, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Preempted' : [ 0x137, ['unsigned char']],
'AdjustReason' : [ 0x138, ['unsigned char']],
'AdjustIncrement' : [ 0x139, ['unsigned char']],
'PreviousMode' : [ 0x13a, ['unsigned char']],
'Saturation' : [ 0x13b, ['unsigned char']],
'SystemCallNumber' : [ 0x13c, ['unsigned long']],
'FreezeCount' : [ 0x140, ['unsigned long']],
'UserAffinity' : [ 0x144, ['_GROUP_AFFINITY']],
'Process' : [ 0x150, ['pointer', ['_KPROCESS']]],
'Affinity' : [ 0x154, ['_GROUP_AFFINITY']],
'IdealProcessor' : [ 0x160, ['unsigned long']],
'UserIdealProcessor' : [ 0x164, ['unsigned long']],
'ApcStatePointer' : [ 0x168, ['array', 2, ['pointer', ['_KAPC_STATE']]]],
'SavedApcState' : [ 0x170, ['_KAPC_STATE']],
'SavedApcStateFill' : [ 0x170, ['array', 23, ['unsigned char']]],
'WaitReason' : [ 0x187, ['unsigned char']],
'SuspendCount' : [ 0x188, ['unsigned char']],
'Spare1' : [ 0x189, ['unsigned char']],
'OtherPlatformFill' : [ 0x18a, ['unsigned char']],
'Win32Thread' : [ 0x18c, ['pointer', ['void']]],
'StackBase' : [ 0x190, ['pointer', ['void']]],
'SuspendApc' : [ 0x194, ['_KAPC']],
'SuspendApcFill0' : [ 0x194, ['array', 1, ['unsigned char']]],
'ResourceIndex' : [ 0x195, ['unsigned char']],
'SuspendApcFill1' : [ 0x194, ['array', 3, ['unsigned char']]],
'QuantumReset' : [ 0x197, ['unsigned char']],
'SuspendApcFill2' : [ 0x194, ['array', 4, ['unsigned char']]],
'KernelTime' : [ 0x198, ['unsigned long']],
'SuspendApcFill3' : [ 0x194, ['array', 36, ['unsigned char']]],
'WaitPrcb' : [ 0x1b8, ['pointer', ['_KPRCB']]],
'SuspendApcFill4' : [ 0x194, ['array', 40, ['unsigned char']]],
'LegoData' : [ 0x1bc, ['pointer', ['void']]],
'SuspendApcFill5' : [ 0x194, ['array', 47, ['unsigned char']]],
'LargeStack' : [ 0x1c3, ['unsigned char']],
'UserTime' : [ 0x1c4, ['unsigned long']],
'SuspendSemaphore' : [ 0x1c8, ['_KSEMAPHORE']],
'SuspendSemaphorefill' : [ 0x1c8, ['array', 20, ['unsigned char']]],
'SListFaultCount' : [ 0x1dc, ['unsigned long']],
'ThreadListEntry' : [ 0x1e0, ['_LIST_ENTRY']],
'MutantListHead' : [ 0x1e8, ['_LIST_ENTRY']],
'SListFaultAddress' : [ 0x1f0, ['pointer', ['void']]],
'ThreadCounters' : [ 0x1f4, ['pointer', ['_KTHREAD_COUNTERS']]],
'XStateSave' : [ 0x1f8, ['pointer', ['_XSTATE_SAVE']]],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x4, ['pointer', ['unsigned long']]],
} ],
'_FAST_MUTEX' : [ 0x20, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x4, ['pointer', ['_KTHREAD']]],
'Contention' : [ 0x8, ['unsigned long']],
'Event' : [ 0xc, ['_KEVENT']],
'OldIrql' : [ 0x1c, ['unsigned long']],
} ],
'_KEVENT' : [ 0x10, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_SLIST_HEADER' : [ 0x8, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Next' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x4, ['unsigned short']],
'Sequence' : [ 0x6, ['unsigned short']],
} ],
'_LOOKASIDE_LIST_EX' : [ 0x48, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE_POOL']],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0xc0, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
'Lock__ObsoleteButDoNotDelete' : [ 0x80, ['unsigned long']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0xc0, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
'Lock__ObsoleteButDoNotDelete' : [ 0x80, ['_FAST_MUTEX']],
} ],
'_QUAD' : [ 0x8, {
'UseThisFieldToCopy' : [ 0x0, ['long long']],
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_IO_STATUS_BLOCK' : [ 0x8, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer', ['void']]],
'Information' : [ 0x4, ['unsigned long']],
} ],
'_EX_PUSH_LOCK' : [ 0x4, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
'Ptr' : [ 0x0, ['pointer', ['void']]],
} ],
'_PROCESSOR_NUMBER' : [ 0x4, {
'Group' : [ 0x0, ['unsigned short']],
'Number' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
} ],
'_EX_PUSH_LOCK_CACHE_AWARE' : [ 0x80, {
'Locks' : [ 0x0, ['array', 32, ['pointer', ['_EX_PUSH_LOCK']]]],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x8, {
'P' : [ 0x0, ['pointer', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x4, ['pointer', ['_GENERAL_LOOKASIDE']]],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'SingleListHead' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Depth' : [ 0x8, ['unsigned short']],
'MaximumDepth' : [ 0xa, ['unsigned short']],
'TotalAllocates' : [ 0xc, ['unsigned long']],
'AllocateMisses' : [ 0x10, ['unsigned long']],
'AllocateHits' : [ 0x10, ['unsigned long']],
'TotalFrees' : [ 0x14, ['unsigned long']],
'FreeMisses' : [ 0x18, ['unsigned long']],
'FreeHits' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x20, ['unsigned long']],
'Size' : [ 0x24, ['unsigned long']],
'AllocateEx' : [ 0x28, ['pointer', ['void']]],
'Allocate' : [ 0x28, ['pointer', ['void']]],
'FreeEx' : [ 0x2c, ['pointer', ['void']]],
'Free' : [ 0x2c, ['pointer', ['void']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x38, ['unsigned long']],
'LastAllocateMisses' : [ 0x3c, ['unsigned long']],
'LastAllocateHits' : [ 0x3c, ['unsigned long']],
'Future' : [ 0x40, ['array', 2, ['unsigned long']]],
} ],
'_EX_FAST_REF' : [ 0x4, {
'Object' : [ 0x0, ['pointer', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Value' : [ 0x0, ['unsigned long']],
} ],
'_EX_PUSH_LOCK_WAIT_BLOCK' : [ 0x30, {
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Next' : [ 0x10, ['pointer', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Last' : [ 0x14, ['pointer', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Previous' : [ 0x18, ['pointer', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'ShareCount' : [ 0x1c, ['long']],
'Flags' : [ 0x20, ['long']],
} ],
'_ETHREAD' : [ 0x2b8, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x200, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x208, ['_LARGE_INTEGER']],
'KeyedWaitChain' : [ 0x208, ['_LIST_ENTRY']],
'ExitStatus' : [ 0x210, ['long']],
'PostBlockList' : [ 0x214, ['_LIST_ENTRY']],
'ForwardLinkShadow' : [ 0x214, ['pointer', ['void']]],
'StartAddress' : [ 0x218, ['pointer', ['void']]],
'TerminationPort' : [ 0x21c, ['pointer', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x21c, ['pointer', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x21c, ['pointer', ['void']]],
'ActiveTimerListLock' : [ 0x220, ['unsigned long']],
'ActiveTimerListHead' : [ 0x224, ['_LIST_ENTRY']],
'Cid' : [ 0x22c, ['_CLIENT_ID']],
'KeyedWaitSemaphore' : [ 0x234, ['_KSEMAPHORE']],
'AlpcWaitSemaphore' : [ 0x234, ['_KSEMAPHORE']],
'ClientSecurity' : [ 0x248, ['_PS_CLIENT_SECURITY_CONTEXT']],
'IrpList' : [ 0x24c, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x254, ['unsigned long']],
'DeviceToVerify' : [ 0x258, ['pointer', ['_DEVICE_OBJECT']]],
'CpuQuotaApc' : [ 0x25c, ['pointer', ['_PSP_CPU_QUOTA_APC']]],
'Win32StartAddress' : [ 0x260, ['pointer', ['void']]],
'LegacyPowerObject' : [ 0x264, ['pointer', ['void']]],
'ThreadListEntry' : [ 0x268, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x270, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x274, ['_EX_PUSH_LOCK']],
'ReadClusterSize' : [ 0x278, ['unsigned long']],
'MmLockOrdering' : [ 0x27c, ['long']],
'CrossThreadFlags' : [ 0x280, ['unsigned long']],
'Terminated' : [ 0x280, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ThreadInserted' : [ 0x280, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x280, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x280, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemThread' : [ 0x280, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x280, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x280, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x280, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x280, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyTokenOnOpen' : [ 0x280, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ThreadIoPriority' : [ 0x280, ['BitField', dict(start_bit = 10, end_bit = 13, native_type='unsigned long')]],
'ThreadPagePriority' : [ 0x280, ['BitField', dict(start_bit = 13, end_bit = 16, native_type='unsigned long')]],
'RundownFail' : [ 0x280, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NeedsWorkingSetAging' : [ 0x280, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x284, ['unsigned long']],
'ActiveExWorker' : [ 0x284, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerCanWaitUser' : [ 0x284, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MemoryMaker' : [ 0x284, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ClonedThread' : [ 0x284, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'KeyedEventInUse' : [ 0x284, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RateApcState' : [ 0x284, ['BitField', dict(start_bit = 5, end_bit = 7, native_type='unsigned long')]],
'SelfTerminate' : [ 0x284, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x288, ['unsigned long']],
'Spare' : [ 0x288, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'StartAddressInvalid' : [ 0x288, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'EtwPageFaultCalloutActive' : [ 0x288, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsProcessWorkingSetExclusive' : [ 0x288, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'OwnsProcessWorkingSetShared' : [ 0x288, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsSystemCacheWorkingSetExclusive' : [ 0x288, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsSystemCacheWorkingSetShared' : [ 0x288, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsSessionWorkingSetExclusive' : [ 0x288, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsSessionWorkingSetShared' : [ 0x289, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsProcessAddressSpaceExclusive' : [ 0x289, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsProcessAddressSpaceShared' : [ 0x289, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SuppressSymbolLoad' : [ 0x289, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Prefetching' : [ 0x289, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsDynamicMemoryShared' : [ 0x289, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsChangeControlAreaExclusive' : [ 0x289, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsChangeControlAreaShared' : [ 0x289, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsPagedPoolWorkingSetExclusive' : [ 0x28a, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'OwnsPagedPoolWorkingSetShared' : [ 0x28a, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'OwnsSystemPtesWorkingSetExclusive' : [ 0x28a, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsSystemPtesWorkingSetShared' : [ 0x28a, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'TrimTrigger' : [ 0x28a, ['BitField', dict(start_bit = 4, end_bit = 6, native_type='unsigned char')]],
'Spare1' : [ 0x28a, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'PriorityRegionActive' : [ 0x28b, ['unsigned char']],
'CacheManagerActive' : [ 0x28c, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x28d, ['unsigned char']],
'ActiveFaultCount' : [ 0x28e, ['unsigned char']],
'LockOrderState' : [ 0x28f, ['unsigned char']],
'AlpcMessageId' : [ 0x290, ['unsigned long']],
'AlpcMessage' : [ 0x294, ['pointer', ['void']]],
'AlpcReceiveAttributeSet' : [ 0x294, ['unsigned long']],
'AlpcWaitListEntry' : [ 0x298, ['_LIST_ENTRY']],
'CacheManagerCount' : [ 0x2a0, ['unsigned long']],
'IoBoostCount' : [ 0x2a4, ['unsigned long']],
'IrpListLock' : [ 0x2a8, ['unsigned long']],
'ReservedForSynchTracking' : [ 0x2ac, ['pointer', ['void']]],
'CmCallbackListHead' : [ 0x2b0, ['_SINGLE_LIST_ENTRY']],
} ],
'_EPROCESS' : [ 0x2c0, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0x98, ['_EX_PUSH_LOCK']],
'CreateTime' : [ 0xa0, ['_LARGE_INTEGER']],
'ExitTime' : [ 0xa8, ['_LARGE_INTEGER']],
'RundownProtect' : [ 0xb0, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0xb4, ['pointer', ['void']]],
'ActiveProcessLinks' : [ 0xb8, ['_LIST_ENTRY']],
'ProcessQuotaUsage' : [ 0xc0, ['array', 2, ['unsigned long']]],
'ProcessQuotaPeak' : [ 0xc8, ['array', 2, ['unsigned long']]],
'CommitCharge' : [ 0xd0, ['unsigned long']],
'QuotaBlock' : [ 0xd4, ['pointer', ['_EPROCESS_QUOTA_BLOCK']]],
'CpuQuotaBlock' : [ 0xd8, ['pointer', ['_PS_CPU_QUOTA_BLOCK']]],
'PeakVirtualSize' : [ 0xdc, ['unsigned long']],
'VirtualSize' : [ 0xe0, ['unsigned long']],
'SessionProcessLinks' : [ 0xe4, ['_LIST_ENTRY']],
'DebugPort' : [ 0xec, ['pointer', ['void']]],
'ExceptionPortData' : [ 0xf0, ['pointer', ['void']]],
'ExceptionPortValue' : [ 0xf0, ['unsigned long']],
'ExceptionPortState' : [ 0xf0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'ObjectTable' : [ 0xf4, ['pointer', ['_HANDLE_TABLE']]],
'Token' : [ 0xf8, ['_EX_FAST_REF']],
'WorkingSetPage' : [ 0xfc, ['unsigned long']],
'AddressCreationLock' : [ 0x100, ['_EX_PUSH_LOCK']],
'RotateInProgress' : [ 0x104, ['pointer', ['_ETHREAD']]],
'ForkInProgress' : [ 0x108, ['pointer', ['_ETHREAD']]],
'HardwareTrigger' : [ 0x10c, ['unsigned long']],
'PhysicalVadRoot' : [ 0x110, ['pointer', ['_MM_AVL_TABLE']]],
'CloneRoot' : [ 0x114, ['pointer', ['void']]],
'NumberOfPrivatePages' : [ 0x118, ['unsigned long']],
'NumberOfLockedPages' : [ 0x11c, ['unsigned long']],
'Win32Process' : [ 0x120, ['pointer', ['void']]],
'Job' : [ 0x124, ['pointer', ['_EJOB']]],
'SectionObject' : [ 0x128, ['pointer', ['void']]],
'SectionBaseAddress' : [ 0x12c, ['pointer', ['void']]],
'Cookie' : [ 0x130, ['unsigned long']],
'Spare8' : [ 0x134, ['unsigned long']],
'WorkingSetWatch' : [ 0x138, ['pointer', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x13c, ['pointer', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x140, ['pointer', ['void']]],
'LdtInformation' : [ 0x144, ['pointer', ['void']]],
'VdmObjects' : [ 0x148, ['pointer', ['void']]],
'ConsoleHostProcess' : [ 0x14c, ['unsigned long']],
'DeviceMap' : [ 0x150, ['pointer', ['void']]],
'EtwDataSource' : [ 0x154, ['pointer', ['void']]],
'FreeTebHint' : [ 0x158, ['pointer', ['void']]],
'PageDirectoryPte' : [ 0x160, ['_HARDWARE_PTE']],
'Filler' : [ 0x160, ['unsigned long long']],
'Session' : [ 0x168, ['pointer', ['void']]],
'ImageFileName' : [ 0x16c, ['array', 15, ['unsigned char']]],
'PriorityClass' : [ 0x17b, ['unsigned char']],
'JobLinks' : [ 0x17c, ['_LIST_ENTRY']],
'LockedPagesList' : [ 0x184, ['pointer', ['void']]],
'ThreadListHead' : [ 0x188, ['_LIST_ENTRY']],
'SecurityPort' : [ 0x190, ['pointer', ['void']]],
'PaeTop' : [ 0x194, ['pointer', ['void']]],
'ActiveThreads' : [ 0x198, ['unsigned long']],
'ImagePathHash' : [ 0x19c, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x1a0, ['unsigned long']],
'LastThreadExitStatus' : [ 0x1a4, ['long']],
'Peb' : [ 0x1a8, ['pointer', ['_PEB']]],
'PrefetchTrace' : [ 0x1ac, ['_EX_FAST_REF']],
'ReadOperationCount' : [ 0x1b0, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x1b8, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x1c0, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x1c8, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x1d0, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x1d8, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x1e0, ['unsigned long']],
'CommitChargePeak' : [ 0x1e4, ['unsigned long']],
'AweInfo' : [ 0x1e8, ['pointer', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x1ec, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'Vm' : [ 0x1f0, ['_MMSUPPORT']],
'MmProcessLinks' : [ 0x25c, ['_LIST_ENTRY']],
'HighestUserAddress' : [ 0x264, ['pointer', ['void']]],
'ModifiedPageCount' : [ 0x268, ['unsigned long']],
'Flags2' : [ 0x26c, ['unsigned long']],
'JobNotReallyActive' : [ 0x26c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AccountingFolded' : [ 0x26c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'NewProcessReported' : [ 0x26c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ExitProcessReported' : [ 0x26c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReportCommitChanges' : [ 0x26c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LastReportMemory' : [ 0x26c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'ReportPhysicalPageChanges' : [ 0x26c, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'HandleTableRundown' : [ 0x26c, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'NeedsHandleRundown' : [ 0x26c, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RefTraceEnabled' : [ 0x26c, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'NumaAware' : [ 0x26c, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProtectedProcess' : [ 0x26c, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'DefaultPagePriority' : [ 0x26c, ['BitField', dict(start_bit = 12, end_bit = 15, native_type='unsigned long')]],
'PrimaryTokenFrozen' : [ 0x26c, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessVerifierTarget' : [ 0x26c, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'StackRandomizationDisabled' : [ 0x26c, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'AffinityPermanent' : [ 0x26c, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'AffinityUpdateEnable' : [ 0x26c, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'PropagateNode' : [ 0x26c, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'ExplicitAffinity' : [ 0x26c, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Flags' : [ 0x270, ['unsigned long']],
'CreateReported' : [ 0x270, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x270, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x270, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x270, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow64SplitPages' : [ 0x270, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x270, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x270, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x270, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ForkFailed' : [ 0x270, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Wow64VaSpace4Gb' : [ 0x270, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x270, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x270, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x270, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'DeprioritizeViews' : [ 0x270, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x270, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x270, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x270, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x270, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x270, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'InjectInpageErrors' : [ 0x270, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x270, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ImageNotifyDone' : [ 0x270, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'PdeUpdateNeeded' : [ 0x270, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x270, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'CrossSessionCreate' : [ 0x270, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'ProcessInserted' : [ 0x270, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'DefaultIoPriority' : [ 0x270, ['BitField', dict(start_bit = 27, end_bit = 30, native_type='unsigned long')]],
'ProcessSelfDelete' : [ 0x270, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'SetTimerResolutionLink' : [ 0x270, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ExitStatus' : [ 0x274, ['long']],
'VadRoot' : [ 0x278, ['_MM_AVL_TABLE']],
'AlpcContext' : [ 0x298, ['_ALPC_PROCESS_CONTEXT']],
'TimerResolutionLink' : [ 0x2a8, ['_LIST_ENTRY']],
'RequestedTimerResolution' : [ 0x2b0, ['unsigned long']],
'ActiveThreadsHighWatermark' : [ 0x2b4, ['unsigned long']],
'SmallestTimerResolution' : [ 0x2b8, ['unsigned long']],
'TimerResolutionStackRecord' : [ 0x2bc, ['pointer', ['_PO_DIAG_STACK_RECORD']]],
} ],
'_KPROCESS' : [ 0x98, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x10, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x18, ['unsigned long']],
'LdtDescriptor' : [ 0x1c, ['_KGDTENTRY']],
'Int21Descriptor' : [ 0x24, ['_KIDTENTRY']],
'ThreadListHead' : [ 0x2c, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x34, ['unsigned long']],
'Affinity' : [ 0x38, ['_KAFFINITY_EX']],
'ReadyListHead' : [ 0x44, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x4c, ['_SINGLE_LIST_ENTRY']],
'ActiveProcessors' : [ 0x50, ['_KAFFINITY_EX']],
'AutoAlignment' : [ 0x5c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0x5c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'DisableQuantum' : [ 0x5c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='long')]],
'ActiveGroupsMask' : [ 0x5c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ReservedFlags' : [ 0x5c, ['BitField', dict(start_bit = 4, end_bit = 32, native_type='long')]],
'ProcessFlags' : [ 0x5c, ['long']],
'BasePriority' : [ 0x60, ['unsigned char']],
'QuantumReset' : [ 0x61, ['unsigned char']],
'Visited' : [ 0x62, ['unsigned char']],
'Unused3' : [ 0x63, ['unsigned char']],
'ThreadSeed' : [ 0x64, ['array', 1, ['unsigned long']]],
'IdealNode' : [ 0x68, ['array', 1, ['unsigned short']]],
'IdealGlobalNode' : [ 0x6a, ['unsigned short']],
'Flags' : [ 0x6c, ['_KEXECUTE_OPTIONS']],
'Unused1' : [ 0x6d, ['unsigned char']],
'IopmOffset' : [ 0x6e, ['unsigned short']],
'Unused4' : [ 0x70, ['unsigned long']],
'StackCount' : [ 0x74, ['_KSTACK_COUNT']],
'ProcessListEntry' : [ 0x78, ['_LIST_ENTRY']],
'CycleTime' : [ 0x80, ['unsigned long long']],
'KernelTime' : [ 0x88, ['unsigned long']],
'UserTime' : [ 0x8c, ['unsigned long']],
'VdmTrapcHandler' : [ 0x90, ['pointer', ['void']]],
} ],
'__unnamed_1291' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0x74, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x1c, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x2c, ['pointer', ['void']]],
'AuxData' : [ 0x30, ['pointer', ['void']]],
'Privileges' : [ 0x34, ['__unnamed_1291']],
'AuditPrivileges' : [ 0x60, ['unsigned char']],
'ObjectName' : [ 0x64, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x6c, ['_UNICODE_STRING']],
} ],
'_AUX_ACCESS_DATA' : [ 0xc0, {
'PrivilegesUsed' : [ 0x0, ['pointer', ['_PRIVILEGE_SET']]],
'GenericMapping' : [ 0x4, ['_GENERIC_MAPPING']],
'AccessesToAudit' : [ 0x14, ['unsigned long']],
'MaximumAuditMask' : [ 0x18, ['unsigned long']],
'TransactionId' : [ 0x1c, ['_GUID']],
'NewSecurityDescriptor' : [ 0x2c, ['pointer', ['void']]],
'ExistingSecurityDescriptor' : [ 0x30, ['pointer', ['void']]],
'ParentSecurityDescriptor' : [ 0x34, ['pointer', ['void']]],
'DeRefSecurityDescriptor' : [ 0x38, ['pointer', ['void']]],
'SDLock' : [ 0x3c, ['pointer', ['void']]],
'AccessReasons' : [ 0x40, ['_ACCESS_REASONS']],
} ],
'__unnamed_12a0' : [ 0x4, {
'MasterIrp' : [ 0x0, ['pointer', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_12a5' : [ 0x8, {
'UserApcRoutine' : [ 0x0, ['pointer', ['void']]],
'IssuingProcess' : [ 0x0, ['pointer', ['void']]],
'UserApcContext' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_12a7' : [ 0x8, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_12a5']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_12b2' : [ 0x28, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer', ['void']]]],
'Thread' : [ 0x10, ['pointer', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x14, ['pointer', ['unsigned char']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x20, ['pointer', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x20, ['unsigned long']],
'OriginalFileObject' : [ 0x24, ['pointer', ['_FILE_OBJECT']]],
} ],
'__unnamed_12b4' : [ 0x30, {
'Overlay' : [ 0x0, ['__unnamed_12b2']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer', ['void']]],
} ],
'_IRP' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'MdlAddress' : [ 0x4, ['pointer', ['_MDL']]],
'Flags' : [ 0x8, ['unsigned long']],
'AssociatedIrp' : [ 0xc, ['__unnamed_12a0']],
'ThreadListEntry' : [ 0x10, ['_LIST_ENTRY']],
'IoStatus' : [ 0x18, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x20, ['unsigned char']],
'PendingReturned' : [ 0x21, ['unsigned char']],
'StackCount' : [ 0x22, ['unsigned char']],
'CurrentLocation' : [ 0x23, ['unsigned char']],
'Cancel' : [ 0x24, ['unsigned char']],
'CancelIrql' : [ 0x25, ['unsigned char']],
'ApcEnvironment' : [ 0x26, ['unsigned char']],
'AllocationFlags' : [ 0x27, ['unsigned char']],
'UserIosb' : [ 0x28, ['pointer', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x2c, ['pointer', ['_KEVENT']]],
'Overlay' : [ 0x30, ['__unnamed_12a7']],
'CancelRoutine' : [ 0x38, ['pointer', ['void']]],
'UserBuffer' : [ 0x3c, ['pointer', ['void']]],
'Tail' : [ 0x40, ['__unnamed_12b4']],
} ],
'__unnamed_12bb' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'FileAttributes' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'EaLength' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_12bf' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'Parameters' : [ 0xc, ['pointer', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_12c3' : [ 0x10, {
'SecurityContext' : [ 0x0, ['pointer', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned short']],
'ShareAccess' : [ 0xa, ['unsigned short']],
'Parameters' : [ 0xc, ['pointer', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_12c5' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x4, ['unsigned long']],
'ByteOffset' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'__unnamed_12c9' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x4, ['pointer', ['_UNICODE_STRING']]],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
'FileIndex' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_12cb' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_12cd' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
} ],
'__unnamed_12cf' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileIoStatusBlockRangeInformation', 43: 'FileIoPriorityHintInformation', 44: 'FileSfioReserveInformation', 45: 'FileSfioVolumeInformation', 46: 'FileHardLinkInformation', 47: 'FileProcessIdsUsingFileInformation', 48: 'FileNormalizedNameInformation', 49: 'FileNetworkPhysicalNameInformation', 50: 'FileIdGlobalTxDirectoryInformation', 51: 'FileIsRemoteDeviceInformation', 52: 'FileAttributeCacheInformation', 53: 'FileNumaNodeInformation', 54: 'FileStandardLinkInformation', 55: 'FileRemoteProtocolInformation', 56: 'FileMaximumInformation'})]],
'FileObject' : [ 0x8, ['pointer', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0xc, ['unsigned char']],
'AdvanceOnly' : [ 0xd, ['unsigned char']],
'ClusterCount' : [ 0xc, ['unsigned long']],
'DeleteHandle' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_12d1' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x4, ['pointer', ['void']]],
'EaListLength' : [ 0x8, ['unsigned long']],
'EaIndex' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_12d3' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_12d7' : [ 0x8, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsVolumeFlagsInformation', 11: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_12d9' : [ 0x10, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x4, ['unsigned long']],
'FsControlCode' : [ 0x8, ['unsigned long']],
'Type3InputBuffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_12dc' : [ 0x10, {
'Length' : [ 0x0, ['pointer', ['_LARGE_INTEGER']]],
'Key' : [ 0x4, ['unsigned long']],
'ByteOffset' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'__unnamed_12de' : [ 0x10, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x4, ['unsigned long']],
'IoControlCode' : [ 0x8, ['unsigned long']],
'Type3InputBuffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_12e0' : [ 0x8, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_12e2' : [ 0x8, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x4, ['pointer', ['void']]],
} ],
'__unnamed_12e6' : [ 0x8, {
'Vpb' : [ 0x0, ['pointer', ['_VPB']]],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_12ea' : [ 0x4, {
'Srb' : [ 0x0, ['pointer', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_12ee' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x4, ['pointer', ['void']]],
'SidList' : [ 0x8, ['pointer', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_12f2' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations', 6: 'TransportRelations'})]],
} ],
'__unnamed_12f8' : [ 0x10, {
'InterfaceType' : [ 0x0, ['pointer', ['_GUID']]],
'Size' : [ 0x4, ['unsigned short']],
'Version' : [ 0x6, ['unsigned short']],
'Interface' : [ 0x8, ['pointer', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_12fc' : [ 0x4, {
'Capabilities' : [ 0x0, ['pointer', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_1300' : [ 0x4, {
'IoResourceRequirementList' : [ 0x0, ['pointer', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_1302' : [ 0x10, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x4, ['pointer', ['void']]],
'Offset' : [ 0x8, ['unsigned long']],
'Length' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1304' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_1308' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber', 5: 'BusQueryContainerID'})]],
} ],
'__unnamed_130c' : [ 0x8, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1310' : [ 0x8, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'__unnamed_1314' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_1318' : [ 0x4, {
'PowerSequence' : [ 0x0, ['pointer', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_1320' : [ 0x10, {
'SystemContext' : [ 0x0, ['unsigned long']],
'SystemPowerStateContext' : [ 0x0, ['_SYSTEM_POWER_STATE_CONTEXT']],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x8, ['_POWER_STATE']],
'ShutdownType' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
} ],
'__unnamed_1324' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x4, ['pointer', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_1326' : [ 0x10, {
'ProviderId' : [ 0x0, ['unsigned long']],
'DataPath' : [ 0x4, ['pointer', ['void']]],
'BufferSize' : [ 0x8, ['unsigned long']],
'Buffer' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_1328' : [ 0x10, {
'Argument1' : [ 0x0, ['pointer', ['void']]],
'Argument2' : [ 0x4, ['pointer', ['void']]],
'Argument3' : [ 0x8, ['pointer', ['void']]],
'Argument4' : [ 0xc, ['pointer', ['void']]],
} ],
'__unnamed_132a' : [ 0x10, {
'Create' : [ 0x0, ['__unnamed_12bb']],
'CreatePipe' : [ 0x0, ['__unnamed_12bf']],
'CreateMailslot' : [ 0x0, ['__unnamed_12c3']],
'Read' : [ 0x0, ['__unnamed_12c5']],
'Write' : [ 0x0, ['__unnamed_12c5']],
'QueryDirectory' : [ 0x0, ['__unnamed_12c9']],
'NotifyDirectory' : [ 0x0, ['__unnamed_12cb']],
'QueryFile' : [ 0x0, ['__unnamed_12cd']],
'SetFile' : [ 0x0, ['__unnamed_12cf']],
'QueryEa' : [ 0x0, ['__unnamed_12d1']],
'SetEa' : [ 0x0, ['__unnamed_12d3']],
'QueryVolume' : [ 0x0, ['__unnamed_12d7']],
'SetVolume' : [ 0x0, ['__unnamed_12d7']],
'FileSystemControl' : [ 0x0, ['__unnamed_12d9']],
'LockControl' : [ 0x0, ['__unnamed_12dc']],
'DeviceIoControl' : [ 0x0, ['__unnamed_12de']],
'QuerySecurity' : [ 0x0, ['__unnamed_12e0']],
'SetSecurity' : [ 0x0, ['__unnamed_12e2']],
'MountVolume' : [ 0x0, ['__unnamed_12e6']],
'VerifyVolume' : [ 0x0, ['__unnamed_12e6']],
'Scsi' : [ 0x0, ['__unnamed_12ea']],
'QueryQuota' : [ 0x0, ['__unnamed_12ee']],
'SetQuota' : [ 0x0, ['__unnamed_12d3']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_12f2']],
'QueryInterface' : [ 0x0, ['__unnamed_12f8']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_12fc']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_1300']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_1302']],
'SetLock' : [ 0x0, ['__unnamed_1304']],
'QueryId' : [ 0x0, ['__unnamed_1308']],
'QueryDeviceText' : [ 0x0, ['__unnamed_130c']],
'UsageNotification' : [ 0x0, ['__unnamed_1310']],
'WaitWake' : [ 0x0, ['__unnamed_1314']],
'PowerSequence' : [ 0x0, ['__unnamed_1318']],
'Power' : [ 0x0, ['__unnamed_1320']],
'StartDevice' : [ 0x0, ['__unnamed_1324']],
'WMI' : [ 0x0, ['__unnamed_1326']],
'Others' : [ 0x0, ['__unnamed_1328']],
} ],
'_IO_STACK_LOCATION' : [ 0x24, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x4, ['__unnamed_132a']],
'DeviceObject' : [ 0x14, ['pointer', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x18, ['pointer', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x1c, ['pointer', ['void']]],
'Context' : [ 0x20, ['pointer', ['void']]],
} ],
'__unnamed_1340' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0xb8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0xc, ['pointer', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x14, ['pointer', ['_IRP']]],
'Timer' : [ 0x18, ['pointer', ['_IO_TIMER']]],
'Flags' : [ 0x1c, ['unsigned long']],
'Characteristics' : [ 0x20, ['unsigned long']],
'Vpb' : [ 0x24, ['pointer', ['_VPB']]],
'DeviceExtension' : [ 0x28, ['pointer', ['void']]],
'DeviceType' : [ 0x2c, ['unsigned long']],
'StackSize' : [ 0x30, ['unsigned char']],
'Queue' : [ 0x34, ['__unnamed_1340']],
'AlignmentRequirement' : [ 0x5c, ['unsigned long']],
'DeviceQueue' : [ 0x60, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0x74, ['_KDPC']],
'ActiveThreadCount' : [ 0x94, ['unsigned long']],
'SecurityDescriptor' : [ 0x98, ['pointer', ['void']]],
'DeviceLock' : [ 0x9c, ['_KEVENT']],
'SectorSize' : [ 0xac, ['unsigned short']],
'Spare1' : [ 0xae, ['unsigned short']],
'DeviceObjectExtension' : [ 0xb0, ['pointer', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0xb4, ['pointer', ['void']]],
} ],
'_KDPC' : [ 0x20, {
'Type' : [ 0x0, ['unsigned char']],
'Importance' : [ 0x1, ['unsigned char']],
'Number' : [ 0x2, ['unsigned short']],
'DpcListEntry' : [ 0x4, ['_LIST_ENTRY']],
'DeferredRoutine' : [ 0xc, ['pointer', ['void']]],
'DeferredContext' : [ 0x10, ['pointer', ['void']]],
'SystemArgument1' : [ 0x14, ['pointer', ['void']]],
'SystemArgument2' : [ 0x18, ['pointer', ['void']]],
'DpcData' : [ 0x1c, ['pointer', ['void']]],
} ],
'_IO_DRIVER_CREATE_CONTEXT' : [ 0x10, {
'Size' : [ 0x0, ['short']],
'ExtraCreateParameter' : [ 0x4, ['pointer', ['_ECP_LIST']]],
'DeviceObjectHint' : [ 0x8, ['pointer', ['void']]],
'TxnParameters' : [ 0xc, ['pointer', ['_TXN_PARAMETER_BLOCK']]],
} ],
'_IO_PRIORITY_INFO' : [ 0x10, {
'Size' : [ 0x0, ['unsigned long']],
'ThreadPriority' : [ 0x4, ['unsigned long']],
'PagePriority' : [ 0x8, ['unsigned long']],
'IoPriority' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IoPriorityVeryLow', 1: 'IoPriorityLow', 2: 'IoPriorityNormal', 3: 'IoPriorityHigh', 4: 'IoPriorityCritical', 5: 'MaxIoPriorityTypes'})]],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x4, ['pointer', ['void']]],
'ObjectName' : [ 0x8, ['pointer', ['_UNICODE_STRING']]],
'Attributes' : [ 0xc, ['unsigned long']],
'SecurityDescriptor' : [ 0x10, ['pointer', ['void']]],
'SecurityQualityOfService' : [ 0x14, ['pointer', ['void']]],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_EVENT_DATA_DESCRIPTOR' : [ 0x10, {
'Ptr' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_EVENT_DESCRIPTOR' : [ 0x10, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Channel' : [ 0x3, ['unsigned char']],
'Level' : [ 0x4, ['unsigned char']],
'Opcode' : [ 0x5, ['unsigned char']],
'Task' : [ 0x6, ['unsigned short']],
'Keyword' : [ 0x8, ['unsigned long long']],
} ],
'_PERFINFO_GROUPMASK' : [ 0x20, {
'Masks' : [ 0x0, ['array', 8, ['unsigned long']]],
} ],
'_FILE_OBJECT' : [ 0x80, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x8, ['pointer', ['_VPB']]],
'FsContext' : [ 0xc, ['pointer', ['void']]],
'FsContext2' : [ 0x10, ['pointer', ['void']]],
'SectionObjectPointer' : [ 0x14, ['pointer', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x18, ['pointer', ['void']]],
'FinalStatus' : [ 0x1c, ['long']],
'RelatedFileObject' : [ 0x20, ['pointer', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x24, ['unsigned char']],
'DeletePending' : [ 0x25, ['unsigned char']],
'ReadAccess' : [ 0x26, ['unsigned char']],
'WriteAccess' : [ 0x27, ['unsigned char']],
'DeleteAccess' : [ 0x28, ['unsigned char']],
'SharedRead' : [ 0x29, ['unsigned char']],
'SharedWrite' : [ 0x2a, ['unsigned char']],
'SharedDelete' : [ 0x2b, ['unsigned char']],
'Flags' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x38, ['_LARGE_INTEGER']],
'Waiters' : [ 0x40, ['unsigned long']],
'Busy' : [ 0x44, ['unsigned long']],
'LastLock' : [ 0x48, ['pointer', ['void']]],
'Lock' : [ 0x4c, ['_KEVENT']],
'Event' : [ 0x5c, ['_KEVENT']],
'CompletionContext' : [ 0x6c, ['pointer', ['_IO_COMPLETION_CONTEXT']]],
'IrpListLock' : [ 0x70, ['unsigned long']],
'IrpList' : [ 0x74, ['_LIST_ENTRY']],
'FileObjectExtension' : [ 0x7c, ['pointer', ['void']]],
} ],
'_EX_RUNDOWN_REF' : [ 0x4, {
'Count' : [ 0x0, ['unsigned long']],
'Ptr' : [ 0x0, ['pointer', ['void']]],
} ],
'_MM_PAGE_ACCESS_INFO_HEADER' : [ 0x38, {
'Link' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Type' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'MmPteAccessType', 1: 'MmCcReadAheadType', 2: 'MmPfnRepurposeType', 3: 'MmMaximumPageAccessType'})]],
'EmptySequenceNumber' : [ 0x8, ['unsigned long']],
'CurrentFileIndex' : [ 0x8, ['unsigned long']],
'CreateTime' : [ 0x10, ['unsigned long long']],
'EmptyTime' : [ 0x18, ['unsigned long long']],
'TempEntry' : [ 0x18, ['pointer', ['_MM_PAGE_ACCESS_INFO']]],
'PageEntry' : [ 0x20, ['pointer', ['_MM_PAGE_ACCESS_INFO']]],
'FileEntry' : [ 0x24, ['pointer', ['unsigned long']]],
'FirstFileEntry' : [ 0x28, ['pointer', ['unsigned long']]],
'Process' : [ 0x2c, ['pointer', ['_EPROCESS']]],
'SessionId' : [ 0x30, ['unsigned long']],
'PageFrameEntry' : [ 0x20, ['pointer', ['unsigned long']]],
'LastPageFrameEntry' : [ 0x24, ['pointer', ['unsigned long']]],
} ],
'_WHEA_ERROR_PACKET_V2' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['_WHEA_ERROR_PACKET_FLAGS']],
'ErrorType' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrTypeProcessor', 1: 'WheaErrTypeMemory', 2: 'WheaErrTypePCIExpress', 3: 'WheaErrTypeNMI', 4: 'WheaErrTypePCIXBus', 5: 'WheaErrTypePCIXDevice', 6: 'WheaErrTypeGeneric'})]],
'ErrorSeverity' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ErrorSourceId' : [ 0x18, ['unsigned long']],
'ErrorSourceType' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSrcTypeMCE', 1: 'WheaErrSrcTypeCMC', 2: 'WheaErrSrcTypeCPE', 3: 'WheaErrSrcTypeNMI', 4: 'WheaErrSrcTypePCIe', 5: 'WheaErrSrcTypeGeneric', 6: 'WheaErrSrcTypeINIT', 7: 'WheaErrSrcTypeBOOT', 8: 'WheaErrSrcTypeSCIGeneric', 9: 'WheaErrSrcTypeIPFMCA', 10: 'WheaErrSrcTypeIPFCMC', 11: 'WheaErrSrcTypeIPFCPE', 12: 'WheaErrSrcTypeMax'})]],
'NotifyType' : [ 0x20, ['_GUID']],
'Context' : [ 0x30, ['unsigned long long']],
'DataFormat' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'WheaDataFormatIPFSalRecord', 1: 'WheaDataFormatXPFMCA', 2: 'WheaDataFormatMemory', 3: 'WheaDataFormatPCIExpress', 4: 'WheaDataFormatNMIPort', 5: 'WheaDataFormatPCIXBus', 6: 'WheaDataFormatPCIXDevice', 7: 'WheaDataFormatGeneric', 8: 'WheaDataFormatMax'})]],
'Reserved1' : [ 0x3c, ['unsigned long']],
'DataOffset' : [ 0x40, ['unsigned long']],
'DataLength' : [ 0x44, ['unsigned long']],
'PshedDataOffset' : [ 0x48, ['unsigned long']],
'PshedDataLength' : [ 0x4c, ['unsigned long']],
} ],
'_WHEA_ERROR_RECORD' : [ 0xc8, {
'Header' : [ 0x0, ['_WHEA_ERROR_RECORD_HEADER']],
'SectionDescriptor' : [ 0x80, ['array', 1, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR' : [ 0x48, {
'SectionOffset' : [ 0x0, ['unsigned long']],
'SectionLength' : [ 0x4, ['unsigned long']],
'Revision' : [ 0x8, ['_WHEA_REVISION']],
'ValidBits' : [ 0xa, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS']],
'Reserved' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS']],
'SectionType' : [ 0x10, ['_GUID']],
'FRUId' : [ 0x20, ['_GUID']],
'SectionSeverity' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'FRUText' : [ 0x34, ['array', 20, ['unsigned char']]],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'_FSRTL_ADVANCED_FCB_HEADER' : [ 0x40, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned char']],
'IsFastIoPossible' : [ 0x5, ['unsigned char']],
'Flags2' : [ 0x6, ['unsigned char']],
'Reserved' : [ 0x7, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned char')]],
'Version' : [ 0x7, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'Resource' : [ 0x8, ['pointer', ['_ERESOURCE']]],
'PagingIoResource' : [ 0xc, ['pointer', ['_ERESOURCE']]],
'AllocationSize' : [ 0x10, ['_LARGE_INTEGER']],
'FileSize' : [ 0x18, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x20, ['_LARGE_INTEGER']],
'FastMutex' : [ 0x28, ['pointer', ['_FAST_MUTEX']]],
'FilterContexts' : [ 0x2c, ['_LIST_ENTRY']],
'PushLock' : [ 0x34, ['_EX_PUSH_LOCK']],
'FileContextSupportPointer' : [ 0x38, ['pointer', ['pointer', ['void']]]],
} ],
'_iobuf' : [ 0x20, {
'_ptr' : [ 0x0, ['pointer', ['unsigned char']]],
'_cnt' : [ 0x4, ['long']],
'_base' : [ 0x8, ['pointer', ['unsigned char']]],
'_flag' : [ 0xc, ['long']],
'_file' : [ 0x10, ['long']],
'_charbuf' : [ 0x14, ['long']],
'_bufsiz' : [ 0x18, ['long']],
'_tmpfname' : [ 0x1c, ['pointer', ['unsigned char']]],
} ],
'__unnamed_14ad' : [ 0x4, {
'Long' : [ 0x0, ['unsigned long']],
'VolatileLong' : [ 0x0, ['unsigned long']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'TimeStamp' : [ 0x0, ['_MMPTE_TIMESTAMP']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_14ad']],
} ],
'__unnamed_14be' : [ 0xc, {
'I386' : [ 0x0, ['_I386_LOADER_BLOCK']],
'Ia64' : [ 0x0, ['_IA64_LOADER_BLOCK']],
} ],
'_LOADER_PARAMETER_BLOCK' : [ 0x88, {
'OsMajorVersion' : [ 0x0, ['unsigned long']],
'OsMinorVersion' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'LoadOrderListHead' : [ 0x10, ['_LIST_ENTRY']],
'MemoryDescriptorListHead' : [ 0x18, ['_LIST_ENTRY']],
'BootDriverListHead' : [ 0x20, ['_LIST_ENTRY']],
'KernelStack' : [ 0x28, ['unsigned long']],
'Prcb' : [ 0x2c, ['unsigned long']],
'Process' : [ 0x30, ['unsigned long']],
'Thread' : [ 0x34, ['unsigned long']],
'RegistryLength' : [ 0x38, ['unsigned long']],
'RegistryBase' : [ 0x3c, ['pointer', ['void']]],
'ConfigurationRoot' : [ 0x40, ['pointer', ['_CONFIGURATION_COMPONENT_DATA']]],
'ArcBootDeviceName' : [ 0x44, ['pointer', ['unsigned char']]],
'ArcHalDeviceName' : [ 0x48, ['pointer', ['unsigned char']]],
'NtBootPathName' : [ 0x4c, ['pointer', ['unsigned char']]],
'NtHalPathName' : [ 0x50, ['pointer', ['unsigned char']]],
'LoadOptions' : [ 0x54, ['pointer', ['unsigned char']]],
'NlsData' : [ 0x58, ['pointer', ['_NLS_DATA_BLOCK']]],
'ArcDiskInformation' : [ 0x5c, ['pointer', ['_ARC_DISK_INFORMATION']]],
'OemFontFile' : [ 0x60, ['pointer', ['void']]],
'Extension' : [ 0x64, ['pointer', ['_LOADER_PARAMETER_EXTENSION']]],
'u' : [ 0x68, ['__unnamed_14be']],
'FirmwareInformation' : [ 0x74, ['_FIRMWARE_INFORMATION_LOADER_BLOCK']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0xc, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x8, ['unsigned char']],
} ],
'_MMPFNLIST' : [ 0x14, {
'Total' : [ 0x0, ['unsigned long']],
'ListName' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x8, ['unsigned long']],
'Blink' : [ 0xc, ['unsigned long']],
'Lock' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_14ef' : [ 0x4, {
'Flink' : [ 0x0, ['unsigned long']],
'WsIndex' : [ 0x0, ['unsigned long']],
'Event' : [ 0x0, ['pointer', ['_KEVENT']]],
'Next' : [ 0x0, ['pointer', ['void']]],
'VolatileNext' : [ 0x0, ['pointer', ['void']]],
'KernelStackOwner' : [ 0x0, ['pointer', ['_KTHREAD']]],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_14f1' : [ 0x4, {
'Blink' : [ 0x0, ['unsigned long']],
'ImageProtoPte' : [ 0x0, ['pointer', ['_MMPTE']]],
'ShareCount' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_14f4' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'VolatileReferenceCount' : [ 0x0, ['short']],
'ShortFlags' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_14f6' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'e1' : [ 0x2, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_14f4']],
} ],
'__unnamed_14fb' : [ 0x4, {
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 25, native_type='unsigned long')]],
'PfnImageVerified' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'AweAllocation' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMPFN' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_14ef']],
'u2' : [ 0x4, ['__unnamed_14f1']],
'PteAddress' : [ 0x8, ['pointer', ['_MMPTE']]],
'VolatilePteAddress' : [ 0x8, ['pointer', ['void']]],
'Lock' : [ 0x8, ['long']],
'PteLong' : [ 0x8, ['unsigned long']],
'u3' : [ 0xc, ['__unnamed_14f6']],
'OriginalPte' : [ 0x10, ['_MMPTE']],
'AweReferenceCount' : [ 0x10, ['long']],
'u4' : [ 0x14, ['__unnamed_14fb']],
} ],
'_MI_COLOR_BASE' : [ 0x8, {
'ColorPointer' : [ 0x0, ['pointer', ['unsigned short']]],
'ColorMask' : [ 0x4, ['unsigned short']],
'ColorNode' : [ 0x6, ['unsigned short']],
} ],
'_MMSUPPORT' : [ 0x6c, {
'WorkingSetMutex' : [ 0x0, ['_EX_PUSH_LOCK']],
'ExitGate' : [ 0x4, ['pointer', ['_KGATE']]],
'AccessLog' : [ 0x8, ['pointer', ['void']]],
'WorkingSetExpansionLinks' : [ 0xc, ['_LIST_ENTRY']],
'AgeDistribution' : [ 0x14, ['array', 7, ['unsigned long']]],
'MinimumWorkingSetSize' : [ 0x30, ['unsigned long']],
'WorkingSetSize' : [ 0x34, ['unsigned long']],
'WorkingSetPrivateSize' : [ 0x38, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x3c, ['unsigned long']],
'ChargedWslePages' : [ 0x40, ['unsigned long']],
'ActualWslePages' : [ 0x44, ['unsigned long']],
'WorkingSetSizeOverhead' : [ 0x48, ['unsigned long']],
'PeakWorkingSetSize' : [ 0x4c, ['unsigned long']],
'HardFaultCount' : [ 0x50, ['unsigned long']],
'VmWorkingSetList' : [ 0x54, ['pointer', ['_MMWSL']]],
'NextPageColor' : [ 0x58, ['unsigned short']],
'LastTrimStamp' : [ 0x5a, ['unsigned short']],
'PageFaultCount' : [ 0x5c, ['unsigned long']],
'RepurposeCount' : [ 0x60, ['unsigned long']],
'Spare' : [ 0x64, ['array', 1, ['unsigned long']]],
'Flags' : [ 0x68, ['_MMSUPPORT_FLAGS']],
} ],
'_MMWSL' : [ 0x6a8, {
'FirstFree' : [ 0x0, ['unsigned long']],
'FirstDynamic' : [ 0x4, ['unsigned long']],
'LastEntry' : [ 0x8, ['unsigned long']],
'NextSlot' : [ 0xc, ['unsigned long']],
'Wsle' : [ 0x10, ['pointer', ['_MMWSLE']]],
'LowestPagableAddress' : [ 0x14, ['pointer', ['void']]],
'LastInitializedWsle' : [ 0x18, ['unsigned long']],
'NextAgingSlot' : [ 0x1c, ['unsigned long']],
'NumberOfCommittedPageTables' : [ 0x20, ['unsigned long']],
'VadBitMapHint' : [ 0x24, ['unsigned long']],
'NonDirectCount' : [ 0x28, ['unsigned long']],
'LastVadBit' : [ 0x2c, ['unsigned long']],
'MaximumLastVadBit' : [ 0x30, ['unsigned long']],
'LastAllocationSizeHint' : [ 0x34, ['unsigned long']],
'LastAllocationSize' : [ 0x38, ['unsigned long']],
'NonDirectHash' : [ 0x3c, ['pointer', ['_MMWSLE_NONDIRECT_HASH']]],
'HashTableStart' : [ 0x40, ['pointer', ['_MMWSLE_HASH']]],
'HighestPermittedHashAddress' : [ 0x44, ['pointer', ['_MMWSLE_HASH']]],
'UsedPageTableEntries' : [ 0x48, ['array', 768, ['unsigned short']]],
'CommittedPageTables' : [ 0x648, ['array', 24, ['unsigned long']]],
} ],
'__unnamed_152b' : [ 0x4, {
'VirtualAddress' : [ 0x0, ['pointer', ['void']]],
'Long' : [ 0x0, ['unsigned long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
'e2' : [ 0x0, ['_MMWSLE_FREE_ENTRY']],
} ],
'_MMWSLE' : [ 0x4, {
'u1' : [ 0x0, ['__unnamed_152b']],
} ],
'__unnamed_153a' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'__unnamed_1544' : [ 0xc, {
'NumberOfSystemCacheViews' : [ 0x0, ['unsigned long']],
'ImageRelocationStartBit' : [ 0x0, ['unsigned long']],
'WritableUserReferences' : [ 0x4, ['long']],
'ImageRelocationSizeIn64k' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'Unused' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 30, native_type='unsigned long')]],
'BitMap64' : [ 0x4, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'ImageActive' : [ 0x4, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubsectionRoot' : [ 0x8, ['pointer', ['_MM_SUBSECTION_AVL_TABLE']]],
'SeImageStub' : [ 0x8, ['pointer', ['_MI_IMAGE_SECURITY_REFERENCE']]],
} ],
'__unnamed_1546' : [ 0xc, {
'e2' : [ 0x0, ['__unnamed_1544']],
} ],
'_CONTROL_AREA' : [ 0x50, {
'Segment' : [ 0x0, ['pointer', ['_SEGMENT']]],
'DereferenceList' : [ 0x4, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0xc, ['unsigned long']],
'NumberOfPfnReferences' : [ 0x10, ['unsigned long']],
'NumberOfMappedViews' : [ 0x14, ['unsigned long']],
'NumberOfUserReferences' : [ 0x18, ['unsigned long']],
'u' : [ 0x1c, ['__unnamed_153a']],
'FlushInProgressCount' : [ 0x20, ['unsigned long']],
'FilePointer' : [ 0x24, ['_EX_FAST_REF']],
'ControlAreaLock' : [ 0x28, ['long']],
'ModifiedWriteCount' : [ 0x2c, ['unsigned long']],
'StartingFrame' : [ 0x2c, ['unsigned long']],
'WaitingForDeletion' : [ 0x30, ['pointer', ['_MI_SECTION_CREATION_GATE']]],
'u2' : [ 0x34, ['__unnamed_1546']],
'LockedPages' : [ 0x40, ['long long']],
'ViewList' : [ 0x48, ['_LIST_ENTRY']],
} ],
'_MM_STORE_KEY' : [ 0x4, {
'KeyLow' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 28, native_type='unsigned long')]],
'KeyHigh' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 32, native_type='unsigned long')]],
'EntireKey' : [ 0x0, ['unsigned long']],
} ],
'_MMPAGING_FILE' : [ 0x50, {
'Size' : [ 0x0, ['unsigned long']],
'MaximumSize' : [ 0x4, ['unsigned long']],
'MinimumSize' : [ 0x8, ['unsigned long']],
'FreeSpace' : [ 0xc, ['unsigned long']],
'PeakUsage' : [ 0x10, ['unsigned long']],
'HighestPage' : [ 0x14, ['unsigned long']],
'File' : [ 0x18, ['pointer', ['_FILE_OBJECT']]],
'Entry' : [ 0x1c, ['array', 2, ['pointer', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'PageFileName' : [ 0x24, ['_UNICODE_STRING']],
'Bitmap' : [ 0x2c, ['pointer', ['_RTL_BITMAP']]],
'EvictStoreBitmap' : [ 0x30, ['pointer', ['_RTL_BITMAP']]],
'BitmapHint' : [ 0x34, ['unsigned long']],
'LastAllocationSize' : [ 0x38, ['unsigned long']],
'ToBeEvictedCount' : [ 0x3c, ['unsigned long']],
'PageFileNumber' : [ 0x40, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned short')]],
'BootPartition' : [ 0x40, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'Spare0' : [ 0x40, ['BitField', dict(start_bit = 5, end_bit = 16, native_type='unsigned short')]],
'AdriftMdls' : [ 0x42, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Spare1' : [ 0x42, ['BitField', dict(start_bit = 1, end_bit = 16, native_type='unsigned short')]],
'FileHandle' : [ 0x44, ['pointer', ['void']]],
'Lock' : [ 0x48, ['unsigned long']],
'LockOwner' : [ 0x4c, ['pointer', ['_ETHREAD']]],
} ],
'_RTL_BITMAP' : [ 0x8, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x4, ['pointer', ['unsigned long']]],
} ],
'_MM_AVL_TABLE' : [ 0x20, {
'BalancedRoot' : [ 0x0, ['_MMADDRESS_NODE']],
'DepthOfTree' : [ 0x14, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long')]],
'Unused' : [ 0x14, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long')]],
'NumberGenericTableElements' : [ 0x14, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
'NodeHint' : [ 0x18, ['pointer', ['void']]],
'NodeFreeHint' : [ 0x1c, ['pointer', ['void']]],
} ],
'__unnamed_1580' : [ 0x4, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long')]],
'Parent' : [ 0x0, ['pointer', ['_MMVAD']]],
} ],
'__unnamed_1583' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_1586' : [ 0x4, {
'LongFlags3' : [ 0x0, ['unsigned long']],
'VadFlags3' : [ 0x0, ['_MMVAD_FLAGS3']],
} ],
'_MMVAD_SHORT' : [ 0x20, {
'u1' : [ 0x0, ['__unnamed_1580']],
'LeftChild' : [ 0x4, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x8, ['pointer', ['_MMVAD']]],
'StartingVpn' : [ 0xc, ['unsigned long']],
'EndingVpn' : [ 0x10, ['unsigned long']],
'u' : [ 0x14, ['__unnamed_1583']],
'PushLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'u5' : [ 0x1c, ['__unnamed_1586']],
} ],
'__unnamed_158e' : [ 0x4, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long')]],
'Parent' : [ 0x0, ['pointer', ['_MMADDRESS_NODE']]],
} ],
'_MMADDRESS_NODE' : [ 0x14, {
'u1' : [ 0x0, ['__unnamed_158e']],
'LeftChild' : [ 0x4, ['pointer', ['_MMADDRESS_NODE']]],
'RightChild' : [ 0x8, ['pointer', ['_MMADDRESS_NODE']]],
'StartingVpn' : [ 0xc, ['unsigned long']],
'EndingVpn' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_1593' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'_MMVAD' : [ 0x3c, {
'u1' : [ 0x0, ['__unnamed_1580']],
'LeftChild' : [ 0x4, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x8, ['pointer', ['_MMVAD']]],
'StartingVpn' : [ 0xc, ['unsigned long']],
'EndingVpn' : [ 0x10, ['unsigned long']],
'u' : [ 0x14, ['__unnamed_1583']],
'PushLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'u5' : [ 0x1c, ['__unnamed_1586']],
'u2' : [ 0x20, ['__unnamed_1593']],
'Subsection' : [ 0x24, ['pointer', ['_SUBSECTION']]],
'MappedSubsection' : [ 0x24, ['pointer', ['_MSUBSECTION']]],
'FirstPrototypePte' : [ 0x28, ['pointer', ['_MMPTE']]],
'LastContiguousPte' : [ 0x2c, ['pointer', ['_MMPTE']]],
'ViewLinks' : [ 0x30, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x38, ['pointer', ['_EPROCESS']]],
} ],
'__unnamed_159e' : [ 0x20, {
'Mdl' : [ 0x0, ['_MDL']],
'Page' : [ 0x1c, ['array', 1, ['unsigned long']]],
} ],
'_MI_PAGEFILE_TRACES' : [ 0x40, {
'Status' : [ 0x0, ['long']],
'Priority' : [ 0x4, ['unsigned char']],
'IrpPriority' : [ 0x5, ['unsigned char']],
'CurrentTime' : [ 0x8, ['_LARGE_INTEGER']],
'AvailablePages' : [ 0x10, ['unsigned long']],
'ModifiedPagesTotal' : [ 0x14, ['unsigned long']],
'ModifiedPagefilePages' : [ 0x18, ['unsigned long']],
'ModifiedNoWritePages' : [ 0x1c, ['unsigned long']],
'MdlHack' : [ 0x20, ['__unnamed_159e']],
} ],
'__unnamed_15a4' : [ 0x8, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
} ],
'__unnamed_15a6' : [ 0x4, {
'KeepForever' : [ 0x0, ['unsigned long']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0x60, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'u' : [ 0x8, ['__unnamed_15a4']],
'Irp' : [ 0x10, ['pointer', ['_IRP']]],
'u1' : [ 0x14, ['__unnamed_15a6']],
'PagingFile' : [ 0x18, ['pointer', ['_MMPAGING_FILE']]],
'File' : [ 0x1c, ['pointer', ['_FILE_OBJECT']]],
'ControlArea' : [ 0x20, ['pointer', ['_CONTROL_AREA']]],
'FileResource' : [ 0x24, ['pointer', ['_ERESOURCE']]],
'WriteOffset' : [ 0x28, ['_LARGE_INTEGER']],
'IssueTime' : [ 0x30, ['_LARGE_INTEGER']],
'PointerMdl' : [ 0x38, ['pointer', ['_MDL']]],
'Mdl' : [ 0x3c, ['_MDL']],
'Page' : [ 0x58, ['array', 1, ['unsigned long']]],
} ],
'_MDL' : [ 0x1c, {
'Next' : [ 0x0, ['pointer', ['_MDL']]],
'Size' : [ 0x4, ['short']],
'MdlFlags' : [ 0x6, ['short']],
'Process' : [ 0x8, ['pointer', ['_EPROCESS']]],
'MappedSystemVa' : [ 0xc, ['pointer', ['void']]],
'StartVa' : [ 0x10, ['pointer', ['void']]],
'ByteCount' : [ 0x14, ['unsigned long']],
'ByteOffset' : [ 0x18, ['unsigned long']],
} ],
'_HHIVE' : [ 0x2ec, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x4, ['pointer', ['void']]],
'ReleaseCellRoutine' : [ 0x8, ['pointer', ['void']]],
'Allocate' : [ 0xc, ['pointer', ['void']]],
'Free' : [ 0x10, ['pointer', ['void']]],
'FileSetSize' : [ 0x14, ['pointer', ['void']]],
'FileWrite' : [ 0x18, ['pointer', ['void']]],
'FileRead' : [ 0x1c, ['pointer', ['void']]],
'FileFlush' : [ 0x20, ['pointer', ['void']]],
'HiveLoadFailure' : [ 0x24, ['pointer', ['void']]],
'BaseBlock' : [ 0x28, ['pointer', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x2c, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x34, ['unsigned long']],
'DirtyAlloc' : [ 0x38, ['unsigned long']],
'BaseBlockAlloc' : [ 0x3c, ['unsigned long']],
'Cluster' : [ 0x40, ['unsigned long']],
'Flat' : [ 0x44, ['unsigned char']],
'ReadOnly' : [ 0x45, ['unsigned char']],
'DirtyFlag' : [ 0x46, ['unsigned char']],
'HvBinHeadersUse' : [ 0x48, ['unsigned long']],
'HvFreeCellsUse' : [ 0x4c, ['unsigned long']],
'HvUsedCellsUse' : [ 0x50, ['unsigned long']],
'CmUsedCellsUse' : [ 0x54, ['unsigned long']],
'HiveFlags' : [ 0x58, ['unsigned long']],
'CurrentLog' : [ 0x5c, ['unsigned long']],
'LogSize' : [ 0x60, ['array', 2, ['unsigned long']]],
'RefreshCount' : [ 0x68, ['unsigned long']],
'StorageTypeCount' : [ 0x6c, ['unsigned long']],
'Version' : [ 0x70, ['unsigned long']],
'Storage' : [ 0x74, ['array', 2, ['_DUAL']]],
} ],
'_CM_VIEW_OF_FILE' : [ 0x30, {
'MappedViewLinks' : [ 0x0, ['_LIST_ENTRY']],
'PinnedViewLinks' : [ 0x8, ['_LIST_ENTRY']],
'FlushedViewLinks' : [ 0x10, ['_LIST_ENTRY']],
'CmHive' : [ 0x18, ['pointer', ['_CMHIVE']]],
'Bcb' : [ 0x1c, ['pointer', ['void']]],
'ViewAddress' : [ 0x20, ['pointer', ['void']]],
'FileOffset' : [ 0x24, ['unsigned long']],
'Size' : [ 0x28, ['unsigned long']],
'UseCount' : [ 0x2c, ['unsigned long']],
} ],
'_CMHIVE' : [ 0x630, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0x2ec, ['array', 6, ['pointer', ['void']]]],
'NotifyList' : [ 0x304, ['_LIST_ENTRY']],
'HiveList' : [ 0x30c, ['_LIST_ENTRY']],
'PreloadedHiveList' : [ 0x314, ['_LIST_ENTRY']],
'HiveRundown' : [ 0x31c, ['_EX_RUNDOWN_REF']],
'ParseCacheEntries' : [ 0x320, ['_LIST_ENTRY']],
'KcbCacheTable' : [ 0x328, ['pointer', ['_CM_KEY_HASH_TABLE_ENTRY']]],
'KcbCacheTableSize' : [ 0x32c, ['unsigned long']],
'Identity' : [ 0x330, ['unsigned long']],
'HiveLock' : [ 0x334, ['pointer', ['_FAST_MUTEX']]],
'ViewLock' : [ 0x338, ['_EX_PUSH_LOCK']],
'ViewLockOwner' : [ 0x33c, ['pointer', ['_KTHREAD']]],
'ViewLockLast' : [ 0x340, ['unsigned long']],
'ViewUnLockLast' : [ 0x344, ['unsigned long']],
'WriterLock' : [ 0x348, ['pointer', ['_FAST_MUTEX']]],
'FlusherLock' : [ 0x34c, ['pointer', ['_ERESOURCE']]],
'FlushDirtyVector' : [ 0x350, ['_RTL_BITMAP']],
'FlushOffsetArray' : [ 0x358, ['pointer', ['CMP_OFFSET_ARRAY']]],
'FlushOffsetArrayCount' : [ 0x35c, ['unsigned long']],
'FlushHiveTruncated' : [ 0x360, ['unsigned long']],
'FlushLock2' : [ 0x364, ['pointer', ['_FAST_MUTEX']]],
'SecurityLock' : [ 0x368, ['_EX_PUSH_LOCK']],
'MappedViewList' : [ 0x36c, ['_LIST_ENTRY']],
'PinnedViewList' : [ 0x374, ['_LIST_ENTRY']],
'FlushedViewList' : [ 0x37c, ['_LIST_ENTRY']],
'MappedViewCount' : [ 0x384, ['unsigned short']],
'PinnedViewCount' : [ 0x386, ['unsigned short']],
'UseCount' : [ 0x388, ['unsigned long']],
'ViewsPerHive' : [ 0x38c, ['unsigned long']],
'FileObject' : [ 0x390, ['pointer', ['_FILE_OBJECT']]],
'LastShrinkHiveSize' : [ 0x394, ['unsigned long']],
'ActualFileSize' : [ 0x398, ['_LARGE_INTEGER']],
'FileFullPath' : [ 0x3a0, ['_UNICODE_STRING']],
'FileUserName' : [ 0x3a8, ['_UNICODE_STRING']],
'HiveRootPath' : [ 0x3b0, ['_UNICODE_STRING']],
'SecurityCount' : [ 0x3b8, ['unsigned long']],
'SecurityCacheSize' : [ 0x3bc, ['unsigned long']],
'SecurityHitHint' : [ 0x3c0, ['long']],
'SecurityCache' : [ 0x3c4, ['pointer', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0x3c8, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEventCount' : [ 0x5c8, ['unsigned long']],
'UnloadEventArray' : [ 0x5cc, ['pointer', ['pointer', ['_KEVENT']]]],
'RootKcb' : [ 0x5d0, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0x5d4, ['unsigned char']],
'UnloadWorkItem' : [ 0x5d8, ['pointer', ['_CM_WORKITEM']]],
'UnloadWorkItemHolder' : [ 0x5dc, ['_CM_WORKITEM']],
'GrowOnlyMode' : [ 0x5f0, ['unsigned char']],
'GrowOffset' : [ 0x5f4, ['unsigned long']],
'KcbConvertListHead' : [ 0x5f8, ['_LIST_ENTRY']],
'KnodeConvertListHead' : [ 0x600, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0x608, ['pointer', ['_CM_CELL_REMAP_BLOCK']]],
'Flags' : [ 0x60c, ['unsigned long']],
'TrustClassEntry' : [ 0x610, ['_LIST_ENTRY']],
'FlushCount' : [ 0x618, ['unsigned long']],
'CmRm' : [ 0x61c, ['pointer', ['_CM_RM']]],
'CmRmInitFailPoint' : [ 0x620, ['unsigned long']],
'CmRmInitFailStatus' : [ 0x624, ['long']],
'CreatorOwner' : [ 0x628, ['pointer', ['_KTHREAD']]],
'RundownThread' : [ 0x62c, ['pointer', ['_KTHREAD']]],
} ],
'_CM_KEY_CONTROL_BLOCK' : [ 0xa0, {
'RefCount' : [ 0x0, ['unsigned long']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HiveUnloaded' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Decommissioned' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'LockTablePresent' : [ 0x4, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 21, end_bit = 31, native_type='unsigned long')]],
'DelayedDeref' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DelayedClose' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Parking' : [ 0x8, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'KeyHash' : [ 0xc, ['_CM_KEY_HASH']],
'ConvKey' : [ 0xc, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x14, ['pointer', ['_HHIVE']]],
'KeyCell' : [ 0x18, ['unsigned long']],
'KcbPushlock' : [ 0x1c, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x20, ['pointer', ['_KTHREAD']]],
'SharedCount' : [ 0x20, ['long']],
'SlotHint' : [ 0x24, ['unsigned long']],
'ParentKcb' : [ 0x28, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x2c, ['pointer', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x30, ['pointer', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x34, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x3c, ['pointer', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x3c, ['unsigned long']],
'SubKeyCount' : [ 0x3c, ['unsigned long']],
'KeyBodyListHead' : [ 0x40, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x40, ['_LIST_ENTRY']],
'KeyBodyArray' : [ 0x48, ['array', 4, ['pointer', ['_CM_KEY_BODY']]]],
'KcbLastWriteTime' : [ 0x58, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0x60, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0x62, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0x64, ['unsigned long']],
'KcbUserFlags' : [ 0x68, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'KcbVirtControlFlags' : [ 0x68, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'KcbDebug' : [ 0x68, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Flags' : [ 0x68, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'KCBUoWListHead' : [ 0x6c, ['_LIST_ENTRY']],
'DelayQueueEntry' : [ 0x74, ['_LIST_ENTRY']],
'Stolen' : [ 0x74, ['pointer', ['unsigned char']]],
'TransKCBOwner' : [ 0x7c, ['pointer', ['_CM_TRANS']]],
'KCBLock' : [ 0x80, ['_CM_INTENT_LOCK']],
'KeyLock' : [ 0x88, ['_CM_INTENT_LOCK']],
'TransValueCache' : [ 0x90, ['_CHILD_LIST']],
'TransValueListOwner' : [ 0x98, ['pointer', ['_CM_TRANS']]],
'FullKCBName' : [ 0x9c, ['pointer', ['_UNICODE_STRING']]],
} ],
'_CM_KEY_HASH_TABLE_ENTRY' : [ 0xc, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'Owner' : [ 0x4, ['pointer', ['_KTHREAD']]],
'Entry' : [ 0x8, ['pointer', ['_CM_KEY_HASH']]],
} ],
'__unnamed_162b' : [ 0xc, {
'Failure' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: '_None', 1: '_CmInitializeHive', 2: '_HvInitializeHive', 3: '_HvpBuildMap', 4: '_HvpBuildMapAndCopy', 5: '_HvpInitMap', 6: '_HvLoadHive', 7: '_HvpReadFileImageAndBuildMap', 8: '_HvpRecoverData', 9: '_HvpRecoverWholeHive', 10: '_HvpMapFileImageAndBuildMap', 11: '_CmpValidateHiveSecurityDescriptors', 12: '_HvpEnlistBinInMap', 13: '_CmCheckRegistry', 14: '_CmRegistryIO', 15: '_CmCheckRegistry2', 16: '_CmpCheckKey', 17: '_CmpCheckValueList', 18: '_HvCheckHive', 19: '_HvCheckBin'})]],
'Status' : [ 0x4, ['long']],
'Point' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_162e' : [ 0xc, {
'Action' : [ 0x0, ['unsigned long']],
'Handle' : [ 0x4, ['pointer', ['void']]],
'Status' : [ 0x8, ['long']],
} ],
'__unnamed_1630' : [ 0x4, {
'CheckStack' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_1632' : [ 0x10, {
'Cell' : [ 0x0, ['unsigned long']],
'CellPoint' : [ 0x4, ['pointer', ['_CELL_DATA']]],
'RootPoint' : [ 0x8, ['pointer', ['void']]],
'Index' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1634' : [ 0x10, {
'List' : [ 0x0, ['pointer', ['_CELL_DATA']]],
'Index' : [ 0x4, ['unsigned long']],
'Cell' : [ 0x8, ['unsigned long']],
'CellPoint' : [ 0xc, ['pointer', ['_CELL_DATA']]],
} ],
'__unnamed_1638' : [ 0xc, {
'Space' : [ 0x0, ['unsigned long']],
'MapPoint' : [ 0x4, ['unsigned long']],
'BinPoint' : [ 0x8, ['pointer', ['_HBIN']]],
} ],
'__unnamed_163c' : [ 0x8, {
'Bin' : [ 0x0, ['pointer', ['_HBIN']]],
'CellPoint' : [ 0x4, ['pointer', ['_HCELL']]],
} ],
'__unnamed_163e' : [ 0x4, {
'FileOffset' : [ 0x0, ['unsigned long']],
} ],
'_HIVE_LOAD_FAILURE' : [ 0x120, {
'Hive' : [ 0x0, ['pointer', ['_HHIVE']]],
'Index' : [ 0x4, ['unsigned long']],
'RecoverableIndex' : [ 0x8, ['unsigned long']],
'Locations' : [ 0xc, ['array', 8, ['__unnamed_162b']]],
'RecoverableLocations' : [ 0x6c, ['array', 8, ['__unnamed_162b']]],
'RegistryIO' : [ 0xcc, ['__unnamed_162e']],
'CheckRegistry2' : [ 0xd8, ['__unnamed_1630']],
'CheckKey' : [ 0xdc, ['__unnamed_1632']],
'CheckValueList' : [ 0xec, ['__unnamed_1634']],
'CheckHive' : [ 0xfc, ['__unnamed_1638']],
'CheckHive1' : [ 0x108, ['__unnamed_1638']],
'CheckBin' : [ 0x114, ['__unnamed_163c']],
'RecoverData' : [ 0x11c, ['__unnamed_163e']],
} ],
'_PCW_COUNTER_DESCRIPTOR' : [ 0x8, {
'Id' : [ 0x0, ['unsigned short']],
'StructIndex' : [ 0x2, ['unsigned short']],
'Offset' : [ 0x4, ['unsigned short']],
'Size' : [ 0x6, ['unsigned short']],
} ],
'_PCW_REGISTRATION_INFORMATION' : [ 0x18, {
'Version' : [ 0x0, ['unsigned long']],
'Name' : [ 0x4, ['pointer', ['_UNICODE_STRING']]],
'CounterCount' : [ 0x8, ['unsigned long']],
'Counters' : [ 0xc, ['pointer', ['_PCW_COUNTER_DESCRIPTOR']]],
'Callback' : [ 0x10, ['pointer', ['void']]],
'CallbackContext' : [ 0x14, ['pointer', ['void']]],
} ],
'_PCW_PROCESSOR_INFO' : [ 0x80, {
'IdleTime' : [ 0x0, ['unsigned long long']],
'AvailableTime' : [ 0x8, ['unsigned long long']],
'UserTime' : [ 0x10, ['unsigned long long']],
'KernelTime' : [ 0x18, ['unsigned long long']],
'Interrupts' : [ 0x20, ['unsigned long']],
'DpcTime' : [ 0x28, ['unsigned long long']],
'InterruptTime' : [ 0x30, ['unsigned long long']],
'DpcCount' : [ 0x38, ['unsigned long']],
'DpcRate' : [ 0x3c, ['unsigned long']],
'C1Time' : [ 0x40, ['unsigned long long']],
'C2Time' : [ 0x48, ['unsigned long long']],
'C3Time' : [ 0x50, ['unsigned long long']],
'C1Transitions' : [ 0x58, ['unsigned long long']],
'C2Transitions' : [ 0x60, ['unsigned long long']],
'C3Transitions' : [ 0x68, ['unsigned long long']],
'ParkingStatus' : [ 0x70, ['unsigned long']],
'CurrentFrequency' : [ 0x74, ['unsigned long']],
'PercentMaxFrequency' : [ 0x78, ['unsigned long']],
'StateFlags' : [ 0x7c, ['unsigned long']],
} ],
'_PCW_DATA' : [ 0x8, {
'Data' : [ 0x0, ['pointer', ['void']]],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_ETW_PERF_COUNTERS' : [ 0x18, {
'TotalActiveSessions' : [ 0x0, ['long']],
'TotalBufferMemoryNonPagedPool' : [ 0x4, ['long']],
'TotalBufferMemoryPagedPool' : [ 0x8, ['long']],
'TotalGuidsEnabled' : [ 0xc, ['long']],
'TotalGuidsNotEnabled' : [ 0x10, ['long']],
'TotalGuidsPreEnabled' : [ 0x14, ['long']],
} ],
'_ETW_SESSION_PERF_COUNTERS' : [ 0x18, {
'BufferMemoryPagedPool' : [ 0x0, ['long']],
'BufferMemoryNonPagedPool' : [ 0x4, ['long']],
'EventsLoggedCount' : [ 0x8, ['unsigned long long']],
'EventsLost' : [ 0x10, ['long']],
'NumConsumers' : [ 0x14, ['long']],
} ],
'_TEB32' : [ 0xfe4, {
'NtTib' : [ 0x0, ['_NT_TIB32']],
'EnvironmentPointer' : [ 0x1c, ['unsigned long']],
'ClientId' : [ 0x20, ['_CLIENT_ID32']],
'ActiveRpcHandle' : [ 0x28, ['unsigned long']],
'ThreadLocalStoragePointer' : [ 0x2c, ['unsigned long']],
'ProcessEnvironmentBlock' : [ 0x30, ['unsigned long']],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['unsigned long']],
'Win32ThreadInfo' : [ 0x40, ['unsigned long']],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['unsigned long']],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['unsigned long']]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['unsigned long']],
'SpareBytes' : [ 0x1ac, ['array', 36, ['unsigned char']]],
'TxFsContext' : [ 0x1d0, ['unsigned long']],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH32']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID32']],
'GdiCachedProcessHandle' : [ 0x6bc, ['unsigned long']],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['unsigned long']],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['unsigned long']]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['unsigned long']],
'glSectionInfo' : [ 0xbe0, ['unsigned long']],
'glSection' : [ 0xbe4, ['unsigned long']],
'glTable' : [ 0xbe8, ['unsigned long']],
'glCurrentRC' : [ 0xbec, ['unsigned long']],
'glContext' : [ 0xbf0, ['unsigned long']],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_STRING32']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0xe0c, ['unsigned long']],
'TlsSlots' : [ 0xe10, ['array', 64, ['unsigned long']]],
'TlsLinks' : [ 0xf10, ['LIST_ENTRY32']],
'Vdm' : [ 0xf18, ['unsigned long']],
'ReservedForNtRpc' : [ 0xf1c, ['unsigned long']],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['unsigned long']]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 9, ['unsigned long']]],
'ActivityId' : [ 0xf50, ['_GUID']],
'SubProcessTag' : [ 0xf60, ['unsigned long']],
'EtwLocalData' : [ 0xf64, ['unsigned long']],
'EtwTraceData' : [ 0xf68, ['unsigned long']],
'WinSockData' : [ 0xf6c, ['unsigned long']],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'CurrentIdealProcessor' : [ 0xf74, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0xf74, ['unsigned long']],
'ReservedPad0' : [ 0xf74, ['unsigned char']],
'ReservedPad1' : [ 0xf75, ['unsigned char']],
'ReservedPad2' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['unsigned long']],
'ReservedForOle' : [ 0xf80, ['unsigned long']],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SavedPriorityState' : [ 0xf88, ['unsigned long']],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'ThreadPoolData' : [ 0xf90, ['unsigned long']],
'TlsExpansionSlots' : [ 0xf94, ['unsigned long']],
'MuiGeneration' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['unsigned long']],
'pShimData' : [ 0xfa4, ['unsigned long']],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['unsigned long']],
'ActiveFrame' : [ 0xfb0, ['unsigned long']],
'FlsData' : [ 0xfb4, ['unsigned long']],
'PreferredLanguages' : [ 0xfb8, ['unsigned long']],
'UserPrefLanguages' : [ 0xfbc, ['unsigned long']],
'MergedPrefLanguages' : [ 0xfc0, ['unsigned long']],
'MuiImpersonation' : [ 0xfc4, ['unsigned long']],
'CrossTebFlags' : [ 0xfc8, ['unsigned short']],
'SpareCrossTebBits' : [ 0xfc8, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0xfca, ['unsigned short']],
'SafeThunkCall' : [ 0xfca, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0xfca, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0xfca, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0xfca, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0xfca, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0xfca, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0xfca, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0xfca, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0xfca, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0xfca, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0xfca, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0xfca, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0xfcc, ['unsigned long']],
'TxnScopeExitCallback' : [ 0xfd0, ['unsigned long']],
'TxnScopeContext' : [ 0xfd4, ['unsigned long']],
'LockCount' : [ 0xfd8, ['unsigned long']],
'SpareUlong0' : [ 0xfdc, ['unsigned long']],
'ResourceRetValue' : [ 0xfe0, ['unsigned long']],
} ],
'_TEB64' : [ 0x1818, {
'NtTib' : [ 0x0, ['_NT_TIB64']],
'EnvironmentPointer' : [ 0x38, ['unsigned long long']],
'ClientId' : [ 0x40, ['_CLIENT_ID64']],
'ActiveRpcHandle' : [ 0x50, ['unsigned long long']],
'ThreadLocalStoragePointer' : [ 0x58, ['unsigned long long']],
'ProcessEnvironmentBlock' : [ 0x60, ['unsigned long long']],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['unsigned long long']],
'Win32ThreadInfo' : [ 0x78, ['unsigned long long']],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['unsigned long long']],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['unsigned long long']]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['unsigned long long']],
'SpareBytes' : [ 0x2d0, ['array', 24, ['unsigned char']]],
'TxFsContext' : [ 0x2e8, ['unsigned long']],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH64']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID64']],
'GdiCachedProcessHandle' : [ 0x7e8, ['unsigned long long']],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['unsigned long long']],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['unsigned long long']]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['unsigned long long']],
'glSectionInfo' : [ 0x1228, ['unsigned long long']],
'glSection' : [ 0x1230, ['unsigned long long']],
'glTable' : [ 0x1238, ['unsigned long long']],
'glCurrentRC' : [ 0x1240, ['unsigned long long']],
'glContext' : [ 0x1248, ['unsigned long long']],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_STRING64']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['wchar']]],
'DeallocationStack' : [ 0x1478, ['unsigned long long']],
'TlsSlots' : [ 0x1480, ['array', 64, ['unsigned long long']]],
'TlsLinks' : [ 0x1680, ['LIST_ENTRY64']],
'Vdm' : [ 0x1690, ['unsigned long long']],
'ReservedForNtRpc' : [ 0x1698, ['unsigned long long']],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['unsigned long long']]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 11, ['unsigned long long']]],
'ActivityId' : [ 0x1710, ['_GUID']],
'SubProcessTag' : [ 0x1720, ['unsigned long long']],
'EtwLocalData' : [ 0x1728, ['unsigned long long']],
'EtwTraceData' : [ 0x1730, ['unsigned long long']],
'WinSockData' : [ 0x1738, ['unsigned long long']],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'CurrentIdealProcessor' : [ 0x1744, ['_PROCESSOR_NUMBER']],
'IdealProcessorValue' : [ 0x1744, ['unsigned long']],
'ReservedPad0' : [ 0x1744, ['unsigned char']],
'ReservedPad1' : [ 0x1745, ['unsigned char']],
'ReservedPad2' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['unsigned long long']],
'ReservedForOle' : [ 0x1758, ['unsigned long long']],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SavedPriorityState' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'ThreadPoolData' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['unsigned long long']],
'DeallocationBStore' : [ 0x1788, ['unsigned long long']],
'BStoreLimit' : [ 0x1790, ['unsigned long long']],
'MuiGeneration' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['unsigned long long']],
'pShimData' : [ 0x17a8, ['unsigned long long']],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['unsigned long long']],
'ActiveFrame' : [ 0x17c0, ['unsigned long long']],
'FlsData' : [ 0x17c8, ['unsigned long long']],
'PreferredLanguages' : [ 0x17d0, ['unsigned long long']],
'UserPrefLanguages' : [ 0x17d8, ['unsigned long long']],
'MergedPrefLanguages' : [ 0x17e0, ['unsigned long long']],
'MuiImpersonation' : [ 0x17e8, ['unsigned long']],
'CrossTebFlags' : [ 0x17ec, ['unsigned short']],
'SpareCrossTebBits' : [ 0x17ec, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned short')]],
'SameTebFlags' : [ 0x17ee, ['unsigned short']],
'SafeThunkCall' : [ 0x17ee, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'InDebugPrint' : [ 0x17ee, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'HasFiberData' : [ 0x17ee, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'SkipThreadAttach' : [ 0x17ee, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'WerInShipAssertCode' : [ 0x17ee, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned short')]],
'RanProcessInit' : [ 0x17ee, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned short')]],
'ClonedThread' : [ 0x17ee, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned short')]],
'SuppressDebugMsg' : [ 0x17ee, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned short')]],
'DisableUserStackWalk' : [ 0x17ee, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned short')]],
'RtlExceptionAttached' : [ 0x17ee, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned short')]],
'InitialThread' : [ 0x17ee, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned short')]],
'SpareSameTebBits' : [ 0x17ee, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned short')]],
'TxnScopeEnterCallback' : [ 0x17f0, ['unsigned long long']],
'TxnScopeExitCallback' : [ 0x17f8, ['unsigned long long']],
'TxnScopeContext' : [ 0x1800, ['unsigned long long']],
'LockCount' : [ 0x1808, ['unsigned long']],
'SpareUlong0' : [ 0x180c, ['unsigned long']],
'ResourceRetValue' : [ 0x1810, ['unsigned long long']],
} ],
'_KTIMER_TABLE' : [ 0x1840, {
'TimerExpiry' : [ 0x0, ['array', 16, ['pointer', ['_KTIMER']]]],
'TimerEntries' : [ 0x40, ['array', 256, ['_KTIMER_TABLE_ENTRY']]],
} ],
'_KTIMER_TABLE_ENTRY' : [ 0x18, {
'Lock' : [ 0x0, ['unsigned long']],
'Entry' : [ 0x4, ['_LIST_ENTRY']],
'Time' : [ 0x10, ['_ULARGE_INTEGER']],
} ],
'_KAFFINITY_EX' : [ 0xc, {
'Count' : [ 0x0, ['unsigned short']],
'Size' : [ 0x2, ['unsigned short']],
'Reserved' : [ 0x4, ['unsigned long']],
'Bitmap' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_KAFFINITY_ENUMERATION_CONTEXT' : [ 0xc, {
'Affinity' : [ 0x0, ['pointer', ['_KAFFINITY_EX']]],
'CurrentMask' : [ 0x4, ['unsigned long']],
'CurrentIndex' : [ 0x8, ['unsigned short']],
} ],
'_GROUP_AFFINITY' : [ 0xc, {
'Mask' : [ 0x0, ['unsigned long']],
'Group' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['array', 3, ['unsigned short']]],
} ],
'_XSTATE_SAVE' : [ 0x20, {
'Reserved1' : [ 0x0, ['long long']],
'Reserved2' : [ 0x8, ['unsigned long']],
'Prev' : [ 0xc, ['pointer', ['_XSTATE_SAVE']]],
'Reserved3' : [ 0x10, ['pointer', ['_XSAVE_AREA']]],
'Thread' : [ 0x14, ['pointer', ['_KTHREAD']]],
'Reserved4' : [ 0x18, ['pointer', ['void']]],
'Level' : [ 0x1c, ['unsigned char']],
'XStateContext' : [ 0x0, ['_XSTATE_CONTEXT']],
} ],
'_XSAVE_AREA' : [ 0x240, {
'LegacyState' : [ 0x0, ['_XSAVE_FORMAT']],
'Header' : [ 0x200, ['_XSAVE_AREA_HEADER']],
} ],
'_FXSAVE_FORMAT' : [ 0x1e0, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned short']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned long']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned long']],
'MXCsr' : [ 0x18, ['unsigned long']],
'MXCsrMask' : [ 0x1c, ['unsigned long']],
'RegisterArea' : [ 0x20, ['array', 128, ['unsigned char']]],
'Reserved3' : [ 0xa0, ['array', 128, ['unsigned char']]],
'Reserved4' : [ 0x120, ['array', 192, ['unsigned char']]],
} ],
'_FNSAVE_FORMAT' : [ 0x6c, {
'ControlWord' : [ 0x0, ['unsigned long']],
'StatusWord' : [ 0x4, ['unsigned long']],
'TagWord' : [ 0x8, ['unsigned long']],
'ErrorOffset' : [ 0xc, ['unsigned long']],
'ErrorSelector' : [ 0x10, ['unsigned long']],
'DataOffset' : [ 0x14, ['unsigned long']],
'DataSelector' : [ 0x18, ['unsigned long']],
'RegisterArea' : [ 0x1c, ['array', 80, ['unsigned char']]],
} ],
'_KSTACK_AREA' : [ 0x210, {
'FnArea' : [ 0x0, ['_FNSAVE_FORMAT']],
'NpxFrame' : [ 0x0, ['_FXSAVE_FORMAT']],
'StackControl' : [ 0x1e0, ['_KERNEL_STACK_CONTROL']],
'Cr0NpxState' : [ 0x1fc, ['unsigned long']],
'Padding' : [ 0x200, ['array', 4, ['unsigned long']]],
} ],
'_KERNEL_STACK_CONTROL' : [ 0x1c, {
'PreviousTrapFrame' : [ 0x0, ['pointer', ['_KTRAP_FRAME']]],
'PreviousExceptionList' : [ 0x0, ['pointer', ['void']]],
'StackControlFlags' : [ 0x4, ['unsigned long']],
'PreviousLargeStack' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousSegmentsPresent' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ExpandCalloutStack' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Previous' : [ 0x8, ['_KERNEL_STACK_SEGMENT']],
} ],
'_KTRAP_FRAME' : [ 0x8c, {
'DbgEbp' : [ 0x0, ['unsigned long']],
'DbgEip' : [ 0x4, ['unsigned long']],
'DbgArgMark' : [ 0x8, ['unsigned long']],
'DbgArgPointer' : [ 0xc, ['unsigned long']],
'TempSegCs' : [ 0x10, ['unsigned short']],
'Logging' : [ 0x12, ['unsigned char']],
'Reserved' : [ 0x13, ['unsigned char']],
'TempEsp' : [ 0x14, ['unsigned long']],
'Dr0' : [ 0x18, ['unsigned long']],
'Dr1' : [ 0x1c, ['unsigned long']],
'Dr2' : [ 0x20, ['unsigned long']],
'Dr3' : [ 0x24, ['unsigned long']],
'Dr6' : [ 0x28, ['unsigned long']],
'Dr7' : [ 0x2c, ['unsigned long']],
'SegGs' : [ 0x30, ['unsigned long']],
'SegEs' : [ 0x34, ['unsigned long']],
'SegDs' : [ 0x38, ['unsigned long']],
'Edx' : [ 0x3c, ['unsigned long']],
'Ecx' : [ 0x40, ['unsigned long']],
'Eax' : [ 0x44, ['unsigned long']],
'PreviousPreviousMode' : [ 0x48, ['unsigned long']],
'ExceptionList' : [ 0x4c, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'SegFs' : [ 0x50, ['unsigned long']],
'Edi' : [ 0x54, ['unsigned long']],
'Esi' : [ 0x58, ['unsigned long']],
'Ebx' : [ 0x5c, ['unsigned long']],
'Ebp' : [ 0x60, ['unsigned long']],
'ErrCode' : [ 0x64, ['unsigned long']],
'Eip' : [ 0x68, ['unsigned long']],
'SegCs' : [ 0x6c, ['unsigned long']],
'EFlags' : [ 0x70, ['unsigned long']],
'HardwareEsp' : [ 0x74, ['unsigned long']],
'HardwareSegSs' : [ 0x78, ['unsigned long']],
'V86Es' : [ 0x7c, ['unsigned long']],
'V86Ds' : [ 0x80, ['unsigned long']],
'V86Fs' : [ 0x84, ['unsigned long']],
'V86Gs' : [ 0x88, ['unsigned long']],
} ],
'_PNP_DEVICE_COMPLETION_QUEUE' : [ 0x2c, {
'DispatchedList' : [ 0x0, ['_LIST_ENTRY']],
'DispatchedCount' : [ 0x8, ['unsigned long']],
'CompletedList' : [ 0xc, ['_LIST_ENTRY']],
'CompletedSemaphore' : [ 0x14, ['_KSEMAPHORE']],
'SpinLock' : [ 0x28, ['unsigned long']],
} ],
'_KSEMAPHORE' : [ 0x14, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x10, ['long']],
} ],
'_DEVOBJ_EXTENSION' : [ 0x3c, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x8, ['unsigned long']],
'Dope' : [ 0xc, ['pointer', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x10, ['unsigned long']],
'DeviceNode' : [ 0x14, ['pointer', ['void']]],
'AttachedTo' : [ 0x18, ['pointer', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x1c, ['long']],
'StartIoKey' : [ 0x20, ['long']],
'StartIoFlags' : [ 0x24, ['unsigned long']],
'Vpb' : [ 0x28, ['pointer', ['_VPB']]],
'DependentList' : [ 0x2c, ['_LIST_ENTRY']],
'ProviderList' : [ 0x34, ['_LIST_ENTRY']],
} ],
'__unnamed_1740' : [ 0x4, {
'LegacyDeviceNode' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer', ['_DEVICE_RELATIONS']]],
'Information' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_1742' : [ 0x4, {
'NextResourceDeviceNode' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
} ],
'__unnamed_1746' : [ 0x10, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x4, ['_LIST_ENTRY']],
'SerialNumber' : [ 0xc, ['pointer', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x188, {
'Sibling' : [ 0x0, ['pointer', ['_DEVICE_NODE']]],
'Child' : [ 0x4, ['pointer', ['_DEVICE_NODE']]],
'Parent' : [ 0x8, ['pointer', ['_DEVICE_NODE']]],
'LastChild' : [ 0xc, ['pointer', ['_DEVICE_NODE']]],
'PhysicalDeviceObject' : [ 0x10, ['pointer', ['_DEVICE_OBJECT']]],
'InstancePath' : [ 0x14, ['_UNICODE_STRING']],
'ServiceName' : [ 0x1c, ['_UNICODE_STRING']],
'PendingIrp' : [ 0x24, ['pointer', ['_IRP']]],
'Level' : [ 0x28, ['unsigned long']],
'Notify' : [ 0x2c, ['_PO_DEVICE_NOTIFY']],
'PoIrpManager' : [ 0x68, ['_PO_IRP_MANAGER']],
'State' : [ 0x78, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'PreviousState' : [ 0x7c, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'StateHistory' : [ 0x80, ['array', -80, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]]],
'StateHistoryEntry' : [ 0xd0, ['unsigned long']],
'CompletionStatus' : [ 0xd4, ['long']],
'Flags' : [ 0xd8, ['unsigned long']],
'UserFlags' : [ 0xdc, ['unsigned long']],
'Problem' : [ 0xe0, ['unsigned long']],
'ResourceList' : [ 0xe4, ['pointer', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0xe8, ['pointer', ['_CM_RESOURCE_LIST']]],
'DuplicatePDO' : [ 0xec, ['pointer', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0xf0, ['pointer', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0xf4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0xf8, ['unsigned long']],
'ChildInterfaceType' : [ 0xfc, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0x100, ['unsigned long']],
'ChildBusTypeIndex' : [ 0x104, ['unsigned short']],
'RemovalPolicy' : [ 0x106, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0x107, ['unsigned char']],
'TargetDeviceNotify' : [ 0x108, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0x110, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0x118, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0x120, ['unsigned short']],
'QueryTranslatorMask' : [ 0x122, ['unsigned short']],
'NoArbiterMask' : [ 0x124, ['unsigned short']],
'QueryArbiterMask' : [ 0x126, ['unsigned short']],
'OverUsed1' : [ 0x128, ['__unnamed_1740']],
'OverUsed2' : [ 0x12c, ['__unnamed_1742']],
'BootResources' : [ 0x130, ['pointer', ['_CM_RESOURCE_LIST']]],
'BootResourcesTranslated' : [ 0x134, ['pointer', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0x138, ['unsigned long']],
'DockInfo' : [ 0x13c, ['__unnamed_1746']],
'DisableableDepends' : [ 0x14c, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x150, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x158, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x160, ['unsigned long']],
'PreviousParent' : [ 0x164, ['pointer', ['_DEVICE_NODE']]],
'DeletedChildren' : [ 0x168, ['unsigned long']],
'NumaNodeIndex' : [ 0x16c, ['unsigned long']],
'ContainerID' : [ 0x170, ['_GUID']],
'OverrideFlags' : [ 0x180, ['unsigned char']],
'RequiresUnloadedDriver' : [ 0x181, ['unsigned char']],
'PendingEjectRelations' : [ 0x184, ['pointer', ['_PENDING_RELATIONS_LIST_ENTRY']]],
} ],
'_KNODE' : [ 0x80, {
'PagedPoolSListHead' : [ 0x0, ['_SLIST_HEADER']],
'NonPagedPoolSListHead' : [ 0x8, ['array', 3, ['_SLIST_HEADER']]],
'Affinity' : [ 0x20, ['_GROUP_AFFINITY']],
'ProximityId' : [ 0x2c, ['unsigned long']],
'NodeNumber' : [ 0x30, ['unsigned short']],
'PrimaryNodeNumber' : [ 0x32, ['unsigned short']],
'MaximumProcessors' : [ 0x34, ['unsigned char']],
'Color' : [ 0x35, ['unsigned char']],
'Flags' : [ 0x36, ['_flags']],
'NodePad0' : [ 0x37, ['unsigned char']],
'Seed' : [ 0x38, ['unsigned long']],
'MmShiftedColor' : [ 0x3c, ['unsigned long']],
'FreeCount' : [ 0x40, ['array', 2, ['unsigned long']]],
'CachedKernelStacks' : [ 0x48, ['_CACHED_KSTACK_LIST']],
'ParkLock' : [ 0x60, ['long']],
'NodePad1' : [ 0x64, ['unsigned long']],
} ],
'_PNP_ASSIGN_RESOURCES_CONTEXT' : [ 0xc, {
'IncludeFailedDevices' : [ 0x0, ['unsigned long']],
'DeviceCount' : [ 0x4, ['unsigned long']],
'DeviceList' : [ 0x8, ['array', 1, ['pointer', ['_DEVICE_OBJECT']]]],
} ],
'_PNP_RESOURCE_REQUEST' : [ 0x28, {
'PhysicalDevice' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x4, ['unsigned long']],
'AllocationType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Priority' : [ 0xc, ['unsigned long']],
'Position' : [ 0x10, ['unsigned long']],
'ResourceRequirements' : [ 0x14, ['pointer', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'ReqList' : [ 0x18, ['pointer', ['void']]],
'ResourceAssignment' : [ 0x1c, ['pointer', ['_CM_RESOURCE_LIST']]],
'TranslatedResourceAssignment' : [ 0x20, ['pointer', ['_CM_RESOURCE_LIST']]],
'Status' : [ 0x24, ['long']],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_17ef' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
'SwitchPartition' : [ 0x0, ['_DBGKD_SWITCH_PARTITION']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_17ef']],
} ],
'__unnamed_17f6' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_17f6']],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_X86_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_POP_CPU_INFO' : [ 0x10, {
'Eax' : [ 0x0, ['unsigned long']],
'Ebx' : [ 0x4, ['unsigned long']],
'Ecx' : [ 0x8, ['unsigned long']],
'Edx' : [ 0xc, ['unsigned long']],
} ],
'_VOLUME_CACHE_MAP' : [ 0x20, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteCode' : [ 0x2, ['short']],
'UseCount' : [ 0x4, ['unsigned long']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'VolumeCacheMapLinks' : [ 0xc, ['_LIST_ENTRY']],
'Flags' : [ 0x14, ['unsigned long']],
'DirtyPages' : [ 0x18, ['unsigned long']],
'PagesQueuedToDisk' : [ 0x1c, ['unsigned long']],
} ],
'_SHARED_CACHE_MAP' : [ 0x158, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x18, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x28, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x30, ['array', 4, ['pointer', ['_VACB']]]],
'Vacbs' : [ 0x40, ['pointer', ['pointer', ['_VACB']]]],
'FileObjectFastRef' : [ 0x44, ['_EX_FAST_REF']],
'VacbLock' : [ 0x48, ['_EX_PUSH_LOCK']],
'DirtyPages' : [ 0x4c, ['unsigned long']],
'LoggedStreamLinks' : [ 0x50, ['_LIST_ENTRY']],
'SharedCacheMapLinks' : [ 0x58, ['_LIST_ENTRY']],
'Flags' : [ 0x60, ['unsigned long']],
'Status' : [ 0x64, ['long']],
'Mbcb' : [ 0x68, ['pointer', ['_MBCB']]],
'Section' : [ 0x6c, ['pointer', ['void']]],
'CreateEvent' : [ 0x70, ['pointer', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0x74, ['pointer', ['_KEVENT']]],
'PagesToWrite' : [ 0x78, ['unsigned long']],
'BeyondLastFlush' : [ 0x80, ['long long']],
'Callbacks' : [ 0x88, ['pointer', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0x8c, ['pointer', ['void']]],
'PrivateList' : [ 0x90, ['_LIST_ENTRY']],
'LogHandle' : [ 0x98, ['pointer', ['void']]],
'FlushToLsnRoutine' : [ 0x9c, ['pointer', ['void']]],
'DirtyPageThreshold' : [ 0xa0, ['unsigned long']],
'LazyWritePassCount' : [ 0xa4, ['unsigned long']],
'UninitializeEvent' : [ 0xa8, ['pointer', ['_CACHE_UNINITIALIZE_EVENT']]],
'BcbLock' : [ 0xac, ['_KGUARDED_MUTEX']],
'LastUnmapBehindOffset' : [ 0xd0, ['_LARGE_INTEGER']],
'Event' : [ 0xd8, ['_KEVENT']],
'HighWaterMappingOffset' : [ 0xe8, ['_LARGE_INTEGER']],
'PrivateCacheMap' : [ 0xf0, ['_PRIVATE_CACHE_MAP']],
'WriteBehindWorkQueueEntry' : [ 0x148, ['pointer', ['void']]],
'VolumeCacheMap' : [ 0x14c, ['pointer', ['_VOLUME_CACHE_MAP']]],
'ProcImagePathHash' : [ 0x150, ['unsigned long']],
'WritesInProgress' : [ 0x154, ['unsigned long']],
} ],
'__unnamed_1866' : [ 0x8, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
} ],
'_VACB' : [ 0x20, {
'BaseAddress' : [ 0x0, ['pointer', ['void']]],
'SharedCacheMap' : [ 0x4, ['pointer', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x8, ['__unnamed_1866']],
'Links' : [ 0x10, ['_LIST_ENTRY']],
'ArrayHead' : [ 0x18, ['pointer', ['_VACB_ARRAY_HEADER']]],
} ],
'_KGUARDED_MUTEX' : [ 0x20, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x4, ['pointer', ['_KTHREAD']]],
'Contention' : [ 0x8, ['unsigned long']],
'Gate' : [ 0xc, ['_KGATE']],
'KernelApcDisable' : [ 0x1c, ['short']],
'SpecialApcDisable' : [ 0x1e, ['short']],
'CombinedApcDisable' : [ 0x1c, ['unsigned long']],
} ],
'__unnamed_1884' : [ 0x4, {
'FileObject' : [ 0x0, ['pointer', ['_FILE_OBJECT']]],
} ],
'__unnamed_1886' : [ 0x4, {
'SharedCacheMap' : [ 0x0, ['pointer', ['_SHARED_CACHE_MAP']]],
} ],
'__unnamed_1888' : [ 0x4, {
'Event' : [ 0x0, ['pointer', ['_KEVENT']]],
} ],
'__unnamed_188a' : [ 0x4, {
'Reason' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_188c' : [ 0x4, {
'Read' : [ 0x0, ['__unnamed_1884']],
'Write' : [ 0x0, ['__unnamed_1886']],
'Event' : [ 0x0, ['__unnamed_1888']],
'Notification' : [ 0x0, ['__unnamed_188a']],
} ],
'_WORK_QUEUE_ENTRY' : [ 0x10, {
'WorkQueueLinks' : [ 0x0, ['_LIST_ENTRY']],
'Parameters' : [ 0x8, ['__unnamed_188c']],
'Function' : [ 0xc, ['unsigned char']],
} ],
'VACB_LEVEL_ALLOCATION_LIST' : [ 0x10, {
'VacbLevelList' : [ 0x0, ['_LIST_ENTRY']],
'VacbLevelWithBcbListHeads' : [ 0x8, ['pointer', ['void']]],
'VacbLevelsAllocated' : [ 0xc, ['unsigned long']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x14, {
'Next' : [ 0x0, ['pointer', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x4, ['_KEVENT']],
} ],
'_HEAP_LIST_LOOKUP' : [ 0x24, {
'ExtendedLookup' : [ 0x0, ['pointer', ['_HEAP_LIST_LOOKUP']]],
'ArraySize' : [ 0x4, ['unsigned long']],
'ExtraItem' : [ 0x8, ['unsigned long']],
'ItemCount' : [ 0xc, ['unsigned long']],
'OutOfRangeItems' : [ 0x10, ['unsigned long']],
'BaseIndex' : [ 0x14, ['unsigned long']],
'ListHead' : [ 0x18, ['pointer', ['_LIST_ENTRY']]],
'ListsInUseUlong' : [ 0x1c, ['pointer', ['unsigned long']]],
'ListHints' : [ 0x20, ['pointer', ['pointer', ['_LIST_ENTRY']]]],
} ],
'_HEAP' : [ 0x138, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['unsigned long']],
'SegmentListEntry' : [ 0x10, ['_LIST_ENTRY']],
'Heap' : [ 0x18, ['pointer', ['_HEAP']]],
'BaseAddress' : [ 0x1c, ['pointer', ['void']]],
'NumberOfPages' : [ 0x20, ['unsigned long']],
'FirstEntry' : [ 0x24, ['pointer', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x28, ['pointer', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x2c, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x30, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x34, ['unsigned short']],
'Reserved' : [ 0x36, ['unsigned short']],
'UCRSegmentList' : [ 0x38, ['_LIST_ENTRY']],
'Flags' : [ 0x40, ['unsigned long']],
'ForceFlags' : [ 0x44, ['unsigned long']],
'CompatibilityFlags' : [ 0x48, ['unsigned long']],
'EncodeFlagMask' : [ 0x4c, ['unsigned long']],
'Encoding' : [ 0x50, ['_HEAP_ENTRY']],
'PointerKey' : [ 0x58, ['unsigned long']],
'Interceptor' : [ 0x5c, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x60, ['unsigned long']],
'Signature' : [ 0x64, ['unsigned long']],
'SegmentReserve' : [ 0x68, ['unsigned long']],
'SegmentCommit' : [ 0x6c, ['unsigned long']],
'DeCommitFreeBlockThreshold' : [ 0x70, ['unsigned long']],
'DeCommitTotalFreeThreshold' : [ 0x74, ['unsigned long']],
'TotalFreeSize' : [ 0x78, ['unsigned long']],
'MaximumAllocationSize' : [ 0x7c, ['unsigned long']],
'ProcessHeapsListIndex' : [ 0x80, ['unsigned short']],
'HeaderValidateLength' : [ 0x82, ['unsigned short']],
'HeaderValidateCopy' : [ 0x84, ['pointer', ['void']]],
'NextAvailableTagIndex' : [ 0x88, ['unsigned short']],
'MaximumTagIndex' : [ 0x8a, ['unsigned short']],
'TagEntries' : [ 0x8c, ['pointer', ['_HEAP_TAG_ENTRY']]],
'UCRList' : [ 0x90, ['_LIST_ENTRY']],
'AlignRound' : [ 0x98, ['unsigned long']],
'AlignMask' : [ 0x9c, ['unsigned long']],
'VirtualAllocdBlocks' : [ 0xa0, ['_LIST_ENTRY']],
'SegmentList' : [ 0xa8, ['_LIST_ENTRY']],
'AllocatorBackTraceIndex' : [ 0xb0, ['unsigned short']],
'NonDedicatedListLength' : [ 0xb4, ['unsigned long']],
'BlocksIndex' : [ 0xb8, ['pointer', ['void']]],
'UCRIndex' : [ 0xbc, ['pointer', ['void']]],
'PseudoTagEntries' : [ 0xc0, ['pointer', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0xc4, ['_LIST_ENTRY']],
'LockVariable' : [ 0xcc, ['pointer', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0xd0, ['pointer', ['void']]],
'FrontEndHeap' : [ 0xd4, ['pointer', ['void']]],
'FrontHeapLockCount' : [ 0xd8, ['unsigned short']],
'FrontEndHeapType' : [ 0xda, ['unsigned char']],
'Counters' : [ 0xdc, ['_HEAP_COUNTERS']],
'TuningParameters' : [ 0x130, ['_HEAP_TUNING_PARAMETERS']],
} ],
'__unnamed_18dd' : [ 0x18, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
} ],
'_HEAP_LOCK' : [ 0x18, {
'Lock' : [ 0x0, ['__unnamed_18dd']],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x18, {
'DebugInfo' : [ 0x0, ['pointer', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x4, ['long']],
'RecursionCount' : [ 0x8, ['long']],
'OwningThread' : [ 0xc, ['pointer', ['void']]],
'LockSemaphore' : [ 0x10, ['pointer', ['void']]],
'SpinCount' : [ 0x14, ['unsigned long']],
} ],
'_HEAP_ENTRY' : [ 0x8, {
'Size' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned char']],
'SmallTagIndex' : [ 0x3, ['unsigned char']],
'SubSegmentCode' : [ 0x0, ['pointer', ['void']]],
'PreviousSize' : [ 0x4, ['unsigned short']],
'SegmentOffset' : [ 0x6, ['unsigned char']],
'LFHFlags' : [ 0x6, ['unsigned char']],
'UnusedBytes' : [ 0x7, ['unsigned char']],
'FunctionIndex' : [ 0x0, ['unsigned short']],
'ContextValue' : [ 0x2, ['unsigned short']],
'InterceptorValue' : [ 0x0, ['unsigned long']],
'UnusedBytesLength' : [ 0x4, ['unsigned short']],
'EntryOffset' : [ 0x6, ['unsigned char']],
'ExtendedBlockSignature' : [ 0x7, ['unsigned char']],
'Code1' : [ 0x0, ['unsigned long']],
'Code2' : [ 0x4, ['unsigned short']],
'Code3' : [ 0x6, ['unsigned char']],
'Code4' : [ 0x7, ['unsigned char']],
'AgregateCode' : [ 0x0, ['unsigned long long']],
} ],
'_HEAP_SEGMENT' : [ 0x40, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'SegmentSignature' : [ 0x8, ['unsigned long']],
'SegmentFlags' : [ 0xc, ['unsigned long']],
'SegmentListEntry' : [ 0x10, ['_LIST_ENTRY']],
'Heap' : [ 0x18, ['pointer', ['_HEAP']]],
'BaseAddress' : [ 0x1c, ['pointer', ['void']]],
'NumberOfPages' : [ 0x20, ['unsigned long']],
'FirstEntry' : [ 0x24, ['pointer', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x28, ['pointer', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x2c, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x30, ['unsigned long']],
'SegmentAllocatorBackTraceIndex' : [ 0x34, ['unsigned short']],
'Reserved' : [ 0x36, ['unsigned short']],
'UCRSegmentList' : [ 0x38, ['_LIST_ENTRY']],
} ],
'_HEAP_FREE_ENTRY' : [ 0x10, {
'Size' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned char']],
'SmallTagIndex' : [ 0x3, ['unsigned char']],
'SubSegmentCode' : [ 0x0, ['pointer', ['void']]],
'PreviousSize' : [ 0x4, ['unsigned short']],
'SegmentOffset' : [ 0x6, ['unsigned char']],
'LFHFlags' : [ 0x6, ['unsigned char']],
'UnusedBytes' : [ 0x7, ['unsigned char']],
'FunctionIndex' : [ 0x0, ['unsigned short']],
'ContextValue' : [ 0x2, ['unsigned short']],
'InterceptorValue' : [ 0x0, ['unsigned long']],
'UnusedBytesLength' : [ 0x4, ['unsigned short']],
'EntryOffset' : [ 0x6, ['unsigned char']],
'ExtendedBlockSignature' : [ 0x7, ['unsigned char']],
'Code1' : [ 0x0, ['unsigned long']],
'Code2' : [ 0x4, ['unsigned short']],
'Code3' : [ 0x6, ['unsigned char']],
'Code4' : [ 0x7, ['unsigned char']],
'AgregateCode' : [ 0x0, ['unsigned long long']],
'FreeList' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_PEB' : [ 0x248, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsProtectedProcess' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'IsLegacyProcess' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'IsImageDynamicallyRelocated' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'SkipPatchingUser32Forwarders' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x4, ['pointer', ['void']]],
'ImageBaseAddress' : [ 0x8, ['pointer', ['void']]],
'Ldr' : [ 0xc, ['pointer', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x10, ['pointer', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x14, ['pointer', ['void']]],
'ProcessHeap' : [ 0x18, ['pointer', ['void']]],
'FastPebLock' : [ 0x1c, ['pointer', ['_RTL_CRITICAL_SECTION']]],
'AtlThunkSListPtr' : [ 0x20, ['pointer', ['void']]],
'IFEOKey' : [ 0x24, ['pointer', ['void']]],
'CrossProcessFlags' : [ 0x28, ['unsigned long']],
'ProcessInJob' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProcessInitializing' : [ 0x28, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessUsingVEH' : [ 0x28, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessUsingVCH' : [ 0x28, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ProcessUsingFTH' : [ 0x28, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'ReservedBits0' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 32, native_type='unsigned long')]],
'KernelCallbackTable' : [ 0x2c, ['pointer', ['void']]],
'UserSharedInfoPtr' : [ 0x2c, ['pointer', ['void']]],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'AtlThunkSListPtr32' : [ 0x34, ['unsigned long']],
'ApiSetMap' : [ 0x38, ['pointer', ['void']]],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['pointer', ['void']]],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['pointer', ['void']]],
'HotpatchInformation' : [ 0x50, ['pointer', ['void']]],
'ReadOnlyStaticServerData' : [ 0x54, ['pointer', ['pointer', ['void']]]],
'AnsiCodePageData' : [ 0x58, ['pointer', ['void']]],
'OemCodePageData' : [ 0x5c, ['pointer', ['void']]],
'UnicodeCaseTableData' : [ 0x60, ['pointer', ['void']]],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['pointer', ['pointer', ['void']]]],
'GdiSharedHandleTable' : [ 0x94, ['pointer', ['void']]],
'ProcessStarterHelper' : [ 0x98, ['pointer', ['void']]],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['pointer', ['_RTL_CRITICAL_SECTION']]],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ActiveProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['pointer', ['void']]],
'TlsExpansionBitmap' : [ 0x150, ['pointer', ['void']]],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['pointer', ['void']]],
'AppCompatInfo' : [ 0x1ec, ['pointer', ['void']]],
'CSDVersion' : [ 0x1f0, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x1f8, ['pointer', ['_ACTIVATION_CONTEXT_DATA']]],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['pointer', ['_ASSEMBLY_STORAGE_MAP']]],
'SystemDefaultActivationContextData' : [ 0x200, ['pointer', ['_ACTIVATION_CONTEXT_DATA']]],
'SystemAssemblyStorageMap' : [ 0x204, ['pointer', ['_ASSEMBLY_STORAGE_MAP']]],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
'FlsCallback' : [ 0x20c, ['pointer', ['_FLS_CALLBACK_INFO']]],
'FlsListHead' : [ 0x210, ['_LIST_ENTRY']],
'FlsBitmap' : [ 0x218, ['pointer', ['void']]],
'FlsBitmapBits' : [ 0x21c, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x22c, ['unsigned long']],
'WerRegistrationData' : [ 0x230, ['pointer', ['void']]],
'WerShipAssertPtr' : [ 0x234, ['pointer', ['void']]],
'pContextData' : [ 0x238, ['pointer', ['void']]],
'pImageHeaderHash' : [ 0x23c, ['pointer', ['void']]],
'TracingFlags' : [ 0x240, ['unsigned long']],
'HeapTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CritSecTracingEnabled' : [ 0x240, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SpareTracingBits' : [ 0x240, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_PEB_LDR_DATA' : [ 0x30, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer', ['void']]],
'InLoadOrderModuleList' : [ 0xc, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x14, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x1c, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x24, ['pointer', ['void']]],
'ShutdownInProgress' : [ 0x28, ['unsigned char']],
'ShutdownThreadId' : [ 0x2c, ['pointer', ['void']]],
} ],
'_LDR_DATA_TABLE_ENTRY' : [ 0x78, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x8, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'DllBase' : [ 0x18, ['pointer', ['void']]],
'EntryPoint' : [ 0x1c, ['pointer', ['void']]],
'SizeOfImage' : [ 0x20, ['unsigned long']],
'FullDllName' : [ 0x24, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x2c, ['_UNICODE_STRING']],
'Flags' : [ 0x34, ['unsigned long']],
'LoadCount' : [ 0x38, ['unsigned short']],
'TlsIndex' : [ 0x3a, ['unsigned short']],
'HashLinks' : [ 0x3c, ['_LIST_ENTRY']],
'SectionPointer' : [ 0x3c, ['pointer', ['void']]],
'CheckSum' : [ 0x40, ['unsigned long']],
'TimeDateStamp' : [ 0x44, ['unsigned long']],
'LoadedImports' : [ 0x44, ['pointer', ['void']]],
'EntryPointActivationContext' : [ 0x48, ['pointer', ['_ACTIVATION_CONTEXT']]],
'PatchInformation' : [ 0x4c, ['pointer', ['void']]],
'ForwarderLinks' : [ 0x50, ['_LIST_ENTRY']],
'ServiceTagLinks' : [ 0x58, ['_LIST_ENTRY']],
'StaticLinks' : [ 0x60, ['_LIST_ENTRY']],
'ContextInformation' : [ 0x68, ['pointer', ['void']]],
'OriginalBase' : [ 0x6c, ['unsigned long']],
'LoadTime' : [ 0x70, ['_LARGE_INTEGER']],
} ],
'_HEAP_SUBSEGMENT' : [ 0x20, {
'LocalInfo' : [ 0x0, ['pointer', ['_HEAP_LOCAL_SEGMENT_INFO']]],
'UserBlocks' : [ 0x4, ['pointer', ['_HEAP_USERDATA_HEADER']]],
'AggregateExchg' : [ 0x8, ['_INTERLOCK_SEQ']],
'BlockSize' : [ 0x10, ['unsigned short']],
'Flags' : [ 0x12, ['unsigned short']],
'BlockCount' : [ 0x14, ['unsigned short']],
'SizeIndex' : [ 0x16, ['unsigned char']],
'AffinityIndex' : [ 0x17, ['unsigned char']],
'Alignment' : [ 0x10, ['array', 2, ['unsigned long']]],
'SFreeListEntry' : [ 0x18, ['_SINGLE_LIST_ENTRY']],
'Lock' : [ 0x1c, ['unsigned long']],
} ],
'__unnamed_195c' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_195e' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_195c']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1960' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_1962' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_1960']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
'_PORT_MESSAGE' : [ 0x18, {
'u1' : [ 0x0, ['__unnamed_195e']],
'u2' : [ 0x4, ['__unnamed_1962']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x10, ['unsigned long']],
'ClientViewSize' : [ 0x14, ['unsigned long']],
'CallbackId' : [ 0x14, ['unsigned long']],
} ],
'_ALPC_MESSAGE_ATTRIBUTES' : [ 0x8, {
'AllocatedAttributes' : [ 0x0, ['unsigned long']],
'ValidAttributes' : [ 0x4, ['unsigned long']],
} ],
'_ALPC_HANDLE_ENTRY' : [ 0x4, {
'Object' : [ 0x0, ['pointer', ['void']]],
} ],
'_BLOB_TYPE' : [ 0x24, {
'ResourceId' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BLOB_TYPE_UNKNOWN', 1: 'BLOB_TYPE_CONNECTION_INFO', 2: 'BLOB_TYPE_MESSAGE', 3: 'BLOB_TYPE_SECURITY_CONTEXT', 4: 'BLOB_TYPE_SECTION', 5: 'BLOB_TYPE_REGION', 6: 'BLOB_TYPE_VIEW', 7: 'BLOB_TYPE_RESERVE', 8: 'BLOB_TYPE_DIRECT_TRANSFER', 9: 'BLOB_TYPE_HANDLE_DATA', 10: 'BLOB_TYPE_MAX_ID'})]],
'PoolTag' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'CreatedObjects' : [ 0xc, ['unsigned long']],
'DeletedObjects' : [ 0x10, ['unsigned long']],
'DeleteProcedure' : [ 0x14, ['pointer', ['void']]],
'DestroyProcedure' : [ 0x18, ['pointer', ['void']]],
'UsualSize' : [ 0x1c, ['unsigned long']],
'LookasideIndex' : [ 0x20, ['unsigned long']],
} ],
'__unnamed_197e' : [ 0x1, {
'ReferenceCache' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Lookaside' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Initializing' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Deleted' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
} ],
'__unnamed_1980' : [ 0x1, {
's1' : [ 0x0, ['__unnamed_197e']],
'Flags' : [ 0x0, ['unsigned char']],
} ],
'_BLOB' : [ 0x18, {
'ResourceList' : [ 0x0, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'u1' : [ 0x8, ['__unnamed_1980']],
'ResourceId' : [ 0x9, ['unsigned char']],
'CachedReferences' : [ 0xa, ['short']],
'ReferenceCount' : [ 0xc, ['long']],
'Lock' : [ 0x10, ['_EX_PUSH_LOCK']],
'Pad' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1992' : [ 0x4, {
'Internal' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Secure' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1994' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1992']],
} ],
'_KALPC_SECTION' : [ 0x28, {
'SectionObject' : [ 0x0, ['pointer', ['void']]],
'Size' : [ 0x4, ['unsigned long']],
'HandleTable' : [ 0x8, ['pointer', ['_ALPC_HANDLE_TABLE']]],
'SectionHandle' : [ 0xc, ['pointer', ['void']]],
'OwnerProcess' : [ 0x10, ['pointer', ['_EPROCESS']]],
'OwnerPort' : [ 0x14, ['pointer', ['_ALPC_PORT']]],
'u1' : [ 0x18, ['__unnamed_1994']],
'NumberOfRegions' : [ 0x1c, ['unsigned long']],
'RegionListHead' : [ 0x20, ['_LIST_ENTRY']],
} ],
'__unnamed_199a' : [ 0x4, {
'Secure' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
} ],
'__unnamed_199c' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_199a']],
} ],
'_KALPC_REGION' : [ 0x30, {
'RegionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Section' : [ 0x8, ['pointer', ['_KALPC_SECTION']]],
'Offset' : [ 0xc, ['unsigned long']],
'Size' : [ 0x10, ['unsigned long']],
'ViewSize' : [ 0x14, ['unsigned long']],
'u1' : [ 0x18, ['__unnamed_199c']],
'NumberOfViews' : [ 0x1c, ['unsigned long']],
'ViewListHead' : [ 0x20, ['_LIST_ENTRY']],
'ReadOnlyView' : [ 0x28, ['pointer', ['_KALPC_VIEW']]],
'ReadWriteView' : [ 0x2c, ['pointer', ['_KALPC_VIEW']]],
} ],
'__unnamed_19a2' : [ 0x4, {
'WriteAccess' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoRelease' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ForceUnlink' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
} ],
'__unnamed_19a4' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19a2']],
} ],
'_KALPC_VIEW' : [ 0x34, {
'ViewListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Region' : [ 0x8, ['pointer', ['_KALPC_REGION']]],
'OwnerPort' : [ 0xc, ['pointer', ['_ALPC_PORT']]],
'OwnerProcess' : [ 0x10, ['pointer', ['_EPROCESS']]],
'Address' : [ 0x14, ['pointer', ['void']]],
'Size' : [ 0x18, ['unsigned long']],
'SecureViewHandle' : [ 0x1c, ['pointer', ['void']]],
'WriteAccessHandle' : [ 0x20, ['pointer', ['void']]],
'u1' : [ 0x24, ['__unnamed_19a4']],
'NumberOfOwnerMessages' : [ 0x28, ['unsigned long']],
'ProcessViewListEntry' : [ 0x2c, ['_LIST_ENTRY']],
} ],
'_ALPC_COMMUNICATION_INFO' : [ 0x24, {
'ConnectionPort' : [ 0x0, ['pointer', ['_ALPC_PORT']]],
'ServerCommunicationPort' : [ 0x4, ['pointer', ['_ALPC_PORT']]],
'ClientCommunicationPort' : [ 0x8, ['pointer', ['_ALPC_PORT']]],
'CommunicationList' : [ 0xc, ['_LIST_ENTRY']],
'HandleTable' : [ 0x14, ['_ALPC_HANDLE_TABLE']],
} ],
'__unnamed_19c0' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned long')]],
'ConnectionPending' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ConnectionRefused' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Disconnected' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Closed' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'NoFlushOnClose' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReturnExtendedInfo' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Waitable' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DynamicSecurity' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Wow64CompletionList' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Lpc' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'LpcToLpc' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HasCompletionList' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'HadCompletionList' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'EnableCompletionList' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
} ],
'__unnamed_19c2' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19c0']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_ALPC_PORT' : [ 0xfc, {
'PortListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CommunicationInfo' : [ 0x8, ['pointer', ['_ALPC_COMMUNICATION_INFO']]],
'OwnerProcess' : [ 0xc, ['pointer', ['_EPROCESS']]],
'CompletionPort' : [ 0x10, ['pointer', ['void']]],
'CompletionKey' : [ 0x14, ['pointer', ['void']]],
'CompletionPacketLookaside' : [ 0x18, ['pointer', ['_ALPC_COMPLETION_PACKET_LOOKASIDE']]],
'PortContext' : [ 0x1c, ['pointer', ['void']]],
'StaticSecurity' : [ 0x20, ['_SECURITY_CLIENT_CONTEXT']],
'MainQueue' : [ 0x5c, ['_LIST_ENTRY']],
'PendingQueue' : [ 0x64, ['_LIST_ENTRY']],
'LargeMessageQueue' : [ 0x6c, ['_LIST_ENTRY']],
'WaitQueue' : [ 0x74, ['_LIST_ENTRY']],
'Semaphore' : [ 0x7c, ['pointer', ['_KSEMAPHORE']]],
'DummyEvent' : [ 0x7c, ['pointer', ['_KEVENT']]],
'PortAttributes' : [ 0x80, ['_ALPC_PORT_ATTRIBUTES']],
'Lock' : [ 0xac, ['_EX_PUSH_LOCK']],
'ResourceListLock' : [ 0xb0, ['_EX_PUSH_LOCK']],
'ResourceListHead' : [ 0xb4, ['_LIST_ENTRY']],
'CompletionList' : [ 0xbc, ['pointer', ['_ALPC_COMPLETION_LIST']]],
'MessageZone' : [ 0xc0, ['pointer', ['_ALPC_MESSAGE_ZONE']]],
'CallbackObject' : [ 0xc4, ['pointer', ['_CALLBACK_OBJECT']]],
'CallbackContext' : [ 0xc8, ['pointer', ['void']]],
'CanceledQueue' : [ 0xcc, ['_LIST_ENTRY']],
'SequenceNo' : [ 0xd4, ['long']],
'u1' : [ 0xd8, ['__unnamed_19c2']],
'TargetQueuePort' : [ 0xdc, ['pointer', ['_ALPC_PORT']]],
'TargetSequencePort' : [ 0xe0, ['pointer', ['_ALPC_PORT']]],
'CachedMessage' : [ 0xe4, ['pointer', ['_KALPC_MESSAGE']]],
'MainQueueLength' : [ 0xe8, ['unsigned long']],
'PendingQueueLength' : [ 0xec, ['unsigned long']],
'LargeMessageQueueLength' : [ 0xf0, ['unsigned long']],
'CanceledQueueLength' : [ 0xf4, ['unsigned long']],
'WaitQueueLength' : [ 0xf8, ['unsigned long']],
} ],
'_OBJECT_TYPE' : [ 0x88, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'Name' : [ 0x8, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x10, ['pointer', ['void']]],
'Index' : [ 0x14, ['unsigned char']],
'TotalNumberOfObjects' : [ 0x18, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x1c, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x20, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0x24, ['unsigned long']],
'TypeInfo' : [ 0x28, ['_OBJECT_TYPE_INITIALIZER']],
'TypeLock' : [ 0x78, ['_EX_PUSH_LOCK']],
'Key' : [ 0x7c, ['unsigned long']],
'CallbackList' : [ 0x80, ['_LIST_ENTRY']],
} ],
'__unnamed_19da' : [ 0x4, {
'QueueType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'QueuePortType' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Canceled' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Ready' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ReleaseMessage' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SharedQuota' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'ReplyWaitReply' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'OwnerPortReference' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'ReserveReference' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'ReceiverReference' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'ViewAttributeRetrieved' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'InDispatch' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
} ],
'__unnamed_19dc' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19da']],
'State' : [ 0x0, ['unsigned long']],
} ],
'_KALPC_MESSAGE' : [ 0x88, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtensionBuffer' : [ 0x8, ['pointer', ['void']]],
'ExtensionBufferSize' : [ 0xc, ['unsigned long']],
'QuotaProcess' : [ 0x10, ['pointer', ['_EPROCESS']]],
'QuotaBlock' : [ 0x10, ['pointer', ['void']]],
'SequenceNo' : [ 0x14, ['long']],
'u1' : [ 0x18, ['__unnamed_19dc']],
'CancelSequencePort' : [ 0x1c, ['pointer', ['_ALPC_PORT']]],
'CancelQueuePort' : [ 0x20, ['pointer', ['_ALPC_PORT']]],
'CancelSequenceNo' : [ 0x24, ['long']],
'CancelListEntry' : [ 0x28, ['_LIST_ENTRY']],
'WaitingThread' : [ 0x30, ['pointer', ['_ETHREAD']]],
'Reserve' : [ 0x34, ['pointer', ['_KALPC_RESERVE']]],
'PortQueue' : [ 0x38, ['pointer', ['_ALPC_PORT']]],
'OwnerPort' : [ 0x3c, ['pointer', ['_ALPC_PORT']]],
'MessageAttributes' : [ 0x40, ['_KALPC_MESSAGE_ATTRIBUTES']],
'DataUserVa' : [ 0x5c, ['pointer', ['void']]],
'DataSystemVa' : [ 0x60, ['pointer', ['void']]],
'CommunicationInfo' : [ 0x64, ['pointer', ['_ALPC_COMMUNICATION_INFO']]],
'ConnectionPort' : [ 0x68, ['pointer', ['_ALPC_PORT']]],
'ServerThread' : [ 0x6c, ['pointer', ['_ETHREAD']]],
'PortMessage' : [ 0x70, ['_PORT_MESSAGE']],
} ],
'_REMOTE_PORT_VIEW' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ViewSize' : [ 0x4, ['unsigned long']],
'ViewBase' : [ 0x8, ['pointer', ['void']]],
} ],
'_KALPC_RESERVE' : [ 0x14, {
'OwnerPort' : [ 0x0, ['pointer', ['_ALPC_PORT']]],
'HandleTable' : [ 0x4, ['pointer', ['_ALPC_HANDLE_TABLE']]],
'Handle' : [ 0x8, ['pointer', ['void']]],
'Message' : [ 0xc, ['pointer', ['_KALPC_MESSAGE']]],
'Active' : [ 0x10, ['long']],
} ],
'_KALPC_HANDLE_DATA' : [ 0xc, {
'Flags' : [ 0x0, ['unsigned long']],
'ObjectType' : [ 0x4, ['unsigned long']],
'DuplicateContext' : [ 0x8, ['pointer', ['_OB_DUPLICATE_OBJECT_STATE']]],
} ],
'_KALPC_MESSAGE_ATTRIBUTES' : [ 0x1c, {
'ClientContext' : [ 0x0, ['pointer', ['void']]],
'ServerContext' : [ 0x4, ['pointer', ['void']]],
'PortContext' : [ 0x8, ['pointer', ['void']]],
'CancelPortContext' : [ 0xc, ['pointer', ['void']]],
'SecurityData' : [ 0x10, ['pointer', ['_KALPC_SECURITY_DATA']]],
'View' : [ 0x14, ['pointer', ['_KALPC_VIEW']]],
'HandleData' : [ 0x18, ['pointer', ['_KALPC_HANDLE_DATA']]],
} ],
'__unnamed_1a19' : [ 0x4, {
'Revoked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Impersonated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
} ],
'__unnamed_1a1b' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_1a19']],
} ],
'_KALPC_SECURITY_DATA' : [ 0x50, {
'HandleTable' : [ 0x0, ['pointer', ['_ALPC_HANDLE_TABLE']]],
'ContextHandle' : [ 0x4, ['pointer', ['void']]],
'OwningProcess' : [ 0x8, ['pointer', ['_EPROCESS']]],
'OwnerPort' : [ 0xc, ['pointer', ['_ALPC_PORT']]],
'DynamicSecurity' : [ 0x10, ['_SECURITY_CLIENT_CONTEXT']],
'u1' : [ 0x4c, ['__unnamed_1a1b']],
} ],
'_IO_MINI_COMPLETION_PACKET_USER' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'PacketType' : [ 0x8, ['unsigned long']],
'KeyContext' : [ 0xc, ['pointer', ['void']]],
'ApcContext' : [ 0x10, ['pointer', ['void']]],
'IoStatus' : [ 0x14, ['long']],
'IoStatusInformation' : [ 0x18, ['unsigned long']],
'MiniPacketCallback' : [ 0x1c, ['pointer', ['void']]],
'Context' : [ 0x20, ['pointer', ['void']]],
'Allocated' : [ 0x24, ['unsigned char']],
} ],
'_ALPC_DISPATCH_CONTEXT' : [ 0x20, {
'PortObject' : [ 0x0, ['pointer', ['_ALPC_PORT']]],
'Message' : [ 0x4, ['pointer', ['_KALPC_MESSAGE']]],
'CommunicationInfo' : [ 0x8, ['pointer', ['_ALPC_COMMUNICATION_INFO']]],
'TargetThread' : [ 0xc, ['pointer', ['_ETHREAD']]],
'TargetPort' : [ 0x10, ['pointer', ['_ALPC_PORT']]],
'Flags' : [ 0x14, ['unsigned long']],
'TotalLength' : [ 0x18, ['unsigned short']],
'Type' : [ 0x1a, ['unsigned short']],
'DataInfoOffset' : [ 0x1c, ['unsigned short']],
} ],
'_DRIVER_OBJECT' : [ 0xa8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x4, ['pointer', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x8, ['unsigned long']],
'DriverStart' : [ 0xc, ['pointer', ['void']]],
'DriverSize' : [ 0x10, ['unsigned long']],
'DriverSection' : [ 0x14, ['pointer', ['void']]],
'DriverExtension' : [ 0x18, ['pointer', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x1c, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x24, ['pointer', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x28, ['pointer', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x2c, ['pointer', ['void']]],
'DriverStartIo' : [ 0x30, ['pointer', ['void']]],
'DriverUnload' : [ 0x34, ['pointer', ['void']]],
'MajorFunction' : [ 0x38, ['array', 28, ['pointer', ['void']]]],
} ],
'_FILE_SEGMENT_ELEMENT' : [ 0x8, {
'Buffer' : [ 0x0, ['pointer64', ['void']]],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_RELATIVE_SYMLINK_INFO' : [ 0x14, {
'ExposedNamespaceLength' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'DeviceNameLength' : [ 0x4, ['unsigned short']],
'Reserved' : [ 0x6, ['unsigned short']],
'InteriorMountPoint' : [ 0x8, ['pointer', ['_RELATIVE_SYMLINK_INFO']]],
'OpenedName' : [ 0xc, ['_UNICODE_STRING']],
} ],
'_ECP_LIST' : [ 0x10, {
'Signature' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'EcpList' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_IOP_FILE_OBJECT_EXTENSION' : [ 0x24, {
'FoExtFlags' : [ 0x0, ['unsigned long']],
'FoExtPerTypeExtension' : [ 0x4, ['array', 7, ['pointer', ['void']]]],
'FoIoPriorityHint' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'IopIoPriorityNotSet', 1: 'IopIoPriorityVeryLow', 2: 'IopIoPriorityLow', 3: 'IopIoPriorityNormal', 4: 'IopIoPriorityHigh', 5: 'IopIoPriorityCritical', 6: 'MaxIopIoPriorityTypes'})]],
} ],
'_OPEN_PACKET' : [ 0x70, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'FileObject' : [ 0x4, ['pointer', ['_FILE_OBJECT']]],
'FinalStatus' : [ 0x8, ['long']],
'Information' : [ 0xc, ['unsigned long']],
'ParseCheck' : [ 0x10, ['unsigned long']],
'RelatedFileObject' : [ 0x14, ['pointer', ['_FILE_OBJECT']]],
'OriginalAttributes' : [ 0x18, ['pointer', ['_OBJECT_ATTRIBUTES']]],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'CreateOptions' : [ 0x28, ['unsigned long']],
'FileAttributes' : [ 0x2c, ['unsigned short']],
'ShareAccess' : [ 0x2e, ['unsigned short']],
'EaBuffer' : [ 0x30, ['pointer', ['void']]],
'EaLength' : [ 0x34, ['unsigned long']],
'Options' : [ 0x38, ['unsigned long']],
'Disposition' : [ 0x3c, ['unsigned long']],
'BasicInformation' : [ 0x40, ['pointer', ['_FILE_BASIC_INFORMATION']]],
'NetworkInformation' : [ 0x44, ['pointer', ['_FILE_NETWORK_OPEN_INFORMATION']]],
'CreateFileType' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'CreateFileTypeNone', 1: 'CreateFileTypeNamedPipe', 2: 'CreateFileTypeMailslot'})]],
'MailslotOrPipeParameters' : [ 0x4c, ['pointer', ['void']]],
'Override' : [ 0x50, ['unsigned char']],
'QueryOnly' : [ 0x51, ['unsigned char']],
'DeleteOnly' : [ 0x52, ['unsigned char']],
'FullAttributes' : [ 0x53, ['unsigned char']],
'LocalFileObject' : [ 0x54, ['pointer', ['_DUMMY_FILE_OBJECT']]],
'InternalFlags' : [ 0x58, ['unsigned long']],
'DriverCreateContext' : [ 0x5c, ['_IO_DRIVER_CREATE_CONTEXT']],
} ],
'_ETW_SYSTEMTIME' : [ 0x10, {
'Year' : [ 0x0, ['unsigned short']],
'Month' : [ 0x2, ['unsigned short']],
'DayOfWeek' : [ 0x4, ['unsigned short']],
'Day' : [ 0x6, ['unsigned short']],
'Hour' : [ 0x8, ['unsigned short']],
'Minute' : [ 0xa, ['unsigned short']],
'Second' : [ 0xc, ['unsigned short']],
'Milliseconds' : [ 0xe, ['unsigned short']],
} ],
'_TIME_FIELDS' : [ 0x10, {
'Year' : [ 0x0, ['short']],
'Month' : [ 0x2, ['short']],
'Day' : [ 0x4, ['short']],
'Hour' : [ 0x6, ['short']],
'Minute' : [ 0x8, ['short']],
'Second' : [ 0xa, ['short']],
'Milliseconds' : [ 0xc, ['short']],
'Weekday' : [ 0xe, ['short']],
} ],
'_WMI_LOGGER_CONTEXT' : [ 0x238, {
'LoggerId' : [ 0x0, ['unsigned long']],
'BufferSize' : [ 0x4, ['unsigned long']],
'MaximumEventSize' : [ 0x8, ['unsigned long']],
'CollectionOn' : [ 0xc, ['long']],
'LoggerMode' : [ 0x10, ['unsigned long']],
'AcceptNewEvents' : [ 0x14, ['long']],
'GetCpuClock' : [ 0x18, ['pointer', ['void']]],
'StartTime' : [ 0x20, ['_LARGE_INTEGER']],
'LogFileHandle' : [ 0x28, ['pointer', ['void']]],
'LoggerThread' : [ 0x2c, ['pointer', ['_ETHREAD']]],
'LoggerStatus' : [ 0x30, ['long']],
'NBQHead' : [ 0x34, ['pointer', ['void']]],
'OverflowNBQHead' : [ 0x38, ['pointer', ['void']]],
'QueueBlockFreeList' : [ 0x40, ['_SLIST_HEADER']],
'GlobalList' : [ 0x48, ['_LIST_ENTRY']],
'BatchedBufferList' : [ 0x50, ['pointer', ['_WMI_BUFFER_HEADER']]],
'CurrentBuffer' : [ 0x50, ['_EX_FAST_REF']],
'LoggerName' : [ 0x54, ['_UNICODE_STRING']],
'LogFileName' : [ 0x5c, ['_UNICODE_STRING']],
'LogFilePattern' : [ 0x64, ['_UNICODE_STRING']],
'NewLogFileName' : [ 0x6c, ['_UNICODE_STRING']],
'ClockType' : [ 0x74, ['unsigned long']],
'MaximumFileSize' : [ 0x78, ['unsigned long']],
'LastFlushedBuffer' : [ 0x7c, ['unsigned long']],
'FlushTimer' : [ 0x80, ['unsigned long']],
'FlushThreshold' : [ 0x84, ['unsigned long']],
'ByteOffset' : [ 0x88, ['_LARGE_INTEGER']],
'MinimumBuffers' : [ 0x90, ['unsigned long']],
'BuffersAvailable' : [ 0x94, ['long']],
'NumberOfBuffers' : [ 0x98, ['long']],
'MaximumBuffers' : [ 0x9c, ['unsigned long']],
'EventsLost' : [ 0xa0, ['unsigned long']],
'BuffersWritten' : [ 0xa4, ['unsigned long']],
'LogBuffersLost' : [ 0xa8, ['unsigned long']],
'RealTimeBuffersDelivered' : [ 0xac, ['unsigned long']],
'RealTimeBuffersLost' : [ 0xb0, ['unsigned long']],
'SequencePtr' : [ 0xb4, ['pointer', ['long']]],
'LocalSequence' : [ 0xb8, ['unsigned long']],
'InstanceGuid' : [ 0xbc, ['_GUID']],
'FileCounter' : [ 0xcc, ['long']],
'BufferCallback' : [ 0xd0, ['pointer', ['void']]],
'PoolType' : [ 0xd4, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'ReferenceTime' : [ 0xd8, ['_ETW_REF_CLOCK']],
'Consumers' : [ 0xe8, ['_LIST_ENTRY']],
'NumConsumers' : [ 0xf0, ['unsigned long']],
'TransitionConsumer' : [ 0xf4, ['pointer', ['_ETW_REALTIME_CONSUMER']]],
'RealtimeLogfileHandle' : [ 0xf8, ['pointer', ['void']]],
'RealtimeLogfileName' : [ 0xfc, ['_UNICODE_STRING']],
'RealtimeWriteOffset' : [ 0x108, ['_LARGE_INTEGER']],
'RealtimeReadOffset' : [ 0x110, ['_LARGE_INTEGER']],
'RealtimeLogfileSize' : [ 0x118, ['_LARGE_INTEGER']],
'RealtimeLogfileUsage' : [ 0x120, ['unsigned long long']],
'RealtimeMaximumFileSize' : [ 0x128, ['unsigned long long']],
'RealtimeBuffersSaved' : [ 0x130, ['unsigned long']],
'RealtimeReferenceTime' : [ 0x138, ['_ETW_REF_CLOCK']],
'NewRTEventsLost' : [ 0x148, ['Enumeration', dict(target = 'long', choices = {0: 'EtwRtEventNoLoss', 1: 'EtwRtEventLost', 2: 'EtwRtBufferLost', 3: 'EtwRtBackupLost', 4: 'EtwRtEventLossMax'})]],
'LoggerEvent' : [ 0x14c, ['_KEVENT']],
'FlushEvent' : [ 0x15c, ['_KEVENT']],
'FlushTimeOutTimer' : [ 0x170, ['_KTIMER']],
'FlushDpc' : [ 0x198, ['_KDPC']],
'LoggerMutex' : [ 0x1b8, ['_KMUTANT']],
'LoggerLock' : [ 0x1d8, ['_EX_PUSH_LOCK']],
'BufferListSpinLock' : [ 0x1dc, ['unsigned long']],
'BufferListPushLock' : [ 0x1dc, ['_EX_PUSH_LOCK']],
'ClientSecurityContext' : [ 0x1e0, ['_SECURITY_CLIENT_CONTEXT']],
'SecurityDescriptor' : [ 0x21c, ['_EX_FAST_REF']],
'BufferSequenceNumber' : [ 0x220, ['long long']],
'Flags' : [ 0x228, ['unsigned long']],
'Persistent' : [ 0x228, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AutoLogger' : [ 0x228, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'FsReady' : [ 0x228, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RealTime' : [ 0x228, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow' : [ 0x228, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'KernelTrace' : [ 0x228, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'NoMoreEnable' : [ 0x228, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'StackTracing' : [ 0x228, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ErrorLogged' : [ 0x228, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'RealtimeLoggerContextFreed' : [ 0x228, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'RequestFlag' : [ 0x22c, ['unsigned long']],
'RequestNewFie' : [ 0x22c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'RequestUpdateFile' : [ 0x22c, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'RequestFlush' : [ 0x22c, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'RequestDisableRealtime' : [ 0x22c, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'RequestDisconnectConsumer' : [ 0x22c, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'RequestConnectConsumer' : [ 0x22c, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'HookIdMap' : [ 0x230, ['_RTL_BITMAP']],
} ],
'_ETW_LOGGER_HANDLE' : [ 0x1, {
'DereferenceAndLeave' : [ 0x0, ['unsigned char']],
} ],
'_ETW_BUFFER_HANDLE' : [ 0x8, {
'TraceBuffer' : [ 0x0, ['pointer', ['_WMI_BUFFER_HEADER']]],
'BufferFastRef' : [ 0x4, ['pointer', ['_EX_FAST_REF']]],
} ],
'_SYSTEM_TRACE_HEADER' : [ 0x20, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'ThreadId' : [ 0x8, ['unsigned long']],
'ProcessId' : [ 0xc, ['unsigned long']],
'SystemTime' : [ 0x10, ['_LARGE_INTEGER']],
'KernelTime' : [ 0x18, ['unsigned long']],
'UserTime' : [ 0x1c, ['unsigned long']],
} ],
'_PERFINFO_TRACE_HEADER' : [ 0x18, {
'Marker' : [ 0x0, ['unsigned long']],
'Version' : [ 0x0, ['unsigned short']],
'HeaderType' : [ 0x2, ['unsigned char']],
'Flags' : [ 0x3, ['unsigned char']],
'Header' : [ 0x4, ['unsigned long']],
'Packet' : [ 0x4, ['_WMI_TRACE_PACKET']],
'TS' : [ 0x8, ['unsigned long long']],
'SystemTime' : [ 0x8, ['_LARGE_INTEGER']],
'Data' : [ 0x10, ['array', 1, ['unsigned char']]],
} ],
'_NBQUEUE_BLOCK' : [ 0x18, {
'SListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Next' : [ 0x8, ['unsigned long long']],
'Data' : [ 0x10, ['unsigned long long']],
} ],
'_KMUTANT' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x10, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x18, ['pointer', ['_KTHREAD']]],
'Abandoned' : [ 0x1c, ['unsigned char']],
'ApcDisable' : [ 0x1d, ['unsigned char']],
} ],
'_ETW_LAST_ENABLE_INFO' : [ 0x10, {
'EnableFlags' : [ 0x0, ['_LARGE_INTEGER']],
'LoggerId' : [ 0x8, ['unsigned short']],
'Level' : [ 0xa, ['unsigned char']],
'Enabled' : [ 0xb, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'InternalFlag' : [ 0xb, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'_TRACE_ENABLE_CONTEXT' : [ 0x8, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
} ],
'_TRACE_ENABLE_CONTEXT_EX' : [ 0x10, {
'LoggerId' : [ 0x0, ['unsigned short']],
'Level' : [ 0x2, ['unsigned char']],
'InternalFlag' : [ 0x3, ['unsigned char']],
'EnableFlags' : [ 0x4, ['unsigned long']],
'EnableFlagsHigh' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_ETW_GUID_ENTRY' : [ 0x178, {
'GuidList' : [ 0x0, ['_LIST_ENTRY']],
'RefCount' : [ 0x8, ['long']],
'Guid' : [ 0xc, ['_GUID']],
'RegListHead' : [ 0x1c, ['_LIST_ENTRY']],
'SecurityDescriptor' : [ 0x24, ['pointer', ['void']]],
'LastEnable' : [ 0x28, ['_ETW_LAST_ENABLE_INFO']],
'MatchId' : [ 0x28, ['unsigned long long']],
'ProviderEnableInfo' : [ 0x38, ['_TRACE_ENABLE_INFO']],
'EnableInfo' : [ 0x58, ['array', 8, ['_TRACE_ENABLE_INFO']]],
'FilterData' : [ 0x158, ['array', 8, ['pointer', ['_EVENT_FILTER_HEADER']]]],
} ],
'_TRACE_ENABLE_INFO' : [ 0x20, {
'IsEnabled' : [ 0x0, ['unsigned long']],
'Level' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'LoggerId' : [ 0x6, ['unsigned short']],
'EnableProperty' : [ 0x8, ['unsigned long']],
'Reserved2' : [ 0xc, ['unsigned long']],
'MatchAnyKeyword' : [ 0x10, ['unsigned long long']],
'MatchAllKeyword' : [ 0x18, ['unsigned long long']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_TOKEN' : [ 0x1e0, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer', ['_ERESOURCE']]],
'ModifiedId' : [ 0x34, ['_LUID']],
'Privileges' : [ 0x40, ['_SEP_TOKEN_PRIVILEGES']],
'AuditPolicy' : [ 0x58, ['_SEP_AUDIT_POLICY']],
'SessionId' : [ 0x74, ['unsigned long']],
'UserAndGroupCount' : [ 0x78, ['unsigned long']],
'RestrictedSidCount' : [ 0x7c, ['unsigned long']],
'VariableLength' : [ 0x80, ['unsigned long']],
'DynamicCharged' : [ 0x84, ['unsigned long']],
'DynamicAvailable' : [ 0x88, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x8c, ['unsigned long']],
'UserAndGroups' : [ 0x90, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0x94, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0x98, ['pointer', ['void']]],
'DynamicPart' : [ 0x9c, ['pointer', ['unsigned long']]],
'DefaultDacl' : [ 0xa0, ['pointer', ['_ACL']]],
'TokenType' : [ 0xa4, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0xa8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0xac, ['unsigned long']],
'TokenInUse' : [ 0xb0, ['unsigned char']],
'IntegrityLevelIndex' : [ 0xb4, ['unsigned long']],
'MandatoryPolicy' : [ 0xb8, ['unsigned long']],
'LogonSession' : [ 0xbc, ['pointer', ['_SEP_LOGON_SESSION_REFERENCES']]],
'OriginatingLogonSession' : [ 0xc0, ['_LUID']],
'SidHash' : [ 0xc8, ['_SID_AND_ATTRIBUTES_HASH']],
'RestrictedSidHash' : [ 0x150, ['_SID_AND_ATTRIBUTES_HASH']],
'pSecurityAttributes' : [ 0x1d8, ['pointer', ['_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION']]],
'VariablePart' : [ 0x1dc, ['unsigned long']],
} ],
'_SEP_LOGON_SESSION_REFERENCES' : [ 0x34, {
'Next' : [ 0x0, ['pointer', ['_SEP_LOGON_SESSION_REFERENCES']]],
'LogonId' : [ 0x4, ['_LUID']],
'BuddyLogonId' : [ 0xc, ['_LUID']],
'ReferenceCount' : [ 0x14, ['unsigned long']],
'Flags' : [ 0x18, ['unsigned long']],
'pDeviceMap' : [ 0x1c, ['pointer', ['_DEVICE_MAP']]],
'Token' : [ 0x20, ['pointer', ['void']]],
'AccountName' : [ 0x24, ['_UNICODE_STRING']],
'AuthorityName' : [ 0x2c, ['_UNICODE_STRING']],
} ],
'_OBJECT_HEADER' : [ 0x20, {
'PointerCount' : [ 0x0, ['long']],
'HandleCount' : [ 0x4, ['long']],
'NextToFree' : [ 0x4, ['pointer', ['void']]],
'Lock' : [ 0x8, ['_EX_PUSH_LOCK']],
'TypeIndex' : [ 0xc, ['unsigned char']],
'TraceFlags' : [ 0xd, ['unsigned char']],
'InfoMask' : [ 0xe, ['unsigned char']],
'Flags' : [ 0xf, ['unsigned char']],
'ObjectCreateInfo' : [ 0x10, ['pointer', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x10, ['pointer', ['void']]],
'SecurityDescriptor' : [ 0x14, ['pointer', ['void']]],
'Body' : [ 0x18, ['_QUAD']],
} ],
'_OBJECT_HEADER_QUOTA_INFO' : [ 0x10, {
'PagedPoolCharge' : [ 0x0, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x4, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x8, ['unsigned long']],
'SecurityDescriptorQuotaBlock' : [ 0xc, ['pointer', ['void']]],
} ],
'_OBJECT_HEADER_PROCESS_INFO' : [ 0x8, {
'ExclusiveProcess' : [ 0x0, ['pointer', ['_EPROCESS']]],
'Reserved' : [ 0x4, ['unsigned long']],
} ],
'_OBJECT_HEADER_HANDLE_INFO' : [ 0x8, {
'HandleCountDataBase' : [ 0x0, ['pointer', ['_OBJECT_HANDLE_COUNT_DATABASE']]],
'SingleEntry' : [ 0x0, ['_OBJECT_HANDLE_COUNT_ENTRY']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x10, {
'Directory' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x4, ['_UNICODE_STRING']],
'ReferenceCount' : [ 0xc, ['long']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x10, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x8, ['pointer', ['void']]],
'CreatorBackTraceIndex' : [ 0xc, ['unsigned short']],
'Reserved' : [ 0xe, ['unsigned short']],
} ],
'_OBP_LOOKUP_CONTEXT' : [ 0x14, {
'Directory' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY']]],
'Object' : [ 0x4, ['pointer', ['void']]],
'HashValue' : [ 0x8, ['unsigned long']],
'HashIndex' : [ 0xc, ['unsigned short']],
'DirectoryLocked' : [ 0xe, ['unsigned char']],
'LockedExclusive' : [ 0xf, ['unsigned char']],
'LockStateSignature' : [ 0x10, ['unsigned long']],
} ],
'_OBJECT_DIRECTORY' : [ 0xa8, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x94, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x98, ['pointer', ['_DEVICE_MAP']]],
'SessionId' : [ 0x9c, ['unsigned long']],
'NamespaceEntry' : [ 0xa0, ['pointer', ['void']]],
'Flags' : [ 0xa4, ['unsigned long']],
} ],
'_PS_CLIENT_SECURITY_CONTEXT' : [ 0x4, {
'ImpersonationData' : [ 0x0, ['unsigned long']],
'ImpersonationToken' : [ 0x0, ['pointer', ['void']]],
'ImpersonationLevel' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='unsigned long')]],
'EffectiveOnly' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
'ArmControlSet' : [ 0x0, ['_ARM_DBGKD_CONTROL_SET']],
'PpcControlSet' : [ 0x0, ['_PPC_DBGKD_CONTROL_SET']],
} ],
'_MMVAD_FLAGS3' : [ 0x4, {
'PreferredNode' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 6, native_type='unsigned long')]],
'Teb' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SequentialAccess' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'LastSequentialTrim' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 24, native_type='unsigned long')]],
'Spare2' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x4, {
'VerifierPoolEntry' : [ 0x0, ['pointer', ['_VI_POOL_ENTRY']]],
} ],
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'RmId' : [ 0x70, ['_GUID']],
'LogId' : [ 0x80, ['_GUID']],
'Flags' : [ 0x90, ['unsigned long']],
'TmId' : [ 0x94, ['_GUID']],
'GuidSignature' : [ 0xa4, ['unsigned long']],
'Reserved1' : [ 0xa8, ['array', 85, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 882, ['unsigned long']]],
'ThawTmId' : [ 0xfc8, ['_GUID']],
'ThawRmId' : [ 0xfd8, ['_GUID']],
'ThawLogId' : [ 0xfe8, ['_GUID']],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_ERESOURCE' : [ 0x38, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x8, ['pointer', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0xc, ['short']],
'Flag' : [ 0xe, ['unsigned short']],
'SharedWaiters' : [ 0x10, ['pointer', ['_KSEMAPHORE']]],
'ExclusiveWaiters' : [ 0x14, ['pointer', ['_KEVENT']]],
'OwnerEntry' : [ 0x18, ['_OWNER_ENTRY']],
'ActiveEntries' : [ 0x20, ['unsigned long']],
'ContentionCount' : [ 0x24, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x28, ['unsigned long']],
'NumberOfExclusiveWaiters' : [ 0x2c, ['unsigned long']],
'Address' : [ 0x30, ['pointer', ['void']]],
'CreatorBackTraceIndex' : [ 0x30, ['unsigned long']],
'SpinLock' : [ 0x34, ['unsigned long']],
} ],
'_ARM_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_LPCP_MESSAGE' : [ 0x30, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x4, ['unsigned long']],
'SenderPort' : [ 0x8, ['pointer', ['void']]],
'RepliedToThread' : [ 0xc, ['pointer', ['_ETHREAD']]],
'PortContext' : [ 0x10, ['pointer', ['void']]],
'Request' : [ 0x18, ['_PORT_MESSAGE']],
} ],
'_HARDWARE_PTE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'reserved' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_DUAL' : [ 0x13c, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x4, ['pointer', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x8, ['pointer', ['_HMAP_TABLE']]],
'Guard' : [ 0xc, ['unsigned long']],
'FreeDisplay' : [ 0x10, ['array', 24, ['_FREE_DISPLAY']]],
'FreeSummary' : [ 0x130, ['unsigned long']],
'FreeBins' : [ 0x134, ['_LIST_ENTRY']],
} ],
'_ALPC_PORT_ATTRIBUTES' : [ 0x2c, {
'Flags' : [ 0x0, ['unsigned long']],
'SecurityQos' : [ 0x4, ['_SECURITY_QUALITY_OF_SERVICE']],
'MaxMessageLength' : [ 0x10, ['unsigned long']],
'MemoryBandwidth' : [ 0x14, ['unsigned long']],
'MaxPoolUsage' : [ 0x18, ['unsigned long']],
'MaxSectionSize' : [ 0x1c, ['unsigned long']],
'MaxViewSize' : [ 0x20, ['unsigned long']],
'MaxTotalSectionSize' : [ 0x24, ['unsigned long']],
'DupObjectTypes' : [ 0x28, ['unsigned long']],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_KQUEUE' : [ 0x28, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x10, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x18, ['unsigned long']],
'MaximumCount' : [ 0x1c, ['unsigned long']],
'ThreadListHead' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_KSTACK_COUNT' : [ 0x4, {
'Value' : [ 0x0, ['long']],
'State' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'StackCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'_DISPATCHER_HEADER' : [ 0x10, {
'Type' : [ 0x0, ['unsigned char']],
'TimerControlFlags' : [ 0x1, ['unsigned char']],
'Absolute' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Coalescable' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'KeepShifting' : [ 0x1, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'EncodedTolerableDelay' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'Abandoned' : [ 0x1, ['unsigned char']],
'Signalling' : [ 0x1, ['unsigned char']],
'ThreadControlFlags' : [ 0x2, ['unsigned char']],
'CpuThrottled' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'CycleProfiling' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'CounterProfiling' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
'Hand' : [ 0x2, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'TimerMiscFlags' : [ 0x3, ['unsigned char']],
'Index' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Processor' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned char')]],
'Inserted' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Expired' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'DebugActive' : [ 0x3, ['unsigned char']],
'ActiveDR7' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Instrumented' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved2' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned char')]],
'UmsScheduled' : [ 0x3, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'UmsPrimary' : [ 0x3, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'DpcActive' : [ 0x3, ['unsigned char']],
'Lock' : [ 0x0, ['long']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_VI_POOL_ENTRY' : [ 0x10, {
'PageHeader' : [ 0x0, ['_VI_POOL_PAGE_HEADER']],
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'NextFree' : [ 0x0, ['pointer', ['_SINGLE_LIST_ENTRY']]],
} ],
'_MM_PAGE_ACCESS_INFO' : [ 0x8, {
'Flags' : [ 0x0, ['_MM_PAGE_ACCESS_INFO_FLAGS']],
'FileOffset' : [ 0x0, ['unsigned long long']],
'VirtualAddress' : [ 0x0, ['pointer', ['void']]],
'DontUse0' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned long')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'PointerProtoPte' : [ 0x4, ['pointer', ['void']]],
} ],
'_HEAP_COUNTERS' : [ 0x54, {
'TotalMemoryReserved' : [ 0x0, ['unsigned long']],
'TotalMemoryCommitted' : [ 0x4, ['unsigned long']],
'TotalMemoryLargeUCR' : [ 0x8, ['unsigned long']],
'TotalSizeInVirtualBlocks' : [ 0xc, ['unsigned long']],
'TotalSegments' : [ 0x10, ['unsigned long']],
'TotalUCRs' : [ 0x14, ['unsigned long']],
'CommittOps' : [ 0x18, ['unsigned long']],
'DeCommitOps' : [ 0x1c, ['unsigned long']],
'LockAcquires' : [ 0x20, ['unsigned long']],
'LockCollisions' : [ 0x24, ['unsigned long']],
'CommitRate' : [ 0x28, ['unsigned long']],
'DecommittRate' : [ 0x2c, ['unsigned long']],
'CommitFailures' : [ 0x30, ['unsigned long']],
'InBlockCommitFailures' : [ 0x34, ['unsigned long']],
'CompactHeapCalls' : [ 0x38, ['unsigned long']],
'CompactedUCRs' : [ 0x3c, ['unsigned long']],
'AllocAndFreeOps' : [ 0x40, ['unsigned long']],
'InBlockDeccommits' : [ 0x44, ['unsigned long']],
'InBlockDeccomitSize' : [ 0x48, ['unsigned long']],
'HighWatermarkSize' : [ 0x4c, ['unsigned long']],
'LastPolledSize' : [ 0x50, ['unsigned long']],
} ],
'_CM_KEY_HASH' : [ 0x10, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x4, ['pointer', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x8, ['pointer', ['_HHIVE']]],
'KeyCell' : [ 0xc, ['unsigned long']],
} ],
'_SYSPTES_HEADER' : [ 0x14, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Count' : [ 0x8, ['unsigned long']],
'NumberOfEntries' : [ 0xc, ['unsigned long']],
'NumberOfEntriesPeak' : [ 0x10, ['unsigned long']],
} ],
'_EXCEPTION_RECORD' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0xc, ['pointer', ['void']]],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_PENDING_RELATIONS_LIST_ENTRY' : [ 0x3c, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'WorkItem' : [ 0x8, ['_WORK_QUEUE_ITEM']],
'DeviceEvent' : [ 0x18, ['pointer', ['_PNP_DEVICE_EVENT_ENTRY']]],
'DeviceObject' : [ 0x1c, ['pointer', ['_DEVICE_OBJECT']]],
'RelationsList' : [ 0x20, ['pointer', ['_RELATION_LIST']]],
'EjectIrp' : [ 0x24, ['pointer', ['_IRP']]],
'Lock' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'IRPLOCK_CANCELABLE', 1: 'IRPLOCK_CANCEL_STARTED', 2: 'IRPLOCK_CANCEL_COMPLETE', 3: 'IRPLOCK_COMPLETED'})]],
'Problem' : [ 0x2c, ['unsigned long']],
'ProfileChangingEject' : [ 0x30, ['unsigned char']],
'DisplaySafeRemovalDialog' : [ 0x31, ['unsigned char']],
'LightestSleepState' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DockInterface' : [ 0x38, ['pointer', ['DOCK_INTERFACE']]],
} ],
'_I386_LOADER_BLOCK' : [ 0xc, {
'CommonDataArea' : [ 0x0, ['pointer', ['void']]],
'MachineType' : [ 0x4, ['unsigned long']],
'VirtualBias' : [ 0x8, ['unsigned long']],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_ARC_DISK_INFORMATION' : [ 0x8, {
'DiskSignatures' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_HEAP_TUNING_PARAMETERS' : [ 0x8, {
'CommittThresholdShift' : [ 0x0, ['unsigned long']],
'MaxPreCommittThreshold' : [ 0x4, ['unsigned long']],
} ],
'_MMWSLE_NONDIRECT_HASH' : [ 0x8, {
'Key' : [ 0x0, ['pointer', ['void']]],
'Index' : [ 0x4, ['unsigned long']],
} ],
'_HMAP_DIRECTORY' : [ 0x1000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer', ['_HMAP_TABLE']]]],
} ],
'_HANDLE_TABLE' : [ 0x3c, {
'TableCode' : [ 0x0, ['unsigned long']],
'QuotaProcess' : [ 0x4, ['pointer', ['_EPROCESS']]],
'UniqueProcessId' : [ 0x8, ['pointer', ['void']]],
'HandleLock' : [ 0xc, ['_EX_PUSH_LOCK']],
'HandleTableList' : [ 0x10, ['_LIST_ENTRY']],
'HandleContentionEvent' : [ 0x18, ['_EX_PUSH_LOCK']],
'DebugInfo' : [ 0x1c, ['pointer', ['_HANDLE_TRACE_DEBUG_INFO']]],
'ExtraInfoPages' : [ 0x20, ['long']],
'Flags' : [ 0x24, ['unsigned long']],
'StrictFIFO' : [ 0x24, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FirstFreeHandle' : [ 0x28, ['unsigned long']],
'LastFreeHandleEntry' : [ 0x2c, ['pointer', ['_HANDLE_TABLE_ENTRY']]],
'HandleCount' : [ 0x30, ['unsigned long']],
'NextHandleNeedingPool' : [ 0x34, ['unsigned long']],
'HandleCountHighWatermark' : [ 0x38, ['unsigned long']],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0x10, {
'Va' : [ 0x0, ['pointer', ['void']]],
'Key' : [ 0x4, ['unsigned long']],
'PoolType' : [ 0x8, ['unsigned long']],
'NumberOfBytes' : [ 0xc, ['unsigned long']],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'OneSecured' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'MultipleSecured' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'LongVad' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ExtendableFile' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_TEB_ACTIVE_FRAME' : [ 0xc, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x4, ['pointer', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x8, ['pointer', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_ACCESS_REASONS' : [ 0x80, {
'Data' : [ 0x0, ['array', 32, ['unsigned long']]],
} ],
'_CM_KEY_BODY' : [ 0x2c, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x4, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x8, ['pointer', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0xc, ['pointer', ['void']]],
'KeyBodyList' : [ 0x10, ['_LIST_ENTRY']],
'Flags' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'HandleTags' : [ 0x18, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'KtmTrans' : [ 0x1c, ['pointer', ['void']]],
'KtmUow' : [ 0x20, ['pointer', ['_GUID']]],
'ContextListHead' : [ 0x24, ['_LIST_ENTRY']],
} ],
'_KWAIT_BLOCK' : [ 0x18, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x8, ['pointer', ['_KTHREAD']]],
'Object' : [ 0xc, ['pointer', ['void']]],
'NextWaitBlock' : [ 0x10, ['pointer', ['_KWAIT_BLOCK']]],
'WaitKey' : [ 0x14, ['unsigned short']],
'WaitType' : [ 0x16, ['unsigned char']],
'BlockState' : [ 0x17, ['unsigned char']],
} ],
'_MMPTE_PROTOTYPE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ProtoAddressLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 9, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'ProtoAddressHigh' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 32, native_type='unsigned long')]],
} ],
'_WHEA_ERROR_PACKET_FLAGS' : [ 0x4, {
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HypervisorError' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'PlatformPfaControl' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'PlatformDirectedOffline' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_THERMAL_INFORMATION_EX' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0xc, ['_KAFFINITY_EX']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
'S4TransitionTripPoint' : [ 0x54, ['unsigned long']],
} ],
'__unnamed_1c1b' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Spare0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
} ],
'__unnamed_1c1d' : [ 0x4, {
'FilePointerIndex' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 9, native_type='unsigned long')]],
'HardFault' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Spare1' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
} ],
'_MM_PAGE_ACCESS_INFO_FLAGS' : [ 0x4, {
'File' : [ 0x0, ['__unnamed_1c1b']],
'Private' : [ 0x0, ['__unnamed_1c1d']],
} ],
'_VI_VERIFIER_ISSUE' : [ 0x10, {
'IssueType' : [ 0x0, ['unsigned long']],
'Address' : [ 0x4, ['pointer', ['void']]],
'Parameters' : [ 0x8, ['array', 2, ['unsigned long']]],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'SubsectionAccessed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 6, native_type='unsigned short')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 16, native_type='unsigned short')]],
'SubsectionStatic' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'GlobalMemory' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'DirtyPages' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'Spare' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'SectorEndOffset' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 16, native_type='unsigned short')]],
} ],
'_EXCEPTION_POINTERS' : [ 0x8, {
'ExceptionRecord' : [ 0x0, ['pointer', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x4, ['pointer', ['_CONTEXT']]],
} ],
'_OBJECT_REF_INFO' : [ 0x1c, {
'ObjectHeader' : [ 0x0, ['pointer', ['_OBJECT_HEADER']]],
'NextRef' : [ 0x4, ['pointer', ['void']]],
'ImageFileName' : [ 0x8, ['array', 16, ['unsigned char']]],
'NextPos' : [ 0x18, ['unsigned short']],
'MaxStacks' : [ 0x1a, ['unsigned short']],
'StackInfo' : [ 0x1c, ['array', 0, ['_OBJECT_REF_STACK_INFO']]],
} ],
'_HBIN' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned long']],
'FileOffset' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['array', 2, ['unsigned long']]],
'TimeStamp' : [ 0x14, ['_LARGE_INTEGER']],
'Spare' : [ 0x1c, ['unsigned long']],
} ],
'_MI_IMAGE_SECURITY_REFERENCE' : [ 0xc, {
'SecurityContext' : [ 0x0, ['_IMAGE_SECURITY_CONTEXT']],
'DynamicRelocations' : [ 0x4, ['pointer', ['void']]],
'ReferenceCount' : [ 0x8, ['long']],
} ],
'_HEAP_TAG_ENTRY' : [ 0x40, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
'TagIndex' : [ 0xc, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0xe, ['unsigned short']],
'TagName' : [ 0x10, ['array', 24, ['wchar']]],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'__unnamed_1c3f' : [ 0x8, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Secured' : [ 0x0, ['_MMADDRESS_LIST']],
} ],
'__unnamed_1c45' : [ 0x4, {
'Banked' : [ 0x0, ['pointer', ['_MMBANKED_SECTION']]],
'ExtendedInfo' : [ 0x0, ['pointer', ['_MMEXTEND_INFO']]],
} ],
'_MMVAD_LONG' : [ 0x48, {
'u1' : [ 0x0, ['__unnamed_1580']],
'LeftChild' : [ 0x4, ['pointer', ['_MMVAD']]],
'RightChild' : [ 0x8, ['pointer', ['_MMVAD']]],
'StartingVpn' : [ 0xc, ['unsigned long']],
'EndingVpn' : [ 0x10, ['unsigned long']],
'u' : [ 0x14, ['__unnamed_1583']],
'PushLock' : [ 0x18, ['_EX_PUSH_LOCK']],
'u5' : [ 0x1c, ['__unnamed_1586']],
'u2' : [ 0x20, ['__unnamed_1593']],
'Subsection' : [ 0x24, ['pointer', ['_SUBSECTION']]],
'FirstPrototypePte' : [ 0x28, ['pointer', ['_MMPTE']]],
'LastContiguousPte' : [ 0x2c, ['pointer', ['_MMPTE']]],
'ViewLinks' : [ 0x30, ['_LIST_ENTRY']],
'VadsProcess' : [ 0x38, ['pointer', ['_EPROCESS']]],
'u3' : [ 0x3c, ['__unnamed_1c3f']],
'u4' : [ 0x44, ['__unnamed_1c45']],
} ],
'_MMWSLE_FREE_ENTRY' : [ 0x4, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousFree' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 12, native_type='unsigned long')]],
'NextFree' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_NT_TIB' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x4, ['pointer', ['void']]],
'StackLimit' : [ 0x8, ['pointer', ['void']]],
'SubSystemTib' : [ 0xc, ['pointer', ['void']]],
'FiberData' : [ 0x10, ['pointer', ['void']]],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['pointer', ['void']]],
'Self' : [ 0x18, ['pointer', ['_NT_TIB']]],
} ],
'_WHEA_REVISION' : [ 0x2, {
'MinorRevision' : [ 0x0, ['unsigned char']],
'MajorRevision' : [ 0x1, ['unsigned char']],
'AsUSHORT' : [ 0x0, ['unsigned short']],
} ],
'_EJOB' : [ 0x138, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x10, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x18, ['_LIST_ENTRY']],
'JobLock' : [ 0x20, ['_ERESOURCE']],
'TotalUserTime' : [ 0x58, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0x60, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0x68, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0x70, ['_LARGE_INTEGER']],
'TotalPageFaultCount' : [ 0x78, ['unsigned long']],
'TotalProcesses' : [ 0x7c, ['unsigned long']],
'ActiveProcesses' : [ 0x80, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0x84, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0x88, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0x90, ['_LARGE_INTEGER']],
'MinimumWorkingSetSize' : [ 0x98, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x9c, ['unsigned long']],
'LimitFlags' : [ 0xa0, ['unsigned long']],
'ActiveProcessLimit' : [ 0xa4, ['unsigned long']],
'Affinity' : [ 0xa8, ['_KAFFINITY_EX']],
'PriorityClass' : [ 0xb4, ['unsigned char']],
'AccessState' : [ 0xb8, ['pointer', ['_JOB_ACCESS_STATE']]],
'UIRestrictionsClass' : [ 0xbc, ['unsigned long']],
'EndOfJobTimeAction' : [ 0xc0, ['unsigned long']],
'CompletionPort' : [ 0xc4, ['pointer', ['void']]],
'CompletionKey' : [ 0xc8, ['pointer', ['void']]],
'SessionId' : [ 0xcc, ['unsigned long']],
'SchedulingClass' : [ 0xd0, ['unsigned long']],
'ReadOperationCount' : [ 0xd8, ['unsigned long long']],
'WriteOperationCount' : [ 0xe0, ['unsigned long long']],
'OtherOperationCount' : [ 0xe8, ['unsigned long long']],
'ReadTransferCount' : [ 0xf0, ['unsigned long long']],
'WriteTransferCount' : [ 0xf8, ['unsigned long long']],
'OtherTransferCount' : [ 0x100, ['unsigned long long']],
'ProcessMemoryLimit' : [ 0x108, ['unsigned long']],
'JobMemoryLimit' : [ 0x10c, ['unsigned long']],
'PeakProcessMemoryUsed' : [ 0x110, ['unsigned long']],
'PeakJobMemoryUsed' : [ 0x114, ['unsigned long']],
'CurrentJobMemoryUsed' : [ 0x118, ['unsigned long long']],
'MemoryLimitsLock' : [ 0x120, ['_EX_PUSH_LOCK']],
'JobSetLinks' : [ 0x124, ['_LIST_ENTRY']],
'MemberLevel' : [ 0x12c, ['unsigned long']],
'JobFlags' : [ 0x130, ['unsigned long']],
} ],
'__unnamed_1c56' : [ 0x4, {
'AsULONG' : [ 0x0, ['unsigned long']],
'AllowScaling' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Disabled' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HvMaxCState' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
} ],
'_PPM_IDLE_STATES' : [ 0x68, {
'Count' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['__unnamed_1c56']],
'TargetState' : [ 0x8, ['unsigned long']],
'ActualState' : [ 0xc, ['unsigned long']],
'OldState' : [ 0x10, ['unsigned long']],
'NewlyUnparked' : [ 0x14, ['unsigned char']],
'TargetProcessors' : [ 0x18, ['_KAFFINITY_EX']],
'State' : [ 0x28, ['array', 1, ['_PPM_IDLE_STATE']]],
} ],
'__unnamed_1c5f' : [ 0x10, {
'EfiInformation' : [ 0x0, ['_EFI_FIRMWARE_INFORMATION']],
'PcatInformation' : [ 0x0, ['_PCAT_FIRMWARE_INFORMATION']],
} ],
'_FIRMWARE_INFORMATION_LOADER_BLOCK' : [ 0x14, {
'FirmwareTypeEfi' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'u' : [ 0x4, ['__unnamed_1c5f']],
} ],
'_HEAP_UCR_DESCRIPTOR' : [ 0x18, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SegmentEntry' : [ 0x8, ['_LIST_ENTRY']],
'Address' : [ 0x10, ['pointer', ['void']]],
'Size' : [ 0x14, ['unsigned long']],
} ],
'_ETW_REALTIME_CONSUMER' : [ 0x50, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'ProcessHandle' : [ 0x8, ['pointer', ['void']]],
'ProcessObject' : [ 0xc, ['pointer', ['_EPROCESS']]],
'NextNotDelivered' : [ 0x10, ['pointer', ['void']]],
'RealtimeConnectContext' : [ 0x14, ['pointer', ['void']]],
'DisconnectEvent' : [ 0x18, ['pointer', ['_KEVENT']]],
'DataAvailableEvent' : [ 0x1c, ['pointer', ['_KEVENT']]],
'UserBufferCount' : [ 0x20, ['pointer', ['unsigned long']]],
'UserBufferListHead' : [ 0x24, ['pointer', ['_SINGLE_LIST_ENTRY']]],
'BuffersLost' : [ 0x28, ['unsigned long']],
'EmptyBuffersCount' : [ 0x2c, ['unsigned long']],
'LoggerId' : [ 0x30, ['unsigned long']],
'ShutDownRequested' : [ 0x34, ['unsigned char']],
'NewBuffersLost' : [ 0x35, ['unsigned char']],
'Disconnected' : [ 0x36, ['unsigned char']],
'ReservedBufferSpaceBitMap' : [ 0x38, ['_RTL_BITMAP']],
'ReservedBufferSpace' : [ 0x40, ['pointer', ['unsigned char']]],
'ReservedBufferSpaceSize' : [ 0x44, ['unsigned long']],
'UserPagesAllocated' : [ 0x48, ['unsigned long']],
'UserPagesReused' : [ 0x4c, ['unsigned long']],
} ],
'__unnamed_1c68' : [ 0x4, {
'BaseMid' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHi' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_1c6e' : [ 0x4, {
'BaseMid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Pres' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHi' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'Sys' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'Reserved_0' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Default_Big' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHi' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1c70' : [ 0x4, {
'Bytes' : [ 0x0, ['__unnamed_1c68']],
'Bits' : [ 0x0, ['__unnamed_1c6e']],
} ],
'_KGDTENTRY' : [ 0x8, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'HighWord' : [ 0x4, ['__unnamed_1c70']],
} ],
'_POOL_DESCRIPTOR' : [ 0x1140, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PagedLock' : [ 0x4, ['_KGUARDED_MUTEX']],
'NonPagedLock' : [ 0x4, ['unsigned long']],
'RunningAllocs' : [ 0x40, ['long']],
'RunningDeAllocs' : [ 0x44, ['long']],
'TotalBigPages' : [ 0x48, ['long']],
'ThreadsProcessingDeferrals' : [ 0x4c, ['long']],
'TotalBytes' : [ 0x50, ['unsigned long']],
'PoolIndex' : [ 0x80, ['unsigned long']],
'TotalPages' : [ 0xc0, ['long']],
'PendingFrees' : [ 0x100, ['pointer', ['pointer', ['void']]]],
'PendingFreeDepth' : [ 0x104, ['long']],
'ListHeads' : [ 0x140, ['array', 512, ['_LIST_ENTRY']]],
} ],
'_KGATE' : [ 0x10, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_WHEA_ERROR_RECORD_HEADER' : [ 0x80, {
'Signature' : [ 0x0, ['unsigned long']],
'Revision' : [ 0x4, ['_WHEA_REVISION']],
'SignatureEnd' : [ 0x6, ['unsigned long']],
'SectionCount' : [ 0xa, ['unsigned short']],
'Severity' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'WheaErrSevRecoverable', 1: 'WheaErrSevFatal', 2: 'WheaErrSevCorrected', 3: 'WheaErrSevInformational'})]],
'ValidBits' : [ 0x10, ['_WHEA_ERROR_RECORD_HEADER_VALIDBITS']],
'Length' : [ 0x14, ['unsigned long']],
'Timestamp' : [ 0x18, ['_WHEA_TIMESTAMP']],
'PlatformId' : [ 0x20, ['_GUID']],
'PartitionId' : [ 0x30, ['_GUID']],
'CreatorId' : [ 0x40, ['_GUID']],
'NotifyType' : [ 0x50, ['_GUID']],
'RecordId' : [ 0x60, ['unsigned long long']],
'Flags' : [ 0x68, ['_WHEA_ERROR_RECORD_HEADER_FLAGS']],
'PersistenceInfo' : [ 0x6c, ['_WHEA_PERSISTENCE_INFO']],
'Reserved' : [ 0x74, ['array', 12, ['unsigned char']]],
} ],
'_ALPC_PROCESS_CONTEXT' : [ 0x10, {
'Lock' : [ 0x0, ['_EX_PUSH_LOCK']],
'ViewListHead' : [ 0x4, ['_LIST_ENTRY']],
'PagedPoolQuotaCache' : [ 0xc, ['unsigned long']],
} ],
'_DRIVER_EXTENSION' : [ 0x1c, {
'DriverObject' : [ 0x0, ['pointer', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x4, ['pointer', ['void']]],
'Count' : [ 0x8, ['unsigned long']],
'ServiceKeyName' : [ 0xc, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x14, ['pointer', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x18, ['pointer', ['_FS_FILTER_CALLBACKS']]],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x2c, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x8, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x10, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x14, ['pointer', ['_CM_KEY_BODY']]],
'Filter' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x18, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x18, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x1c, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
'_KINTERRUPT' : [ 0x278, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x4, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0xc, ['pointer', ['void']]],
'MessageServiceRoutine' : [ 0x10, ['pointer', ['void']]],
'MessageIndex' : [ 0x14, ['unsigned long']],
'ServiceContext' : [ 0x18, ['pointer', ['void']]],
'SpinLock' : [ 0x1c, ['unsigned long']],
'TickCount' : [ 0x20, ['unsigned long']],
'ActualLock' : [ 0x24, ['pointer', ['unsigned long']]],
'DispatchAddress' : [ 0x28, ['pointer', ['void']]],
'Vector' : [ 0x2c, ['unsigned long']],
'Irql' : [ 0x30, ['unsigned char']],
'SynchronizeIrql' : [ 0x31, ['unsigned char']],
'FloatingSave' : [ 0x32, ['unsigned char']],
'Connected' : [ 0x33, ['unsigned char']],
'Number' : [ 0x34, ['unsigned long']],
'ShareVector' : [ 0x38, ['unsigned char']],
'Pad' : [ 0x39, ['array', 3, ['unsigned char']]],
'Mode' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'Polarity' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'InterruptPolarityUnknown', 1: 'InterruptActiveHigh', 2: 'InterruptActiveLow'})]],
'ServiceCount' : [ 0x44, ['unsigned long']],
'DispatchCount' : [ 0x48, ['unsigned long']],
'Rsvd1' : [ 0x50, ['unsigned long long']],
'DispatchCode' : [ 0x58, ['array', 135, ['unsigned long']]],
} ],
'_HANDLE_TABLE_ENTRY' : [ 0x8, {
'Object' : [ 0x0, ['pointer', ['void']]],
'ObAttributes' : [ 0x0, ['unsigned long']],
'InfoTable' : [ 0x0, ['pointer', ['_HANDLE_TABLE_ENTRY_INFO']]],
'Value' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
'GrantedAccessIndex' : [ 0x4, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x6, ['unsigned short']],
'NextFreeTableEntry' : [ 0x4, ['unsigned long']],
} ],
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_AUTHZBASEP_SECURITY_ATTRIBUTES_INFORMATION' : [ 0x18, {
'SecurityAttributeCount' : [ 0x0, ['unsigned long']],
'SecurityAttributesList' : [ 0x4, ['_LIST_ENTRY']],
'WorkingSecurityAttributeCount' : [ 0xc, ['unsigned long']],
'WorkingSecurityAttributesList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_STRING64' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['unsigned long long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x58, {
'FileName' : [ 0x0, ['pointer', ['unsigned short']]],
'BaseName' : [ 0x4, ['pointer', ['unsigned short']]],
'RegRootName' : [ 0x8, ['pointer', ['unsigned short']]],
'CmHive' : [ 0xc, ['pointer', ['_CMHIVE']]],
'HHiveFlags' : [ 0x10, ['unsigned long']],
'CmHiveFlags' : [ 0x14, ['unsigned long']],
'CmKcbCacheSize' : [ 0x18, ['unsigned long']],
'CmHive2' : [ 0x1c, ['pointer', ['_CMHIVE']]],
'HiveMounted' : [ 0x20, ['unsigned char']],
'ThreadFinished' : [ 0x21, ['unsigned char']],
'ThreadStarted' : [ 0x22, ['unsigned char']],
'Allocate' : [ 0x23, ['unsigned char']],
'WinPERequired' : [ 0x24, ['unsigned char']],
'StartEvent' : [ 0x28, ['_KEVENT']],
'FinishedEvent' : [ 0x38, ['_KEVENT']],
'MountLock' : [ 0x48, ['_KEVENT']],
} ],
'_CONTEXT' : [ 0x2cc, {
'ContextFlags' : [ 0x0, ['unsigned long']],
'Dr0' : [ 0x4, ['unsigned long']],
'Dr1' : [ 0x8, ['unsigned long']],
'Dr2' : [ 0xc, ['unsigned long']],
'Dr3' : [ 0x10, ['unsigned long']],
'Dr6' : [ 0x14, ['unsigned long']],
'Dr7' : [ 0x18, ['unsigned long']],
'FloatSave' : [ 0x1c, ['_FLOATING_SAVE_AREA']],
'SegGs' : [ 0x8c, ['unsigned long']],
'SegFs' : [ 0x90, ['unsigned long']],
'SegEs' : [ 0x94, ['unsigned long']],
'SegDs' : [ 0x98, ['unsigned long']],
'Edi' : [ 0x9c, ['unsigned long']],
'Esi' : [ 0xa0, ['unsigned long']],
'Ebx' : [ 0xa4, ['unsigned long']],
'Edx' : [ 0xa8, ['unsigned long']],
'Ecx' : [ 0xac, ['unsigned long']],
'Eax' : [ 0xb0, ['unsigned long']],
'Ebp' : [ 0xb4, ['unsigned long']],
'Eip' : [ 0xb8, ['unsigned long']],
'SegCs' : [ 0xbc, ['unsigned long']],
'EFlags' : [ 0xc0, ['unsigned long']],
'Esp' : [ 0xc4, ['unsigned long']],
'SegSs' : [ 0xc8, ['unsigned long']],
'ExtendedRegisters' : [ 0xcc, ['array', 512, ['unsigned char']]],
} ],
'_ALPC_HANDLE_TABLE' : [ 0x10, {
'Handles' : [ 0x0, ['pointer', ['_ALPC_HANDLE_ENTRY']]],
'TotalHandles' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'Lock' : [ 0xc, ['_EX_PUSH_LOCK']],
} ],
'_MMPTE_HARDWARE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Dirty1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_IO_COMPLETION_CONTEXT' : [ 0x8, {
'Port' : [ 0x0, ['pointer', ['void']]],
'Key' : [ 0x4, ['pointer', ['void']]],
} ],
'_IOV_FORCED_PENDING_TRACE' : [ 0x100, {
'Irp' : [ 0x0, ['pointer', ['_IRP']]],
'Thread' : [ 0x4, ['pointer', ['_ETHREAD']]],
'StackTrace' : [ 0x8, ['array', 62, ['pointer', ['void']]]],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x10, {
'VirtualAddress' : [ 0x0, ['pointer', ['void']]],
'CallingAddress' : [ 0x4, ['pointer', ['void']]],
'NumberOfBytes' : [ 0x8, ['unsigned long']],
'Tag' : [ 0xc, ['unsigned long']],
} ],
'_ALPC_COMPLETION_LIST' : [ 0x54, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'OwnerProcess' : [ 0x8, ['pointer', ['_EPROCESS']]],
'Mdl' : [ 0xc, ['pointer', ['_MDL']]],
'UserVa' : [ 0x10, ['pointer', ['void']]],
'UserLimit' : [ 0x14, ['pointer', ['void']]],
'DataUserVa' : [ 0x18, ['pointer', ['void']]],
'SystemVa' : [ 0x1c, ['pointer', ['void']]],
'TotalSize' : [ 0x20, ['unsigned long']],
'Header' : [ 0x24, ['pointer', ['_ALPC_COMPLETION_LIST_HEADER']]],
'List' : [ 0x28, ['pointer', ['void']]],
'ListSize' : [ 0x2c, ['unsigned long']],
'Bitmap' : [ 0x30, ['pointer', ['void']]],
'BitmapSize' : [ 0x34, ['unsigned long']],
'Data' : [ 0x38, ['pointer', ['void']]],
'DataSize' : [ 0x3c, ['unsigned long']],
'BitmapLimit' : [ 0x40, ['unsigned long']],
'BitmapNextHint' : [ 0x44, ['unsigned long']],
'ConcurrencyCount' : [ 0x48, ['unsigned long']],
'AttributeFlags' : [ 0x4c, ['unsigned long']],
'AttributeSize' : [ 0x50, ['unsigned long']],
} ],
'_INTERFACE' : [ 0x10, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_LAZY_WRITER' : [ 0x50, {
'ScanDpc' : [ 0x0, ['_KDPC']],
'ScanTimer' : [ 0x20, ['_KTIMER']],
'ScanActive' : [ 0x48, ['unsigned char']],
'OtherWork' : [ 0x49, ['unsigned char']],
'PendingTeardownScan' : [ 0x4a, ['unsigned char']],
'PendingPeriodicScan' : [ 0x4b, ['unsigned char']],
'PendingLowMemoryScan' : [ 0x4c, ['unsigned char']],
'PendingPowerScan' : [ 0x4d, ['unsigned char']],
} ],
'_PI_BUS_EXTENSION' : [ 0x44, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned char']],
'ReadDataPort' : [ 0x8, ['pointer', ['unsigned char']]],
'DataPortMapped' : [ 0xc, ['unsigned char']],
'AddressPort' : [ 0x10, ['pointer', ['unsigned char']]],
'AddrPortMapped' : [ 0x14, ['unsigned char']],
'CommandPort' : [ 0x18, ['pointer', ['unsigned char']]],
'CmdPortMapped' : [ 0x1c, ['unsigned char']],
'NextSlotNumber' : [ 0x20, ['unsigned long']],
'DeviceList' : [ 0x24, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x28, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x2c, ['pointer', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x30, ['pointer', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x34, ['pointer', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x38, ['unsigned long']],
'SystemPowerState' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x4, ['pointer', ['void']]],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x8, {
'Sid' : [ 0x0, ['pointer', ['void']]],
'Attributes' : [ 0x4, ['unsigned long']],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_IO_WORKITEM' : [ 0x20, {
'WorkItem' : [ 0x0, ['_WORK_QUEUE_ITEM']],
'Routine' : [ 0x10, ['pointer', ['void']]],
'IoObject' : [ 0x14, ['pointer', ['void']]],
'Context' : [ 0x18, ['pointer', ['void']]],
'Type' : [ 0x1c, ['unsigned long']],
} ],
'_CM_RM' : [ 0x58, {
'RmListEntry' : [ 0x0, ['_LIST_ENTRY']],
'TransactionListHead' : [ 0x8, ['_LIST_ENTRY']],
'TmHandle' : [ 0x10, ['pointer', ['void']]],
'Tm' : [ 0x14, ['pointer', ['void']]],
'RmHandle' : [ 0x18, ['pointer', ['void']]],
'KtmRm' : [ 0x1c, ['pointer', ['void']]],
'RefCount' : [ 0x20, ['unsigned long']],
'ContainerNum' : [ 0x24, ['unsigned long']],
'ContainerSize' : [ 0x28, ['unsigned long long']],
'CmHive' : [ 0x30, ['pointer', ['_CMHIVE']]],
'LogFileObject' : [ 0x34, ['pointer', ['void']]],
'MarshallingContext' : [ 0x38, ['pointer', ['void']]],
'RmFlags' : [ 0x3c, ['unsigned long']],
'LogStartStatus1' : [ 0x40, ['long']],
'LogStartStatus2' : [ 0x44, ['long']],
'BaseLsn' : [ 0x48, ['unsigned long long']],
'RmLock' : [ 0x50, ['pointer', ['_ERESOURCE']]],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_MMVAD_FLAGS' : [ 0x4, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 19, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'VadType' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 23, native_type='unsigned long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 29, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 31, native_type='unsigned long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMWSLE_HASH' : [ 0x4, {
'Index' : [ 0x0, ['unsigned long']],
} ],
'_STRING32' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x18, {
'AllocAddress' : [ 0x0, ['unsigned long']],
'AllocTag' : [ 0x4, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x8, ['unsigned long']],
'ReAllocTag' : [ 0xc, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x10, ['unsigned long']],
'FreeTag' : [ 0x14, ['_HEAP_STOP_ON_TAG']],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0xc, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long']],
} ],
'_CALL_HASH_ENTRY' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x8, ['pointer', ['void']]],
'CallersCaller' : [ 0xc, ['pointer', ['void']]],
'CallCount' : [ 0x10, ['unsigned long']],
} ],
'_VF_TRACKER_STAMP' : [ 0x8, {
'Thread' : [ 0x0, ['pointer', ['void']]],
'Flags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'OldIrql' : [ 0x5, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'NewIrql' : [ 0x6, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'Processor' : [ 0x7, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
} ],
'_VI_TRACK_IRQL' : [ 0x20, {
'Thread' : [ 0x0, ['pointer', ['void']]],
'OldIrql' : [ 0x4, ['unsigned char']],
'NewIrql' : [ 0x5, ['unsigned char']],
'Processor' : [ 0x6, ['unsigned short']],
'TickCount' : [ 0x8, ['unsigned long']],
'StackTrace' : [ 0xc, ['array', 5, ['pointer', ['void']]]],
} ],
'_PNP_DEVICE_EVENT_ENTRY' : [ 0x64, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x8, ['unsigned long']],
'CallerEvent' : [ 0xc, ['pointer', ['_KEVENT']]],
'Callback' : [ 0x10, ['pointer', ['void']]],
'Context' : [ 0x14, ['pointer', ['void']]],
'VetoType' : [ 0x18, ['pointer', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x1c, ['pointer', ['_UNICODE_STRING']]],
'Data' : [ 0x20, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x8, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x4, ['pointer', ['unsigned char']]],
} ],
'_NLS_DATA_BLOCK' : [ 0xc, {
'AnsiCodePageData' : [ 0x0, ['pointer', ['void']]],
'OemCodePageData' : [ 0x4, ['pointer', ['void']]],
'UnicodeCaseTableData' : [ 0x8, ['pointer', ['void']]],
} ],
'_ALIGNED_AFFINITY_SUMMARY' : [ 0x40, {
'CpuSet' : [ 0x0, ['_KAFFINITY_EX']],
'SMTSet' : [ 0xc, ['_KAFFINITY_EX']],
} ],
'_XSTATE_CONFIGURATION' : [ 0x210, {
'EnabledFeatures' : [ 0x0, ['unsigned long long']],
'Size' : [ 0x8, ['unsigned long']],
'OptimizedSave' : [ 0xc, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Features' : [ 0x10, ['array', 64, ['_XSTATE_FEATURE']]],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x2c, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'RealRefCount' : [ 0x14, ['unsigned long']],
'Descriptor' : [ 0x18, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_MMPTE_SOFTWARE' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_NT_TIB32' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['unsigned long']],
'StackBase' : [ 0x4, ['unsigned long']],
'StackLimit' : [ 0x8, ['unsigned long']],
'SubSystemTib' : [ 0xc, ['unsigned long']],
'FiberData' : [ 0x10, ['unsigned long']],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['unsigned long']],
'Self' : [ 0x18, ['unsigned long']],
} ],
'_CM_RESOURCE_LIST' : [ 0x24, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x1c, {
'Key' : [ 0x0, ['long']],
'NonPagedAllocs' : [ 0x4, ['long']],
'NonPagedFrees' : [ 0x8, ['long']],
'NonPagedBytes' : [ 0xc, ['unsigned long']],
'PagedAllocs' : [ 0x10, ['unsigned long']],
'PagedFrees' : [ 0x14, ['unsigned long']],
'PagedBytes' : [ 0x18, ['unsigned long']],
} ],
'_MM_SUBSECTION_AVL_TABLE' : [ 0x20, {
'BalancedRoot' : [ 0x0, ['_MMSUBSECTION_NODE']],
'DepthOfTree' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long')]],
'Unused' : [ 0x18, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long')]],
'NumberGenericTableElements' : [ 0x18, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
'NodeHint' : [ 0x1c, ['pointer', ['void']]],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x20, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'Vmcs', 17: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_FLAGS' : [ 0x4, {
'Primary' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ContainmentWarning' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reset' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ThresholdExceeded' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'ResourceNotAvailable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'LatentError' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_WMI_BUFFER_HEADER' : [ 0x48, {
'BufferSize' : [ 0x0, ['unsigned long']],
'SavedOffset' : [ 0x4, ['unsigned long']],
'CurrentOffset' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'SequenceNumber' : [ 0x18, ['long long']],
'Padding0' : [ 0x20, ['array', 2, ['unsigned long']]],
'SlistEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'NextBuffer' : [ 0x20, ['pointer', ['_WMI_BUFFER_HEADER']]],
'ClientContext' : [ 0x28, ['_ETW_BUFFER_CONTEXT']],
'State' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'EtwBufferStateFree', 1: 'EtwBufferStateGeneralLogging', 2: 'EtwBufferStateCSwitch', 3: 'EtwBufferStateFlush', 4: 'EtwBufferStateMaximum'})]],
'Offset' : [ 0x30, ['unsigned long']],
'BufferFlag' : [ 0x34, ['unsigned short']],
'BufferType' : [ 0x36, ['unsigned short']],
'Padding1' : [ 0x38, ['array', 4, ['unsigned long']]],
'ReferenceTime' : [ 0x38, ['_ETW_REF_CLOCK']],
'GlobalEntry' : [ 0x38, ['_LIST_ENTRY']],
'Pointer0' : [ 0x38, ['pointer', ['void']]],
'Pointer1' : [ 0x3c, ['pointer', ['void']]],
} ],
'_NT_TIB64' : [ 0x38, {
'ExceptionList' : [ 0x0, ['unsigned long long']],
'StackBase' : [ 0x8, ['unsigned long long']],
'StackLimit' : [ 0x10, ['unsigned long long']],
'SubSystemTib' : [ 0x18, ['unsigned long long']],
'FiberData' : [ 0x20, ['unsigned long long']],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['unsigned long long']],
'Self' : [ 0x30, ['unsigned long long']],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_PROCESSOR_POWER_STATE' : [ 0xc8, {
'IdleStates' : [ 0x0, ['pointer', ['_PPM_IDLE_STATES']]],
'IdleTimeLast' : [ 0x8, ['unsigned long long']],
'IdleTimeTotal' : [ 0x10, ['unsigned long long']],
'IdleTimeEntry' : [ 0x18, ['unsigned long long']],
'IdleAccounting' : [ 0x20, ['pointer', ['_PROC_IDLE_ACCOUNTING']]],
'Hypervisor' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'ProcHypervisorNone', 1: 'ProcHypervisorPresent', 2: 'ProcHypervisorPower'})]],
'PerfHistoryTotal' : [ 0x28, ['unsigned long']],
'ThermalConstraint' : [ 0x2c, ['unsigned char']],
'PerfHistoryCount' : [ 0x2d, ['unsigned char']],
'PerfHistorySlot' : [ 0x2e, ['unsigned char']],
'Reserved' : [ 0x2f, ['unsigned char']],
'LastSysTime' : [ 0x30, ['unsigned long']],
'WmiDispatchPtr' : [ 0x34, ['unsigned long']],
'WmiInterfaceEnabled' : [ 0x38, ['long']],
'FFHThrottleStateInfo' : [ 0x40, ['_PPM_FFH_THROTTLE_STATE_INFO']],
'PerfActionDpc' : [ 0x60, ['_KDPC']],
'PerfActionMask' : [ 0x80, ['long']],
'IdleCheck' : [ 0x88, ['_PROC_IDLE_SNAP']],
'PerfCheck' : [ 0x98, ['_PROC_IDLE_SNAP']],
'Domain' : [ 0xa8, ['pointer', ['_PROC_PERF_DOMAIN']]],
'PerfConstraint' : [ 0xac, ['pointer', ['_PROC_PERF_CONSTRAINT']]],
'Load' : [ 0xb0, ['pointer', ['_PROC_PERF_LOAD']]],
'PerfHistory' : [ 0xb4, ['pointer', ['_PROC_HISTORY_ENTRY']]],
'Utility' : [ 0xb8, ['unsigned long']],
'OverUtilizedHistory' : [ 0xbc, ['unsigned long']],
'AffinityCount' : [ 0xc0, ['unsigned long']],
'AffinityHistory' : [ 0xc4, ['unsigned long']],
} ],
'_OBJECT_REF_STACK_INFO' : [ 0xc, {
'Sequence' : [ 0x0, ['unsigned long']],
'Index' : [ 0x4, ['unsigned short']],
'NumTraces' : [ 0x6, ['unsigned short']],
'Tag' : [ 0x8, ['unsigned long']],
} ],
'_PPC_DBGKD_CONTROL_SET' : [ 0xc, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long']],
'CurrentSymbolEnd' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x2, {
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Modified' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Priority' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'Rom' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'InPageError' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'KernelStack' : [ 0x1, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'RemovalRequested' : [ 0x1, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ParityError' : [ 0x1, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'_SEGMENT_OBJECT' : [ 0x28, {
'BaseAddress' : [ 0x0, ['pointer', ['void']]],
'TotalNumberOfPtes' : [ 0x4, ['unsigned long']],
'SizeOfSegment' : [ 0x8, ['_LARGE_INTEGER']],
'NonExtendedPtes' : [ 0x10, ['unsigned long']],
'ImageCommitment' : [ 0x14, ['unsigned long']],
'ControlArea' : [ 0x18, ['pointer', ['_CONTROL_AREA']]],
'Subsection' : [ 0x1c, ['pointer', ['_SUBSECTION']]],
'MmSectionFlags' : [ 0x20, ['pointer', ['_MMSECTION_FLAGS']]],
'MmSubSectionFlags' : [ 0x24, ['pointer', ['_MMSUBSECTION_FLAGS']]],
} ],
'_PCW_CALLBACK_INFORMATION' : [ 0x20, {
'AddCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'RemoveCounter' : [ 0x0, ['_PCW_COUNTER_INFORMATION']],
'EnumerateInstances' : [ 0x0, ['_PCW_MASK_INFORMATION']],
'CollectData' : [ 0x0, ['_PCW_MASK_INFORMATION']],
} ],
'_KTSS' : [ 0x20ac, {
'Backlink' : [ 0x0, ['unsigned short']],
'Reserved0' : [ 0x2, ['unsigned short']],
'Esp0' : [ 0x4, ['unsigned long']],
'Ss0' : [ 0x8, ['unsigned short']],
'Reserved1' : [ 0xa, ['unsigned short']],
'NotUsed1' : [ 0xc, ['array', 4, ['unsigned long']]],
'CR3' : [ 0x1c, ['unsigned long']],
'Eip' : [ 0x20, ['unsigned long']],
'EFlags' : [ 0x24, ['unsigned long']],
'Eax' : [ 0x28, ['unsigned long']],
'Ecx' : [ 0x2c, ['unsigned long']],
'Edx' : [ 0x30, ['unsigned long']],
'Ebx' : [ 0x34, ['unsigned long']],
'Esp' : [ 0x38, ['unsigned long']],
'Ebp' : [ 0x3c, ['unsigned long']],
'Esi' : [ 0x40, ['unsigned long']],
'Edi' : [ 0x44, ['unsigned long']],
'Es' : [ 0x48, ['unsigned short']],
'Reserved2' : [ 0x4a, ['unsigned short']],
'Cs' : [ 0x4c, ['unsigned short']],
'Reserved3' : [ 0x4e, ['unsigned short']],
'Ss' : [ 0x50, ['unsigned short']],
'Reserved4' : [ 0x52, ['unsigned short']],
'Ds' : [ 0x54, ['unsigned short']],
'Reserved5' : [ 0x56, ['unsigned short']],
'Fs' : [ 0x58, ['unsigned short']],
'Reserved6' : [ 0x5a, ['unsigned short']],
'Gs' : [ 0x5c, ['unsigned short']],
'Reserved7' : [ 0x5e, ['unsigned short']],
'LDT' : [ 0x60, ['unsigned short']],
'Reserved8' : [ 0x62, ['unsigned short']],
'Flags' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
'IoMaps' : [ 0x68, ['array', 1, ['_KiIoAccessMap']]],
'IntDirectionMap' : [ 0x208c, ['array', 32, ['unsigned char']]],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_KIDTENTRY' : [ 0x8, {
'Offset' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'Access' : [ 0x4, ['unsigned short']],
'ExtendedOffset' : [ 0x6, ['unsigned short']],
} ],
'DOCK_INTERFACE' : [ 0x18, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
'ProfileDepartureSetMode' : [ 0x10, ['pointer', ['void']]],
'ProfileDepartureUpdate' : [ 0x14, ['pointer', ['void']]],
} ],
'CMP_OFFSET_ARRAY' : [ 0xc, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x4, ['pointer', ['void']]],
'DataLength' : [ 0x8, ['unsigned long']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'WorkingSetType' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'ModwriterAttached' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'TrimHard' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaximumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'ForceTrim' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'MinimumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'SessionMaster' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'TrimmerState' : [ 0x1, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'Reserved' : [ 0x1, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PageStealers' : [ 0x1, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned char')]],
'MemoryPriority' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'WsleDeleted' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'VmExiting' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ExpansionFailed' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Available' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned char')]],
} ],
'_IMAGE_OPTIONAL_HEADER' : [ 0xe0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'ImageBase' : [ 0x1c, ['unsigned long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long']],
'SizeOfStackCommit' : [ 0x4c, ['unsigned long']],
'SizeOfHeapReserve' : [ 0x50, ['unsigned long']],
'SizeOfHeapCommit' : [ 0x54, ['unsigned long']],
'LoaderFlags' : [ 0x58, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x5c, ['unsigned long']],
'DataDirectory' : [ 0x60, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_ALPC_COMPLETION_PACKET_LOOKASIDE' : [ 0x30, {
'Lock' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
'ActiveCount' : [ 0x8, ['unsigned long']],
'PendingNullCount' : [ 0xc, ['unsigned long']],
'PendingCheckCompletionListCount' : [ 0x10, ['unsigned long']],
'PendingDelete' : [ 0x14, ['unsigned long']],
'FreeListHead' : [ 0x18, ['_SINGLE_LIST_ENTRY']],
'CompletionPort' : [ 0x1c, ['pointer', ['void']]],
'CompletionKey' : [ 0x20, ['pointer', ['void']]],
'Entry' : [ 0x24, ['array', 1, ['_ALPC_COMPLETION_PACKET_LOOKASIDE_ENTRY']]],
} ],
'_TERMINATION_PORT' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['_TERMINATION_PORT']]],
'Port' : [ 0x4, ['pointer', ['void']]],
} ],
'_MEMORY_ALLOCATION_DESCRIPTOR' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'MemoryType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'LoaderExceptionBlock', 1: 'LoaderSystemBlock', 2: 'LoaderFree', 3: 'LoaderBad', 4: 'LoaderLoadedProgram', 5: 'LoaderFirmwareTemporary', 6: 'LoaderFirmwarePermanent', 7: 'LoaderOsloaderHeap', 8: 'LoaderOsloaderStack', 9: 'LoaderSystemCode', 10: 'LoaderHalCode', 11: 'LoaderBootDriver', 12: 'LoaderConsoleInDriver', 13: 'LoaderConsoleOutDriver', 14: 'LoaderStartupDpcStack', 15: 'LoaderStartupKernelStack', 16: 'LoaderStartupPanicStack', 17: 'LoaderStartupPcrPage', 18: 'LoaderStartupPdrPage', 19: 'LoaderRegistryData', 20: 'LoaderMemoryData', 21: 'LoaderNlsData', 22: 'LoaderSpecialMemory', 23: 'LoaderBBTMemory', 24: 'LoaderReserve', 25: 'LoaderXIPRom', 26: 'LoaderHALCachedMemory', 27: 'LoaderLargePageFiller', 28: 'LoaderErrorLogMemory', 29: 'LoaderMaximum'})]],
'BasePage' : [ 0xc, ['unsigned long']],
'PageCount' : [ 0x10, ['unsigned long']],
} ],
'_CM_INTENT_LOCK' : [ 0x8, {
'OwnerCount' : [ 0x0, ['unsigned long']],
'OwnerTable' : [ 0x4, ['pointer', ['pointer', ['_CM_KCB_UOW']]]],
} ],
'_PROC_IDLE_ACCOUNTING' : [ 0x2c0, {
'StateCount' : [ 0x0, ['unsigned long']],
'TotalTransitions' : [ 0x4, ['unsigned long']],
'ResetCount' : [ 0x8, ['unsigned long']],
'StartTime' : [ 0x10, ['unsigned long long']],
'BucketLimits' : [ 0x18, ['array', 16, ['unsigned long long']]],
'State' : [ 0x98, ['array', 1, ['_PROC_IDLE_STATE_ACCOUNTING']]],
} ],
'_THERMAL_INFORMATION' : [ 0x4c, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0xc, ['unsigned long']],
'SamplingPeriod' : [ 0x10, ['unsigned long']],
'CurrentTemperature' : [ 0x14, ['unsigned long']],
'PassiveTripPoint' : [ 0x18, ['unsigned long']],
'CriticalTripPoint' : [ 0x1c, ['unsigned long']],
'ActiveTripPointCount' : [ 0x20, ['unsigned char']],
'ActiveTripPoint' : [ 0x24, ['array', 10, ['unsigned long']]],
} ],
'_MAPPED_FILE_SEGMENT' : [ 0x20, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x4, ['unsigned long']],
'SegmentFlags' : [ 0x8, ['_SEGMENT_FLAGS']],
'NumberOfCommittedPages' : [ 0xc, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['unsigned long long']],
'ExtendInfo' : [ 0x18, ['pointer', ['_MMEXTEND_INFO']]],
'BasedAddress' : [ 0x18, ['pointer', ['void']]],
'SegmentLock' : [ 0x1c, ['_EX_PUSH_LOCK']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0x84, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long']],
'NonPagedBytes' : [ 0x54, ['unsigned long']],
'PeakPagedBytes' : [ 0x58, ['unsigned long']],
'PeakNonPagedBytes' : [ 0x5c, ['unsigned long']],
'BurstAllocationsFailedDeliberately' : [ 0x60, ['unsigned long']],
'SessionTrims' : [ 0x64, ['unsigned long']],
'OptionChanges' : [ 0x68, ['unsigned long']],
'VerifyMode' : [ 0x6c, ['unsigned long']],
'PreviousBucketName' : [ 0x70, ['_UNICODE_STRING']],
'ActivityCounter' : [ 0x78, ['unsigned long']],
'PreviousActivityCounter' : [ 0x7c, ['unsigned long']],
'WorkerTrimRequests' : [ 0x80, ['unsigned long']],
} ],
'_VI_FAULT_TRACE' : [ 0x24, {
'Thread' : [ 0x0, ['pointer', ['_ETHREAD']]],
'StackTrace' : [ 0x4, ['array', 8, ['pointer', ['void']]]],
} ],
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
'_OBJECT_HANDLE_COUNT_DATABASE' : [ 0xc, {
'CountEntries' : [ 0x0, ['unsigned long']],
'HandleCountEntries' : [ 0x4, ['array', 1, ['_OBJECT_HANDLE_COUNT_ENTRY']]],
} ],
'_OWNER_ENTRY' : [ 0x8, {
'OwnerThread' : [ 0x0, ['unsigned long']],
'IoPriorityBoosted' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OwnerReferenced' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'OwnerCount' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
'TableSize' : [ 0x4, ['unsigned long']],
} ],
'_MI_SECTION_CREATION_GATE' : [ 0x14, {
'Next' : [ 0x0, ['pointer', ['_MI_SECTION_CREATION_GATE']]],
'Gate' : [ 0x4, ['_KGATE']],
} ],
'_ETIMER' : [ 0x98, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'TimerApc' : [ 0x28, ['_KAPC']],
'TimerDpc' : [ 0x58, ['_KDPC']],
'ActiveTimerListEntry' : [ 0x78, ['_LIST_ENTRY']],
'Lock' : [ 0x80, ['unsigned long']],
'Period' : [ 0x84, ['long']],
'ApcAssociated' : [ 0x88, ['unsigned char']],
'WakeReason' : [ 0x8c, ['pointer', ['_DIAGNOSTIC_CONTEXT']]],
'WakeTimerListEntry' : [ 0x90, ['_LIST_ENTRY']],
} ],
'_FREE_DISPLAY' : [ 0xc, {
'RealVectorSize' : [ 0x0, ['unsigned long']],
'Display' : [ 0x4, ['_RTL_BITMAP']],
} ],
'_POOL_BLOCK_HEAD' : [ 0x10, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'List' : [ 0x8, ['_LIST_ENTRY']],
} ],
'__unnamed_1dc5' : [ 0x4, {
'Flags' : [ 0x0, ['_MMSECURE_FLAGS']],
'StartVa' : [ 0x0, ['pointer', ['void']]],
} ],
'_MMADDRESS_LIST' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_1dc5']],
'EndVa' : [ 0x4, ['pointer', ['void']]],
} ],
'_XSTATE_FEATURE' : [ 0x8, {
'Offset' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_ARBITER_INSTANCE' : [ 0x5ec, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x4, ['pointer', ['_KEVENT']]],
'Name' : [ 0x8, ['pointer', ['unsigned short']]],
'OrderingName' : [ 0xc, ['pointer', ['unsigned short']]],
'ResourceType' : [ 0x10, ['long']],
'Allocation' : [ 0x14, ['pointer', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x18, ['pointer', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x1c, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x24, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x2c, ['long']],
'Interface' : [ 0x30, ['pointer', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x34, ['unsigned long']],
'AllocationStack' : [ 0x38, ['pointer', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x3c, ['pointer', ['void']]],
'PackResource' : [ 0x40, ['pointer', ['void']]],
'UnpackResource' : [ 0x44, ['pointer', ['void']]],
'ScoreRequirement' : [ 0x48, ['pointer', ['void']]],
'TestAllocation' : [ 0x4c, ['pointer', ['void']]],
'RetestAllocation' : [ 0x50, ['pointer', ['void']]],
'CommitAllocation' : [ 0x54, ['pointer', ['void']]],
'RollbackAllocation' : [ 0x58, ['pointer', ['void']]],
'BootAllocation' : [ 0x5c, ['pointer', ['void']]],
'QueryArbitrate' : [ 0x60, ['pointer', ['void']]],
'QueryConflict' : [ 0x64, ['pointer', ['void']]],
'AddReserved' : [ 0x68, ['pointer', ['void']]],
'StartArbiter' : [ 0x6c, ['pointer', ['void']]],
'PreprocessEntry' : [ 0x70, ['pointer', ['void']]],
'AllocateEntry' : [ 0x74, ['pointer', ['void']]],
'GetNextAllocationRange' : [ 0x78, ['pointer', ['void']]],
'FindSuitableRange' : [ 0x7c, ['pointer', ['void']]],
'AddAllocation' : [ 0x80, ['pointer', ['void']]],
'BacktrackAllocation' : [ 0x84, ['pointer', ['void']]],
'OverrideConflict' : [ 0x88, ['pointer', ['void']]],
'InitializeRangeList' : [ 0x8c, ['pointer', ['void']]],
'TransactionInProgress' : [ 0x90, ['unsigned char']],
'TransactionEvent' : [ 0x94, ['pointer', ['_KEVENT']]],
'Extension' : [ 0x98, ['pointer', ['void']]],
'BusDeviceObject' : [ 0x9c, ['pointer', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0xa0, ['pointer', ['void']]],
'ConflictCallback' : [ 0xa4, ['pointer', ['void']]],
'PdoDescriptionString' : [ 0xa8, ['array', 336, ['wchar']]],
'PdoSymbolicNameString' : [ 0x348, ['array', 672, ['unsigned char']]],
'PdoAddressString' : [ 0x5e8, ['array', 1, ['wchar']]],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x10, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x8, ['unsigned long']],
'Inserted' : [ 0xc, ['unsigned char']],
} ],
'__unnamed_1e1e' : [ 0x4, {
'UserData' : [ 0x0, ['unsigned long']],
'Next' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1e20' : [ 0x8, {
'Last' : [ 0x0, ['unsigned long']],
'u' : [ 0x4, ['__unnamed_1e1e']],
} ],
'__unnamed_1e22' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_1e1e']],
} ],
'__unnamed_1e24' : [ 0x8, {
'OldCell' : [ 0x0, ['__unnamed_1e20']],
'NewCell' : [ 0x0, ['__unnamed_1e22']],
} ],
'_HCELL' : [ 0xc, {
'Size' : [ 0x0, ['long']],
'u' : [ 0x4, ['__unnamed_1e24']],
} ],
'_HMAP_TABLE' : [ 0x2000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_PROC_PERF_CONSTRAINT' : [ 0x24, {
'Prcb' : [ 0x0, ['pointer', ['_KPRCB']]],
'PerfContext' : [ 0x4, ['unsigned long']],
'PercentageCap' : [ 0x8, ['unsigned long']],
'ThermalCap' : [ 0xc, ['unsigned long']],
'TargetFrequency' : [ 0x10, ['unsigned long']],
'AcumulatedFullFrequency' : [ 0x14, ['unsigned long']],
'AcumulatedZeroFrequency' : [ 0x18, ['unsigned long']],
'FrequencyHistoryTotal' : [ 0x1c, ['unsigned long']],
'AverageFrequency' : [ 0x20, ['unsigned long']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved1' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 19, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
'_CACHED_KSTACK_LIST' : [ 0x18, {
'SListHead' : [ 0x0, ['_SLIST_HEADER']],
'MinimumFree' : [ 0x8, ['long']],
'Misses' : [ 0xc, ['unsigned long']],
'MissesLast' : [ 0x10, ['unsigned long']],
'Pad0' : [ 0x14, ['unsigned long']],
} ],
'__unnamed_1e37' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e3b' : [ 0x14, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
'AffinityPolicy' : [ 0x8, ['unsigned short']],
'Group' : [ 0xa, ['unsigned short']],
'PriorityPolicy' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'IrqPriorityUndefined', 1: 'IrqPriorityLow', 2: 'IrqPriorityNormal', 3: 'IrqPriorityHigh'})]],
'TargetedProcessors' : [ 0x10, ['unsigned long']],
} ],
'__unnamed_1e3d' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1e3f' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_1e41' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1e43' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1e45' : [ 0x18, {
'Length40' : [ 0x0, ['unsigned long']],
'Alignment40' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e47' : [ 0x18, {
'Length48' : [ 0x0, ['unsigned long']],
'Alignment48' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e49' : [ 0x18, {
'Length64' : [ 0x0, ['unsigned long']],
'Alignment64' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1e4b' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_1e37']],
'Memory' : [ 0x0, ['__unnamed_1e37']],
'Interrupt' : [ 0x0, ['__unnamed_1e3b']],
'Dma' : [ 0x0, ['__unnamed_1e3d']],
'Generic' : [ 0x0, ['__unnamed_1e37']],
'DevicePrivate' : [ 0x0, ['__unnamed_1e3f']],
'BusNumber' : [ 0x0, ['__unnamed_1e41']],
'ConfigData' : [ 0x0, ['__unnamed_1e43']],
'Memory40' : [ 0x0, ['__unnamed_1e45']],
'Memory48' : [ 0x0, ['__unnamed_1e47']],
'Memory64' : [ 0x0, ['__unnamed_1e49']],
} ],
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_1e4b']],
} ],
'_POP_THERMAL_ZONE' : [ 0x150, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'State' : [ 0x8, ['unsigned char']],
'Flags' : [ 0x9, ['unsigned char']],
'Mode' : [ 0xa, ['unsigned char']],
'PendingMode' : [ 0xb, ['unsigned char']],
'ActivePoint' : [ 0xc, ['unsigned char']],
'PendingActivePoint' : [ 0xd, ['unsigned char']],
'Throttle' : [ 0x10, ['long']],
'LastTime' : [ 0x18, ['unsigned long long']],
'SampleRate' : [ 0x20, ['unsigned long']],
'LastTemp' : [ 0x24, ['unsigned long']],
'PassiveTimer' : [ 0x28, ['_KTIMER']],
'PassiveDpc' : [ 0x50, ['_KDPC']],
'OverThrottled' : [ 0x70, ['_POP_ACTION_TRIGGER']],
'Irp' : [ 0x80, ['pointer', ['_IRP']]],
'Info' : [ 0x84, ['_THERMAL_INFORMATION_EX']],
'InfoLastUpdateTime' : [ 0xe0, ['_LARGE_INTEGER']],
'Metrics' : [ 0xe8, ['_POP_THERMAL_ZONE_METRICS']],
} ],
'_MMPTE_LIST' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_VI_POOL_PAGE_HEADER' : [ 0xc, {
'NextPage' : [ 0x0, ['pointer', ['_SINGLE_LIST_ENTRY']]],
'VerifierEntry' : [ 0x4, ['pointer', ['void']]],
'Signature' : [ 0x8, ['unsigned long']],
} ],
'_HANDLE_TRACE_DEBUG_INFO' : [ 0x80, {
'RefCount' : [ 0x0, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
'BitMaskFlags' : [ 0x8, ['unsigned long']],
'CloseCompactionLock' : [ 0xc, ['_FAST_MUTEX']],
'CurrentStackIndex' : [ 0x2c, ['unsigned long']],
'TraceDb' : [ 0x30, ['array', 1, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
'_CM_WORKITEM' : [ 0x14, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Private' : [ 0x8, ['unsigned long']],
'WorkerRoutine' : [ 0xc, ['pointer', ['void']]],
'Parameter' : [ 0x10, ['pointer', ['void']]],
} ],
'_POP_THERMAL_ZONE_METRICS' : [ 0x68, {
'MetricsResource' : [ 0x0, ['_ERESOURCE']],
'ActiveCount' : [ 0x38, ['unsigned long']],
'PassiveCount' : [ 0x3c, ['unsigned long']],
'LastActiveStartTick' : [ 0x40, ['_LARGE_INTEGER']],
'AverageActiveTime' : [ 0x48, ['_LARGE_INTEGER']],
'LastPassiveStartTick' : [ 0x50, ['_LARGE_INTEGER']],
'AveragePassiveTime' : [ 0x58, ['_LARGE_INTEGER']],
'StartTickSinceLastReset' : [ 0x60, ['_LARGE_INTEGER']],
} ],
'_CM_TRANS' : [ 0x68, {
'TransactionListEntry' : [ 0x0, ['_LIST_ENTRY']],
'KCBUoWListHead' : [ 0x8, ['_LIST_ENTRY']],
'LazyCommitListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KtmTrans' : [ 0x18, ['pointer', ['void']]],
'CmRm' : [ 0x1c, ['pointer', ['_CM_RM']]],
'KtmEnlistmentObject' : [ 0x20, ['pointer', ['_KENLISTMENT']]],
'KtmEnlistmentHandle' : [ 0x24, ['pointer', ['void']]],
'KtmUow' : [ 0x28, ['_GUID']],
'StartLsn' : [ 0x38, ['unsigned long long']],
'TransState' : [ 0x40, ['unsigned long']],
'HiveCount' : [ 0x44, ['unsigned long']],
'HiveArray' : [ 0x48, ['array', 7, ['pointer', ['_CMHIVE']]]],
} ],
'_WHEA_ERROR_RECORD_HEADER_VALIDBITS' : [ 0x4, {
'PlatformId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Timestamp' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'PartitionId' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x18, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_RTL_RANGE_LIST' : [ 0x14, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x8, ['unsigned long']],
'Count' : [ 0xc, ['unsigned long']],
'Stamp' : [ 0x10, ['unsigned long']],
} ],
'_OBJECT_CREATE_INFORMATION' : [ 0x2c, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x4, ['pointer', ['void']]],
'ProbeMode' : [ 0x8, ['unsigned char']],
'PagedPoolCharge' : [ 0xc, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x10, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x14, ['unsigned long']],
'SecurityDescriptor' : [ 0x18, ['pointer', ['void']]],
'SecurityQos' : [ 0x1c, ['pointer', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x20, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x20, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x4, ['pointer', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x8, ['_LIST_ENTRY']],
'EntryCount' : [ 0x10, ['unsigned long']],
'ContentionCount' : [ 0x14, ['unsigned long']],
'Flags' : [ 0x18, ['unsigned long']],
'CreatorBackTraceIndexHigh' : [ 0x1c, ['unsigned short']],
'SpareUSHORT' : [ 0x1e, ['unsigned short']],
} ],
'_POOL_HACKER' : [ 0x28, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'Contents' : [ 0x8, ['array', 8, ['unsigned long']]],
} ],
'_PO_DIAG_STACK_RECORD' : [ 0x8, {
'StackDepth' : [ 0x0, ['unsigned long']],
'Stack' : [ 0x4, ['array', 1, ['pointer', ['void']]]],
} ],
'_SECTION_OBJECT_POINTERS' : [ 0xc, {
'DataSectionObject' : [ 0x0, ['pointer', ['void']]],
'SharedCacheMap' : [ 0x4, ['pointer', ['void']]],
'ImageSectionObject' : [ 0x8, ['pointer', ['void']]],
} ],
'_VF_BTS_DATA_MANAGEMENT_AREA' : [ 0x34, {
'BTSBufferBase' : [ 0x0, ['pointer', ['void']]],
'BTSIndex' : [ 0x4, ['pointer', ['void']]],
'BTSMax' : [ 0x8, ['pointer', ['void']]],
'BTSInterruptThreshold' : [ 0xc, ['pointer', ['void']]],
'PEBSBufferBase' : [ 0x10, ['pointer', ['void']]],
'PEBSIndex' : [ 0x14, ['pointer', ['void']]],
'PEBSMax' : [ 0x18, ['pointer', ['void']]],
'PEBSInterruptThreshold' : [ 0x1c, ['pointer', ['void']]],
'PEBSCounterReset' : [ 0x20, ['array', 2, ['pointer', ['void']]]],
'Reserved' : [ 0x28, ['array', 12, ['unsigned char']]],
} ],
'_FLOATING_SAVE_AREA' : [ 0x70, {
'ControlWord' : [ 0x0, ['unsigned long']],
'StatusWord' : [ 0x4, ['unsigned long']],
'TagWord' : [ 0x8, ['unsigned long']],
'ErrorOffset' : [ 0xc, ['unsigned long']],
'ErrorSelector' : [ 0x10, ['unsigned long']],
'DataOffset' : [ 0x14, ['unsigned long']],
'DataSelector' : [ 0x18, ['unsigned long']],
'RegisterArea' : [ 0x1c, ['array', 80, ['unsigned char']]],
'Cr0NpxState' : [ 0x6c, ['unsigned long']],
} ],
'_SEP_AUDIT_POLICY' : [ 0x1c, {
'AdtTokenPolicy' : [ 0x0, ['_TOKEN_AUDIT_POLICY']],
'PolicySetStatus' : [ 0x1b, ['unsigned char']],
} ],
'__unnamed_1e88' : [ 0x4, {
'SnapSharedExportsFailed' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_1e8a' : [ 0xc, {
'AllSharedExportThunks' : [ 0x0, ['_VF_TARGET_ALL_SHARED_EXPORT_THUNKS']],
'Flags' : [ 0x0, ['__unnamed_1e88']],
} ],
'_VF_TARGET_DRIVER' : [ 0x18, {
'TreeNode' : [ 0x0, ['_VF_AVL_TREE_NODE']],
'u1' : [ 0x8, ['__unnamed_1e8a']],
'VerifiedData' : [ 0x14, ['pointer', ['_VF_TARGET_VERIFIED_DRIVER_DATA']]],
} ],
'__unnamed_1e92' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['wchar']]],
} ],
'__unnamed_1e94' : [ 0x2, {
'DeviceIds' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1e96' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1e98' : [ 0x8, {
'NotificationStructure' : [ 0x0, ['pointer', ['void']]],
'DeviceIds' : [ 0x4, ['array', 1, ['wchar']]],
} ],
'__unnamed_1e9a' : [ 0x4, {
'Notification' : [ 0x0, ['pointer', ['void']]],
} ],
'__unnamed_1e9c' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1e9e' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ea0' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_1ea2' : [ 0x2, {
'ParentId' : [ 0x0, ['array', 1, ['wchar']]],
} ],
'__unnamed_1ea4' : [ 0x20, {
'PowerSettingGuid' : [ 0x0, ['_GUID']],
'Flags' : [ 0x10, ['unsigned long']],
'SessionId' : [ 0x14, ['unsigned long']],
'DataLength' : [ 0x18, ['unsigned long']],
'Data' : [ 0x1c, ['array', 1, ['unsigned char']]],
} ],
'__unnamed_1ea6' : [ 0x20, {
'DeviceClass' : [ 0x0, ['__unnamed_1e92']],
'TargetDevice' : [ 0x0, ['__unnamed_1e94']],
'InstallDevice' : [ 0x0, ['__unnamed_1e96']],
'CustomNotification' : [ 0x0, ['__unnamed_1e98']],
'ProfileNotification' : [ 0x0, ['__unnamed_1e9a']],
'PowerNotification' : [ 0x0, ['__unnamed_1e9c']],
'VetoNotification' : [ 0x0, ['__unnamed_1e9e']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_1ea0']],
'InvalidIDNotification' : [ 0x0, ['__unnamed_1ea2']],
'PowerSettingNotification' : [ 0x0, ['__unnamed_1ea4']],
'PropertyChangeNotification' : [ 0x0, ['__unnamed_1e96']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x44, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'VetoEvent', 7: 'BlockedDriverEvent', 8: 'InvalidIDEvent', 9: 'DevicePropertyChangeEvent', 10: 'DeviceInstanceRemovalEvent', 11: 'MaxPlugEventCategory'})]],
'Result' : [ 0x14, ['pointer', ['unsigned long']]],
'Flags' : [ 0x18, ['unsigned long']],
'TotalSize' : [ 0x1c, ['unsigned long']],
'DeviceObject' : [ 0x20, ['pointer', ['void']]],
'u' : [ 0x24, ['__unnamed_1ea6']],
} ],
'_VF_SUSPECT_DRIVER_ENTRY' : [ 0x18, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x8, ['unsigned long']],
'Unloads' : [ 0xc, ['unsigned long']],
'BaseName' : [ 0x10, ['_UNICODE_STRING']],
} ],
'_MMPTE_TIMESTAMP' : [ 0x4, {
'MustBeZero' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'GlobalTimeStamp' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_SID_AND_ATTRIBUTES_HASH' : [ 0x88, {
'SidCount' : [ 0x0, ['unsigned long']],
'SidAttr' : [ 0x4, ['pointer', ['_SID_AND_ATTRIBUTES']]],
'Hash' : [ 0x8, ['array', 32, ['unsigned long']]],
} ],
'_XSTATE_CONTEXT' : [ 0x20, {
'Mask' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Area' : [ 0x10, ['pointer', ['_XSAVE_AREA']]],
'Reserved2' : [ 0x14, ['unsigned long']],
'Buffer' : [ 0x18, ['pointer', ['void']]],
'Reserved3' : [ 0x1c, ['unsigned long']],
} ],
'_XSAVE_FORMAT' : [ 0x200, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned short']],
'Reserved2' : [ 0xe, ['unsigned short']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned short']],
'Reserved3' : [ 0x16, ['unsigned short']],
'MxCsr' : [ 0x18, ['unsigned long']],
'MxCsr_Mask' : [ 0x1c, ['unsigned long']],
'FloatRegisters' : [ 0x20, ['array', 8, ['_M128A']]],
'XmmRegisters' : [ 0xa0, ['array', 8, ['_M128A']]],
'Reserved4' : [ 0x120, ['array', 192, ['unsigned char']]],
'StackControl' : [ 0x1e0, ['array', 7, ['unsigned long']]],
'Cr0NpxState' : [ 0x1fc, ['unsigned long']],
} ],
'_MBCB' : [ 0x88, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x18, ['long long']],
'MostRecentlyDirtiedPage' : [ 0x20, ['long long']],
'BitmapRange1' : [ 0x28, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x48, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x68, ['_BITMAP_RANGE']],
} ],
'_PS_CPU_QUOTA_BLOCK' : [ 0x880, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SessionId' : [ 0x8, ['unsigned long']],
'CpuShareWeight' : [ 0xc, ['unsigned long']],
'CapturedWeightData' : [ 0x10, ['_PSP_CPU_SHARE_CAPTURED_WEIGHT_DATA']],
'DuplicateInputMarker' : [ 0x18, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x18, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'MiscFlags' : [ 0x18, ['long']],
'BlockCurrentGenerationLock' : [ 0x0, ['unsigned long']],
'CyclesAccumulated' : [ 0x8, ['unsigned long long']],
'CycleCredit' : [ 0x40, ['unsigned long long']],
'BlockCurrentGeneration' : [ 0x48, ['unsigned long']],
'CpuCyclePercent' : [ 0x4c, ['unsigned long']],
'CyclesFinishedForCurrentGeneration' : [ 0x50, ['unsigned char']],
'Cpu' : [ 0x80, ['array', 32, ['_PS_PER_CPU_QUOTA_CACHE_AWARE']]],
} ],
'__unnamed_1ec1' : [ 0x1, {
'AsUCHAR' : [ 0x0, ['unsigned char']],
'NoDomainAccounting' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IncreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 3, native_type='unsigned char')]],
'DecreasePolicy' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 5, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned char')]],
} ],
'PROCESSOR_PERFSTATE_POLICY' : [ 0x1c, {
'Revision' : [ 0x0, ['unsigned long']],
'MaxThrottle' : [ 0x4, ['unsigned char']],
'MinThrottle' : [ 0x5, ['unsigned char']],
'BusyAdjThreshold' : [ 0x6, ['unsigned char']],
'Spare' : [ 0x7, ['unsigned char']],
'Flags' : [ 0x7, ['__unnamed_1ec1']],
'TimeCheck' : [ 0x8, ['unsigned long']],
'IncreaseTime' : [ 0xc, ['unsigned long']],
'DecreaseTime' : [ 0x10, ['unsigned long']],
'IncreasePercent' : [ 0x14, ['unsigned long']],
'DecreasePercent' : [ 0x18, ['unsigned long']],
} ],
'_BUS_EXTENSION_LIST' : [ 0x8, {
'Next' : [ 0x0, ['pointer', ['void']]],
'BusExtension' : [ 0x4, ['pointer', ['_PI_BUS_EXTENSION']]],
} ],
'_CACHED_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x4, ['unsigned long']],
'RealKcb' : [ 0x4, ['pointer', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
'_KDEVICE_QUEUE' : [ 0x14, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x4, ['_LIST_ENTRY']],
'Lock' : [ 0xc, ['unsigned long']],
'Busy' : [ 0x10, ['unsigned char']],
} ],
'_SYSTEM_POWER_STATE_CONTEXT' : [ 0x4, {
'Reserved1' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'TargetSystemState' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'EffectiveSystemState' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'CurrentSystemState' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'IgnoreHibernationPath' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'PseudoTransition' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'Reserved2' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'ContextAsUlong' : [ 0x0, ['unsigned long']],
} ],
'_OBJECT_TYPE_INITIALIZER' : [ 0x50, {
'Length' : [ 0x0, ['unsigned short']],
'ObjectTypeFlags' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'UnnamedObjectsOnly' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'UseDefaultObject' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'SecurityRequired' : [ 0x2, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'MaintainHandleCount' : [ 0x2, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'MaintainTypeList' : [ 0x2, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'SupportsObjectCallbacks' : [ 0x2, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'ObjectTypeCode' : [ 0x4, ['unsigned long']],
'InvalidAttributes' : [ 0x8, ['unsigned long']],
'GenericMapping' : [ 0xc, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x1c, ['unsigned long']],
'RetainAccess' : [ 0x20, ['unsigned long']],
'PoolType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x2c, ['unsigned long']],
'DumpProcedure' : [ 0x30, ['pointer', ['void']]],
'OpenProcedure' : [ 0x34, ['pointer', ['void']]],
'CloseProcedure' : [ 0x38, ['pointer', ['void']]],
'DeleteProcedure' : [ 0x3c, ['pointer', ['void']]],
'ParseProcedure' : [ 0x40, ['pointer', ['void']]],
'SecurityProcedure' : [ 0x44, ['pointer', ['void']]],
'QueryNameProcedure' : [ 0x48, ['pointer', ['void']]],
'OkayToCloseProcedure' : [ 0x4c, ['pointer', ['void']]],
} ],
'__unnamed_1ef2' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'_SUBSECTION' : [ 0x20, {
'ControlArea' : [ 0x0, ['pointer', ['_CONTROL_AREA']]],
'SubsectionBase' : [ 0x4, ['pointer', ['_MMPTE']]],
'NextSubsection' : [ 0x8, ['pointer', ['_SUBSECTION']]],
'PtesInSubsection' : [ 0xc, ['unsigned long']],
'UnusedPtes' : [ 0x10, ['unsigned long']],
'GlobalPerSessionHead' : [ 0x10, ['pointer', ['_MM_AVL_TABLE']]],
'u' : [ 0x14, ['__unnamed_1ef2']],
'StartingSector' : [ 0x18, ['unsigned long']],
'NumberOfFullSectors' : [ 0x1c, ['unsigned long']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x8, {
'NextExtension' : [ 0x0, ['pointer', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x4, ['pointer', ['void']]],
} ],
'_PS_PER_CPU_QUOTA_CACHE_AWARE' : [ 0x40, {
'SortedListEntry' : [ 0x0, ['_LIST_ENTRY']],
'IdleOnlyListHead' : [ 0x8, ['_LIST_ENTRY']],
'CycleBaseAllowance' : [ 0x10, ['unsigned long long']],
'CyclesRemaining' : [ 0x18, ['long long']],
'CurrentGeneration' : [ 0x20, ['unsigned long']],
} ],
'_ETW_BUFFER_CONTEXT' : [ 0x4, {
'ProcessorNumber' : [ 0x0, ['unsigned char']],
'Alignment' : [ 0x1, ['unsigned char']],
'LoggerId' : [ 0x2, ['unsigned short']],
} ],
'_PROC_IDLE_SNAP' : [ 0x10, {
'Time' : [ 0x0, ['unsigned long long']],
'Idle' : [ 0x8, ['unsigned long long']],
} ],
'_KERNEL_STACK_SEGMENT' : [ 0x14, {
'StackBase' : [ 0x0, ['unsigned long']],
'StackLimit' : [ 0x4, ['unsigned long']],
'KernelStack' : [ 0x8, ['unsigned long']],
'InitialStack' : [ 0xc, ['unsigned long']],
'ActualLimit' : [ 0x10, ['unsigned long']],
} ],
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'DisableExceptionChainValidation' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'ExecuteOptions' : [ 0x0, ['unsigned char']],
} ],
'_SEP_TOKEN_PRIVILEGES' : [ 0x18, {
'Present' : [ 0x0, ['unsigned long long']],
'Enabled' : [ 0x8, ['unsigned long long']],
'EnabledByDefault' : [ 0x10, ['unsigned long long']],
} ],
'_WORK_QUEUE_ITEM' : [ 0x10, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x8, ['pointer', ['void']]],
'Parameter' : [ 0xc, ['pointer', ['void']]],
} ],
'_ARBITER_ALLOCATION_STATE' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x24, ['pointer', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x28, ['unsigned long']],
'Alternatives' : [ 0x2c, ['pointer', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x30, ['unsigned short']],
'RangeAttributes' : [ 0x32, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x33, ['unsigned char']],
'WorkSpace' : [ 0x34, ['unsigned long']],
} ],
'_VACB_ARRAY_HEADER' : [ 0x10, {
'VacbArrayIndex' : [ 0x0, ['unsigned long']],
'MappingCount' : [ 0x4, ['unsigned long']],
'HighestMappedIndex' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'_MMWSLENTRY' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 9, native_type='unsigned long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 12, native_type='unsigned long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long')]],
} ],
'_DBGKD_SWITCH_PARTITION' : [ 0x4, {
'Partition' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
'_INTERLOCK_SEQ' : [ 0x8, {
'Depth' : [ 0x0, ['unsigned short']],
'FreeEntryOffset' : [ 0x2, ['unsigned short']],
'OffsetAndDepth' : [ 0x0, ['unsigned long']],
'Sequence' : [ 0x4, ['unsigned long']],
'Exchg' : [ 0x0, ['long long']],
} ],
'_WHEA_TIMESTAMP' : [ 0x8, {
'Seconds' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long long')]],
'Minutes' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long long')]],
'Hours' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long long')]],
'Precise' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 32, native_type='unsigned long long')]],
'Day' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 40, native_type='unsigned long long')]],
'Month' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 48, native_type='unsigned long long')]],
'Year' : [ 0x0, ['BitField', dict(start_bit = 48, end_bit = 56, native_type='unsigned long long')]],
'Century' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 64, native_type='unsigned long long')]],
'AsLARGE_INTEGER' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'_VPB' : [ 0x58, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0xc, ['pointer', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x10, ['unsigned long']],
'ReferenceCount' : [ 0x14, ['unsigned long']],
'VolumeLabel' : [ 0x18, ['array', 32, ['wchar']]],
} ],
'_CACHE_DESCRIPTOR' : [ 0xc, {
'Level' : [ 0x0, ['unsigned char']],
'Associativity' : [ 0x1, ['unsigned char']],
'LineSize' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'CacheUnified', 1: 'CacheInstruction', 2: 'CacheData', 3: 'CacheTrace'})]],
} ],
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_SECURITY_SUBJECT_CONTEXT' : [ 0x10, {
'ClientToken' : [ 0x0, ['pointer', ['void']]],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x8, ['pointer', ['void']]],
'ProcessAuditId' : [ 0xc, ['pointer', ['void']]],
} ],
'_KiIoAccessMap' : [ 0x2024, {
'DirectionMap' : [ 0x0, ['array', 32, ['unsigned char']]],
'IoMap' : [ 0x20, ['array', 8196, ['unsigned char']]],
} ],
'_PF_KERNEL_GLOBALS' : [ 0x40, {
'AccessBufferAgeThreshold' : [ 0x0, ['unsigned long long']],
'AccessBufferRef' : [ 0x8, ['_EX_RUNDOWN_REF']],
'AccessBufferExistsEvent' : [ 0xc, ['_KEVENT']],
'AccessBufferMax' : [ 0x1c, ['unsigned long']],
'AccessBufferList' : [ 0x20, ['_SLIST_HEADER']],
'StreamSequenceNumber' : [ 0x28, ['long']],
'Flags' : [ 0x2c, ['unsigned long']],
'ScenarioPrefetchCount' : [ 0x30, ['long']],
} ],
'_ARBITER_QUERY_ARBITRATE_PARAMETERS' : [ 0x4, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
} ],
'_ARBITER_BOOT_ALLOCATION_PARAMETERS' : [ 0x4, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
} ],
'_POP_SYSTEM_IDLE' : [ 0x38, {
'AverageIdleness' : [ 0x0, ['long']],
'LowestIdleness' : [ 0x4, ['long']],
'Time' : [ 0x8, ['unsigned long']],
'Timeout' : [ 0xc, ['unsigned long']],
'LastUserInput' : [ 0x10, ['unsigned long']],
'Action' : [ 0x14, ['POWER_ACTION_POLICY']],
'MinState' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SystemRequired' : [ 0x24, ['unsigned char']],
'IdleWorker' : [ 0x25, ['unsigned char']],
'Sampling' : [ 0x26, ['unsigned char']],
'LastTick' : [ 0x28, ['unsigned long long']],
'LastSystemRequiredTime' : [ 0x30, ['unsigned long']],
} ],
'_VF_TARGET_ALL_SHARED_EXPORT_THUNKS' : [ 0xc, {
'SharedExportThunks' : [ 0x0, ['pointer', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'PoolSharedExportThunks' : [ 0x4, ['pointer', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
'OrderDependentSharedExportThunks' : [ 0x8, ['pointer', ['_VERIFIER_SHARED_EXPORT_THUNK']]],
} ],
'_ETW_REF_CLOCK' : [ 0x10, {
'StartTime' : [ 0x0, ['_LARGE_INTEGER']],
'StartPerfClock' : [ 0x8, ['_LARGE_INTEGER']],
} ],
'_OB_DUPLICATE_OBJECT_STATE' : [ 0x18, {
'SourceProcess' : [ 0x0, ['pointer', ['_EPROCESS']]],
'SourceHandle' : [ 0x4, ['pointer', ['void']]],
'Object' : [ 0x8, ['pointer', ['void']]],
'TargetAccess' : [ 0xc, ['unsigned long']],
'ObjectInfo' : [ 0x10, ['_HANDLE_TABLE_ENTRY_INFO']],
'HandleAttributes' : [ 0x14, ['unsigned long']],
} ],
'_MMPTE_SUBSECTION' : [ 0x4, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'SubsectionAddressLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 10, native_type='unsigned long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'SubsectionAddressHigh' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 32, native_type='unsigned long')]],
} ],
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_EFI_FIRMWARE_INFORMATION' : [ 0x10, {
'FirmwareVersion' : [ 0x0, ['unsigned long']],
'VirtualEfiRuntimeServices' : [ 0x4, ['pointer', ['_VIRTUAL_EFI_RUNTIME_SERVICES']]],
'SetVirtualAddressMapStatus' : [ 0x8, ['long']],
'MissedMappingsCount' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_1f53' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f55' : [ 0xc, {
'Level' : [ 0x0, ['unsigned short']],
'Group' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f57' : [ 0xc, {
'Group' : [ 0x0, ['unsigned short']],
'MessageCount' : [ 0x2, ['unsigned short']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f59' : [ 0xc, {
'Raw' : [ 0x0, ['__unnamed_1f57']],
'Translated' : [ 0x0, ['__unnamed_1f55']],
} ],
'__unnamed_1f5b' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f5d' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f5f' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f61' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length40' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f63' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length48' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f65' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length64' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1f67' : [ 0xc, {
'Generic' : [ 0x0, ['__unnamed_1f53']],
'Port' : [ 0x0, ['__unnamed_1f53']],
'Interrupt' : [ 0x0, ['__unnamed_1f55']],
'MessageInterrupt' : [ 0x0, ['__unnamed_1f59']],
'Memory' : [ 0x0, ['__unnamed_1f53']],
'Dma' : [ 0x0, ['__unnamed_1f5b']],
'DevicePrivate' : [ 0x0, ['__unnamed_1e3f']],
'BusNumber' : [ 0x0, ['__unnamed_1f5d']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_1f5f']],
'Memory40' : [ 0x0, ['__unnamed_1f61']],
'Memory48' : [ 0x0, ['__unnamed_1f63']],
'Memory64' : [ 0x0, ['__unnamed_1f65']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x10, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_1f67']],
} ],
'__unnamed_1f6c' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_1f6c']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'_ARBITER_ADD_RESERVED_PARAMETERS' : [ 0x4, {
'ReserveDevice' : [ 0x0, ['pointer', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_1f76' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x54, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x4, ['__unnamed_1f76']],
} ],
'_CONFIGURATION_COMPONENT_DATA' : [ 0x34, {
'Parent' : [ 0x0, ['pointer', ['_CONFIGURATION_COMPONENT_DATA']]],
'Child' : [ 0x4, ['pointer', ['_CONFIGURATION_COMPONENT_DATA']]],
'Sibling' : [ 0x8, ['pointer', ['_CONFIGURATION_COMPONENT_DATA']]],
'ComponentEntry' : [ 0xc, ['_CONFIGURATION_COMPONENT']],
'ConfigurationData' : [ 0x30, ['pointer', ['void']]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_1f80' : [ 0x4, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long')]],
'Parent' : [ 0x0, ['pointer', ['_MMSUBSECTION_NODE']]],
} ],
'_MMSUBSECTION_NODE' : [ 0x18, {
'u' : [ 0x0, ['__unnamed_1ef2']],
'StartingSector' : [ 0x4, ['unsigned long']],
'NumberOfFullSectors' : [ 0x8, ['unsigned long']],
'u1' : [ 0xc, ['__unnamed_1f80']],
'LeftChild' : [ 0x10, ['pointer', ['_MMSUBSECTION_NODE']]],
'RightChild' : [ 0x14, ['pointer', ['_MMSUBSECTION_NODE']]],
} ],
'_VF_AVL_TREE_NODE' : [ 0x8, {
'p' : [ 0x0, ['pointer', ['void']]],
'RangeSize' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1f88' : [ 0x8, {
'IdleTime' : [ 0x0, ['unsigned long']],
'NonIdleTime' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1f8a' : [ 0x8, {
'Disk' : [ 0x0, ['__unnamed_1f88']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x40, {
'IdleCount' : [ 0x0, ['unsigned long']],
'BusyCount' : [ 0x4, ['unsigned long']],
'BusyReference' : [ 0x8, ['unsigned long']],
'TotalBusyCount' : [ 0xc, ['unsigned long']],
'ConservationIdleTime' : [ 0x10, ['unsigned long']],
'PerformanceIdleTime' : [ 0x14, ['unsigned long']],
'DeviceObject' : [ 0x18, ['pointer', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x1c, ['_LIST_ENTRY']],
'IdleType' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceIdleNormal', 1: 'DeviceIdleDisk'})]],
'IdleState' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'CurrentState' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'Volume' : [ 0x30, ['_LIST_ENTRY']],
'Specific' : [ 0x38, ['__unnamed_1f8a']],
} ],
'_ARBITER_RETEST_ALLOCATION_PARAMETERS' : [ 0xc, {
'ArbitrationList' : [ 0x0, ['pointer', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x4, ['unsigned long']],
'AllocateFrom' : [ 0x8, ['pointer', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_WHEA_ERROR_RECORD_SECTION_DESCRIPTOR_VALIDBITS' : [ 0x1, {
'FRUId' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'FRUText' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 8, native_type='unsigned char')]],
'AsUCHAR' : [ 0x0, ['unsigned char']],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x38, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0xc, ['pointer', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x10, ['pointer', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x14, ['pointer', ['void']]],
'PreAcquireForCcFlush' : [ 0x18, ['pointer', ['void']]],
'PostAcquireForCcFlush' : [ 0x1c, ['pointer', ['void']]],
'PreReleaseForCcFlush' : [ 0x20, ['pointer', ['void']]],
'PostReleaseForCcFlush' : [ 0x24, ['pointer', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x28, ['pointer', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x2c, ['pointer', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x30, ['pointer', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x34, ['pointer', ['void']]],
} ],
'_KENLISTMENT' : [ 0x168, {
'cookie' : [ 0x0, ['unsigned long']],
'NamespaceLink' : [ 0x4, ['_KTMOBJECT_NAMESPACE_LINK']],
'EnlistmentId' : [ 0x18, ['_GUID']],
'Mutex' : [ 0x28, ['_KMUTANT']],
'NextSameTx' : [ 0x48, ['_LIST_ENTRY']],
'NextSameRm' : [ 0x50, ['_LIST_ENTRY']],
'ResourceManager' : [ 0x58, ['pointer', ['_KRESOURCEMANAGER']]],
'Transaction' : [ 0x5c, ['pointer', ['_KTRANSACTION']]],
'State' : [ 0x60, ['Enumeration', dict(target = 'long', choices = {0: 'KEnlistmentUninitialized', 256: 'KEnlistmentActive', 258: 'KEnlistmentPrepared', 259: 'KEnlistmentInDoubt', 260: 'KEnlistmentCommitted', 261: 'KEnlistmentCommittedNotify', 262: 'KEnlistmentCommitRequested', 257: 'KEnlistmentPreparing', 264: 'KEnlistmentDelegated', 265: 'KEnlistmentDelegatedDisconnected', 266: 'KEnlistmentPrePreparing', 263: 'KEnlistmentAborted', 268: 'KEnlistmentRecovering', 269: 'KEnlistmentAborting', 270: 'KEnlistmentReadOnly', 271: 'KEnlistmentOutcomeUnavailable', 272: 'KEnlistmentOffline', 273: 'KEnlistmentPrePrepared', 274: 'KEnlistmentInitialized', 267: 'KEnlistmentForgotten'})]],
'Flags' : [ 0x64, ['unsigned long']],
'NotificationMask' : [ 0x68, ['unsigned long']],
'Key' : [ 0x6c, ['pointer', ['void']]],
'KeyRefCount' : [ 0x70, ['unsigned long']],
'RecoveryInformation' : [ 0x74, ['pointer', ['void']]],
'RecoveryInformationLength' : [ 0x78, ['unsigned long']],
'DynamicNameInformation' : [ 0x7c, ['pointer', ['void']]],
'DynamicNameInformationLength' : [ 0x80, ['unsigned long']],
'FinalNotification' : [ 0x84, ['pointer', ['_KTMNOTIFICATION_PACKET']]],
'SupSubEnlistment' : [ 0x88, ['pointer', ['_KENLISTMENT']]],
'SupSubEnlHandle' : [ 0x8c, ['pointer', ['void']]],
'SubordinateTxHandle' : [ 0x90, ['pointer', ['void']]],
'CrmEnlistmentEnId' : [ 0x94, ['_GUID']],
'CrmEnlistmentTmId' : [ 0xa4, ['_GUID']],
'CrmEnlistmentRmId' : [ 0xb4, ['_GUID']],
'NextHistory' : [ 0xc4, ['unsigned long']],
'History' : [ 0xc8, ['array', 20, ['_KENLISTMENT_HISTORY']]],
} ],
'_ARBITER_INTERFACE' : [ 0x18, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x4, ['pointer', ['void']]],
'InterfaceReference' : [ 0x8, ['pointer', ['void']]],
'InterfaceDereference' : [ 0xc, ['pointer', ['void']]],
'ArbiterHandler' : [ 0x10, ['pointer', ['void']]],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_KAPC_STATE' : [ 0x18, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x10, ['pointer', ['_KPROCESS']]],
'KernelApcInProgress' : [ 0x14, ['unsigned char']],
'KernelApcPending' : [ 0x15, ['unsigned char']],
'UserApcPending' : [ 0x16, ['unsigned char']],
} ],
'_IA64_LOADER_BLOCK' : [ 0x4, {
'PlaceHolder' : [ 0x0, ['unsigned long']],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_DEVICE_RELATIONS' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x4, ['array', 1, ['pointer', ['_DEVICE_OBJECT']]]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_ALPC_COMPLETION_LIST_HEADER' : [ 0x300, {
'StartMagic' : [ 0x0, ['unsigned long long']],
'TotalSize' : [ 0x8, ['unsigned long']],
'ListOffset' : [ 0xc, ['unsigned long']],
'ListSize' : [ 0x10, ['unsigned long']],
'BitmapOffset' : [ 0x14, ['unsigned long']],
'BitmapSize' : [ 0x18, ['unsigned long']],
'DataOffset' : [ 0x1c, ['unsigned long']],
'DataSize' : [ 0x20, ['unsigned long']],
'AttributeFlags' : [ 0x24, ['unsigned long']],
'AttributeSize' : [ 0x28, ['unsigned long']],
'State' : [ 0x80, ['_ALPC_COMPLETION_LIST_STATE']],
'LastMessageId' : [ 0x88, ['unsigned long']],
'LastCallbackId' : [ 0x8c, ['unsigned long']],
'PostCount' : [ 0x100, ['unsigned long']],
'ReturnCount' : [ 0x180, ['unsigned long']],
'LogSequenceNumber' : [ 0x200, ['unsigned long']],
'UserLock' : [ 0x280, ['_RTL_SRWLOCK']],
'EndMagic' : [ 0x288, ['unsigned long long']],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_ETW_WMITRACE_WORK' : [ 0xf0, {
'LoggerId' : [ 0x0, ['unsigned long']],
'LoggerName' : [ 0x8, ['array', 65, ['unsigned char']]],
'FileName' : [ 0x49, ['array', 129, ['unsigned char']]],
'MaximumFileSize' : [ 0xcc, ['unsigned long']],
'MinBuffers' : [ 0xd0, ['unsigned long']],
'MaxBuffers' : [ 0xd4, ['unsigned long']],
'BufferSize' : [ 0xd8, ['unsigned long']],
'Mode' : [ 0xdc, ['unsigned long']],
'FlushTimer' : [ 0xe0, ['unsigned long']],
'MatchAny' : [ 0x8, ['unsigned long long']],
'MatchAll' : [ 0x10, ['unsigned long long']],
'EnableProperty' : [ 0x18, ['unsigned long']],
'Guid' : [ 0x1c, ['_GUID']],
'Level' : [ 0x2c, ['unsigned char']],
'Status' : [ 0xe8, ['long']],
} ],
'_DEVICE_MAP' : [ 0x34, {
'DosDevicesDirectory' : [ 0x0, ['pointer', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x4, ['pointer', ['_OBJECT_DIRECTORY']]],
'DosDevicesDirectoryHandle' : [ 0x8, ['pointer', ['void']]],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DriveMap' : [ 0x10, ['unsigned long']],
'DriveType' : [ 0x14, ['array', 32, ['unsigned char']]],
} ],
'_HEAP_DEBUGGING_INFORMATION' : [ 0x1c, {
'InterceptorFunction' : [ 0x0, ['pointer', ['void']]],
'InterceptorValue' : [ 0x4, ['unsigned short']],
'ExtendedOptions' : [ 0x8, ['unsigned long']],
'StackTraceDepth' : [ 0xc, ['unsigned long']],
'MinTotalBlockSize' : [ 0x10, ['unsigned long']],
'MaxTotalBlockSize' : [ 0x14, ['unsigned long']],
'HeapLeakEnumerationRoutine' : [ 0x18, ['pointer', ['void']]],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_MMBANKED_SECTION' : [ 0x20, {
'BasePhysicalPage' : [ 0x0, ['unsigned long']],
'BasedPte' : [ 0x4, ['pointer', ['_MMPTE']]],
'BankSize' : [ 0x8, ['unsigned long']],
'BankShift' : [ 0xc, ['unsigned long']],
'BankedRoutine' : [ 0x10, ['pointer', ['void']]],
'Context' : [ 0x14, ['pointer', ['void']]],
'CurrentMappedPte' : [ 0x18, ['pointer', ['_MMPTE']]],
'BankTemplate' : [ 0x1c, ['array', 1, ['_MMPTE']]],
} ],
'_WHEA_ERROR_RECORD_HEADER_FLAGS' : [ 0x4, {
'Recovered' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'PreviousError' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Simulated' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_XSAVE_AREA_HEADER' : [ 0x40, {
'Mask' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['array', 7, ['unsigned long long']]],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x20, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x8, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x10, ['unsigned long']],
'ReserveSize' : [ 0x14, ['unsigned long']],
'BusyBlock' : [ 0x18, ['_HEAP_ENTRY']],
} ],
'_PNP_DEVICE_COMPLETION_REQUEST' : [ 0x38, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'DeviceNode' : [ 0x8, ['pointer', ['_DEVICE_NODE']]],
'Context' : [ 0xc, ['pointer', ['void']]],
'CompletionState' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'IrpPended' : [ 0x14, ['unsigned long']],
'Status' : [ 0x18, ['long']],
'Information' : [ 0x1c, ['pointer', ['void']]],
'WorkItem' : [ 0x20, ['_WORK_QUEUE_ITEM']],
'FailingDriver' : [ 0x30, ['pointer', ['_DRIVER_OBJECT']]],
'ReferenceCount' : [ 0x34, ['long']],
} ],
'_EVENT_FILTER_HEADER' : [ 0x18, {
'Id' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['array', 5, ['unsigned char']]],
'InstanceId' : [ 0x8, ['unsigned long long']],
'Size' : [ 0x10, ['unsigned long']],
'NextOffset' : [ 0x14, ['unsigned long']],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x28, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DeviceRoutine' : [ 0x10, ['pointer', ['void']]],
'DeviceContext' : [ 0x14, ['pointer', ['void']]],
'NumberOfMapRegisters' : [ 0x18, ['unsigned long']],
'DeviceObject' : [ 0x1c, ['pointer', ['void']]],
'CurrentIrp' : [ 0x20, ['pointer', ['void']]],
'BufferChainingDpc' : [ 0x24, ['pointer', ['_KDPC']]],
} ],
'_SECTION_OBJECT' : [ 0x18, {
'StartingVa' : [ 0x0, ['pointer', ['void']]],
'EndingVa' : [ 0x4, ['pointer', ['void']]],
'Parent' : [ 0x8, ['pointer', ['void']]],
'LeftChild' : [ 0xc, ['pointer', ['void']]],
'RightChild' : [ 0x10, ['pointer', ['void']]],
'Segment' : [ 0x14, ['pointer', ['_SEGMENT_OBJECT']]],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x10, {
'Compressed' : [ 0x0, ['unsigned char']],
'RefCount' : [ 0x2, ['unsigned short']],
'NameHash' : [ 0x4, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x4, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer', ['_CM_KEY_HASH']]],
'NameLength' : [ 0xc, ['unsigned short']],
'Name' : [ 0xe, ['array', 1, ['wchar']]],
} ],
}
|
gpl-2.0
|
brijeshkesariya/odoo
|
openerp/report/common.py
|
457
|
3337
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Page dimensions in millimetres, (width, height), keyed by format name.
# NOTE(review): 'A5' is stored as (148.5, 105) — half an A4 sheet in
# landscape — rather than ISO A5 portrait (148 x 210).  Preserved as-is
# since existing report layouts depend on this value; confirm before
# changing.
pageSize = dict(
    A4=(210, 297),
    A5=(148.5, 105),
)
# XML namespace prefixes used in OpenDocument (ODT) files, mapped to the
# "{uri}" Clark-notation form that ElementTree uses to qualify tag names.
odt_namespace = dict(
    office="{urn:oasis:names:tc:opendocument:xmlns:office:1.0}",
    style="{urn:oasis:names:tc:opendocument:xmlns:style:1.0}",
    text="{urn:oasis:names:tc:opendocument:xmlns:text:1.0}",
    table="{urn:oasis:names:tc:opendocument:xmlns:table:1.0}",
    draw="{urn:oasis:names:tc:opendocument:xmlns:drawing:1.0}",
    fo="{urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0}",
    xlink="{http://www.w3.org/1999/xlink}",
    dc="{http://purl.org/dc/elements/1.1/}",
    meta="{urn:oasis:names:tc:opendocument:xmlns:meta:1.0}",
    number="{urn:oasis:names:tc:opendocument:xmlns:datastyle:1.0}",
    svg="{urn:oasis:names:tc:opendocument:xmlns:svg-compatible:1.0}",
    chart="{urn:oasis:names:tc:opendocument:xmlns:chart:1.0}",
    dr3d="{urn:oasis:names:tc:opendocument:xmlns:dr3d:1.0}",
    math="{http://www.w3.org/1998/Math/MathML}",
    form="{urn:oasis:names:tc:opendocument:xmlns:form:1.0}",
    script="{urn:oasis:names:tc:opendocument:xmlns:script:1.0}",
    ooo="{http://openoffice.org/2004/office}",
    ooow="{http://openoffice.org/2004/writer}",
    oooc="{http://openoffice.org/2004/calc}",
    dom="{http://www.w3.org/2001/xml-events}",
)
# XML namespace prefixes used in legacy OpenOffice 1.x (SXW) files,
# mapped to the "{uri}" Clark-notation form ElementTree uses for tags.
sxw_namespace = dict(
    office="{http://openoffice.org/2000/office}",
    style="{http://openoffice.org/2000/style}",
    text="{http://openoffice.org/2000/text}",
    table="{http://openoffice.org/2000/table}",
    draw="{http://openoffice.org/2000/drawing}",
    fo="{http://www.w3.org/1999/XSL/Format}",
    xlink="{http://www.w3.org/1999/xlink}",
    dc="{http://purl.org/dc/elements/1.1/}",
    meta="{http://openoffice.org/2000/meta}",
    number="{http://openoffice.org/2000/datastyle}",
    svg="{http://www.w3.org/2000/svg}",
    chart="{http://openoffice.org/2000/chart}",
    dr3d="{http://openoffice.org/2000/dr3d}",
    math="{http://www.w3.org/1998/Math/MathML}",
    form="{http://openoffice.org/2000/form}",
    script="{http://openoffice.org/2000/script}",
    ooo="{http://openoffice.org/2004/office}",
    ooow="{http://openoffice.org/2004/writer}",
    oooc="{http://openoffice.org/2004/calc}",
    dom="{http://www.w3.org/2001/xml-events}",
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
albertliangcode/Pi_MonteCarloSim
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/escsm.py
|
2930
|
7839
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart, eError, eItsMe
# Byte-class table for the HZ-GB-2312 coding state machine: maps each
# byte value 0x00-0xff to one of 6 character classes.  Built
# programmatically instead of spelling out all 256 literals; the result
# is element-for-element identical to the original chardet table.
_hz_classes = [0] * 256
_hz_classes[0x00] = 1
_hz_classes[0x1b] = 1          # ESC
_hz_classes[0x7b] = 4          # '{'
_hz_classes[0x7d] = 5          # '}'
_hz_classes[0x7e] = 2          # '~'
for _b in range(0x80, 0x100):  # every high byte is class 1
    _hz_classes[_b] = 1
HZ_cls = tuple(_hz_classes)
del _hz_classes, _b
# State-transition table for the HZ-GB-2312 state machine.  Indexed by
# current_state * classFactor + char_class (classFactor is 6, the number
# of classes in HZ_cls); each entry is the next state.  eStart/eError/
# eItsMe come from .constants; plain integers are intermediate states.
HZ_st = (
eStart,eError, 3,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eStart,eStart, 4,eError,# 10-17
5,eError, 6,eError, 5, 5, 4,eError,# 18-1f
4,eError, 4, 4, 4,eError, 4,eError,# 20-27
4,eItsMe,eStart,eStart,eStart,eStart,eStart,eStart,# 28-2f
)
# Character-length table, one entry per class; every entry is zero for
# this model.
HZCharLenTable = (0, 0, 0, 0, 0, 0)
# Complete state-machine model consumed by chardet's CodingStateMachine.
HZSMModel = {'classTable': HZ_cls,
             'classFactor': 6,
             'stateTable': HZ_st,
             'charLenTable': HZCharLenTable,
             'name': "HZ-GB-2312"}
# Byte-class table for the ISO-2022-CN coding state machine: maps each
# byte value 0x00-0xff to one of 9 character classes.  Built
# programmatically; element-for-element identical to the original
# chardet table.
_cn_classes = [0] * 256
_cn_classes[0x00] = 2
_cn_classes[0x1b] = 1          # ESC
_cn_classes[0x29] = 3          # ')'
_cn_classes[0x43] = 4          # 'C'
for _b in range(0x80, 0x100):  # every high byte is class 2
    _cn_classes[_b] = 2
ISO2022CN_cls = tuple(_cn_classes)
del _cn_classes, _b
# State-transition table for the ISO-2022-CN state machine.  Indexed by
# current_state * classFactor + char_class (classFactor is 9, matching
# ISO2022CN_cls); entries are the next state.
ISO2022CN_st = (
eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eError,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eError,eError,eError, 4,eError,# 18-1f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 20-27
5, 6,eError,eError,eError,eError,eError,eError,# 28-2f
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 30-37
eError,eError,eError,eError,eError,eItsMe,eError,eStart,# 38-3f
)
# Character-length table, one entry per class; all zero for this model.
ISO2022CNCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0)
# Complete state-machine model consumed by chardet's CodingStateMachine.
ISO2022CNSMModel = {'classTable': ISO2022CN_cls,
                    'classFactor': 9,
                    'stateTable': ISO2022CN_st,
                    'charLenTable': ISO2022CNCharLenTable,
                    'name': "ISO-2022-CN"}
# Byte-class table for the ISO-2022-JP coding state machine: maps each
# byte value 0x00-0xff to one of 10 character classes.  Built
# programmatically; element-for-element identical to the original
# chardet table.
_jp_classes = [0] * 256
for _b, _cls in ((0x00, 2), (0x0e, 2), (0x0f, 2),   # NUL, SO, SI
                 (0x1b, 1),                          # ESC
                 (0x24, 7), (0x28, 3),               # '$', '('
                 (0x40, 6), (0x42, 4), (0x44, 8),    # '@', 'B', 'D'
                 (0x49, 9), (0x4a, 5)):              # 'I', 'J'
    _jp_classes[_b] = _cls
for _b in range(0x80, 0x100):                        # every high byte is class 2
    _jp_classes[_b] = 2
ISO2022JP_cls = tuple(_jp_classes)
del _jp_classes, _b, _cls
# State-transition table for the ISO-2022-JP state machine.  Indexed by
# current_state * classFactor + char_class (classFactor is 10, matching
# ISO2022JP_cls); entries are the next state.
ISO2022JP_st = (
eStart, 3,eError,eStart,eStart,eStart,eStart,eStart,# 00-07
eStart,eStart,eError,eError,eError,eError,eError,eError,# 08-0f
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 10-17
eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eError,eError,# 18-1f
eError, 5,eError,eError,eError, 4,eError,eError,# 20-27
eError,eError,eError, 6,eItsMe,eError,eItsMe,eError,# 28-2f
eError,eError,eError,eError,eError,eError,eItsMe,eItsMe,# 30-37
eError,eError,eError,eItsMe,eError,eError,eError,eError,# 38-3f
eError,eError,eError,eError,eItsMe,eError,eStart,eStart,# 40-47
)
# Character-length table, one entry per class; all zero for this model.
ISO2022JPCharLenTable = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
# Complete state-machine model consumed by chardet's CodingStateMachine.
ISO2022JPSMModel = {'classTable': ISO2022JP_cls,
                    'classFactor': 10,
                    'stateTable': ISO2022JP_st,
                    'charLenTable': ISO2022JPCharLenTable,
                    'name': "ISO-2022-JP"}
# Byte-class table for the ISO-2022-KR coding state machine: maps each
# byte value 0x00-0xff to one of 6 character classes.  Built
# programmatically; element-for-element identical to the original
# chardet table.
_kr_classes = [0] * 256
_kr_classes[0x00] = 2
_kr_classes[0x1b] = 1          # ESC
_kr_classes[0x24] = 3          # '$'
_kr_classes[0x29] = 4          # ')'
_kr_classes[0x43] = 5          # 'C'
for _b in range(0x80, 0x100):  # every high byte is class 2
    _kr_classes[_b] = 2
ISO2022KR_cls = tuple(_kr_classes)
del _kr_classes, _b
# State-transition table for the ISO-2022-KR state machine.  Indexed by
# current_state * classFactor + char_class (classFactor is 6, matching
# ISO2022KR_cls); entries are the next state.
ISO2022KR_st = (
eStart, 3,eError,eStart,eStart,eStart,eError,eError,# 00-07
eError,eError,eError,eError,eItsMe,eItsMe,eItsMe,eItsMe,# 08-0f
eItsMe,eItsMe,eError,eError,eError, 4,eError,eError,# 10-17
eError,eError,eError,eError, 5,eError,eError,eError,# 18-1f
eError,eError,eError,eItsMe,eStart,eStart,eStart,eStart,# 20-27
)
# Character-length table, one entry per class; all zero for this model.
ISO2022KRCharLenTable = (0, 0, 0, 0, 0, 0)
# Complete state-machine model consumed by chardet's CodingStateMachine.
ISO2022KRSMModel = {'classTable': ISO2022KR_cls,
                    'classFactor': 6,
                    'stateTable': ISO2022KR_st,
                    'charLenTable': ISO2022KRCharLenTable,
                    'name': "ISO-2022-KR"}
# flake8: noqa
|
mit
|
s3ql/main
|
tests/t6_upgrade.py
|
2
|
9214
|
#!/usr/bin/env python3
'''
t6_upgrade.py - this file is part of S3QL.
Copyright © 2008 Nikolaus Rath <Nikolaus@rath.org>
This work can be distributed under the terms of the GNU GPLv3.
'''

# Allow running this test module directly: hand control to pytest
# (forwarding any command-line arguments) before the test imports below
# are pulled in.
if __name__ == '__main__':
    import pytest
    import sys
    sys.exit(pytest.main([__file__] + sys.argv[1:]))
from common import populate_dir, skip_without_rsync, retry
from t1_backends import get_remote_test_info, NoTestSection
from s3ql import backends
import shutil
import subprocess
from subprocess import check_output, CalledProcessError
import t4_fuse
import tempfile
import os
import pytest
@pytest.mark.usefixtures('pass_reg_output')
class TestUpgrade(t4_fuse.TestFuse):
    '''End-to-end test of the S3QL file system upgrade path.

    A file system is created and populated with the *previous* S3QL
    release (expected in ../s3ql.old), then the current release is used
    to verify that mounting fails with the expected error, to upgrade
    the file system, and finally to check that the upgraded file system
    still matches the reference data.
    '''

    def setup_method(self, method):
        skip_without_rsync()
        basedir_old = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                   '..', 's3ql.old'))
        if not os.path.exists(os.path.join(basedir_old, 'bin', 'mkfs.s3ql')):
            pytest.skip('no previous S3QL version found')
        super().setup_method(method)
        self.ref_dir = tempfile.mkdtemp(prefix='s3ql-ref-')  # reference copy of test data
        self.bak_dir = tempfile.mkdtemp(prefix='s3ql-bak-')
        self.basedir_old = basedir_old

    def teardown_method(self, method):
        super().teardown_method(method)
        shutil.rmtree(self.ref_dir)
        shutil.rmtree(self.bak_dir)

    def _renew_cache_dir(self):
        '''Discard the local cache directory and create a fresh one.

        Simulates running the current release on a host that has no
        cached state for the file system.'''
        shutil.rmtree(self.cache_dir)
        self.cache_dir = tempfile.mkdtemp(prefix='s3ql-cache-')

    def mkfs_old(self, force=False, max_obj_size=500):
        '''Create the file system using the previous release's mkfs.s3ql.'''
        argv = [ os.path.join(self.basedir_old, 'bin', 'mkfs.s3ql'),
                 '-L', 'test fs', '--max-obj-size', str(max_obj_size),
                 '--cachedir', self.cache_dir, '--quiet',
                 '--authfile', '/dev/null', self.storage_url ]
        if force:
            argv.append('--force')
        if self.passphrase is None:
            argv.append('--plain')
        proc = subprocess.Popen(argv, stdin=subprocess.PIPE, universal_newlines=True)
        if self.backend_login is not None:
            print(self.backend_login, file=proc.stdin)
            print(self.backend_passphrase, file=proc.stdin)
        if self.passphrase is not None:
            # mkfs.s3ql asks for the passphrase twice (entry + confirmation)
            print(self.passphrase, file=proc.stdin)
            print(self.passphrase, file=proc.stdin)
        proc.stdin.close()
        assert proc.wait() == 0
        # Bugfix: the continuation literal must be raw as well - in a
        # plain string '\.' is an invalid escape sequence (SyntaxWarning
        # on modern Python) rather than a regex-escaped dot.
        self.reg_output(r'^WARNING: Maximum object sizes less than '
                        r'1 MiB will degrade performance\.$', count=1)

    def mount_old(self):
        '''Mount the file system using the previous release's mount.s3ql.'''
        self.mount_process = subprocess.Popen([os.path.join(self.basedir_old, 'bin', 'mount.s3ql'),
                                               "--fg", '--cachedir', self.cache_dir, '--log',
                                               'none', '--quiet', '--authfile', '/dev/null',
                                               '--compress', 'zlib', self.storage_url, self.mnt_dir],
                                              stdin=subprocess.PIPE, universal_newlines=True)
        if self.backend_login is not None:
            print(self.backend_login, file=self.mount_process.stdin)
            print(self.backend_passphrase, file=self.mount_process.stdin)
        if self.passphrase is not None:
            print(self.passphrase, file=self.mount_process.stdin)
        self.mount_process.stdin.close()

        def poll():
            if os.path.ismount(self.mnt_dir):
                return True
            # mount.s3ql must still be alive while we wait for the mount
            assert self.mount_process.poll() is None
        retry(30, poll)

    def umount_old(self):
        '''Unmount the file system using the previous release's umount.s3ql.'''
        with open('/dev/null', 'wb') as devnull:
            # Wait until nothing uses the mountpoint anymore (fuser
            # returns 1 when there are no users).
            retry(5, lambda: subprocess.call(['fuser', '-m', self.mnt_dir],
                                             stdout=devnull, stderr=devnull) == 1)
        proc = subprocess.Popen([os.path.join(self.basedir_old, 'bin', 'umount.s3ql'),
                                 '--quiet', self.mnt_dir])
        retry(90, lambda : proc.poll() is not None)
        assert proc.wait() == 0
        assert self.mount_process.poll() == 0
        assert not os.path.ismount(self.mnt_dir)

    def upgrade(self):
        '''Upgrade the file system with the current release's s3qladm.'''
        proc = subprocess.Popen(self.s3ql_cmd_argv('s3qladm') +
                                [ '--cachedir', self.cache_dir, '--authfile',
                                  '/dev/null', '--quiet', 'upgrade', self.storage_url ],
                                stdin=subprocess.PIPE, universal_newlines=True)
        if self.backend_login is not None:
            print(self.backend_login, file=proc.stdin)
            print(self.backend_passphrase, file=proc.stdin)
        if self.passphrase is not None:
            print(self.passphrase, file=proc.stdin)
        print('yes', file=proc.stdin)  # confirm the upgrade prompt
        proc.stdin.close()
        assert proc.wait() == 0

    def compare(self):
        '''Assert that the mounted file system equals the reference copy.

        Runs a dry-run rsync; any output means the trees differ.'''
        try:
            out = check_output(['rsync', '-anciHAX', '--delete', '--exclude', '/lost+found',
                                self.ref_dir + '/', self.mnt_dir + '/'], universal_newlines=True,
                               stderr=subprocess.STDOUT)
        except CalledProcessError as exc:
            pytest.fail('rsync failed with ' + exc.output)
        if out:
            pytest.fail('Copy not equal to original, rsync says:\n' + out)

    def populate(self):
        '''Fill the reference directory with test data.'''
        populate_dir(self.ref_dir)

    @pytest.mark.parametrize("with_cache", (True, False))
    def test(self, with_cache):
        self.populate()

        # Create and mount using previous S3QL version
        self.mkfs_old()
        self.mount_old()
        subprocess.check_call(['rsync', '-aHAX', self.ref_dir + '/', self.mnt_dir + '/'])
        self.umount_old()

        # Try to access with new version (should fail)
        if not with_cache:
            self._renew_cache_dir()
        if isinstance(self, RemoteUpgradeTest):
            self.mount(expect_fail=32)
            self.reg_output(r'^ERROR: File system revision needs upgrade', count=1)
            self.reg_output(r'^WARNING: MD5 mismatch in metadata for '
                            r's3ql_(metadata|passphrase)', count=1)
        elif self.passphrase:
            self.mount(expect_fail=17)
            self.reg_output(r'^ERROR: Wrong file system passphrase', count=1)
        else:
            self.mount(expect_fail=32)
            self.reg_output(r'^ERROR: File system revision too old', count=1)

        # Upgrade
        if not with_cache:
            self._renew_cache_dir()
        self.upgrade()

        # ...and test
        if not with_cache:
            self._renew_cache_dir()
        self.fsck()
        self.mount()
        self.compare()

        # Try if we can still write (we messed this up in the upgrade
        # from 2.16 to 2.17).
        with open('%s/some_new_file' % (self.mnt_dir,), 'w') as fh:
            fh.write('hello, world')
        self.umount()
class TestPlainUpgrade(TestUpgrade):
    '''Same upgrade scenario as TestUpgrade, but on an unencrypted file
    system (mkfs_old() passes --plain when the passphrase is None).'''

    def setup_method(self, method):
        super().setup_method(method)
        # Override the passphrase set up by the base class.
        self.passphrase = None
class RemoteUpgradeTest:
    '''Mixin that runs the upgrade test against a remote storage backend.

    setup_method() takes an additional *name* argument identifying the
    remote test-configuration section to use; the mixin is therefore
    combined with TestUpgrade through dynamically generated classes (see
    the loop at the bottom of this module) rather than collected
    directly by pytest.
    '''

    def setup_method(self, method, name):
        super().setup_method(method)
        try:
            (backend_login, backend_pw,
             self.storage_url) = get_remote_test_info(name)
        except NoTestSection as exc:
            # No credentials configured for this backend: undo the setup
            # already performed and skip the test.
            super().teardown_method(method)
            pytest.skip(exc.reason)
        self.backend_login = backend_login
        self.backend_passphrase = backend_pw

    def populate(self):
        # Populate with 50 entries of 5 MiB each (instead of the base
        # class's defaults) to keep remote traffic manageable.
        populate_dir(self.ref_dir, entries=50, size=5*1024*1024)

    def teardown_method(self, method):
        super().teardown_method(method)
        # Remove all objects created on the remote backend during the
        # test ('clear' prompts for confirmation, hence the 'yes').
        proc = subprocess.Popen(self.s3ql_cmd_argv('s3qladm') +
                                [ '--quiet', '--authfile', '/dev/null',
                                  'clear', self.storage_url ],
                                stdin=subprocess.PIPE, universal_newlines=True)
        if self.backend_login is not None:
            print(self.backend_login, file=proc.stdin)
            print(self.backend_passphrase, file=proc.stdin)
        print('yes', file=proc.stdin)
        proc.stdin.close()
        assert proc.wait() == 0
# Dynamically generate tests for other backends: for every configured
# remote backend prefix (except 'local'), create one plain and one
# encrypted test class combining RemoteUpgradeTest with TestUpgrade.
for backend_name in backends.prefix_map:
    if backend_name == 'local':
        continue

    # Plain
    # NOTE: *backend_name* is bound as a default-argument value so each
    # generated setup_method keeps its own backend instead of the loop
    # variable's final value (late-binding closure pitfall).
    def setup_method(self, method, backend_name=backend_name):
        RemoteUpgradeTest.setup_method(self, method, backend_name + '-test')
        self.passphrase = None
    test_class_name = 'TestPlain' + backend_name + 'Upgrade'
    globals()[test_class_name] = type(test_class_name,
                                      (RemoteUpgradeTest, TestUpgrade),
                                      { 'setup_method': setup_method })

    # Encrypted (passphrase comes from the TestUpgrade fixtures)
    def setup_method(self, method, backend_name=backend_name):
        RemoteUpgradeTest.setup_method(self, method, backend_name + '-test')
    test_class_name = 'Test' + backend_name + 'Upgrade'
    globals()[test_class_name] = type(test_class_name,
                                      (RemoteUpgradeTest, TestUpgrade),
                                      { 'setup_method': setup_method })
|
gpl-3.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.