text (stringlengths 6-947k) | repo_name (stringlengths 5-100) | path (stringlengths 4-231) | language (stringclasses 1) | license (stringclasses 15) | size (int64 6-947k) | score (float64 0-0.34)
---|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# © 2012-2016 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{'name': 'Switzerland - Payment Slip (BVR/ESR)',
'summary': 'Print ESR/BVR payment slip with your invoices',
'version': '10.0.2.1.1',
'author': "Camptocamp,Odoo Community Association (OCA)",
'category': 'Localization',
'website': 'http://www.camptocamp.com',
'license': 'AGPL-3',
'depends': [
'base',
'account',
'report',
'l10n_ch_base_bank',
'base_transaction_id', # OCA/bank-statement-reconcile
],
'data': [
"views/company.xml",
"views/bank.xml",
"views/account_invoice.xml",
"wizard/bvr_import_view.xml",
"report/report_declaration.xml",
"security/ir.model.access.csv"
],
'demo': [],
'test': [],
'auto_install': False,
'installable': True,
'images': []
}
| lem8r/cofair-addons | l10n_ch_payment_slip/__manifest__.py | Python | lgpl-3.0 | 847 | 0 |
info_system = 'http://webpac.lib.nthu.edu.tw/F/'
top_circulations = 'http://www.lib.nthu.edu.tw/guide/topcirculations/index.htm'
top_circulations_bc2007 = 'http://www.lib.nthu.edu.tw/guide/topcirculations/bc2007.htm'
rss_recent_books = 'http://webpac.lib.nthu.edu.tw:8080/nbr/reader/rbn_rss.jsp'
lost_found_url = 'http://adage.lib.nthu.edu.tw/find/search_it.php'
| leVirve/NTHU-Library | nthu_library/static_urls.py | Python | gpl-2.0 | 363 | 0.002755 |
import pyglet
from pyglet.window import key
window = pyglet.window.Window()
@window.event
def on_key_press(symbol, modifiers):
print('A key was pressed')
if symbol == key.A:
print('The "A" key was pressed.')
elif symbol == key.LEFT:
print('The left arrow key was pressed.')
elif symbol == key.ENTER:
print('The enter key was pressed.')
@window.event
def on_draw():
window.clear()
pyglet.app.run()
| davidam/python-examples | pyglet/keyboard.py | Python | gpl-3.0 | 446 | 0.006726 |
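A companion sketch for the keyboard sample above: the `modifiers` argument is a bitmask of `key.MOD_*` flags, and key releases arrive through a separate `on_key_release` event. This extension is illustrative, not part of the original sample.

```python
import pyglet
from pyglet.window import key

window = pyglet.window.Window()

@window.event
def on_key_press(symbol, modifiers):
    # `modifiers` is a bitmask; test individual flags with bitwise AND.
    if symbol == key.A and modifiers & key.MOD_SHIFT:
        print('Shift+A was pressed.')

@window.event
def on_key_release(symbol, modifiers):
    print('A key was released.')

@window.event
def on_draw():
    window.clear()

pyglet.app.run()
```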
from . import load_fixture
from lintreview.config import load_config
from lintreview.diff import DiffCollection
from lintreview.review import Review, Problems, Comment
from lintreview.repo import GithubRepository, GithubPullRequest
from mock import Mock, call
from nose.tools import eq_
from github3.issues.comment import IssueComment as GhIssueComment
from github3.pulls import PullFile
from unittest import TestCase
import json
config = load_config()
class TestReview(TestCase):
def setUp(self):
repo = Mock(spec=GithubRepository)
pr = Mock(spec=GithubPullRequest,
head='abc123',
display_name='markstory/lint-review#1',
number=2)
repo.pull_request.return_value = pr
self.repo, self.pr = repo, pr
self.review = Review(repo, pr)
def test_load_comments__none_active(self):
fixture_data = load_fixture('comments_none_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
review = Review(self.repo, self.pr)
review.load_comments()
eq_(0, len(review.comments("View/Helper/AssetCompressHelper.php")))
def test_load_comments__loads_comments(self):
fixture_data = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
review = Review(self.repo, self.pr)
review.load_comments()
filename = "Routing/Filter/AssetCompressor.php"
res = review.comments(filename)
eq_(1, len(res))
expected = Comment(filename, None, 87, "A pithy remark")
eq_(expected, res[0])
filename = "View/Helper/AssetCompressHelper.php"
res = review.comments(filename)
eq_(2, len(res))
expected = Comment(filename, None, 40, "Some witty comment.")
eq_(expected, res[0])
expected = Comment(filename, None, 89, "Not such a good comment")
eq_(expected, res[1])
def test_filter_existing__removes_duplicates(self):
fixture_data = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture_data))
problems = Problems()
review = Review(self.repo, self.pr)
filename_1 = "Routing/Filter/AssetCompressor.php"
filename_2 = "View/Helper/AssetCompressHelper.php"
problems.add(filename_1, 87, 'A pithy remark')
problems.add(filename_1, 87, 'Something different')
problems.add(filename_2, 88, 'I <3 it')
problems.add(filename_2, 89, 'Not such a good comment')
review.load_comments()
review.remove_existing(problems)
res = problems.all(filename_1)
eq_(1, len(res))
expected = Comment(filename_1,
87,
87,
'A pithy remark\nSomething different')
eq_(res[0], expected)
res = problems.all(filename_2)
eq_(1, len(res))
expected = Comment(filename_2, 88, 88, 'I <3 it')
eq_(res[0], expected)
def test_publish_problems(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
sha = 'abc123'
review = Review(self.repo, self.pr)
review.publish_problems(problems, sha)
assert self.pr.create_review_comment.called
eq_(2, self.pr.create_review_comment.call_count)
assert_review_comments_created(
self.pr.create_review_comment.call_args_list,
errors,
sha)
def test_publish_status__ok_no_comment_label_or_status(self):
config = {
'OK_COMMENT': None,
'OK_LABEL': None,
'PULLREQUEST_STATUS': False,
}
review = Review(self.repo, self.pr, config)
review.publish_status(0)
assert not self.repo.create_status.called, 'Create status called'
        assert not self.pr.create_comment.called, 'Comment should not be created'
        assert not self.pr.add_label.called, 'Label should not be added'
def test_publish_status__ok_with_comment_label_and_status(self):
config = {
'OK_COMMENT': 'Great job!',
'OK_LABEL': 'No lint errors',
'PULLREQUEST_STATUS': True,
}
review = Review(self.repo, self.pr, config)
review.publish_status(0)
assert self.repo.create_status.called, 'Create status not called'
self.repo.create_status.assert_called_with(
self.pr.head,
'success',
'No lint errors found.')
        assert self.pr.create_comment.called, 'Issue comment should be created'
self.pr.create_comment.assert_called_with('Great job!')
        assert self.pr.add_label.called, 'Label should be added'
self.pr.add_label.assert_called_with('No lint errors')
def test_publish_status__has_errors(self):
config = {
'OK_COMMENT': 'Great job!',
'OK_LABEL': 'No lint errors',
'APP_NAME': 'custom-name'
}
review = Review(self.repo, self.pr, config)
review.publish_status(1)
assert self.repo.create_status.called, 'Create status not called'
self.repo.create_status.assert_called_with(
self.pr.head,
'failure',
'Lint errors found, see pull request comments.')
        assert not self.pr.create_comment.called, 'Comment should not be created'
        assert not self.pr.add_label.called, 'Label should not be added'
def test_publish_problems_remove_ok_label(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
sha = 'abc123'
config = {'OK_LABEL': 'No lint'}
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish_problems(problems, sha)
assert self.pr.remove_label.called, 'Label should be removed'
assert self.pr.create_review_comment.called, 'Comments should be added'
eq_(2, self.pr.create_review_comment.call_count)
self.pr.remove_label.assert_called_with(config['OK_LABEL'])
assert_review_comments_created(
self.pr.create_review_comment.call_args_list,
errors,
sha)
def test_publish_empty_comment(self):
problems = Problems(changes=[])
review = Review(self.repo, self.pr)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'Should create a comment'
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.pr.create_comment.assert_called_with(msg)
def test_publish_empty_comment_add_ok_label(self):
problems = Problems(changes=[])
config = {'OK_LABEL': 'No lint'}
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'ok comment should be added.'
assert self.pr.remove_label.called, 'label should be removed.'
self.pr.remove_label.assert_called_with(config['OK_LABEL'])
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.pr.create_comment.assert_called_with(msg)
def test_publish_empty_comment_with_comment_status(self):
config = {
'PULLREQUEST_STATUS': True,
}
problems = Problems(changes=[])
review = Review(self.repo, self.pr, config)
sha = 'abc123'
review.publish(problems, sha)
assert self.pr.create_comment.called, 'Should create a comment'
msg = ('Could not review pull request. '
'It may be too large, or contain no reviewable changes.')
self.repo.create_status.assert_called_with(
self.pr.head,
'error',
msg)
self.pr.create_comment.assert_called_with(msg)
def test_publish_comment_threshold_checks(self):
fixture = load_fixture('comments_current.json')
self.pr.review_comments.return_value = map(
lambda f: GhIssueComment(f),
json.loads(fixture))
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
problems.set_changes([1])
sha = 'abc123'
review = Review(self.repo, self.pr)
review.publish_summary = Mock()
review.publish(problems, sha, 1)
assert review.publish_summary.called, 'Should have been called.'
def test_publish_summary(self):
problems = Problems()
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something bad'),
)
problems.add_many(errors)
problems.set_changes([1])
review = Review(self.repo, self.pr)
review.publish_summary(problems)
assert self.pr.create_comment.called
eq_(1, self.pr.create_comment.call_count)
msg = """There are 2 errors:
* Console/Command/Task/AssetBuildTask.php, line 117 - Something bad
* Console/Command/Task/AssetBuildTask.php, line 119 - Something bad
"""
self.pr.create_comment.assert_called_with(msg)
class TestProblems(TestCase):
two_files_json = load_fixture('two_file_pull_request.json')
# Block offset so lines don't match offsets
block_offset = load_fixture('pull_request_line_offset.json')
def setUp(self):
self.problems = Problems()
def test_add(self):
self.problems.add('file.py', 10, 'Not good')
for item in self.problems:
            print(item)
eq_(1, len(self.problems))
self.problems.add('file.py', 11, 'Not good')
eq_(2, len(self.problems))
eq_(2, len(self.problems.all()))
eq_(2, len(self.problems.all('file.py')))
eq_(0, len(self.problems.all('not there')))
def test_add__duplicate_is_ignored(self):
self.problems.add('file.py', 10, 'Not good')
eq_(1, len(self.problems))
self.problems.add('file.py', 10, 'Not good')
eq_(1, len(self.problems))
def test_add__same_line_combines(self):
self.problems.add('file.py', 10, 'Tabs bad')
self.problems.add('file.py', 10, 'Spaces are good')
eq_(1, len(self.problems))
result = self.problems.all()
expected = 'Tabs bad\nSpaces are good'
eq_(expected, result[0].body)
def test_add__same_line_ignores_duplicates(self):
self.problems.add('file.py', 10, 'Tabs bad')
self.problems.add('file.py', 10, 'Tabs bad')
eq_(1, len(self.problems))
result = self.problems.all()
expected = 'Tabs bad'
eq_(expected, result[0].body)
def test_add__with_base_path(self):
problems = Problems('/some/path/')
problems.add('/some/path/file.py', 10, 'Not good')
eq_([], problems.all('/some/path/file.py'))
eq_(1, len(problems.all('file.py')))
eq_(1, len(problems))
def test_add__with_base_path_no_trailing_slash(self):
problems = Problems('/some/path')
problems.add('/some/path/file.py', 10, 'Not good')
eq_([], problems.all('/some/path/file.py'))
eq_(1, len(problems.all('file.py')))
eq_(1, len(problems))
def test_add__with_diff_containing_block_offset(self):
res = map(lambda f: PullFile(f),
json.loads(self.block_offset))
changes = DiffCollection(res)
problems = Problems(changes=changes)
line_num = 32
problems.add('somefile.py', line_num, 'Not good')
eq_(1, len(problems))
result = problems.all('somefile.py')
eq_(changes.line_position('somefile.py', line_num), result[0].position,
'Offset should be transformed to match value in changes')
def test_add_many(self):
errors = [
('some/file.py', 10, 'Thing is wrong'),
('some/file.py', 12, 'Not good'),
]
self.problems.add_many(errors)
result = self.problems.all('some/file.py')
eq_(2, len(result))
expected = [
Comment(errors[0][0], errors[0][1], errors[0][1], errors[0][2]),
Comment(errors[1][0], errors[1][1], errors[1][1], errors[1][2]),
]
eq_(expected, result)
def test_limit_to_changes__remove_problems(self):
res = map(lambda f: PullFile(f),
json.loads(self.two_files_json))
changes = DiffCollection(res)
# Setup some fake problems.
filename_1 = 'Console/Command/Task/AssetBuildTask.php'
errors = (
(None, None, 'This is a general comment'),
(filename_1, 117, 'Something bad'),
(filename_1, 119, 'Something else bad'),
(filename_1, 130, 'Filtered out, as line is not changed'),
)
self.problems.add_many(errors)
filename_2 = 'Test/test_files/View/Parse/single.ctp'
errors = (
(filename_2, 2, 'Filtered out'),
(filename_2, 3, 'Something bad'),
(filename_2, 7, 'Filtered out'),
)
self.problems.add_many(errors)
self.problems.set_changes(changes)
self.problems.limit_to_changes()
result = self.problems.all(filename_1)
eq_(2, len(result))
        expected = [
            Comment(filename_1, 117, 117, 'Something bad'),
            Comment(filename_1, 119, 119, 'Something else bad'),
        ]
        eq_(sorted(result), sorted(expected))
result = self.problems.all(filename_2)
eq_(1, len(result))
expected = [
Comment(filename_2, 3, 3, 'Something bad')
]
eq_(result, expected)
def test_has_changes(self):
problems = Problems(changes=None)
self.assertFalse(problems.has_changes())
problems = Problems(changes=[1])
assert problems.has_changes()
def assert_review_comments_created(call_args, errors, sha):
"""
Check that the review comments match the error list.
"""
eq_(len(call_args), len(errors), 'Errors and comment counts are off')
for i, err in enumerate(errors):
expected = call(
commit_id=sha,
path=err[0],
position=err[1],
body=err[2])
eq_(expected, call_args[i])
| zoidbergwill/lint-review | tests/test_review.py | Python | mit | 15,108 | 0 |
from typing import TypeVar, Dict, Iterable, Any
T = TypeVar("T")
def foo(values: Dict[T, Iterable[Any]]):
for e in []:
values.setdefault(e, undefined)
| allotria/intellij-community | python/testData/inspections/PyTypeCheckerInspection/UnresolvedReceiverGeneric.py | Python | apache-2.0 | 165 | 0.006061 |
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Michael Hogg
# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution
import bonemapy
from distutils.core import setup
setup(
name = 'bonemapy',
version = bonemapy.__version__,
description = 'An ABAQUS plug-in to map bone properties from CT scans to 3D finite element bone/implant models',
license = 'MIT license',
keywords = ["ABAQUS", "plug-in","CT","finite","element","bone","properties","python"],
author = 'Michael Hogg',
author_email = 'michael.christopher.hogg@gmail.com',
url = "https://github.com/mhogg/bonemapy",
download_url = "https://github.com/mhogg/bonemapy/releases",
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"Environment :: Plugins",
"Intended Audience :: Healthcare Industry",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Medical Science Apps.",
"Topic :: Scientific/Engineering :: Visualization",
],
long_description = """
bonemapy is an ABAQUS plug-in that is used to extract bone density, or Hounsfield Unit (HU) values, from CT scans. The bone density can then be used to setup heterogeneous
material properties for a 3D finite element bone/implant model.
The HU values are extracted at the element integration points. Tri-linear interpolation is used to calculate the HU values at the location of the integration points.
bonemapy produces a text file containing the HU values that is formatted so that it can easily be read using ABAQUS user subroutines that are required to apply the bone properties. An
ABAQUS odb file is also created containing a fieldoutput representing HU so that the user can quickly visualise the mapped HU values.
""",
)
| mhogg/bonemapy | setup.py | Python | mit | 2,237 | 0.021904 |
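The bonemapy description above mentions tri-linear interpolation of HU values at element integration points. A minimal NumPy sketch of that interpolation scheme follows; the function name and grid conventions are ours, not bonemapy's actual implementation.

```python
import numpy as np

def trilinear_hu(volume, spacing, origin, point):
    """Trilinearly interpolate a HU volume at one physical point.

    Assumes `point` lies strictly inside the voxel grid.
    """
    # Physical coordinates -> fractional voxel indices.
    idx = (np.asarray(point, float) - np.asarray(origin, float)) / np.asarray(spacing, float)
    i, j, k = idx
    i0, j0, k0 = int(i), int(j), int(k)
    di, dj, dk = i - i0, j - j0, k - k0
    hu = 0.0
    # Weighted sum over the 8 voxels surrounding the point.
    for a in (0, 1):
        for b in (0, 1):
            for c in (0, 1):
                w = ((di if a else 1 - di) *
                     (dj if b else 1 - dj) *
                     (dk if c else 1 - dk))
                hu += w * volume[i0 + a, j0 + b, k0 + c]
    return hu

# Tiny 2x2x2 volume with unit spacing, sampled at the cell centre.
vol = np.arange(8, dtype=float).reshape(2, 2, 2)
print(trilinear_hu(vol, [1, 1, 1], [0, 0, 0], [0.5, 0.5, 0.5]))  # 3.5
```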
import logbook
import show_off_web_app.infrastructure.static_cache as static_cache
import pyramid.httpexceptions as exc
from show_off_web_app.infrastructure.supressor import suppress
import show_off_web_app.infrastructure.cookie_auth as cookie_auth
from show_off_web_app.services.account_service import AccountService
class BaseController:
def __init__(self, request):
self.request = request
self.build_cache_id = static_cache.build_cache_id
log_name = 'Ctrls/' + type(self).__name__.replace("Controller", "")
self.log = logbook.Logger(log_name)
@property
def is_logged_in(self):
return cookie_auth.get_user_id_via_auth_cookie(self.request) is not None
# noinspection PyMethodMayBeStatic
@suppress()
def redirect(self, to_url, permanent=False):
if permanent:
raise exc.HTTPMovedPermanently(to_url)
raise exc.HTTPFound(to_url)
@property
def merged_dicts(self):
data = dict()
data.update(self.request.GET)
data.update(self.request.POST)
data.update(self.request.matchdict)
return data
@property
def logged_in_user_id(self):
user_id = cookie_auth.get_user_id_via_auth_cookie(self.request)
return user_id
@property
def logged_in_user(self):
uid = self.logged_in_user_id
if not uid:
return None
return AccountService.find_account_by_id(uid)
| mikeckennedy/cookiecutter-course | src/ch8_sharing_your_template/show_off_web_app/show_off_web_app/controllers/base_controller.py | Python | gpl-2.0 | 1,456 | 0.000687 |
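A hypothetical subclass showing how the `BaseController` helpers above are meant to be used from a Pyramid view; the route name, renderer path, and query parameter are illustrative assumptions.

```python
from pyramid.view import view_config

from show_off_web_app.controllers.base_controller import BaseController

class HomeController(BaseController):
    @view_config(route_name='home', renderer='templates/home.pt')
    def index(self):
        if not self.is_logged_in:
            self.redirect('/account/signin')  # raises HTTPFound
        # GET, POST and matchdict folded into a single lookup:
        data = self.merged_dicts
        return {'user': self.logged_in_user, 'q': data.get('q', '')}
```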
#!/usr/bin/python
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------
#-- Servo class
#-- Juan Gonzalez-Gomez (obijuan). May-2013
#-----------------------------------------------------------------
#-- Controlling the position of servos from the PC
#-- The Arduino / skymega or another arduino compatible board
#-- should have the firmware FingerServer uploaded
#-----------------------------------------------------------------
import time
class IncorrectAngle(Exception):
    pass
class Servo(object):
"""Servo class. For accessing to all the Servos"""
def __init__(self, sp, dir = 0):
"""Arguments: serial port and servo number"""
self.sp = sp #-- Serial device
self.dir = dir #-- Servo number
        self._pos = 0  #-- Current pos
def __str__(self):
str1 = "Servo: {0}\n".format(self.dir)
str2 = "Serial port: {0}".format(self.sp.name)
return str1 + str2
def set_pos(self, pos):
"""Set the angular servo pos. The pos is an integer number
in the range [-90 ,90] """
#-- Check that the pos in the range [-90,90]
if not (-90 <= pos <= 90):
            raise IncorrectAngle()
#-- Convert the pos to an integer value
pos = int(round(pos))
#-- Build the frame
        frame = str(self.dir) + str(pos) + "\r"
#-- Debug
print (frame)
#-- Send the frame
self.sp.write(frame)
#-- Store the current servo pos
self._pos = pos
@property
def pos(self):
"""Read the current servo pos"""
return self._pos
@pos.setter
def pos(self, value):
"""Set the sero pos"""
self.set_pos(value)
| Obijuan/protocoder-apps | servos/python-client/Servo.py | Python | gpl-2.0 | 1,675 | 0.027463 |
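A hypothetical session with the `Servo` class above; the serial port name, baud rate, and servo number are assumptions, and the board is expected to run the FingerServer firmware mentioned in the header.

```python
import serial  # pyserial

sp = serial.Serial('/dev/ttyUSB0', 19200, timeout=1)
finger = Servo(sp, dir=1)  # servo number 1
finger.pos = 45            # sends the frame "145\r" via set_pos()
print(finger.pos)          # 45
sp.close()
```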
import zipfile
try:
import zlib
COMPRESSION = zipfile.ZIP_DEFLATED
except ImportError:
COMPRESSION = zipfile.ZIP_STORED
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.core.files.uploadedfile import SimpleUploadedFile
class NotACompressedFile(Exception):
pass
class CompressedFile(object):
def __init__(self, file_input=None):
if file_input:
self._open(file_input)
else:
self._create()
def _create(self):
self.descriptor = StringIO()
self.zf = zipfile.ZipFile(self.descriptor, mode='w')
def _open(self, file_input):
try:
# Is it a file like object?
file_input.seek(0)
except AttributeError:
# If not, try open it.
self.descriptor = open(file_input, 'r+b')
else:
self.descriptor = file_input
try:
test = zipfile.ZipFile(self.descriptor, mode='r')
except zipfile.BadZipfile:
raise NotACompressedFile
else:
test.close()
self.descriptor.seek(0)
self.zf = zipfile.ZipFile(self.descriptor, mode='a')
def add_file(self, file_input, arcname=None):
try:
# Is it a file like object?
file_input.seek(0)
except AttributeError:
# If not, keep it
self.zf.write(file_input, arcname=arcname, compress_type=COMPRESSION)
else:
self.zf.writestr(arcname, file_input.read())
def contents(self):
return [filename for filename in self.zf.namelist() if not filename.endswith('/')]
def get_content(self, filename):
return self.zf.read(filename)
def write(self, filename=None):
# fix for Linux zip files read in Windows
for file in self.zf.filelist:
file.create_system = 0
self.descriptor.seek(0)
if filename:
            descriptor = open(filename, 'wb')
descriptor.write(self.descriptor.read())
else:
return self.descriptor
def as_file(self, filename):
return SimpleUploadedFile(name=filename, content=self.write().read())
def close(self):
self.zf.close()
| rosarior/rua | rua/apps/common/compressed_files.py | Python | gpl-3.0 | 2,263 | 0.001326 |
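A minimal usage sketch for `CompressedFile` (Python 2, matching the module above; the output path is arbitrary). Since the zip central directory is only written on `close()`, the archive is closed before dumping it to disk.

```python
from StringIO import StringIO

cf = CompressedFile()  # new in-memory archive
cf.add_file(StringIO('hello'), arcname='greeting.txt')
print(cf.contents())   # ['greeting.txt']
cf.close()             # finalises the zip structure
cf.write('/tmp/example.zip')
```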
import frappe
def execute():
frappe.reload_doc("core", "doctype", "todo")
try:
frappe.db.sql("""update tabToDo set status = if(ifnull(checked,0)=0, 'Open', 'Closed')""")
    except Exception:
pass
| geo-poland/frappe | frappe/patches/v4_0/set_todo_checked_as_closed.py | Python | mit | 191 | 0.041885 |
__version__ = "2.0"
| sbidoul/pip | tests/data/src/simplewheel-2.0/simplewheel/__init__.py | Python | mit | 20 | 0 |
#!/usr/bin/env python
# Calder Phillips-Grafflin - WPI/ARC Lab
import rospy
import math
import tf
from tf.transformations import *
from visualization_msgs.msg import *
from geometry_msgs.msg import *
class RobotMarkerPublisher:
def __init__(self, root_frame, rate):
self.root_frame = root_frame
self.rate = rate
        self.marker_pub = rospy.Publisher("robot_markers_debug", Marker, queue_size=10)
rate = rospy.Rate(self.rate)
while not rospy.is_shutdown():
self.display_table()
rate.sleep()
def display_table(self):
# Make table top
marker_msg = Marker()
marker_msg.type = Marker.CUBE_LIST
marker_msg.ns = "robot"
marker_msg.id = 1
marker_msg.action = Marker.ADD
marker_msg.lifetime = rospy.Duration(0.0)
marker_msg.header.stamp = rospy.Time.now()
marker_msg.header.frame_id = self.root_frame
marker_msg.scale.x = 0.04
marker_msg.scale.y = 0.04
marker_msg.scale.z = 0.02
marker_msg.color.a = 1.0
marker_msg.color.r = 1.0
marker_msg.color.b = 0.0
marker_msg.color.g = 1.0
marker_msg.pose.position.x = 0.0
marker_msg.pose.position.y = 0.0
marker_msg.pose.position.z = 0.0
marker_msg.pose.orientation.x = 0.0
marker_msg.pose.orientation.y = 0.0
marker_msg.pose.orientation.z = 0.0
marker_msg.pose.orientation.w = 1.0
# Make the individual points
p1 = Point()
p1.x = 0.0025
p1.y = 0.0025
p1.z = -0.01
p2 = Point()
p2.x = p1.x
p2.y = p1.y + 0.04
p2.z = p1.z
p3 = Point()
p3.x = p1.x - 0.04
p3.y = p1.y
p3.z = p1.z
marker_msg.points = [p1, p2, p3]
marker_msg.colors = [marker_msg.color, marker_msg.color, marker_msg.color]
self.marker_pub.publish(marker_msg)
if __name__ == "__main__":
rospy.init_node("robot_marker_debug_publisher")
rospy.loginfo("Starting the robot marker broadcaster...")
#Get the parameters from the server
root_frame = rospy.get_param("~root_frame", "test_robot_frame")
rate = rospy.get_param("~rate", 10.0)
RobotMarkerPublisher(root_frame, rate)
| WPI-ARC/deformable_planners | deformable_astar/src/deformable_astar/robot_marker_debug_pub.py | Python | bsd-2-clause | 2,259 | 0.001771 |
# -*- coding: utf-8 -*-
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': 'Summary',
'summary': 'Summary Module used by CLVsol Solutions.',
'version': '12.0.4.0',
'author': 'Carlos Eduardo Vercelino - CLVsol',
'category': 'CLVsol Solutions',
'license': 'AGPL-3',
'website': 'https://github.com/CLVsol',
'images': [],
'depends': [
'clv_base',
'clv_global_log',
],
'data': [
'security/summary_security.xml',
'security/ir.model.access.csv',
'views/summary_template_view.xml',
'views/summary_view.xml',
'views/summary_log_view.xml',
'views/file_system_view.xml',
],
    'demo': [],
    'test': [],
    'init_xml': [],
    'update_xml': [],
'installable': True,
'application': False,
'active': False,
'css': [],
}
| CLVsol/clvsol_odoo_addons | clv_summary/__manifest__.py | Python | agpl-3.0 | 940 | 0 |
# coding=utf-8
# Copyright 2022 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Input utils module for MinDiff Keras integration.
This module provides default implementations for packing and unpacking min_diff
data into or from an input dataset.
"""
import collections
import tensorflow as tf
from tensorflow_model_remediation.min_diff.keras.utils import structure_utils
# Convenience class to help with packing and unpacking.
class MinDiffPackedInputs(
collections.namedtuple("MinDiffPackedInputs",
["original_inputs", "min_diff_data"])):
"""Named tuple containing both `original_inputs` and `min_diff_data`.
`MinDiffModel` default implementations and `utils.(un)pack_*` functions use
this class to pack and unpack the separate components required for MinDiff
and regular training.
Attributes:
original_inputs: Batch of inputs that would originally (i.e. without
applying MinDiff) be passed in to a model's `Model.call` method. This
corresponds to the `x` component described in `tf.keras.Model.fit`.
min_diff_data: Batch of supplemental data to be used to calculate the
`min_diff_loss`.
"""
def pack_min_diff_data(original_dataset: tf.data.Dataset,
sensitive_group_dataset=None,
nonsensitive_group_dataset=None,
min_diff_dataset=None) -> tf.data.Dataset:
# pyformat: disable
"""Packs `min_diff_data` with the `x` component of the original dataset.
Arguments:
original_dataset: `tf.data.Dataset` that was used before applying min
diff. The output should conform to the format used in
`tf.keras.Model.fit`.
sensitive_group_dataset: `tf.data.Dataset` or valid MinDiff structure
(unnested dict) of `tf.data.Dataset`s containing only examples that
belong to the sensitive group.
This must be passed in if `nonsensitive_group_dataset` is passed in.
Furthermore, the `x` component for every batch should have the same
structure as that of the `original_dataset` batches' `x` components.
nonsensitive_group_dataset: `tf.data.Dataset` or valid MinDiff structure
(unnested dict) of `tf.data.Dataset`s containing only examples that do
**not** belong to the sensitive group.
This must be passed in if `sensitive_group_dataset` is passed in.
Furthermore, the `x` component for every batch should have the same
structure as that of the `original_dataset` batches' `x` components.
min_diff_dataset: `tf.data.Dataset` or valid MinDiff structure (unnested
dict) of `tf.data.Dataset`s containing only examples to be used to
calculate the `min_diff_loss`.
      This should only be set if neither `sensitive_group_dataset` nor
`nonsensitive_group_dataset` is passed in.
Furthermore, the `x` component for every batch should have the same
structure as that of the `original_dataset` batches' `x` components.
This function should be used to create the dataset that will be passed to
`min_diff.keras.MinDiffModel` during training and, optionally, during
evaluation.
The inputs should either have both `sensitive_group_dataset` and
`nonsensitive_group_dataset` passed in and `min_diff_dataset` left unset or
vice versa. In the case of the former, `min_diff_data` will be built using
`utils.build_min_diff_dataset`.
Warning: All input datasets should be batched **before** being passed in.
Each input dataset must output a tuple in the format used in
`tf.keras.Model.fit`. Specifically the output must be a tuple of
length 1, 2 or 3 in the form `(x, y, sample_weight)`.
This output will be parsed internally in the following way:
```
batch = ... # Batch from any one of the input datasets.
x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(batch)
```
Every batch from the returned `tf.data.Dataset` will contain one batch from
each of the input datasets. Each returned batch will be a tuple of
`(packed_inputs, original_y, original_sample_weight)` matching the length of
`original_dataset` batches where:
- `packed_inputs`: is an instance of `utils.MinDiffPackedInputs` containing:
- `original_inputs`: `x` component taken directly from the
`original_dataset` batch.
- `min_diff_data`: batch of data formed from `sensitive_group_dataset` and
`nonsensitive_group_dataset` (as described in
`utils.build_min_diff_dataset`) or taken directly from `min_diff_dataset`.
- `original_y`: is the `y` component taken directly from the
`original_dataset` batch.
- `original_sample_weight`: is the `sample_weight` component taken directly
from the `original_dataset` batch.
`min_diff_data` will be used in `min_diff.keras.MinDiffModel` when calculating
the `min_diff_loss`. It is a tuple or structure (matching the structure of the
inputs) of `(min_diff_x, min_diff_membership, min_diff_sample_weight)`.
Caution: If you are passing in `min_diff_dataset` make sure that each
`min_diff_data` batch contains about the same number of sensitive and
nonsensitive examples as indicated by `min_diff_membership` (when passing in
`sensitive_group_dataset` and `nonsensitive_group_dataset` this is determined
by their batch sizes).
Returns:
A `tf.data.Dataset` whose output is a tuple of (`packed_inputs`,
`original_y`, `original_sample_weight`) matching the output length
of `original_dataset`.
"""
# pyformat: enable
# Either sensitive_group_dataset and nonsensitive_group_dataset are both set
# and min_diff_dataset is not or vice versa.
min_diff_dataset_present = min_diff_dataset is not None
sensitive_dataset_present = sensitive_group_dataset is not None
nonsensitive_dataset_present = nonsensitive_group_dataset is not None
# Case where min_diff_dataset is set and the others are not.
set_to_use_min_diff_dataset = (
min_diff_dataset_present and
not (sensitive_dataset_present or nonsensitive_dataset_present))
# Case where sensitive_group_dataset and nonsensitive_group_dataset are both
# set and min_diff_dataset is not.
set_to_construct_min_diff_dataset = ((sensitive_dataset_present and
nonsensitive_dataset_present) and
not min_diff_dataset_present)
if not (set_to_use_min_diff_dataset or set_to_construct_min_diff_dataset):
raise ValueError(
"Invalid arguments: You must either pass in only the `min_diff_dataset`"
" (and leave `sensitive_group_dataset` and `nonsensitive_group_dataset`"
" as None) or set both `sensitive_group_dataset` and "
"`nonsensitive_group_dataset` (and leave `min_diff_dataset` as None), "
"given: \n"
"\n`sensitive_group_dataset`: {}"
"\n`nonsensitive_group_dataset`: {}"
"\n`min_diff_dataset`: {}".format(sensitive_group_dataset,
nonsensitive_group_dataset,
min_diff_dataset))
# First construct the min_diff_dataset if need be.
if set_to_construct_min_diff_dataset:
min_diff_dataset = build_min_diff_dataset(sensitive_group_dataset,
nonsensitive_group_dataset)
else:
# validate min_diff_dataset since it was passed in.
structure_utils.validate_min_diff_structure(
min_diff_dataset,
struct_name="min_diff_dataset",
element_type=tf.data.Dataset)
dataset = tf.data.Dataset.zip((original_dataset, min_diff_dataset))
def _map_fn(original_batch, min_diff_batch):
# Unpack original batch.
original_x, original_y, original_sample_weight = (
tf.keras.utils.unpack_x_y_sample_weight(original_batch))
# Assert that all min_diff_xs have the same structure as original_x.
# TODO: Should we assert that Tensor shapes are the same (other
# than number of examples).
min_diff_xs = [
tf.keras.utils.unpack_x_y_sample_weight(batch)[0] # First element is x.
for batch in structure_utils._flatten_min_diff_structure(min_diff_batch)
]
for min_diff_x in min_diff_xs:
try:
tf.nest.assert_same_structure(original_x, min_diff_x)
except Exception as e:
raise type(e)(
"The x component structure of (one of) the `min_diff_dataset`(s) "
"does not match that of the original x structure (original shown "
"first): {}".format(e))
# pack min_diff_batch with original_x
return _pack_as_original(
original_batch,
MinDiffPackedInputs(
original_inputs=original_x, min_diff_data=min_diff_batch),
original_y, original_sample_weight)
# Reshape dataset output.
return dataset.map(_map_fn)
def _pack_as_original(original_batch, x, y, w):
"""Packs x, y, w while conserving the shape of the original batch."""
if not isinstance(original_batch, tuple):
return x
length = len(original_batch)
return (x, y, w)[:length]
def _tensor_concat(t1, t2):
"""Concatenates (sparse or dense) tensors."""
if isinstance(t1, tf.SparseTensor):
# Ensure SparseTensors have the same non-batch dim before concatenating.
max_shape = tf.math.maximum(t1.dense_shape[1], t2.dense_shape[1])
t1 = tf.sparse.reset_shape(t1, [t1.dense_shape[0], max_shape])
t2 = tf.sparse.reset_shape(t2, [t2.dense_shape[0], max_shape])
return tf.sparse.concat(axis=0, sp_inputs=[t1, t2])
else:
return tf.concat([t1, t2], axis=0)
def build_min_diff_dataset(sensitive_group_dataset,
nonsensitive_group_dataset) -> tf.data.Dataset:
# pyformat: disable
"""Build MinDiff dataset from sensitive and nonsensitive datasets.
Arguments:
sensitive_group_dataset: `tf.data.Dataset` or valid MinDiff structure
(unnested dict) of `tf.data.Dataset`s containing only examples that
belong to the sensitive group.
nonsensitive_group_dataset: `tf.data.Dataset` or valid MinDiff structure
(unnested dict) of `tf.data.Dataset`s containing only examples that do
**not** belong to the sensitive group.
This function builds a `tf.data.Dataset` containing examples that are meant to
only be used when calculating a `min_diff_loss`. This resulting dataset will
need to be packed with the original dataset used for the original task of the
model which can be done by calling `utils.pack_min_diff_data`.
Warning: All input datasets should be batched **before** being passed in.
Each input dataset must output a tuple in the format used in
`tf.keras.Model.fit`. Specifically the output must be a tuple of
length 1, 2 or 3 in the form `(x, y, sample_weight)`.
This output will be parsed internally in the following way:
```
batch = ... # Batch from any of the input datasets.
x, y, sample_weight = tf.keras.utils.unpack_x_y_sample_weight(batch)
```
Note: the `y` component of input datasets will be ignored completely so it can
be set to `None` or any other arbitrary value. If `sample_weight` is not
included, it can be left out entirely.
Every batch from the returned `tf.data.Dataset` will contain one batch from
each of the input datasets. Each returned batch will be a tuple or structure
(matching the structure of the inputs) of `(min_diff_x, min_diff_membership,
min_diff_sample_weight)` where, for each pair of input datasets:
- `min_diff_x`: is formed by concatenating the `x` components of the paired
datasets. The structure of these must match. If they don't the dataset will
raise an error at the first batch.
- `min_diff_membership`: is a tensor of size `[min_diff_batch_size, 1]`
indicating which dataset each example comes from (`1.0` for
`sensitive_group_dataset` and `0.0` for `nonsensitive_group_dataset`).
- `min_diff_sample_weight`: is formed by concatenating the `sample_weight`
components of the paired datasets. If both are `None`, then this will be set
to `None`. If only one is `None`, it is replaced with a `Tensor` of ones of
the appropriate shape.
Returns:
A `tf.data.Dataset` whose output is a tuple or structure (matching the
structure of the inputs) of `(min_diff_x, min_diff_membership,
min_diff_sample_weight)`.
Raises:
ValueError: If either `sensitive_group_dataset` or
`nonsensitive_group_dataset` is not a valid MinDiff structure (unnested
dict).
ValueError: If `sensitive_group_dataset` and `nonsensitive_group_dataset` do
not have the same structure.
"""
# pyformat: enable
# validate structures.
structure_utils.validate_min_diff_structure(
sensitive_group_dataset,
struct_name="sensitive_group_dataset",
element_type=tf.data.Dataset)
structure_utils.validate_min_diff_structure(
nonsensitive_group_dataset,
struct_name="nonsensitive_group_dataset",
element_type=tf.data.Dataset)
try:
structure_utils._assert_same_min_diff_structure(sensitive_group_dataset,
nonsensitive_group_dataset)
except Exception as e:
raise type(e)("`sensitive_group_dataset` and `nonsensitive_group_dataset` "
"do not have the same structure:\n{}".format(e))
sensitive_group_dataset = tf.nest.map_structure(
lambda dataset: dataset, sensitive_group_dataset)
nonsensitive_group_dataset = tf.nest.map_structure(
lambda dataset: dataset, nonsensitive_group_dataset)
dataset = tf.data.Dataset.zip(
(sensitive_group_dataset, nonsensitive_group_dataset))
def _build_single_batch(single_sensitive_batch, single_nonsensitive_batch):
# Unpack both batches.
sensitive_x, _, sensitive_sample_weight = (
tf.keras.utils.unpack_x_y_sample_weight(single_sensitive_batch))
nonsensitive_x, _, nonsensitive_sample_weight = (
tf.keras.utils.unpack_x_y_sample_weight(single_nonsensitive_batch))
# sensitive_x and nonsensitive_x must have the same structure.
try:
tf.nest.assert_same_structure(sensitive_x, nonsensitive_x)
except Exception as e:
raise type(e)("The x component structure of (one of) the "
"`sensitive_group_dataset`(s) does not match that of the "
"(corresponding) `nonsensitive_group_dataset` x structure "
"(sensitive shown first): {}".format(e))
# Create min_diff_data.
# Merge sensitive_x and nonsensitive_x to form min_diff_x.
flat_sensitive_x = tf.nest.flatten(sensitive_x)
flat_nonsensitive_x = tf.nest.flatten(nonsensitive_x)
flat_min_diff_x = [
_tensor_concat(t1, t2)
for t1, t2 in zip(flat_sensitive_x, flat_nonsensitive_x)
]
min_diff_x = tf.nest.pack_sequence_as(sensitive_x, flat_min_diff_x)
# min_diff_membership indicates which dataset each example comes from.
sensitive_shape = [tf.shape(flat_sensitive_x[0])[0], 1]
nonsensitive_shape = [tf.shape(flat_nonsensitive_x[0])[0], 1]
min_diff_membership = tf.concat(
axis=0,
values=[
tf.ones(sensitive_shape, dtype=tf.float32),
tf.zeros(nonsensitive_shape, dtype=tf.float32)
])
# min_diff_sample_weight is the concatenation of both sample_weights.
min_diff_sample_weight = None # Default if both sample_weights are None.
if (sensitive_sample_weight is not None or
nonsensitive_sample_weight is not None):
if sensitive_sample_weight is None:
sensitive_sample_weight = tf.ones(sensitive_shape, dtype=tf.float32)
elif nonsensitive_sample_weight is None:
nonsensitive_sample_weight = tf.ones(
nonsensitive_shape, dtype=tf.float32)
min_diff_sample_weight = tf.concat(
[sensitive_sample_weight, nonsensitive_sample_weight], axis=0)
# Pack the three components and return them
return tf.keras.utils.pack_x_y_sample_weight(min_diff_x,
min_diff_membership,
min_diff_sample_weight)
def _map_fn(sensitive_batch, nonsensitive_batch):
flat_sensitive_batch = structure_utils._flatten_min_diff_structure(
sensitive_batch)
flat_nonsensitive_batch = structure_utils._flatten_min_diff_structure(
nonsensitive_batch)
flat_min_diff_data = [
_build_single_batch(single_sensitive_batch, single_nonsensitive_batch)
for single_sensitive_batch, single_nonsensitive_batch in zip(
flat_sensitive_batch, flat_nonsensitive_batch)
]
return structure_utils._pack_min_diff_sequence_as(sensitive_batch,
flat_min_diff_data)
# Reshape dataset output.
return dataset.map(_map_fn)
def unpack_original_inputs(inputs):
"""Unpacks `original_inputs` from a `utils.MinDiffPackedInputs` instance.
Arguments:
inputs: Data to be unpacked, if possible.
Returns:
`original_inputs` if `inputs` is an instance of `utils.MinDiffPackedInputs`,
otherwise `inputs` is returned directly.
"""
if not isinstance(inputs, MinDiffPackedInputs):
return inputs # Default to returning inputs directly.
return inputs.original_inputs
def unpack_min_diff_data(inputs):
"""Unpacks `min_diff_data` from a `utils.MinDiffPackedInputs` instance.
Arguments:
inputs: Data to be unpacked, if possible.
Returns:
`min_diff_data` if `inputs` is an instance of `utils.MinDiffPackedInputs`,
otherwise returns `None`.
"""
if not isinstance(inputs, MinDiffPackedInputs):
return None # Default to returning None.
return inputs.min_diff_data
| tensorflow/model-remediation | tensorflow_model_remediation/min_diff/keras/utils/input_utils.py | Python | apache-2.0 | 18,202 | 0.003846 |
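A short usage sketch for the packing utilities above, using toy batched datasets; the shapes, batch sizes, and the `min_diff.keras.utils` import path (suggested by the docstrings) are assumptions.

```python
import tensorflow as tf

from tensorflow_model_remediation.min_diff.keras import utils

# Toy batched datasets; values and shapes are illustrative only.
original = tf.data.Dataset.from_tensor_slices(
    (tf.random.normal([8, 4]), tf.ones([8, 1]))).batch(4)
sensitive = tf.data.Dataset.from_tensor_slices(
    tf.random.normal([8, 4])).batch(2)
nonsensitive = tf.data.Dataset.from_tensor_slices(
    tf.random.normal([8, 4])).batch(2)

packed = utils.pack_min_diff_data(
    original_dataset=original,
    sensitive_group_dataset=sensitive,
    nonsensitive_group_dataset=nonsensitive)

for packed_inputs, y in packed.take(1):
    x = utils.unpack_original_inputs(packed_inputs)        # shape [4, 4]
    min_diff_x, membership = utils.unpack_min_diff_data(packed_inputs)
    # min_diff_x: [4, 4] (2 sensitive then 2 nonsensitive examples);
    # membership: [4, 1], ones for sensitive rows, zeros otherwise.
```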
# -*- coding: cp1252 -*-
from compiler import *
####################################################################################################################
# Each scene prop record contains the following fields:
# 1) Scene prop id: used for referencing scene props in other files. The prefix spr_ is automatically added before each scene prop id.
# 2) Scene prop flags. See header_scene_props.py for a list of available flags
# 3) Mesh name: Name of the mesh.
# 4) Physics object name:
# 5) Triggers: Simple triggers that are associated with the scene prop
####################################################################################################################
check_item_use_trigger = (ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_use_item", ":instance_id", ":agent_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
])
check_sally_door_use_trigger_double = (ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
(agent_get_position, pos1, ":agent_id"),
(prop_instance_get_starting_position, pos2, ":instance_id"),
(scene_prop_get_slot, ":opened_or_closed", ":instance_id", scene_prop_open_or_close_slot),
(try_begin),
#out doors like castle sally door can be opened only from inside, if door coordinate is behind your coordinate. Also it can be closed from both sides.
(prop_instance_get_scene_prop_kind, ":scene_prop_id", ":instance_id"),
(assign, ":can_open_door", 0),
(try_begin),
(neg|eq, ":scene_prop_id", "spr_viking_keep_destroy_sally_door_right"),
(neg|eq, ":scene_prop_id", "spr_viking_keep_destroy_sally_door_left"),
(neg|eq, ":scene_prop_id", "spr_earth_sally_gate_right"),
(neg|eq, ":scene_prop_id", "spr_earth_sally_gate_left"),
(position_is_behind_position, pos1, pos2),
(assign, ":can_open_door", 1),
(else_try),
(this_or_next|eq, ":scene_prop_id", "spr_viking_keep_destroy_sally_door_right"),
(this_or_next|eq, ":scene_prop_id", "spr_viking_keep_destroy_sally_door_left"),
(this_or_next|eq, ":scene_prop_id", "spr_earth_sally_gate_right"),
(eq, ":scene_prop_id", "spr_earth_sally_gate_left"),
(neg|position_is_behind_position, pos1, pos2),
(assign, ":can_open_door", 1),
(try_end),
(this_or_next|eq, ":can_open_door", 1),
(eq, ":opened_or_closed", 1),
(try_begin),
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_use_item", ":instance_id", ":agent_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
(try_end),
(try_end),
])
check_sally_door_use_trigger = (ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
(agent_get_position, pos1, ":agent_id"),
(prop_instance_get_starting_position, pos2, ":instance_id"),
(scene_prop_get_slot, ":opened_or_closed", ":instance_id", scene_prop_open_or_close_slot),
(try_begin),
#out doors like castle sally door can be opened only from inside, if door coordinate is behind your coordinate. Also it can be closed from both sides.
(this_or_next|position_is_behind_position, pos1, pos2),
(eq, ":opened_or_closed", 1),
(try_begin),
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_use_item", ":instance_id", ":agent_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
(try_end),
(try_end),
])
check_castle_door_use_trigger = (ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
(agent_get_position, pos1, ":agent_id"),
(prop_instance_get_starting_position, pos2, ":instance_id"),
(scene_prop_get_slot, ":opened_or_closed", ":instance_id", scene_prop_open_or_close_slot),
(try_begin),
(ge, ":agent_id", 0),
(agent_get_team, ":agent_team", ":agent_id"),
#in doors like castle room doors can be opened from both sides, but only defenders can open these doors. Also it can be closed from both sides.
(this_or_next|eq, ":agent_team", 0),
(eq, ":opened_or_closed", 1),
(try_begin),
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_use_item", ":instance_id", ":agent_id"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
(try_end),
(try_end),
])
check_ladder_animate_trigger = (ti_on_scene_prop_is_animating,
[
(store_trigger_param_1, ":instance_id"),
(store_trigger_param_2, ":remaining_time"),
(call_script, "script_check_creating_ladder_dust_effect", ":instance_id", ":remaining_time"),
])
check_ladder_animation_finish_trigger = (ti_on_scene_prop_animation_finished,
[
(store_trigger_param_1, ":instance_id"),
(prop_instance_enable_physics, ":instance_id", 1),
])
scene_props = [
("invalid_object",0,"question_mark","0", []),
("inventory",sokf_type_container|sokf_place_at_origin,"package","bobaggage", []),
("empty", 0, "0", "0", []),
("chest_a",sokf_type_container,"chest_gothic","bochest_gothic", []),
("container_small_chest",sokf_type_container,"package","bobaggage", []),
("container_chest_b",sokf_type_container,"chest_b","bo_chest_b", []),
("container_chest_c",sokf_type_container,"chest_c","bo_chest_c", []),
("player_chest",sokf_type_container,"player_chest","bo_player_chest", []),
("locked_player_chest",0,"player_chest","bo_player_chest", []),
("light_sun",sokf_invisible,"light_sphere","0", [
(ti_on_init_scene_prop,
[
(neg|is_currently_night),
(store_trigger_param_1, ":prop_instance_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_scale, pos5, ":prop_instance_no"),
(position_get_scale_x, ":scale", pos5),
(store_time_of_day,reg(12)),
(try_begin),
(is_between,reg(12),5,20),
(store_mul, ":red", 5 * 200, ":scale"),
(store_mul, ":green", 5 * 193, ":scale"),
(store_mul, ":blue", 5 * 180, ":scale"),
(else_try),
(store_mul, ":red", 5 * 90, ":scale"),
(store_mul, ":green", 5 * 115, ":scale"),
(store_mul, ":blue", 5 * 150, ":scale"),
(try_end),
(val_div, ":red", 100),
(val_div, ":green", 100),
(val_div, ":blue", 100),
(set_current_color,":red", ":green", ":blue"),
(set_position_delta,0,0,0),
(add_point_light_to_entity, 0, 0),
]),
]),
("light",sokf_invisible,"light_sphere","0", [
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":prop_instance_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_scale, pos5, ":prop_instance_no"),
(position_get_scale_x, ":scale", pos5),
(store_mul, ":red", 3 * 200, ":scale"),
(store_mul, ":green", 3 * 145, ":scale"),
(store_mul, ":blue", 3 * 45, ":scale"),
(val_div, ":red", 100),
(val_div, ":green", 100),
(val_div, ":blue", 100),
(set_current_color,":red", ":green", ":blue"),
(set_position_delta,0,0,0),
(add_point_light_to_entity, 10, 30),
]),
]),
("light_red",sokf_invisible,"light_sphere","0", [
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":prop_instance_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_scale, pos5, ":prop_instance_no"),
(position_get_scale_x, ":scale", pos5),
(store_mul, ":red", 2 * 170, ":scale"),
(store_mul, ":green", 2 * 100, ":scale"),
(store_mul, ":blue", 2 * 30, ":scale"),
(val_div, ":red", 100),
(val_div, ":green", 100),
(val_div, ":blue", 100),
(set_current_color,":red", ":green", ":blue"),
(set_position_delta,0,0,0),
(add_point_light_to_entity, 20, 30),
]),
]),
("light_night",sokf_invisible,"light_sphere","0", [
(ti_on_init_scene_prop,
[
# (store_time_of_day,reg(12)),
# (neg|is_between,reg(12),5,20),
(is_currently_night, 0),
(store_trigger_param_1, ":prop_instance_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_scale, pos5, ":prop_instance_no"),
(position_get_scale_x, ":scale", pos5),
(store_mul, ":red", 3 * 160, ":scale"),
(store_mul, ":green", 3 * 145, ":scale"),
(store_mul, ":blue", 3 * 100, ":scale"),
(val_div, ":red", 100),
(val_div, ":green", 100),
(val_div, ":blue", 100),
(set_current_color,":red", ":green", ":blue"),
(set_position_delta,0,0,0),
(add_point_light_to_entity, 10, 30),
]),
]),
("torch",0,"torch_a","0",
[
(ti_on_init_scene_prop,
[
(set_position_delta,0,-35,48),
(particle_system_add_new, "psys_torch_fire"),
(particle_system_add_new, "psys_torch_smoke"),
(particle_system_add_new, "psys_torch_fire_sparks"),
(play_sound, "snd_torch_loop", 0),
(set_position_delta,0,-35,56),
(particle_system_add_new, "psys_fire_glow_1"),
# (particle_system_emit, "psys_fire_glow_1",9000000),
#second method
(get_trigger_object_position, pos2),
(set_position_delta,0,0,0),
(position_move_y, pos2, -35),
(position_move_z, pos2, 55),
(particle_system_burst, "psys_fire_glow_fixed", pos2, 1),
]),
]),
("torch_night",0,"torch_a","0",
[
(ti_on_init_scene_prop,
[
# (store_time_of_day,reg(12)),
# (neg|is_between,reg(12),5,20),
(is_currently_night, 0),
(set_position_delta,0,-35,48),
(particle_system_add_new, "psys_torch_fire"),
(particle_system_add_new, "psys_torch_smoke"),
(particle_system_add_new, "psys_torch_fire_sparks"),
(set_position_delta,0,-35,56),
(particle_system_add_new, "psys_fire_glow_1"),
(particle_system_emit, "psys_fire_glow_1",9000000),
(play_sound, "snd_torch_loop", 0),
]),
]),
# ("Baggage",sokf_place_at_origin|sokf_entity_body,"package","bobaggage"),
("barrier_20m",sokf_invisible|sokf_type_barrier,"barrier_20m","bo_barrier_20m", []),
("barrier_16m",sokf_invisible|sokf_type_barrier,"barrier_16m","bo_barrier_16m", []),
("barrier_8m" ,sokf_invisible|sokf_type_barrier,"barrier_8m" ,"bo_barrier_8m" , []),
("barrier_4m" ,sokf_invisible|sokf_type_barrier,"barrier_4m" ,"bo_barrier_4m" , []),
("barrier_2m" ,sokf_invisible|sokf_type_barrier,"barrier_2m" ,"bo_barrier_2m" , []),
("exit_4m" ,sokf_invisible|sokf_type_barrier_leave,"barrier_4m" ,"bo_barrier_4m" , []),
("exit_8m" ,sokf_invisible|sokf_type_barrier_leave,"barrier_8m" ,"bo_barrier_8m" , []),
("exit_16m" ,sokf_invisible|sokf_type_barrier_leave,"barrier_16m" ,"bo_barrier_16m" , []),
("ai_limiter_2m" ,sokf_invisible|sokf_type_ai_limiter,"barrier_2m" ,"bo_barrier_2m" , []),
("ai_limiter_4m" ,sokf_invisible|sokf_type_ai_limiter,"barrier_4m" ,"bo_barrier_4m" , []),
("ai_limiter_8m" ,sokf_invisible|sokf_type_ai_limiter,"barrier_8m" ,"bo_barrier_8m" , []),
("ai_limiter_16m",sokf_invisible|sokf_type_ai_limiter,"barrier_16m","bo_barrier_16m", []),
("Shield",sokf_dynamic,"0","boshield", []),
("shelves",0,"shelves","boshelves", []),
("table_tavern",0,"table_tavern","botable_tavern", []),
("table_castle_a",0,"table_castle_a","bo_table_castle_a", []),
("chair_castle_a",0,"chair_castle_a","bo_chair_castle_a", []),
("pillow_a",0,"pillow_a","bo_pillow", []),
("pillow_b",0,"pillow_b","bo_pillow", []),
("pillow_c",0,"pillow_c","0", []),
("interior_castle_g_square_keep_b",0,"interior_castle_g_square_keep_b","bo_interior_castle_g_square_keep_b", []),
("carpet_with_pillows_a",0,"carpet_with_pillows_a","bo_carpet_with_pillows", []),
("carpet_with_pillows_b",0,"carpet_with_pillows_b","bo_carpet_with_pillows", []),
("table_round_a",0,"table_round_a","bo_table_round_a", []),
("table_round_b",0,"table_round_b","bo_table_round_b", []),
("fireplace_b",0,"fireplace_b","bo_fireplace_b", []),
("fireplace_c",0,"fireplace_c","bo_fireplace_c", []),
("sofa_a",0,"sofa_a","bo_sofa", []),
("sofa_b",0,"sofa_b","bo_sofa", []),
("ewer_a",0,"ewer_a","bo_ewer_a", []),
("end_table_a",0,"end_table_a","bo_end_table_a", []),
("fake_houses_steppe_a",0,"fake_houses_steppe_a","0", []),
("fake_houses_steppe_b",0,"fake_houses_steppe_b","0", []),
("fake_houses_steppe_c",0,"fake_houses_steppe_c","0", []),
("boat_destroy",0,"boat_destroy","bo_boat_destroy", []),
("destroy_house_a",0,"destroy_house_a","bo_destroy_house_a", []),
("destroy_house_b",0,"destroy_house_b","bo_destroy_house_b", []),
("destroy_house_c",0,"destroy_house_c","bo_destroy_house_c", []),
("destroy_heap",0,"destroy_heap","bo_destroy_heap", []),
("destroy_castle_a",0,"destroy_castle_a","bo_destroy_castle_a", []),
("destroy_castle_b",0,"destroy_castle_b","bo_destroy_castle_b", []),
("destroy_castle_c",0,"destroy_castle_c","bo_destroy_castle_c", []),
("destroy_castle_d",0,"destroy_castle_d","bo_destroy_castle_d", []),
("destroy_windmill",0,"destroy_windmill","bo_destroy_windmill", []),
("destroy_tree_a",0,"destroy_tree_a","bo_destroy_tree_a", []),
("destroy_tree_b",0,"destroy_tree_b","bo_destroy_tree_b", []),
("destroy_bridge_a",0,"destroy_bridge_a","bo_destroy_bridge_a", []),
("destroy_bridge_b",0,"destroy_bridge_b","bo_destroy_bridge_b", []),
("catapult",0,"Catapult","bo_Catapult", []),
("catapult_destructible",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible,"Catapult","bo_Catapult", [
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 1600),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(prop_instance_get_position, pos1, ":instance_no"),
(particle_system_burst, "psys_dummy_smoke_big", pos1, 100),
(particle_system_burst, "psys_dummy_straw_big", pos1, 100),
(position_move_z, pos1, -500),
(position_rotate_x, pos1, 90),
(prop_instance_animate_to_position, ":instance_no", pos1, 300), #animate to 6 meters below in 6 second
(try_begin),
(eq, "$g_round_ended", 0),
(scene_prop_get_team, ":scene_prop_team_no", ":instance_no"),
(try_begin),
(eq, ":scene_prop_team_no", 0),
(assign, ":scene_prop_team_no_multiplier", -1),
(else_try),
(assign, ":scene_prop_team_no_multiplier", 1),
(try_end),
(try_begin),
(eq, "$g_number_of_targets_destroyed", 0),
(store_mul, ":target_no_mul_scene_prop_team", ":scene_prop_team_no_multiplier", 1), #1 means destroyed object is a catapult
#for only server itself-----------------------------------------------------------------------------------------------
(call_script, "script_show_multiplayer_message", multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
#for only server itself-----------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"),
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_show_multiplayer_message, multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
(try_end),
(val_add, "$g_number_of_targets_destroyed", 1),
(else_try),
(store_mul, ":target_no_mul_scene_prop_team", ":scene_prop_team_no_multiplier", 9), #9 means attackers destroyed all targets
                 #show the message on the server itself---------------------------------------------------------------------------
                 (call_script, "script_show_multiplayer_message", multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
                 #-----------------------------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"),
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_show_multiplayer_message, multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
(try_end),
(val_add, "$g_number_of_targets_destroyed", 1),
(try_end),
(try_end),
#giving gold for destroying target (for catapult)
#step-1 calculating total damage given to that scene prop
(assign, ":total_damage_given", 0),
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 0, ":num_players"),
(player_is_active, ":player_no"),
(try_begin),
(eq, "spr_catapult_destructible", "$g_destructible_target_1"),
(player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_1),
(else_try),
(player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_2),
(try_end),
(val_add, ":total_damage_given", ":damage_given"),
(try_end),
           #step-2 share 1000 gold (or 50 * number of active players when fewer than 20 are active) among the players, in proportion to the damage each dealt
(assign, ":destroy_money_addition", 0),
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 0, ":num_players"),
(player_is_active, ":player_no"),
(val_add, ":destroy_money_addition", 50),
(try_end),
(try_begin),
(ge, ":destroy_money_addition", multi_destroy_target_money_add),
(assign, ":destroy_money_addition", multi_destroy_target_money_add),
(try_end),
(val_mul, ":destroy_money_addition", "$g_multiplayer_battle_earnings_multiplier"),
(val_div, ":destroy_money_addition", 100),
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 0, ":num_players"),
(player_is_active, ":player_no"),
(try_begin),
(eq, "spr_catapult_destructible", "$g_destructible_target_1"),
(player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_1),
(else_try),
(player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_2),
(try_end),
(player_get_gold, ":player_gold", ":player_no"), #give money to player which helped flag to be owned by new_flag_owner team
(val_mul, ":damage_given", ":destroy_money_addition"),
(try_begin),
(ge, ":total_damage_given", ":damage_given"),
(gt, ":damage_given", 0),
(store_div, ":gold_earned", ":damage_given", ":total_damage_given"),
(else_try),
(assign, ":gold_earned", 0),
(try_end),
(val_add, ":player_gold", ":gold_earned"),
(player_set_gold, ":player_no", ":player_gold", multi_max_gold_that_can_be_stored),
(try_end),
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(set_fixed_point_multiplier, 1),
(position_get_x, ":attacker_agent_id", pos2),
(try_begin),
(ge, ":attacker_agent_id", 0),
(agent_is_alive, ":attacker_agent_id"),
(agent_is_human, ":attacker_agent_id"),
(neg|agent_is_non_player, ":attacker_agent_id"),
(agent_get_player_id, ":attacker_player_id", ":attacker_agent_id"),
(ge, ":attacker_player_id", 0),
(player_is_active, ":attacker_player_id"),
(try_begin),
(eq, "spr_catapult_destructible", "$g_destructible_target_1"),
(player_get_slot, ":damage_given", ":attacker_player_id", slot_player_damage_given_to_target_1),
(val_add, ":damage_given", ":damage"),
(player_set_slot, ":attacker_player_id", slot_player_damage_given_to_target_1, ":damage_given"),
(else_try),
(player_get_slot, ":damage_given", ":attacker_player_id", slot_player_damage_given_to_target_2),
(val_add, ":damage_given", ":damage"),
(player_set_slot, ":attacker_player_id", slot_player_damage_given_to_target_2, ":damage_given"),
(try_end),
(try_end),
(try_end),
]),
]),
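#A plain-arithmetic sketch of the payout implemented above (illustrative names,
#not module-system identifiers). The pot is
#  pot = min(50 * active_players, multi_destroy_target_money_add)
#        * $g_multiplayer_battle_earnings_multiplier / 100
#and each player then earns damage_i * pot // total_damage (integer division).
#For example, with 12 active players, a 1000-gold cap, and a multiplier of 100:
#  pot = min(50 * 12, 1000) = 600
#  a player who dealt 800 of 1600 total damage earns 800 * 600 // 1600 = 300.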
("broom",0,"broom","0", []),
("garlic",0,"garlic","0", []),
("garlic_b",0,"garlic_b","0", []),
("destroy_a",0,"destroy_a","0", []),
("destroy_b",0,"destroy_b","0", []),
("bridge_wooden",0,"bridge_wooden","bo_bridge_wooden", []),
("bridge_wooden_snowy",0,"bridge_wooden_snowy","bo_bridge_wooden", []),
("grave_a",0,"grave_a","bo_grave_a", []),
("village_house_e",0,"village_house_e","bo_village_house_e", []),
("village_house_f",0,"village_house_f","bo_village_house_f", []),
("village_house_g",0,"village_house_g","bo_village_house_g", []),
("village_house_h",0,"village_house_h","bo_village_house_h", []),
("village_house_i",0,"village_house_i","bo_village_house_i", []),
("village_house_j",0,"village_house_j","bo_village_house_j", []),
("village_wall_a",0,"village_wall_a","bo_village_wall_a", []),
("village_wall_b",0,"village_wall_b","bo_village_wall_b", []),
("village_snowy_house_a",0,"village_snowy_house_a","bo_village_snowy_house_a", []),
("village_snowy_house_b",0,"village_snowy_house_b","bo_village_snowy_house_b", []),
("village_snowy_house_c",0,"village_snowy_house_c","bo_village_snowy_house_c", []),
("village_snowy_house_d",0,"village_snowy_house_d","bo_village_snowy_house_d", []),
("village_snowy_house_e",0,"village_snowy_house_e","bo_village_snowy_house_e", []),
("village_snowy_house_f",0,"village_snowy_house_f","bo_village_snowy_house_f", []),
("town_house_steppe_a",0,"town_house_steppe_a","bo_town_house_steppe_a", []),
("town_house_steppe_b",0,"town_house_steppe_b","bo_town_house_steppe_b", []),
("town_house_steppe_c",0,"town_house_steppe_c","bo_town_house_steppe_c", []),
("town_house_steppe_d",0,"town_house_steppe_d","bo_town_house_steppe_d", []),
("town_house_steppe_e",0,"town_house_steppe_e","bo_town_house_steppe_e", []),
("town_house_steppe_f",0,"town_house_steppe_f","bo_town_house_steppe_f", []),
("town_house_steppe_g",0,"town_house_steppe_g","bo_town_house_steppe_g", []),
("town_house_steppe_h",0,"town_house_steppe_h","bo_town_house_steppe_h", []),
("town_house_steppe_i",0,"town_house_steppe_i","bo_town_house_steppe_i", []),
("carpet_a",0,"carpet_a","0", []),
("carpet_b",0,"carpet_b","0", []),
("carpet_c",0,"carpet_c","0", []),
("carpet_d",0,"carpet_d","0", []),
("carpet_e",0,"carpet_e","0", []),
("carpet_f",0,"carpet_f","0", []),
("awning_a",0,"awning_a","bo_awning", []),
("awning_b",0,"awning_b","bo_awning", []),
("awning_c",0,"awning_c","bo_awning", []),
("awning_long",0,"awning_long","bo_awning_long", []),
("awning_long_b",0,"awning_long_b","bo_awning_long", []),
("awning_d",0,"awning_d","bo_awning_d", []),
("ship",0,"ship","bo_ship", []),
("ship_b",0,"ship_b","bo_ship_b", []),
("ship_c",0,"ship_c","bo_ship_c", []),
("ship_d",0,"ship_d","bo_ship_d", []),
("snowy_barrel_a",0,"snowy_barrel_a","bo_snowy_barrel_a", []),
("snowy_fence",0,"snowy_fence","bo_snowy_fence", []),
("snowy_wood_heap",0,"snowy_wood_heap","bo_snowy_wood_heap", []),
("village_snowy_stable_a",0,"village_snowy_stable_a","bo_village_snowy_stable_a", []),
("village_straw_house_a",0,"village_straw_house_a","bo_village_straw_house_a", []),
("village_stable_a",0,"village_stable_a","bo_village_stable_a", []),
("village_shed_a",0,"village_shed_a","bo_village_shed_a", []),
("village_shed_b",0,"village_shed_b","bo_village_shed_b", []),
("dungeon_door_cell_a",0,"dungeon_door_cell_a","bo_dungeon_door_cell_a", []),
("dungeon_door_cell_b",0,"dungeon_door_cell_b","bo_dungeon_door_cell_b", []),
("dungeon_door_entry_a",0,"dungeon_door_entry_a","bo_dungeon_door_entry_a", []),
("dungeon_door_entry_b",0,"dungeon_door_entry_b","bo_dungeon_door_entry_a", []),
("dungeon_door_entry_c",0,"dungeon_door_entry_c","bo_dungeon_door_entry_a", []),
("dungeon_door_direction_a",0,"dungeon_door_direction_a","bo_dungeon_door_direction_a", []),
("dungeon_door_direction_b",0,"dungeon_door_direction_b","bo_dungeon_door_direction_a", []),
("dungeon_door_stairs_a",0,"dungeon_door_stairs_a","bo_dungeon_door_stairs_a", []),
("dungeon_door_stairs_b",0,"dungeon_door_stairs_b","bo_dungeon_door_stairs_a", []),
("dungeon_bed_a",0,"dungeon_bed_a","0", []),
("dungeon_bed_b",0,"dungeon_bed_b","bo_dungeon_bed_b", []),
("torture_tool_a",0,"torture_tool_a","bo_torture_tool_a", []),
("torture_tool_b",0,"torture_tool_b","0", []),
("torture_tool_c",0,"torture_tool_c","bo_torture_tool_c", []),
("skeleton_head",0,"skeleton_head","0", []),
("skeleton_bone",0,"skeleton_bone","0", []),
("skeleton_a",0,"skeleton_a","bo_skeleton_a", []),
("dungeon_stairs_a",sokf_type_ladder,"dungeon_stairs_a","bo_dungeon_stairs_a", []),
("dungeon_stairs_b",sokf_type_ladder,"dungeon_stairs_b","bo_dungeon_stairs_a", []),
("dungeon_torture_room_a",0,"dungeon_torture_room_a","bo_dungeon_torture_room_a", []),
("dungeon_entry_a",0,"dungeon_entry_a","bo_dungeon_entry_a", []),
("dungeon_entry_b",0,"dungeon_entry_b","bo_dungeon_entry_b", []),
("dungeon_entry_c",0,"dungeon_entry_c","bo_dungeon_entry_c", []),
("dungeon_cell_a",0,"dungeon_cell_a","bo_dungeon_cell_a", []),
("dungeon_cell_b",0,"dungeon_cell_b","bo_dungeon_cell_b", []),
("dungeon_cell_c",0,"dungeon_cell_c","bo_dungeon_cell_c", []),
("dungeon_corridor_a",0,"dungeon_corridor_a","bo_dungeon_corridor_a", []),
("dungeon_corridor_b",0,"dungeon_corridor_b","bo_dungeon_corridor_b", []),
("dungeon_corridor_c",0,"dungeon_corridor_c","bo_dungeon_corridor_b", []),
("dungeon_corridor_d",0,"dungeon_corridor_d","bo_dungeon_corridor_b", []),
("dungeon_direction_a",0,"dungeon_direction_a","bo_dungeon_direction_a", []),
("dungeon_direction_b",0,"dungeon_direction_b","bo_dungeon_direction_a", []),
("dungeon_room_a",0,"dungeon_room_a","bo_dungeon_room_a", []),
("dungeon_tower_stairs_a",sokf_type_ladder,"dungeon_tower_stairs_a","bo_dungeon_tower_stairs_a", []),
("dungeon_tower_cell_a",0,"dungeon_tower_cell_a","bo_dungeon_tower_cell_a", []),
("tunnel_a",0,"tunnel_a","bo_tunnel_a", []),
("tunnel_salt",0,"tunnel_salt","bo_tunnel_salt", []),
("salt_a",0,"salt_a","bo_salt_a", []),
("door_destructible",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(2),"tutorial_door_a","bo_tutorial_door_a", [
check_item_use_trigger,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 2000),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
(prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 second
(try_end),
]),
(ti_on_scene_prop_hit,
[
(play_sound, "snd_dummy_hit"),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
]),
]),
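#Descriptive note on the fall animation above: the door tips 86 degrees away
#from the attacker (the sign of :rotate_side flips when the attacker stands
#behind the prop), and pos4 probes a point beside the hinge to measure the
#drop to ground level. A third of that height is added to the angle so a door
#mounted above uneven terrain keeps rotating until it visually reaches the
#ground instead of stopping in mid-air.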
("tutorial_door_a",sokf_moveable,"tutorial_door_a","bo_tutorial_door_a", []),
("tutorial_door_b",sokf_moveable,"tutorial_door_b","bo_tutorial_door_b", []),
("tutorial_flag_yellow",sokf_moveable|sokf_face_player,"tutorial_flag_yellow","0", []),
("tutorial_flag_red",sokf_moveable|sokf_face_player,"tutorial_flag_red","0", []),
("tutorial_flag_blue",sokf_moveable|sokf_face_player,"tutorial_flag_blue","0", []),
("interior_prison_a",0,"interior_prison_a","bo_interior_prison_a", []),
("interior_prison_b",0,"interior_prison_b","bo_interior_prison_b", []),
("interior_prison_cell_a",0,"interior_prison_cell_a","bo_interior_prison_cell_a", []),
("interior_prison_d",0,"interior_prison_d","bo_interior_prison_d", []),
("arena_archery_target_a",0,"arena_archery_target_a","bo_arena_archery_target_a", []),
("archery_butt_a",0,"archery_butt","bo_archery_butt", [
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(prop_instance_get_position, pos2, ":instance_no"),
(get_player_agent_no, ":player_agent"),
(agent_get_position, pos3, ":player_agent"),
(get_distance_between_positions, ":player_distance", pos3, pos2),
(position_transform_position_to_local, pos4, pos2, pos1),
(position_set_y, pos4, 0),
(position_set_x, pos2, 0),
(position_set_y, pos2, 0),
(position_set_z, pos2, 0),
(get_distance_between_positions, ":target_distance", pos4, pos2),
(assign, ":point_earned", 43), #Calculating a point between 0-12
(val_sub, ":point_earned", ":target_distance"),
(val_mul, ":point_earned", 1299),
(val_div, ":point_earned", 4300),
(try_begin),
(lt, ":point_earned", 0),
(assign, ":point_earned", 0),
(try_end),
(val_div, ":player_distance", 91), #Converting to yards
(assign, reg60, ":point_earned"),
(assign, reg61, ":player_distance"),
(display_message, "str_archery_target_hit"),
]),
]),
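#Worked example of the scoring above (integer module-system arithmetic):
#  point_earned = max(0, (43 - target_distance) * 1299 / 4300)
#with target_distance in cm. A bullseye (0 cm) gives 43 * 1299 / 4300 = 12
#points; the score drops by roughly one point per 3.3 cm and reaches 0 at
#43 cm from the center. player_distance is measured in cm, so dividing by 91
#converts it to yards for the "str_archery_target_hit" message.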
("archery_target_with_hit_a",0,"arena_archery_target_a","bo_arena_archery_target_a", [
(ti_on_scene_prop_hit,
[
(set_fixed_point_multiplier, 100),
(store_trigger_param_1, ":instance_no"),
(position_get_x, ":attacker_agent_id", pos2),
(val_div, ":attacker_agent_id", 100),
(get_player_agent_no, ":player_agent"),
(try_begin),
(eq, ":player_agent", ":attacker_agent_id"),
(prop_instance_get_position, pos2, ":instance_no"),
(agent_get_position, pos3, ":player_agent"),
(get_distance_between_positions, ":player_distance", pos3, pos2),
(position_transform_position_to_local, pos4, pos2, pos1),
(position_set_y, pos4, 0),
(position_set_x, pos2, 0),
(position_set_y, pos2, 0),
(position_set_z, pos2, 0),
(get_distance_between_positions, ":target_distance", pos4, pos2),
(assign, ":point_earned", 43), #Calculating a point between 0-12
(val_sub, ":point_earned", ":target_distance"),
(val_mul, ":point_earned", 1299),
(val_div, ":point_earned", 4300),
(try_begin),
(lt, ":point_earned", 0),
(assign, ":point_earned", 0),
(try_end),
(assign, "$g_last_archery_point_earned", ":point_earned"),
(val_div, ":player_distance", 91), #Converting to yards
(assign, reg60, ":point_earned"),
(assign, reg61, ":player_distance"),
(display_message, "str_archery_target_hit"),
(eq, "$g_tutorial_training_ground_horseman_trainer_state", 6),
(eq, "$g_tutorial_training_ground_horseman_trainer_completed_chapters", 2),
(prop_instance_get_variation_id_2, ":var_id_2", ":instance_no"),
(val_sub, ":var_id_2", 1),
(eq, "$g_tutorial_training_ground_current_score", ":var_id_2"),
(val_add, "$g_tutorial_training_ground_current_score", 1),
(try_end),
]),
]),
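#Note on the hit trigger above: at ti_on_scene_prop_hit the engine passes the
#attacker's agent id in the x coordinate of pos2. With the fixed point
#multiplier set to 100 the raw value is scaled, hence the val_div by 100 to
#recover the id (the catapult trigger earlier sets the multiplier to 1 and
#skips the division). The tutorial check at the end compares the prop's
#variation id 2 against the current score, so the targets must be hit in order.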
("dummy_a",sokf_destructible|sokf_moveable,"arena_archery_target_b","bo_arena_archery_target_b", [
(ti_on_scene_prop_destroy,
[
(store_trigger_param_1, ":instance_no"),
(prop_instance_get_starting_position, pos1, ":instance_no"),
(get_player_agent_no, ":player_agent"),
       (agent_get_position, pos2, ":player_agent"),
       (assign, ":rotate_side", 80),
       (try_begin),
         (position_is_behind_position, pos2, pos1),
         (val_mul, ":rotate_side", -1),
       (try_end),
       (position_rotate_x, pos1, ":rotate_side"),
       (prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to pos1 in 0.7 seconds
(val_add, "$tutorial_num_total_dummies_destroyed", 1),
(play_sound, "snd_dummy_destroyed"),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(assign, reg60, ":damage"),
(val_div, ":damage", 8),
(prop_instance_get_position, pos2, ":instance_no"),
(get_player_agent_no, ":player_agent"),
(agent_get_position, pos3, ":player_agent"),
(try_begin),
(position_is_behind_position, pos3, pos2),
(val_mul, ":damage", -1),
(try_end),
(position_rotate_x, 2, ":damage"),
(display_message, "str_delivered_damage"),
(prop_instance_animate_to_position, ":instance_no", 2, 30), #animate to position 1 in 0.3 second
(play_sound, "snd_dummy_hit"),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
]),
]),
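#The practice dummy gives feedback proportional to the blow: each hit tilts it
#by :damage / 8 degrees away from the attacker, and on destruction it falls a
#full 80 degrees and increments the tutorial's destroyed-dummy counter.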
("band_a",0,"band_a","0", []),
("arena_sign",0,"arena_arms","0", []),
("castle_h_battlement_a",0,"castle_h_battlement_a","bo_castle_h_battlement_a", []),
("castle_h_battlement_b",0,"castle_h_battlement_b","bo_castle_h_battlement_b", []),
("castle_h_battlement_c",0,"castle_h_battlement_c","bo_castle_h_battlement_c", []),
("castle_h_battlement_a2",0,"castle_h_battlement_a2","bo_castle_h_battlement_a2", []),
("castle_h_battlement_b2",0,"castle_h_battlement_b2","bo_castle_h_battlement_b2", []),
("castle_h_corner_a",0,"castle_h_corner_a","bo_castle_h_corner_a", []),
("castle_h_corner_c",0,"castle_h_corner_c","bo_castle_h_corner_c", []),
("castle_h_stairs_a",sokf_type_ladder,"castle_h_stairs_a","bo_castle_h_stairs_a", []),
("castle_h_stairs_b",0,"castle_h_stairs_b","bo_castle_h_stairs_b", []),
("castle_h_gatehouse_a",0,"castle_h_gatehouse_a","bo_castle_h_gatehouse_a", []),
("castle_h_keep_a",0,"castle_h_keep_a","bo_castle_h_keep_a", []),
("castle_h_keep_b",0,"castle_h_keep_b","bo_castle_h_keep_b", []),
("castle_h_house_a",0,"castle_h_house_a","bo_castle_h_house_a", []),
("castle_h_house_b",0,"castle_h_house_b","bo_castle_h_house_b", []),
("castle_h_house_c",0,"castle_h_house_c","bo_castle_h_house_b", []),
("castle_h_battlement_barrier",0,"castle_h_battlement_barrier","bo_castle_h_battlement_barrier", []),
("full_keep_b",0,"full_keep_b","bo_full_keep_b", []),
("castle_f_keep_a",0,"castle_f_keep_a","bo_castle_f_keep_a", []),
("castle_f_battlement_a",0,"castle_f_battlement_a","bo_castle_f_battlement_a", []),
("castle_f_battlement_a_destroyed",0,"castle_f_battlement_a_destroyed","bo_castle_f_battlement_a_destroyed", []),
("castle_f_battlement_b",0,"castle_f_battlement_b","bo_castle_f_battlement_b", []),
("castle_f_battlement_c",0,"castle_f_battlement_c","bo_castle_f_battlement_c", []),
("castle_f_battlement_d",0,"castle_f_battlement_d","bo_castle_f_battlement_d", []),
("castle_f_battlement_e",0,"castle_f_battlement_e","bo_castle_f_battlement_e", []),
("castle_f_sally_port_elevation",0,"castle_f_sally_port_elevation","bo_castle_f_sally_port_elevation", []),
("castle_f_battlement_corner_a",0,"castle_f_battlement_corner_a","bo_castle_f_battlement_corner_a", []),
("castle_f_battlement_corner_b",0,"castle_f_battlement_corner_b","bo_castle_f_battlement_corner_b", []),
("castle_f_battlement_corner_c",0,"castle_f_battlement_corner_c","bo_castle_f_battlement_corner_c", []),
("castle_f_door_a",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(0),"castle_f_door_a","bo_castle_f_door_a", [
check_castle_door_use_trigger,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 1000),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
#(assign, reg0, ":z_difference"),
#(display_message, "@{!}z dif : {reg0}"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
(prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 second
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(try_end),
]),
]),
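#castle_f_door_a above repeats the door_destructible fall logic with 1000 hit
#points and the castle door use trigger; the same pattern recurs for the sally
#doors below.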
("castle_f_doors_top_a",0,"castle_f_doors_top_a","bo_castle_f_doors_top_a", []),
("castle_f_sally_door_a",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(0),"castle_f_sally_door_a","bo_castle_f_sally_door_a", [
check_sally_door_use_trigger,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 1000),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
(prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 second
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(try_end),
]),
]),
("castle_f_stairs_a",sokf_type_ladder,"castle_f_stairs_a","bo_castle_f_stairs_a", []),
("castle_f_tower_a",0,"castle_f_tower_a","bo_castle_f_tower_a", []),
("castle_f_wall_stairs_a",sokf_type_ladder,"castle_f_wall_stairs_a","bo_castle_f_wall_stairs_a", []),
("castle_f_wall_stairs_b",sokf_type_ladder,"castle_f_wall_stairs_b","bo_castle_f_wall_stairs_b", []),
("castle_f_wall_way_a",0,"castle_f_wall_way_a","bo_castle_f_wall_way_a", []),
("castle_f_wall_way_b",0,"castle_f_wall_way_b","bo_castle_f_wall_way_b", []),
("castle_f_gatehouse_a",0,"castle_f_gatehouse_a","bo_castle_f_gatehouse_a", []),
("castle_g_battlement_a",0,"castle_g_battlement_a","bo_castle_g_battlement_a", []),
("castle_g_battlement_a1",0,"castle_g_battlement_a1","bo_castle_g_battlement_a1", []),
("castle_g_battlement_c",0,"castle_g_battlement_c","bo_castle_g_battlement_c", []),
("castle_g_corner_a",0,"castle_g_corner_a","bo_castle_g_corner_a", []),
("castle_g_corner_c",0,"castle_g_corner_c","bo_castle_g_corner_c", []),
("castle_g_tower_a",sokf_type_ladder,"castle_g_tower_a","bo_castle_g_tower_a", []),
("castle_g_gate_house",0,"castle_g_gate_house","bo_castle_g_gate_house", []),
("castle_g_gate_house_door_a",0,"castle_g_gate_house_door_a","bo_castle_g_gate_house_door_a", []),
("castle_g_gate_house_door_b",0,"castle_g_gate_house_door_b","bo_castle_g_gate_house_door_b", []),
("castle_g_square_keep_a",0,"castle_g_square_keep_a","bo_castle_g_square_keep_a", []),
("castle_i_battlement_a",0,"castle_i_battlement_a","bo_castle_i_battlement_a", []),
("castle_i_battlement_a1",0,"castle_i_battlement_a1","bo_castle_i_battlement_a1", []),
("castle_i_battlement_c",0,"castle_i_battlement_c","bo_castle_i_battlement_c", []),
("castle_i_corner_a",0,"castle_i_corner_a","bo_castle_i_corner_a", []),
("castle_i_corner_c",0,"castle_i_corner_c","bo_castle_i_corner_c", []),
("castle_i_tower_a",sokf_type_ladder,"castle_i_tower_a","bo_castle_i_tower_a", []),
("castle_i_gate_house",0,"castle_i_gate_house","bo_castle_i_gate_house", []),
("castle_i_gate_house_door_a",0,"castle_i_gate_house_door_a","bo_castle_i_gate_house_door_a", []),
("castle_i_gate_house_door_b",0,"castle_i_gate_house_door_b","bo_castle_i_gate_house_door_b", []),
("castle_i_square_keep_a",0,"castle_i_square_keep_a","bo_castle_i_square_keep_a", []),
("mosque_a",0,"mosque_a","bo_mosque_a", []),
("stone_minaret_a",0,"stone_minaret_a","bo_stone_minaret_a", []),
("stone_house_a",0,"stone_house_a","bo_stone_house_a", []),
("stone_house_b",0,"stone_house_b","bo_stone_house_b", []),
("stone_house_c",0,"stone_house_c","bo_stone_house_c", []),
("stone_house_d",0,"stone_house_d","bo_stone_house_d", []),
("stone_house_e",0,"stone_house_e","bo_stone_house_e", []),
("stone_house_f",0,"stone_house_f","bo_stone_house_f", []),
("banner_pole", sokf_moveable, "banner_pole", "bo_banner_pole", []),
("custom_banner_01",0,"custom_banner_01","0",
[
(ti_on_init_scene_prop,
[
(party_get_slot, ":leader_troop", "$g_encountered_party", slot_town_lord),
(try_begin),
(ge, ":leader_troop", 0),
(cur_scene_prop_set_tableau_material, "tableau_custom_banner_default", ":leader_troop"),
(try_end),
]),
]),
("custom_banner_02",0,"custom_banner_02","0",
[
(ti_on_init_scene_prop,
[
(party_get_slot, ":leader_troop", "$g_encountered_party", slot_town_lord),
(try_begin),
(ge, ":leader_troop", 0),
(cur_scene_prop_set_tableau_material, "tableau_custom_banner_default", ":leader_troop"),
(try_end),
]),
]),
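#Both custom banner props look up the lord of the encountered party
#(slot_town_lord) and paint that troop's heraldry onto the prop via
#cur_scene_prop_set_tableau_material with the default banner tableau, so a
#fief's banners track its current owner without per-scene edits.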
("banner_a",0,"banner_a01","0", []),
("banner_b",0,"banner_a02","0", []),
("banner_c",0,"banner_a03","0", []),
("banner_d",0,"banner_a04","0", []),
("banner_e",0,"banner_a05","0", []),
("banner_f",0,"banner_a06","0", []),
("banner_g",0,"banner_a07","0", []),
("banner_h",0,"banner_a08","0", []),
("banner_i",0,"banner_a09","0", []),
("banner_j",0,"banner_a10","0", []),
("banner_k",0,"banner_a11","0", []),
("banner_l",0,"banner_a12","0", []),
("banner_m",0,"banner_a13","0", []),
("banner_n",0,"banner_a14","0", []),
("banner_o",0,"banner_f21","0", []),
("banner_p",0,"banner_a16","0", []),
("banner_q",0,"banner_a17","0", []),
("banner_r",0,"banner_a18","0", []),
("banner_s",0,"banner_a19","0", []),
("banner_t",0,"banner_a20","0", []),
("banner_u",0,"banner_a21","0", []),
("banner_ba",0,"banner_b01","0", []),
("banner_bb",0,"banner_b02","0", []),
("banner_bc",0,"banner_b03","0", []),
("banner_bd",0,"banner_b04","0", []),
("banner_be",0,"banner_b05","0", []),
("banner_bf",0,"banner_b06","0", []),
("banner_bg",0,"banner_b07","0", []),
("banner_bh",0,"banner_b08","0", []),
("banner_bi",0,"banner_b09","0", []),
("banner_bj",0,"banner_b10","0", []),
("banner_bk",0,"banner_b11","0", []),
("banner_bl",0,"banner_b12","0", []),
("banner_bm",0,"banner_b13","0", []),
("banner_bn",0,"banner_b14","0", []),
("banner_bo",0,"banner_b15","0", []),
("banner_bp",0,"banner_b16","0", []),
("banner_bq",0,"banner_b17","0", []),
("banner_br",0,"banner_b18","0", []),
("banner_bs",0,"banner_b19","0", []),
("banner_bt",0,"banner_b20","0", []),
("banner_bu",0,"banner_b21","0", []),
("banner_ca",0,"banner_c01","0", []),
("banner_cb",0,"banner_c02","0", []),
("banner_cc",0,"banner_c03","0", []),
("banner_cd",0,"banner_c04","0", []),
("banner_ce",0,"banner_c05","0", []),
("banner_cf",0,"banner_c06","0", []),
("banner_cg",0,"banner_c07","0", []),
("banner_ch",0,"banner_c08","0", []),
("banner_ci",0,"banner_c09","0", []),
("banner_cj",0,"banner_c10","0", []),
("banner_ck",0,"banner_c11","0", []),
("banner_cl",0,"banner_c12","0", []),
("banner_cm",0,"banner_c13","0", []),
("banner_cn",0,"banner_c14","0", []),
("banner_co",0,"banner_c15","0", []),
("banner_cp",0,"banner_c16","0", []),
("banner_cq",0,"banner_c17","0", []),
("banner_cr",0,"banner_c18","0", []),
("banner_cs",0,"banner_c19","0", []),
("banner_ct",0,"banner_c20","0", []),
("banner_cu",0,"banner_c21","0", []),
("banner_da",0,"banner_d01","0", []),
("banner_db",0,"banner_d02","0", []),
("banner_dc",0,"banner_d03","0", []),
("banner_dd",0,"banner_d04","0", []),
("banner_de",0,"banner_d05","0", []),
("banner_df",0,"banner_d06","0", []),
("banner_dg",0,"banner_d07","0", []),
("banner_dh",0,"banner_d08","0", []),
("banner_di",0,"banner_d09","0", []),
("banner_dj",0,"banner_d10","0", []),
("banner_dk",0,"banner_d11","0", []),
("banner_dl",0,"banner_d12","0", []),
("banner_dm",0,"banner_d13","0", []),
("banner_dn",0,"banner_d14","0", []),
("banner_do",0,"banner_d15","0", []),
("banner_dp",0,"banner_d16","0", []),
("banner_dq",0,"banner_d17","0", []),
("banner_dr",0,"banner_d18","0", []),
("banner_ds",0,"banner_d19","0", []),
("banner_dt",0,"banner_d20","0", []),
("banner_du",0,"banner_d21","0", []),
("banner_ea",0,"banner_e01","0", []),
("banner_eb",0,"banner_e02","0", []),
("banner_ec",0,"banner_e03","0", []),
("banner_ed",0,"banner_e04","0", []),
("banner_ee",0,"banner_e05","0", []),
("banner_ef",0,"banner_e06","0", []),
("banner_eg",0,"banner_e07","0", []),
("banner_eh",0,"banner_e08","0", []),
("banner_ei",0,"banner_e09","0", []),
("banner_ej",0,"banner_e10","0", []),
("banner_ek",0,"banner_e11","0", []),
("banner_el",0,"banner_e12","0", []),
("banner_em",0,"banner_e13","0", []),
("banner_en",0,"banner_e14","0", []),
("banner_eo",0,"banner_e15","0", []),
("banner_ep",0,"banner_e16","0", []),
("banner_eq",0,"banner_e17","0", []),
("banner_er",0,"banner_e18","0", []),
("banner_es",0,"banner_e19","0", []),
("banner_et",0,"banner_e20","0", []),
("banner_eu",0,"banner_e21","0", []),
("banner_f01", 0, "banner_f01", "0", []),
("banner_f02", 0, "banner_f02", "0", []),
("banner_f03", 0, "banner_f03", "0", []),
("banner_f04", 0, "banner_f04", "0", []),
("banner_f05", 0, "banner_f05", "0", []),
("banner_f06", 0, "banner_f06", "0", []),
("banner_f07", 0, "banner_f07", "0", []),
("banner_f08", 0, "banner_f08", "0", []),
("banner_f09", 0, "banner_f09", "0", []),
("banner_f10", 0, "banner_f10", "0", []),
("banner_f11", 0, "banner_f11", "0", []),
("banner_f12", 0, "banner_f12", "0", []),
("banner_f13", 0, "banner_f13", "0", []),
("banner_f14", 0, "banner_f14", "0", []),
("banner_f15", 0, "banner_f15", "0", []),
("banner_f16", 0, "banner_f16", "0", []),
("banner_f17", 0, "banner_f17", "0", []),
("banner_f18", 0, "banner_f18", "0", []),
("banner_f19", 0, "banner_f19", "0", []),
("banner_f20", 0, "banner_f20", "0", []),
("banner_g01", 0, "banner_f01", "0", []),
("banner_g02", 0, "banner_f02", "0", []),
("banner_g03", 0, "banner_f03", "0", []),
("banner_g04", 0, "banner_f04", "0", []),
("banner_g05", 0, "banner_f05", "0", []),
("banner_g06", 0, "banner_f06", "0", []),
("banner_g07", 0, "banner_f07", "0", []),
("banner_g08", 0, "banner_f08", "0", []),
("banner_g09", 0, "banner_f09", "0", []),
("banner_g10", 0, "banner_f10", "0", []),
("banner_kingdom_a", 0, "banner_kingdom_a", "0", []),
("banner_kingdom_b", 0, "banner_kingdom_b", "0", []),
("banner_kingdom_c", 0, "banner_kingdom_c", "0", []),
("banner_kingdom_d", 0, "banner_kingdom_d", "0", []),
("banner_kingdom_e", 0, "banner_kingdom_e", "0", []),
("banner_kingdom_f", 0, "banner_kingdom_f", "0", []),
("banner_f21", 0, "banner_a15", "0", []),
("tavern_chair_a",0,"tavern_chair_a","bo_tavern_chair_a", []),
("tavern_chair_b",0,"tavern_chair_b","bo_tavern_chair_b", []),
("tavern_table_a",0,"tavern_table_a","bo_tavern_table_a", []),
("tavern_table_b",0,"tavern_table_b","bo_tavern_table_b", []),
("fireplace_a",0,"fireplace_a","bo_fireplace_a", []),
("barrel",0,"barrel","bobarrel", []),
("bench_tavern",0,"bench_tavern","bobench_tavern", []),
("bench_tavern_b",0,"bench_tavern_b","bo_bench_tavern_b", []),
("bowl_wood",0,"bowl_wood","0", []),
("chandelier_table",0,"chandelier_table","0", []),
("chandelier_tavern",0,"chandelier_tavern","0", []),
("chest_gothic",0,"chest_gothic","bochest_gothic", []),
("chest_b",0,"chest_b","bo_chest_b", []),
("chest_c",0,"chest_c","bo_chest_c", []),
("counter_tavern",0,"counter_tavern","bocounter_tavern", []),
("cup",0,"cup","0", []),
("dish_metal",0,"dish_metal","0", []),
("gothic_chair",0,"gothic_chair","bogothic_chair", []),
("gothic_stool",0,"gothic_stool","bogothic_stool", []),
("grate",0,"grate","bograte", []),
("jug",0,"jug","0", []),
("potlamp",0,"potlamp","0", []),
("weapon_rack",0,"weapon_rack","boweapon_rack", []),
("weapon_rack_big",0,"weapon_rack_big","boweapon_rack_big", []),
("tavern_barrel",0,"barrel","bobarrel", []),
("tavern_barrel_b",0,"tavern_barrel_b","bo_tavern_barrel_b", []),
("merchant_sign",0,"merchant_sign","bo_tavern_sign", []),
("tavern_sign",0,"tavern_sign","bo_tavern_sign", []),
("sack",0,"sack","0", []),
("skull_a",0,"skull_a","0", []),
("skull_b",0,"skull_b","0", []),
("skull_c",0,"skull_c","0", []),
("skull_d",0,"skull_d","0", []),
("skeleton_cow",0,"skeleton_cow","0", []),
("cupboard_a",0,"cupboard_a","bo_cupboard_a", []),
("box_a",0,"box_a","bo_box_a", []),
("bucket_a",0,"bucket_a","bo_bucket_a", []),
("straw_a",0,"straw_a","0", []),
("straw_b",0,"straw_b","0", []),
("straw_c",0,"straw_c","0", []),
("cloth_a",0,"cloth_a","0", []),
("cloth_b",0,"cloth_b","0", []),
("mat_a",0,"mat_a","0", []),
("mat_b",0,"mat_b","0", []),
("mat_c",0,"mat_c","0", []),
("mat_d",0,"mat_d","0", []),
("wood_a",0,"wood_a","bo_wood_a", []),
("wood_b",0,"wood_b","bo_wood_b", []),
("wood_heap",0,"wood_heap_a","bo_wood_heap_a", []),
("wood_heap_b",0,"wood_heap_b","bo_wood_heap_b", []),
("water_well_a",0,"water_well_a","bo_water_well_a", []),
("net_a",0,"net_a","bo_net_a", []),
("net_b",0,"net_b","0", []),
("meat_hook",0,"meat_hook","0", []),
("cooking_pole",0,"cooking_pole","0", []),
("bowl_a",0,"bowl_a","0", []),
("bucket_b",0,"bucket_b","0", []),
("washtub_a",0,"washtub_a","bo_washtub_a", []),
("washtub_b",0,"washtub_b","bo_washtub_b", []),
("table_trunk_a",0,"table_trunk_a","bo_table_trunk_a", []),
("chair_trunk_a",0,"chair_trunk_a","bo_chair_trunk_a", []),
("chair_trunk_b",0,"chair_trunk_b","bo_chair_trunk_b", []),
("chair_trunk_c",0,"chair_trunk_c","bo_chair_trunk_c", []),
("table_trestle_long",0,"table_trestle_long","bo_table_trestle_long", []),
("table_trestle_small",0,"table_trestle_small","bo_table_trestle_small", []),
("chair_trestle",0,"chair_trestle","bo_chair_trestle", []),
("wheel",0,"wheel","bo_wheel", []),
("ladder",sokf_type_ladder,"ladder","boladder", []),
("cart",0,"cart","bo_cart", []),
("village_stand",0,"village_stand","bovillage_stand", []),
("wooden_stand",0,"wooden_stand","bowooden_stand", []),
("table_small",0,"table_small","bo_table_small", []),
("table_small_b",0,"table_small_b","bo_table_small_b", []),
("small_timber_frame_house_a",0,"small_timber_frame_house_a","bo_small_timber_frame_house_a", []),
("timber_frame_house_b",0,"tf_house_b","bo_tf_house_b", []),
("timber_frame_house_c",0,"tf_house_c","bo_tf_house_c", []),
("timber_frame_extension_a",0,"timber_frame_extension_a","bo_timber_frame_extension_a", []),
("timber_frame_extension_b",0,"timber_frame_extension_b","bo_timber_frame_extension_b", []),
("stone_stairs_a",sokf_type_ladder,"stone_stairs_a","bo_stone_stairs_a", []),
("stone_stairs_b",sokf_type_ladder,"stone_stairs_b","bo_stone_stairs_b", []),
("railing_a",0,"railing_a","bo_railing_a", []),
("side_building_a",0,"side_building_a","bo_side_building_a", []),
("battlement_a",0,"battlement_a","bo_battlement_a", []),
("battlement_a_destroyed",0,"battlement_a_destroyed","bo_battlement_a_destroyed", []),
("round_tower_a",0,"round_tower_a","bo_round_tower_a", []),
("small_round_tower_a",0,"small_round_tower_a","bo_small_round_tower_a", []),
("small_round_tower_roof_a",0,"small_round_tower_roof_a","bo_small_round_tower_roof_a", []),
("square_keep_a",0,"square_keep_a","bo_square_keep_a", []),
("square_tower_roof_a",0,"square_tower_roof_a","0", []),
("gate_house_a",0,"gate_house_a","bo_gate_house_a", []),
("gate_house_b",0,"gate_house_b","bo_gate_house_b", []),
("small_wall_a",0,"small_wall_a","bo_small_wall_a", []),
("small_wall_b",0,"small_wall_b","bo_small_wall_b", []),
("small_wall_c",0,"small_wall_c","bo_small_wall_c", []),
("small_wall_c_destroy",0,"small_wall_c_destroy","bo_small_wall_c_destroy", []),
("small_wall_d",0,"small_wall_d","bo_small_wall_d", []),
("small_wall_e",0,"small_wall_e","bo_small_wall_d", []),
("small_wall_f",0,"small_wall_f","bo_small_wall_f", []),
("small_wall_f2",0,"small_wall_f2","bo_small_wall_f2", []),
("town_house_a",0,"town_house_a","bo_town_house_a", []),
("town_house_b",0,"town_house_b","bo_town_house_b", []),
("town_house_c",0,"town_house_c","bo_town_house_c", []),
("town_house_d",0,"town_house_d","bo_town_house_d", []),
("town_house_e",0,"town_house_e","bo_town_house_e", []),
("town_house_f",0,"town_house_f","bo_town_house_f", []),
("town_house_g",0,"town_house_g","bo_town_house_g", []),
("town_house_h",0,"town_house_h","bo_town_house_h", []),
("town_house_i",0,"town_house_i","bo_town_house_i", []),
("town_house_j",0,"town_house_j","bo_town_house_j", []),
("town_house_l",0,"town_house_l","bo_town_house_l", []),
("town_house_m",0,"town_house_m","bo_town_house_m", []),
("town_house_n",0,"town_house_n","bo_town_house_n", []),
("town_house_o",0,"town_house_o","bo_town_house_o", []),
("town_house_p",0,"town_house_p","bo_town_house_p", []),
("town_house_q",0,"town_house_q","bo_town_house_q", []),
("passage_house_a",0,"passage_house_a","bo_passage_house_a", []),
("passage_house_b",0,"passage_house_b","bo_passage_house_b", []),
("passage_house_c",0,"passage_house_c","bo_passage_house_c", []),
("passage_house_d",0,"passage_house_d","bo_passage_house_d", []),
("passage_house_c_door",0,"passage_house_c_door","bo_passage_house_c_door", []),
("house_extension_a",0,"house_extension_a","bo_house_extension_a", []),
("house_extension_b",0,"house_extension_b","bo_house_extension_b", []),
("house_extension_c",0,"house_extension_c","bo_house_extension_a", []),#reuse
("house_extension_d",0,"house_extension_d","bo_house_extension_d", []),
("house_extension_e",0,"house_extension_e","bo_house_extension_e", []),
("house_extension_f",0,"house_extension_f","bo_house_extension_f", []),
("house_extension_f2",0,"house_extension_f2","bo_house_extension_f", []),
("house_extension_g",0,"house_extension_g","bo_house_extension_g", []),
("house_extension_g2",0,"house_extension_g2","bo_house_extension_g", []),
("house_extension_h",0,"house_extension_h","bo_house_extension_h", []),
("house_extension_i",0,"house_extension_i","bo_house_extension_i", []),
("house_roof_door",0,"house_roof_door","bo_house_roof_door", []),
("door_extension_a",0,"door_extension_a","bo_door_extension_a", []),
("stairs_arch_a",sokf_type_ladder,"stairs_arch_a","bo_stairs_arch_a", []),
("town_house_r",0,"town_house_r","bo_town_house_r", []),
("town_house_s",0,"town_house_s","bo_town_house_s", []),
("town_house_t",0,"town_house_t","bo_town_house_t", []),
("town_house_u",0,"town_house_u","bo_town_house_u", []),
("town_house_v",0,"town_house_v","bo_town_house_v", []),
("town_house_w",0,"town_house_w","bo_town_house_w", []),
("town_house_y",0,"town_house_y","bo_town_house_y", []),
("town_house_z",0,"town_house_z","bo_town_house_z", []),
("town_house_za",0,"town_house_za","bo_town_house_za", []),
("windmill",0,"windmill","bo_windmill", []),
("windmill_fan_turning",sokf_moveable,"windmill_fan_turning","bo_windmill_fan_turning", []),
("windmill_fan",0,"windmill_fan","bo_windmill_fan", []),
("fake_house_a",0,"fake_house_a","bo_fake_house_a", []),
("fake_house_b",0,"fake_house_b","bo_fake_house_b", []),
("fake_house_c",0,"fake_house_c","bo_fake_house_c", []),
("fake_house_d",0,"fake_house_d","bo_fake_house_d", []),
("fake_house_e",0,"fake_house_e","bo_fake_house_e", []),
("fake_house_f",0,"fake_house_f","bo_fake_house_f", []),
("fake_house_snowy_a",0,"fake_house_snowy_a","bo_fake_house_a", []),
("fake_house_snowy_b",0,"fake_house_snowy_b","bo_fake_house_b", []),
("fake_house_snowy_c",0,"fake_house_snowy_c","bo_fake_house_c", []),
("fake_house_snowy_d",0,"fake_house_snowy_d","bo_fake_house_d", []),
("fake_house_far_a",0,"fake_house_far_a","0", []),
("fake_house_far_b",0,"fake_house_far_b","0", []),
("fake_house_far_c",0,"fake_house_far_c","0", []),
("fake_house_far_d",0,"fake_house_far_d","0", []),
("fake_house_far_e",0,"fake_house_far_e","0", []),
("fake_house_far_f",0,"fake_house_far_f","0", []),
("fake_house_far_snowycrude_a",0,"fake_house_far_snowy_a","0", []),
("fake_house_far_snowy_b",0,"fake_house_far_snowy_b","0", []),
("fake_house_far_snowy_c",0,"fake_house_far_snowy_c","0", []),
("fake_house_far_snowy_d",0,"fake_house_far_snowy_d","0", []),
("earth_wall_a",0,"earth_wall_a","bo_earth_wall_a", []),
("earth_wall_a2",0,"earth_wall_a2","bo_earth_wall_a2", []),
("earth_wall_b",0,"earth_wall_b","bo_earth_wall_b", []),
("earth_wall_b2",0,"earth_wall_b2","bo_earth_wall_b2", []),
("earth_stairs_a",sokf_type_ladder,"earth_stairs_a","bo_earth_stairs_a", []),
("earth_stairs_b",sokf_type_ladder,"earth_stairs_b","bo_earth_stairs_b", []),
("earth_tower_small_a",0,"earth_tower_small_a","bo_earth_tower_small_a", []),
("earth_gate_house_a",0,"earth_gate_house_a","bo_earth_gate_house_a", []),
("earth_gate_a",0,"earth_gate_a","bo_earth_gate_a", []),
("earth_square_keep_a",0,"earth_square_keep_a","bo_earth_square_keep_a", []),
("earth_house_a",0,"earth_house_a","bo_earth_house_a", []),
("earth_house_b",0,"earth_house_b","bo_earth_house_b", []),
("earth_house_c",0,"earth_house_c","bo_earth_house_c", []),
("earth_house_d",0,"earth_house_d","bo_earth_house_d", []),
("village_steppe_a",0,"village_steppe_a","bo_village_steppe_a", []),
("village_steppe_b",0,"village_steppe_b","bo_village_steppe_b", []),
("village_steppe_c",0,"village_steppe_c","bo_village_steppe_c", []),
("village_steppe_d",0,"village_steppe_d","bo_village_steppe_d", []),
("village_steppe_e",0,"village_steppe_e","bo_village_steppe_e", []),
("village_steppe_f",0,"village_steppe_f","bo_village_steppe_f", []),
("town_house_aa",0,"town_house_aa","bo_town_house_aa", []),
("snowy_house_a",0,"snowy_house_a","bo_snowy_house_a", []),
("snowy_house_b",0,"snowy_house_b","bo_snowy_house_b", []),
("snowy_house_c",0,"snowy_house_c","bo_snowy_house_c", []),
("snowy_house_d",0,"snowy_house_d","bo_snowy_house_d", []),
("snowy_house_e",0,"snowy_house_e","bo_snowy_house_e", []),
("snowy_house_f",0,"snowy_house_f","bo_snowy_house_f", []),
("snowy_house_g",0,"snowy_house_g","bo_snowy_house_g", []),
("snowy_house_h",0,"snowy_house_h","bo_snowy_house_h", []),
("snowy_house_i",0,"snowy_house_i","bo_snowy_house_i", []),
("snowy_wall_a",0,"snowy_wall_a","bo_snowy_wall_a", []),
("snowy_stand",0,"snowy_stand","bo_snowy_stand", []),
("snowy_heap_a",0,"snowy_heap_a","bo_snowy_heap_a", []),
("snowy_trunks_a",0,"snowy_trunks_a","bo_snowy_trunks_a", []),
("snowy_castle_tower_a",0,"snowy_castle_tower_a","bo_snowy_castle_tower_a", []),
("snowy_castle_battlement_a",0,"snowy_castle_battlement_a","bo_snowy_castle_battlement_a", []),
("snowy_castle_battlement_a_destroyed",0,"snowy_castle_battlement_a_destroyed","bo_snowy_castle_battlement_a_destroyed", []),
("snowy_castle_battlement_b",0,"snowy_castle_battlement_b","bo_snowy_castle_battlement_b", []),
("snowy_castle_battlement_corner_a",0,"snowy_castle_battlement_corner_a","bo_snowy_castle_battlement_corner_a", []),
("snowy_castle_battlement_corner_b",0,"snowy_castle_battlement_corner_b","bo_snowy_castle_battlement_corner_b", []),
("snowy_castle_battlement_corner_c",0,"snowy_castle_battlement_corner_c","bo_snowy_castle_battlement_corner_c", []),
("snowy_castle_battlement_stairs_a",0,"snowy_castle_battlement_stairs_a","bo_snowy_castle_battlement_stairs_a", []),
("snowy_castle_battlement_stairs_b",0,"snowy_castle_battlement_stairs_b","bo_snowy_castle_battlement_stairs_b", []),
("snowy_castle_gate_house_a",0,"snowy_castle_gate_house_a","bo_snowy_castle_gate_house_a", []),
("snowy_castle_round_tower_a",0,"snowy_castle_round_tower_a","bo_snowy_castle_round_tower_a", []),
("snowy_castle_square_keep_a",0,"snowy_castle_square_keep_a","bo_snowy_castle_square_keep_a", []),
("snowy_castle_stairs_a",sokf_type_ladder,"snowy_castle_stairs_a","bo_snowy_castle_stairs_a", []),
("square_keep_b",0,"square_keep_b","bo_square_keep_b", []),
("square_keep_c",0,"square_keep_c","bo_square_keep_c", []),
("square_keep_d",0,"square_keep_d","bo_square_keep_d", []),
("square_keep_e",0,"square_keep_e","bo_square_keep_e", []),
("square_keep_f",0,"square_keep_f","bo_square_keep_f", []),
("square_extension_a",0,"square_extension_a","bo_square_extension_a", []),
("square_stairs_a",0,"square_stairs_a","bo_square_stairs_a", []),
("castle_courtyard_house_a",0,"castle_courtyard_house_a","bo_castle_courtyard_house_a", []),
("castle_courtyard_house_b",0,"castle_courtyard_house_b","bo_castle_courtyard_house_b", []),
("castle_courtyard_house_c",0,"castle_courtyard_house_c","bo_castle_courtyard_house_c", []),
("castle_courtyard_a",0,"castle_courtyard_a","bo_castle_courtyard_a", []),
("gatehouse_b",0,"gatehouse_b","bo_gatehouse_b", []),
("castle_gaillard",0,"castle_gaillard","bo_castle_gaillard", []),
("castle_e_battlement_a",0,"castle_e_battlement_a","bo_castle_e_battlement_a", []),
("castle_e_battlement_c",0,"castle_e_battlement_c","bo_castle_e_battlement_c", []),
("castle_e_battlement_a_destroyed",0,"castle_e_battlement_a_destroyed","bo_castle_e_battlement_a_destroyed", []),
("castle_e_sally_door_a",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(0),"castle_e_sally_door_a","bo_castle_e_sally_door_a", [
check_sally_door_use_trigger,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 3000),
]),
## (ti_on_scene_prop_destroy,
## [
## (play_sound, "snd_dummy_destroyed"),
##
## (try_begin),
## (multiplayer_is_server),
## (store_trigger_param_1, ":instance_no"),
## (store_trigger_param_2, ":attacker_agent_no"),
##
## (try_begin),
## (ge, ":attacker_agent_no", 0),
## (prop_instance_get_position, pos1, ":instance_no"),
## (agent_get_position, pos2, ":attacker_agent_no"),
## (assign, ":rotate_side", 80),
## (try_begin),
## (position_is_behind_position, pos2, pos1),
## (val_mul, ":rotate_side", -1),
## (try_end),
## (else_try),
## (assign, ":rotate_side", 80),
## (try_end),
##
## (position_rotate_x, pos1, ":rotate_side"),
## (prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 second
## (try_end),
## ]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
#(assign, reg0, ":z_difference"),
#(display_message, "@{!}z dif : {reg0}"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
(prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 second
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(try_end),
]),
]),
("castle_e_corner",0,"castle_e_corner","bo_castle_e_corner", []),
("castle_e_corner_b",0,"castle_e_corner_b","bo_castle_e_corner_b", []),
("castle_e_corner_c",0,"castle_e_corner_c","bo_castle_e_corner_c", []),
("castle_e_stairs_a",0,"castle_e_stairs_a","bo_castle_e_stairs_a", []),
("castle_e_tower",0,"castle_e_tower","bo_castle_e_tower", []),
("castle_e_gate_house_a",0,"castle_e_gate_house_a","bo_castle_e_gate_house_a", []),
("castle_e_keep_a",0,"castle_e_keep_a","bo_castle_e_keep_a", []),
("stand_thatched",0,"stand_thatched","bo_stand_thatched", []),
("stand_cloth",0,"stand_cloth","bo_stand_cloth", []),
("castle_e_house_a",0,"castle_e_house_a","bo_castle_e_house_a", []),
("castle_e_house_b",0,"castle_e_house_b","bo_castle_e_house_b", []),
("arena_block_a",0,"arena_block_a","bo_arena_block_ab", []),
("arena_block_b",0,"arena_block_b","bo_arena_block_ab", []),
("arena_block_c",0,"arena_block_c","bo_arena_block_c", []),
("arena_block_d",0,"arena_block_d","bo_arena_block_def", []),
("arena_block_e",0,"arena_block_e","bo_arena_block_def", []),
("arena_block_f",0,"arena_block_f","bo_arena_block_def", []),
("arena_block_g",0,"arena_block_g","bo_arena_block_ghi", []),
("arena_block_h",0,"arena_block_h","bo_arena_block_ghi", []),
("arena_block_i",0,"arena_block_i","bo_arena_block_ghi", []),
("arena_block_j",0,"arena_block_j","bo_arena_block_j", []),
("arena_block_j_awning",0,"arena_block_j_awning","bo_arena_block_j_awning", []),
("arena_palisade_a",0,"arena_palisade_a","bo_arena_palisade_a", []),
("arena_wall_a",0,"arena_wall_a","bo_arena_wall_ab", []),
("arena_wall_b",0,"arena_wall_b","bo_arena_wall_ab", []),
("arena_barrier_a",0,"arena_barrier_a","bo_arena_barrier_a", []),
("arena_barrier_b",0,"arena_barrier_b","bo_arena_barrier_bc", []),
("arena_barrier_c",0,"arena_barrier_c","bo_arena_barrier_bc", []),
("arena_tower_a",0,"arena_tower_a","bo_arena_tower_abc", []),
("arena_tower_b",0,"arena_tower_b","bo_arena_tower_abc", []),
("arena_tower_c",0,"arena_tower_c","bo_arena_tower_abc", []),
("arena_spectator_a",0,"arena_spectator_a","0", []),
("arena_spectator_b",0,"arena_spectator_b","0", []),
("arena_spectator_c",0,"arena_spectator_c","0", []),
("arena_spectator_sitting_a",0,"arena_spectator_sitting_a","0", []),
("arena_spectator_sitting_b",0,"arena_spectator_sitting_b","0", []),
("arena_spectator_sitting_c",0,"arena_spectator_sitting_c","0", []),
("courtyard_gate_a",0,"courtyard_entry_a","bo_courtyard_entry_a", []),
("courtyard_gate_b",0,"courtyard_entry_b","bo_courtyard_entry_b", []),
("courtyard_gate_c",0,"courtyard_entry_c","bo_courtyard_entry_c", []),
("courtyard_gate_snowy",0,"courtyard_entry_snowy","bo_courtyard_entry_a", []),
("castle_tower_a",0,"castle_tower_a","bo_castle_tower_a", []),
("castle_battlement_a",0,"castle_battlement_a","bo_castle_battlement_a", []),
("castle_battlement_b",0,"castle_battlement_b","bo_castle_battlement_b", []),
("castle_battlement_c",0,"castle_battlement_c","bo_castle_battlement_c", []),
("castle_battlement_a_destroyed",0,"castle_battlement_a_destroyed","bo_castle_battlement_a_destroyed", []),
("castle_battlement_b_destroyed",0,"castle_battlement_b_destroyed","bo_castle_battlement_b_destroyed", []),
("castle_battlement_corner_a",0,"castle_battlement_corner_a","bo_castle_battlement_corner_a", []),
("castle_battlement_corner_b",0,"castle_battlement_corner_b","bo_castle_battlement_corner_b", []),
("castle_battlement_corner_c",0,"castle_battlement_corner_c","bo_castle_battlement_corner_c", []),
("castle_battlement_stairs_a",0,"castle_battlement_stairs_a","bo_castle_battlement_stairs_a", []),
("castle_battlement_stairs_b",0,"castle_battlement_stairs_b","bo_castle_battlement_stairs_b", []),
("castle_gate_house_a",0,"castle_gate_house_a","bo_castle_gate_house_a", []),
("castle_round_tower_a",0,"castle_round_tower_a","bo_castle_round_tower_a", []),
("castle_square_keep_a",0,"castle_square_keep_a","bo_castle_square_keep_a", []),
("castle_stairs_a",sokf_type_ladder,"castle_stairs_a","bo_castle_stairs_a", []),
("castle_drawbridge_open",0,"castle_drawbridges_open","bo_castle_drawbridges_open", []),
("castle_drawbridge_closed",0,"castle_drawbridges_closed","bo_castle_drawbridges_closed", []),
("spike_group_a",0,"spike_group_a","bo_spike_group_a", []),
("spike_a",0,"spike_a","bo_spike_a", []),
("belfry_a",sokf_moveable,"belfry_a","bo_belfry_a", []),
("belfry_b",sokf_moveable,"belfry_b","bo_belfry_b", []),
("belfry_b_platform_a",sokf_moveable,"belfry_b_platform_a","bo_belfry_b_platform_a", []),
("belfry_old",0,"belfry_a","bo_belfry_a", []),
("belfry_platform_a",sokf_moveable,"belfry_platform_a","bo_belfry_platform_a", []),
("belfry_platform_b",sokf_moveable,"belfry_platform_b","bo_belfry_platform_b", []),
("belfry_platform_old",0,"belfry_platform_b","bo_belfry_platform_b", []),
("belfry_wheel",sokf_moveable,"belfry_wheel",0, []),
("belfry_wheel_old",0,"belfry_wheel",0, []),
("mangonel",0,"mangonel","bo_mangonel", []),
("trebuchet_old",0,"trebuchet_old","bo_trebuchet_old", []),
("trebuchet_new",0,"trebuchet_new","bo_trebuchet_old", []),
("trebuchet_destructible",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible,"trebuchet_new","bo_trebuchet_old", [
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 2400),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(prop_instance_get_position, pos1, ":instance_no"),
(particle_system_burst, "psys_dummy_smoke_big", pos1, 100),
(particle_system_burst, "psys_dummy_straw_big", pos1, 100),
(position_move_z, pos1, -500),
(position_rotate_x, pos1, 90),
(prop_instance_animate_to_position, ":instance_no", pos1, 300), #animate to 6 meters below in 6 second
(try_begin),
(eq, "$g_round_ended", 0),
(scene_prop_get_team, ":scene_prop_team_no", ":instance_no"),
(try_begin),
(eq, ":scene_prop_team_no", 0),
(assign, ":scene_prop_team_no_multiplier", -1),
(else_try),
(assign, ":scene_prop_team_no_multiplier", 1),
(try_end),
(try_begin),
(eq, "$g_number_of_targets_destroyed", 0),
(store_mul, ":target_no_mul_scene_prop_team", ":scene_prop_team_no_multiplier", 2), #2 means destroyed object is a trebuchet
                 #show the message on the server itself---------------------------------------------------------------------------
                 (call_script, "script_show_multiplayer_message", multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
                 #-----------------------------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"),
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_show_multiplayer_message, multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
(try_end),
(val_add, "$g_number_of_targets_destroyed", 1),
(else_try),
(store_mul, ":target_no_mul_scene_prop_team", ":scene_prop_team_no_multiplier", 9), #9 means attackers destroyed all targets
                #run locally for the server itself; clients are notified via the network event below----------------------------------
                (call_script, "script_show_multiplayer_message", multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
                #----------------------------------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"),
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_show_multiplayer_message, multiplayer_message_type_target_destroyed, ":target_no_mul_scene_prop_team"),
(try_end),
(val_add, "$g_number_of_targets_destroyed", 1),
(try_end),
(try_end),
            #give gold for destroying the target (trebuchet)
            #step-1: calculate the total damage all players dealt to this scene prop
(assign, ":total_damage_given", 0),
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 0, ":num_players"),
(player_is_active, ":player_no"),
(try_begin),
(eq, "spr_trebuchet_destructible", "$g_destructible_target_1"),
(player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_1),
(else_try),
(player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_2),
(try_end),
(val_add, ":total_damage_given", ":damage_given"),
(try_end),
            #step-2: share the destroy bonus among players in proportion to damage dealt; the pool is 50 gold per active player, capped at multi_destroy_target_money_add (1000 gold)
#(scene_prop_get_max_hit_points, ":max_hit_points", ":instance_no"),
(assign, ":destroy_money_addition", 0),
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 0, ":num_players"),
(player_is_active, ":player_no"),
(val_add, ":destroy_money_addition", 50),
(try_end),
(try_begin),
(ge, ":destroy_money_addition", multi_destroy_target_money_add),
(assign, ":destroy_money_addition", multi_destroy_target_money_add),
(try_end),
(val_mul, ":destroy_money_addition", "$g_multiplayer_battle_earnings_multiplier"),
(val_div, ":destroy_money_addition", 100),
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 0, ":num_players"),
(player_is_active, ":player_no"),
(try_begin),
(eq, "spr_trebuchet_destructible", "$g_destructible_target_1"),
(player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_1),
(else_try),
(player_get_slot, ":damage_given", ":player_no", slot_player_damage_given_to_target_2),
(try_end),
              (player_get_gold, ":player_gold", ":player_no"), #pay this player's damage-proportional share of the destroy bonus
(val_mul, ":damage_given", ":destroy_money_addition"),
(store_div, ":gold_earned", ":damage_given", ":total_damage_given"),
(val_add, ":player_gold", ":gold_earned"),
(player_set_gold, ":player_no", ":player_gold", multi_max_gold_that_can_be_stored),
(try_end),
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(set_fixed_point_multiplier, 1),
(position_get_x, ":attacker_agent_id", pos2),
(try_begin),
(ge, ":attacker_agent_id", 0),
(agent_is_alive, ":attacker_agent_id"),
(agent_is_human, ":attacker_agent_id"),
(neg|agent_is_non_player, ":attacker_agent_id"),
(agent_get_player_id, ":attacker_player_id", ":attacker_agent_id"),
(ge, ":attacker_player_id", 0),
(player_is_active, ":attacker_player_id"),
(try_begin),
(eq, "spr_trebuchet_destructible", "$g_destructible_target_1"),
(player_get_slot, ":damage_given", ":attacker_player_id", slot_player_damage_given_to_target_1),
(val_add, ":damage_given", ":damage"),
(player_set_slot, ":attacker_player_id", slot_player_damage_given_to_target_1, ":damage_given"),
(else_try),
(player_get_slot, ":damage_given", ":attacker_player_id", slot_player_damage_given_to_target_2),
(val_add, ":damage_given", ":damage"),
(player_set_slot, ":attacker_player_id", slot_player_damage_given_to_target_2, ":damage_given"),
(try_end),
(try_end),
(try_end),
]),
]),
("stone_ball",0,"stone_ball","0", []),
("village_house_a",0,"village_house_a","bo_village_house_a", []),
("village_house_b",0,"village_house_b","bo_village_house_b", []),
("village_house_c",0,"village_house_c","bo_village_house_c", []),
("village_house_d",0,"village_house_d","bo_village_house_d", []),
("farm_house_a",0,"farm_house_a","bo_farm_house_a", []),
("farm_house_b",0,"farm_house_b","bo_farm_house_b", []),
("farm_house_c",0,"farm_house_c","bo_farm_house_c", []),
("mountain_house_a",0,"mountain_house_a","bo_mountain_house_a", []),
("mountain_house_b",0,"mountain_house_b","bo_mountain_house_b", []),
("village_hut_a",0,"village_hut_a","bo_village_hut_a", []),
("crude_fence",0,"fence","bo_fence", []),
("crude_fence_small",0,"crude_fence_small","bo_crude_fence_small", []),
("crude_fence_small_b",0,"crude_fence_small_b","bo_crude_fence_small_b", []),
("ramp_12m",0,"ramp_12m","bo_ramp_12m", []),
("ramp_14m",0,"ramp_14m","bo_ramp_14m", []),
("siege_ladder_6m",sokf_type_ladder,"siege_ladder_move_6m","bo_siege_ladder_move_6m", []),
("siege_ladder_8m",sokf_type_ladder,"siege_ladder_move_8m","bo_siege_ladder_move_8m", []),
("siege_ladder_10m",sokf_type_ladder,"siege_ladder_move_10m","bo_siege_ladder_move_10m", []),
("siege_ladder_12m",sokf_type_ladder,"siege_ladder_12m","bo_siege_ladder_12m", []),
("siege_ladder_14m",sokf_type_ladder,"siege_ladder_14m","bo_siege_ladder_14m", []),
("siege_ladder_move_6m",sokf_type_ladder|sokf_moveable|spr_use_time(2),"siege_ladder_move_6m","bo_siege_ladder_move_6m", [
check_item_use_trigger,
check_ladder_animate_trigger,
check_ladder_animation_finish_trigger,
]),
("siege_ladder_move_8m",sokf_type_ladder|sokf_moveable|spr_use_time(2),"siege_ladder_move_8m","bo_siege_ladder_move_8m", [
check_item_use_trigger,
check_ladder_animate_trigger,
check_ladder_animation_finish_trigger,
]),
("siege_ladder_move_10m",sokf_type_ladder|sokf_moveable|spr_use_time(3),"siege_ladder_move_10m","bo_siege_ladder_move_10m", [
check_item_use_trigger,
check_ladder_animate_trigger,
check_ladder_animation_finish_trigger,
]),
("siege_ladder_move_12m",sokf_type_ladder|sokf_moveable|spr_use_time(3),"siege_ladder_move_12m","bo_siege_ladder_move_12m", [
check_item_use_trigger,
check_ladder_animate_trigger,
check_ladder_animation_finish_trigger,
]),
("siege_ladder_move_14m",sokf_type_ladder|sokf_moveable|spr_use_time(4),"siege_ladder_move_14m","bo_siege_ladder_move_14m", [
check_item_use_trigger,
check_ladder_animate_trigger,
check_ladder_animation_finish_trigger,
]),
("portcullis",sokf_moveable,"portcullis_a","bo_portcullis_a", []),
("bed_a",0,"bed_a","bo_bed_a", []),
("bed_b",0,"bed_b","bo_bed_b", []),
("bed_c",0,"bed_c","bo_bed_c", []),
("bed_d",0,"bed_d","bo_bed_d", []),
("bed_e",0,"bed_e","bo_bed_e", []),
("bed_f",0,"bed_f","bo_bed_f", []),
("towngate_door_left",sokf_moveable,"door_g_left","bo_door_left", []),
("towngate_door_right",sokf_moveable,"door_g_right","bo_door_right", []),
("towngate_rectangle_door_left",sokf_moveable,"towngate_rectangle_door_left","bo_towngate_rectangle_door_left", []),
("towngate_rectangle_door_right",sokf_moveable,"towngate_rectangle_door_right","bo_towngate_rectangle_door_right", []),
("door_screen",sokf_moveable,"door_screen","0", []),
("door_a",sokf_moveable,"door_a","bo_door_a", []),
("door_b",sokf_moveable,"door_b","bo_door_a", []),
("door_c",sokf_moveable,"door_c","bo_door_a", []),
("door_d",sokf_moveable,"door_d","bo_door_a", []),
("tavern_door_a",sokf_moveable,"tavern_door_a","bo_tavern_door_a", []),
("tavern_door_b",sokf_moveable,"tavern_door_b","bo_tavern_door_a", []),
("door_e_left",sokf_moveable,"door_e_left","bo_door_left", []),
("door_e_right",sokf_moveable,"door_e_right","bo_door_right", []),
("door_f_left",sokf_moveable,"door_f_left","bo_door_left", []),
("door_f_right",sokf_moveable,"door_f_right","bo_door_right", []),
("door_h_left",sokf_moveable,"door_g_left","bo_door_left", []),
("door_h_right",sokf_moveable,"door_g_right","bo_door_right", []),
("draw_bridge_a",0,"draw_bridge_a","bo_draw_bridge_a", []),
("chain_1m",0,"chain_1m","0", []),
("chain_2m",0,"chain_2m","0", []),
("chain_5m",0,"chain_5m","0", []),
("chain_10m",0,"chain_10m","0", []),
("bridge_modular_a",0,"bridge_modular_a","bo_bridge_modular_a", []),
("bridge_modular_b",0,"bridge_modular_b","bo_bridge_modular_b", []),
("church_a",0,"church_a","bo_church_a", []),
("church_tower_a",0,"church_tower_a","bo_church_tower_a", []),
("stone_step_a",0,"floor_stone_a","bo_floor_stone_a", []),
("stone_step_b",0,"stone_step_b","0", []),
("stone_step_c",0,"stone_step_c","0", []),
("stone_heap",0,"stone_heap","bo_stone_heap", []),
("stone_heap_b",0,"stone_heap_b","bo_stone_heap", []),
("panel_door_a",0,"house_door_a","bo_house_door_a", []),
("panel_door_b",0,"house_door_b","bo_house_door_a", []),
("smoke_stain",0,"soot_a","0", []),
("brazier_with_fire",0,"brazier","bo_brazier", [
(ti_on_scene_prop_init,
[
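       #set_position_delta offsets (in cms from the prop origin) apply to the particle systems added below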
(set_position_delta,0,0,85),
(particle_system_add_new, "psys_brazier_fire_1"),
(particle_system_add_new, "psys_fire_sparks_1"),
(set_position_delta,0,0,100),
(particle_system_add_new, "psys_fire_glow_1"),
(particle_system_emit, "psys_fire_glow_1",9000000),
]),
]),
("cooking_fire",0,"fire_floor","0",
[
(ti_on_scene_prop_init,
[
(set_position_delta,0,0,12),
(particle_system_add_new, "psys_cooking_fire_1"),
(particle_system_add_new, "psys_fire_sparks_1"),
(particle_system_add_new, "psys_cooking_smoke"),
(set_position_delta,0,0,50),
(particle_system_add_new, "psys_fire_glow_1"),
(particle_system_emit, "psys_fire_glow_1",9000000),
]),
]),
("cauldron_a",0,"cauldron_a","bo_cauldron_a", []),
("fry_pan_a",0,"fry_pan_a","0", []),
("tripod_cauldron_a",0,"tripod_cauldron_a","bo_tripod_cauldron_a", []),
("tripod_cauldron_b",0,"tripod_cauldron_b","bo_tripod_cauldron_b", []),
("open_stable_a",0,"open_stable_a","bo_open_stable_a", []),
("open_stable_b",0,"open_stable_b","bo_open_stable_b", []),
("plate_a",0,"plate_a","0", []),
("plate_b",0,"plate_b","0", []),
("plate_c",0,"plate_c","0", []),
("lettuce",0,"lettuce","0", []),
("hanger",0,"hanger","0", []),
("knife_eating",0,"knife_eating","0", []),
("colander",0,"colander","0", []),
("ladle",0,"ladle","0", []),
("spoon",0,"spoon","0", []),
("skewer",0,"skewer","0", []),
("grape_a",0,"grape_a","0", []),
("grape_b",0,"grape_b","0", []),
("apple_a",0,"apple_a","0", []),
("apple_b",0,"apple_b","0", []),
("maize_a",0,"maize_a","0", []),
("maize_b",0,"maize_b","0", []),
("cabbage",0,"cabbage","0", []),
("flax_bundle",0,"raw_flax","0",[]),
("olive_plane",0,"olive_plane","0",[]),
("grapes_plane",0,"grapes_plane","0",[]),
("date_fruit_plane",0,"date_fruit_plane","0",[]),
("bowl",0,"bowl_big","0",[]),
("bowl_small",0,"bowl_small","0",[]),
("dye_blue",0,"raw_dye_blue","0",[]),
("dye_red",0,"raw_dye_red","0",[]),
("dye_yellow",0,"raw_dye_yellow","0",[]),
("basket",0,"basket_small","0",[]),
("basket_big",0,"basket_large","0",[]),
("basket_big_green",0,"basket_big","0",[]),
("leatherwork_frame",0,"leatherwork_frame","0", []),
("cabbage_b",0,"cabbage_b","0", []),
("bean",0,"bean","0", []),
("basket_a",0,"basket_a","bo_basket_a", []),
("feeding_trough_a",0,"feeding_trough_a","bo_feeding_trough_a", []),
("marrow_a",0,"marrow_a","0", []),
("marrow_b",0,"marrow_b","0", []),
("squash_plant",0,"marrow_c","0", []),
("gatehouse_new_a",0,"gatehouse_new_a","bo_gatehouse_new_a", []),
("gatehouse_new_b",0,"gatehouse_new_b","bo_gatehouse_new_b", []),
("gatehouse_new_snowy_a",0,"gatehouse_new_snowy_a","bo_gatehouse_new_b", []),
("winch",sokf_moveable,"winch","bo_winch", []),
("winch_b",sokf_moveable|spr_use_time(5),"winch_b","bo_winch", [
(ti_on_scene_prop_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
       #run locally for the server itself; clients are notified via the network event below---------------------------------
       (call_script, "script_use_item", ":instance_id", ":agent_id"),
       #---------------------------------------------------------------------------------------------------------------------
(get_max_players, ":num_players"),
(try_for_range, ":player_no", 1, ":num_players"), #0 is server so starting from 1
(player_is_active, ":player_no"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_use_item, ":instance_id", ":agent_id"),
(try_end),
]),
]),
("drawbridge",0,"drawbridge","bo_drawbridge", []),
("gatehouse_door_left",sokf_moveable,"gatehouse_door_left","bo_gatehouse_door_left", []),
("gatehouse_door_right",sokf_moveable,"gatehouse_door_right","bo_gatehouse_door_right", []),
("cheese_a",0,"cheese_a","0", []),
("cheese_b",0,"cheese_b","0", []),
("cheese_slice_a",0,"cheese_slice_a","0", []),
("bread_a",0,"bread_a","0", []),
("bread_b",0,"bread_b","0", []),
("bread_slice_a",0,"bread_slice_a","0", []),
("fish_a",0,"fish_a","0", []),
("fish_roasted_a",0,"fish_roasted_a","0", []),
("chicken_roasted",0,"chicken","0", []),
("food_steam",0,"0","0",
[
(ti_on_scene_prop_init,
[
(set_position_delta,0,0,0),
(particle_system_add_new, "psys_food_steam"),
]),
]),
########################
("city_smoke",0,"0","0",
[
(ti_on_scene_prop_init,
[
(store_time_of_day,reg(12)),
(neg|is_between,reg(12),5,20),
(set_position_delta,0,0,0),
(particle_system_add_new, "psys_night_smoke_1"),
]),
]),
("city_fire_fly_night",0,"0","0",
[
(ti_on_scene_prop_init,
[
(store_time_of_day,reg(12)),
(neg|is_between,reg(12),5,20),
(set_position_delta,0,0,0),
(particle_system_add_new, "psys_fire_fly_1"),
]),
]),
("city_fly_day",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_bug_fly_1"),
]),
]),
("flue_smoke_tall",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_flue_smoke_tall"),
]),
]),
("flue_smoke_short",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_flue_smoke_short"),
]),
]),
("moon_beam",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_moon_beam_1"),
(particle_system_add_new, "psys_moon_beam_paricle_1"),
]),
]),
("fire_small",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_fireplace_fire_small"),
]),
]),
("fire_big",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_fireplace_fire_big"),
]),
]),
("battle_field_smoke",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_war_smoke_tall"),
]),
]),
("Village_fire_big",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_village_fire_big"),
(set_position_delta,0,0,100),
(particle_system_add_new, "psys_village_fire_smoke_big"),
]),
]),
#########################
("candle_a",0,"candle_a","0",
[
(ti_on_scene_prop_init,
[
(set_position_delta,0,0,27),
(particle_system_add_new, "psys_candle_light"),
]),
]),
("candle_b",0,"candle_b","0",
[
(ti_on_scene_prop_init,
[
(set_position_delta,0,0,25),
(particle_system_add_new, "psys_candle_light"),
]),
]),
("candle_c",0,"candle_c","0", [
(ti_on_scene_prop_init,
[
(set_position_delta,0,0,10),
(particle_system_add_new, "psys_candle_light_small"),
]),
]),
("lamp_a",0,"lamp_a","0", [
(ti_on_scene_prop_init,
[
(set_position_delta,66,0,2),
(particle_system_add_new, "psys_candle_light"),
]),
]),
("lamp_b",0,"lamp_b","0", [
(ti_on_scene_prop_init,
[
(set_position_delta,65,0,-7),
(particle_system_add_new, "psys_lamp_fire"),
(set_position_delta,70,0,-5),
(particle_system_add_new, "psys_fire_glow_1"),
(particle_system_emit, "psys_fire_glow_1",9000000),
(play_sound, "snd_fire_loop", 0),
]),
]),
("hook_a",0,"hook_a","0", []),
("window_night",0,"window_night","0", []),
("fried_pig",0,"pork","0", []),
("village_oven",0,"village_oven","bo_village_oven", []),
("dungeon_water_drops",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_dungeon_water_drops"),
]),
]),
("shadow_circle_1",0,"shadow_circle_1","0", []),
("shadow_circle_2",0,"shadow_circle_2","0", []),
("shadow_square_1",0,"shadow_square_1","0", []),
("shadow_square_2",0,"shadow_square_2","0", []),
("wheelbarrow",0,"wheelbarrow","bo_wheelbarrow", []),
("gourd",sokf_moveable|sokf_destructible|spr_hit_points(1),"gourd","bo_gourd",
[
(ti_on_scene_prop_destroy,
[
(store_trigger_param_1, ":instance_no"),
(val_add, "$g_last_destroyed_gourds", 1),
(prop_instance_get_position, pos1, ":instance_no"),
(copy_position, pos2, pos1),
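      #hide the destroyed gourd by animating it far below the terrain; scene props cannot be deleted at runtime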
(position_set_z, pos2, -100000),
(particle_system_burst, "psys_gourd_smoke", pos1, 2),
(particle_system_burst, "psys_gourd_piece_1", pos1, 1),
(particle_system_burst, "psys_gourd_piece_2", pos1, 5),
(prop_instance_animate_to_position, ":instance_no", pos2, 1),
(play_sound, "snd_gourd_destroyed"),
]),
]),
("gourd_spike",sokf_moveable,"gourd_spike","bo_gourd_spike",[]),
("obstacle_fence_1",0,"fence","bo_fence", []),
("obstacle_fallen_tree_a",0,"destroy_tree_a","bo_destroy_tree_a", []),
("obstacle_fallen_tree_b",0,"destroy_tree_b","bo_destroy_tree_b", []),
("siege_wall_a",0,"siege_wall_a","bo_siege_wall_a", []),
("siege_large_shield_a",0,"siege_large_shield_a","bo_siege_large_shield_a", []),
("granary_a",0,"granary_a","bo_granary_a", []),
("small_wall_connect_a",0,"small_wall_connect_a","bo_small_wall_connect_a", []),
("full_stable_a",0,"full_stable_a","bo_full_stable_a", []),
("full_stable_b",0,"full_stable_b","bo_full_stable_b", []),
("full_stable_c",0,"full_stable_c","bo_full_stable_c", []),
("full_stable_d",0,"full_stable_d","bo_full_stable_d", []),
("arabian_house_a",0,"arabian_house_a","bo_arabian_house_a", []),
("arabian_house_b",0,"arabian_house_b","bo_arabian_house_b", []),
("arabian_house_c",0,"arabian_house_c","bo_arabian_house_c", []),
("arabian_house_d",0,"arabian_house_d","bo_arabian_house_d", []),
("arabian_house_e",0,"arabian_house_e","bo_arabian_house_e", []),
("arabian_house_f",0,"arabian_house_f","bo_arabian_house_f", []),
("arabian_house_g",0,"arabian_house_g","bo_arabian_house_g", []),
("arabian_house_h",0,"arabian_house_h","bo_arabian_house_h", []),
("arabian_house_i",0,"arabian_house_i","bo_arabian_house_i", []),
("arabian_square_keep_a",0,"arabian_square_keep_a","bo_arabian_square_keep_a", []),
("arabian_passage_house_a",0,"arabian_passage_house_a","bo_arabian_passage_house_a", []),
("arabian_wall_a",0,"arabian_wall_a","bo_arabian_wall_a", []),
("arabian_wall_b",0,"arabian_wall_b","bo_arabian_wall_b", []),
("arabian_ground_a",0,"arabian_ground_a","bo_arabian_ground_a", []),
("arabian_parterre_a",0,"arabian_parterre_a","bo_arabian_parterre_a", []),
("well_shaft",0,"well_shaft","bo_well_shaft", []),
("horse_mill",0,"horse_mill","bo_horse_mill", []),
("horse_mill_collar",0,"horse_mill_collar","bo_horse_mill_collar", []),
("arabian_stable",0,"arabian_stable","bo_arabian_stable", []),
("arabian_tent",0,"arabian_tent","bo_arabian_tent", []),
("arabian_tent_b",0,"arabian_tent_b","bo_arabian_tent_b", []),
("desert_plant_a",0,"desert_plant_a","0", []),
("arabian_castle_battlement_a",0,"arabian_castle_battlement_a","bo_arabian_castle_battlement_a", []),
("arabian_castle_battlement_b_destroyed",0,"arabian_castle_battlement_b_destroyed","bo_arabian_castle_battlement_b_destroyed", []),
("arabian_castle_battlement_c",0,"arabian_castle_battlement_c","bo_arabian_castle_battlement_c", []),
("arabian_castle_battlement_d",0,"arabian_castle_battlement_d","bo_arabian_castle_battlement_d", []),
("arabian_castle_corner_a",0,"arabian_castle_corner_a","bo_arabian_castle_corner_a", []),
("arabian_castle_stairs",sokf_type_ladder,"arabian_castle_stairs","bo_arabian_castle_stairs", []),
("arabian_castle_stairs_b",sokf_type_ladder,"arabian_castle_stairs_b","bo_arabian_castle_stairs_b", []),
("arabian_castle_stairs_c",sokf_type_ladder,"arabian_castle_stairs_c","bo_arabian_castle_stairs_c", []),
("arabian_castle_battlement_section_a",0,"arabian_castle_battlement_section_a","bo_arabian_castle_battlement_section_a", []),
("arabian_castle_gate_house_a",0,"arabian_castle_gate_house_a","bo_arabian_castle_gate_house_a", []),
("arabian_castle_house_a",0,"arabian_castle_house_a","bo_arabian_castle_house_a", []),
("arabian_castle_house_b",0,"arabian_castle_house_b","bo_arabian_castle_house_b", []),
("arabian_castle_keep_a",0,"arabian_castle_keep_a","bo_arabian_castle_keep_a", []),
("arabian_house_a2",0,"arabian_house_a2","bo_arabian_house_a2", []),
("arabian_village_house_a",0,"arabian_village_house_a","bo_arabian_village_house_a", []),
("arabian_village_house_b",0,"arabian_village_house_b","bo_arabian_village_house_b", []),
("arabian_village_house_c",0,"arabian_village_house_c","bo_arabian_village_house_c", []),
("arabian_village_house_d",0,"arabian_village_house_d","bo_arabian_village_house_d", []),
("arabian_village_stable",0,"arabian_village_stable","bo_arabian_village_stable", []),
("arabian_village_hut",0,"arabian_village_hut","bo_arabian_village_hut", []),
("arabian_village_stairs",sokf_type_ladder,"arabian_village_stairs","bo_arabian_village_stairs", []),
("tree_a01",0,"tree_a01","bo_tree_a01", []),
("stairs_a",sokf_type_ladder,"stairs_a","bo_stairs_a", []),
("headquarters_flag_red",sokf_moveable|sokf_face_player,"tutorial_flag_red","0", []),
("headquarters_flag_blue",sokf_moveable|sokf_face_player,"tutorial_flag_blue","0", []),
("headquarters_flag_gray",sokf_moveable|sokf_face_player,"tutorial_flag_yellow","0", []),
("headquarters_flag_red_code_only",sokf_moveable|sokf_face_player,"mp_flag_red","0", []),
("headquarters_flag_blue_code_only",sokf_moveable|sokf_face_player,"mp_flag_blue","0", []),
("headquarters_flag_gray_code_only",sokf_moveable|sokf_face_player,"mp_flag_white","0", []),
("headquarters_pole_code_only",sokf_moveable,"mp_flag_pole","0", []),
("headquarters_flag_swadian",sokf_moveable|sokf_face_player,"flag_swadian","0", []),
("headquarters_flag_vaegir",sokf_moveable|sokf_face_player,"flag_vaegir","0", []),
("headquarters_flag_khergit",sokf_moveable|sokf_face_player,"flag_khergit","0", []),
("headquarters_flag_nord",sokf_moveable|sokf_face_player,"flag_nord","0", []),
("headquarters_flag_rhodok",sokf_moveable|sokf_face_player,"flag_rhodok","0", []),
("headquarters_flag_sarranid",sokf_moveable|sokf_face_player,"flag_sarranid","0", []),
("glow_a", 0, "glow_a", "0", []),
("glow_b", 0, "glow_b", "0", []),
("arabian_castle_corner_b",0,"arabian_castle_corner_b","bo_arabian_castle_corner_b", []),
("dummy_a_undestructable",sokf_destructible,"arena_archery_target_b","bo_arena_archery_target_b",
[
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 10000000),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(set_fixed_point_multiplier, 1),
(position_get_x, ":attacker_agent_id", pos2),
(get_player_agent_no, ":player_agent"),
(eq, ":player_agent", ":attacker_agent_id"),
(assign, reg60, ":damage"),
(display_message, "str_delivered_damage"),
(eq, "$g_tutorial_training_ground_horseman_trainer_state", 6),
(eq, "$g_tutorial_training_ground_horseman_trainer_completed_chapters", 1),
(prop_instance_get_variation_id_2, ":var_id_2", ":instance_no"),
(val_sub, ":var_id_2", 1),
(eq, "$g_tutorial_training_ground_current_score", ":var_id_2"),
(val_add, "$g_tutorial_training_ground_current_score", 1),
(try_end),
(play_sound, "snd_dummy_hit"),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
]),
]),
("cave_entrance_1",0,"cave_entrance_1","bo_cave_entrance_1", []),
("pointer_arrow", 0, "pointer_arrow", "0", []),
("fireplace_d_interior",0,"fireplace_d","bo_fireplace_d", []),
("ship_sail_off",0,"ship_sail_off","bo_ship_sail_off", []),
("ship_sail_off_b",0,"ship_sail_off_b","bo_ship_sail_off", []),
("ship_c_sail_off",0,"ship_c_sail_off","bo_ship_c_sail_off", []),
("ramp_small_a",0,"ramp_small_a","bo_ramp_small_a", []),
("castle_g_battlement_b",0,"castle_g_battlement_b","bo_castle_g_battlement_b", []),
("box_a_dynamic",sokf_moveable|sokf_dynamic_physics,"box_a","bo_box_a", []),
("desert_field",0,"desert_field","bo_desert_field", []),
("water_river",0,"water_plane","0", []),
("viking_house_a",0,"viking_house_a","bo_viking_house_a", []),
("viking_house_b",0,"viking_house_b","bo_viking_house_b", []),
("viking_house_c",0,"viking_house_c","bo_viking_house_c", []),
("viking_house_d",0,"viking_house_d","bo_viking_house_d", []),
("viking_house_e",0,"viking_house_e","bo_viking_house_e", []),
("viking_stable_a",0,"viking_stable_a","bo_viking_stable_a", []),
("viking_keep",0,"viking_keep","bo_viking_keep", []),
("viking_house_c_destroy",0,"viking_house_c_destroy","bo_viking_house_c_destroy", []),
("viking_house_b_destroy",0,"viking_house_b_destroy","bo_viking_house_b_destroy", []),
("harbour_a",0,"harbour_a","bo_harbour_a", []),
("sea_foam_a",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_sea_foam_a"),
]),
]),
("viking_keep_destroy",0,"viking_keep_destroy","bo_viking_keep_destroy", []),
("viking_keep_destroy_door",0,"viking_keep_destroy_door","bo_viking_keep_destroy_door", []),
("earth_tower_small_b",0,"earth_tower_small_b","bo_earth_tower_small_b", []),
("earth_gate_house_b",0,"earth_gate_house_b","bo_earth_gate_house_b", []),
("earth_tower_a",0,"earth_tower_a","bo_earth_tower_a", []),
("earth_stairs_c",0,"earth_stairs_c","bo_earth_stairs_c", []),
("earth_sally_gate_left",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(0),"earth_sally_gate_left","bo_earth_sally_gate_left", [
check_sally_door_use_trigger_double,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 2000),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
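      #the door falls away from the attacker: rotate ~86 degrees around x, flipped if the attacker stands behind it, plus extra lean for any drop to the terrain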
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
          (prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 seconds
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(try_end),
]),
]),
("earth_sally_gate_right",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(0),"earth_sally_gate_right","bo_earth_sally_gate_right", [
check_sally_door_use_trigger_double,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 2000),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
          (prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 seconds
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(try_end),
]),
]),
#("earth_sally_gate_left",0,"earth_sally_gate_left","bo_earth_sally_gate_left", []),
#("earth_sally_gate_right",0,"earth_sally_gate_right","bo_earth_sally_gate_right", []),
("barrier_box",sokf_invisible|sokf_type_barrier3d,"barrier_box","bo_barrier_box", []),
("barrier_capsule",sokf_invisible|sokf_type_barrier3d,"barrier_capsule","bo_barrier_capsule", []),
("barrier_cone" ,sokf_invisible|sokf_type_barrier3d,"barrier_cone" ,"bo_barrier_cone" , []),
("barrier_sphere" ,sokf_invisible|sokf_type_barrier3d,"barrier_sphere" ,"bo_barrier_sphere" , []),
("viking_keep_destroy_sally_door_right",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(0),"viking_keep_destroy_sally_door_right","bo_viking_keep_destroy_sally_door_right", [
check_sally_door_use_trigger_double,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 3000),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
          (prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 seconds
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(try_end),
]),
]),
("viking_keep_destroy_sally_door_left",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(0),"viking_keep_destroy_sally_door_left","bo_viking_keep_destroy_sally_door_left", [
check_sally_door_use_trigger_double,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 3000),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
          (prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 seconds
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(try_end),
]),
]),
("castle_f_door_b",sokf_moveable|sokf_show_hit_point_bar|sokf_destructible|spr_use_time(0),"castle_e_sally_door_a","bo_castle_e_sally_door_a", [
check_castle_door_use_trigger,
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 1000),
]),
(ti_on_scene_prop_destroy,
[
(play_sound, "snd_dummy_destroyed"),
(assign, ":rotate_side", 86),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":attacker_agent_no"),
(set_fixed_point_multiplier, 100),
(prop_instance_get_position, pos1, ":instance_no"),
(try_begin),
(ge, ":attacker_agent_no", 0),
(agent_get_position, pos2, ":attacker_agent_no"),
(try_begin),
(position_is_behind_position, pos2, pos1),
(val_mul, ":rotate_side", -1),
(try_end),
(try_end),
(init_position, pos3),
(try_begin),
(ge, ":rotate_side", 0),
(position_move_y, pos3, -100),
(else_try),
(position_move_y, pos3, 100),
(try_end),
(position_move_x, pos3, -50),
(position_transform_position_to_parent, pos4, pos1, pos3),
(position_move_z, pos4, 100),
(position_get_distance_to_ground_level, ":height_to_terrain", pos4),
(val_sub, ":height_to_terrain", 100),
(assign, ":z_difference", ":height_to_terrain"),
#(assign, reg0, ":z_difference"),
#(display_message, "@{!}z dif : {reg0}"),
(val_div, ":z_difference", 3),
(try_begin),
(ge, ":rotate_side", 0),
(val_add, ":rotate_side", ":z_difference"),
(else_try),
(val_sub, ":rotate_side", ":z_difference"),
(try_end),
(position_rotate_x, pos1, ":rotate_side"),
          (prop_instance_animate_to_position, ":instance_no", pos1, 70), #animate to position 1 in 0.7 seconds
(try_end),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(try_end),
]),
]),
("ctf_flag_kingdom_1", sokf_moveable|sokf_face_player, "ctf_flag_kingdom_1", "0", []),
("ctf_flag_kingdom_2", sokf_moveable|sokf_face_player, "ctf_flag_kingdom_2", "0", []),
("ctf_flag_kingdom_3", sokf_moveable|sokf_face_player, "ctf_flag_kingdom_3", "0", []),
("ctf_flag_kingdom_4", sokf_moveable|sokf_face_player, "ctf_flag_kingdom_4", "0", []),
("ctf_flag_kingdom_5", sokf_moveable|sokf_face_player, "ctf_flag_kingdom_5", "0", []),
("ctf_flag_kingdom_6", sokf_moveable|sokf_face_player, "ctf_flag_kingdom_6", "0", []),
("ctf_flag_kingdom_7", sokf_moveable|sokf_face_player, "ctf_flag_kingdom_7", "0", []),
("headquarters_flag_rebel",sokf_moveable|sokf_face_player,"flag_rebel","0", []),
("arabian_lighthouse_a",0,"arabian_lighthouse_a","bo_arabian_lighthouse_a", []),
("arabian_ramp_a",0,"arabian_ramp_a","bo_arabian_ramp_a", []),
("arabian_ramp_b",0,"arabian_ramp_b","bo_arabian_ramp_b", []),
("winery_interior",0,"winery_interior","bo_winery_interior", []),
("winery_barrel_shelf",0,"winery_barrel_shelf","bo_winery_barrel_shelf", []),
("winery_wall_shelf",0,"winery_wall_shelf","bo_winery_wall_shelf", []),
("winery_huge_barrel",0,"winery_huge_barrel","bo_winery_huge_barrel", []),
("winery_wine_press",0,"winery_wine_press","bo_winery_wine_press", []),
("winery_middle_barrel",0,"winery_middle_barrel","bo_winery_middle_barrel", []),
("winery_wine_cart_small_loaded",0,"winery_wine_cart_small_loaded","bo_winery_wine_cart_small_loaded", []),
("winery_wine_cart_small_empty",0,"winery_wine_cart_small_empty","bo_winery_wine_cart_small_empty", []),
("winery_wine_cart_empty",0,"winery_wine_cart_empty","bo_winery_wine_cart_empty", []),
("winery_wine_cart_loaded",0,"winery_wine_cart_loaded","bo_winery_wine_cart_loaded", []),
("weavery_interior",0,"weavery_interior","bo_weavery_interior", []),
("weavery_loom_a",0,"weavery_loom_a","bo_weavery_loom_a", []),
("weavery_spinning_wheel",0,"weavery_spinning_wheel","bo_weavery_spinning_wheel", []),
("mill_interior",0,"mill_interior","bo_mill_interior", []),
("mill_flour_sack", 0,"mill_flour_sack","bo_mill_flour_sack", []),
("mill_flour_sack_desk_a", 0,"mill_flour_sack_desk_a","bo_mill_flour_sack_desk_a", []),
("mill_flour_sack_desk_b", 0,"mill_flour_sack_desk_b","bo_mill_flour_sack_desk_b", []),
("smithy_interior", 0,"smithy_interior","bo_smithy_interior", []),
("smithy_grindstone_wheel", 0,"smithy_grindstone_wheel","bo_smithy_grindstone_wheel", []),
("smithy_forge_bellows", 0,"smithy_forge_bellows","bo_smithy_forge_bellows", []),
("smithy_forge", 0,"smithy_forge","bo_smithy_forge", []),
("smithy_anvil", 0,"smithy_anvil","bo_smithy_anvil", []),
("tannery_hide_a", 0,"tannery_hide_a","bo_tannery_hide_a", []),
("tannery_hide_b", 0,"tannery_hide_b","bo_tannery_hide_b", []),
("tannery_pools_a", 0,"tannery_pools_a","bo_tannery_pools_a", []),
("tannery_pools_b", 0,"tannery_pools_b","bo_tannery_pools_b", []),
("fountain", 0, "fountain", "bo_fountain", []),
("rhodok_houses_a",0,"rhodok_houses_a","bo_rhodok_houses_a", []),
("rhodok_houses_b",0,"rhodok_houses_b","bo_rhodok_houses_b", []),
("rhodok_houses_c",0,"rhodok_houses_c","bo_rhodok_houses_c", []),
("rhodok_houses_d",0,"rhodok_houses_d","bo_rhodok_houses_d", []),
("rhodok_houses_e",0,"rhodok_houses_e","bo_rhodok_houses_e", []),
("rhodok_house_passage_a",0,"rhodok_house_passage_a","bo_rhodok_house_passage_a", []),
("bridge_b",0,"bridge_b","bo_bridge_b", []),
("brewery_pool", 0,"brewery_pool","bo_brewery_pool", []),
("brewery_big_bucket", 0,"brewery_big_bucket","bo_brewery_big_bucket", []),
("brewery_interior", 0,"brewery_interior","bo_brewery_interior", []),
("brewery_bucket_platform_a", 0,"brewery_bucket_platform_a","bo_brewery_bucket_platform_a", []),
("brewery_bucket_platform_b", 0,"brewery_bucket_platform_b","bo_brewery_bucket_platform_b", []),
("weavery_dye_pool_r",0,"weavery_dye_pool_r","bo_weavery_dye_pool_r", []),
("weavery_dye_pool_y",0,"weavery_dye_pool_y","bo_weavery_dye_pool_y", []),
("weavery_dye_pool_b",0,"weavery_dye_pool_b","bo_weavery_dye_pool_b", []),
("weavery_dye_pool_p",0,"weavery_dye_pool_p","bo_weavery_dye_pool_p", []),
("weavery_dye_pool_g",0,"weavery_dye_pool_g","bo_weavery_dye_pool_g", []),
("oil_press_interior",0,"oil_press_interior","bo_oil_press_interior", []),
("city_swad_01" ,0,"city_swad_01" ,"bo_city_swad_01" , []),
("city_swad_02" ,0,"city_swad_02" ,"bo_city_swad_02" , []),
("city_swad_03" ,0,"city_swad_03" ,"bo_city_swad_03" , []),
("city_swad_04" ,0,"city_swad_04" ,"bo_city_swad_04" , []),
("city_swad_passage_01" ,0,"city_swad_passage_01" ,"bo_city_swad_passage_01" , []),
("city_swad_05" ,0,"city_swad_05" ,"bo_city_swad_05" , []),
("arena_block_j_a",0,"arena_block_j_a","bo_arena_block_j_a", []),
("arena_underway_a",0,"arena_underway_a","bo_arena_underway_a", []),
("arena_circle_a",0,"arena_circle_a","bo_arena_circle_a", []),
("rope_bridge_15m",0,"rope_bridge_15m","bo_rope_bridge_15m", []),
("tree_house_a",0,"tree_house_a","bo_tree_house_a", []),
("tree_house_guard_a",0,"tree_house_guard_a","bo_tree_house_guard_a", []),
("tree_house_guard_b",0,"tree_house_guard_b","bo_tree_house_guard_b", []),
("tree_shelter_a",0,"tree_shelter_a","bo_tree_shelter_a", []),
("yellow_fall_leafs_a",0,"0","0",
[
(ti_on_scene_prop_init,
[
(particle_system_add_new, "psys_fall_leafs_a"),
]),
]),
("rock_bridge_a",0,"rock_bridge_a","bo_rock_bridge_a", []),
("suspension_bridge_a",0,"suspension_bridge_a","bo_suspension_bridge_a", []),
("mine_a",0,"mine_a","bo_mine_a", []),
("snowy_destroy_house_a",0,"snowy_destroy_house_a","bo_snowy_destroy_house_a", []),
("snowy_destroy_house_b",0,"snowy_destroy_house_b","bo_snowy_destroy_house_b", []),
("snowy_destroy_house_c",0,"snowy_destroy_house_c","bo_snowy_destroy_house_c", []),
("snowy_destroy_heap",0,"snowy_destroy_heap","bo_snowy_destroy_heap", []),
("snowy_destroy_castle_a",0,"snowy_destroy_castle_a","bo_snowy_destroy_castle_a", []),
("snowy_destroy_castle_b",0,"snowy_destroy_castle_b","bo_snowy_destroy_castle_b", []),
("snowy_destroy_castle_c",0,"snowy_destroy_castle_c","bo_snowy_destroy_castle_c", []),
("snowy_destroy_castle_d",0,"snowy_destroy_castle_d","bo_snowy_destroy_castle_d", []),
("snowy_destroy_windmill",0,"snowy_destroy_windmill","bo_snowy_destroy_windmill", []),
("snowy_destroy_tree_a",0,"snowy_destroy_tree_a","bo_snowy_destroy_tree_a", []),
("snowy_destroy_tree_b",0,"snowy_destroy_tree_b","bo_snowy_destroy_tree_b", []),
("snowy_destroy_bridge_a",0,"snowy_destroy_bridge_a","bo_snowy_destroy_bridge_a", []),
("snowy_destroy_bridge_b",0,"snowy_destroy_bridge_b","bo_snowy_destroy_bridge_b", []),
#INVASION MODE START
#MCA
#prisoner cart
("prison_cart", sokf_moveable,"prison_cart","bo_prison_cart", []),
("prison_cart_door_right", sokf_show_hit_point_bar|sokf_destructible|sokf_moveable,"prison_cart_door_right","bo_prison_cart_door_right",
[
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 300),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(set_fixed_point_multiplier, 1),
(try_end),
]),
  ]), # prop_hit trigger present so the hit point bar is displayed
("prison_cart_door_left", sokf_show_hit_point_bar|sokf_destructible|sokf_moveable,"prison_cart_door_left","bo_prison_cart_door_left",
[
(ti_on_init_scene_prop,
[
(store_trigger_param_1, ":instance_no"),
(scene_prop_set_hit_points, ":instance_no", 300),
]),
(ti_on_scene_prop_hit,
[
(store_trigger_param_1, ":instance_no"),
(store_trigger_param_2, ":damage"),
(try_begin),
(scene_prop_get_hit_points, ":hit_points", ":instance_no"),
(val_sub, ":hit_points", ":damage"),
(gt, ":hit_points", 0),
(play_sound, "snd_dummy_hit"),
(else_try),
(neg|multiplayer_is_server),
(play_sound, "snd_dummy_destroyed"),
(try_end),
(try_begin),
(this_or_next|multiplayer_is_server),
(neg|game_in_multiplayer_mode),
(particle_system_burst, "psys_dummy_smoke", pos1, 3),
(particle_system_burst, "psys_dummy_straw", pos1, 10),
(set_fixed_point_multiplier, 1),
(try_end),
]),
  ]), # prop_hit trigger present so the hit point bar is displayed
("multiplayer_coop_item_drop", sokf_moveable|sokf_type_player_limiter|spr_use_time(1), "package", "bobaggage", [
(ti_on_scene_prop_use,
[
]),
(ti_on_scene_prop_start_use,
[
(store_trigger_param_1, ":agent_id"),
(store_trigger_param_2, ":instance_id"),
(agent_get_player_id, ":player_no", ":agent_id"),
(player_is_active, ":player_no"),
(assign, ":living_companion_1", -1),
(assign, ":living_companion_2", -1),
#(assign, reg1, ":agent_id"),
#(assign, reg2, ":instance_id"),
#(display_message, "@prop use trigger item: {reg0} agent: {reg1} instance: {reg2} "),
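        #note: ":agent_id" is reused as the loop variable below; the using agent's id is not needed past this point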
(try_for_agents, ":agent_id"),
#(this_or_next|eq, ":living_companion_1", -1),
#(eq, ":living_companion_1", -1),
(agent_is_active, ":agent_id"),
(agent_is_alive, ":agent_id"),
(agent_is_human, ":agent_id"),
(agent_is_non_player, ":agent_id"),
(agent_get_team, ":team_id", ":agent_id"),
(eq, ":team_id", 0),
(agent_get_group, ":agent_group", ":agent_id"),
(eq, ":agent_group", ":player_no"),
(agent_get_troop_id, ":troop_id", ":agent_id"),
(this_or_next|player_slot_eq, ":player_no", slot_player_companion_ids_begin, ":troop_id"),
(player_slot_eq, ":player_no", slot_player_companion_ids_begin + 1, ":troop_id"),
(try_begin),
(eq, ":living_companion_1", -1),
(assign, ":living_companion_1", ":agent_id"),
(else_try),
(eq, ":living_companion_2", -1),
(assign, ":living_companion_2", ":agent_id"),
(try_end),
(try_end),
#(display_message, "@sending to player"),
#(assign, reg1, ":living_companion_1"),
#(assign, reg2, ":living_companion_2"),
#(display_message, "@living_companion_1: {reg1} living_companion_2: {reg2} "),
(assign, ":new_chest", 1),
(assign, ":empty_slot", -1),
(try_for_range, ":cur_slot", slot_player_coop_opened_chests_begin, slot_player_coop_opened_chests_end),
(eq, ":new_chest", 1),
(player_get_slot, ":cur_instance", ":player_no", ":cur_slot"),
(try_begin),
(eq, ":cur_instance", ":instance_id"),
(assign, ":new_chest", 0),
(try_end),
(try_end),
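        #second pass: find the first empty slot in which to record this chest as opened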
(try_for_range, ":cur_slot", slot_player_coop_opened_chests_begin, slot_player_coop_opened_chests_end),
(eq, ":new_chest", 1),
(player_get_slot, ":cur_instance", ":player_no", ":cur_slot"),
(try_begin),
(eq, ":cur_instance", 0),
(eq, ":empty_slot", -1),
(assign, ":empty_slot", ":cur_slot"),
(try_end),
(try_end),
(try_begin),
(eq, ":new_chest", 1),
(call_script, "script_coop_generate_item_drop", ":player_no"),
(neq, ":empty_slot", -1),
(player_set_slot, ":player_no", ":empty_slot", ":instance_id"),
(multiplayer_send_2_int_to_player, ":player_no", multiplayer_event_coop_chest_opened, ":empty_slot", ":instance_id"),
(try_end),
(assign, reg1, ":new_chest"),
#(display_message, "@new chest: {reg1}"),
(try_begin),
(eq, ":new_chest", 1),
(try_begin),
(neq, ":player_no", 0),
(multiplayer_send_3_int_to_player, ":player_no", multiplayer_event_coop_drop_item, "$g_ccoop_currently_dropping_item", ":living_companion_1", ":living_companion_2"),
#(display_message, "@script called"), #debug
(else_try),
(call_script, "script_coop_drop_item", "$g_ccoop_currently_dropping_item", ":living_companion_1", ":living_companion_2"),
#(assign, reg1, ":player_no"),
#(display_message, "@sending to player no: {reg1} "),
(try_end),
(try_end),
(assign, "$g_ccoop_currently_dropping_item", -1),
]),
]),
#INVASION MODE END
]
|
Ikaguia/LWBR-WarForge
|
module_scene_props.py
|
Python
|
unlicense
| 133,561 | 0.056558 |
import urwid
import logging
class UserInput(object):
def __init__(self):
self._viewMap = None
self._mainLoop = None
    def setMap(self, **viewMap):
self._viewMap = viewMap
def setLoop(self, loop):
self._mainLoop = loop
def __call__(self, keypress):
logging.debug('keypress={}'.format(keypress))
if keypress in ('q', 'Q'):
raise urwid.ExitMainLoop()
if type(keypress) is not str:
return
if keypress.upper() not in self._viewMap:
return
view = self._viewMap[keypress.upper()]
self._mainLoop.widget = view.widget()
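# Minimal usage sketch (hypothetical 'metricsView'/'logView' objects, each exposing a widget() method):
# handler = UserInput()
# handler.setMap(M=metricsView, L=logView)
# loop = urwid.MainLoop(metricsView.widget(), unhandled_input=handler)
# handler.setLoop(loop)
# loop.run()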
|
kjniemi/scylla
|
tools/scyllatop/userinput.py
|
Python
|
agpl-3.0
| 648 | 0 |
#
# SVC (SVM Multi classifier)
#
# @ author becxer
# @ e-mail becxer87@gmail.com
#
import numpy as np
from pytrain.SVM import SVM
from pytrain.lib import convert
from pytrain.lib import ptmath
class SVC:
def __init__(self, mat_data, label_data):
self.x = np.mat(convert.list2npfloat(mat_data))
self.ys = np.mat(np.sign(convert.list2npfloat(label_data) - 0.5))
self.outbit = self.ys.shape[1]
self.svm4bit = []
for i in range(self.outbit):
self.svm4bit.append(SVM(self.x, self.ys[:,i]))
def fit(self, C, toler, epoch, kernel = 'Linear', kernel_params = {}):
for i in range(self.outbit):
self.svm4bit[i].fit(C, toler, epoch, kernel, kernel_params)
def predict(self, array_input):
array_input = np.mat(convert.list2npfloat(array_input))
output = []
for i in range(self.outbit):
output.append(self.svm4bit[i].predict(array_input))
return list(np.sign(np.array(output) + 1))
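    # Minimal usage sketch (hypothetical data; labels carry one 0/1 column per class):
    # mat_data = [[1.0, 2.0], [2.0, 1.0], [0.5, 3.0]]
    # label_data = [[1, 0], [0, 1], [1, 0]]
    # svc = SVC(mat_data, label_data)
    # svc.fit(C=0.6, toler=0.001, epoch=40)  # linear kernel by default
    # print(svc.predict([1.5, 1.5]))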
|
becxer/pytrain
|
pytrain/SVM/SVC.py
|
Python
|
mit
| 1,018 | 0.007859 |
import numpy as np
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing.label import _check_numpy_unicode_bug
from sklearn.utils import column_or_1d
from ..base import SparkBroadcasterMixin, SparkTransformerMixin
class SparkLabelEncoder(LabelEncoder, SparkTransformerMixin,
SparkBroadcasterMixin):
"""Encode labels with value between 0 and n_classes-1.
Read more in the :ref:`User Guide <preprocessing_targets>`.
Attributes
----------
classes_ : array of shape (n_class,)
Holds the label for each class.
Examples
--------
`SparkLabelEncoder` can be used to normalize labels.
>>> from splearn.preprocessing import SparkLabelEncoder
>>> from splearn import BlockRDD
>>>
>>> data = ["paris", "paris", "tokyo", "amsterdam"]
>>> y = BlockRDD(sc.parallelize(data))
>>>
>>> le = SparkLabelEncoder()
>>> le.fit(y)
>>> le.classes_
array(['amsterdam', 'paris', 'tokyo'],
dtype='|S9')
>>>
>>> test = ["tokyo", "tokyo", "paris"]
>>> y_test = BlockRDD(sc.parallelize(test))
>>>
>>> le.transform(y_test).toarray()
array([2, 2, 1])
>>>
>>> test = [2, 2, 1]
>>> y_test = BlockRDD(sc.parallelize(test))
>>>
>>> le.inverse_transform(y_test).toarray()
array(['tokyo', 'tokyo', 'paris'],
dtype='|S9')
"""
__transient__ = ['classes_']
def fit(self, y):
"""Fit label encoder
Parameters
----------
y : ArrayRDD (n_samples,)
Target values.
Returns
-------
self : returns an instance of self.
"""
def mapper(y):
y = column_or_1d(y, warn=True)
_check_numpy_unicode_bug(y)
return np.unique(y)
def reducer(a, b):
return np.unique(np.concatenate((a, b)))
self.classes_ = y.map(mapper).reduce(reducer)
return self
def fit_transform(self, y):
"""Fit label encoder and return encoded labels
Parameters
----------
y : ArrayRDD [n_samples]
Target values.
Returns
-------
y : ArrayRDD [n_samples]
"""
return self.fit(y).transform(y)
def transform(self, y):
"""Transform labels to normalized encoding.
Parameters
----------
y : ArrayRDD [n_samples]
Target values.
Returns
-------
y : ArrayRDD [n_samples]
"""
mapper = super(SparkLabelEncoder, self).transform
mapper = self.broadcast(mapper, y.context)
return y.transform(mapper)
def inverse_transform(self, y):
"""Transform labels back to original encoding.
Parameters
----------
y : numpy array of shape [n_samples]
Target values.
Returns
-------
y : ArrayRDD [n_samples]
"""
mapper = super(SparkLabelEncoder, self).inverse_transform
mapper = self.broadcast(mapper, y.context)
return y.transform(mapper)
|
lensacom/sparkit-learn
|
splearn/preprocessing/label.py
|
Python
|
apache-2.0
| 3,089 | 0 |
"""grace
Revision ID: 3d30c324ed4
Revises: 8c78a916f1
Create Date: 2015-09-07 08:51:46.375707
"""
# revision identifiers, used by Alembic.
revision = '3d30c324ed4'
down_revision = '8c78a916f1'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
pass
### end Alembic commands ###
|
huangtao-sh/grace
|
grace/alembic/versions/3d30c324ed4_grace.py
|
Python
|
gpl-2.0
| 531 | 0.011299 |
#!/usr/bin/env python
network_device = {
'ip_addr' : '81.1.1.3',
'username' : 'user1',
'passwd' : 'pass123',
'vendor' : 'cisco',
'model' : '3940',
}
for k,v in network_device.items():
print k,v
network_device['passwd']='newpass'
network_device['secret']='enable'
for k,v in network_device.items():
print k,v
try:
print network_device['device_type']
except KeyError:
print "Device type not found\n"
|
aniketpuranik/pynet_test
|
day2/ex15_exception.py
|
Python
|
apache-2.0
| 445 | 0.024719 |
from OpenGLCffi.GLES2 import params
@params(api='gles2', prms=['n', 'ids'])
def glGenQueriesEXT(n, ids):
pass
@params(api='gles2', prms=['n', 'ids'])
def glDeleteQueriesEXT(n, ids):
pass
@params(api='gles2', prms=['id'])
def glIsQueryEXT(id):
pass
@params(api='gles2', prms=['target', 'id'])
def glBeginQueryEXT(target, id):
pass
@params(api='gles2', prms=['target'])
def glEndQueryEXT(target):
pass
@params(api='gles2', prms=['target', 'pname', 'params'])
def glGetQueryivEXT(target, pname):
pass
@params(api='gles2', prms=['id', 'pname', 'params'])
def glGetQueryObjectuivEXT(id, pname):
pass
|
cydenix/OpenGLCffi
|
OpenGLCffi/GLES2/EXT/EXT/occlusion_query_boolean.py
|
Python
|
mit
| 616 | 0.01461 |
# Copyright 2016-2018 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import subprocess
import shutil
import time
import types
import shlex
from pipes import quote
from devlib.exception import TargetStableError
from devlib.host import PACKAGE_BIN_DIRECTORY
from devlib.platform import Platform
from devlib.utils.ssh import AndroidGem5Connection, LinuxGem5Connection
class Gem5SimulationPlatform(Platform):
def __init__(self, name,
host_output_dir,
gem5_bin,
gem5_args,
gem5_virtio,
core_names=None,
core_clusters=None,
big_core=None,
model=None,
modules=None,
gem5_telnet_port=None):
# First call the parent class
super(Gem5SimulationPlatform, self).__init__(name, core_names, core_clusters,
big_core, model, modules)
# Start setting up the gem5 parameters/directories
# The gem5 subprocess
self.gem5 = None
self.gem5_port = gem5_telnet_port or None
self.stats_directory = host_output_dir
self.gem5_out_dir = os.path.join(self.stats_directory, "gem5")
self.gem5_interact_dir = '/tmp' # Host directory
self.executable_dir = None # Device directory
self.working_dir = None # Device directory
self.stdout_file = None
self.stderr_file = None
self.stderr_filename = None
if self.gem5_port is None: # pylint: disable=simplifiable-if-statement
# Allows devlib to pick up already running simulations
self.start_gem5_simulation = True
else:
self.start_gem5_simulation = False
# Find the first one that does not exist. Ensures that we do not re-use
# the directory used by someone else.
i = 0
directory = os.path.join(self.gem5_interact_dir, "wa_{}".format(i))
while os.path.exists(directory):
i += 1
directory = os.path.join(self.gem5_interact_dir, "wa_{}".format(i))
self.gem5_interact_dir = directory
self.logger.debug("Using {} as the temporary directory."
.format(self.gem5_interact_dir))
# Parameters passed onto gem5
self.gem5args_binary = gem5_bin
self.gem5args_args = gem5_args
self.gem5args_virtio = gem5_virtio
self._check_gem5_command()
# Start the interaction with gem5
self._start_interaction_gem5()
def _check_gem5_command(self):
"""
Check if the command to start gem5 makes sense
"""
if self.gem5args_binary is None:
raise TargetStableError('Please specify a gem5 binary.')
if self.gem5args_args is None:
raise TargetStableError('Please specify the arguments passed on to gem5.')
self.gem5args_virtio = str(self.gem5args_virtio).format(self.gem5_interact_dir)
if self.gem5args_virtio is None:
raise TargetStableError('Please specify arguments needed for virtIO.')
def _start_interaction_gem5(self):
"""
Starts the interaction of devlib with gem5.
"""
# First create the input and output directories for gem5
if self.start_gem5_simulation:
# Create the directory to send data to/from gem5 system
self.logger.info("Creating temporary directory for interaction "
" with gem5 via virtIO: {}"
.format(self.gem5_interact_dir))
os.mkdir(self.gem5_interact_dir)
# Create the directory for gem5 output (stats files etc)
if not os.path.exists(self.stats_directory):
os.mkdir(self.stats_directory)
if os.path.exists(self.gem5_out_dir):
raise TargetStableError("The gem5 stats directory {} already "
"exists.".format(self.gem5_out_dir))
else:
os.mkdir(self.gem5_out_dir)
# We need to redirect the standard output and standard error for the
# gem5 process to a file so that we can debug when things go wrong.
f = os.path.join(self.gem5_out_dir, 'stdout')
self.stdout_file = open(f, 'w')
f = os.path.join(self.gem5_out_dir, 'stderr')
self.stderr_file = open(f, 'w')
# We need to keep this so we can check which port to use for the
# telnet connection.
self.stderr_filename = f
# Start gem5 simulation
self.logger.info("Starting the gem5 simulator")
command_line = "{} --outdir={} {} {}".format(self.gem5args_binary,
quote(self.gem5_out_dir),
self.gem5args_args,
self.gem5args_virtio)
self.logger.debug("gem5 command line: {}".format(command_line))
self.gem5 = subprocess.Popen(shlex.split(command_line),
stdout=self.stdout_file,
stderr=self.stderr_file)
else:
# The simulation should already be running
# Need to dig up the (1) gem5 simulation in question (2) its input
# and output directories (3) virtio setting
self._intercept_existing_gem5()
# As the gem5 simulation is running now or was already running
# we now need to find out which telnet port it uses
self._intercept_telnet_port()
def _intercept_existing_gem5(self):
"""
Intercept the information about a running gem5 simulation
e.g. pid, input directory etc
"""
self.logger("This functionality is not yet implemented")
raise TargetStableError()
def _intercept_telnet_port(self):
"""
Intercept the telnet port of a running gem5 simulation
"""
if self.gem5 is None:
raise TargetStableError('The platform has no gem5 simulation! '
'Something went wrong')
while self.gem5_port is None:
# Check that gem5 is running!
if self.gem5.poll():
message = "The gem5 process has crashed with error code {}!\n\tPlease see {} for details."
raise TargetStableError(message.format(self.gem5.poll(), self.stderr_file.name))
# Open the stderr file
with open(self.stderr_filename, 'r') as f:
for line in f:
# Look for two different strings, exact wording depends on
# version of gem5
m = re.search(r"Listening for system connection on port (?P<port>\d+)", line)
if not m:
m = re.search(r"Listening for connections on port (?P<port>\d+)", line)
if m:
port = int(m.group('port'))
if port >= 3456 and port < 5900:
self.gem5_port = port
break
# Check if the sockets are not disabled
m = re.search(r"Sockets disabled, not accepting terminal connections", line)
if m:
raise TargetStableError("The sockets have been disabled!"
"Pass --listener-mode=on to gem5")
else:
time.sleep(1)
def init_target_connection(self, target):
"""
Update the type of connection in the target from here
"""
if target.os == 'linux':
target.conn_cls = LinuxGem5Connection
else:
target.conn_cls = AndroidGem5Connection
def setup(self, target):
"""
Deploy m5 if not yet installed
"""
m5_path = self._deploy_m5(target)
target.conn.m5_path = m5_path
# Set the terminal settings for the connection to gem5
self._resize_shell(target)
def update_from_target(self, target):
"""
Set the m5 path and if not yet installed, deploy m5
Overwrite certain methods in the target that either can be done
more efficiently by gem5 or don't exist in gem5
"""
m5_path = target.get_installed('m5')
if m5_path is None:
m5_path = self._deploy_m5(target)
target.conn.m5_path = m5_path
# Overwrite the following methods (monkey-patching)
self.logger.debug("Overwriting the 'capture_screen' method in target")
# Housekeeping to prevent recursion
setattr(target, 'target_impl_capture_screen', target.capture_screen)
target.capture_screen = types.MethodType(_overwritten_capture_screen, target)
self.logger.debug("Overwriting the 'reset' method in target")
target.reset = types.MethodType(_overwritten_reset, target)
self.logger.debug("Overwriting the 'reboot' method in target")
target.reboot = types.MethodType(_overwritten_reboot, target)
# Call the general update_from_target implementation
super(Gem5SimulationPlatform, self).update_from_target(target)
def gem5_capture_screen(self, filepath):
file_list = os.listdir(self.gem5_out_dir)
screen_caps = []
for f in file_list:
if '.bmp' in f:
screen_caps.append(f)
if '{ts}' in filepath:
cmd = '{} date -u -Iseconds'
# pylint: disable=no-member
ts = self.target.execute(cmd.format(self.target.busybox)).strip()
filepath = filepath.format(ts=ts)
successful_capture = False
        if len(screen_caps) == 1:
            # Exactly one gem5 screencap is available; try to convert it. If
            # that fails, fall back to the slower built-in capture method.
try:
import Image
gem5_image = os.path.join(self.gem5_out_dir, screen_caps[0])
temp_image = os.path.join(self.gem5_out_dir, "file.png")
im = Image.open(gem5_image)
im.save(temp_image, "PNG")
shutil.copy(temp_image, filepath)
os.remove(temp_image)
                self.logger.info("capture_screen: using gem5 screencap")
successful_capture = True
except (shutil.Error, ImportError, IOError):
pass
return successful_capture
# pylint: disable=no-self-use
def _deploy_m5(self, target):
# m5 is not yet installed so install it
host_executable = os.path.join(PACKAGE_BIN_DIRECTORY,
target.abi, 'm5')
return target.install(host_executable)
# pylint: disable=no-self-use
def _resize_shell(self, target):
"""
Resize the shell to avoid line wrapping issues.
"""
# Try and avoid line wrapping as much as possible.
target.execute('{} stty columns 1024'.format(target.busybox))
target.execute('reset', check_exit_code=False)
# Methods that will be monkey-patched onto the target
def _overwritten_reset(self): # pylint: disable=unused-argument
raise TargetStableError('Resetting is not allowed on gem5 platforms!')
def _overwritten_reboot(self): # pylint: disable=unused-argument
raise TargetStableError('Rebooting is not allowed on gem5 platforms!')
def _overwritten_capture_screen(self, filepath):
connection_screencapped = self.platform.gem5_capture_screen(filepath)
if not connection_screencapped:
# The connection was not able to capture the screen so use the target
# implementation
self.logger.debug('{} was not able to screen cap, using the original target implementation'.format(self.platform.__class__.__name__))
self.target_impl_capture_screen(filepath)
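# Illustrative usage sketch (not part of devlib): how a Gem5SimulationPlatform
# might be constructed. Every path and argument below is an assumption chosen
# for demonstration; consult the gem5 and devlib documentation for real values.
#
#   platform = Gem5SimulationPlatform(
#       name='gem5',
#       host_output_dir='/tmp/gem5-stats',         # where stats/stdout land
#       gem5_bin='/opt/gem5/build/ARM/gem5.opt',   # simulator binary
#       gem5_args='configs/example/fs.py',         # simulation script
#       gem5_virtio='--virtio-image={}')           # '{}' is filled with the
#                                                  # host interaction directory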
|
ARM-software/lisa
|
external/devlib/devlib/platform/gem5.py
|
Python
|
apache-2.0
| 12,624 | 0.00198 |
import pytest, sys, os
sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../")
from unittest import TestCase
from pylogic.case import Case
class TestBaseOperand(TestCase):
def test_eq_case(self):
case1 = Case("parent", "homer", "bart")
case2 = Case("parent", "homer", "bart")
assert case1 == case2
def test_not_eq_case1(self):
case1 = Case("parent", "homer", "bart")
case2 = Case("parent", "homer", "lisa")
assert case1 != case2
def test_not_eq_case2(self):
case1 = Case("parent", "homer", "bart")
case2 = Case("brother", "homer", "lisa")
assert case1 != case2
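    def test_eq_case_reflexive(self):
        # Hedged addition: a case should at minimum compare equal to itself.
        # This assumes Case equality compares predicate and arguments, as the
        # tests above suggest.
        case1 = Case("parent", "homer", "bart")
        assert case1 == case1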
|
fran-bravo/pylogic-module
|
test/test_case_operands.py
|
Python
|
mit
| 666 | 0.004505 |
import unittest
from prtgcli.cli import main
class TestQuery(unittest.TestCase):
def setUp(self):
pass
def test_list_devices(self):
pass
def test_list_sensors(self):
pass
def test_status(self):
pass
def test_update(self):
pass
|
kevinschoon/prtgcli
|
test/test_cli.py
|
Python
|
apache-2.0
| 294 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-04-26 16:53
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pppcemr', '0122_auto_20160425_1327'),
]
operations = [
migrations.AddField(
model_name='treatment',
name='height_cm',
field=models.FloatField(blank=True, help_text='cm', null=True),
),
migrations.AlterField(
model_name='treatment',
name='weight_kg',
field=models.FloatField(blank=True, help_text='kg', null=True),
),
]
|
sstebbins/pppcpro
|
pppcemr/migrations/0123_auto_20160426_1253.py
|
Python
|
agpl-3.0
| 663 | 0 |
# TempConv.py
# Celsius to Fahrenheit
def Fahreinheit(temp):
temp = float(temp)
temp = (temp*9/5)+32
return temp
# Fahrenheit to Celsius
def Celcius(temp):
temp = float(temp)
temp = (temp-32)*5/9
return temp
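# Illustrative usage sketch (added here, not part of the original module):
# round-trip the boiling point of water through both converters. The sample
# values are assumptions chosen for demonstration.
if __name__ == '__main__':
    print(Fahreinheit(100))  # 100 C -> 212.0 F
    print(Celcius(212))      # 212 F -> 100.0 C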
|
megatharun/basic-python-for-researcher
|
TempConv.py
|
Python
|
artistic-2.0
| 243 | 0.012346 |
################################################################################
#
# Copyright (c) 2007 Christopher J. Stawarz
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
################################################################################
"""
Cooperative multitasking and asynchronous I/O using generators
multitask allows Python programs to use generators (a.k.a. coroutines)
to perform cooperative multitasking and asynchronous I/O.
Applications written using multitask consist of a set of cooperating
tasks that yield to a shared task manager whenever they perform a
(potentially) blocking operation, such as I/O on a socket or getting
data from a queue. The task manager temporarily suspends the task
(allowing other tasks to run in the meantime) and then restarts it
when the blocking operation is complete. Such an approach is suitable
for applications that would otherwise have to use select() and/or
multiple threads to achieve concurrency.
The functions and classes in the multitask module allow tasks to yield
for I/O operations on sockets and file descriptors, adding/removing
data to/from queues, or sleeping for a specified interval. When
yielding, a task can also specify a timeout. If the operation for
which the task yielded has not completed after the given number of
seconds, the task is restarted, and a Timeout exception is raised at
the point of yielding.
As a very simple example, here's how one could use multitask to allow
two unrelated tasks to run concurrently:
>>> def printer(message):
... while True:
... print message
... yield
...
>>> multitask.add(printer('hello'))
>>> multitask.add(printer('goodbye'))
>>> multitask.run()
hello
goodbye
hello
goodbye
hello
goodbye
[and so on ...]
For a more useful example, here's how one could implement a
multitasking server that can handle multiple concurrent client
connections:
def listener(sock):
while True:
conn, address = (yield multitask.accept(sock))
multitask.add(client_handler(conn))
def client_handler(sock):
while True:
request = (yield multitask.recv(sock, 1024))
if not request:
break
response = handle_request(request)
yield multitask.send(sock, response)
multitask.add(listener(sock))
multitask.run()
Tasks can also yield other tasks, which allows for composition of
tasks and reuse of existing multitasking code. A child task runs
until it either completes or raises an exception. To return output to
its parent, a child task raises StopIteration, passing the output
value(s) to the StopIteration constructor. An unhandled exception
raised within a child task is propagated to its parent. For example:
>>> def parent():
... print (yield return_none())
... print (yield return_one())
... print (yield return_many())
... try:
... yield raise_exception()
... except Exception, e:
... print 'caught exception: %s' % e
...
>>> def return_none():
... yield
... # do nothing
... # or return
... # or raise StopIteration
... # or raise StopIteration(None)
...
>>> def return_one():
... yield
... raise StopIteration(1)
...
>>> def return_many():
... yield
... raise StopIteration(2, 3) # or raise StopIteration((2, 3))
...
>>> def raise_exception():
... yield
... raise RuntimeError('foo')
...
>>> multitask.add(parent())
>>> multitask.run()
None
1
(2, 3)
caught exception: foo
"""
import collections
import errno
from functools import partial
import heapq
import os
import select
import sys
import time
import types
__author__ = 'Christopher Stawarz <cstawarz@csail.mit.edu>'
__version__ = '0.2.0'
# __revision__ = int('$Revision$'.split()[1])
################################################################################
#
# Timeout exception type
#
################################################################################
class Timeout(Exception):
'Raised in a yielding task when an operation times out'
pass
################################################################################
#
# _ChildTask class
#
################################################################################
class _ChildTask(object):
def __init__(self, parent, task):
self.parent = parent
self.task = task
def send(self, value):
return self.task.send(value)
def throw(self, type, value=None, traceback=None):
return self.task.throw(type, value, traceback)
################################################################################
#
# YieldCondition class
#
################################################################################
class YieldCondition(object):
"""
Base class for objects that are yielded by a task to the task
manager and specify the condition(s) under which the task should
be restarted. Only subclasses of this class are useful to
application code.
"""
def __init__(self, timeout=None):
"""
If timeout is None, the task will be suspended indefinitely
until the condition is met. Otherwise, if the condition is
not met within timeout seconds, a Timeout exception will be
raised in the yielding task.
"""
self.task = None
self.handle_expiration = None
if timeout is None:
self.expiration = None
else:
self.expiration = time.time() + float(timeout)
def _expires(self):
return (self.expiration is not None)
################################################################################
#
# _SleepDelay class and related functions
#
################################################################################
class _SleepDelay(YieldCondition):
def __init__(self, seconds):
seconds = float(seconds)
if seconds <= 0.0:
raise ValueError("'seconds' must be greater than 0")
super(_SleepDelay, self).__init__(seconds)
def sleep(seconds):
"""
A task that yields the result of this function will be resumed
after the specified number of seconds have elapsed. For example:
while too_early():
yield sleep(5) # Sleep for five seconds
do_something() # Done sleeping; get back to work
"""
return _SleepDelay(seconds)
################################################################################
#
# FDReady class and related functions
#
################################################################################
class FDReady(YieldCondition):
"""
A task that yields an instance of this class will be suspended
until a specified file descriptor is ready for I/O.
"""
def __init__(self, fd, read=False, write=False, exc=False, timeout=None):
"""
Resume the yielding task when fd is ready for reading,
writing, and/or "exceptional" condition handling. fd can be
any object accepted by select.select() (meaning an integer or
an object with a fileno() method that returns an integer).
Any exception raised by select() due to fd will be re-raised
in the yielding task.
If timeout is not None, a Timeout exception will be raised in
the yielding task if fd is not ready after timeout seconds
have elapsed.
"""
super(FDReady, self).__init__(timeout)
self.fd = (fd if _is_file_descriptor(fd) else fd.fileno())
if not (read or write or exc):
raise ValueError("'read', 'write', and 'exc' cannot all be false")
self.read = read
self.write = write
self.exc = exc
def fileno(self):
'Return the file descriptor on which the yielding task is waiting'
return self.fd
def _add_to_fdsets(self, read_fds, write_fds, exc_fds):
for add, fdset in ((self.read, read_fds),
(self.write, write_fds),
(self.exc, exc_fds)):
if add:
fdset.add(self)
def _remove_from_fdsets(self, read_fds, write_fds, exc_fds):
for fdset in (read_fds, write_fds, exc_fds):
fdset.discard(self)
def _is_file_descriptor(fd):
return isinstance(fd, (int, long))
def readable(fd, timeout=None):
"""
A task that yields the result of this function will be resumed
when fd is readable. If timeout is not None, a Timeout exception
will be raised in the yielding task if fd is not readable after
timeout seconds have elapsed. For example:
try:
yield readable(sock, timeout=5)
data = sock.recv(1024)
except Timeout:
# No data after 5 seconds
"""
return FDReady(fd, read=True, timeout=timeout)
def writable(fd, timeout=None):
"""
A task that yields the result of this function will be resumed
when fd is writable. If timeout is not None, a Timeout exception
will be raised in the yielding task if fd is not writable after
timeout seconds have elapsed. For example:
try:
yield writable(sock, timeout=5)
nsent = sock.send(data)
except Timeout:
# Can't send after 5 seconds
"""
return FDReady(fd, write=True, timeout=timeout)
################################################################################
#
# FDAction class and related functions
#
################################################################################
class FDAction(FDReady):
"""
A task that yields an instance of this class will be suspended
until an I/O operation on a specified file descriptor is complete.
"""
def __init__(self, fd, func, args=(), kwargs={}, read=False, write=False,
exc=False):
"""
Resume the yielding task when fd is ready for reading,
writing, and/or "exceptional" condition handling. fd can be
any object accepted by select.select() (meaning an integer or
an object with a fileno() method that returns an integer).
Any exception raised by select() due to fd will be re-raised
in the yielding task.
The value of the yield expression will be the result of
calling func with the specified args and kwargs (which
presumably performs a read, write, or other I/O operation on
fd). If func raises an exception, it will be re-raised in the
yielding task. Thus, FDAction is really just a convenient
subclass of FDReady that requests that the task manager
perform an I/O operation on the calling task's behalf.
If kwargs contains a timeout argument that is not None, a
Timeout exception will be raised in the yielding task if fd is
not ready after timeout seconds have elapsed.
"""
timeout = kwargs.pop('timeout', None)
super(FDAction, self).__init__(fd, read, write, exc, timeout)
self.func = func
self.args = args
self.kwargs = kwargs
def _eval(self):
return self.func(*(self.args), **(self.kwargs))
def read(fd, *args, **kwargs):
"""
A task that yields the result of this function will be resumed
when fd is readable, and the value of the yield expression will be
the result of reading from fd. If a timeout keyword is given and
is not None, a Timeout exception will be raised in the yielding
task if fd is not readable after timeout seconds have elapsed.
Other arguments will be passed to the read function (os.read() if
fd is an integer, fd.read() otherwise). For example:
try:
data = (yield read(fd, 1024, timeout=5))
except Timeout:
# No data after 5 seconds
"""
func = (partial(os.read, fd) if _is_file_descriptor(fd) else fd.read)
return FDAction(fd, func, args, kwargs, read=True)
def readline(fd, *args, **kwargs):
"""
A task that yields the result of this function will be resumed
when fd is readable, and the value of the yield expression will be
the result of reading a line from fd. If a timeout keyword is
given and is not None, a Timeout exception will be raised in the
yielding task if fd is not readable after timeout seconds have
elapsed. Other arguments will be passed to fd.readline(). For
example:
try:
data = (yield readline(fd, timeout=5))
except Timeout:
# No data after 5 seconds
"""
return FDAction(fd, fd.readline, args, kwargs, read=True)
def write(fd, *args, **kwargs):
"""
A task that yields the result of this function will be resumed
when fd is writable, and the value of the yield expression will be
the result of writing to fd. If a timeout keyword is given and is
not None, a Timeout exception will be raised in the yielding task
if fd is not writable after timeout seconds have elapsed. Other
arguments will be passed to the write function (os.write() if fd
is an integer, fd.write() otherwise). For example:
try:
nbytes = (yield write(fd, data, timeout=5))
except Timeout:
# Can't write after 5 seconds
"""
func = (partial(os.write, fd) if _is_file_descriptor(fd) else fd.write)
return FDAction(fd, func, args, kwargs, write=True)
def accept(sock, *args, **kwargs):
"""
A task that yields the result of this function will be resumed
when sock is readable, and the value of the yield expression will
be the result of accepting a new connection on sock. If a timeout
keyword is given and is not None, a Timeout exception will be
raised in the yielding task if sock is not readable after timeout
seconds have elapsed. Other arguments will be passed to
sock.accept(). For example:
try:
conn, address = (yield accept(sock, timeout=5))
except Timeout:
# No connections after 5 seconds
"""
return FDAction(sock, sock.accept, args, kwargs, read=True)
def recv(sock, *args, **kwargs):
"""
A task that yields the result of this function will be resumed
when sock is readable, and the value of the yield expression will
be the result of receiving from sock. If a timeout keyword is
given and is not None, a Timeout exception will be raised in the
yielding task if sock is not readable after timeout seconds have
elapsed. Other arguments will be passed to sock.recv(). For
example:
try:
data = (yield recv(sock, 1024, timeout=5))
except Timeout:
# No data after 5 seconds
"""
return FDAction(sock, sock.recv, args, kwargs, read=True)
def recvfrom(sock, *args, **kwargs):
"""
A task that yields the result of this function will be resumed
when sock is readable, and the value of the yield expression will
be the result of receiving from sock. If a timeout keyword is
given and is not None, a Timeout exception will be raised in the
yielding task if sock is not readable after timeout seconds have
elapsed. Other arguments will be passed to sock.recvfrom(). For
example:
try:
data, address = (yield recvfrom(sock, 1024, timeout=5))
except Timeout:
# No data after 5 seconds
"""
return FDAction(sock, sock.recvfrom, args, kwargs, read=True)
def send(sock, *args, **kwargs):
"""
A task that yields the result of this function will be resumed
when sock is writable, and the value of the yield expression will
be the result of sending to sock. If a timeout keyword is given
and is not None, a Timeout exception will be raised in the
yielding task if sock is not writable after timeout seconds have
elapsed. Other arguments will be passed to the sock.send(). For
example:
try:
nsent = (yield send(sock, data, timeout=5))
except Timeout:
# Can't send after 5 seconds
"""
return FDAction(sock, sock.send, args, kwargs, write=True)
def sendto(sock, *args, **kwargs):
"""
A task that yields the result of this function will be resumed
when sock is writable, and the value of the yield expression will
be the result of sending to sock. If a timeout keyword is given
and is not None, a Timeout exception will be raised in the
yielding task if sock is not writable after timeout seconds have
elapsed. Other arguments will be passed to the sock.sendto().
For example:
try:
nsent = (yield sendto(sock, data, address, timeout=5))
except Timeout:
# Can't send after 5 seconds
"""
return FDAction(sock, sock.sendto, args, kwargs, write=True)
################################################################################
#
# Queue and _QueueAction classes
#
################################################################################
class Queue(object):
"""
A multi-producer, multi-consumer FIFO queue (similar to
Queue.Queue) that can be used for exchanging data between tasks
"""
def __init__(self, contents=(), maxsize=0):
"""
Create a new Queue instance. contents is a sequence (empty by
default) containing the initial contents of the queue. If
maxsize is greater than 0, the queue will hold a maximum of
maxsize items, and put() will block until space is available
in the queue.
"""
self.maxsize = int(maxsize)
self._queue = collections.deque(contents)
def __len__(self):
'Return the number of items in the queue'
return len(self._queue)
def _get(self):
return self._queue.popleft()
def _put(self, item):
self._queue.append(item)
def empty(self):
        'Return True if the queue is empty, False otherwise'
return (len(self) == 0)
def full(self):
        'Return True if the queue is full, False otherwise'
return ((len(self) >= self.maxsize) if (self.maxsize > 0) else False)
def get(self, timeout=None):
"""
A task that yields the result of this method will be resumed
when an item is available in the queue, and the value of the
yield expression will be the item. If timeout is not None, a
Timeout exception will be raised in the yielding task if an
item is not available after timeout seconds have elapsed. For
example:
try:
item = (yield queue.get(timeout=5))
except Timeout:
# No item available after 5 seconds
"""
return _QueueAction(self, timeout=timeout)
def put(self, item, timeout=None):
"""
A task that yields the result of this method will be resumed
when item has been added to the queue. If timeout is not
None, a Timeout exception will be raised in the yielding task
if no space is available after timeout seconds have elapsed.
For example:
try:
yield queue.put(item, timeout=5)
except Timeout:
# No space available after 5 seconds
"""
return _QueueAction(self, item, timeout=timeout)
class _QueueAction(YieldCondition):
NO_ITEM = object()
def __init__(self, queue, item=NO_ITEM, timeout=None):
super(_QueueAction, self).__init__(timeout)
if not isinstance(queue, Queue):
raise TypeError("'queue' must be a Queue instance")
self.queue = queue
self.item = item
################################################################################
#
# SmartQueue and _SmartQueueAction classes
#
################################################################################
class SmartQueue(object):
"""
A multi-producer, multi-consumer FIFO queue (similar to
Queue.Queue) that can be used for exchanging data between tasks.
The difference with Queue is that this implements filtering criteria
on get and allows multiple get to be signalled for the same put.
On the downside, this uses list instead of deque and has lower
performance.
"""
def __init__(self, contents=(), maxsize=0):
"""
Create a new Queue instance. contents is a sequence (empty by
default) containing the initial contents of the queue. If
maxsize is greater than 0, the queue will hold a maximum of
maxsize items, and put() will block until space is available
in the queue.
"""
self.maxsize = int(maxsize)
self._pending = list(contents)
def __len__(self):
'Return the number of items in the queue'
return len(self._pending)
def _get(self, criteria=None):
#self._pending = filter(lambda x: x[1]<=now, self._pending) # remove expired ones
if criteria:
found = filter(lambda x: criteria(x), self._pending) # check any matching criteria
if found:
self._pending.remove(found[0])
return found[0]
else:
return None
else:
return self._pending.pop(0) if self._pending else None
def _put(self, item):
self._pending.append(item)
def empty(self):
        'Return True if the queue is empty, False otherwise'
return (len(self) == 0)
def full(self):
        'Return True if the queue is full, False otherwise'
return ((len(self) >= self.maxsize) if (self.maxsize > 0) else False)
def get(self, timeout=None, criteria=None):
"""
A task that yields the result of this method will be resumed
when an item is available in the queue and the item matches the
given criteria (a function, usually lambda), and the value of the
yield expression will be the item. If timeout is not None, a
Timeout exception will be raised in the yielding task if an
item is not available after timeout seconds have elapsed. For
example:
try:
            item = (yield queue.get(timeout=5, criteria=lambda x: x.name == 'kundan'))
except Timeout:
# No item available after 5 seconds
"""
return _SmartQueueAction(self, timeout=timeout, criteria=criteria)
def put(self, item, timeout=None):
"""
A task that yields the result of this method will be resumed
when item has been added to the queue. If timeout is not
None, a Timeout exception will be raised in the yielding task
if no space is available after timeout seconds have elapsed.
TODO: Otherwise if space is available, the timeout specifies how
long to keep the item in the queue before discarding it if it
        is not fetched in a get. In this case it does not throw an exception.
For example:
try:
yield queue.put(item, timeout=5)
except Timeout:
# No space available after 5 seconds
"""
return _SmartQueueAction(self, item, timeout=timeout)
class _SmartQueueAction(YieldCondition):
NO_ITEM = object()
def __init__(self, queue, item=NO_ITEM, timeout=None, criteria=None):
super(_SmartQueueAction, self).__init__(timeout)
if not isinstance(queue, SmartQueue):
raise TypeError("'queue' must be a SmartQueue instance")
self.queue = queue
self.item = item
self.criteria = criteria
self.expires = (timeout is not None) and (time.time() + timeout) or 0
################################################################################
#
# TaskManager class
#
################################################################################
class TaskManager(object):
"""
Engine for running a set of cooperatively-multitasking tasks
within a single Python thread
"""
def __init__(self):
"""
Create a new TaskManager instance. Generally, there will only
be one of these per Python process. If you want to run two
existing instances simultaneously, merge them first, then run
one or the other.
"""
self._queue = collections.deque()
self._read_waits = set()
self._write_waits = set()
self._exc_waits = set()
self._queue_waits = collections.defaultdict(self._double_deque)
self._timeouts = []
@staticmethod
def _double_deque():
return (collections.deque(), collections.deque())
def merge(self, other):
"""
Merge this TaskManager with another. After the merge, the two
objects share the same (merged) internal data structures, so
either can be used to manage the combined task set.
"""
if not isinstance(other, TaskManager):
raise TypeError("'other' must be a TaskManager instance")
# Merge the data structures
self._queue.extend(other._queue)
self._read_waits |= other._read_waits
self._write_waits |= other._write_waits
self._exc_waits |= other._exc_waits
self._queue_waits.update(other._queue_waits)
self._timeouts.extend(other._timeouts)
heapq.heapify(self._timeouts)
# Make other reference the merged data structures. This is
# necessary because other's tasks may reference and use other
# (e.g. to add a new task in response to an event).
other._queue = self._queue
other._read_waits = self._read_waits
other._write_waits = self._write_waits
other._exc_waits = self._exc_waits
other._queue_waits = self._queue_waits
other._timeouts = self._timeouts
def add(self, task):
'Add a new task (i.e. a generator instance) to the run queue'
if not isinstance(task, types.GeneratorType):
raise TypeError("'task' must be a generator")
self._enqueue(task)
def _enqueue(self, task, input=None, exc_info=()):
self._queue.append((task, input, exc_info))
def run(self):
"""
Call run_next() repeatedly until there are no tasks that are
currently runnable, waiting for I/O, or waiting to time out.
Note that this method can block indefinitely (e.g. if there
are only I/O waits and no timeouts). If this is unacceptable,
use run_next() instead.
"""
while self.has_runnable() or self.has_io_waits() or self.has_timeouts():
self.run_next()
def has_runnable(self):
"""
        Return True if there are runnable tasks in the queue, False
otherwise
"""
return bool(self._queue)
def has_io_waits(self):
"""
        Return True if there are tasks waiting for I/O, False
otherwise
"""
return bool(self._read_waits or self._write_waits or self._exc_waits)
def has_timeouts(self):
"""
        Return True if there are tasks with pending timeouts, False
otherwise
"""
return bool(self._timeouts)
def run_next(self, timeout=None):
"""
Perform one iteration of the run cycle: check whether any
pending I/O operations can be performed, check whether any
timeouts have expired, then run all currently runnable tasks.
The timeout argument specifies the maximum time to wait for
some task to become runnable. If timeout is None and there
are no currently runnable tasks, but there are tasks waiting
to perform I/O or time out, then this method will block until
at least one of the waiting tasks becomes runnable. To
prevent this method from blocking indefinitely, use timeout to
specify the maximum number of seconds to wait.
If there are runnable tasks in the queue when run_next() is
called, then it will check for I/O readiness using a
non-blocking call to select() (i.e. a poll), and only
already-expired timeouts will be handled. This ensures both
that the task manager is never idle when tasks can be run and
that tasks waiting for I/O never starve.
"""
while self.has_io_waits():
if self._handle_io_waits(self._fix_run_timeout(timeout)) or self.has_runnable(): break
if self.has_timeouts():
self._handle_timeouts(self._fix_run_timeout(timeout))
# Run all tasks currently in the queue
#for dummy in xrange(len(self._queue)):
while len(self._queue) > 0:
task, input, exc_info = self._queue.popleft()
try:
if exc_info:
output = task.throw(*exc_info)
else:
output = task.send(input)
except StopIteration, e:
if isinstance(task, _ChildTask):
if not e.args:
output = None
elif len(e.args) == 1:
output = e.args[0]
else:
output = e.args
self._enqueue(task.parent, input=output)
except:
if isinstance(task, _ChildTask):
# Propagate exception to parent
self._enqueue(task.parent, exc_info=sys.exc_info())
else:
# No parent task, so just die
raise
else:
self._handle_task_output(task, output)
def _fix_run_timeout(self, timeout):
if self.has_runnable():
# Don't block if there are tasks in the queue
timeout = 0.0
elif self.has_timeouts():
# If there are timeouts, block only until the first expiration
expiration_timeout = max(0.0, self._timeouts[0][0] - time.time())
if (timeout is None) or (timeout > expiration_timeout):
timeout = expiration_timeout
return timeout
def _handle_io_waits(self, timeout):
# The error handling here is (mostly) borrowed from Twisted
try:
read_ready, write_ready, exc_ready = \
select.select(self._read_waits,
self._write_waits,
self._exc_waits,
timeout)
except (TypeError, ValueError):
self._remove_bad_file_descriptors()
return False
except (select.error, IOError), err:
if err[0] == errno.EINTR:
return False
elif ((err[0] == errno.EBADF) or
((sys.platform == 'win32') and
(err[0] == 10038))): # WSAENOTSOCK
self._remove_bad_file_descriptors()
return False
else:
# Not an error we can handle, so die
raise
else:
for fd in set(read_ready + write_ready + exc_ready):
try:
input = (fd._eval() if isinstance(fd, FDAction) else None)
self._enqueue(fd.task, input=input)
except:
self._enqueue(fd.task, exc_info=sys.exc_info())
fd._remove_from_fdsets(self._read_waits,
self._write_waits,
self._exc_waits)
if fd._expires():
self._remove_timeout(fd)
return True
def _remove_bad_file_descriptors(self):
for fd in (self._read_waits | self._write_waits | self._exc_waits):
try:
select.select([fd], [fd], [fd], 0.0)
except:
# TODO: do not enqueue the exception (socket.error) so that it does not crash
# when closing an already closed socket. See rtmplite issue #28
# self._enqueue(fd.task, exc_info=sys.exc_info())
fd._remove_from_fdsets(self._read_waits,
self._write_waits,
self._exc_waits)
if fd._expires():
self._remove_timeout(fd)
def _add_timeout(self, item, handler):
item.handle_expiration = handler
heapq.heappush(self._timeouts, (item.expiration, item))
def _remove_timeout(self, item):
self._timeouts.remove((item.expiration, item))
heapq.heapify(self._timeouts)
def _handle_timeouts(self, timeout):
if (not self.has_runnable()) and (timeout > 0.0):
time.sleep(timeout)
current_time = time.time()
while self._timeouts and (self._timeouts[0][0] <= current_time):
item = heapq.heappop(self._timeouts)[1]
if isinstance(item, _SleepDelay):
self._enqueue(item.task)
else:
self._enqueue(item.task, exc_info=(Timeout,))
item.handle_expiration()
def _handle_task_output(self, task, output):
if isinstance(output, types.GeneratorType):
self._enqueue(_ChildTask(task, output))
elif isinstance(output, YieldCondition):
output.task = task
if isinstance(output, _SleepDelay):
self._add_timeout(output, None)
elif isinstance(output, FDReady):
self._handle_fdready(task, output)
elif isinstance(output, _QueueAction):
self._handle_queue_action(task, output)
elif isinstance(output, _SmartQueueAction):
self._handle_smart_queue_action(task, output)
else:
# Return any other output as input and send task to
# end of queue
self._enqueue(task, input=output)
def _handle_fdready(self, task, output):
output._add_to_fdsets(self._read_waits,
self._write_waits,
self._exc_waits)
if output._expires():
self._add_timeout(output,
(lambda:
output._remove_from_fdsets(self._read_waits,
self._write_waits,
self._exc_waits)))
def _handle_queue_action(self, task, output):
get_waits, put_waits = self._queue_waits[output.queue]
if output.item is output.NO_ITEM:
# Action is a get
if output.queue.empty():
get_waits.append(output)
if output._expires():
self._add_timeout(output,
(lambda: get_waits.remove(output)))
else:
item = output.queue._get()
self._enqueue(task, input=item)
if put_waits:
action = put_waits.popleft()
output.queue._put(action.item)
self._enqueue(action.task)
if action._expires():
self._remove_timeout(action)
else:
# Action is a put
if output.queue.full():
put_waits.append(output)
if output._expires():
self._add_timeout(output,
(lambda: put_waits.remove(output)))
else:
output.queue._put(output.item)
self._enqueue(task)
if get_waits:
action = get_waits.popleft()
item = output.queue._get()
self._enqueue(action.task, input=item)
if action._expires():
self._remove_timeout(action)
def _handle_smart_queue_action(self, task, output):
get_waits, put_waits = self._queue_waits[output.queue]
if output.item is output.NO_ITEM:
# Action is a get
item = output.queue._get(criteria=output.criteria)
if item is None:
get_waits.append(output)
if output._expires():
self._add_timeout(output,
(lambda: get_waits.remove(output)))
else:
self._enqueue(task, input=item)
if put_waits:
action = put_waits.popleft()
output.queue._put(action.item)
self._enqueue(action.task)
if action._expires():
self._remove_timeout(action)
else:
# Action is a put
if output.queue.full():
put_waits.append(output)
if output._expires():
self._add_timeout(output,
(lambda: put_waits.remove(output)))
else:
output.queue._put(output.item)
self._enqueue(task)
if get_waits:
actions = []
for action in get_waits:
item = output.queue._get(criteria=action.criteria)
if item is not None:
actions.append((action, item))
for action,item in actions:
get_waits.remove(action)
self._enqueue(action.task, input=item)
if action._expires():
self._remove_timeout(action)
################################################################################
#
# Default TaskManager instance
#
################################################################################
_default_task_manager = None
def get_default_task_manager():
'Return the default TaskManager instance'
global _default_task_manager
if _default_task_manager is None:
_default_task_manager = TaskManager()
return _default_task_manager
def add(task):
'Add a task to the default TaskManager instance'
get_default_task_manager().add(task)
def run():
'Run the default TaskManager instance'
get_default_task_manager().run()
################################################################################
#
# Test routine
#
################################################################################
if __name__ == '__main__':
if sys.platform == 'win32':
# Make sure WSAStartup() is called
import socket
def printer(name):
for i in xrange(1, 4):
print '%s:\t%d' % (name, i)
yield
t = TaskManager()
t.add(printer('first'))
t.add(printer('second'))
t.add(printer('third'))
queue = Queue()
def receiver():
print 'receiver started'
print 'receiver received: %s' % (yield queue.get())
print 'receiver finished'
def sender():
print 'sender started'
yield queue.put('from sender')
print 'sender finished'
def bad_descriptor():
print 'bad_descriptor running'
try:
yield readable(12)
except:
print 'exception in bad_descriptor:', sys.exc_info()[1]
def sleeper():
print 'sleeper started'
yield sleep(1)
print 'sleeper finished'
def timeout_immediately():
print 'timeout_immediately running'
try:
yield Queue().get(timeout=0)
except Timeout:
print 'timeout_immediately timed out'
t2 = TaskManager()
t2.add(receiver())
t2.add(bad_descriptor())
t2.add(sender())
t2.add(sleeper())
t2.add(timeout_immediately())
def parent():
print 'child returned: %s' % ((yield child()),)
try:
yield child(raise_exc=True)
except:
print 'exception in child:', sys.exc_info()[1]
def child(raise_exc=False):
yield
if raise_exc:
raise RuntimeError('foo')
raise StopIteration(1, 2, 3)
t3 = TaskManager()
t3.add(parent())
t.merge(t2)
t.merge(t3)
t.run()
assert not(t.has_runnable() or t.has_io_waits() or t.has_timeouts())
|
zhouhan0126/SCREENTEST1
|
tests/rtmplite/multitask.py
|
Python
|
gpl-2.0
| 41,396 | 0.002053 |
from lipd.lipd_io import lipd_read, lipd_write
from lipd.timeseries import extract, collapse, mode_ts, translate_expression, get_matches
from lipd.doi_main import doi_main
from lipd.csvs import get_csv_from_metadata
from lipd.excel import excel_main
from lipd.noaa import noaa_prompt, noaa_to_lpd, lpd_to_noaa, noaa_prompt_1
from lipd.dataframes import *
from lipd.directory import get_src_or_dst, list_files, collect_metadata_file
from lipd.loggers import create_logger, log_benchmark, create_benchmark
from lipd.misc import path_type, load_fn_matches_ext, rm_values_fields, get_dsn, rm_empty_fields, print_filename, rm_wds_url, rm_od_url
from lipd.tables import addModel, addTable
from lipd.validator_api import call_validator_api, display_results, get_validator_format
from lipd.alternates import FILE_TYPE_MAP
from lipd.regexes import re_url
from lipd.fetch_doi import update_dois
from lipd.download_lipd import download_from_url, get_download_path
from lipd.directory import _go_to_package
import re
from time import process_time as clock
import os
import json
import copy
from collections import OrderedDict
import subprocess
# READ
def run():
"""
Initialize and start objects. This is called automatically when importing the package.
:return none:
"""
# GLOBALS
global cwd, files, logger_start, logger_benchmark, settings, _timeseries_data
_timeseries_data = {}
# files = {".lpd": [ {"full_path", "filename_ext", "filename_no_ext", "dir"} ], ".xls": [...], ".txt": [...]}
settings = {"note_update": True, "note_validate": True, "verbose": True}
cwd = os.getcwd()
# logger created in whatever directory lipd is called from
logger_start = create_logger("start")
files = {".txt": [], ".lpd": [], ".xls": []}
return
def readLipd(usr_path="", remote_file_save=False):
"""
Read LiPD file(s).
Enter a file path, directory path, or leave args blank to trigger gui.
:param str usr_path: Path to file / directory (optional)
:return dict _d: Metadata
"""
global cwd, settings, files
    _d = {}
    try:
if settings["verbose"]:
__disclaimer(opt="update")
files[".lpd"] = []
__read(usr_path, ".lpd")
_d = __read_lipd_contents(usr_path, remote_file_save)
        # Clear out the lipd files metadata. We're done loading, we don't need it anymore.
files[".lpd"] = []
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return _d
def readExcel(usr_path=""):
"""
Read Excel file(s)
Enter a file path, directory path, or leave args blank to trigger gui.
:param str usr_path: Path to file / directory (optional)
:return str cwd: Current working directory
"""
global cwd, files
try:
files[".xls"] = []
__read(usr_path, ".xls")
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return cwd
def readNoaa(usr_path=""):
"""
Read NOAA file(s)
Enter a file path, directory path, or leave args blank to trigger gui.
:param str usr_path: Path to file / directory (optional)
:return str cwd: Current working directory
"""
global cwd, files
try:
files[".txt"] = []
__read(usr_path, ".txt")
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return cwd
def readAll(usr_path=""):
"""
Read all approved file types at once.
Enter a file path, directory path, or leave args blank to trigger gui.
:param str usr_path: Path to file / directory (optional)
:return str cwd: Current working directory
"""
print("readAll: This function no longer exists. Sorry! :(")
# global cwd, files
# start = clock()
# files = {".txt": [], ".lpd": [], ".xls": []}
# if not usr_path:
# usr_path, src_files = get_src_or_dst("read", "directory")
# __read_directory(usr_path, ".lpd")
# __read_directory(usr_path, ".xls")
# __read_directory(usr_path, ".xlsx")
# __read_directory(usr_path, ".txt")
# end = clock()
# logger_benchmark.info(log_benchmark("readAll", start, end))
# return cwd
def excel():
"""
Convert Excel files to LiPD files. LiPD data is returned directly from this function.
| Example
| 1: lipd.readExcel()
| 2: D = lipd.excel()
:return dict _d: Metadata
"""
global files, cwd, settings
_d = {}
# Turn off verbose. We don't want to clutter the console with extra reading/writing output statements
settings["verbose"] = False
try:
# Find excel files
print("Found " + str(len(files[".xls"])) + " Excel files")
logger_start.info("found excel files: {}".format(len(files[".xls"])))
# Loop for each excel file
for file in files[".xls"]:
# Convert excel file to LiPD
dsn = excel_main(file)
try:
# Read the new LiPD file back in, to get fixes, inferred calculations, updates, etc.
_d[dsn] = readLipd(os.path.join(file["dir"], dsn + ".lpd"))
# Write the modified LiPD file back out again.
writeLipd(_d[dsn], cwd)
except Exception as e:
logger_start.error("excel: Unable to read new LiPD file, {}".format(e))
print("Error: Unable to read new LiPD file: {}, {}".format(dsn, e))
except Exception as e:
pass
# Start printing stuff again.
settings["verbose"] = True
os.chdir(cwd)
return _d
def noaa(D="", path="", wds_url="", lpd_url="", version=""):
"""
Convert between NOAA and LiPD files
| Example: LiPD to NOAA converter
| 1: L = lipd.readLipd()
| 2: lipd.noaa(L, "/Users/someuser/Desktop", "https://www1.ncdc.noaa.gov/pub/data/paleo/pages2k/NAm2kHydro-2017/noaa-templates/data-version-1.0.0", "https://www1.ncdc.noaa.gov/pub/data/paleo/pages2k/NAm2kHydro-2017/data-version-1.0.0", "v1-1.0.0")
| Example: NOAA to LiPD converter
| 1: lipd.readNoaa()
| 2: lipd.noaa()
:param dict D: Metadata
:param str path: Path where output files will be written to
:param str wds_url: WDSPaleoUrl, where NOAA template file will be stored on NOAA's FTP server
:param str lpd_url: URL where LiPD file will be stored on NOAA's FTP server
:param str version: Version of the dataset
:return none:
"""
global files, cwd
try:
# When going from NOAA to LPD, use the global "files" variable.
# When going from LPD to NOAA, use the data from the LiPD Library.
# Choose the mode
_mode = noaa_prompt()
# LiPD mode: Convert LiPD files to NOAA files
if _mode == "1":
# _project, _version = noaa_prompt_1()
if not version or not lpd_url:
print("Missing parameters: Please try again and provide all parameters.")
return
if not D:
print("Error: LiPD data must be provided for LiPD -> NOAA conversions")
else:
try:
os.mkdir("noaa_files")
except FileExistsError:
pass
if "paleoData" in D:
_d = copy.deepcopy(D)
D = lpd_to_noaa(_d, wds_url, lpd_url, version, path)
else:
# For each LiPD file in the LiPD Library
for dsn, dat in D.items():
_d = copy.deepcopy(dat)
# Process this data through the converter
_d = lpd_to_noaa(_d, wds_url, lpd_url, version, path)
# Overwrite the data in the LiPD object with our new data.
D[dsn] = _d
# If no wds url is provided, then remove instances from jsonld metadata
if not wds_url:
D = rm_wds_url(D)
# Write out the new LiPD files, since they now contain the new NOAA URL data
if(path):
writeLipd(D, path)
else:
print("Path not provided. Writing to CWD...")
writeLipd(D, cwd)
# NOAA mode: Convert NOAA files to LiPD files
elif _mode == "2":
# Pass through the global files list. Use NOAA files directly on disk.
noaa_to_lpd(files)
else:
print("Invalid input. Try again.")
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return
def doi(D, force=False):
"""
Use the DOI id stored in the LiPD publication data to fetch new information from the DOI.org using their API.
Merge the results with the existing data. This process will open the LiPD files on your computer, and overwrite them
when done. This will not affect LiPD data currently loaded into memory.
| Example
| 1: D = lipd.readLipd()
| 2: D = lipd.doi(D)
|
| DOI location : D["pub"][0]["doi"]
:param dict D: Metadata, either a single dataset or multiple datasets sorted by dataset name.
:param bool force: Force DOIs to update even if they have previously been processed. Default is False.
:return dict D: Metadata, with all publication data updated where possible
"""
global cwd
try:
D = doi_main(D, force)
except Exception as e:
pass
os.chdir(cwd)
return D
def fetchDoiWithCsv(csv_source, write_file=True):
"""
Retrieve DOI publication data for a list of DOI IDs that are stored in a CSV file. No LiPD files needed.
This process uses the DOI.org API for data.
:param str csv_source: The path to the CSV file stored on your computer
:param bool write_file: Write the results to a JSON file (default) or print the results to the console.
:return none:
"""
global cwd
try:
update_dois(csv_source, write_file)
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return
def validate(D, detailed=True):
"""
    Use the Validator API at lipd.net to validate all LiPD files in the LiPD Library.
Display the PASS/FAIL results. Display detailed results if the option is chosen.
:param dict D: Metadata (single or multiple datasets)
:param bool detailed: Show or hide the detailed results of each LiPD file. Shows warnings and errors
:return none:
"""
print("\n")
# Fetch new results by calling lipd.net/api/validator (costly, may take a while)
print("Fetching results from validator at lipd.net/validator... this may take a few moments.\n")
try:
results = []
# Get the validator-formatted data for each dataset.
if "paleoData" in D:
_api_data = get_validator_format(D)
# A list of lists of LiPD-content metadata
results.append(call_validator_api(D["dataSetName"], _api_data))
else:
for dsn, dat in D.items():
_api_data = get_validator_format(dat)
# A list of lists of LiPD-content metadata
results.append(call_validator_api(dsn, _api_data))
display_results(results, detailed)
except Exception as e:
print("Error: validate: {}".format(e))
__move_to_cwd()
return
# def viewLipd(D):
#
# try:
# # Move to py package dir, so we can relative reference json_viewer.py
# _go_to_package()
# # Open viewer in subprocess, so it's contained and closed in a new py process
# subprocess.call(('python', 'json_viewer.py', json.dumps(D)))
# except Exception as e:
# pass
# # Placeholder to catch errors so we can always chdir back to cwd
#
# __move_to_cwd()
# return
# PUT
# def addEnsemble(D, dsn, ensemble):
# """
# Create ensemble entry and then add it to the specified LiPD dataset.
#
# :param dict D: LiPD data
# :param str dsn: Dataset name
# :param list ensemble: Nested numpy array of ensemble column data.
# :return dict D: LiPD data
# """
#
# # Check that the given filename exists in the library
# if dsn in D:
# meta = D[dsn]
# # Create an ensemble dictionary entry
# ens = create_ensemble(ensemble)
# # If everything above worked, then there should be formatted ensemble data now.
# if ens:
# # Insert the formatted ensemble data into the master lipd library
# meta = insert_ensemble(meta, ens)
# # Set meta into lipd object
# D[dsn] = meta
# else:
# print("Error: This dataset was not found in your LiPD data: {}".format(dsn))
# return D
# DATA FRAMES
def ensToDf(ensemble):
"""
Create an ensemble data frame from some given nested numpy arrays
:param list ensemble: Ensemble data
:return obj df: Pandas dataframe
"""
    df = None
    try:
df = create_dataframe(ensemble)
except Exception as e:
pass
__move_to_cwd()
return df
# TODO Not adapted to objectless utilities. Does it need an update?
# def lipdToDf(D, dsn):
# """
# Get LiPD data frames from LiPD object
# :param dict D: LiPD data
# :param str dsn: Dataset name
# :return dict dfs: Pandas dataframes
# """
# try:
# dfs = lipd_lib.get_dfs(dsn)
# except KeyError:
# print("Error: Unable to find LiPD file")
# logger_start.warn("lipd_to_df: KeyError: missing lipds {}".format(filename))
# dfs = None
# return dfs
def tsToDf(tso):
"""
Create Pandas DataFrame from TimeSeries object.
Use: Must first extractTs to get a time series. Then pick one item from time series and pass it through
:param dict tso: Time series entry
:return dict dfs: Pandas dataframes
"""
dfs = {}
try:
dfs = ts_to_df(tso)
except Exception as e:
print("Error: Unable to create data frame")
logger_start.warn("ts_to_df: tso malformed: {}".format(e))
__move_to_cwd()
return dfs
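# Illustrative call chain (an assumption, not from the original module):
# tsToDf expects a single time-series entry, so typical usage looks like:
#     D = readLipd()
#     ts = extractTs(D)
#     dfs = tsToDf(ts[0])  # pass one entry from the time series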
# TODO Not adapted to objectless utilities. Does it need an update?
# def filterDfs(expr):
# """
# Get data frames based on some criteria. i.e. all measurement tables or all ensembles.
# :param str expr: Search expression. (i.e. "paleo measurement tables")
# :return dict dfs: Data frames indexed by filename
# """
# dfs = {}
# try:
# dfs = get_filtered_dfs(lipd_lib.get_master(), expr)
# except Exception:
# logger_dataframes.info("filter_dfs: Unable to filter data frames for expr: {}".format(expr))
# print("Error: unable to filter dataframes")
# return dfs
# ANALYSIS - TIME SERIES
def extractTs(d, whichtables="meas", mode="paleo"):
"""
Create a time series using LiPD data (uses paleoData by default)
| Example : (default) paleoData and meas tables
| 1. D = lipd.readLipd()
| 2. ts = lipd.extractTs(D)
| Example : chronData and all tables
| 1. D = lipd.readLipd()
| 2. ts = lipd.extractTs(D, "all", "chron")
:param dict d: Metadata
:param str whichtables: "all", "summ", "meas", "ens" - The tables that you would like in the timeseries
:param str mode: "paleo" or "chron" mode
:return list l: Time series
"""
    # Instead of storing each raw dataset per tso, store it once in the global scope. Saves memory.
global _timeseries_data
start = clock()
_l = []
try:
if not d:
print("Error: LiPD data not provided. Pass LiPD data into the function.")
else:
print(mode_ts("extract", mode))
if "paleoData" in d:
# One dataset: Process directly on file, don't loop
try:
_dsn = get_dsn(d)
_timeseries_data[start] = {}
_timeseries_data[start][_dsn] = d
# Use the LiPD data given to start time series extract
print("extracting: {}".format(_dsn))
# Copy, so we don't affect the original data
_v = copy.deepcopy(d)
# Start extract...
_l = (extract(_v, whichtables, mode, start))
except Exception as e:
print("Error: Unable to extractTs for dataset: {}: {}".format(_dsn, e))
logger_start.debug("extractTs: Exception: {}, {}".format(_dsn, e))
else:
_timeseries_data[start] = d
# Multiple datasets: Loop and append for each file
for k, v in d.items():
try:
# Use the LiPD data given to start time series extract
print("extracting: {}".format(k))
# Copy, so we don't affect the original data
_v = copy.deepcopy(v)
# Start extract...
_l += (extract(_v, whichtables, mode, start))
except Exception as e:
print("Error: Unable to extractTs for dataset: {}: {}".format(k, e))
logger_start.debug("extractTs: Exception: {}".format(e))
print("Created time series: {} entries".format(len(_l)))
except Exception as e:
print("Error: Unable to extractTs: {}".format(e))
logger_start.error("extractTs: Exception: {}".format(e))
__move_to_cwd()
return _l
def collapseTs(ts=None):
"""
Collapse a time series back into LiPD record form.
| Example
| 1. D = lipd.readLipd()
| 2. ts = lipd.extractTs(D)
| 3. New_D = lipd.collapseTs(ts)
_timeseries_data is sorted by time_id, and then by dataSetName
_timeseries_data[10103341]["ODP1098B"] = {data}
:param list ts: Time series
:return dict: Metadata
"""
# Retrieve the associated raw data according to the "time_id" found in each object. Match it in _timeseries_data
global _timeseries_data
_d = {}
try:
if not ts:
print("Error: Time series data not provided. Pass time series into the function.")
else:
# Send time series list through to be collapsed.
try:
_raw = _timeseries_data[ts[0]["time_id"]]
print(mode_ts("collapse", mode="", ts=ts))
_d = collapse(ts, _raw)
_d = rm_empty_fields(_d)
except Exception as e:
print("Error: Unable to collapse the time series: {}".format(e))
logger_start.error("collapseTs: unable to collapse the time series: {}".format(e))
except Exception as e:
pass
__move_to_cwd()
return _d
def filterTs(ts, expressions):
"""
Create a new time series that only contains entries that match the given expression.
| Example:
| D = lipd.loadLipd()
| ts = lipd.extractTs(D)
| new_ts = filterTs(ts, "archiveType == marine sediment")
| new_ts = filterTs(ts, ["paleoData_variableName == sst", "archiveType == marine sediment"])
| Expressions should use underscores to denote data nesting.
| Ex: paleoData_hasResolution_hasMedian or
:param list OR str expressions: Expressions
:param list ts: Time series
:return list new_ts: Filtered time series that matches the expression
"""
    new_ts = []
    try:
        # Make a copy of the ts. We're going to work directly on it.
        new_ts = ts[:]
# User provided a single query string
if isinstance(expressions, str):
# Use some magic to turn the given string expression into a machine-usable comparative expression.
expr_lst = translate_expression(expressions)
# Only proceed if the translation resulted in a usable expression.
if expr_lst:
# Return the new filtered time series. This will use the same time series
# that filters down each loop.
new_ts, _idx = get_matches(expr_lst, new_ts)
# User provided a list of multiple queries
elif isinstance(expressions, list):
# Loop for each query
for expr in expressions:
# Use some magic to turn the given string expression into a machine-usable comparative expression.
expr_lst = translate_expression(expr)
# Only proceed if the translation resulted in a usable expression.
if expr_lst:
# Return the new filtered time series. This will use the same time series
# that filters down each loop.
new_ts, _idx = get_matches(expr_lst, new_ts)
except Exception as e:
pass
__move_to_cwd()
return new_ts
def queryTs(ts, expression):
"""
Find the indices of the time series entries that match the given expression.
| Example:
| D = lipd.loadLipd()
| ts = lipd.extractTs(D)
| matches = queryTs(ts, "archiveType == marine sediment")
| matches = queryTs(ts, "geo_meanElev <= 2000")
:param str expression: Expression
:param list ts: Time series
:return list _idx: Indices of entries that match the criteria
"""
try:
        # Indices of matching entries will accumulate here.
        _idx = []
# User provided a single query string
if isinstance(expression, str):
# Use some magic to turn the given string expression into a machine-usable comparative expression.
expr_lst = translate_expression(expression)
# Only proceed if the translation resulted in a usable expression.
if expr_lst:
                # Collect the indices of the entries that match the expression.
                new_ts, _idx = get_matches(expr_lst, ts)
# User provided a list of multiple queries
elif isinstance(expression, list):
# Loop for each query
for expr in expression:
# Use some magic to turn the given string expression into a machine-usable comparative expression.
expr_lst = translate_expression(expr)
# Only proceed if the translation resulted in a usable expression.
if expr_lst:
                    # Collect the indices of the entries that match the expression.
                    new_ts, _idx = get_matches(expr_lst, ts)
except Exception as e:
pass
__move_to_cwd()
return _idx
def viewTs(ts):
"""
View the contents of one time series entry in a nicely formatted way
| Example
| 1. D = lipd.readLipd()
| 2. ts = lipd.extractTs(D)
| 3. viewTs(ts[0])
:param dict ts: One time series entry
:return none:
"""
try:
_ts = ts
if isinstance(ts, list):
_ts = ts[0]
print("It looks like you input a full time series. It's best to view one entry at a time.\n"
"I'll show you the first entry...")
_tmp_sort = OrderedDict()
_tmp_sort["ROOT"] = {}
_tmp_sort["PUBLICATION"] = {}
_tmp_sort["GEO"] = {}
_tmp_sort["OTHERS"] = {}
_tmp_sort["DATA"] = {}
# Organize the data by section
for k,v in _ts.items():
if not any(i == k for i in ["paleoData", "chronData", "mode", "@context"]):
if k in ["archiveType", "dataSetName", "googleSpreadSheetKey", "metadataMD5", "tagMD5", "googleMetadataWorksheet", "lipdVersion"]:
_tmp_sort["ROOT"][k] = v
elif "pub" in k:
_tmp_sort["PUBLICATION"][k] = v
elif "geo" in k:
_tmp_sort["GEO"][k] = v
elif "paleoData_" in k or "chronData_" in k:
if isinstance(v, list) and len(v) > 2:
_tmp_sort["DATA"][k] = "[{}, {}, {}, ...]".format(v[0], v[1], v[2])
else:
_tmp_sort["DATA"][k] = v
else:
if isinstance(v, list) and len(v) > 2:
_tmp_sort["OTHERS"][k] = "[{}, {}, {}, ...]".format(v[0], v[1], v[2])
else:
_tmp_sort["OTHERS"][k] = v
# Start printing the data to console
for k1, v1 in _tmp_sort.items():
print("\n{}\n===============".format(k1))
for k2, v2 in v1.items():
print("{} : {}".format(k2, v2))
except Exception as e:
pass
__move_to_cwd()
return
# DEPRECATED - TS no longer uses dictionaries or names.
# def _createTs(names, ts):
# """
# Create a new TS dictionary using
# index = find(logical expression)
# newTS = TS(index)
# :param str expression:
# :return dict:
# """
# d = {}
# for name in names:
# try:
# d[name] = ts[name]
# except KeyError as e:
# logger_start.warn("TS: KeyError: {} not in timeseries, {}".format(name, e))
# return d
# SHOW
def showLipds(D=None):
"""
Display the dataset names of a given LiPD data
| Example
| lipd.showLipds(D)
    :param dict D: LiPD data
:return none:
"""
try:
if not D:
print("Error: LiPD data not provided. Pass LiPD data into the function.")
else:
            print(json.dumps(list(D.keys()), indent=2))
except Exception as e:
pass
__move_to_cwd()
return
def showMetadata(dat):
"""
    Display the metadata of the specified LiPD dataset in pretty print
| Example
| showMetadata(D["Africa-ColdAirCave.Sundqvist.2013"])
:param dict dat: Metadata
:return none:
"""
try:
_tmp = rm_values_fields(copy.deepcopy(dat))
print(json.dumps(_tmp, indent=2))
except Exception as e:
pass
__move_to_cwd()
return
def showDfs(d):
"""
Display the available data frame names in a given data frame collection
:param dict d: Dataframe collection
:return none:
"""
try:
if "metadata" in d:
print("metadata")
if "paleoData" in d:
try:
for k, v in d["paleoData"].items():
print(k)
except KeyError:
pass
except AttributeError:
pass
if "chronData" in d:
try:
for k, v in d["chronData"].items():
print(k)
except KeyError:
pass
except AttributeError:
pass
except Exception as e:
pass
__move_to_cwd()
return
# GET
def getLipdNames(D=None):
"""
Get a list of all LiPD names in the library
| Example
| names = lipd.getLipdNames(D)
:return list f_list: File list
"""
_names = []
try:
if not D:
print("Error: LiPD data not provided. Pass LiPD data into the function.")
else:
            _names = list(D.keys())
except Exception:
pass
__move_to_cwd()
return _names
def getMetadata(L):
"""
Get metadata from a LiPD data in memory
| Example
| m = lipd.getMetadata(D["Africa-ColdAirCave.Sundqvist.2013"])
:param dict L: One LiPD record
:return dict d: LiPD record (metadata only)
"""
_l = {}
try:
# Create a copy. Do not affect the original data.
_l = copy.deepcopy(L)
# Remove values fields
_l = rm_values_fields(_l)
except Exception as e:
# Input likely not formatted correctly, though other problems can occur.
print("Error: Unable to get data. Please check that input is LiPD data: {}".format(e))
return _l
def getCsv(L=None):
"""
Get CSV from LiPD metadata
| Example
| c = lipd.getCsv(D["Africa-ColdAirCave.Sundqvist.2013"])
:param dict L: One LiPD record
:return dict d: CSV data
"""
_c = {}
try:
if not L:
print("Error: LiPD data not provided. Pass LiPD data into the function.")
else:
_j, _c = get_csv_from_metadata(L["dataSetName"], L)
except KeyError as ke:
print("Error: Unable to get data. Please check that input is one LiPD dataset: {}".format(ke))
except Exception as e:
print("Error: Unable to get data. Something went wrong: {}".format(e))
logger_start.warn("getCsv: Exception: Unable to process lipd data: {}".format(e))
return _c
# WRITE
def writeLipd(dat, path=""):
"""
Write LiPD data to file(s)
:param dict dat: Metadata
:param str path: Destination (optional)
:return none:
"""
global settings
__write_lipd(dat, path)
return
# HELPERS
def __universal_read(file_path, file_type):
"""
Use a file path to create file metadata and load a file in the appropriate way, according to the provided file type.
:param str file_path: Path to file
:param str file_type: One of approved file types: xls, xlsx, txt, lpd
:return none:
"""
global files, cwd, settings
try:
# check that we are using the correct function to load this file type. (i.e. readNoaa for a .txt file)
correct_ext = load_fn_matches_ext(file_path, file_type)
# Check that this path references a file
valid_path = path_type(file_path, "file")
# is the path a file?
if valid_path and correct_ext:
# get file metadata for one file
file_meta = collect_metadata_file(file_path)
# append to global files, then load in D
if file_type == ".lpd":
# add meta to global file meta
files[".lpd"].append(file_meta)
# append to global files
elif file_type in [".xls", ".xlsx"]:
print("reading: {}".format(print_filename(file_meta["full_path"])))
files[".xls"].append(file_meta)
# append to global files
elif file_type == ".txt":
print("reading: {}".format(print_filename(file_meta["full_path"])))
files[".txt"].append(file_meta)
# we want to move around with the files we load
# change dir into the dir of the target file
cwd = file_meta["dir"]
if cwd:
os.chdir(cwd)
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return
def __read(usr_path, file_type):
"""
Determine what path needs to be taken to read in file(s)
:param str usr_path: Path (optional)
:param str file_type: File type to read
:return none:
"""
global cwd
try:
# is there a file path specified ?
if usr_path:
# Is this a URL? Download the file and return the local path
is_url = re.match(re_url, usr_path)
if is_url:
# The usr_path will now be a local path to a single file. It will trigger the "elif" statement below
usr_path = download_from_url(usr_path)
# Directory path
if os.path.isdir(usr_path):
__read_directory(usr_path, file_type)
# File path
elif os.path.isfile(usr_path):
__read_file(usr_path, file_type)
# Invalid path given
else:
print("Error: Path given is invalid")
# no path specified. ask if they want to load dir or file
else:
choice = ""
count = 3
while not choice:
try:
print("Choose a read option:\n1. One file\n2. Multi-file select\n3. Directory")
choice = input("Option: ")
print("\n")
# now use the given file type and prompt answer to call _read_file or _read_dir
if choice in ["1", "2", "3"]:
# open directory picker
if choice == "3":
__read_directory(usr_path, file_type)
else:
# open a file picker
__read_file(usr_path, file_type)
break
else:
count -= 1
if count == 0:
print("Error: Too many failed attempts")
break
except Exception as e:
print("Error: Invalid input: {}".format(e))
except Exception as e:
pass
# Placeholder to catch errors so we can always chdir back to cwd
os.chdir(cwd)
return
def __read_lipd_contents(usr_path, remote_file_save):
"""
Use the file metadata to read in the LiPD file contents as a dataset library
:return dict: Metadata
"""
global files, settings
_d = {}
try:
# Read in one file, set data directly into dictionary
if len(files[".lpd"]) == 1:
_d = lipd_read(files[".lpd"][0]["full_path"])
# Remove any files that were downloaded remotely and user doesn't want to save
is_url = re.match(re_url, usr_path)
if not remote_file_save and is_url:
try:
os.remove(files[".lpd"][0]["full_path"])
except FileNotFoundError:
print("Unable to delete locally saved remote file")
if settings["verbose"]:
print("Finished read: 1 record")
# Read in multiple files, organize data by dataSetName (one extra layer)
else:
for file in files[".lpd"]:
_contents = lipd_read(file["full_path"])
_d[_contents["dataSetName"]] = _contents
if settings["verbose"]:
print("Finished read: {} records".format(len(_d)))
except Exception as e:
print("Error: read_lipd_contents: {}".format(e))
return _d
def __read_file(usr_path, file_type):
"""
Universal read file. Given a path and a type, it will do the appropriate read actions
:param str usr_path: Path to file
:param str file_type: One of approved file types: xls, xlsx, txt, lpd
:return none:
"""
global files
# no path provided. start gui browse
if not usr_path:
# src files could be a list of one, or a list of many. depending how many files the user selects
src_dir, src_files = get_src_or_dst("read", "file")
# check if src_files is a list of multiple files
if src_files:
for file_path in src_files:
__universal_read(file_path, file_type)
else:
print("No file(s) chosen")
else:
__universal_read(usr_path, file_type)
return
def __read_directory(usr_path, file_type):
"""
Universal read directory. Given a path and a type, it will do the appropriate read actions
:param str usr_path: Path to directory
:param str file_type: .xls, .xlsx, .txt, .lpd
:return none:
"""
# no path provided. start gui browse
if not usr_path:
# got dir path
usr_path, src_files = get_src_or_dst("read", "directory")
# Check if this is a valid directory path
valid_path = path_type(usr_path, "directory")
# If dir path is valid
if valid_path:
# List all files of target type in dir
files_found = []
# Extra case for xlsx excel files
if file_type == ".xls":
files_found += list_files(".xlsx", usr_path)
files_found += list_files(file_type, usr_path)
# notify how many files were found
print("Found: {} {} file(s)".format(len(files_found), FILE_TYPE_MAP[file_type]["file_type"]))
# Loop for each file found
for file_path in files_found:
# Call read lipd for each file found
__read_file(file_path, file_type)
else:
print("Directory path is not valid: {}".format(usr_path))
return
def __write_lipd(dat, usr_path):
"""
    Write LiPD data to file(s), provided an output directory.
    :param dict dat: Metadata
    :param str usr_path: Destination path
:return none:
"""
global settings
# no path provided. start gui browse
if not usr_path:
# got dir path
usr_path, _ignore = get_src_or_dst("write", "directory")
# Check if this is a valid directory path
valid_path = path_type(usr_path, "directory")
# If dir path is valid
if valid_path:
# Filename is given, write out one file
if "paleoData" in dat:
try:
if settings["verbose"]:
print("writing: {}".format(dat["dataSetName"]))
lipd_write(dat, usr_path)
except KeyError as ke:
print("Error: Unable to write file: unknown, {}".format(ke))
except Exception as e:
print("Error: Unable to write file: {}, {}".format(dat["dataSetName"], e))
# Filename is not given, write out whole library
else:
if dat:
for name, lipd_dat in dat.items():
try:
if settings["verbose"]:
print("writing: {}".format(name))
lipd_write(lipd_dat, usr_path)
except Exception as e:
print("Error: Unable to write file: {}, {}".format(name, e))
return
def __disclaimer(opt=""):
"""
Print the disclaimers once. If they've already been shown, skip over.
:return none:
"""
global settings
if opt == "update":
print("Disclaimer: LiPD files may be updated and modified to adhere to standards\n")
settings["note_update"] = False
if opt == "validate":
print("Note: Use lipd.validate() or www.LiPD.net/create "
"to ensure that your new LiPD file(s) are valid")
settings["note_validate"] = False
return
def __move_to_cwd():
global cwd
os.chdir(cwd)
return
# GLOBALS
run()
|
nickmckay/LiPD-utilities
|
Python/lipd/__init__.py
|
Python
|
gpl-2.0
| 38,558 | 0.002542 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Httpie(PythonPackage):
"""Modern, user-friendly command-line HTTP client for the API era."""
homepage = 'https://httpie.io/'
pypi = 'httpie/httpie-2.6.0.tar.gz'
maintainers = ['jakubroztocil']
version('2.6.0', sha256='ef929317b239bbf0a5bb7159b4c5d2edbfc55f8a0bcf9cd24ce597daec2afca5')
version('2.5.0', sha256='fe6a8bc50fb0635a84ebe1296a732e39357c3e1354541bf51a7057b4877e47f9')
# TODO: Remove both versions for HTTPie 2.7.0.
version('0.9.9', sha256='f1202e6fa60367e2265284a53f35bfa5917119592c2ab08277efc7fffd744fcb', deprecated=True)
version('0.9.8', sha256='515870b15231530f56fe2164190581748e8799b66ef0fe36ec9da3396f0df6e1', deprecated=True)
depends_on('python@3.6:', when='@2.5:', type=('build', 'run'))
depends_on('py-setuptools', type=('build', 'run'))
depends_on('py-charset-normalizer@2:', when='@2.6:', type=('build', 'run'))
depends_on('py-defusedxml@0.6:', when='@2.5:', type=('build', 'run'))
depends_on('py-pygments@2.1.3:', type=('build', 'run'))
depends_on('py-pygments@2.5.2:', when='@2.5:', type=('build', 'run'))
depends_on('py-requests@2.11:', type=('build', 'run'))
depends_on('py-requests@2.22:+socks', when='@2.5:', type=('build', 'run'))
depends_on('py-requests-toolbelt@0.9.1:', when='@2.5:', type=('build', 'run'))
# TODO: Remove completely py-argparse for HTTPie 2.7.0.
# Concretization problem breaks this. Unconditional for now...
# https://github.com/spack/spack/issues/3628
# depends_on('py-argparse@1.2.1:', type=('build', 'run'),
# when='^python@:2.6,3.0:3.1')
depends_on('py-argparse@1.2.1:', type=('build', 'run'), when='^python@:2.6')
|
jakubroztocil/httpie
|
docs/packaging/spack/package.py
|
Python
|
bsd-3-clause
| 1,904 | 0.003151 |
from vmware.models import VM, VMwareHost
from rest_framework import serializers
class VMSerializer(serializers.ModelSerializer):
class Meta:
model = VM
fields = ('name',
'moid',
'vcenter',
'host',
'instance_uuid',
'os_type',
'added_time',
'is_template',
'state')
class VMWareHostSerializer(serializers.ModelSerializer):
baremetal = serializers.HyperlinkedRelatedField(many=False, view_name='baremetal-detail', read_only=True)
class Meta:
model = VMwareHost
fields = ('name',
'ip_address',
'vcenter',
'baremetal',
'state')
|
colinleefish/theotherbarn
|
vmware/serializers.py
|
Python
|
mit
| 786 | 0.003817 |
import os
import stat
import time
from inaugurator import sh
class TargetDevice:
_found = None
@classmethod
def device(cls, candidates):
if cls._found is None:
cls._found = cls._find(candidates)
return cls._found
@classmethod
def _find(cls, candidates):
RETRIES = 5
for retry in xrange(RETRIES):
for device in candidates:
if not os.path.exists(device):
continue
if not stat.S_ISBLK(os.stat(device).st_mode):
continue
try:
                    output = sh.run("dosfslabel", device + "1")
if output.strip() == "STRATODOK":
raise Exception(
"DOK was found on SDA. cannot continue: its likely the "
"the HD driver was not loaded correctly")
except:
pass
print "Found target device %s" % device
return device
print "didn't find target device, sleeping before retry %d" % retry
time.sleep(1)
os.system("/usr/sbin/busybox mdev -s")
raise Exception("Failed finding target device")
|
eliran-stratoscale/inaugurator
|
inaugurator/targetdevice.py
|
Python
|
apache-2.0
| 1,263 | 0.001584 |
from django.db import models
from jsonfield import JSONField
from collections import OrderedDict
class BaseObject(models.Model):
"""
The base model from which all apps inherit
"""
# Type represents the app that uses it. Assets, Persons, Orgs, etc
type = models.CharField(max_length=256)
    # Related-to represents the relation of this object with other objects (of any type)
related_to = models.ManyToManyField("self", blank=True)
created_on = models.DateTimeField(auto_now_add=True)
    updated_on = models.DateTimeField(auto_now=True)
# Store all attributes/properties of the object as dictionary
attributes = JSONField(load_kwargs={'object_pairs_hook': OrderedDict}, blank=True)
def __init__(self, *args, **kwargs):
super(BaseObject, self).__init__(*args, **kwargs)
if not self.pk and not self.type:
self.type = self.TYPE
class BasePropertyManager(models.Manager):
def create_attributes(self, baseobject, **attributes):
"""
Given a set of key-value attributes for a given object,
create the attribute-set in table
"""
property_set = []
for attr, value in attributes.items():
property_set.append(BaseProperty(baseobject=baseobject, key=attr, value=value))
self.bulk_create(property_set)
class BaseProperty(models.Model):
"""
Key-Value attributes of objects are stored here.
"""
baseobject = models.ForeignKey(BaseObject)
key = models.CharField(max_length=256)
value = models.CharField(max_length=256)
objects = BasePropertyManager()
    def __unicode__(self):
        """Representation of field"""
        return u"{0}: {1}={2}".format(self.baseobject.id, self.key, self.value)
class ProxyObject(BaseObject):
class Meta:
proxy = True
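

def _example_create_asset():
    # Illustrative usage sketch, not part of the original app: create a
    # BaseObject and attach key-value attributes through the custom manager.
    # The type string and attribute names below are hypothetical.
    obj = BaseObject.objects.create(type="asset")
    BaseProperty.objects.create_attributes(obj, serial="A-100", owner="ops")
    return obj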
|
jerynmathew/AssetManager
|
AssetManager/core/baseobject/models.py
|
Python
|
mit
| 1,839 | 0.001631 |
"""
Default settings for the ``mezzanine.generic`` app. Each of these can be
overridden in your project's settings module, just like regular
Django settings. The ``editable`` argument for each controls whether
the setting is editable via Django's admin.
Thought should be given to how a setting is actually used before
making it editable, as it may be inappropriate - for example settings
that are only read during startup shouldn't be editable, since changing
them would require an application reload.
"""
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from mezzanine.conf import register_setting
generic_comments = getattr(settings, "COMMENTS_APP", "") == "mezzanine.generic"
if generic_comments:
register_setting(
name="COMMENTS_ACCOUNT_REQUIRED",
label=_("Accounts required for commenting"),
description=_("If ``True``, users must log in to comment."),
editable=True,
default=False,
)
register_setting(
name="COMMENTS_DISQUS_SHORTNAME",
label=_("Disqus shortname"),
description=_("Shortname for the http://disqus.com comments "
"service."),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DISQUS_API_PUBLIC_KEY",
label=_("Disqus public key"),
description=_("Public key for http://disqus.com developer API"),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DISQUS_API_SECRET_KEY",
label=_("Disqus secret key"),
description=_("Secret key for http://disqus.com developer API"),
editable=True,
default="",
)
register_setting(
name="COMMENTS_DEFAULT_APPROVED",
label=_("Auto-approve comments"),
description=_("If ``True``, built-in comments are approved by "
"default."),
editable=True,
default=True,
)
register_setting(
name="COMMENT_FILTER",
description=_("Dotted path to the function to call on a comment's "
"value before it is rendered to the template."),
editable=False,
default=None,
)
register_setting(
name="COMMENTS_NOTIFICATION_EMAILS",
label=_("Comment notification email addresses"),
description=_("A comma separated list of email addresses that "
"will receive an email notification each time a "
"new comment is posted on the site."),
editable=True,
default="",
)
register_setting(
name="COMMENTS_NUM_LATEST",
label=_("Admin comments"),
description=_("Number of latest comments shown in the admin "
"dashboard."),
editable=True,
default=5,
)
register_setting(
name="COMMENTS_UNAPPROVED_VISIBLE",
label=_("Show unapproved comments"),
description=_("If ``True``, comments that have ``is_public`` "
"unchecked will still be displayed, but replaced with a "
"``waiting to be approved`` message."),
editable=True,
default=True,
)
register_setting(
name="COMMENTS_REMOVED_VISIBLE",
label=_("Show removed comments"),
description=_("If ``True``, comments that have ``removed`` "
"checked will still be displayed, but replaced "
"with a ``removed`` message."),
editable=True,
default=True,
)
register_setting(
name="COMMENTS_USE_RATINGS",
description=_("If ``True``, comments can be rated."),
editable=False,
default=True,
)
register_setting(
name="RATINGS_ACCOUNT_REQUIRED",
label=_("Accounts required for rating"),
description=_("If ``True``, users must log in to rate content "
"such as blog posts and comments."),
editable=True,
default=False,
)
register_setting(
name="RATINGS_RANGE",
description=_("A sequence of integers that are valid ratings."),
editable=False,
default=range(getattr(settings, "RATINGS_MIN", 1),
getattr(settings, "RATINGS_MAX", 5) + 1),
)
|
orlenko/bccf
|
src/mezzanine/generic/defaults.py
|
Python
|
unlicense
| 4,223 | 0.000947 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields, osv
class sale_report(osv.osv):
_name = "sale.report"
_description = "Sales Orders Statistics"
_auto = False
_rec_name = 'date'
_columns = {
'date': fields.datetime('Date Order', readonly=True),
'date_confirm': fields.date('Date Confirm', readonly=True),
'product_id': fields.many2one('product.product', 'Product', readonly=True),
'product_uom': fields.many2one('product.uom', 'Unit of Measure', readonly=True),
'product_uom_qty': fields.float('# of Qty', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
'price_total': fields.float('Total Price', readonly=True),
'delay': fields.float('Commitment Delay', digits=(16,2), readonly=True),
'categ_id': fields.many2one('product.category','Category of Product', readonly=True),
'nbr': fields.integer('# of Lines', readonly=True),
'state': fields.selection([
('draft', 'Quotation'),
('waiting_date', 'Waiting Schedule'),
('manual', 'Manual In Progress'),
('progress', 'In Progress'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
('cancel', 'Cancelled')
], 'Order Status', readonly=True),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', readonly=True),
'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
}
_order = 'date desc'
def _select(self):
select_str = """
SELECT min(l.id) as id,
l.product_id as product_id,
t.uom_id as product_uom,
sum(l.product_uom_qty / u.factor * u2.factor) as product_uom_qty,
sum(l.product_uom_qty * l.price_unit * (100.0-l.discount) / 100.0) as price_total,
count(*) as nbr,
s.date_order as date,
s.date_confirm as date_confirm,
s.partner_id as partner_id,
s.user_id as user_id,
s.company_id as company_id,
extract(epoch from avg(date_trunc('day',s.date_confirm)-date_trunc('day',s.create_date)))/(24*60*60)::decimal(16,2) as delay,
s.state,
t.categ_id as categ_id,
s.pricelist_id as pricelist_id,
s.project_id as analytic_account_id,
s.section_id as section_id
"""
return select_str
def _from(self):
from_str = """
sale_order_line l
join sale_order s on (l.order_id=s.id)
left join product_product p on (l.product_id=p.id)
left join product_template t on (p.product_tmpl_id=t.id)
left join product_uom u on (u.id=l.product_uom)
left join product_uom u2 on (u2.id=t.uom_id)
"""
return from_str
def _group_by(self):
group_by_str = """
GROUP BY l.product_id,
l.order_id,
t.uom_id,
t.categ_id,
s.date_order,
s.date_confirm,
s.partner_id,
s.user_id,
s.company_id,
s.state,
s.pricelist_id,
s.project_id,
s.section_id
"""
return group_by_str
def init(self, cr):
# self._table = sale_report
tools.drop_view_if_exists(cr, self._table)
cr.execute("""CREATE or REPLACE VIEW %s as (
%s
FROM ( %s )
%s
)""" % (self._table, self._select(), self._from(), self._group_by()))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
bealdav/OpenUpgrade
|
addons/sale/report/sale_report.py
|
Python
|
agpl-3.0
| 5,204 | 0.002882 |
#!/usr/bin/env python3
"""
Lazy 'tox' to quickly check if branch is up to PR standards.
This is NOT a tox replacement, only a quick check during development.
"""
import os
import asyncio
import sys
import re
import shlex
from collections import namedtuple
try:
from colorlog.escape_codes import escape_codes
except ImportError:
escape_codes = None
RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])
PASS = "green"
FAIL = "bold_red"
def printc(the_color, *args):
"""Color print helper."""
msg = " ".join(args)
if not escape_codes:
print(msg)
return
try:
print(escape_codes[the_color] + msg + escape_codes["reset"])
except KeyError:
print(msg)
raise ValueError("Invalid color {}".format(the_color))
def validate_requirements_ok():
"""Validate requirements, returns True of ok."""
from gen_requirements_all import main as req_main
return req_main(True) == 0
async def read_stream(stream, display):
"""Read from stream line by line until EOF, display, and capture lines."""
output = []
while True:
line = await stream.readline()
if not line:
break
output.append(line)
display(line.decode()) # assume it doesn't block
return b"".join(output)
async def async_exec(*args, display=False):
"""Execute, return code & log."""
argsp = []
for arg in args:
if os.path.isfile(arg):
argsp.append("\\\n {}".format(shlex.quote(arg)))
else:
argsp.append(shlex.quote(arg))
printc("cyan", *argsp)
try:
kwargs = {
"loop": LOOP,
"stdout": asyncio.subprocess.PIPE,
"stderr": asyncio.subprocess.STDOUT,
}
if display:
kwargs["stderr"] = asyncio.subprocess.PIPE
proc = await asyncio.create_subprocess_exec(*args, **kwargs)
except FileNotFoundError as err:
printc(
FAIL,
"Could not execute {}. Did you install test requirements?".format(args[0]),
)
raise err
if not display:
        # Read stdout into the log
stdout, _ = await proc.communicate()
else:
# read child's stdout/stderr concurrently (capture and display)
stdout, _ = await asyncio.gather(
read_stream(proc.stdout, sys.stdout.write),
read_stream(proc.stderr, sys.stderr.write),
)
exit_code = await proc.wait()
stdout = stdout.decode("utf-8")
return exit_code, stdout
async def git():
"""Exec git."""
if len(sys.argv) > 2 and sys.argv[1] == "--":
return sys.argv[2:]
_, log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
merge_base = log.splitlines()[0]
_, log = await async_exec("git", "diff", merge_base, "--name-only")
return log.splitlines()
async def pylint(files):
"""Exec pylint."""
_, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 3:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], "", line[2].strip(), _fn.startswith("tests/")))
return res
async def flake8(files):
"""Exec flake8."""
_, log = await async_exec("flake8", "--doctests", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 4:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], line[2], line[3].strip(), False))
return res
async def lint(files):
"""Perform lint."""
files = [file for file in files if os.path.isfile(file)]
fres, pres = await asyncio.gather(flake8(files), pylint(files))
res = fres + pres
res.sort(key=lambda item: item.file)
if res:
print("Pylint & Flake8 errors:")
else:
printc(PASS, "Pylint and Flake8 passed")
lint_ok = True
for err in res:
err_msg = "{} {}:{} {}".format(err.file, err.line, err.col, err.msg)
# tests/* does not have to pass lint
if err.skip:
print(err_msg)
else:
printc(FAIL, err_msg)
lint_ok = False
return lint_ok
async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
files = await git()
if not files:
print(
"No changed files found. Please ensure you have added your "
"changes with git add & git commit"
)
return
pyfile = re.compile(r".+\.py$")
pyfiles = [file for file in files if pyfile.match(file)]
print("=============================")
printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles))
print("=============================")
skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint"
if skip_lint:
printc(FAIL, "LINT DISABLED")
elif not await lint(pyfiles):
printc(FAIL, "Please fix your lint issues before continuing")
return
test_files = set()
gen_req = False
for fname in pyfiles:
if fname.startswith("homeassistant/components/"):
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
# All test helpers should be excluded
test_files.add(fname)
else:
parts = fname.split("/")
parts[0] = "tests"
if parts[-1] == "__init__.py":
parts[-1] = "test_init.py"
elif parts[-1] == "__main__.py":
parts[-1] = "test_main.py"
else:
parts[-1] = "test_" + parts[-1]
fname = "/".join(parts)
if os.path.isfile(fname):
test_files.add(fname)
if gen_req:
print("=============================")
if validate_requirements_ok():
printc(PASS, "script/gen_requirements.py passed")
else:
printc(FAIL, "Please run script/gen_requirements.py")
return
print("=============================")
if not test_files:
print("No test files identified, ideally you should run tox")
return
code, _ = await async_exec(
"pytest", "-vv", "--force-sugar", "--", *test_files, display=True
)
print("=============================")
if code == 0:
printc(PASS, "Yay! This will most likely pass tox")
else:
printc(FAIL, "Tests not passing")
if skip_lint:
printc(FAIL, "LINT DISABLED")
if __name__ == "__main__":
LOOP = (
asyncio.ProactorEventLoop()
if sys.platform == "win32"
else asyncio.get_event_loop()
)
try:
LOOP.run_until_complete(main())
except (FileNotFoundError, KeyboardInterrupt):
pass
finally:
LOOP.close()
|
fbradyirl/home-assistant
|
script/lazytox.py
|
Python
|
apache-2.0
| 7,111 | 0.000422 |
#!/usr/bin/env python
"""shuffle a dataset"""
import random
import sys
def sol_shuffle(filename, out_filename):
try:
file = open(filename, 'rb')
lines = file.readlines()
if len(lines) == 0:
print 'empty file'
file.close()
sys.exit()
if lines[-1][-1] != '\n':
lines[-1]+='\n'
random.shuffle(lines)
wfile = open(out_filename, 'wb')
wfile.writelines(lines)
wfile.close()
except IOError as e:
print "I/O error ({0}): {1}".format(e.errno, e.strerror)
sys.exit()
else:
file.close()
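

# Hypothetical command-line entry point (not part of the original recipe):
# shuffle the file named by the first argument into the file named by the second.
if __name__ == '__main__':
    if len(sys.argv) != 3:
        print 'usage: sol_shuffle.py <input file> <output file>'
        sys.exit(1)
    sol_shuffle(sys.argv[1], sys.argv[2])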
|
matthew-Ng/sol
|
exp_sol/sol_shuffle.py
|
Python
|
gpl-3.0
| 626 | 0.003195 |
import abc
class RuleLearner:
"""2D 2-person board game rule learner base class
TODO
"""
def __init__(self, board_height, board_width):
"""Initialize the rule learner
Subclasses should call this constructor.
:type board_height: positive integer
:param board_height: the height (number of rows) of the board
:type board_width: positive integer
:param board_width: the width (number of columns) of the board
"""
self._board_height = board_height
self._board_width = board_width
@abc.abstractmethod
def get_valid_moves(self, board):
"""Get the valid moves for the board.
:type board: Boards.Board
:param board: the board for which to determine the valid moves
        :returns: a 2D Numpy array with the same dimensions as the board, with cells where moves are
        valid set to 1 and the rest set to 0
"""
pass
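

class EveryEmptyCellRuleLearner(RuleLearner):
    """Illustrative sketch, not part of the original package: a trivial
    concrete subclass for games where any empty cell is a legal move.
    Assumes the board exposes its grid as a 2D array-like via ``board.grid``
    with 0 marking empty cells; that attribute name is hypothetical.
    """

    def get_valid_moves(self, board):
        import numpy as np
        return (np.asarray(board.grid) == 0).astype(int)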
|
cmdunkers/DeeperMind
|
RuleLearner/RuleLearner.py
|
Python
|
bsd-3-clause
| 974 | 0.002053 |
#!/usr/bin/python2.7
# -*- coding: utf-8 -*-
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The devices file.
"""
class Devices:
def __init__(self):
fo = open("/proc/devices")
self._charmap = {}
self._blockmap = {}
for line in fo.readlines():
if line.startswith("Character"):
curmap = self._charmap
continue
elif line.startswith("Block"):
curmap = self._blockmap
continue
elif len(line) > 4:
[num, fmt] = line.split()
num = int(num)
curmap[num] = fmt
def __str__(self):
s = ["Character devices:"]
for num, fmt in self._charmap.items():
s.append("%3d %s" % (num, fmt))
s.append("\nBlock devices:")
for num, fmt in self._blockmap.items():
s.append("%3d %s" % (num, fmt))
return "\n".join(s)
def get_device(self, dtype, major, minor):
pass
def _test(argv):
d = Devices()
print d
if __name__ == "__main__":
import sys
_test(sys.argv)
|
kdart/pycopia
|
core/pycopia/OS/Linux/proc/devices.py
|
Python
|
apache-2.0
| 1,668 | 0.002398 |
import glob
def handle(userToken, _):
# Get usertoken data
userID = userToken.userID
# Make sure the match exists
matchID = userToken.matchID
if matchID not in glob.matches.matches:
return
match = glob.matches.matches[matchID]
# Get our slotID and change ready status
slotID = match.getUserSlotID(userID)
	if slotID is not None:
match.toggleSlotReady(slotID)
|
osuripple/ripple
|
c.ppy.sh/matchReadyEvent.py
|
Python
|
mit
| 370 | 0.035135 |
###
#
# W A R N I N G
#
# This recipe is obsolete!
#
# When you are looking for copying and pickling functionality for generators
# implemented in pure Python download the
#
# generator_tools
#
# package at the cheeseshop or at www.fiber-space.de
#
###
import new
import copy
import types
import sys
from opcode import*
def copy_generator(f_gen):
'''
Function used to copy a generator object.
@param f_gen: generator object.
@return: pair (g_gen, g) where g_gen is a new generator object and g a generator
function g producing g_gen. The function g is created from f_gen.gi_frame.
Usage: function copies a running generator.
def inc(start, step = 1):
i = start
while True:
yield i
i+= step
>>> inc_gen = inc(3)
>>> inc_gen.next()
3
>>> inc_gen.next()
4
>>> inc_gen_c, inc_c = copy_generator(inc_gen)
>>> inc_gen_c.next() == inc_gen.next()
True
>>> inc_gen_c.next()
6
Implementation strategy:
Inspecting the frame of a running generator object f provides following important
information about the state of the generator:
- the values of bound locals inside the generator object
- the last bytecode being executed
This state information of f is restored in a new function generator g in the following way:
- the signature of g is defined by the locals of f ( co_varnames of f ). So we can pass the
locals to g inspected from the current frame of running f. Yet unbound locals are assigned
to None.
All locals will be deepcopied. If one of the locals is a generator object it will be copied
using copy_generator. If a local is not copyable it will be assigned directly. Shared state
is therefore possible.
- bytecode hack. A JUMP_ABSOLUTE bytecode instruction is prepended to the bytecode of f with
an offset pointing to the next unevaluated bytecode instruction of f.
Corner cases:
- an unstarted generator ( last instruction = -1 ) will be just cloned.
- if a generator has been already closed ( gi_frame = None ) a ValueError exception
is raised.
'''
if not f_gen.gi_frame:
raise ValueError("Can't copy closed generator")
f_code = f_gen.gi_frame.f_code
offset = f_gen.gi_frame.f_lasti
locals = f_gen.gi_frame.f_locals
if offset == -1: # clone the generator
argcount = f_code.co_argcount
else:
# bytecode hack - insert jump to current offset
# the offset depends on the version of the Python interpreter
if sys.version_info[:2] == (2,4):
offset +=4
elif sys.version_info[:2] == (2,5):
offset +=5
start_sequence = (opmap["JUMP_ABSOLUTE"],)+divmod(offset, 256)[::-1]
modified_code = "".join([chr(op) for op in start_sequence])+f_code.co_code
argcount = f_code.co_nlocals
varnames = list(f_code.co_varnames)
for i, name in enumerate(varnames):
loc = locals.get(name)
if isinstance(loc, types.GeneratorType):
varnames[i] = copy_generator(loc)[0]
else:
try:
varnames[i] = copy.deepcopy(loc)
except TypeError:
varnames[i] = loc
new_code = new.code(argcount,
f_code.co_nlocals,
f_code.co_stacksize,
f_code.co_flags,
modified_code,
f_code.co_consts,
f_code.co_names,
f_code.co_varnames,
f_code.co_filename,
f_code.co_name,
f_code.co_firstlineno,
f_code.co_lnotab)
g = new.function(new_code, globals(),)
g_gen = g(*varnames)
return g_gen, g
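

# Minimal usage sketch mirroring the doctest above; like copy_generator
# itself, it assumes a Python 2.4/2.5 interpreter.
def _demo():
    def inc(start, step=1):
        i = start
        while True:
            yield i
            i += step
    inc_gen = inc(3)
    inc_gen.next()   # 3
    inc_gen.next()   # 4
    inc_gen_copy, _ = copy_generator(inc_gen)
    assert inc_gen_copy.next() == inc_gen.next()  # both yield 5


if __name__ == "__main__":
    _demo()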
|
ActiveState/code
|
recipes/Python/528949_Copying_Generators/recipe-528949.py
|
Python
|
mit
| 3,934 | 0.008897 |
# coding: utf-8
#
# Copyright © 2017 weirdgiraffe <giraffe@cyberzoo.xyz>
#
# Distributed under terms of the MIT license.
#
import sys
try: # real kodi
import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin
except ImportError: # mocked kodi
from mock_kodi import xbmc
from mock_kodi import xbmcaddon
from mock_kodi import xbmcgui
from mock_kodi import xbmcplugin
try: # python2
from urllib import urlencode
from urlparse import urlparse, parse_qs
except ImportError: # python3
from urllib.parse import urlparse, parse_qs, urlencode
class logger:
@staticmethod
def debug(s):
xbmc.log(s, xbmc.LOGDEBUG)
@staticmethod
def info(s):
xbmc.log(s, xbmc.LOGNOTICE)
@staticmethod
def error(s):
s += '\n\taddon arguments:\n\t{0}'.format('\n\t'.join(sys.argv[1:]))
xbmc.log(s, xbmc.LOGERROR)
def list_item(name, thumb):
li = xbmcgui.ListItem(name)
    if thumb is not None:
        # setArt expects a dict mapping art types to image paths
        li.setArt({'thumb': thumb})
    # it is said that both of these methods are deprecated
    # see: http://kodi.wiki/view/Jarvis_API_changes
    # but only these methods actually work with Jarvis
li.setIconImage(thumb)
li.setThumbnailImage(thumb)
return li
class Plugin:
def __init__(self, *args):
self._addon = xbmcaddon.Addon()
self._url = args[0]
self._handler = int(args[1], base=10)
# addon url has format:
# plugin://plugin.hello.blah?arg1=xxx&arg2=xxx
# where args are urlencoded
o = urlparse(args[2])
self._args = dict()
for k, v in parse_qs(o.query).items():
if len(v) == 1:
self._args[k] = v[0]
else:
self._args[k] = v
@property
def icon(self):
return self._addon.getAddonInfo('icon')
@property
def args(self):
return self._args
def read_input(self, header):
        keyboard = xbmc.Keyboard('', 'What to search?', False)
keyboard.doModal()
if keyboard.isConfirmed():
return keyboard.getText()
def play(self, url):
li = xbmcgui.ListItem(path=url)
xbmcplugin.setResolvedUrl(self._handler, True, li)
def add_screen_item(self, name, url, **kwargs):
thumb = kwargs.get('thumb')
li = list_item(name, thumb)
li.setProperty('IsPlayable', 'true')
ret = xbmcplugin.addDirectoryItem(self._handler, url, li, False)
if not ret:
logger.error('failed to add {0} playable item'.format(name))
def add_screen_directory(self, name, url, **kwargs):
thumb = kwargs.get('thumb')
li = list_item(name, thumb)
args = [self._handler, url, li, True]
items_count = kwargs.get('items_count')
if items_count:
args += [items_count]
ret = xbmcplugin.addDirectoryItem(*args)
if not ret:
logger.error('failed to add {0} directory item'.format(name))
def publish_screen(self, ok, refresh=False):
xbmcplugin.endOfDirectory(self._handler, ok, refresh)
def make_url(self, argv):
return '{0}?{1}'.format(self._url, urlencode(argv))
def settings_value(self, setting_id):
return self._addon.getSetting(setting_id)
def show_notification(self, title, message):
timeout = len(message) / 10 * 2000
title = title.replace('"', '\\"')
message = message.replace('"', '\\"')
xbmc.executebuiltin('Notification("{0}","{1}","{2}","{3}")'.format(
title.encode('ascii', 'ignore'),
message.encode('ascii', 'ignore'),
timeout,
self.icon))
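

if __name__ == '__main__':
    # Hypothetical smoke test, not part of the original add-on: construct a
    # Plugin the way Kodi would, relying on the mocked kodi modules imported
    # above when run outside Kodi.
    _plugin = Plugin('plugin://plugin.video.giraffe.seasonvar/', '5',
                     'plugin://plugin.video.giraffe.seasonvar/?mode=search')
    logger.info('plugin args: {0}'.format(_plugin.args))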
|
weirdgiraffe/plugin.video.giraffe.seasonvar
|
resources/site-packages/kodi/__init__.py
|
Python
|
mit
| 3,703 | 0 |
#!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import copy, zipfile
from PyQt5.Qt import QAbstractItemModel, Qt, QColor, QFont, QIcon, \
QModelIndex, pyqtSignal, QPixmap
from calibre.utils.search_query_parser import SearchQueryParser
from calibre.utils.localization import get_language
from calibre.web.feeds.recipes.collection import \
get_builtin_recipe_collection, get_custom_recipe_collection, \
SchedulerConfig, download_builtin_recipe, update_custom_recipe, \
update_custom_recipes, add_custom_recipe, add_custom_recipes, \
remove_custom_recipe, get_custom_recipe, get_builtin_recipe
from calibre.utils.search_query_parser import ParseException
class NewsTreeItem(object):
def __init__(self, builtin, custom, scheduler_config, parent=None):
self.builtin, self.custom = builtin, custom
self.scheduler_config = scheduler_config
self.parent = parent
if self.parent is not None:
self.parent.append(self)
self.children = []
def row(self):
if self.parent is not None:
return self.parent.children.index(self)
return 0
def append(self, child):
child.parent = self
self.children.append(child)
def data(self, role):
return None
def flags(self):
return Qt.ItemIsEnabled|Qt.ItemIsSelectable
def sort(self):
self.children.sort()
for child in self.children:
child.sort()
def prune(self):
for child in list(self.children):
if len(child.children) == 0:
self.children.remove(child)
child.parent = None
class NewsCategory(NewsTreeItem):
def __init__(self, category, builtin, custom, scheduler_config, parent):
NewsTreeItem.__init__(self, builtin, custom, scheduler_config, parent)
self.category = category
self.cdata = get_language(self.category)
self.bold_font = QFont()
self.bold_font.setBold(True)
def data(self, role):
if role == Qt.DisplayRole:
return (self.cdata + ' [%d]'%len(self.children))
elif role == Qt.FontRole:
return self.bold_font
elif role == Qt.ForegroundRole and self.category == _('Scheduled'):
return (QColor(0, 255, 0))
return None
def flags(self):
return Qt.ItemIsEnabled
def __cmp__(self, other):
def decorate(x):
if x == _('Scheduled'):
x = '0' + x
elif x == _('Custom'):
x = '1' + x
else:
x = '2' + x
return x
return cmp(decorate(self.cdata), decorate(getattr(other, 'cdata', '')))
class NewsItem(NewsTreeItem):
def __init__(self, urn, title, default_icon, custom_icon, favicons, zf,
builtin, custom, scheduler_config, parent):
NewsTreeItem.__init__(self, builtin, custom, scheduler_config, parent)
self.urn, self.title = urn, title
self.icon = self.default_icon = None
self.default_icon = default_icon
self.favicons, self.zf = favicons, zf
if 'custom:' in self.urn:
self.icon = custom_icon
def data(self, role):
if role == Qt.DisplayRole:
return (self.title)
if role == Qt.DecorationRole:
if self.icon is None:
icon = '%s.png'%self.urn[8:]
p = QPixmap()
if icon in self.favicons:
try:
with zipfile.ZipFile(self.zf, 'r') as zf:
p.loadFromData(zf.read(self.favicons[icon]))
except:
pass
if not p.isNull():
self.icon = (QIcon(p))
else:
self.icon = self.default_icon
return self.icon
return None
def __cmp__(self, other):
return cmp(self.title.lower(), getattr(other, 'title', '').lower())
class AdaptSQP(SearchQueryParser):
def __init__(self, *args, **kwargs):
pass
class RecipeModel(QAbstractItemModel, AdaptSQP):
LOCATIONS = ['all']
searched = pyqtSignal(object)
def __init__(self, *args):
QAbstractItemModel.__init__(self, *args)
SearchQueryParser.__init__(self, locations=['all'])
self.default_icon = (QIcon(I('news.png')))
self.custom_icon = (QIcon(I('user_profile.png')))
self.builtin_recipe_collection = get_builtin_recipe_collection()
self.scheduler_config = SchedulerConfig()
try:
with zipfile.ZipFile(P('builtin_recipes.zip',
allow_user_override=False), 'r') as zf:
self.favicons = dict([(x.filename, x) for x in zf.infolist() if
x.filename.endswith('.png')])
except:
self.favicons = {}
self.do_refresh()
def get_builtin_recipe(self, urn, download=True):
if download:
try:
return download_builtin_recipe(urn)
except:
import traceback
traceback.print_exc()
return get_builtin_recipe(urn)
def get_recipe(self, urn, download=True):
coll = self.custom_recipe_collection if urn.startswith('custom:') \
else self.builtin_recipe_collection
for recipe in coll:
if recipe.get('id', False) == urn:
if coll is self.builtin_recipe_collection:
return self.get_builtin_recipe(urn[8:], download=download)
return get_custom_recipe(int(urn[len('custom:'):]))
def update_custom_recipe(self, urn, title, script):
id_ = int(urn[len('custom:'):])
update_custom_recipe(id_, title, script)
self.custom_recipe_collection = get_custom_recipe_collection()
def update_custom_recipes(self, script_urn_map):
script_ids = []
for urn, title_script in script_urn_map.iteritems():
id_ = int(urn[len('custom:'):])
(title, script) = title_script
script_ids.append((id_, title, script))
update_custom_recipes(script_ids)
self.custom_recipe_collection = get_custom_recipe_collection()
def add_custom_recipe(self, title, script):
add_custom_recipe(title, script)
self.custom_recipe_collection = get_custom_recipe_collection()
def add_custom_recipes(self, scriptmap):
add_custom_recipes(scriptmap)
self.custom_recipe_collection = get_custom_recipe_collection()
def remove_custom_recipes(self, urns):
ids = [int(x[len('custom:'):]) for x in urns]
for id_ in ids:
remove_custom_recipe(id_)
self.custom_recipe_collection = get_custom_recipe_collection()
def do_refresh(self, restrict_to_urns=set([])):
self.custom_recipe_collection = get_custom_recipe_collection()
zf = P('builtin_recipes.zip', allow_user_override=False)
def factory(cls, parent, *args):
args = list(args)
if cls is NewsItem:
args.extend([self.default_icon, self.custom_icon,
self.favicons, zf])
args += [self.builtin_recipe_collection,
self.custom_recipe_collection, self.scheduler_config,
parent]
return cls(*args)
def ok(urn):
if restrict_to_urns is None:
return False
return not restrict_to_urns or urn in restrict_to_urns
new_root = factory(NewsTreeItem, None)
scheduled = factory(NewsCategory, new_root, _('Scheduled'))
custom = factory(NewsCategory, new_root, _('Custom'))
lang_map = {}
self.all_urns = set([])
self.showing_count = 0
self.builtin_count = 0
for x in self.custom_recipe_collection:
urn = x.get('id')
self.all_urns.add(urn)
if ok(urn):
factory(NewsItem, custom, urn, x.get('title'))
self.showing_count += 1
for x in self.builtin_recipe_collection:
urn = x.get('id')
self.all_urns.add(urn)
if ok(urn):
lang = x.get('language', 'und')
if lang:
lang = lang.replace('-', '_')
if lang not in lang_map:
lang_map[lang] = factory(NewsCategory, new_root, lang)
factory(NewsItem, lang_map[lang], urn, x.get('title'))
self.showing_count += 1
self.builtin_count += 1
for x in self.scheduler_config.iter_recipes():
urn = x.get('id')
if urn not in self.all_urns:
self.scheduler_config.un_schedule_recipe(urn)
continue
if ok(urn):
factory(NewsItem, scheduled, urn, x.get('title'))
new_root.prune()
new_root.sort()
self.root = new_root
self.reset()
def reset(self):
self.beginResetModel(), self.endResetModel()
def recipe_from_urn(self, urn):
coll = self.custom_recipe_collection if 'custom:' in urn else \
self.builtin_recipe_collection
for x in coll:
if x.get('id', None) == urn:
return copy.deepcopy(x)
def schedule_info_from_urn(self, urn):
return self.scheduler_config.get_schedule_info(urn)
def account_info_from_urn(self, urn):
return self.scheduler_config.get_account_info(urn)
def universal_set(self):
return self.all_urns
def get_customize_info(self, urn):
return self.scheduler_config.get_customize_info(urn)
def get_matches(self, location, query):
query = query.strip().lower()
if not query:
return self.universal_set()
results = set([])
for urn in self.universal_set():
recipe = self.recipe_from_urn(urn)
if query in recipe.get('title', '').lower() or \
query in recipe.get('description', '').lower():
results.add(urn)
return results
def search(self, query):
results = []
try:
query = unicode(query).strip()
if query:
results = self.parse(query)
if not results:
results = None
except ParseException:
results = []
self.do_refresh(restrict_to_urns=results)
self.searched.emit(True)
def columnCount(self, parent):
return 1
def data(self, index, role):
if not index.isValid():
return None
item = index.internalPointer()
return item.data(role)
def headerData(self, *args):
return None
def flags(self, index):
if not index.isValid():
return Qt.ItemIsEnabled|Qt.ItemIsSelectable
item = index.internalPointer()
return item.flags()
def resort(self):
self.do_refresh()
def index(self, row, column, parent):
if not self.hasIndex(row, column, parent):
return QModelIndex()
if not parent.isValid():
parent_item = self.root
else:
parent_item = parent.internalPointer()
try:
child_item = parent_item.children[row]
except IndexError:
return QModelIndex()
ans = self.createIndex(row, column, child_item)
return ans
def parent(self, index):
if not index.isValid():
return QModelIndex()
child_item = index.internalPointer()
parent_item = child_item.parent
if parent_item is self.root or parent_item is None:
return QModelIndex()
ans = self.createIndex(parent_item.row(), 0, parent_item)
return ans
def rowCount(self, parent):
if parent.column() > 0:
return 0
if not parent.isValid():
parent_item = self.root
else:
parent_item = parent.internalPointer()
return len(parent_item.children)
def update_recipe_schedule(self, urn, schedule_type, schedule,
add_title_tag=True, custom_tags=[]):
recipe = self.recipe_from_urn(urn)
self.scheduler_config.schedule_recipe(recipe, schedule_type, schedule,
add_title_tag=add_title_tag, custom_tags=custom_tags)
def update_last_downloaded(self, urn):
self.scheduler_config.update_last_downloaded(urn)
def set_account_info(self, urn, un, pw):
self.scheduler_config.set_account_info(urn, un, pw)
def clear_account_info(self, urn):
self.scheduler_config.clear_account_info(urn)
def get_account_info(self, urn):
return self.scheduler_config.get_account_info(urn)
def get_schedule_info(self, urn):
return self.scheduler_config.get_schedule_info(urn)
def un_schedule_recipe(self, urn):
self.scheduler_config.un_schedule_recipe(urn)
def schedule_recipe(self, urn, sched_type, schedule):
self.scheduler_config.schedule_recipe(self.recipe_from_urn(urn),
sched_type, schedule)
def customize_recipe(self, urn, add_title_tag, custom_tags, keep_issues):
self.scheduler_config.customize_recipe(urn, add_title_tag,
custom_tags, keep_issues)
def get_to_be_downloaded_recipes(self):
ans = self.scheduler_config.get_to_be_downloaded_recipes()
ans2 = [x for x in ans if self.get_recipe(x, download=False) is not None]
for x in set(ans) - set(ans2):
self.un_schedule_recipe(x)
return ans2
def scheduled_urns(self):
ans = []
with self.scheduler_config.lock:
for recipe in self.scheduler_config.iter_recipes():
ans.append(recipe.get('id'))
return ans
|
ashang/calibre
|
src/calibre/web/feeds/recipes/model.py
|
Python
|
gpl-3.0
| 14,130 | 0.002052 |
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 NovaPoint Group LLC (<http://www.novapointgroup.com>)
# Copyright (C) 2004-2010 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from . import api
class stock_packages(osv.osv):
_inherit = "stock.packages"
def cancel_postage(self, cr, uid, ids, context=None):
for package in self.browse(cr, uid, ids, context=context):
if package.shipping_company_name.lower() != "usps":
continue
usps_config = api.v1.get_config(cr, uid, sale=package.pick_id.sale_id, context=context)
test = package.pick_id.logis_company.test_mode
if hasattr(package, "tracking_no") and package.tracking_no:
try:
response = api.v1.cancel_shipping(usps_config, package, shipper=None, test=test)
except Exception, e:
self.pool.get('stock.packages').write(cr, uid, package.id, {'ship_message': str(e)}, context=context)
return {
'type': 'ir.actions.client',
'tag': 'action_warn',
'name': _('Exception'),
'params': {'title': _('Exception'), 'text': str(e), 'sticky': True}
}
if hasattr(response, "error") or not response.refunds[0].refunded:
err = response.error if hasattr(response, "error") else response.refunds[0].message
self.pool.get('stock.packages').write(cr, uid, package.id, {'ship_message': err}, context=context)
return {
'type': 'ir.actions.client',
'tag': 'action_warn',
'name': _('Failure'),
'params': {
'title': _('Package #%s Cancellation Failed') % package.packge_no,
'text': err,
'sticky': True
}
}
else:
self.pool.get('stock.packages').write(cr, uid, package.id, {
'ship_message' : 'Shipment Cancelled', 'tracking_no': ''
}, context=context)
return super(stock_packages, self).cancel_postage(cr, uid, ids, context=context)
stock_packages()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
lercloud/shipping_api_usps
|
stock_packages.py
|
Python
|
gpl-3.0
| 3,338 | 0.004494 |
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
class InheritedDocs(type):
def __new__(mcs, class_name, bases, dict):
items_to_patch = [
(k, v) for k, v in dict.items() if not k.startswith("__") and not v.__doc__
]
for name, obj in items_to_patch:
doc = None
for base in bases:
if hasattr(base, name):
doc = getattr(base, name).__doc__
if doc:
if isinstance(obj, property) and not obj.fset:
obj.fget.__doc__ = doc
dict[name] = property(fget=obj.fget)
else:
obj.__doc__ = doc
break
return type.__new__(mcs, class_name, bases, dict)
|
bmcculley/splinter
|
splinter/meta.py
|
Python
|
bsd-3-clause
| 954 | 0.001048 |
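A minimal sketch of how the InheritedDocs metaclass above is meant to be used (the classes below are invented for illustration and are not part of splinter): a method defined without a docstring inherits one from the first base class that provides it.

class Base(object):
    def visit(self, url):
        """Visit the given URL."""

class ChromeDriver(Base):
    __metaclass__ = InheritedDocs  # Python 2 metaclass hook, matching this codebase

    def visit(self, url):  # no docstring of its own
        pass

print(ChromeDriver.visit.__doc__)  # -> Visit the given URL.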
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import cint, cstr, flt, nowdate, comma_and, date_diff
from frappe import msgprint, _
from frappe.model.document import Document
class LeaveControlPanel(Document):
def get_employees(self):
conditions, values = [], []
for field in ["employment_type", "branch", "designation", "department"]:
if self.get(field):
conditions.append("{0}=%s".format(field))
values.append(self.get(field))
condition_str = " and " + " and ".join(conditions) if len(conditions) else ""
e = frappe.db.sql("select name from tabEmployee where status='Active' {condition}"
.format(condition=condition_str), tuple(values))
return e
def validate_values(self):
for f in ["from_date", "to_date", "leave_type", "no_of_days"]:
if not self.get(f):
frappe.throw(_("{0} is required").format(self.meta.get_label(f)))
def to_date_validation(self):
if date_diff(self.to_date, self.from_date) <= 0:
return "Invalid period"
def allocate_leave(self):
self.validate_values()
leave_allocated_for = []
employees = self.get_employees()
if not employees:
frappe.throw(_("No employee found"))
for d in self.get_employees():
try:
la = frappe.new_doc('Leave Allocation')
la.set("__islocal", 1)
la.employee = cstr(d[0])
la.employee_name = frappe.db.get_value('Employee',cstr(d[0]),'employee_name')
la.leave_type = self.leave_type
la.from_date = self.from_date
la.to_date = self.to_date
la.carry_forward = cint(self.carry_forward)
la.new_leaves_allocated = flt(self.no_of_days)
la.docstatus = 1
la.save()
leave_allocated_for.append(d[0])
            except:
                # ignore failures for this employee and continue with the rest
                pass
if leave_allocated_for:
msgprint(_("Leaves Allocated Successfully for {0}").format(comma_and(leave_allocated_for)))
|
bhupennewalkar1337/erpnext
|
erpnext/hr/doctype/leave_control_panel/leave_control_panel.py
|
Python
|
gpl-3.0
| 1,939 | 0.025271 |
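For illustration, a sketch of the SQL fragment that get_employees assembles when two of the optional filter fields are set; the field values are invented, and frappe.db.sql later substitutes them for the %s placeholders.

conditions = ["department=%s", "branch=%s"]
values = ["Accounts", "Head Office"]
condition_str = " and " + " and ".join(conditions) if len(conditions) else ""
query = ("select name from tabEmployee where status='Active' {condition}"
         .format(condition=condition_str))
print(query)
# -> select name from tabEmployee where status='Active' and department=%s and branch=%s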
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import latin_noun
import latin_pronoun
import latin_adj
import latin_conj
import latin_prep
import latin_verb_reg
import latin_verb_irreg
import util
class LatinDic:
dic = {}
auto_macron_mode = False
def flatten(text):
return text.replace(u'ā',u'a').replace(u'ē',u'e').replace(u'ī',u'i').replace(u'ō',u'o').replace(u'ū',u'u').replace(u'ȳ',u'y').lower()
def register(surface, info):
if not info.has_key('pos'): return
if LatinDic.auto_macron_mode:
surface = flatten(surface)
if LatinDic.dic.has_key(surface):
LatinDic.dic[surface].append(info)
else:
LatinDic.dic[surface] = [info]
def register_items(items):
for item in items:
register(item['surface'], item)
def lookup(word):
return LatinDic.dic.get(word, None)
def dump():
for k, v in LatinDic.dic.items():
print util.render2(k, v)
def load_def(file, tags={}):
items = []
with open(file, 'r') as fp:
for line in fp:
if len(line) == 0: continue
if line[0] == '#': continue
fs = line.rstrip().split('\t')
if len(fs) < 3: continue
surface = fs[0].decode('utf-8')
pos = fs[1]
ja = fs[2]
items.append(util.aggregate_dicts({'surface':surface, 'pos':pos, 'ja':ja}, tags))
return items
def load(auto_macron_mode=False):
LatinDic.auto_macron_mode = auto_macron_mode
items = []
items += latin_noun.load()
items += latin_pronoun.load()
items += latin_adj.load()
items += latin_conj.load()
items += latin_prep.load()
items += latin_verb_reg.load()
items += latin_verb_irreg.load()
items += load_def('words/adv.def', {'pos':'adv'})
items += load_def('words/other.def')
register_items(items)
# return ld
if __name__ == '__main__':
# for k, v in dic.items():
# print util.render(k), util.render(v)
pass
|
naoyat/latin
|
latin/latindic.py
|
Python
|
mit
| 1,985 | 0.010106 |
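A hypothetical caller of the module above, assuming it is importable as latindic; the lookup key and the keys present in each entry depend on the word lists loaded from words/*.def and the inflection modules.

import latindic

latindic.load(auto_macron_mode=False)   # populate LatinDic.dic from all sources
entries = latindic.lookup(u'et')        # list of info dicts, or None if unknown
if entries:
    for info in entries:
        print info.get('pos'), info.get('ja')   # part of speech, Japanese gloss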
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db import IntegrityError
from django.shortcuts import render, redirect
from django.contrib import messages
from django import forms as django_forms
from django.views.decorators.cache import cache_page
from django.utils.translation import ugettext_lazy as _
from core.utils.decorators import log
from . import forms
from . import logic
@log
@cache_page(60 * 3)
def index(request, template='user/blog/index.html', context=None):
    context = context or {}  # avoid sharing a mutable default dict between requests
    blog_logic = logic.BlogLogic(request)
context['pages'] = blog_logic.pages()
context['posts'] = blog_logic.posts()
# context['beeps'] = blog_logic.beeps()
return render(request, template, context)
''' Pages '''
@log
def pages(request, template='user/blog/pages.html', context=None):
    context = context or {}
    blog_logic = logic.BlogLogic(request)
context['pages'] = blog_logic.pages()
return render(request, template, context)
@log
@cache_page(60 * 3)
def page(request, page_slug, template='user/blog/page.html', context=None):
    context = context or {}
    blog_logic = logic.BlogLogic(request)
context['page'] = blog_logic.page(page_slug)
return render(request, template, context)
''' Posts '''
@log
def posts(request, template='user/blog/posts.html', context=None):
    context = context or {}
    blog_logic = logic.BlogLogic(request)
context['posts'] = blog_logic.posts()
return render(request, template, context)
@log
@cache_page(60 * 3)
def post(request, post_id, post_slug, template='user/blog/post.html', context=None):
    context = context or {}
    blog_logic = logic.BlogLogic(request)
context['post'] = blog_logic.post(post_id, post_slug)
return render(request, template, context)
''' Others '''
@log
def contact(request, template="user/blog/contact.html", context=None):
    context = context or {}
    contact_form = forms.ContactForm(request.POST or None)
if request.method == 'POST':
if contact_form.is_valid():
contact_form.save()
messages.add_message(request, messages.SUCCESS, _('Your message successfully submitted.'))
return redirect(reverse('blog_contact'))
else:
messages.add_message(request, messages.ERROR, _('Please fix errors bellow.'))
context['contact_form'] = contact_form
context['document_form'] = forms.DocumentForm()
return render(request, template, context)
@log
def document(request, template="user/blog/contact.html", context=None):
    context = context or {}
    document_form = forms.DocumentForm(request.POST or None, request.FILES or None)
if request.method == 'POST':
if document_form.is_valid():
document_form.save()
messages.add_message(request, messages.SUCCESS, _('Your application successfully submitted.'))
return redirect(reverse('blog_contact'))
else:
messages.add_message(request, messages.ERROR, _('Please fix errors bellow.'))
context['contact_form'] = forms.ContactForm()
context['document_form'] = document_form
return render(request, template, context)
@log
def search(request, template='user/blog/search.html', context=None):
    context = context or {}
    blog_logic = logic.BlogLogic(request)
term = blog_logic.get_param("term")
search_result = blog_logic.search(term)
context['term'] = term
context['pages'] = search_result.pages
context['posts'] = search_result.posts
return render(request, template, context)
@log
def subscribe(request):
blog_logic = logic.BlogLogic(request)
name = blog_logic.get_param("name")
email = blog_logic.get_param("email")
if not name or not email:
messages.add_message(request, messages.ERROR, _('Please enter your name and email.'))
else:
try:
django_forms.EmailField().clean(email)
blog_logic.new_subscription(name, email)
messages.add_message(request, messages.SUCCESS, _('You successfully subscribed.'))
except ValidationError:
messages.add_message(request, messages.ERROR, _('Please enter correct email.'))
except IntegrityError:
messages.add_message(request, messages.WARNING, _('You already have been subscribed.'))
return redirect(request.META.get('HTTP_REFERER'))
|
elweezy/django-skeleton
|
app/blog/views.py
|
Python
|
gpl-3.0
| 4,187 | 0.002627 |
__all__ = [
'fixed_value',
'coalesce',
]
try:
from itertools import ifilter as filter
except ImportError:
pass
class _FixedValue(object):
def __init__(self, value):
self._value = value
def __call__(self, *args, **kwargs):
return self._value
def fixed_value(value):
return _FixedValue(value)
class _Coalesce(object):
def _filter(self, x):
return x is not None
def __init__(self, callbacks, else_=None):
self._callbacks = callbacks
self._else = else_
def __call__(self, invoice):
results = (
callback(invoice)
for callback in self._callbacks
)
try:
return next(filter(
self._filter, results
))
except StopIteration:
return self._else
def coalesce(callbacks, else_=None):
return _Coalesce(callbacks, else_=else_)
|
calidae/python-aeat_sii
|
src/pyAEATsii/callback_utils.py
|
Python
|
apache-2.0
| 918 | 0 |
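A short sketch of how the two combinators above compose (the invoice fields are made up): coalesce returns the first non-None callback result, falling back to else_, while fixed_value ignores its arguments entirely.

get_vat = coalesce(
    [lambda inv: inv.get('vat_number'),   # preferred source
     lambda inv: inv.get('tax_id')],      # fallback source
    else_='N/A')                          # used when every callback returns None

print(get_vat({'vat_number': 'ES-123'}))  # -> ES-123
print(get_vat({'tax_id': 'X-9'}))         # -> X-9
print(get_vat({}))                        # -> N/A
print(fixed_value(21)('ignored'))         # -> 21, whatever the argument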
#MenuTitle: Set Preferred Names (Name IDs 16 and 17) for Width Variants
# -*- coding: utf-8 -*-
__doc__="""
Sets Preferred Names custom parameters (Name IDs 16 and 17) for all instances, so that width variants will appear in separate menus in Adobe apps.
"""
thisFont = Glyphs.font # frontmost font
widths = (
"Narrow", "Seminarrow", "Semi Narrow", "Extranarrow", "Extra Narrow", "Ultranarrow", "Ultra Narrow",
"Condensed", "Semicondensed", "Semi Condensed", "Extracondensed", "Extra Condensed", "Ultracondensed", "Ultra Condensed",
"Compressed", "Semicompressed", "Semi Compressed", "Extracompressed", "Extra Compressed", "Ultracompressed", "Ultra Compressed",
"Extended", "Semiextended", "Semi Extended", "Extraextended", "Extra Extended", "Ultraextended", "Ultra Extended",
"Expanded", "Semiexpanded", "Semi Expanded", "Extraexpanded", "Extra Expanded", "Ultraexpanded", "Ultra Expanded",
"Wide", "Semiwide", "Semi Wide", "Extrawide", "Extra Wide", "Ultrawide", "Ultra Wide",
)
for thisInstance in thisFont.instances:
print "Processing Instance:", thisInstance.name
familyName = thisFont.familyName
if thisInstance.customParameters["familyName"]:
familyName = thisInstance.customParameters["familyName"]
widthVariant = None
for width in widths:
if width in thisInstance.name:
widthVariant = width
elif " " in width:
width = width.replace(" ","")
if width in thisInstance.name:
widthVariant = width
if widthVariant:
preferredFamilyName = "%s %s" % ( thisFont.familyName.strip(), widthVariant.strip() )
preferredStyleName = thisInstance.name.replace(widthVariant,"").strip()
if not preferredStyleName:
preferredStyleName = "Regular"
thisInstance.customParameters["preferredFamilyName"] = preferredFamilyName
thisInstance.customParameters["preferredSubfamilyName"] = preferredStyleName
print " preferredFamilyName:", preferredFamilyName
print " preferredSubfamilyName:", preferredStyleName
|
schriftgestalt/Mekka-Scripts
|
Font Info/Set Preferred Names (Name IDs 16 and 17).py
|
Python
|
apache-2.0
| 1,956 | 0.027607 |
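A worked example of the name splitting the script performs; the family and instance names are invented.

familyName = "MyFamily"
instanceName = "Condensed Bold"
widthVariant = "Condensed"                                  # matched against the widths tuple
preferredFamilyName = "%s %s" % (familyName, widthVariant)  # -> "MyFamily Condensed"
preferredStyleName = instanceName.replace(widthVariant, "").strip() or "Regular"  # -> "Bold"
print preferredFamilyName, preferredStyleName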
import os
import yaml
DEFAULT_DIR = '../etc/'
class BaseConfig(object):
__config = {}
__default_dir = None
@classmethod
def load(cls, filename, default_path=DEFAULT_DIR):
"""
Setup configuration
"""
path = "%s/%s.yaml" % (default_path, filename)
cls.__default_dir = default_path
if os.path.exists(path):
with open(path, 'rt') as filehandle:
                # safe_load avoids constructing arbitrary Python objects from the YAML
                cls.__config = dict(yaml.safe_load(filehandle.read()).items() + \
                    cls.__config.items())
else:
raise OSError("Config doesn't exists: %s" % path)
@classmethod
def get_default_path(cls):
return cls.__default_dir
@classmethod
def get(cls, key, value=None):
if key in cls.__config:
return cls.__config.get(key, value)
return cls.__config.get(key.upper(), value)
@classmethod
def get_url(cls, method):
url = cls.__config.get('urls', {}).get(method)
if not url:
raise ValueError("Could not find url for method: %s" % method)
return Config.get('api_host') + url
Config = BaseConfig()
|
smtpinc/sendapi-python
|
lib/smtpcom/config.py
|
Python
|
mit
| 1,148 | 0.003484 |
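Hypothetical usage of the loader above, assuming the package is importable as smtpcom.config and that ../etc/smtpcom.yaml defines an api_host key and a urls mapping.

from smtpcom.config import Config

Config.load('smtpcom')             # reads ../etc/smtpcom.yaml, raises OSError if absent
host = Config.get('api_host')      # also tries the upper-cased key as a fallback
send_url = Config.get_url('send')  # api_host + urls['send'], ValueError if unmapped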
# -*- coding: utf-8 -*-
# Copyright (c) 2004-2014 Alterra, Wageningen-UR
# Allard de Wit (allard.dewit@wur.nl), April 2014
from collections import namedtuple
from math import exp
from ..traitlets import Float, Int, Instance, AfgenTrait
from ..decorators import prepare_rates, prepare_states
from ..base_classes import ParamTemplate, StatesTemplate, SimulationObject,\
VariableKiosk
from .. import exceptions as exc
from warnings import warn
# Template for namedtuple containing partitioning factors
class PartioningFactors(namedtuple("partitioning_factors", "FR FL FS FO")):
pass
class DVS_Partitioning(SimulationObject):
"""Class for assimilate partioning based on development stage (`DVS`).
`DVS_partioning` calculates the partitioning of the assimilates to roots,
stems, leaves and storage organs using fixed partitioning tables as a
function of crop development stage. The available assimilates are first
split into below-ground and abovegrond using the values in FRTB. In a
second stage they are split into leaves (`FLTB`), stems (`FSTB`) and storage
organs (`FOTB`).
Since the partitioning fractions are derived from the state variable `DVS`
    they are regarded as state variables as well.
**Simulation parameters** (To be provided in cropdata dictionary):
======= ============================================= ======= ============
Name Description Type Unit
======= ============================================= ======= ============
FRTB Partitioning to roots as a function of TCr -
development stage.
FSTB Partitioning to stems as a function of TCr -
development stage.
FLTB Partitioning to leaves as a function of TCr -
development stage.
FOTB Partitioning to storage organs as a function TCr -
of development stage.
======= ============================================= ======= ============
**State variables**
======= ================================================= ==== ============
Name Description Pbl Unit
======= ================================================= ==== ============
FR Fraction partitioned to roots. Y -
FS Fraction partitioned to stems. Y -
FL Fraction partitioned to leaves. Y -
    FO       Fraction partitioned to storage organs            Y    -
======= ================================================= ==== ============
**Rate variables**
None
**Signals send or handled**
None
**External dependencies:**
======= =================================== ================= ============
Name Description Provided by Unit
======= =================================== ================= ============
DVS Crop development stage DVS_Phenology -
======= =================================== ================= ============
*Exceptions raised*
A PartitioningError is raised if the partitioning coefficients to leaves,
stems and storage organs on a given day do not add up to '1'.
"""
class Parameters(ParamTemplate):
FRTB = AfgenTrait()
FLTB = AfgenTrait()
FSTB = AfgenTrait()
FOTB = AfgenTrait()
class StateVariables(StatesTemplate):
FR = Float(-99.)
FL = Float(-99.)
FS = Float(-99.)
FO = Float(-99.)
PF = Instance(PartioningFactors)
def initialize(self, day, kiosk, parvalues):
"""
:param day: start date of the simulation
:param kiosk: variable kiosk of this PCSE instance
:param parvalues: `ParameterProvider` object providing parameters as
key/value pairs
"""
self.params = self.Parameters(parvalues)
self.kiosk = kiosk
# initial partitioning factors (pf)
DVS = self.kiosk["DVS"]
FR = self.params.FRTB(DVS)
FL = self.params.FLTB(DVS)
FS = self.params.FSTB(DVS)
FO = self.params.FOTB(DVS)
# Pack partitioning factors into tuple
PF = PartioningFactors(FR, FL, FS, FO)
# Initial states
self.states = self.StateVariables(kiosk, publish=["FR","FL","FS","FO"],
FR=FR, FL=FL, FS=FS, FO=FO, PF=PF)
self._check_partitioning()
def _check_partitioning(self):
"""Check for partitioning errors."""
FR = self.states.FR
FL = self.states.FL
FS = self.states.FS
FO = self.states.FO
checksum = FR+(FL+FS+FO)*(1.-FR) - 1.
if abs(checksum) >= 0.0001:
msg = ("Error in partitioning!\n")
msg += ("Checksum: %f, FR: %5.3f, FL: %5.3f, FS: %5.3f, FO: %5.3f\n" \
% (checksum, FR, FL, FS, FO))
self.logger.error(msg)
warn(msg)
# raise exc.PartitioningError(msg)
@prepare_states
def integrate(self, day, delt=1.0):
"""Update partitioning factors based on development stage (DVS)"""
params = self.params
DVS = self.kiosk["DVS"]
self.states.FR = params.FRTB(DVS)
self.states.FL = params.FLTB(DVS)
self.states.FS = params.FSTB(DVS)
self.states.FO = params.FOTB(DVS)
# Pack partitioning factors into tuple
self.states.PF = PartioningFactors(self.states.FR, self.states.FL,
self.states.FS, self.states.FO)
self._check_partitioning()
def calc_rates(self, day, drv):
""" Return partitioning factors based on current DVS.
"""
        # rate calculation does nothing for partitioning as it is a derived
# state
return self.states.PF
class DVS_Partitioning_NPK(SimulationObject):
"""Class for assimilate partitioning based on development stage (`DVS`)
with influence of NPK stress.
`DVS_Partitioning_NPK` calculates the partitioning of the assimilates to roots,
stems, leaves and storage organs using fixed partitioning tables as a
    function of crop development stage. The only difference from the normal
partitioning class is the effect of nitrogen stress on partitioning to
leaves (parameter NPART). The available assimilates are first
split into below-ground and aboveground using the values in FRTB. In a
second stage they are split into leaves (`FLTB`), stems (`FSTB`) and storage
organs (`FOTB`).
Since the partitioning fractions are derived from the state variable `DVS`
    they are regarded as state variables as well.
**Simulation parameters** (To be provided in cropdata dictionary):
======= ============================================= ======= ============
Name Description Type Unit
======= ============================================= ======= ============
FRTB Partitioning to roots as a function of TCr -
development stage.
FSTB Partitioning to stems as a function of TCr -
development stage.
FLTB Partitioning to leaves as a function of TCr -
development stage.
    FOTB    Partitioning to storage organs as a function  TCr      -
of development stage.
NPART Coefficient for the effect of N stress on SCR -
leaf biomass allocation
======= ============================================= ======= ============
**State variables**
======= ================================================= ==== ============
Name Description Pbl Unit
======= ================================================= ==== ============
FR Fraction partitioned to roots. Y -
FS Fraction partitioned to stems. Y -
FL Fraction partitioned to leaves. Y -
    FO       Fraction partitioned to storage organs            Y    -
======= ================================================= ==== ============
**Rate variables**
None
**Signals send or handled**
None
**External dependencies:**
======= =================================== ================= ============
Name Description Provided by Unit
======= =================================== ================= ============
DVS Crop development stage DVS_Phenology -
TRA Actual transpiration Simple_Evapotranspiration mm d-1
TRAMX Maximum transpiration Simple_Evapotranspiration mm d-1
NNI Nitrogen nutrition index npk_dynamics -
======= =================================== ================= ============
*Exceptions raised*
A PartitioningError is raised if the partitioning coefficients to leaves,
stems and storage organs on a given day do not add up to '1'.
"""
class Parameters(ParamTemplate):
FRTB = AfgenTrait()
FLTB = AfgenTrait()
FSTB = AfgenTrait()
FOTB = AfgenTrait()
NPART = Float(-99.) # coefficient for the effect of N stress on leaf allocation
class StateVariables(StatesTemplate):
FR = Float(-99.)
FL = Float(-99.)
FS = Float(-99.)
FO = Float(-99.)
PF = Instance(PartioningFactors)
def initialize(self, day, kiosk, cropdata):
"""
:param day: start date of the simulation
:param kiosk: variable kiosk of this PCSE instance
:param cropdata: dictionary with WOFOST cropdata key/value pairs
"""
self.params = self.Parameters(cropdata)
self.kiosk = kiosk
        # initial partitioning factors (pf)
DVS = self.kiosk["DVS"]
FR = self.params.FRTB(DVS)
FL = self.params.FLTB(DVS)
FS = self.params.FSTB(DVS)
FO = self.params.FOTB(DVS)
# Pack partitioning factors into tuple
PF = PartioningFactors(FR, FL, FS, FO)
# Initial states
self.states = self.StateVariables(kiosk, publish=["FR","FL","FS","FO"],
FR=FR, FL=FL, FS=FS, FO=FO, PF=PF)
self._check_partitioning()
def _check_partitioning(self):
"""Check for partitioning errors."""
FR = self.states.FR
FL = self.states.FL
FS = self.states.FS
FO = self.states.FO
checksum = FR+(FL+FS+FO)*(1.-FR) - 1.
if abs(checksum) >= 0.0001:
msg = ("Error in partitioning!\n")
msg += ("Checksum: %f, FR: %5.3f, FL: %5.3f, FS: %5.3f, FO: %5.3f\n" \
% (checksum, FR, FL, FS, FO))
self.logger.error(msg)
raise exc.PartitioningError(msg)
@prepare_states
def integrate(self, day, delt=1.0):
"""
Update partitioning factors based on development stage (DVS)
and the Nitrogen nutrition Index (NNI)
"""
params = self.params
states = self.states
DVS = self.kiosk["DVS"]
TRA = self.kiosk["TRA"]
TRAMX = self.kiosk["TRAMX"]
NNI = self.kiosk["NNI"]
TRANRF = TRA/TRAMX
if TRANRF < NNI:
# Water stress is more severe than nitrogen stress and the
# partitioning follows the original LINTUL2 assumptions
# Note: we use specifically nitrogen stress not nutrient stress!!!
FRTMOD = max( 1., 1./(TRANRF+0.5))
states.FR = min(0.6, params.FRTB(DVS) * FRTMOD)
states.FL = params.FLTB(DVS)
states.FS = params.FSTB(DVS)
states.FO = params.FOTB(DVS)
else:
# Nitrogen stress is more severe than water stress resulting in
# less partitioning to leaves and more to stems
FLVMOD = exp(-params.NPART * (1.0-NNI))
states.FL = params.FLTB(DVS) * FLVMOD
states.FS = params.FSTB(DVS) + params.FLTB(DVS) - states.FL
states.FR = params.FRTB(DVS)
states.FO = params.FOTB(DVS)
# Pack partitioning factors into tuple
states.PF = PartioningFactors(states.FR, states.FL,
states.FS, states.FO)
self._check_partitioning()
def calc_rates(self, day, drv):
""" Return partitioning factors based on current DVS.
"""
        # rate calculation does nothing for partitioning as it is a derived
# state
return self.states.PF
|
jajberni/pcse_web
|
main/pcse/crop/partitioning.py
|
Python
|
apache-2.0
| 12,949 | 0.005638 |
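A worked instance of the closure condition that _check_partitioning enforces: the root fraction plus the above-ground fractions (which must themselves sum to one) scaled by the remaining share must equal one. The numbers below are invented.

FR = 0.30                       # fraction of assimilates sent below ground
FL, FS, FO = 0.60, 0.30, 0.10   # shares of the above-ground part; they sum to 1.0
checksum = FR + (FL + FS + FO) * (1. - FR) - 1.
assert abs(checksum) < 0.0001   # 0.3 + 1.0 * 0.7 - 1.0 == 0.0, so no PartitioningError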
from temboo.Library.Amazon.SNS.AddPermission import AddPermission, AddPermissionInputSet, AddPermissionResultSet, AddPermissionChoreographyExecution
from temboo.Library.Amazon.SNS.ConfirmSubscription import ConfirmSubscription, ConfirmSubscriptionInputSet, ConfirmSubscriptionResultSet, ConfirmSubscriptionChoreographyExecution
from temboo.Library.Amazon.SNS.CreateTopic import CreateTopic, CreateTopicInputSet, CreateTopicResultSet, CreateTopicChoreographyExecution
from temboo.Library.Amazon.SNS.DeleteTopic import DeleteTopic, DeleteTopicInputSet, DeleteTopicResultSet, DeleteTopicChoreographyExecution
from temboo.Library.Amazon.SNS.GetTopicAttributes import GetTopicAttributes, GetTopicAttributesInputSet, GetTopicAttributesResultSet, GetTopicAttributesChoreographyExecution
from temboo.Library.Amazon.SNS.ListSubscriptions import ListSubscriptions, ListSubscriptionsInputSet, ListSubscriptionsResultSet, ListSubscriptionsChoreographyExecution
from temboo.Library.Amazon.SNS.ListSubscriptionsByTopic import ListSubscriptionsByTopic, ListSubscriptionsByTopicInputSet, ListSubscriptionsByTopicResultSet, ListSubscriptionsByTopicChoreographyExecution
from temboo.Library.Amazon.SNS.ListTopics import ListTopics, ListTopicsInputSet, ListTopicsResultSet, ListTopicsChoreographyExecution
from temboo.Library.Amazon.SNS.Publish import Publish, PublishInputSet, PublishResultSet, PublishChoreographyExecution
from temboo.Library.Amazon.SNS.RemovePermission import RemovePermission, RemovePermissionInputSet, RemovePermissionResultSet, RemovePermissionChoreographyExecution
from temboo.Library.Amazon.SNS.SetTopicAttributes import SetTopicAttributes, SetTopicAttributesInputSet, SetTopicAttributesResultSet, SetTopicAttributesChoreographyExecution
from temboo.Library.Amazon.SNS.Subscribe import Subscribe, SubscribeInputSet, SubscribeResultSet, SubscribeChoreographyExecution
from temboo.Library.Amazon.SNS.Unsubscribe import Unsubscribe, UnsubscribeInputSet, UnsubscribeResultSet, UnsubscribeChoreographyExecution
|
jordanemedlock/psychtruths
|
temboo/core/Library/Amazon/SNS/__init__.py
|
Python
|
apache-2.0
| 2,012 | 0.006461 |
from channels import Group as channelsGroup
from channels.sessions import channel_session
import random
from .models import Group as OtreeGroup, Subsession as OtreeSubsession, Constants
import json
import channels
import logging
from otree import constants_internal
import django.test
from otree.common_internal import (get_admin_secret_code)
client = django.test.Client()
ADMIN_SECRET_CODE = get_admin_secret_code()
# For automatic inactive pushing
#??? from .models import LiveManagementThread, LivePusherThread
from threading import Event
import time
# End-For automatic inactive pushing
from .pages import PresenterView
#############################################
#############################################
# Connected to websocket.connect
def ws_winnerpage_connect(message):
print("*********CONNECTWINNERPAGE************")
channelsGroup("WINNERPAGE").add(message.reply_channel)
# Connected to websocket.receive
def ws_winnerpage_message(message):
print("*********RECEIVEWINNERPAGE************")
# Connected to websocket.disconnect
def ws_winnerpage_disconnect(message):
print("*********DISCONNECTWINNERPAGE************")
channelsGroup("WINNERPAGE").discard(message.reply_channel)
#############################################
#############################################
# Connected to websocket.connect
def ws_connect(message):
print("*********CONNECT************")
channelsGroup("adminreport").add(message.reply_channel)
# Connected to websocket.receive
def ws_message(message):
print("*********RECEIVE************")
# Decrypt the url: No info in the url in this app
# Decrypt the received message
jsonmessage = json.loads(message.content['text'])
subsession_pk = jsonmessage['subsession_pk']
mysubsession = OtreeSubsession.objects.get(pk=subsession_pk)
if 'order' in jsonmessage:
order = jsonmessage['order']
# Manage the synchronisation page between the 2 parts
if order == "No Jump 2 Next":
mysubsession.jump_2_next = False
mysubsession.save()
mysubsession.session.vars['running_part_2'] = "False"
mysubsession.session.save()
elif order == "Jump 2 Next":
mysubsession.jump_2_next = True
mysubsession.save()
mysubsession.session.vars['running_part_2'] = "True"
mysubsession.session.save()
elif order == "push_all_players_on_page":
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mysubsession.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)):
# This player is one of those who needs to be advanced
try:
if p.participant._current_form_page_url:
resp = client.post(
p.participant._current_form_page_url,
data={
constants_internal.timeout_happened: True,
constants_internal.admin_secret_code: ADMIN_SECRET_CODE
},
follow=True
)
else:
resp = client.get(p.participant._start_url(), follow=True)
except:
logging.exception("Failed to advance participant.")
raise
assert resp.status_code < 400
p.participant.vars['participant_was_pushed'] = 'True'
p.participant.save()
channels.Group(
'auto-advance-{}'.format(p.participant.code)
).send(
{'text': json.dumps(
{'auto_advanced': True})}
)
elif order == "push_active_players_on_page":
group_pk = jsonmessage['group_pk']
mygroup = OtreeGroup.objects.get(pk=group_pk)
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mygroup.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)
& (p.participant.vars['active_flag'] != 'inactive')):
# This player is one of those who needs to be advanced
try:
if p.participant._current_form_page_url:
resp = client.post(
p.participant._current_form_page_url,
data={
constants_internal.timeout_happened: True,
constants_internal.admin_secret_code: ADMIN_SECRET_CODE
},
follow=True
)
else:
resp = client.get(p.participant._start_url(), follow=True)
except:
logging.exception("Failed to advance participant.")
raise
assert resp.status_code < 400
p.participant.vars['participant_was_pushed'] = 'True'
p.participant.save()
channels.Group(
'auto-advance-{}'.format(p.participant.code)
).send(
{'text': json.dumps(
{'auto_advanced': True})}
)
elif order == "push_inactive_players_on_page":
group_pk = jsonmessage['group_pk']
mygroup = OtreeGroup.objects.get(pk=group_pk)
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mygroup.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)
& (p.participant.vars['active_flag'] == 'inactive')):
# This player is one of those who needs to be advanced
try:
if p.participant._current_form_page_url:
resp = client.post(
p.participant._current_form_page_url,
data={
constants_internal.timeout_happened: True,
constants_internal.admin_secret_code: ADMIN_SECRET_CODE
},
follow=True
)
else:
resp = client.get(p.participant._start_url(), follow=True)
except:
logging.exception("Failed to advance participant.")
raise
assert resp.status_code < 400
p.participant.vars['participant_was_pushed'] = 'True'
p.participant.save()
channels.Group(
'auto-advance-{}'.format(p.participant.code)
).send(
{'text': json.dumps(
{'auto_advanced': True})}
)
elif order == "deactivate_all_group_on_page":
group_pk = jsonmessage['group_pk']
mygroup = OtreeGroup.objects.get(pk=group_pk)
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mygroup.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)):
p.participant.vars['active_flag'] = 'inactive'
p.participant.save()
elif order == "reactivate_all_group_on_page":
group_pk = jsonmessage['group_pk']
mygroup = OtreeGroup.objects.get(pk=group_pk)
page_name = jsonmessage['page_name']
round_nb = jsonmessage['round_nb']
for p in mygroup.get_players():
if ((str(p.participant._current_page_name) == page_name)
& (p.participant._round_number == round_nb)):
p.participant.vars['active_flag'] = time.time()
p.participant.save()
elif order == "DetermineWinner":
winner_id = mysubsession.determine_winner(subsession_pk)
# Give feedback
channelsGroup("adminreport").send({'text': json.dumps(
{'winner_id_in_subsession': winner_id})}
)
elif ((order == "InformWinnerResult") & ('winner_id' in mysubsession.session.vars)):
textforwinnerpage = json.dumps({
"winner_id": mysubsession.session.vars['winner_id'],
})
channelsGroup("WINNERPAGE").send({'text': textforwinnerpage})
mysubsession.computation_locked = True
mysubsession.save()
# Manage the computing of the results
elif order == "ComputeResults":
#?? mysubsession.session.vars['continue_computing'] = "True"
#?? if mysubsession.session.vars['computing_ongoing'] == "False":
#?? mysubsession.save()
mysubsession.compute_results(subsession_pk)
#?? elif order == "StopResultsComputing":
#?? mysubsession.session.vars['continue_computing'] = "False"
#?? mysubsession.save()
elif order == "MakeGrouping":
mysubsession.groupe(subsession_pk)
elif order == "StartMonitoring":
pass
#??? # Start it only if it's not currently running:
#??? date_time = mysubsession.session.vars['last_monitoring_time']
#??? pattern = '%d.%m.%Y %H:%M:%S'
#??? if date_time == 'not_yet_started':
#??? Constants.c_stopping_event_monitoring.clear()
#??? my_live_manager_thread = LiveManagementThread(mysubsession.pk, Constants.c_stopping_event_monitoring)
#??? my_live_manager_thread.start()
#??? else:
#??? epoch = float(time.mktime(time.strptime(date_time, pattern)))
#??? if epoch < (float(time.time() - Constants.c_inactive_monitoring_period_in_seconds)):
#??? Constants.c_stopping_event_monitoring.clear()
#??? my_live_manager_thread = LiveManagementThread(mysubsession.pk, Constants.c_stopping_event_monitoring)
#??? my_live_manager_thread.start()
elif order == "StartPushing":
pass
#??? # Start it only if it's not currently running:
#??? date_time = mysubsession.session.vars['last_pushing_time']
#??? pattern = '%d.%m.%Y %H:%M:%S'
#??? if date_time == 'not_yet_started':
#??? Constants.c_stopping_event_pushing.clear()
#??? my_live_pusher_thread = LivePusherThread(mysubsession.pk, Constants.c_stopping_event_pushing)
#??? my_live_pusher_thread.start()
#??? else:
#??? epoch = float(time.mktime(time.strptime(date_time, pattern)))
#??? if epoch < (float(time.time() - Constants.c_pushing_period_in_seconds)):
#??? Constants.c_stopping_event_pushing.clear()
#??? my_live_pusher_thread = LivePusherThread(mysubsession.pk, Constants.c_stopping_event_pushing)
#??? my_live_pusher_thread.start()
elif order == "StopMonitoring":
Constants.c_stopping_event_monitoring.set()
elif order == "StopPushing":
Constants.c_stopping_event_pushing.set()
#############################################
# Give feedback
channelsGroup("adminreport").send({'text': json.dumps(
{"order": "refresh"})}
)
# Connected to websocket.disconnect
def ws_disconnect(message):
print("*********DISCONNECT************")
channelsGroup("adminreport").discard(message.reply_channel)
#############################################
# Socket for the PresenterView
#############################################
# Connected to websocket.connect
def presenterview_ws_connect(message):
print("*********CONNECT_PresenterView************")
channelsGroup("ws_presenterview").add(message.reply_channel)
# Connected to websocket.receive
def presenterview_ws_message(message):
print("*********RECEIVE_PresenterView************")
# Decrypt the url: No info in the url in this app
# Decrypt the received message
jsonmessage = json.loads(message.content['text'])
order = jsonmessage['order']
if order == "set_sessioncode":
session_code = jsonmessage['session_code']
#############################################
PresenterView.set_session_code(session_code)
# Give feedback
channelsGroup("ws_presenterview").send({'text': json.dumps(
{"order": "refresh"})}
)
elif order == "set_phase":
phase = jsonmessage['phase']
#############################################
PresenterView.set_phase(phase)
# Give feedback
channelsGroup("ws_presenterview").send({'text': json.dumps(
{"order": "refresh"})}
)
# Connected to websocket.disconnect
def presenterview_ws_disconnect(message):
print("*********DISCONNECT_PresenterView************")
channelsGroup("ws_presenterview").discard(message.reply_channel)
|
anthropo-lab/XP
|
EPHEMER/EDHEC_Project/both_change_group_en/consumers.py
|
Python
|
gpl-3.0
| 13,913 | 0.004313 |
# i2c_esp.py Test program for asi2c.py
# Tests Responder on ESP8266
# The MIT License (MIT)
#
# Copyright (c) 2018 Peter Hinch
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# pyb esp8266
# scl X9 - 0
# sda X10 - 2
# sync X11 - 5
# ack Y8 - 4
# gnd - gnd
import uasyncio as asyncio
from machine import Pin, I2C
import asi2c
import ujson
i2c = I2C(scl=Pin(0),sda=Pin(2)) # software I2C
syn = Pin(5)
ack = Pin(4)
chan = asi2c.Responder(i2c, syn, ack)
async def receiver():
sreader = asyncio.StreamReader(chan)
await chan.ready()
print('started')
for _ in range(5): # Test flow control
res = await sreader.readline()
print('Received', ujson.loads(res))
await asyncio.sleep(4)
while True:
res = await sreader.readline()
print('Received', ujson.loads(res))
async def sender():
swriter = asyncio.StreamWriter(chan, {})
txdata = [0, 0]
while True:
await swriter.awrite(''.join((ujson.dumps(txdata), '\n')))
txdata[1] += 1
await asyncio.sleep_ms(1500)
loop = asyncio.get_event_loop()
loop.create_task(receiver())
loop.create_task(sender())
try:
loop.run_forever()
finally:
chan.close() # for subsequent runs
|
peterhinch/micropython-async
|
v2/i2c/i2c_esp.py
|
Python
|
mit
| 2,245 | 0.001336 |
# Borrowed and modified from xbmcswift
import logging
import xbmc
from pulsar.addon import ADDON_ID
class XBMCHandler(logging.StreamHandler):
xbmc_levels = {
'DEBUG': 0,
'INFO': 2,
'WARNING': 3,
'ERROR': 4,
        'CRITICAL': 5,  # logging names this level 'CRITICAL', not 'LOGCRITICAL'
}
def emit(self, record):
xbmc_level = self.xbmc_levels.get(record.levelname)
xbmc.log(self.format(record), xbmc_level)
def _get_logger():
logger = logging.getLogger(ADDON_ID)
logger.setLevel(logging.DEBUG)
handler = XBMCHandler()
handler.setFormatter(logging.Formatter('[%(name)s] %(message)s'))
logger.addHandler(handler)
return logger
log = _get_logger()
|
steeve/plugin.video.pulsar
|
resources/site-packages/pulsar/logger.py
|
Python
|
bsd-3-clause
| 680 | 0.002941 |
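Hypothetical usage from another module of the add-on; each record is routed to xbmc.log with the level mapped above.

from pulsar.logger import log

log.debug("resolving stream ...")   # forwarded to xbmc.log at level 0
log.error("daemon not reachable")   # forwarded to xbmc.log at level 4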
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.compat.six import string_types
from ansible import constants as C
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.playbook import Playbook
from ansible.template import Templar
from ansible.utils.unicode import to_unicode
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class PlaybookExecutor:
'''
This is the primary class for executing playbooks, and thus the
basis for bin/ansible-playbook operation.
'''
def __init__(self, playbooks, inventory, variable_manager, loader, options, passwords):
self._playbooks = playbooks
self._inventory = inventory
self._variable_manager = variable_manager
self._loader = loader
self._options = options
self.passwords = passwords
self._unreachable_hosts = dict()
if options.listhosts or options.listtasks or options.listtags or options.syntax:
self._tqm = None
else:
self._tqm = TaskQueueManager(inventory=inventory, variable_manager=variable_manager, loader=loader, options=options, passwords=self.passwords)
def run(self):
'''
Run the given playbook, based on the settings in the play which
may limit the runs to serialized groups, etc.
'''
result = 0
entrylist = []
entry = {}
try:
for playbook_path in self._playbooks:
pb = Playbook.load(playbook_path, variable_manager=self._variable_manager, loader=self._loader)
self._inventory.set_playbook_basedir(os.path.dirname(playbook_path))
if self._tqm is None: # we are doing a listing
entry = {'playbook': playbook_path}
entry['plays'] = []
else:
# make sure the tqm has callbacks loaded
self._tqm.load_callbacks()
self._tqm.send_callback('v2_playbook_on_start', pb)
i = 1
plays = pb.get_plays()
display.vv(u'%d plays in %s' % (len(plays), to_unicode(playbook_path)))
for play in plays:
if play._included_path is not None:
self._loader.set_basedir(play._included_path)
else:
self._loader.set_basedir(pb._basedir)
# clear any filters which may have been applied to the inventory
self._inventory.remove_restriction()
if play.vars_prompt:
for var in play.vars_prompt:
vname = var['name']
prompt = var.get("prompt", vname)
default = var.get("default", None)
private = var.get("private", True)
confirm = var.get("confirm", False)
encrypt = var.get("encrypt", None)
salt_size = var.get("salt_size", None)
salt = var.get("salt", None)
if vname not in self._variable_manager.extra_vars:
if self._tqm:
self._tqm.send_callback('v2_playbook_on_vars_prompt', vname, private, prompt, encrypt, confirm, salt_size, salt, default)
play.vars[vname] = display.do_var_prompt(vname, private, prompt, encrypt, confirm, salt_size, salt, default)
else: # we are either in --list-<option> or syntax check
play.vars[vname] = default
# Create a temporary copy of the play here, so we can run post_validate
# on it without the templating changes affecting the original object.
all_vars = self._variable_manager.get_vars(loader=self._loader, play=play)
templar = Templar(loader=self._loader, variables=all_vars)
new_play = play.copy()
new_play.post_validate(templar)
if self._options.syntax:
continue
if self._tqm is None:
# we are just doing a listing
entry['plays'].append(new_play)
else:
self._tqm._unreachable_hosts.update(self._unreachable_hosts)
# we are actually running plays
for batch in self._get_serialized_batches(new_play):
if len(batch) == 0:
self._tqm.send_callback('v2_playbook_on_play_start', new_play)
self._tqm.send_callback('v2_playbook_on_no_hosts_matched')
break
# restrict the inventory to the hosts in the serialized batch
self._inventory.restrict_to_hosts(batch)
# and run it...
result = self._tqm.run(play=play)
# check the number of failures here, to see if they're above the maximum
# failure percentage allowed, or if any errors are fatal. If either of those
# conditions are met, we break out, otherwise we only break out if the entire
# batch failed
failed_hosts_count = len(self._tqm._failed_hosts) + len(self._tqm._unreachable_hosts)
if new_play.max_fail_percentage is not None and \
int((new_play.max_fail_percentage)/100.0 * len(batch)) > int((len(batch) - failed_hosts_count) / len(batch) * 100.0):
break
elif len(batch) == failed_hosts_count:
break
                        # clear the failed hosts dictionaries in the TQM for the next batch
self._unreachable_hosts.update(self._tqm._unreachable_hosts)
self._tqm.clear_failed_hosts()
# if the last result wasn't zero or 3 (some hosts were unreachable),
# break out of the serial batch loop
if result not in (0, 3):
break
i = i + 1 # per play
if entry:
entrylist.append(entry) # per playbook
# send the stats callback for this playbook
if self._tqm is not None:
if C.RETRY_FILES_ENABLED:
retries = set(self._tqm._failed_hosts.keys())
retries.update(self._tqm._unreachable_hosts.keys())
retries = sorted(retries)
if len(retries) > 0:
if C.RETRY_FILES_SAVE_PATH:
basedir = C.shell_expand(C.RETRY_FILES_SAVE_PATH)
else:
basedir = os.path.dirname(playbook_path)
(retry_name, _) = os.path.splitext(os.path.basename(playbook_path))
filename = os.path.join(basedir, "%s.retry" % retry_name)
if self._generate_retry_inventory(filename, retries):
display.display("\tto retry, use: --limit @%s\n" % filename)
self._tqm.send_callback('v2_playbook_on_stats', self._tqm._stats)
# if the last result wasn't zero, break out of the playbook file name loop
if result != 0:
break
if entrylist:
return entrylist
finally:
if self._tqm is not None:
self._tqm.cleanup()
if self._options.syntax:
display.display("No issues encountered")
return result
return result
def _get_serialized_batches(self, play):
'''
Returns a list of hosts, subdivided into batches based on
the serial size specified in the play.
'''
# make sure we have a unique list of hosts
all_hosts = self._inventory.get_hosts(play.hosts)
# check to see if the serial number was specified as a percentage,
# and convert it to an integer value based on the number of hosts
if isinstance(play.serial, string_types) and play.serial.endswith('%'):
serial_pct = int(play.serial.replace("%",""))
serial = int((serial_pct/100.0) * len(all_hosts)) or 1
else:
if play.serial is None:
serial = -1
else:
serial = int(play.serial)
# if the serial count was not specified or is invalid, default to
# a list of all hosts, otherwise split the list of hosts into chunks
# which are based on the serial size
if serial <= 0:
return [all_hosts]
else:
serialized_batches = []
while len(all_hosts) > 0:
play_hosts = []
for x in range(serial):
if len(all_hosts) > 0:
play_hosts.append(all_hosts.pop(0))
serialized_batches.append(play_hosts)
return serialized_batches
def _generate_retry_inventory(self, retry_path, replay_hosts):
'''
Called when a playbook run fails. It generates an inventory which allows
re-running on ONLY the failed hosts. This may duplicate some variable
information in group_vars/host_vars but that is ok, and expected.
'''
try:
with open(retry_path, 'w') as fd:
for x in replay_hosts:
fd.write("%s\n" % x)
except Exception as e:
display.error("Could not create retry file '%s'. The error was: %s" % (retry_path, e))
return False
return True
|
blueboxgroup/ansible
|
lib/ansible/executor/playbook_executor.py
|
Python
|
gpl-3.0
| 11,134 | 0.004581 |
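A worked example of the percentage arithmetic in _get_serialized_batches (host names invented): ten matched hosts with serial: "30%" yield batches of three plus a final short batch.

all_hosts = ['web%d' % i for i in range(10)]
serial_pct = 30
serial = int((serial_pct / 100.0) * len(all_hosts)) or 1   # -> 3
batches = []
while len(all_hosts) > 0:
    batches.append(all_hosts[:serial])
    all_hosts = all_hosts[serial:]
print(batches)  # [['web0', 'web1', 'web2'], ..., ['web9']]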
#!/usr/bin/python
from pisi.actionsapi import shelltools, get, cmaketools, pisitools
def setup():
cmaketools.configure()
def build():
cmaketools.make()
def install():
cmaketools.install()
pisitools.dodoc ("AUTHORS", "ChangeLog", "COPYING")
|
richard-fisher/repository
|
desktop/util/tint2/actions.py
|
Python
|
gpl-2.0
| 263 | 0.019011 |
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.txt')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = [
'pyramid',
'pyramid_debugtoolbar',
'waitress',
]
setup(name='tilecost',
version='0.0',
description='tilecost',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Programming Language :: Python",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
],
author='',
author_email='',
url='',
keywords='web pyramid pylons',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
test_suite="tilecost",
entry_points="""\
[paste.app_factory]
main = tilecost:main
""",
)
|
Shopzilla-Ops/python-coding-challenge
|
cost-of-tile/mjones/tilecost/setup.py
|
Python
|
mit
| 998 | 0 |
__author__ = 'mwagner'
from PyQt4.Qt import Qt
from PyQt4.QtGui import QDialog, QIcon
from ..view.Ui_VertexDialog import Ui_VertexDialog
from ..model.VertexToolsError import *
class VertexDialog(QDialog, Ui_VertexDialog):
def __init__(self, plugin, parent=None):
super(VertexDialog, self).__init__(parent)
self.setAttribute(Qt.WA_DeleteOnClose)
self.plugin = plugin
self.setupUi(self)
self.helpButton.setIcon(self.plugin.get_icon("help.gif"))
self.setWindowIcon(QIcon(":beninCad/info.png"))
|
allspatial/vertex-tools
|
controller/VertexDialog.py
|
Python
|
mit
| 550 | 0.001818 |
# -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc (opensource@dotcloud.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gevent
import zerorpc
from testutils import teardown, random_ipc_endpoint
def test_client_connect():
endpoint = random_ipc_endpoint()
class MySrv(zerorpc.Server):
def lolita(self):
return 42
srv = MySrv()
srv.bind(endpoint)
gevent.spawn(srv.run)
client = zerorpc.Client()
client.connect(endpoint)
assert client.lolita() == 42
def test_client_quick_connect():
endpoint = random_ipc_endpoint()
class MySrv(zerorpc.Server):
def lolita(self):
return 42
srv = MySrv()
srv.bind(endpoint)
gevent.spawn(srv.run)
client = zerorpc.Client(endpoint)
assert client.lolita() == 42
|
kanghtta/zerorpc-python
|
tests/test_client.py
|
Python
|
mit
| 1,919 | 0.002606 |
import json
import requests
import key
API_key = key.getAPIkey()
#load all champion pictures
def load_champion_pictures(champion_json):
print len(champion_json['data'])
version = champion_json['version']
print "version: " + version
for champion in champion_json['data']:
print champion
r = requests.get('http://ddragon.leagueoflegends.com/cdn/' + version + '/img/champion/' + champion + '.png')
if r.status_code == 200:
img = r.content
            with open('static/images/champions/' + champion_json['data'][champion]['name'] + '.png', 'wb') as f:  # binary mode for image data
f.write(img)
print "img created"
else:
print "pictures: something went wrong"
#load champion json
#converts to python dict using json() and json.dump() for error checking
def load_champion_json():
try:
r = requests.get('https://global.api.pvp.net/api/lol/static-data/na/v1.2/champion?&api_key=' + API_key)
champion_json = r.json()
if 'status' in champion_json:
print champion_json['status']['message']
return
load_champion_pictures(champion_json)
# quick fix to change MonkeyKing to Wukong so that sort_keys sorts it properly
champion_json['data']['Wukong'] = champion_json['data']['MonkeyKing']
del champion_json['data']['MonkeyKing']
except ValueError as e:
print e.message
return
with open('static/json/champion.json', 'w') as f:
json.dump(champion_json, f, sort_keys=True)
load_champion_json()
|
dzhang55/riftwatch
|
static_images.py
|
Python
|
mit
| 1,397 | 0.027917 |
# Standard
import os
import sys
# Third Party
import numpy as np
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import seaborn as sns
import pyfilm as pf
from skimage.measure import label
from skimage import filters
plt.rcParams.update({'figure.autolayout': True})
mpl.rcParams['axes.unicode_minus'] = False
#local
from run import Run
import plot_style
plot_style.white()
pal = sns.color_palette('deep')
def structure_analysis(run, perc_thresh, create_film=False):
"""
Calculates the number of structures as a function of time for a given
percentile cut-off. Writes results and plots to an appropriate directory.
Parameters
----------
run : object
Run object calculated by the Run class.
perc_thresh : int
Percentile threshold at which to cut off fluctuations.
create_film : bool
Determines whether a film of the labelled structures is produced.
"""
run.read_ntot()
make_results_dir(run, perc_thresh)
labelled_image, nlabels = label_structures(run, perc_thresh)
no_structures = count_structures(run, labelled_image, nlabels)
plot_no_structures(run, no_structures, perc_thresh)
save_results(run, no_structures, perc_thresh)
if create_film:
make_film(run, no_structures, labelled_image, perc_thresh)
def make_results_dir(run, perc_thresh):
os.system('mkdir -p ' + run.run_dir + 'analysis/structures_' +
str(perc_thresh))
def label_structures(run, perc_thresh):
nlabels = np.empty(run.nt, dtype=int)
labelled_image = np.empty([run.nt, run.nx, run.ny], dtype=int)
for it in range(run.nt):
tmp = run.ntot_i[it,:,:].copy()
# Apply Gaussian filter
tmp = filters.gaussian(tmp, sigma=1)
thresh = np.percentile(tmp, perc_thresh,
interpolation='nearest')
tmp_max = np.max(tmp)
tmp_thresh = thresh/tmp_max
tmp /= tmp_max
tmp[tmp <= tmp_thresh] = 0
tmp[tmp > tmp_thresh] = 1
# Label the resulting structures
labelled_image[it,:,:], nlabels[it] = label(tmp, return_num=True,
background=0)
return(labelled_image, nlabels)
def count_structures(run, labelled_image, nlabels):
"""
Remove any structures which are too small and count structures.
"""
nblobs = np.empty(run.nt, dtype=int)
for it in range(run.nt):
hist = np.histogram(np.ravel(labelled_image[it]),
bins=range(1,nlabels[it]+1))[0]
smallest_struc = np.mean(hist)*0.1
hist = hist[hist > smallest_struc]
nblobs[it] = len(hist)
return(nblobs)
def plot_no_structures(run, no_structures, perc_thresh):
"""
Plot number of structures as a function of time.
"""
plt.clf()
plt.plot(no_structures)
plt.xlabel('Time index')
plt.ylabel('Number of structures')
plt.ylim(0)
plt.savefig(run.run_dir + 'analysis/structures_' + str(perc_thresh) +
'/nblobs.pdf')
def save_results(run, no_structures, perc_thresh):
"""
Save the number of structures as a function of time in a file.
"""
np.savetxt(run.run_dir + 'analysis/structures_' + str(perc_thresh) +
'/nblobs.csv', np.transpose((range(run.nt), no_structures)),
delimiter=',', fmt='%d', header='t_index,nblobs')
def make_film(run, no_structures, labelled_image, perc_thresh):
titles = []
for it in range(run.nt):
titles.append('No. of structures = {}'.format(no_structures[it]))
plot_options = {'cmap':'gist_rainbow',
'levels':np.arange(-1,np.max(labelled_image))
}
options = {'file_name':'structures',
'film_dir':run.run_dir + 'analysis/structures_' +
str(perc_thresh) ,
'frame_dir':run.run_dir + 'analysis/structures_' +
str(perc_thresh) + '/film_frames',
'nprocs':None,
'aspect':'equal',
'xlabel':r'$x$ (m)',
'ylabel':r'$y$ (m)',
'cbar_ticks':np.arange(-1,np.max(labelled_image),2),
'cbar_label':r'Label',
'fps':10,
'bbox_inches':'tight',
'title':titles
}
pf.make_film_2d(run.r, run.z, labelled_image,
plot_options=plot_options, options=options)
if __name__ == '__main__':
run = Run(sys.argv[1])
structure_analysis(run, 75, create_film=False)
structure_analysis(run, 95, create_film=False)
|
ferdinandvwyk/gs2_analysis
|
structure_analysis.py
|
Python
|
gpl-2.0
| 4,637 | 0.009273 |
#!/usr/bin/env python
# This file mainly exists to allow python setup.py test to work.
#
# You can test all the variations of tests by running:
#
# ./manage.py test && python runtests.py && ./setup.py test && echo OK
#
import os, sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
from django.core.management import call_command
def runtests():
# use the call_command approach so that we are as similar to running
# './manage.py test' as possible. Notably we need the South migrations to be
# run.
call_command('test', verbosity=2)
sys.exit(0)
if __name__ == '__main__':
runtests()
|
ciudadanointeligente/popit-django
|
runtests.py
|
Python
|
agpl-3.0
| 621 | 0.008052 |
# This file is part of CDS Invenio.
# Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
#
# CDS Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CDS Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""BibClassify's web interface.
This module is NOT standalone safe - this component is never expected
to run in a standalone mode, but always inside invenio."""
import os
from cgi import escape
from urllib import quote
import time
import bibupload
from invenio.messages import gettext_set_language
from invenio.bibdocfile import BibRecDocs
from invenio.webinterface_handler import WebInterfaceDirectory
from invenio.webpage import pageheaderonly, pagefooteronly
from invenio.search_engine import get_colID, \
guess_primary_collection_of_a_record, create_navtrail_links, \
perform_request_search, get_record, print_record
from invenio.websearchadminlib import get_detailed_page_tabs
from invenio.template import load
from invenio.webinterface_handler import wash_urlargd
from invenio.webuser import collect_user_info
from invenio import access_control_engine as acce
from invenio import dbquery
from invenio import bibtask
from invenio import bibrecord
import bibclassify_config as bconfig
import bibclassify_text_extractor
import bibclassify_engine
import bibclassify_ontology_reader as bor
log = bconfig.get_logger("bibclassify.webinterface")
template = load('bibclassify')
def main_page(req, recid, tabs, ln, template):
"""Generates the main page for the keyword tab - http://url/record/[recid]/keywords
@var req: request object
@var recid: int docid
@var tabs: list of tab links
@var ln: language id
@var template: template object
@return: nothing, writes using req object
"""
form = req.form
argd = wash_urlargd(form, {
'generate': (str, 'no'),
'sorting': (str, 'occurences'),
'type': (str, 'tagcloud'),
'numbering': (str, 'off'),
'showall': (str, 'off'),
})
for k,v in argd.items():
argd[k] = escape(v)
req.write(template.detailed_record_container_top(recid, tabs, ln))
# Get the keywords from MARC (if any)
success, keywords, marcrec = record_get_keywords(recid)
if success:
# check for the cached file and delete it (we don't need it anymore, data are in the DB)
tmp_file = bibclassify_engine.get_tmp_file(recid)
if os.path.exists(tmp_file):
try:
os.remove(tmp_file)
except Exception, msg:
log.error('Error removing the cached file: %s' % tmp_file)
log.error(msg)
else:
# Give user possibility to generate them ONLY if not available already
# we may have some keywords, but they are the old ones and we want to generate new
new_found, new_keywords, marcrec = generate_keywords(req, recid, argd)
if keywords and new_keywords:
for key in keywords.keys():
if key in new_keywords:
log.warning('The old "DESY" keyword will be overwritten by the newly extracted one: %s' % key)
keywords.update(new_keywords)
if keywords:
# Output the keywords or the generate button or some message why kw not available
write_keywords_body(keywords, req, recid, argd, marcrec=marcrec)
req.write(template.detailed_record_container_bottom(recid,
tabs, ln))
def write_keywords_body(keywords, req, recid, argd, marcrec=None):
"""Writes the bibclassify keyword output into req object"""
if not keywords:
req.write(template.tmpl_page_no_keywords(req=req, **argd))
return
# test if more than half of the entries have weight (0,0) - ie. not weighted
#if argd['type'] == 'tagcloud' and len(filter(lambda x: (0,0) in x[0], keywords.values())) > (len(keywords) * .5):
# argd['type'] = 'list'
if argd['type'] == 'list':
# Display keywords as a list.
req.write(template.tmpl_page_list(keywords, req=req, **argd))
elif argd['type'] == 'tagcloud':
# Display keywords as a tag cloud.
req.write(template.tmpl_page_tagcloud(keywords=keywords, req=req, **argd))
elif argd['type'] == 'xml':
if marcrec:
marcxml = filter_marcrec(marcrec)
else:
marcxml = bibclassify_engine.build_marc(recid, keywords, {})
req.write(template.tmpl_page_xml_output(keywords,
marcxml,
req=req, **argd))
else:
_ = gettext_set_language(argd['ln'])
req.write(template.tmpl_page(top=_('Unknown type: %s') % argd['type'], **argd))
def record_get_keywords(record, main_field=bconfig.CFG_MAIN_FIELD,
others=bconfig.CFG_OTHER_FIELDS):
"""Returns a dictionary of keywordToken objects from the marc
record. Weight is set to (0,0) if no weight can be found.
    This will load keywords from the fields 653 and 695__a (the latter
    being the old 'DESY' keywords)
@var record: int or marc record, if int - marc record is loaded
from the database. If you pass record instance, keywords are
extracted from it
@return: tuple (found, keywords, marcxml)
found - int indicating how many main_field keywords were found
the other fields are not counted
keywords - standard dictionary of keywordToken objects
marcrec - marc record object loaded with data
"""
keywords = {}
if isinstance(main_field, basestring):
main_field = [main_field]
if isinstance(others, basestring):
others = [others]
if isinstance(record, int):
rec = get_record(record)
else:
rec = record
found = 0
for m_field in main_field:
tag, ind1, ind2 = bibclassify_engine._parse_marc_code(m_field)
for field in rec.get(tag, []):
keyword = ''
weight = 0
type = ''
for subfield in field[0]:
if subfield[0] == 'a':
keyword = subfield[1]
elif subfield[0] == 'n':
weight = int(subfield[1])
elif subfield[0] == '9':
type = subfield[1]
if keyword:
found += 1
keywords[bor.KeywordToken(keyword, type=type)] = [[(0,0) for x in range(weight)]]
if others:
for field_no in others:
tag, ind1, ind2 = bibclassify_engine._parse_marc_code(field_no)
type = 'f%s' % field_no
for field in rec.get(tag, []):
keyword = ''
for subfield in field[0]:
if subfield[0] == 'a':
keyword = subfield[1]
keywords[bor.KeywordToken(keyword, type=type)] = [[(0,0)]]
break
return found, keywords, rec
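# Illustrative example (hypothetical record): for a record carrying a single
# 653 keyword 'quark' with weight subfield n=2, this returns roughly
#     (1, {KeywordToken('quark'): [[(0, 0), (0, 0)]]}, rec)
# i.e. the weight is unrolled into that many dummy (0, 0) position tuples.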
def generate_keywords(req, recid, argd):
"""Extracts keywords from the fulltexts (if found) for the
    given recid. It first checks whether keywords are already stored
    in the temp file (possibly from a previous run).
@var req: req object
@var recid: record id
@var argd: arguments passed from web
@return: standard dictionary of kw objects or {}
"""
ln = argd['ln']
_ = gettext_set_language(ln)
keywords = {}
# check the files were not already generated
abs_path = bibclassify_engine.get_tmp_file(recid)
if os.path.exists(abs_path):
try:
# Try to load the data from the tmp file
recs = bibupload.xml_marc_to_records(bibupload.open_marc_file(abs_path))
return record_get_keywords(recs[0])
except:
pass
# check it is allowed (for this user) to generate pages
(exit_stat, msg) = acce.acc_authorize_action(req, 'runbibclassify')
if exit_stat != 0:
log.info('Access denied: ' + msg)
msg = _("The site settings do not allow automatic keyword extraction")
req.write(template.tmpl_page_msg(msg=msg))
return 0, keywords, None
# register generation
bibdocfiles = BibRecDocs(recid).list_latest_files()
if bibdocfiles:
# User arrived at a page, but no keywords are available
inprogress, msg = _doc_already_submitted(recid)
if argd['generate'] != 'yes':
# Display a form and give them possibility to generate keywords
if inprogress:
req.write(template.tmpl_page_msg(msg='<div class="warningbox">%s</div>' % _(msg)))
else:
req.write(template.tmpl_page_generate_keywords(req=req, **argd))
return 0, keywords, None
else: # after user clicked on "generate" button
if inprogress:
req.write(template.tmpl_page_msg(msg='<div class="warningbox">%s</div>' % _(msg) ))
else:
schedule_extraction(recid, taxonomy=bconfig.CFG_EXTRACTION_TAXONOMY)
req.write(template.tmpl_page_msg(msg='<div class="warningbox">%s</div>' %
                    _('We have registered your request, the automated '
                      'keyword extraction will run after some time. Please check back in a while.')))
else:
        req.write(template.tmpl_page_msg(msg='<div class="warningbox">%s</div>' %
                 _("Unfortunately, we don't have a PDF fulltext for this record "
                   "in the storage, so keywords cannot be generated using an "
                   "automated process.")))
return 0, keywords, None
def upload_keywords(filename, mode='correct', recids=None):
"""Stores the extracted keywords in the database
@var filename: fullpath to the file with marc record
@keyword mode: correct|replace|add|delete
use correct to add fields if they are different
replace all fields with fields from the file
add - add (even duplicate) fields
delete - delete fields which are inside the file
@keyword recids: list of record ids, this arg comes from
the bibclassify daemon and it is used when the recids
contains one entry (recid) - ie. one individual document
was processed. We use it to mark the job title so that
it is possible to query database if the bibclassify
was run over that document (in case of collections with
many recids, we simply construct a general title)
"""
if mode == 'correct':
m = '-c'
elif mode == 'replace':
m = '-r'
elif mode == 'add':
m = '-a'
elif mode == 'delete':
m = '-d'
else:
raise Exception('Unknown mode')
    # let's use the user column to store the information, because there is no better alternative in sight...
user_title = 'bibclassify.upload'
if recids and len(recids) == 1:
user_title = 'extract:%d' % recids[0]
bibtask.task_low_level_submission('bibupload',
user_title, '-n', m, filename)
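# Usage sketch (hypothetical path): upload_keywords('/tmp/rec42.xml',
# mode='correct', recids=[42]) schedules "bibupload -n -c" with the user
# column set to 'extract:42', which _doc_already_submitted() queries later.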
def schedule_extraction(recid, taxonomy):
bibtask.task_low_level_submission('bibclassify',
'extract:%s' % recid, '-k', taxonomy, '-i', '%s' % recid)
def _doc_already_submitted(recid):
# check extraction was already registered
sql = "SELECT COUNT(proc) FROM schTASK WHERE proc = %s AND user = %s\
AND (status='WAITING' OR status='RUNNING')"
if dbquery.run_sql(sql, ('bibclassify','extract:%s' % recid))[0][0] > 0:
        return (True, "The automated keyword extraction for this document "
                      "has already been scheduled. Please check back in a while.")
# check the upload is inside the scheduled tasks
sql = "SELECT COUNT(proc) FROM schTASK WHERE proc = %s AND user = %s\
AND (status='WAITING' OR status='RUNNING')"
if dbquery.run_sql(sql, ('bibupload','extract:%s' % recid))[0][0] > 0:
return (True, 'The document was already processed, '
'it will take a while for it to be ingested.')
# or the task was run and is already archived
sql = "SELECT COUNT(proc) FROM hstTASK WHERE proc = %s AND user = %s"
if dbquery.run_sql(sql, ('bibupload','extract:%s' % recid))[0][0] > 0:
return (True, 'The document was already processed, '
'at this moment, the automated extraction is not available.')
    # or the task has already run
sql = "SELECT COUNT(proc) FROM schTASK WHERE proc = %s AND user = %s\
AND (status='DONE')"
if dbquery.run_sql(sql, ('bibclassify','extract:%s' % recid))[0][0] > 0:
return (True, 'The document was already processed, '
'but automated extraction identified no suitable keywords.')
    # or the extraction is in an error state
sql = "SELECT COUNT(proc) FROM schTASK WHERE proc = %s AND user = %s\
AND (status='ERROR')"
if dbquery.run_sql(sql, ('bibclassify','extract:%s' % recid))[0][0] > 0:
return (True, 'The document was already scheduled, '
                'but an error happened. This requires an '
                'administrator\'s intervention. Unfortunately, '
'for the moment we cannot display any data.')
return (False, None)
def filter_marcrec(marcrec, main_field=bconfig.CFG_MAIN_FIELD,
others=bconfig.CFG_OTHER_FIELDS):
"""Removes the unwanted fields and returns xml"""
if isinstance(main_field, basestring):
main_field = [main_field]
if isinstance(others, basestring):
others = [others]
key_map = ['001']
for field in main_field + others:
tag, ind1, ind2 = bibclassify_engine._parse_marc_code(field)
key_map.append(tag)
return bibrecord.print_rec(marcrec, 1, tags=key_map)
|
pombredanne/invenio-old
|
modules/bibclassify/lib/bibclassify_webinterface.py
|
Python
|
gpl-2.0
| 14,432 | 0.004088 |
#!/usr/bin/env python
import mredis
import time
ports = [6379, 6380]
servers = []
for port in ports:
servers.append({'host': 'localhost', 'port': port, 'db': 0})
mr = mredis.MRedis(servers)
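# MRedis shards keys across the listed servers (the library picks one server
# per key, presumably by hashing it), so each call below is routed to exactly
# one of the two redis instances.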
# Destructive test of the database
#print mr.flushall()
#print mr.flushdb()
print mr.ping()
# Build a set of keys for operations
keys = set()
for x in xrange(0, 100):
key = 'key:%.8f' % time.time()
keys.add(key)
for key in keys:
mr.set(key, time.time())
fetched = mr.keys('key:*')
results = []
for server in fetched:
for key in fetched[server]:
results.append('%s->%s' % (key, mr.get(key)))
print '%i keys fetched' % len(results)
for key in keys:
mr.delete(key)
print mr.bgrewriteaof()
print mr.dbsize()
print mr.lastsave()
#print mr.info()
print mr.randomkey()
|
gmr/mredis
|
tests/general.py
|
Python
|
bsd-3-clause
| 795 | 0.003774 |
#!/usr/bin/python3
import argparse, random, textwrap
from datetime import datetime
from urllib import request
from xml.etree import ElementTree
labels = {
"clouds": "%",
"humidity": "%",
"precipitation": "%",
"temp": "°F",
"wind-direction": "°",
"wind-speed": " mph",
}
parser = argparse.ArgumentParser(description = "display weather using data from weather.gov")
parser.add_argument("latitude",
help = "latitude of location",
type = float)
parser.add_argument("longitude",
help = "longitude of location",
type = float)
args = parser.parse_args()
def print_weather(latitude, longitude):
# weather.gov provides two xml files: digitalDWML and dwml.
# digitalDWML includes detailed, 24-hour forecast data for the next 7 days.
# dwml includes simple data for the current day as well as text and icons.
# in this script, digitalDWML is referred to as "detailed" and dwml is
# referred to as "simple".
weather_detailed_xml = request.urlopen("http://forecast.weather.gov/MapClick.php?lat="
+ str(latitude) + "&lon=" + str(longitude)
+ "&FcstType=digitalDWML").read()
weather_simple_xml = request.urlopen("http://forecast.weather.gov/MapClick.php?lat="
+ str(latitude) + "&lon=" + str(longitude)
+ "&FcstType=dwml").read()
# these variables and functions refer to digitalDWML
root = ElementTree.fromstring(weather_detailed_xml)
parameters = root.find("data").find("parameters")
def temperature(type):
for node in parameters.iter("temperature"):
if node.get("type") == type:
return node
wrapped_description = "\n".join(
textwrap.wrap(
ElementTree.fromstring(weather_simple_xml).\
find("data").find("parameters").find("weather").\
find("weather-conditions").attrib["weather-summary"],
width = 30,
break_long_words = False))
print("Weather Forecast for "
+ root.find("data").find("location").find("city").text
+ ":\n"
+ wrapped_description
+ "\n"
)
print("Updated: "
# %z is defective so the timezone is cropped from the date string
+ datetime.strptime(
root.find("data").find("time-layout").find("start-valid-time").text[:-6],
"%Y-%m-%dT%H:%M:%S").strftime("%d %B %Y @ %I:%M %p")
)
print("Temperature: "
+ temperature("hourly")[0].text
+ labels["temp"]
)
print("Cloud Cover: "
+ parameters.find("cloud-amount")[0].text
+ labels["clouds"]
)
print("Sustained Wind: "
+ parameters.find("wind-speed")[0].text
+ labels["wind-speed"]
+ " @ "
+ parameters.find("direction")[0].text
+ labels["wind-direction"]
)
print("Humidity: "
+ parameters.find("humidity")[0].text
+ labels["humidity"]
)
print("Precipitation: "
+ parameters.find("probability-of-precipitation")[0].text
+ labels["precipitation"]
)
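# Example invocation (illustrative coordinates, roughly Washington, DC):
#     print_weather(38.8951, -77.0364)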
try:
print_weather(args.latitude, args.longitude)
except Exception as error:
    if isinstance(error, ElementTree.ParseError):
        print("error: invalid coordinates given or weather.gov's xml format has changed.")
    else:
        # the exception must be converted to str before concatenation
        print("error: " + str(error))
|
ercas/scripts
|
weather.py
|
Python
|
apache-2.0
| 3,411 | 0.017014 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import errno
import hashlib
import os
from django.conf import settings
from django.core.files import File
from django.core.files.storage import FileSystemStorage
from django.utils.encoding import force_unicode
__all__ = ["HashedFileSystemStorage"]
__author__ = "pmeier82"
class ContentExists(Exception):
pass
class HashedFileSystemStorage(FileSystemStorage):
"""`FileSystemStorage` subclass that manages file names by content hashes"""
def get_available_name(self, name):
raise ContentExists()
def _get_content_name(self, name, content, chunk_size=None):
dir_name = os.path.split(name)[0]
file_name = self._generate_hash(content=content, chunk_size=chunk_size)
return os.path.join(dir_name, file_name)
def _generate_hash(self, content, chunk_size=None):
if chunk_size is None:
chunk_size = getattr(content, "DEFAULT_CHUNK_SIZE", File.DEFAULT_CHUNK_SIZE)
hash_gen = hashlib.sha1()
cursor = content.tell()
content.seek(0)
try:
while True:
data = content.read(chunk_size)
if not data:
break
hash_gen.update(data)
return hash_gen.hexdigest()
finally:
content.seek(cursor)
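    # Behaviour sketch: because names are derived from content, saving the
    # same bytes twice yields the same storage name and no duplicate file:
    #     name1 = storage.save('a.txt', ContentFile(b'data'))
    #     name2 = storage.save('b.txt', ContentFile(b'data'))  # name2 == name1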
def save(self, name, content):
if getattr(settings, "DEBUG", None) is True:
print "{}::save({})".format(self.__class__.__name__, name)
if name is None:
name = content.name
name = self._get_content_name(name, content)
name = self._save(name, content)
return force_unicode(name.replace('\\', '/'))
def _save(self, name, content):
new_name = self._get_content_name(name=name, content=content)
try:
return super(HashedFileSystemStorage, self)._save(new_name, content)
except ContentExists:
pass
except OSError, e:
if e.errno == errno.EEXIST:
pass
else:
raise
return new_name
def delete(self, name):
if getattr(settings, "DEBUG", None) is True:
print "{}::delete({})".format(self.__class__.__name__, name)
return super(HashedFileSystemStorage, self).delete(name)
if __name__ == "__main__":
pass
|
pmeier82/spike_gnode
|
base/storage.py
|
Python
|
bsd-3-clause
| 2,381 | 0.00126 |
"""Extensions which provide a block segments."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
|
kevinconway/rpmvenv
|
rpmvenv/extensions/blocks/__init__.py
|
Python
|
mit
| 199 | 0 |
from django.shortcuts import render, get_object_or_404
from django.views import generic
from django.http import HttpResponse, HttpResponseRedirect
from django.template import loader
from rest_framework import reverse
from druidapi.query.models import QueryModel
from models import Result
from forms import SearchForm
import requests
import json
class IndexView(generic.View):
"""
The view for the main page, where the search form is
"""
def get(self, request):
form = SearchForm
return render(request, 'index.html', {'form': form})
def post(self, request):
form = SearchForm(request.POST)
if form.is_valid():
# Little bit of cheating, ideally the html would handle this
# but, I felt like building the webapp in django...
# alternatively, I could just reach over and build this.
start = form.cleaned_data['start'].isoformat()
end = form.cleaned_data['end'].isoformat()
# POST the query and return the pk, so we can look it up later
r = requests.post('http://localhost:9000/api/query/', data={'start_date': start, 'end_date': end})
result = Result.objects.create(key=r.json()["pk"])
result.save()
# To the results!
return HttpResponseRedirect("/{0}/".format(r.json()["pk"]))
else:
return render(request, 'index.html', {'form': form})
class ResultsView(generic.View):
"""
When the search is executed, it needs to display the results...
"""
def get(self, request, pk):
result = Result.objects.get(key=pk)
# GET the results for the key we're given
r = requests.get("http://localhost:9000/api/query/{0}/execute/".format(pk))
result.data = r.json()
return render(request, 'results.html', {'result': result})
|
nalabelle/druid-django
|
frontend/views.py
|
Python
|
mit
| 1,868 | 0.002677 |
# -*- coding:utf-8 -*-
from __future__ import unicode_literals
import unittest
from io import BytesIO, StringIO
from decimal import Decimal
import threading
from importlib import import_module
from ijson import common
from ijson.backends.python import basic_parse, Lexer
from ijson.compat import IS_PY2
JSON = b'''
{
"docs": [
{
"null": null,
"boolean": false,
"true": true,
"integer": 0,
"double": 0.5,
"exponent": 1.0e+2,
"long": 10000000000,
"string": "\\u0441\\u0442\\u0440\\u043e\\u043a\\u0430 - \xd1\x82\xd0\xb5\xd1\x81\xd1\x82"
},
{
"meta": [[1], {}]
},
{
"meta": {"key": "value"}
},
{
"meta": null
}
]
}
'''
JSON_EVENTS = [
('start_map', None),
('map_key', 'docs'),
('start_array', None),
('start_map', None),
('map_key', 'null'),
('null', None),
('map_key', 'boolean'),
('boolean', False),
('map_key', 'true'),
('boolean', True),
('map_key', 'integer'),
('number', 0),
('map_key', 'double'),
('number', Decimal('0.5')),
('map_key', 'exponent'),
('number', 100),
('map_key', 'long'),
('number', 10000000000),
('map_key', 'string'),
('string', 'строка - тест'),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('start_array', None),
('start_array', None),
('number', 1),
('end_array', None),
('start_map', None),
('end_map', None),
('end_array', None),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('start_map', None),
('map_key', 'key'),
('string', 'value'),
('end_map', None),
('end_map', None),
('start_map', None),
('map_key', 'meta'),
('null', None),
('end_map', None),
('end_array', None),
('end_map', None),
]
SCALAR_JSON = b'0'
INVALID_JSONS = [
b'["key", "value",]', # trailing comma
b'["key" "value"]', # no comma
b'{"key": "value",}', # trailing comma
b'{"key": "value" "key"}', # no comma
b'{"key" "value"}', # no colon
b'invalid', # unknown lexeme
b'[1, 2] dangling junk' # dangling junk
]
YAJL1_PASSING_INVALID = INVALID_JSONS[6]
INCOMPLETE_JSONS = [
b'',
b'"test',
b'[',
b'[1',
b'[1,',
b'{',
b'{"key"',
b'{"key":',
b'{"key": "value"',
b'{"key": "value",',
]
STRINGS_JSON = br'''
{
"str1": "",
"str2": "\"",
"str3": "\\",
"str4": "\\\\",
"special\t": "\b\f\n\r\t"
}
'''
NUMBERS_JSON = b'[1, 1.0, 1E2]'
SURROGATE_PAIRS_JSON = b'"\uD83D\uDCA9"'
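# \uD83D\uDCA9 is the UTF-16 surrogate pair encoding of U+1F4A9 (PILE OF POO)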
class Parse(object):
'''
Base class for parsing tests that is used to create test cases for each
available backends.
'''
def test_basic_parse(self):
events = list(self.backend.basic_parse(BytesIO(JSON)))
self.assertEqual(events, JSON_EVENTS)
def test_basic_parse_threaded(self):
thread = threading.Thread(target=self.test_basic_parse)
thread.start()
thread.join()
def test_scalar(self):
events = list(self.backend.basic_parse(BytesIO(SCALAR_JSON)))
self.assertEqual(events, [('number', 0)])
def test_strings(self):
events = list(self.backend.basic_parse(BytesIO(STRINGS_JSON)))
strings = [value for event, value in events if event == 'string']
self.assertEqual(strings, ['', '"', '\\', '\\\\', '\b\f\n\r\t'])
self.assertTrue(('map_key', 'special\t') in events)
def test_surrogate_pairs(self):
event = next(self.backend.basic_parse(BytesIO(SURROGATE_PAIRS_JSON)))
parsed_string = event[1]
self.assertEqual(parsed_string, '💩')
def test_numbers(self):
events = list(self.backend.basic_parse(BytesIO(NUMBERS_JSON)))
types = [type(value) for event, value in events if event == 'number']
self.assertEqual(types, [int, Decimal, Decimal])
def test_invalid(self):
for json in INVALID_JSONS:
# Yajl1 doesn't complain about additional data after the end
# of a parsed object. Skipping this test.
if self.__class__.__name__ == 'YajlParse' and json == YAJL1_PASSING_INVALID:
continue
            with self.assertRaises(common.JSONError):
list(self.backend.basic_parse(BytesIO(json)))
def test_incomplete(self):
for json in INCOMPLETE_JSONS:
with self.assertRaises(common.IncompleteJSONError):
list(self.backend.basic_parse(BytesIO(json)))
def test_utf8_split(self):
buf_size = JSON.index(b'\xd1') + 1
try:
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
except UnicodeDecodeError:
self.fail('UnicodeDecodeError raised')
def test_lazy(self):
# shouldn't fail since iterator is not exhausted
self.backend.basic_parse(BytesIO(INVALID_JSONS[0]))
self.assertTrue(True)
def test_boundary_lexeme(self):
buf_size = JSON.index(b'false') + 1
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
self.assertEqual(events, JSON_EVENTS)
def test_boundary_whitespace(self):
buf_size = JSON.index(b' ') + 1
events = list(self.backend.basic_parse(BytesIO(JSON), buf_size=buf_size))
self.assertEqual(events, JSON_EVENTS)
def test_api(self):
self.assertTrue(list(self.backend.items(BytesIO(JSON), '')))
self.assertTrue(list(self.backend.parse(BytesIO(JSON))))
# Generating real TestCase classes for each importable backend
for name in ['python', 'yajl', 'yajl2', 'yajl2_cffi']:
try:
classname = '%sParse' % ''.join(p.capitalize() for p in name.split('_'))
if IS_PY2:
classname = classname.encode('ascii')
locals()[classname] = type(
classname,
(unittest.TestCase, Parse),
{'backend': import_module('ijson.backends.%s' % name)},
)
except ImportError:
pass
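# e.g. the 'yajl2_cffi' backend yields a Yajl2CffiParse TestCase bound to
# ijson.backends.yajl2_cffi; backends that fail to import are skipped.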
class Common(unittest.TestCase):
'''
Backend independent tests. They all use basic_parse imported explicitly from
the python backend to generate parsing events.
'''
def test_object_builder(self):
builder = common.ObjectBuilder()
for event, value in basic_parse(BytesIO(JSON)):
builder.event(event, value)
self.assertEqual(builder.value, {
'docs': [
{
'string': 'строка - тест',
'null': None,
'boolean': False,
'true': True,
'integer': 0,
'double': Decimal('0.5'),
'exponent': 100,
'long': 10000000000,
},
{
'meta': [[1], {}],
},
{
'meta': {'key': 'value'},
},
{
'meta': None,
},
],
})
def test_scalar_builder(self):
builder = common.ObjectBuilder()
for event, value in basic_parse(BytesIO(SCALAR_JSON)):
builder.event(event, value)
self.assertEqual(builder.value, 0)
def test_parse(self):
events = common.parse(basic_parse(BytesIO(JSON)))
events = [value
for prefix, event, value in events
if prefix == 'docs.item.meta.item.item'
]
self.assertEqual(events, [1])
def test_items(self):
events = basic_parse(BytesIO(JSON))
meta = list(common.items(common.parse(events), 'docs.item.meta'))
self.assertEqual(meta, [
[[1], {}],
{'key': 'value'},
None,
])
class Stream(unittest.TestCase):
def test_bytes(self):
l = Lexer(BytesIO(JSON))
self.assertEqual(next(l)[1], '{')
def test_string(self):
l = Lexer(StringIO(JSON.decode('utf-8')))
self.assertEqual(next(l)[1], '{')
if __name__ == '__main__':
unittest.main()
|
catapult-project/catapult
|
third_party/ijson/tests.py
|
Python
|
bsd-3-clause
| 8,608 | 0.002679 |
import os, random
rfilename=random.choice(os.listdir("/storage/pictures"))
rextension=os.path.splitext(rfilename)[1]
picturespath='/storage/pictures/'
#TODO Probably don't need a for loop; could glob for "random*" instead
#TODO What if the directory is empty?
for filename in os.listdir(picturespath):
if filename.startswith("random"):
extension=os.path.splitext(filename)[1]
newname=picturespath + str(random.random()).rsplit('.',1)[1] + extension
# rename the existing random wallpaper to something random
filename=picturespath+filename
os.rename(filename, newname)
# now rename the randomly picked file to be the new "random" wallpaper
rfilename=picturespath+rfilename
os.rename(rfilename, picturespath+'random'+rextension)
|
shoaibali/kodi.background.rotator
|
randombackground.py
|
Python
|
gpl-3.0
| 713 | 0.026648 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-11-14 21:43
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('recipe', '0010_auto_20171114_1443'),
]
operations = [
migrations.RemoveField(
model_name='direction',
name='recipe',
),
migrations.DeleteModel(
name='Direction',
),
]
|
RyanNoelk/OpenEats
|
api/v1/recipe/migrations/0011_auto_20171114_1543.py
|
Python
|
mit
| 466 | 0 |
from __future__ import division
from __future__ import print_function
import os
import sys
import functools
# Update path
root = os.path.join(os.getcwd().split('proj1')[0], 'proj1')
if root not in sys.path:
sys.path.append(root)
import numpy as np
import pandas as pd
import multiprocessing
from pdb import set_trace
from Simulator import simulate
from Utils.PlotsUtils import line, line2
from Utils.RandomUtil import Random
from Utils.MisclUtils import TimeUtil
rand = Random()
timer = TimeUtil()
# Set seed
rand.set_seed(seed_val=12458)
def customer_loss_rate(customers):
served = np.sum([customer.serviced for customer in customers])
total = len(customers)
    # loss rate is the fraction of customers that were *not* serviced
    return 1 - served / total
def plot_runtime(x=None, y=None):
line(x, y, x_label=r"$\rho$", y_label=r"Run Times", the_title=r"$\mathrm{Run\ Times\ in\ }\mu\mathrm{s\ vs.\ }\rho$")
def plot_runtime_vs_avg(x, y, y_1):
line2(x, y, x, y_1, label_1="Actual Runtimes", label_2="Expected value of $\rho$", x_label=r"$\rho$", y_label=r"Run Times", the_title=r"$\mathrm{Run\ Times\ in\ }\mu\mathrm{s\ vs.\ }\rho$")
def task_5():
rho_list = np.arange(0.05, 1, 0.1)
C = 1e5
elapsed = []
for rho in rho_list:
start_time = timer.current_time()
serviced = simulate(l = rho, server_lim = 40, max_serviced=C, L=1, verbose=False)
end_time = timer.current_time()
elapsed.append(end_time-start_time)
data = pd.DataFrame([[a,b] for a, b in zip(rho_list, elapsed)], columns=["Rho", "Seconds"])
data.to_csv(os.path.abspath(os.path.join(root,"tasks/task5.csv")))
def task5_plot():
data = pd.read_csv(os.path.abspath("tasks/task5.csv"))
plot_runtime(data["Rho"], data["Seconds"])
set_trace()
def compare_plot():
rho_list = np.arange(0.05, 1, 0.1)
average_rho = [np.mean([rand.exponential(lam=p) for _ in xrange(10000)]) for p in rho_list]
data = pd.read_csv(os.path.abspath("tasks/task5.csv"))
    plot_runtime_vs_avg(data["Rho"], data["Seconds"], average_rho)
if __name__ == "__main__":
task_5()
task5_plot()
compare_plot()
|
rahlk/CSC579__Computer_Performance_Modeling
|
simulation/proj1/tasks/task5.py
|
Python
|
mit
| 2,063 | 0.010664 |
# Copyright (C) 2016 Intel Corporation
# Released under the MIT license (see COPYING.MIT)
from oeqa.core.exception import OEQAMissingVariable
from . import OETestDecorator, registerDecorator
def has_feature(td, feature):
"""
Checks for feature in DISTRO_FEATURES or IMAGE_FEATURES.
"""
if (feature in td.get('DISTRO_FEATURES', '') or
feature in td.get('IMAGE_FEATURES', '')):
return True
return False
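# e.g. has_feature({'DISTRO_FEATURES': 'systemd x11'}, 'x11') -> True
# (note this is a substring test, so partial feature names also match)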
@registerDecorator
class skipIfDataVar(OETestDecorator):
"""
Skip test based on value of a data store's variable.
It will get the info of var from the data store and will
check it against value; if are equal it will skip the test
with msg as the reason.
"""
attrs = ('var', 'value', 'msg')
def setUpDecorator(self):
msg = ('Checking if %r value is %r to skip test' %
(self.var, self.value))
self.logger.debug(msg)
if self.case.td.get(self.var) == self.value:
self.case.skipTest(self.msg)
@registerDecorator
class skipIfNotDataVar(OETestDecorator):
"""
Skip test based on value of a data store's variable.
It will get the info of var from the data store and will
check it against value; if are not equal it will skip the
test with msg as the reason.
"""
attrs = ('var', 'value', 'msg')
def setUpDecorator(self):
msg = ('Checking if %r value is not %r to skip test' %
(self.var, self.value))
self.logger.debug(msg)
if not self.case.td.get(self.var) == self.value:
self.case.skipTest(self.msg)
@registerDecorator
class skipIfNotInDataVar(OETestDecorator):
"""
Skip test if value is not in data store's variable.
"""
attrs = ('var', 'value', 'msg')
def setUpDecorator(self):
msg = ('Checking if %r value is in %r to run '
'the test' % (self.var, self.value))
self.logger.debug(msg)
if not self.value in self.case.td.get(self.var):
self.case.skipTest(self.msg)
@registerDecorator
class OETestDataDepends(OETestDecorator):
attrs = ('td_depends',)
def setUpDecorator(self):
for v in self.td_depends:
try:
value = self.case.td[v]
except KeyError:
                raise OEQAMissingVariable("Test case needs %s variable but"
                                          " it isn't in td" % v)
@registerDecorator
class skipIfNotFeature(OETestDecorator):
"""
Skip test based on DISTRO_FEATURES.
value must be in distro features or it will skip the test
with msg as the reason.
"""
attrs = ('value', 'msg')
def setUpDecorator(self):
msg = ('Checking if %s is in DISTRO_FEATURES '
'or IMAGE_FEATURES' % (self.value))
self.logger.debug(msg)
if not has_feature(self.case.td, self.value):
self.case.skipTest(self.msg)
|
schleichdi2/OPENNFR-6.1-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/core/decorator/data.py
|
Python
|
gpl-2.0
| 2,959 | 0.004055 |
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package containing the different signals.
A signal is a very simple class, similar to an exception in that it
allows messages to be passed and pauses execution while the message is
delivered. However, once the signal has been received, execution can
resume.
"""
from secondaires.navigation.equipage.signaux.base import Signal
from secondaires.navigation.equipage.signaux.attendre import SignalAttendre
from secondaires.navigation.equipage.signaux.abandonne import SignalAbandonne
from secondaires.navigation.equipage.signaux.inutile import SignalInutile
from secondaires.navigation.equipage.signaux.relais import SignalRelais
from secondaires.navigation.equipage.signaux.repete import SignalRepete
from secondaires.navigation.equipage.signaux.termine import SignalTermine
|
vlegoff/tsunami
|
src/secondaires/navigation/equipage/signaux/__init__.py
|
Python
|
bsd-3-clause
| 2,386 | 0 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from neutronclient.common import exceptions as neutron_exceptions
from oslo_config import cfg
from oslo_utils import uuidutils
from ironic.common import exception
from ironic.common import neutron as neutron_common
from ironic.conductor import task_manager
from ironic.drivers.modules.network import neutron
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.objects import utils
CONF = cfg.CONF
CLIENT_ID1 = '20:00:55:04:01:fe:80:00:00:00:00:00:00:00:02:c9:02:00:23:13:92'
CLIENT_ID2 = '20:00:55:04:01:fe:80:00:00:00:00:00:00:00:02:c9:02:00:23:13:93'
VIFMIXINPATH = 'ironic.drivers.modules.network.common.VIFPortIDMixin'
class NeutronInterfaceTestCase(db_base.DbTestCase):
def setUp(self):
super(NeutronInterfaceTestCase, self).setUp()
self.config(enabled_drivers=['fake'])
mgr_utils.mock_the_extension_manager()
self.interface = neutron.NeutronNetwork()
self.node = utils.create_test_node(self.context,
network_interface='neutron')
self.port = utils.create_test_port(
self.context, node_id=self.node.id,
address='52:54:00:cf:2d:32',
extra={'vif_port_id': uuidutils.generate_uuid()})
self.neutron_port = {'id': '132f871f-eaec-4fed-9475-0d54465e0f00',
'mac_address': '52:54:00:cf:2d:32'}
@mock.patch('%s.vif_list' % VIFMIXINPATH)
def test_vif_list(self, mock_vif_list):
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.vif_list(task)
mock_vif_list.assert_called_once_with(task)
@mock.patch('%s.vif_attach' % VIFMIXINPATH)
def test_vif_attach(self, mock_vif_attach):
vif = mock.MagicMock()
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.vif_attach(task, vif)
mock_vif_attach.assert_called_once_with(task, vif)
@mock.patch('%s.vif_detach' % VIFMIXINPATH)
def test_vif_detach(self, mock_vif_detach):
vif_id = "vif"
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.vif_detach(task, vif_id)
mock_vif_detach.assert_called_once_with(task, vif_id)
@mock.patch('%s.port_changed' % VIFMIXINPATH)
def test_vif_port_changed(self, mock_p_changed):
port = mock.MagicMock()
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.port_changed(task, port)
mock_p_changed.assert_called_once_with(task, port)
def test_init_incorrect_provisioning_net(self):
self.config(provisioning_network=None, group='neutron')
self.assertRaises(exception.DriverLoadError, neutron.NeutronNetwork)
self.config(provisioning_network=uuidutils.generate_uuid(),
group='neutron')
self.config(cleaning_network=None, group='neutron')
self.assertRaises(exception.DriverLoadError, neutron.NeutronNetwork)
@mock.patch.object(neutron_common, 'validate_network', autospec=True)
def test_validate(self, validate_mock):
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.validate(task)
self.assertEqual([mock.call(CONF.neutron.cleaning_network,
'cleaning network'),
mock.call(CONF.neutron.provisioning_network,
'provisioning network')],
validate_mock.call_args_list)
@mock.patch.object(neutron_common, 'validate_network',
side_effect=lambda n, t: n)
@mock.patch.object(neutron_common, 'rollback_ports')
@mock.patch.object(neutron_common, 'add_ports_to_network')
def test_add_provisioning_network(self, add_ports_mock, rollback_mock,
validate_mock):
self.port.internal_info = {'provisioning_vif_port_id': 'vif-port-id'}
self.port.save()
add_ports_mock.return_value = {self.port.uuid: self.neutron_port['id']}
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.add_provisioning_network(task)
rollback_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network)
add_ports_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network,
security_groups=[])
validate_mock.assert_called_once_with(
CONF.neutron.provisioning_network,
'provisioning network')
self.port.refresh()
self.assertEqual(self.neutron_port['id'],
self.port.internal_info['provisioning_vif_port_id'])
@mock.patch.object(neutron_common, 'validate_network',
lambda n, t: n)
@mock.patch.object(neutron_common, 'rollback_ports')
@mock.patch.object(neutron_common, 'add_ports_to_network')
def test_add_provisioning_network_with_sg(self, add_ports_mock,
rollback_mock):
sg_ids = []
for i in range(2):
sg_ids.append(uuidutils.generate_uuid())
self.config(provisioning_network_security_groups=sg_ids,
group='neutron')
add_ports_mock.return_value = {self.port.uuid: self.neutron_port['id']}
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.add_provisioning_network(task)
rollback_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network)
add_ports_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network,
security_groups=(
CONF.neutron.provisioning_network_security_groups))
self.port.refresh()
self.assertEqual(self.neutron_port['id'],
self.port.internal_info['provisioning_vif_port_id'])
@mock.patch.object(neutron_common, 'validate_network',
side_effect=lambda n, t: n)
@mock.patch.object(neutron_common, 'remove_ports_from_network')
def test_remove_provisioning_network(self, remove_ports_mock,
validate_mock):
self.port.internal_info = {'provisioning_vif_port_id': 'vif-port-id'}
self.port.save()
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.remove_provisioning_network(task)
remove_ports_mock.assert_called_once_with(
task, CONF.neutron.provisioning_network)
validate_mock.assert_called_once_with(
CONF.neutron.provisioning_network,
'provisioning network')
self.port.refresh()
self.assertNotIn('provisioning_vif_port_id', self.port.internal_info)
@mock.patch.object(neutron_common, 'validate_network',
side_effect=lambda n, t: n)
@mock.patch.object(neutron_common, 'rollback_ports')
@mock.patch.object(neutron_common, 'add_ports_to_network')
def test_add_cleaning_network(self, add_ports_mock, rollback_mock,
validate_mock):
add_ports_mock.return_value = {self.port.uuid: self.neutron_port['id']}
with task_manager.acquire(self.context, self.node.id) as task:
res = self.interface.add_cleaning_network(task)
rollback_mock.assert_called_once_with(
task, CONF.neutron.cleaning_network)
self.assertEqual(res, add_ports_mock.return_value)
validate_mock.assert_called_once_with(
CONF.neutron.cleaning_network,
'cleaning network')
self.port.refresh()
self.assertEqual(self.neutron_port['id'],
self.port.internal_info['cleaning_vif_port_id'])
@mock.patch.object(neutron_common, 'validate_network',
lambda n, t: n)
@mock.patch.object(neutron_common, 'rollback_ports')
@mock.patch.object(neutron_common, 'add_ports_to_network')
def test_add_cleaning_network_with_sg(self, add_ports_mock, rollback_mock):
add_ports_mock.return_value = {self.port.uuid: self.neutron_port['id']}
sg_ids = []
for i in range(2):
sg_ids.append(uuidutils.generate_uuid())
self.config(cleaning_network_security_groups=sg_ids, group='neutron')
with task_manager.acquire(self.context, self.node.id) as task:
res = self.interface.add_cleaning_network(task)
add_ports_mock.assert_called_once_with(
task, CONF.neutron.cleaning_network,
security_groups=CONF.neutron.cleaning_network_security_groups)
rollback_mock.assert_called_once_with(
task, CONF.neutron.cleaning_network)
self.assertEqual(res, add_ports_mock.return_value)
self.port.refresh()
self.assertEqual(self.neutron_port['id'],
self.port.internal_info['cleaning_vif_port_id'])
@mock.patch.object(neutron_common, 'validate_network',
side_effect=lambda n, t: n)
@mock.patch.object(neutron_common, 'remove_ports_from_network')
def test_remove_cleaning_network(self, remove_ports_mock,
validate_mock):
self.port.internal_info = {'cleaning_vif_port_id': 'vif-port-id'}
self.port.save()
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.remove_cleaning_network(task)
remove_ports_mock.assert_called_once_with(
task, CONF.neutron.cleaning_network)
validate_mock.assert_called_once_with(
CONF.neutron.cleaning_network,
'cleaning network')
self.port.refresh()
self.assertNotIn('cleaning_vif_port_id', self.port.internal_info)
@mock.patch.object(neutron_common, 'unbind_neutron_port')
def test_unconfigure_tenant_networks(self, mock_unbind_port):
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.unconfigure_tenant_networks(task)
mock_unbind_port.assert_called_once_with(
self.port.extra['vif_port_id'])
def test_configure_tenant_networks_no_ports_for_node(self):
n = utils.create_test_node(self.context, network_interface='neutron',
uuid=uuidutils.generate_uuid())
with task_manager.acquire(self.context, n.id) as task:
self.assertRaisesRegexp(
exception.NetworkError, 'No ports are associated',
self.interface.configure_tenant_networks, task)
@mock.patch.object(neutron_common, 'get_client')
@mock.patch.object(neutron, 'LOG')
def test_configure_tenant_networks_no_vif_id(self, log_mock, client_mock):
self.port.extra = {}
self.port.save()
upd_mock = mock.Mock()
client_mock.return_value.update_port = upd_mock
with task_manager.acquire(self.context, self.node.id) as task:
self.assertRaisesRegex(exception.NetworkError,
'No neutron ports or portgroups are '
'associated with node',
self.interface.configure_tenant_networks,
task)
client_mock.assert_called_once_with()
upd_mock.assert_not_called()
self.assertIn('No neutron ports or portgroups are associated with',
log_mock.error.call_args[0][0])
@mock.patch.object(neutron_common, 'get_client')
@mock.patch.object(neutron, 'LOG')
def test_configure_tenant_networks_multiple_ports_one_vif_id(
self, log_mock, client_mock):
expected_body = {
'port': {
'binding:vnic_type': 'baremetal',
'binding:host_id': self.node.uuid,
'binding:profile': {'local_link_information':
[self.port.local_link_connection]}
}
}
utils.create_test_port(self.context, node_id=self.node.id,
address='52:54:00:cf:2d:33', extra={},
uuid=uuidutils.generate_uuid())
upd_mock = mock.Mock()
client_mock.return_value.update_port = upd_mock
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.configure_tenant_networks(task)
client_mock.assert_called_once_with()
upd_mock.assert_called_once_with(self.port.extra['vif_port_id'],
expected_body)
@mock.patch.object(neutron_common, 'get_client')
def test_configure_tenant_networks_update_fail(self, client_mock):
client = client_mock.return_value
client.update_port.side_effect = neutron_exceptions.ConnectionFailed(
reason='meow')
with task_manager.acquire(self.context, self.node.id) as task:
self.assertRaisesRegexp(
exception.NetworkError, 'Could not add',
self.interface.configure_tenant_networks, task)
client_mock.assert_called_once_with()
@mock.patch.object(neutron_common, 'get_client')
def _test_configure_tenant_networks(self, client_mock, is_client_id=False,
vif_int_info=False):
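        # Shared helper: creates a second port, then asserts update_port is
        # called once per port with the expected binding:profile; VIF ids come
        # from port.extra or, when vif_int_info is True, from
        # port.internal_info, and client-ids are attached as extra_dhcp_opts
        # when is_client_id is True.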
upd_mock = mock.Mock()
client_mock.return_value.update_port = upd_mock
if vif_int_info:
kwargs = {'internal_info': {
'tenant_vif_port_id': uuidutils.generate_uuid()}}
self.port.internal_info = {
'tenant_vif_port_id': self.port.extra['vif_port_id']}
self.port.extra = {}
else:
kwargs = {'extra': {'vif_port_id': uuidutils.generate_uuid()}}
second_port = utils.create_test_port(
self.context, node_id=self.node.id, address='52:54:00:cf:2d:33',
uuid=uuidutils.generate_uuid(),
local_link_connection={'switch_id': '0a:1b:2c:3d:4e:ff',
'port_id': 'Ethernet1/1',
'switch_info': 'switch2'},
**kwargs
)
if is_client_id:
client_ids = (CLIENT_ID1, CLIENT_ID2)
ports = (self.port, second_port)
for port, client_id in zip(ports, client_ids):
extra = port.extra
extra['client-id'] = client_id
port.extra = extra
port.save()
expected_body = {
'port': {
'binding:vnic_type': 'baremetal',
'binding:host_id': self.node.uuid,
}
}
port1_body = copy.deepcopy(expected_body)
port1_body['port']['binding:profile'] = {
'local_link_information': [self.port.local_link_connection]
}
port2_body = copy.deepcopy(expected_body)
port2_body['port']['binding:profile'] = {
'local_link_information': [second_port.local_link_connection]
}
if is_client_id:
port1_body['port']['extra_dhcp_opts'] = (
[{'opt_name': 'client-id', 'opt_value': client_ids[0]}])
port2_body['port']['extra_dhcp_opts'] = (
[{'opt_name': 'client-id', 'opt_value': client_ids[1]}])
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.configure_tenant_networks(task)
client_mock.assert_called_once_with()
if vif_int_info:
portid1 = self.port.internal_info['tenant_vif_port_id']
portid2 = second_port.internal_info['tenant_vif_port_id']
else:
portid1 = self.port.extra['vif_port_id']
portid2 = second_port.extra['vif_port_id']
upd_mock.assert_has_calls(
[mock.call(portid1, port1_body),
mock.call(portid2, port2_body)],
any_order=True
)
def test_configure_tenant_networks_vif_extra(self):
self.node.instance_uuid = uuidutils.generate_uuid()
self.node.save()
self._test_configure_tenant_networks()
def test_configure_tenant_networks_vif_int_info(self):
self.node.instance_uuid = uuidutils.generate_uuid()
self.node.save()
self._test_configure_tenant_networks(vif_int_info=True)
def test_configure_tenant_networks_no_instance_uuid(self):
self._test_configure_tenant_networks()
def test_configure_tenant_networks_with_client_id(self):
self.node.instance_uuid = uuidutils.generate_uuid()
self.node.save()
self._test_configure_tenant_networks(is_client_id=True)
@mock.patch.object(neutron_common, 'get_client')
def test_configure_tenant_networks_with_portgroups(self, client_mock):
pg = utils.create_test_portgroup(
self.context, node_id=self.node.id, address='ff:54:00:cf:2d:32',
extra={'vif_port_id': uuidutils.generate_uuid()})
port1 = utils.create_test_port(
self.context, node_id=self.node.id, address='ff:54:00:cf:2d:33',
uuid=uuidutils.generate_uuid(),
portgroup_id=pg.id,
local_link_connection={'switch_id': '0a:1b:2c:3d:4e:ff',
'port_id': 'Ethernet1/1',
'switch_info': 'switch2'}
)
port2 = utils.create_test_port(
self.context, node_id=self.node.id, address='ff:54:00:cf:2d:34',
uuid=uuidutils.generate_uuid(),
portgroup_id=pg.id,
local_link_connection={'switch_id': '0a:1b:2c:3d:4e:ff',
'port_id': 'Ethernet1/2',
'switch_info': 'switch2'}
)
upd_mock = mock.Mock()
client_mock.return_value.update_port = upd_mock
expected_body = {
'port': {
'binding:vnic_type': 'baremetal',
'binding:host_id': self.node.uuid,
}
}
call1_body = copy.deepcopy(expected_body)
call1_body['port']['binding:profile'] = {
'local_link_information': [self.port.local_link_connection]
}
call2_body = copy.deepcopy(expected_body)
call2_body['port']['binding:profile'] = {
'local_link_information': [port1.local_link_connection,
port2.local_link_connection]
}
with task_manager.acquire(self.context, self.node.id) as task:
self.interface.configure_tenant_networks(task)
client_mock.assert_called_once_with()
upd_mock.assert_has_calls(
[mock.call(self.port.extra['vif_port_id'], call1_body),
mock.call(pg.extra['vif_port_id'], call2_body)]
)
|
ruyang/ironic
|
ironic/tests/unit/drivers/modules/network/test_neutron.py
|
Python
|
apache-2.0
| 19,670 | 0 |
# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.template import defaultfilters as filters
from django.utils.translation import pgettext_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
LOG = logging.getLogger(__name__)
class AddRuleLink(tables.LinkAction):
name = "addrule"
verbose_name = _("Add Rule")
url = "horizon:project:firewalls:addrule"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_firewall_rule"),)
class AddPolicyLink(tables.LinkAction):
name = "addpolicy"
verbose_name = _("Add Policy")
url = "horizon:project:firewalls:addpolicy"
classes = ("ajax-modal", "btn-addpolicy",)
icon = "plus"
policy_rules = (("network", "create_firewall_policy"),)
class AddFirewallLink(tables.LinkAction):
name = "addfirewall"
verbose_name = _("Create Firewall")
url = "horizon:project:firewalls:addfirewall"
classes = ("ajax-modal",)
icon = "plus"
policy_rules = (("network", "create_firewall"),)
class DeleteRuleLink(policy.PolicyTargetMixin, tables.DeleteAction):
name = "deleterule"
policy_rules = (("network", "delete_firewall_rule"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Rule",
u"Delete Rules",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Scheduled deletion of Rule",
u"Scheduled deletion of Rules",
count
)
def allowed(self, request, datum=None):
if datum and datum.policy:
return False
return True
def delete(self, request, obj_id):
try:
api.fwaas.rule_delete(request, obj_id)
except Exception as e:
exceptions.handle(request, _('Unable to delete rule. %s') % e)
class DeletePolicyLink(policy.PolicyTargetMixin, tables.DeleteAction):
name = "deletepolicy"
policy_rules = (("network", "delete_firewall_policy"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Policy",
u"Delete Policies",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Scheduled deletion of Policy",
u"Scheduled deletion of Policies",
count
)
def delete(self, request, obj_id):
try:
api.fwaas.policy_delete(request, obj_id)
except Exception as e:
exceptions.handle(request, _('Unable to delete policy. %s') % e)
class DeleteFirewallLink(policy.PolicyTargetMixin,
tables.DeleteAction):
name = "deletefirewall"
policy_rules = (("network", "delete_firewall"),)
@staticmethod
def action_present(count):
return ungettext_lazy(
u"Delete Firewall",
u"Delete Firewalls",
count
)
@staticmethod
def action_past(count):
return ungettext_lazy(
u"Scheduled deletion of Firewall",
u"Scheduled deletion of Firewalls",
count
)
def delete(self, request, obj_id):
try:
api.fwaas.firewall_delete(request, obj_id)
except Exception as e:
exceptions.handle(request, _('Unable to delete firewall. %s') % e)
class UpdateRuleLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "updaterule"
verbose_name = _("Edit Rule")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "update_firewall_rule"),)
def get_link_url(self, rule):
base_url = reverse("horizon:project:firewalls:updaterule",
kwargs={'rule_id': rule.id})
return base_url
class UpdatePolicyLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "updatepolicy"
verbose_name = _("Edit Policy")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "update_firewall_policy"),)
def get_link_url(self, policy):
base_url = reverse("horizon:project:firewalls:updatepolicy",
kwargs={'policy_id': policy.id})
return base_url
class UpdateFirewallLink(policy.PolicyTargetMixin, tables.LinkAction):
name = "updatefirewall"
verbose_name = _("Edit Firewall")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "update_firewall"),)
def get_link_url(self, firewall):
base_url = reverse("horizon:project:firewalls:updatefirewall",
kwargs={'firewall_id': firewall.id})
return base_url
def allowed(self, request, firewall):
if firewall.status in ("PENDING_CREATE",
"PENDING_UPDATE",
"PENDING_DELETE"):
return False
return True
class InsertRuleToPolicyLink(policy.PolicyTargetMixin,
tables.LinkAction):
name = "insertrule"
verbose_name = _("Insert Rule")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "get_firewall_policy"),
("network", "insert_rule"),)
def get_link_url(self, policy):
base_url = reverse("horizon:project:firewalls:insertrule",
kwargs={'policy_id': policy.id})
return base_url
class RemoveRuleFromPolicyLink(policy.PolicyTargetMixin,
tables.LinkAction):
name = "removerule"
verbose_name = _("Remove Rule")
classes = ("ajax-modal", "btn-danger",)
policy_rules = (("network", "get_firewall_policy"),
("network", "remove_rule"),)
def get_link_url(self, policy):
base_url = reverse("horizon:project:firewalls:removerule",
kwargs={'policy_id': policy.id})
return base_url
def allowed(self, request, policy):
if len(policy.rules) > 0:
return True
return False
class AddRouterToFirewallLink(policy.PolicyTargetMixin,
tables.LinkAction):
name = "addrouter"
verbose_name = _("Add Router")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "get_firewall"),
("network", "add_router"),)
def get_link_url(self, firewall):
base_url = reverse("horizon:project:firewalls:addrouter",
kwargs={'firewall_id': firewall.id})
return base_url
def allowed(self, request, firewall):
if not api.neutron.is_extension_supported(request,
'fwaasrouterinsertion'):
return False
tenant_id = firewall['tenant_id']
available_routers = api.fwaas.firewall_unassociated_routers_list(
request, tenant_id)
return bool(available_routers)
class RemoveRouterFromFirewallLink(policy.PolicyTargetMixin,
tables.LinkAction):
name = "removerouter"
verbose_name = _("Remove Router")
classes = ("ajax-modal", "btn-update",)
policy_rules = (("network", "get_firewall"),
("network", "remove_router"),)
def get_link_url(self, firewall):
base_url = reverse("horizon:project:firewalls:removerouter",
kwargs={'firewall_id': firewall.id})
return base_url
def allowed(self, request, firewall):
if not api.neutron.is_extension_supported(request,
'fwaasrouterinsertion'):
return False
return bool(firewall['router_ids'])
def get_rules_name(datum):
return ', '.join([rule.name or rule.id[:13]
for rule in datum.rules])
def get_routers_name(firewall):
if firewall.routers:
return ', '.join(router.name_or_id for router in firewall.routers)
def get_policy_name(datum):
if datum.policy:
return datum.policy.name or datum.policy.id
def get_policy_link(datum):
if datum.policy:
return reverse('horizon:project:firewalls:policydetails',
kwargs={'policy_id': datum.policy.id})
class RulesTable(tables.DataTable):
ACTION_DISPLAY_CHOICES = (
("Allow", pgettext_lazy("Action Name of a Firewall Rule", u"ALLOW")),
("Deny", pgettext_lazy("Action Name of a Firewall Rule", u"DENY")),
)
name = tables.Column("name_or_id",
verbose_name=_("Name"),
link="horizon:project:firewalls:ruledetails")
description = tables.Column('description', verbose_name=_('Description'))
protocol = tables.Column("protocol",
filters=(lambda v: filters.default(v, _("ANY")),
filters.upper,),
verbose_name=_("Protocol"))
source_ip_address = tables.Column("source_ip_address",
verbose_name=_("Source IP"))
source_port = tables.Column("source_port",
verbose_name=_("Source Port"))
destination_ip_address = tables.Column("destination_ip_address",
verbose_name=_("Destination IP"))
destination_port = tables.Column("destination_port",
verbose_name=_("Destination Port"))
action = tables.Column("action",
display_choices=ACTION_DISPLAY_CHOICES,
verbose_name=_("Action"))
shared = tables.Column("shared",
verbose_name=_("Shared"),
filters=(filters.yesno, filters.capfirst))
enabled = tables.Column("enabled",
verbose_name=_("Enabled"),
filters=(filters.yesno, filters.capfirst))
firewall_policy_id = tables.Column(get_policy_name,
link=get_policy_link,
verbose_name=_("In Policy"))
class Meta(object):
name = "rulestable"
verbose_name = _("Rules")
table_actions = (AddRuleLink, DeleteRuleLink)
row_actions = (UpdateRuleLink, DeleteRuleLink)
class PoliciesTable(tables.DataTable):
name = tables.Column("name_or_id",
verbose_name=_("Name"),
link="horizon:project:firewalls:policydetails")
description = tables.Column('description', verbose_name=_('Description'))
firewall_rules = tables.Column(get_rules_name,
verbose_name=_("Rules"))
shared = tables.Column("shared",
verbose_name=_("Shared"),
filters=(filters.yesno, filters.capfirst))
audited = tables.Column("audited",
verbose_name=_("Audited"),
filters=(filters.yesno, filters.capfirst))
class Meta(object):
name = "policiestable"
verbose_name = _("Policies")
table_actions = (AddPolicyLink, DeletePolicyLink)
row_actions = (UpdatePolicyLink, InsertRuleToPolicyLink,
RemoveRuleFromPolicyLink, DeletePolicyLink)
class FirewallsTable(tables.DataTable):
STATUS_DISPLAY_CHOICES = (
("Active", pgettext_lazy("Current status of a Firewall",
u"Active")),
("Down", pgettext_lazy("Current status of a Firewall",
u"Down")),
("Error", pgettext_lazy("Current status of a Firewall",
u"Error")),
("Created", pgettext_lazy("Current status of a Firewall",
u"Created")),
("Pending_Create", pgettext_lazy("Current status of a Firewall",
u"Pending Create")),
("Pending_Update", pgettext_lazy("Current status of a Firewall",
u"Pending Update")),
("Pending_Delete", pgettext_lazy("Current status of a Firewall",
u"Pending Delete")),
("Inactive", pgettext_lazy("Current status of a Firewall",
u"Inactive")),
)
ADMIN_STATE_DISPLAY_CHOICES = (
("UP", pgettext_lazy("Admin state of a Firewall", u"UP")),
("DOWN", pgettext_lazy("Admin state of a Firewall", u"DOWN")),
)
name = tables.Column("name_or_id",
verbose_name=_("Name"),
link="horizon:project:firewalls:firewalldetails")
description = tables.Column('description', verbose_name=_('Description'))
firewall_policy_id = tables.Column(get_policy_name,
link=get_policy_link,
verbose_name=_("Policy"))
router_ids = tables.Column(get_routers_name,
verbose_name=_("Associated Routers"))
status = tables.Column("status",
verbose_name=_("Status"),
display_choices=STATUS_DISPLAY_CHOICES)
admin_state = tables.Column("admin_state",
verbose_name=_("Admin State"),
display_choices=ADMIN_STATE_DISPLAY_CHOICES)
class Meta(object):
name = "firewallstable"
verbose_name = _("Firewalls")
table_actions = (AddFirewallLink, DeleteFirewallLink)
row_actions = (UpdateFirewallLink, DeleteFirewallLink,
AddRouterToFirewallLink, RemoveRouterFromFirewallLink)
def __init__(self, request, data=None, needs_form_wrapper=None, **kwargs):
super(FirewallsTable, self).__init__(
request, data=data,
needs_form_wrapper=needs_form_wrapper, **kwargs)
try:
if not api.neutron.is_extension_supported(request,
'fwaasrouterinsertion'):
del self.columns['router_ids']
except Exception as e:
msg = _('Failed to verify extension support %(reason)s') % {
'reason': e}
LOG.error(msg)
exceptions.handle(request, msg)
|
Athrun29/horizon
|
openstack_dashboard/dashboards/project/firewalls/tables.py
|
Python
|
apache-2.0
| 15,142 | 0 |
import os
import numpy as np
from scipy.optimize import curve_fit
def gauss(x, A, mu, sigma):
return A * np.exp(-(x - mu)**2 / (2. * sigma**2))
scriptmode = True
SDM_name = 'test' # The prefix to use for all output files
# SDM_name = '13A-213.sb20685305.eb20706999.56398.113012800924'
# Set up some useful variables (these will be altered later on)
msfile = SDM_name + '.ms'
hisplitms = SDM_name + '.hi.ms'
splitms = SDM_name + '.hi.src.split.ms'
contsubms = SDM_name + '.hi.src.split.ms.contsub'
rawcleanms = SDM_name + '.hi.src.split.ms.contsub.rawcleanimg'
cleanms = SDM_name + '.hi.src.split.ms.contsub.cleanimg'
pathname = os.environ.get('CASAPATH').split()[0]
pipepath = '/home/dcolombo/pipe_scripts/'
# pipepath = '/home/dario/pipe_scripts/'
source = 'SextansA'
# VOS stuff
vos_dir = '../vos/'
vos_proc = './'
vos_link = '../vos_link/'
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%%&%&%&%&%&%&%%&%
# Find the 21cm spw and check if the obs
# is single pointing or mosaic
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%%&%&%&%&%&%&%%&%
print "Find HI spw..."
# But first find the spw corresponding to it
tb.open(vos_dir + msfile + '/SPECTRAL_WINDOW')
freqs = tb.getcol('REF_FREQUENCY')
nchans = tb.getcol('NUM_CHAN')
tb.close()
spws = range(0, len(freqs))
# Select the 21cm
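# (The HI 21 cm line rests at 1.420405752 GHz, so a generous
# 1.40-1.43 GHz window catches it for nearby, low-velocity sources.)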
sel = np.where((freqs > 1.40 * 10**9) & (freqs < 1.43 * 10**9))
hispw = str(spws[sel[0][0]])
freq = freqs[sel[0][0]]
nchan = nchans[sel[0][0]]
print "Selected spw ", hispw, "with frequency ", freq, "and ", nchan, " channels"
print "Starting split the HI line"
# Mosaic or single pointing?
tb.open(vos_dir + msfile + '/FIELD')
names = tb.getcol('NAME')
tb.close()
moscount = 0
for name in names:
chsrc = name.find(source)
if chsrc != -1:
moscount = moscount + 1
# Remember the choice under a separate name: default('clean') later
# resets the global task parameter 'imagermode'.
if moscount > 1:
    sel_imagermode = "mosaic"
else:
    sel_imagermode = "csclean"
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# Split the corrected source data from the rest
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
print "Starting source split..."
os.system('rm -rf ' + vos_proc + splitms)
default('split')
vis = vos_dir + hisplitms
outputvis = vos_proc + splitms
field = source
spw = ''
datacolumn = 'corrected'
keepflags = False
split()
print "Created splitted-source .ms " + splitms
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# UV continuum subtraction
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# 1) Measure the mean visibility amplitude per
# channel with visstat, in order to locate the
# channels that carry line signal
print "Estimating channels with signal..."
real_amps = []
imag_amps = []
default('visstat')
vis = vos_proc + splitms
field = '0'
datacolumn = 'data'
selectdata = True
useflags = False
for nc in range(nchan):
spw = '0:' + str(nc)
axis = 'real'
pdata = visstat()
real_amps.append(pdata['DATA']['mean'])
axis = 'imag'
pdata = visstat()
imag_amps.append(pdata['DATA']['mean'])
real_amps = np.asarray(real_amps)
imag_amps = np.asarray(imag_amps)
amps = np.sqrt(real_amps**2 + imag_amps**2)
chans = np.arange(nchan) + 1
# Guessing parameters for fitting
A = max(amps)
mu = chans[amps.tolist().index(A)]
hm = chans[amps > A / 2]
sigma = float(hm[-1] - hm[0]) / 2.35
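# hm spans the channels above half maximum, so hm[-1] - hm[0] approximates
# the FWHM; 2.35 ~= 2*sqrt(2*ln(2)) converts a Gaussian FWHM to sigma.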
opar, _ = curve_fit(gauss, chans, amps, p0=[A, mu, sigma])
# Take mu +/- 3.5 sigma as the signal window; these channels are
# excluded from the continuum fit below (excludechans=True)
chan1 = int(mu - 3.5 * opar[2])
chan2 = int(mu + 3.5 * opar[2])
fitspws = str(chan1) + '~' + str(chan2)
print "Signal within channels " + fitspws
print "Starting contsub..."
# Run the routine
os.system('rm -rf ' + vos_proc + contsubms)
default('uvcontsub')
vis = vos_proc + splitms
fitspw = '0:' + fitspws
excludechans = True
solint = 0.0
fitorder = 0
fitmode = 'subtract'
splitdata = True
uvcontsub()
print "Created continum subtracted image" + contsubms
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# CLEANing
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
print "Starting CLEANing..."
os.system('rm -rf ' + vos_proc + rawcleanms + '*')
# First generate a 0-iterations
# image to estimate the noise level
# (threshold)
# Get max baseline and dish size
bline_max = au.getBaselineExtrema(vos_proc + splitms)[0]
tb.open(vos_proc + splitms + '/ANTENNA')
dishs = tb.getcol('DISH_DIAMETER')
dish_min = min(dishs)
tb.close()
# Find the beam
hi_lambda = 299792458.0 / (freq)
min_lambda = 299792458.0 / (min(freqs))
syn_beam = (hi_lambda / bline_max) * 180 / np.pi * 3600
prim_beam = (min_lambda / dish_min) * 180 / np.pi * 3600
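# theta ~ lambda / D radians; the factor 180 / pi * 3600 converts to
# arcsec. The synthesized beam uses the longest baseline, while the
# primary beam uses the smallest dish at the lowest observed frequency
# (the widest-field case).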
# Setting CLEANing parameters
sel_cell = str(round(syn_beam / 5)) + 'arcsec'
sel_imsize = int(round(prim_beam / (syn_beam / 5)))
# Pad sel_imsize by roughly one synthesized
# beam (dx ~ 5 pixels) to be safe
dx = int(round(syn_beam / prim_beam * sel_imsize))
sel_imsize = sel_imsize + 1 * dx
# The image size should be a product of powers
# of 2, 3 and 5 to work well with clean, so:
sel_imsize = sel_imsize - 1
pnum = 1 * sel_imsize
while pnum != 1:
sel_imsize = sel_imsize + 1
pnum = 1 * sel_imsize
while pnum % 2 == 0:
pnum = pnum / 2
while pnum % 3 == 0:
pnum = pnum / 3
while pnum % 5 == 0:
pnum = pnum / 5
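# e.g. a starting value of 649 walks up to 675 = 3^3 * 5^2, the next
# integer whose only prime factors are 2, 3 and 5.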
print "Image size:", sel_imsize
print "Cell size:", sel_cell
# First generate a 0-iterations
# image to estimate the noise level
# (threshold)
default('clean')
vis = vos_proc + contsubms
imagename = vos_proc + rawcleanms
cell = [sel_cell, sel_cell]
imsize = [sel_imsize, sel_imsize]
imagermode = sel_imagermode  # restore the choice reset by default('clean')
mode = "channel"
nchan = 4
start = chan1 - 5
width = 1
field = '0'
spw = '0'
interactive = False
pbcor = False
minpb = 0.25
restfreq = '1.420405752GHz'
niter = 0
clean()
print "Estimating sigma..."
default('imstat')
imagename = vos_proc + rawcleanms + '.image'
chans = '0~3'
rawclean_stat = imstat()
rms = rawclean_stat['sigma'][0] * 1000
rms = round(rms)
rms = str(int(rms)) + 'mJy'
print "Sigma=", rms, ". Now the real CLEANing..."
# Now run the real cleaning
os.system('rm -rf ' + vos_proc + cleanms + '*')
default('clean')
vis = vos_proc + contsubms
imagename = vos_proc + cleanms
cell = [sel_cell, sel_cell]
imsize = [sel_imsize, sel_imsize]
imagermode = sel_imagermode
mode = "channel"
start = chan1
nchan = chan2 - chan1
width = 1
field = ''
spw = ''
interactive = False
restfreq = '1.420405752GHz'
outframe = 'LSRK'
niter = 10000
threshold = rms
usescratch = True
clean()
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# Moment maps 0,1,2
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
default("immoments")
imagename = vos_proc + cleanms + '.image'
moments = [0, 1, 2]
outfile = vos_proc + cleanms
immoments()
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
# Convert everything to fits file
# %&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%&%
print "Exporting the image fits..."
default('exportfits')
imagename = vos_proc + cleanms + '.image'
fitsimage = vos_proc + source + '_21cm.fits'
velocity = True
optical = False
overwrite = True
dropstokes = True
exportfits()
print "Exporting moment maps..."
default('exportfits')
# Moment 0
imagename = vos_proc + cleanms + '.integrated'
fitsimage = vos_proc + source + '_21cm_mom0.fits'
velocity = True
optical = False
overwrite = True
dropstokes = True
exportfits()
default('exportfits')
# Moment 1
imagename = vos_proc + cleanms + '.weighted_coord'
fitsimage = vos_proc + source + '_21cm_mom1.fits'
velocity = True
optical = False
overwrite = True
dropstokes = True
exportfits()
default('exportfits')
# Moment 2
imagename = vos_proc + cleanms + '.weighted_dispersion_coord'
fitsimage = vos_proc + source + '_21cm_mom2.fits'
velocity = True
optical = False
overwrite = True
dropstokes = True
exportfits()
|
e-koch/canfar_scripts
|
img_pipe/casanfar_image.py
|
Python
|
mit
| 7,517 | 0.000133 |
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'SPOptionsIdPPolicy.enabled'
db.add_column('saml_spoptionsidppolicy', 'enabled', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False)
def backwards(self, orm):
# Deleting field 'SPOptionsIdPPolicy.enabled'
db.delete_column('saml_spoptionsidppolicy', 'enabled')
models = {
'attribute_aggregator.attributesource': {
'Meta': {'object_name': 'AttributeSource'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'idp.attributeitem': {
'Meta': {'object_name': 'AttributeItem'},
'attribute_name': ('django.db.models.fields.CharField', [], {'default': "('OpenLDAProotDSE', 'OpenLDAProotDSE')", 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:basic', 'SAMLv2 BASIC')", 'max_length': '100'}),
'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['attribute_aggregator.AttributeSource']", 'null': 'True', 'blank': 'True'})
},
'idp.attributelist': {
'Meta': {'object_name': 'AttributeList'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'attributes of the list'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['idp.AttributeItem']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'idp.attributepolicy': {
'Meta': {'object_name': 'AttributePolicy'},
'attribute_filter_for_sso_from_push_sources': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'filter attributes of push sources with list'", 'null': 'True', 'to': "orm['idp.AttributeList']"}),
'attribute_list_for_sso_from_pull_sources': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes from pull sources'", 'null': 'True', 'to': "orm['idp.AttributeList']"}),
'filter_source_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'forward_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'map_attributes_from_push_sources': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'map_attributes_of_filtered_attributes': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'output_name_format': ('django.db.models.fields.CharField', [], {'default': "('urn:oasis:names:tc:SAML:2.0:attrname-format:basic', 'SAMLv2 BASIC')", 'max_length': '100'}),
'output_namespace': ('django.db.models.fields.CharField', [], {'default': "('Default', 'Default')", 'max_length': '100'}),
'send_error_and_no_attrs_if_missing_required_attrs': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source_filter_for_sso_from_push_sources': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'filter attributes of push sources with sources'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['attribute_aggregator.AttributeSource']"})
},
'saml.authorizationattributemap': {
'Meta': {'object_name': 'AuthorizationAttributeMap'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
},
'saml.authorizationattributemapping': {
'Meta': {'object_name': 'AuthorizationAttributeMapping'},
'attribute_name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'attribute_value': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'attribute_value_format': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'map': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['saml.AuthorizationAttributeMap']"}),
'source_attribute_name': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'})
},
'saml.authorizationsppolicy': {
'Meta': {'object_name': 'AuthorizationSPPolicy'},
'attribute_map': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'authorization_attributes'", 'null': 'True', 'to': "orm['saml.AuthorizationAttributeMap']"}),
'default_denial_message': ('django.db.models.fields.CharField', [], {'default': "u'You are not authorized to access the service.'", 'max_length': '80'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'})
},
'saml.idpoptionssppolicy': {
'Meta': {'object_name': 'IdPOptionsSPPolicy'},
'allow_create': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'binding_for_sso_response': ('django.db.models.fields.CharField', [], {'default': "'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact'", 'max_length': '60'}),
'enable_binding_for_sso_response': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enable_http_method_for_defederation_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enable_http_method_for_slo_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'http_method_for_defederation_request': ('django.db.models.fields.IntegerField', [], {'default': '5', 'max_length': '60'}),
'http_method_for_slo_request': ('django.db.models.fields.IntegerField', [], {'default': '4', 'max_length': '60'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'no_nameid_policy': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'requested_name_id_format': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '20'}),
'transient_is_persistent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user_consent': ('django.db.models.fields.CharField', [], {'default': "'urn:oasis:names:tc:SAML:2.0:consent:current-implicit'", 'max_length': '60'}),
'want_authn_request_signed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'want_force_authn_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'want_is_passive_authn_request': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'saml.keyvalue': {
'Meta': {'object_name': 'KeyValue'},
'key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'value': ('authentic2.saml.fields.PickledObjectField', [], {})
},
'saml.libertyartifact': {
'Meta': {'object_name': 'LibertyArtifact'},
'artifact': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'content': ('django.db.models.fields.TextField', [], {}),
'creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'django_session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'provider_id': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'saml.libertyassertion': {
'Meta': {'object_name': 'LibertyAssertion'},
'assertion': ('django.db.models.fields.TextField', [], {}),
'assertion_id': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'provider_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'session_index': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'saml.libertyfederation': {
'Meta': {'unique_together': "(('name_id_qualifier', 'name_id_format', 'name_id_content', 'name_id_sp_name_qualifier'),)", 'object_name': 'LibertyFederation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'idp_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'name_id_content': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_id_format': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name_id_qualifier': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'name_id_sp_name_qualifier': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name_id_sp_provided_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'sp_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'saml.libertyidentitydump': {
'Meta': {'object_name': 'LibertyIdentityDump'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identity_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'saml.libertyidentityprovider': {
'Meta': {'object_name': 'LibertyIdentityProvider'},
'authorization_policy': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'authorization_policy'", 'null': 'True', 'to': "orm['saml.AuthorizationSPPolicy']"}),
'enable_following_authorization_policy': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enable_following_idp_options_policy': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'idp_options_policy': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'idp_options_policy'", 'null': 'True', 'to': "orm['saml.IdPOptionsSPPolicy']"}),
'liberty_provider': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'identity_provider'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['saml.LibertyProvider']"})
},
'saml.libertymanagedump': {
'Meta': {'object_name': 'LibertyManageDump'},
'django_session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'manage_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'saml.libertyprovider': {
'Meta': {'object_name': 'LibertyProvider'},
'ca_cert_chain': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'entity_id': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'entity_id_sha1': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'federation_source': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'metadata': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'protocol_conformance': ('django.db.models.fields.IntegerField', [], {'max_length': '10'}),
'public_key': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'ssl_certificate': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'saml.libertyproviderpolicy': {
'Meta': {'object_name': 'LibertyProviderPolicy'},
'authn_request_signature_check_hint': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'})
},
'saml.libertyserviceprovider': {
'Meta': {'object_name': 'LibertyServiceProvider'},
'attribute_policy': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['idp.AttributePolicy']", 'null': 'True', 'blank': 'True'}),
'enable_following_sp_options_policy': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'liberty_provider': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'service_provider'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['saml.LibertyProvider']"}),
'policy': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['saml.LibertyProviderPolicy']", 'null': 'True'}),
'sp_options_policy': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sp_options_policy'", 'null': 'True', 'to': "orm['saml.SPOptionsIdPPolicy']"})
},
'saml.libertysession': {
'Meta': {'object_name': 'LibertySession'},
'assertion': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['saml.LibertyAssertion']", 'null': 'True'}),
'creation': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'django_session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'federation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['saml.LibertyFederation']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_id_content': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name_id_format': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'name_id_qualifier': ('django.db.models.fields.CharField', [], {'max_length': '150', 'null': 'True'}),
'name_id_sp_name_qualifier': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True'}),
'provider_id': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'session_index': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'saml.libertysessiondump': {
'Meta': {'object_name': 'LibertySessionDump'},
'django_session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.IntegerField', [], {}),
'session_dump': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'saml.libertysessionsp': {
'Meta': {'object_name': 'LibertySessionSP'},
'django_session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'federation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['saml.LibertyFederation']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'session_index': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'saml.spoptionsidppolicy': {
'Meta': {'object_name': 'SPOptionsIdPPolicy'},
'accepted_name_id_format': ('authentic2.saml.fields.MultiSelectField', [], {'max_length': '31', 'blank': 'True'}),
'ask_user_consent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'authn_request_signed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'default_name_id_format': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '20'}),
'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'encrypt_assertion': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'encrypt_nameid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'idp_initiated_sso': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'prefered_assertion_consumer_binding': ('django.db.models.fields.CharField', [], {'default': "'meta'", 'max_length': '4'})
}
}
complete_apps = ['saml']
|
incuna/authentic
|
authentic2/saml/migrations/0010_auto__add_field_spoptionsidppolicy_enabled.py
|
Python
|
agpl-3.0
| 22,083 | 0.007472 |
from django.apps import AppConfig
class DataimportConfig(AppConfig):
name = "intranet.apps.dataimport"
|
tjcsl/ion
|
intranet/apps/dataimport/apps.py
|
Python
|
gpl-2.0
| 109 | 0 |
#!/usr/bin/env python
import numpy as np
from horton import *
# specify the even tempered basis set
alpha_low = 5e-3
alpha_high = 5e2
nbasis = 30
lnratio = (np.log(alpha_high) - np.log(alpha_low))/(nbasis-1)
# build a list of "contractions". These aren't real contractions as every
# contraction only contains one basis function.
bcs = []
for ibasis in xrange(nbasis):
    alpha = alpha_low * np.exp(ibasis * lnratio)
# arguments of GOBasisContraction:
# shell_type, list of exponents, list of contraction coefficients
bcs.append(GOBasisContraction(0, np.array([alpha]), np.array([1.0])))
# Finish setting up the basis set:
ba = GOBasisAtom(bcs)
obasis = get_gobasis(np.array([[0.0, 0.0, 0.0]]), np.array([3]), default=ba)
|
eustislab/horton
|
data/examples/hamiltonian/even_tempered_li.py
|
Python
|
gpl-3.0
| 725 | 0 |
"""SCons.Variables.PathVariable
This file defines an option type for SCons implementing path settings.
To be used whenever a user-specified path override should be allowed.
Arguments to PathVariable are:
option-name = name of this option on the command line (e.g. "prefix")
option-help = help string for option
option-dflt = default value for this option
validator = [optional] validator for option value. Predefined
validators are:
PathAccept -- accepts any path setting; no validation
PathIsDir -- path must be an existing directory
PathIsDirCreate -- path must be a dir; will create
PathIsFile -- path must be a file
PathExists -- path must exist (any type) [default]
The validator is a function that is called and which
should return True or False to indicate if the path
is valid. The arguments to the validator function
are: (key, val, env). The key is the name of the
option, the val is the path specified for the option,
and the env is the env to which the Otions have been
added.
Usage example:
Examples:
prefix=/usr/local
      opts = Variables()
opts.Add(PathVariable('qtdir',
'where the root of Qt is installed',
qtdir, PathIsDir))
opts.Add(PathVariable('qt_includes',
'where the Qt includes are installed',
'$qtdir/includes', PathIsDirCreate))
opts.Add(PathVariable('qt_libraries',
'where the Qt library is installed',
'$qtdir/lib'))
"""
#
# Copyright (c) 2001 - 2014 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Variables/PathVariable.py 2014/08/24 12:12:31 garyo"
__all__ = ['PathVariable',]
import os
import os.path
import SCons.Errors
class _PathVariableClass(object):
def PathAccept(self, key, val, env):
"""Accepts any path, no checking done."""
pass
def PathIsDir(self, key, val, env):
"""Validator to check if Path is a directory."""
if not os.path.isdir(val):
if os.path.isfile(val):
m = 'Directory path for option %s is a file: %s'
else:
m = 'Directory path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def PathIsDirCreate(self, key, val, env):
"""Validator to check if Path is a directory,
creating it if it does not exist."""
if os.path.isfile(val):
m = 'Path for option %s is a file, not a directory: %s'
raise SCons.Errors.UserError(m % (key, val))
if not os.path.isdir(val):
os.makedirs(val)
def PathIsFile(self, key, val, env):
"""validator to check if Path is a file"""
if not os.path.isfile(val):
if os.path.isdir(val):
m = 'File path for option %s is a directory: %s'
else:
m = 'File path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def PathExists(self, key, val, env):
"""validator to check if Path exists"""
if not os.path.exists(val):
m = 'Path for option %s does not exist: %s'
raise SCons.Errors.UserError(m % (key, val))
def __call__(self, key, help, default, validator=None):
        # NB: searchfunc is currently undocumented and unsupported
"""
The input parameters describe a 'path list' option, thus they
are returned with the correct converter and validator appended. The
result is usable for input to opts.Add() .
The 'default' option specifies the default path to use if the
user does not specify an override with this option.
validator is a validator, see this file for examples
"""
if validator is None:
validator = self.PathExists
if SCons.Util.is_List(key) or SCons.Util.is_Tuple(key):
return (key, '%s ( /path/to/%s )' % (help, key[0]), default,
validator, None)
else:
return (key, '%s ( /path/to/%s )' % (help, key), default,
validator, None)
PathVariable = _PathVariableClass()
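# A minimal usage sketch with a custom validator; the validator name and
# paths below are hypothetical, but any callable taking (key, val, env)
# and raising SCons.Errors.UserError on failure will work:
#
#     def path_is_executable(key, val, env):
#         if not os.access(val, os.X_OK):
#             raise SCons.Errors.UserError(
#                 'Path for option %s is not executable: %s' % (key, val))
#
#     opts = Variables()
#     opts.Add(PathVariable('cc', 'path to the C compiler',
#                           '/usr/bin/cc', path_is_executable))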
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
engineer0x47/SCONS
|
engine/SCons/Variables/PathVariable.py
|
Python
|
mit
| 5,616 | 0.00089 |
# ----------------------------------------------------------------------------
# Copyright (c) 2008 Andrew D. Straw and Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
# Based on pygxinput originally by Andrew D. Straw
# http://code.astraw.com/projects/motmot/wiki/pygxinput
import ctypes
import pyglet
from pyglet.window.xlib import xlib
import lib_xinput as xi
class XInputDevice:
def __init__(self, display, device_info):
self._x_display = display._display
self._device_id = device_info.id
self.name = device_info.name
self._open_device = None
# TODO: retrieve inputclassinfo from device_info and expose / save
# for valuator axes etc.
def open(self):
if self._open_device:
return
self._open_device = xi.XOpenDevice(self._x_display, self._device_id)
if not self._open_device:
raise Exception('Cannot open device')
def close(self):
if not self._open_device:
return
xi.XCloseDevice(self._x_display, self._open_device)
def attach(self, window):
assert window._x_display == self._x_display
return XInputDeviceInstance(self, window)
class XInputDeviceInstance(pyglet.event.EventDispatcher):
def __init__(self, device, window):
"""Create an opened instance of a device on the given window.
:Parameters:
`device` : XInputDevice
Device to open
`window` : Window
Window to open device on
"""
assert device._x_display == window._x_display
assert device._open_device
self.device = device
self.window = window
self._events = list()
try:
dispatcher = window.__xinput_window_event_dispatcher
except AttributeError:
dispatcher = window.__xinput_window_event_dispatcher = \
XInputWindowEventDispatcher()
dispatcher.add_instance(self)
device = device._open_device.contents
if not device.num_classes:
return
# Bind matching extended window events to bound instance methods
# on this object.
#
# This is inspired by test.c of xinput package by Frederic
# Lepied available at x.org.
#
# In C, this stuff is normally handled by the macro DeviceKeyPress and
# friends. Since we don't have access to those macros here, we do it
# this way.
for i in range(device.num_classes):
class_info = device.classes[i]
if class_info.input_class == xi.KeyClass:
self._add(class_info, xi._deviceKeyPress,
dispatcher._event_xinput_key_press)
self._add(class_info, xi._deviceKeyRelease,
dispatcher._event_xinput_key_release)
elif class_info.input_class == xi.ButtonClass:
self._add(class_info, xi._deviceButtonPress,
dispatcher._event_xinput_button_press)
self._add(class_info, xi._deviceButtonRelease,
dispatcher._event_xinput_button_release)
elif class_info.input_class == xi.ValuatorClass:
self._add(class_info, xi._deviceMotionNotify,
dispatcher._event_xinput_motion)
elif class_info.input_class == xi.ProximityClass:
self._add(class_info, xi._proximityIn,
dispatcher._event_xinput_proximity_in)
self._add(class_info, xi._proximityOut,
dispatcher._event_xinput_proximity_out)
elif class_info.input_class == xi.FeedbackClass:
pass
elif class_info.input_class == xi.FocusClass:
pass
elif class_info.input_class == xi.OtherClass:
pass
array = (xi.XEventClass * len(self._events))(*self._events)
xi.XSelectExtensionEvent(window._x_display,
window._window,
array,
len(array))
def _add(self, class_info, event, handler):
_type = class_info.event_type_base + event
_class = self.device._device_id << 8 | _type
self._events.append(_class)
self.window._event_handlers[_type] = handler
XInputDeviceInstance.register_event_type('on_button_press')
XInputDeviceInstance.register_event_type('on_button_release')
XInputDeviceInstance.register_event_type('on_motion')
XInputDeviceInstance.register_event_type('on_proximity_in')
XInputDeviceInstance.register_event_type('on_proximity_out')
class XInputWindowEventDispatcher:
def __init__(self):
self._instances = dict()
def add_instance(self, instance):
self._instances[instance.device._device_id] = instance
def remove_instance(self, instance):
del self._instances[instance.device._device_id]
def dispatch_instance_event(self, e, *args):
try:
instance = self._instances[e.deviceid]
except KeyError:
return
instance.dispatch_event(*args)
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_key_press(self, ev):
raise NotImplementedError('TODO')
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_key_release(self, ev):
raise NotImplementedError('TODO')
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_button_press(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XDeviceButtonEvent)).contents
self.dispatch_instance_event(e, 'on_button_press', e.button)
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_button_release(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XDeviceButtonEvent)).contents
self.dispatch_instance_event(e, 'on_button_release', e.button)
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_motion(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XDeviceMotionEvent)).contents
axis_data = list()
for i in range(e.axes_count):
axis_data.append(e.axis_data[i])
self.dispatch_instance_event(e, 'on_motion', axis_data, e.x, e.y)
@pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_proximity_in(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XProximityNotifyEvent)).contents
self.dispatch_instance_event(e, 'on_proximity_in')
    @pyglet.window.xlib.XlibEventHandler(0)
def _event_xinput_proximity_out(self, ev):
e = ctypes.cast(ctypes.byref(ev),
ctypes.POINTER(xi.XProximityNotifyEvent)).contents
self.dispatch_instance_event(e, 'on_proximity_out')
def _check_extension(display):
major_opcode = ctypes.c_int()
first_event = ctypes.c_int()
first_error = ctypes.c_int()
xlib.XQueryExtension(display._display, 'XInputExtension',
ctypes.byref(major_opcode),
ctypes.byref(first_event),
ctypes.byref(first_error))
if not major_opcode.value:
raise Exception('XInput extension not available')
def get_devices(display):
_check_extension(display)
devices = list()
count = ctypes.c_int(0)
device_list = xi.XListInputDevices(display._display, count)
for i in range(count.value):
device_info = device_list[i]
devices.append(XInputDevice(display, device_info))
return devices
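# A rough usage sketch; how the display and window are obtained varies
# across pyglet versions, so `display` and `window` are assumed to exist:
#
#     for device in get_devices(display):
#         device.open()
#         instance = device.attach(window)
#
#         @instance.event
#         def on_motion(axis_data, x, y):
#             print axis_data, x, y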
|
bitcraft/pyglet
|
contrib/experimental/input/xinput.py
|
Python
|
bsd-3-clause
| 9,260 | 0 |
# Author: Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Lars Buitinck <L.J.Buitinck@uva.nl>
# License: Simplified BSD
from sklearn.datasets import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn import metrics
from sklearn.cluster import KMeans, MiniBatchKMeans
import logging
from optparse import OptionParser
import sys
from time import time
import numpy as np
# Display progress logs on stdout
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
# parse commandline arguments
op = OptionParser()
op.add_option("--no-minibatch",
action="store_false", dest="minibatch", default=True,
help="Use ordinary k-means algorithm.")
print __doc__
op.print_help()
(opts, args) = op.parse_args()
if len(args) > 0:
op.error("this script takes no arguments.")
sys.exit(1)
###############################################################################
# Load some categories from the training set
categories = [
'alt.atheism',
'talk.religion.misc',
'comp.graphics',
'sci.space',
]
# Uncomment the following to do the analysis on all the categories
#categories = None
print "Loading 20 newsgroups dataset for categories:"
print categories
dataset = fetch_20newsgroups(subset='all', categories=categories,
shuffle=True, random_state=42)
print "%d documents" % len(dataset.data)
print "%d categories" % len(dataset.target_names)
print
labels = dataset.target
true_k = np.unique(labels).shape[0]
print "Extracting features from the training dataset using a sparse vectorizer"
t0 = time()
vectorizer = TfidfVectorizer(max_df=0.5, max_features=10000,
stop_words='english')
X = vectorizer.fit_transform(dataset.data)
print "done in %fs" % (time() - t0)
print "n_samples: %d, n_features: %d" % X.shape
print
###############################################################################
# Do the actual clustering
if opts.minibatch:
km = MiniBatchKMeans(n_clusters=true_k, init='k-means++', n_init=1,
init_size=1000,
batch_size=1000, verbose=1)
else:
km = KMeans(n_clusters=true_k, init='random', max_iter=100, n_init=1, verbose=1)
print "Clustering sparse data with %s" % km
t0 = time()
km.fit(X)
print "done in %0.3fs" % (time() - t0)
print
print "Homogeneity: %0.3f" % metrics.homogeneity_score(labels, km.labels_)
print "Completeness: %0.3f" % metrics.completeness_score(labels, km.labels_)
print "V-measure: %0.3f" % metrics.v_measure_score(labels, km.labels_)
print "Adjusted Rand-Index: %.3f" % \
metrics.adjusted_rand_score(labels, km.labels_)
print "Silhouette Coefficient: %0.3f" % metrics.silhouette_score(
    X, km.labels_, sample_size=1000)
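# Note: the silhouette is an internal metric of the k-means labels (not the
# ground truth); it is computed on a 1000-document sample because the
# underlying pairwise distance matrix grows as O(n^2).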
print
|
clemsos/mitras
|
tests/examples/kmeans.py
|
Python
|
mit
| 2,846 | 0.001054 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Any, Dict
from unittest.mock import Mock
def get_column_mock(params: Dict[str, Any]) -> Mock:
mock = Mock()
mock.id = params["id"]
mock.column_name = params["column_name"]
mock.verbose_name = params["verbose_name"]
mock.description = params["description"]
mock.expression = params["expression"]
mock.filterable = params["filterable"]
mock.groupby = params["groupby"]
mock.is_dttm = params["is_dttm"]
mock.type = params["type"]
return mock
def get_metric_mock(params: Dict[str, Any]) -> Mock:
mock = Mock()
mock.id = params["id"]
mock.metric_name = params["metric_name"]
mock.metric_name = params["verbose_name"]
mock.description = params["description"]
mock.expression = params["expression"]
mock.warning_text = params["warning_text"]
mock.d3format = params["d3format"]
return mock
def get_dataset_mock() -> Mock:
mock = Mock()
mock.id = None
mock.column_formats = {"ratio": ".2%"}
mock.database = {"id": 1}
mock.description = "Adding a DESCRip"
mock.default_endpoint = ""
mock.filter_select_enabled = True
mock.name = "birth_names"
mock.table_name = "birth_names"
mock.datasource_name = "birth_names"
mock.type = "table"
mock.schema = None
mock.offset = 66
mock.cache_timeout = 55
mock.sql = ""
mock.columns = [
get_column_mock(
{
"id": 504,
"column_name": "ds",
"verbose_name": "",
"description": None,
"expression": "",
"filterable": True,
"groupby": True,
"is_dttm": True,
"type": "DATETIME",
}
),
get_column_mock(
{
"id": 505,
"column_name": "gender",
"verbose_name": None,
"description": None,
"expression": "",
"filterable": True,
"groupby": True,
"is_dttm": False,
"type": "VARCHAR(16)",
}
),
get_column_mock(
{
"id": 506,
"column_name": "name",
"verbose_name": None,
"description": None,
"expression": None,
"filterable": True,
"groupby": True,
"is_dttm": None,
"type": "VARCHAR(255)",
}
),
get_column_mock(
{
"id": 508,
"column_name": "state",
"verbose_name": None,
"description": None,
"expression": None,
"filterable": True,
"groupby": True,
"is_dttm": None,
"type": "VARCHAR(10)",
}
),
get_column_mock(
{
"id": 509,
"column_name": "num_boys",
"verbose_name": None,
"description": None,
"expression": None,
"filterable": True,
"groupby": True,
"is_dttm": None,
"type": "BIGINT(20)",
}
),
get_column_mock(
{
"id": 510,
"column_name": "num_girls",
"verbose_name": None,
"description": None,
"expression": "",
"filterable": False,
"groupby": False,
"is_dttm": False,
"type": "BIGINT(20)",
}
),
get_column_mock(
{
"id": 532,
"column_name": "num",
"verbose_name": None,
"description": None,
"expression": None,
"filterable": True,
"groupby": True,
"is_dttm": None,
"type": "BIGINT(20)",
}
),
get_column_mock(
{
"id": 522,
"column_name": "num_california",
"verbose_name": None,
"description": None,
"expression": "CASE WHEN state = 'CA' THEN num ELSE 0 END",
"filterable": False,
"groupby": False,
"is_dttm": False,
"type": "NUMBER",
}
),
]
    mock.metrics = [
get_metric_mock(
{
"id": 824,
"metric_name": "sum__num",
"verbose_name": "Babies",
"description": "",
"expression": "SUM(num)",
"warning_text": "",
"d3format": "",
}
),
get_metric_mock(
{
"id": 836,
"metric_name": "count",
"verbose_name": "",
"description": None,
"expression": "count(1)",
"warning_text": None,
"d3format": None,
}
),
get_metric_mock(
{
"id": 843,
"metric_name": "ratio",
"verbose_name": "Ratio Boys/Girls",
"description": "This represents the ratio of boys/girls",
"expression": "sum(num_boys) / sum(num_girls)",
"warning_text": "no warning",
"d3format": ".2%",
}
),
    ]
return mock
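# A small usage sketch (hypothetical test code): the mock stands in for a
# real dataset object wherever only dataset metadata is read.
#
#     dataset = get_dataset_mock()
#     assert dataset.table_name == "birth_names"
#     assert dataset.columns[0].column_name == "ds"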
|
apache/incubator-superset
|
tests/unit_tests/fixtures/datasets.py
|
Python
|
apache-2.0
| 6,514 | 0 |
from celery.task import Task
import requests
class StracksFlushTask(Task):
def run(self, url, data):
requests.post(url + "/", data=data)
|
Stracksapp/stracks_api
|
stracks_api/tasks.py
|
Python
|
bsd-2-clause
| 152 | 0.013158 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# spaceClustering.py
#
# Copyright 2014 Carlos "casep" Sepulveda <carlos.sepulveda@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
# Performs basic clustering based on the size of the RF
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '../..','LIB'))
import rfestimationLib as rfe
import argparse # argument parsing
import numpy as np # Numpy
import densityPeaks as dp
import matplotlib
matplotlib.use('Agg')
from matplotlib import pyplot as plt
from sklearn import mixture
clustersColours = ['blue', 'red', 'green', 'orange', 'black','yellow', \
'#ff006f','#00e8ff','#fcfa00', '#ff0000', '#820c2c', \
'#ff006f', '#af00ff','#0200ff','#008dff','#00e8ff', \
'#0c820e','#28ea04','#ea8404','#c8628f','#6283ff', \
'#5b6756','#0c8248','k','#820cff','#932c11', \
'#002c11','#829ca7']
def main():
parser = argparse.ArgumentParser(prog='spaceClustering.py',
		description='Performs basic clustering based on the size of the RF',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--sourceFolder',
help='Source folder',
type=str, required=True)
parser.add_argument('--outputFolder',
help='Output folder',
type=str, required=True)
parser.add_argument('--percentage',
help='Percentage used to calculate the distance',
type=float, default='2', required=False)
parser.add_argument('--xSize',
help='X size of the stimuli',
type=int, default='31', required=False)
parser.add_argument('--ySize',
help='Y size of the stimuli',
type=int, default='31', required=False)
args = parser.parse_args()
#Source folder of the files with the timestamps
sourceFolder = rfe.fixPath(args.sourceFolder)
if not os.path.exists(sourceFolder):
print ''
print 'Source folder does not exists ' + sourceFolder
print ''
sys.exit()
#Output folder for the graphics
outputFolder = rfe.fixPath(args.outputFolder)
if not os.path.exists(outputFolder):
try:
os.makedirs(outputFolder)
except:
print ''
print 'Unable to create folder ' + outputFolder
print ''
sys.exit()
units = []
dataCluster = np.zeros((1,7))
for unitFile in sorted(os.listdir(sourceFolder)):
if os.path.isdir(sourceFolder+unitFile):
unitName = unitFile.rsplit('_', 1)[0]
fitResult = rfe.loadFitMatrix(sourceFolder,unitFile)
dataCluster = np.vstack((dataCluster,[fitResult[0][2],\
fitResult[0][3],fitResult[0][1],fitResult[0][4],\
fitResult[0][5],fitResult[0][2]*fitResult[0][3]*3,\
(fitResult[0][2]+fitResult[0][3])/2]))
units.append(unitName)
# remove the first row of zeroes
dataCluster = dataCluster[1:,:]
percentage = args.percentage #exploratory, '...for large data sets, the results of the analysis are robust with respect to the choice of d_c'
# Area instead o Radius
#clustersNumber, labels = dp.predict(dataCluster[:,0:2], percentage)
clustersNumber, labels = dp.predict(dataCluster[:,5:7], percentage)
gmix = mixture.GMM(n_components=clustersNumber, covariance_type='spherical')
gmix.fit(dataCluster[:,5:7])
labels = gmix.predict(dataCluster[:,5:7])
for clusterId in range(clustersNumber):
clusterFile = open(outputFolder+'cluster_'+str(clusterId)+'.csv', "w")
for unit in range(labels.size):
if labels[unit] == clusterId:
clusterFile.write(units[unit]+'\n')
		clusterFile.close()
xSize = args.xSize
ySize = args.ySize
# generate graphics of all ellipses
for clusterId in range(clustersNumber):
dataGrilla = np.zeros((1,7))
for unitId in range(dataCluster.shape[0]):
if labels[unitId] == clusterId:
datos=np.zeros((1,7))
datos[0]=dataCluster[unitId,:]
dataGrilla = np.append(dataGrilla,datos, axis=0)
## remove the first row of zeroes
dataGrilla = dataGrilla[1:,:]
rfe.graficaGrilla(dataGrilla, outputFolder+'Grilla_'+str(clusterId)+'.png', 0, clustersColours[clusterId], xSize, ySize)
return 0
if __name__ == '__main__':
main()
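# Illustrative command line (not from the source; folder names are made up):
#   python spaceClustering.py --sourceFolder ./fits/ --outputFolder ./clusters/ \
#       --percentage 2 --xSize 31 --ySize 31
# Each subfolder of --sourceFolder is expected to hold a fit matrix loadable
# by rfestimationLib.loadFitMatrix, as used in main() above.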
|
creyesp/RF_Estimation
|
Clustering/clustering/spaceClustering.py
|
Python
|
gpl-2.0
| 4,653 | 0.041694 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# Copyright: (c) 2014, Ahti Kitsik <ak@ahtik.com>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = """
---
module: lineinfile
author:
- Daniel Hokka Zakrissoni (@dhozac)
- Ahti Kitsik (@ahtik)
extends_documentation_fragment:
- files
- validate
short_description: Manage lines in text files
description:
- This module ensures a particular line is in a file, or replace an
existing line using a back-referenced regular expression.
- This is primarily useful when you want to change a single line in
a file only. See the M(replace) module if you want to change
multiple, similar lines or check M(blockinfile) if you want to insert/update/remove a block of lines in a file.
For other cases, see the M(copy) or M(template) modules.
version_added: "0.7"
options:
path:
description:
- The file to modify.
- Before 2.3 this option was only usable as I(dest), I(destfile) and I(name).
aliases: [ dest, destfile, name ]
required: true
regexp:
aliases: [ 'regex' ]
description:
- The regular expression to look for in every line of the file. For
C(state=present), the pattern to replace if found. Only the last line
found will be replaced. For C(state=absent), the pattern of the line(s)
to remove. Uses Python regular expressions.
See U(http://docs.python.org/2/library/re.html).
version_added: '1.7'
state:
description:
- Whether the line should be there or not.
choices: [ absent, present ]
default: present
line:
description:
- Required for C(state=present). The line to insert/replace into the
file. If C(backrefs) is set, may contain backreferences that will get
expanded with the C(regexp) capture groups if the regexp matches.
backrefs:
description:
- Used with C(state=present). If set, C(line) can contain backreferences
(both positional and named) that will get populated if the C(regexp)
matches. This flag changes the operation of the module slightly;
C(insertbefore) and C(insertafter) will be ignored, and if the C(regexp)
doesn't match anywhere in the file, the file will be left unchanged.
If the C(regexp) does match, the last matching line will be replaced by
the expanded line parameter.
type: bool
default: 'no'
version_added: "1.1"
insertafter:
description:
- Used with C(state=present). If specified, the line will be inserted
after the last match of specified regular expression.
        If the first match is required, use C(firstmatch=yes).
A special value is available; C(EOF) for inserting the line at the
end of the file.
If specified regular expression has no matches, EOF will be used instead.
If regular expressions are passed to both C(regexp) and C(insertafter), C(insertafter) is only honored if no match for C(regexp) is found.
May not be used with C(backrefs).
choices: [ EOF, '*regex*' ]
default: EOF
insertbefore:
description:
- Used with C(state=present). If specified, the line will be inserted
before the last match of specified regular expression.
        If the first match is required, use C(firstmatch=yes).
        A special value is available; C(BOF) for inserting the line at
the beginning of the file.
If specified regular expression has no matches, the line will be
inserted at the end of the file.
If regular expressions are passed to both C(regexp) and C(insertbefore), C(insertbefore) is only honored if no match for C(regexp) is found.
May not be used with C(backrefs).
choices: [ BOF, '*regex*' ]
version_added: "1.1"
create:
description:
- Used with C(state=present). If specified, the file will be created
if it does not already exist. By default it will fail if the file
is missing.
type: bool
default: 'no'
backup:
description:
- Create a backup file including the timestamp information so you can
get the original file back if you somehow clobbered it incorrectly.
type: bool
default: 'no'
firstmatch:
description:
      - Used with C(insertafter) or C(insertbefore). If set, C(insertafter) and C(insertbefore)
        work with the first line that matches the given regular expression.
type: bool
default: 'no'
version_added: "2.5"
others:
description:
- All arguments accepted by the M(file) module also work here.
notes:
- As of Ansible 2.3, the I(dest) option has been changed to I(path) as default, but I(dest) still works as well.
"""
EXAMPLES = r"""
# Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path'
- lineinfile:
path: /etc/selinux/config
regexp: '^SELINUX='
line: 'SELINUX=enforcing'
- lineinfile:
path: /etc/sudoers
state: absent
regexp: '^%wheel'
# Searches for a line that begins with 127.0.0.1 and replaces it with the value of the 'line' parameter
- lineinfile:
path: /etc/hosts
regexp: '^127\.0\.0\.1'
line: '127.0.0.1 localhost'
owner: root
group: root
mode: 0644
- lineinfile:
path: /etc/httpd/conf/httpd.conf
regexp: '^Listen '
insertafter: '^#Listen '
line: 'Listen 8080'
- lineinfile:
path: /etc/services
regexp: '^# port for http'
insertbefore: '^www.*80/tcp'
line: '# port for http by default'
# Add a line to a file if the file does not exist, without passing regexp
- lineinfile:
path: /tmp/testfile
line: '192.168.1.99 foo.lab.net foo'
create: yes
# Fully quoted because of the ': ' on the line. See the Gotchas in the YAML docs.
- lineinfile:
path: /etc/sudoers
state: present
regexp: '^%wheel\s'
line: '%wheel ALL=(ALL) NOPASSWD: ALL'
# Yaml requires escaping backslashes in double quotes but not in single quotes
- lineinfile:
path: /opt/jboss-as/bin/standalone.conf
regexp: '^(.*)Xms(\\d+)m(.*)$'
line: '\1Xms${xms}m\3'
backrefs: yes
# Validate the sudoers file before saving
- lineinfile:
path: /etc/sudoers
state: present
regexp: '^%ADMIN ALL='
line: '%ADMIN ALL=(ALL) NOPASSWD: ALL'
validate: '/usr/sbin/visudo -cf %s'
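# Illustrative example, not part of the original module documentation: with
# firstmatch enabled (added in 2.5) the line is inserted before the first
# line matching insertbefore instead of the last one.
- lineinfile:
    path: /etc/hosts
    line: '10.0.0.5 build.lab.net build'
    insertbefore: '^127\.'
    firstmatch: yes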
"""
import os
import re
import tempfile
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import b
from ansible.module_utils._text import to_bytes, to_native
def write_changes(module, b_lines, dest):
tmpfd, tmpfile = tempfile.mkstemp()
    with os.fdopen(tmpfd, 'wb') as f:
f.writelines(b_lines)
validate = module.params.get('validate', None)
valid = not validate
if validate:
if "%s" not in validate:
module.fail_json(msg="validate must contain %%s: %s" % (validate))
(rc, out, err) = module.run_command(to_bytes(validate % tmpfile, errors='surrogate_or_strict'))
valid = rc == 0
if rc != 0:
module.fail_json(msg='failed to validate: '
'rc:%s error:%s' % (rc, err))
if valid:
module.atomic_move(tmpfile,
to_native(os.path.realpath(to_bytes(dest, errors='surrogate_or_strict')), errors='surrogate_or_strict'),
unsafe_writes=module.params['unsafe_writes'])
def check_file_attrs(module, changed, message, diff):
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False, diff=diff):
if changed:
message += " and "
changed = True
message += "ownership, perms or SE linux context changed"
return message, changed
def present(module, dest, regexp, line, insertafter, insertbefore, create,
backup, backrefs, firstmatch):
diff = {'before': '',
'after': '',
'before_header': '%s (content)' % dest,
'after_header': '%s (content)' % dest}
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if not os.path.exists(b_dest):
if not create:
module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
b_destpath = os.path.dirname(b_dest)
if not os.path.exists(b_destpath) and not module.check_mode:
try:
os.makedirs(b_destpath)
except Exception as e:
module.fail_json(msg='Error creating %s Error code: %s Error description: %s' % (b_destpath, e[0], e[1]))
b_lines = []
else:
with open(b_dest, 'rb') as f:
b_lines = f.readlines()
if module._diff:
diff['before'] = to_native(b('').join(b_lines))
if regexp is not None:
bre_m = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))
if insertafter not in (None, 'BOF', 'EOF'):
bre_ins = re.compile(to_bytes(insertafter, errors='surrogate_or_strict'))
elif insertbefore not in (None, 'BOF'):
bre_ins = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict'))
else:
bre_ins = None
# index[0] is the line num where regexp has been found
    # index[1] is the line num where insertafter/insertbefore has been found
index = [-1, -1]
m = None
b_line = to_bytes(line, errors='surrogate_or_strict')
for lineno, b_cur_line in enumerate(b_lines):
if regexp is not None:
match_found = bre_m.search(b_cur_line)
else:
match_found = b_line == b_cur_line.rstrip(b('\r\n'))
if match_found:
index[0] = lineno
m = match_found
elif bre_ins is not None and bre_ins.search(b_cur_line):
if insertafter:
# + 1 for the next line
index[1] = lineno + 1
if firstmatch:
break
if insertbefore:
# index[1] for the previous line
index[1] = lineno
if firstmatch:
break
msg = ''
changed = False
b_linesep = to_bytes(os.linesep, errors='surrogate_or_strict')
# Regexp matched a line in the file
if index[0] != -1:
if backrefs:
b_new_line = m.expand(b_line)
else:
# Don't do backref expansion if not asked.
b_new_line = b_line
if not b_new_line.endswith(b_linesep):
b_new_line += b_linesep
# If no regexp was given and a line match is found anywhere in the file,
# insert the line appropriately if using insertbefore or insertafter
if regexp is None and m:
# Insert lines
if insertafter and insertafter != 'EOF':
# Ensure there is a line separator after the found string
# at the end of the file.
if b_lines and not b_lines[-1][-1:] in (b('\n'), b('\r')):
b_lines[-1] = b_lines[-1] + b_linesep
# If the line to insert after is at the end of the file
# use the appropriate index value.
if len(b_lines) == index[1]:
if b_lines[index[1] - 1].rstrip(b('\r\n')) != b_line:
b_lines.append(b_line + b_linesep)
msg = 'line added'
changed = True
elif b_lines[index[1]].rstrip(b('\r\n')) != b_line:
b_lines.insert(index[1], b_line + b_linesep)
msg = 'line added'
changed = True
elif insertbefore and insertbefore != 'BOF':
# If the line to insert before is at the beginning of the file
# use the appropriate index value.
if index[1] == 0:
if b_lines[index[1]].rstrip(b('\r\n')) != b_line:
b_lines.insert(index[1], b_line + b_linesep)
                    msg = 'line added'
changed = True
elif b_lines[index[1] - 1].rstrip(b('\r\n')) != b_line:
b_lines.insert(index[1], b_line + b_linesep)
                msg = 'line added'
changed = True
elif b_lines[index[0]] != b_new_line:
b_lines[index[0]] = b_new_line
msg = 'line replaced'
changed = True
elif backrefs:
# Do absolutely nothing, since it's not safe generating the line
# without the regexp matching to populate the backrefs.
pass
# Add it to the beginning of the file
elif insertbefore == 'BOF' or insertafter == 'BOF':
b_lines.insert(0, b_line + b_linesep)
msg = 'line added'
changed = True
# Add it to the end of the file if requested or
# if insertafter/insertbefore didn't match anything
# (so default behaviour is to add at the end)
elif insertafter == 'EOF' or index[1] == -1:
# If the file is not empty then ensure there's a newline before the added line
if b_lines and not b_lines[-1][-1:] in (b('\n'), b('\r')):
b_lines.append(b_linesep)
b_lines.append(b_line + b_linesep)
msg = 'line added'
changed = True
# insert matched, but not the regexp
else:
b_lines.insert(index[1], b_line + b_linesep)
msg = 'line added'
changed = True
if module._diff:
diff['after'] = to_native(b('').join(b_lines))
backupdest = ""
if changed and not module.check_mode:
if backup and os.path.exists(b_dest):
backupdest = module.backup_local(dest)
write_changes(module, b_lines, dest)
if module.check_mode and not os.path.exists(b_dest):
module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=diff)
attr_diff = {}
msg, changed = check_file_attrs(module, changed, msg, attr_diff)
attr_diff['before_header'] = '%s (file attributes)' % dest
attr_diff['after_header'] = '%s (file attributes)' % dest
difflist = [diff, attr_diff]
module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)
def absent(module, dest, regexp, line, backup):
b_dest = to_bytes(dest, errors='surrogate_or_strict')
if not os.path.exists(b_dest):
module.exit_json(changed=False, msg="file not present")
msg = ''
diff = {'before': '',
'after': '',
'before_header': '%s (content)' % dest,
'after_header': '%s (content)' % dest}
with open(b_dest, 'rb') as f:
b_lines = f.readlines()
if module._diff:
diff['before'] = to_native(b('').join(b_lines))
if regexp is not None:
bre_c = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))
found = []
b_line = to_bytes(line, errors='surrogate_or_strict')
def matcher(b_cur_line):
if regexp is not None:
match_found = bre_c.search(b_cur_line)
else:
match_found = b_line == b_cur_line.rstrip(b('\r\n'))
if match_found:
found.append(b_cur_line)
return not match_found
b_lines = [l for l in b_lines if matcher(l)]
changed = len(found) > 0
if module._diff:
diff['after'] = to_native(b('').join(b_lines))
backupdest = ""
if changed and not module.check_mode:
if backup:
backupdest = module.backup_local(dest)
write_changes(module, b_lines, dest)
if changed:
msg = "%s line(s) removed" % len(found)
attr_diff = {}
msg, changed = check_file_attrs(module, changed, msg, attr_diff)
attr_diff['before_header'] = '%s (file attributes)' % dest
attr_diff['after_header'] = '%s (file attributes)' % dest
difflist = [diff, attr_diff]
module.exit_json(changed=changed, found=len(found), msg=msg, backup=backupdest, diff=difflist)
def main():
module = AnsibleModule(
argument_spec=dict(
path=dict(type='path', required=True, aliases=['dest', 'destfile', 'name']),
state=dict(type='str', default='present', choices=['absent', 'present']),
regexp=dict(type='str', aliases=['regex']),
line=dict(type='str', aliases=['value']),
insertafter=dict(type='str'),
insertbefore=dict(type='str'),
backrefs=dict(type='bool', default=False),
create=dict(type='bool', default=False),
backup=dict(type='bool', default=False),
firstmatch=dict(default=False, type='bool'),
validate=dict(type='str'),
),
mutually_exclusive=[['insertbefore', 'insertafter']],
add_file_common_args=True,
supports_check_mode=True,
)
params = module.params
create = params['create']
backup = params['backup']
backrefs = params['backrefs']
path = params['path']
firstmatch = params['firstmatch']
regexp = params['regexp']
line = params['line']
if regexp == '':
module.warn(
"The regular expression is an empty string, which will match every line in the file. "
"This may have unintended consequences, such as replacing the last line in the file rather than appending. "
"If this is desired, use '^' to match every line in the file and avoid this warning.")
b_path = to_bytes(path, errors='surrogate_or_strict')
if os.path.isdir(b_path):
module.fail_json(rc=256, msg='Path %s is a directory !' % path)
if params['state'] == 'present':
if backrefs and regexp is None:
module.fail_json(msg='regexp is required with backrefs=true')
if line is None:
module.fail_json(msg='line is required with state=present')
# Deal with the insertafter default value manually, to avoid errors
# because of the mutually_exclusive mechanism.
ins_bef, ins_aft = params['insertbefore'], params['insertafter']
if ins_bef is None and ins_aft is None:
ins_aft = 'EOF'
present(module, path, regexp, line,
ins_aft, ins_bef, create, backup, backrefs, firstmatch)
else:
if regexp is None and line is None:
module.fail_json(msg='one of line or regexp is required with state=absent')
absent(module, path, regexp, line, backup)
if __name__ == '__main__':
main()
|
jimi-c/ansible
|
lib/ansible/modules/files/lineinfile.py
|
Python
|
gpl-3.0
| 18,737 | 0.001868 |
import re
from django.conf import settings
from django.utils.html import escape
from django.utils.encoding import smart_str
from urlobject import URLObject
from urlobject.query_string import QueryString
from django.template import StringOrigin
from django.template.base import Lexer, Parser
from django.template.defaulttags import kwarg_re
from django.template import Template, Library, Node, TemplateSyntaxError
try:
from builtins import str
except ImportError:
str = unicode
register = Library()
TRUE_RE = re.compile(r"^(true|on)$", flags=re.IGNORECASE)
TEMPLATE_DEBUG = getattr(settings, "TEMPLATE_DEBUG", False)
def convert_to_boolean(string_or_boolean):
if isinstance(string_or_boolean, bool):
return string_or_boolean
if isinstance(string_or_boolean, str):
return bool(TRUE_RE.match(string_or_boolean))
class SpurlURLBuilder(object):
def __init__(self, args, context, tags, filters):
self.args = args
self.context = context
self.tags = tags
self.filters = filters
self.autoescape = self.context.autoescape
self.url = URLObject()
def build(self):
for argument, value in self.args:
self.handle_argument(argument, value)
try:
self.set_sensible_defaults()
            url = str(self.url)  # py2/py3-compatible str imported above
if self.autoescape:
url = escape(url)
url = url.replace("%20", "+")
url = url.replace("%2C", ",")
url = url.replace("&", "&")
except Exception as e:
url = self.url
return url
def handle_argument(self, argument, value):
argument = smart_str(argument, "ascii")
handler_name = "handle_%s" % argument
handler = getattr(self, handler_name, None)
if handler is not None:
value = value.resolve(self.context)
handler(value)
def handle_base(self, value):
base = self.prepare_value(value)
self.url = URLObject(base)
def handle_secure(self, value):
is_secure = convert_to_boolean(value)
scheme = "https" if is_secure else "http"
self.url = self.url.with_scheme(scheme)
def handle_query(self, value):
query = self.prepare_value(value)
if isinstance(query, dict):
query = QueryString().set_params(**query)
self.url = self.url.with_query(QueryString(query))
def handle_query_from(self, value):
url = URLObject(value)
self.url = self.url.with_query(url.query)
def handle_add_query(self, value):
query_to_add = self.prepare_value(value)
if isinstance(query_to_add, str):
query_to_add = QueryString(query_to_add).dict
self.url = self.url.add_query_params(**query_to_add)
def handle_add_query_from(self, value):
url = URLObject(value)
self.url = self.url.add_query_params(**url.query.dict)
def handle_set_query(self, value):
query_to_set = self.prepare_value(value)
if isinstance(query_to_set, str):
query_to_set = QueryString(query_to_set).dict
self.url = self.url.set_query_params(**query_to_set)
def handle_active_query(self, value):
query_to_toggle = self.prepare_value(value)
if isinstance(query_to_toggle, str):
query_to_toggle = QueryString(query_to_toggle).dict
current_query = self.url.query.dict
for key, value in query_to_toggle.items():
if key in current_query and value in current_query[key]:
self.url = True
else:
self.url = False
def handle_set_query_from(self, value):
url = URLObject(value)
self.url = self.url.set_query_params(**url.query.dict)
def handle_remove_query_param(self, value):
self.url = self.url.del_query_param(value)
def handle_toggle_query(self, value):
query_to_toggle = self.prepare_value(value)
if isinstance(query_to_toggle, str):
query_to_toggle = QueryString(query_to_toggle).dict
current_query = self.url.query.dict
for key, value in query_to_toggle.items():
if isinstance(value, str):
value = value.split(",")
first, second = value
if key in current_query and first in current_query[key]:
self.url = self.url.set_query_param(key, second)
else:
self.url = self.url.set_query_param(key, first)
def handle_trigger_query(self, value):
query_to_trigger = self.prepare_value(value)
if isinstance(query_to_trigger, str):
query_to_trigger = QueryString(query_to_trigger).dict
current_query = self.url.query.dict
for key, value in query_to_trigger.items():
if isinstance(value, str):
value = value
if key in current_query and value in current_query[key]:
# unset
self.url = self.url.del_query_param(key)
else:
# set
self.url = self.url.set_query_param(key, value)
def handle_trigger_mquery(self, value):
query_to_trigger = self.prepare_value(value)
if isinstance(query_to_trigger, str):
query_to_trigger = QueryString(query_to_trigger).dict
current_query = self.url.query.dict
for key, value in query_to_trigger.items():
# exact match of query -> unset it
if key in current_query and query_to_trigger[key] == current_query[key]:
self.url = self.url.del_query_param(key)
return
# check if current query has multiple items
try:
ext = current_query[key]
ext = ext.split(",")
except Exception as e:
ext = None
if ext and len(ext) > 1:
if key in current_query and value in ext:
# we have a key-match, so remove it from the string
ext = [x for x in ext if x != value]
else:
# no key match, so add it to the string
ext.append(value)
ext.sort()
self.url = self.url.set_query_param(key, ",".join(ext))
elif ext and len(ext) == 1:
# param already here > append
ext.append(value)
ext.sort()
ext = list(set(ext))
self.url = self.url.set_query_param(key, ",".join(ext))
else:
if isinstance(value, str):
value = value
if key in current_query and value in current_query[key]:
# unset
pass
# self.url = self.url.del_query_param(key)
else:
# set
self.url = self.url.set_query_param(key, value)
def handle_active_mquery(self, value):
active = None
query_to_trigger = self.prepare_value(value)
if isinstance(query_to_trigger, str):
query_to_trigger = QueryString(query_to_trigger).dict
current_query = self.url.query.dict
for key, value in query_to_trigger.items():
# exact match of query -> unset it
if key in current_query and query_to_trigger[key] == current_query[key]:
active = True
# check if current query has multiple items
try:
ext = current_query[key]
ext = ext.split(",")
except Exception as e:
ext = None
if ext and len(ext) > 1:
if key in current_query and value in ext:
active = True
self.url = active
def handle_scheme(self, value):
self.url = self.url.with_scheme(value)
def handle_scheme_from(self, value):
url = URLObject(value)
self.url = self.url.with_scheme(url.scheme)
def handle_host(self, value):
host = self.prepare_value(value)
self.url = self.url.with_hostname(host)
def handle_host_from(self, value):
url = URLObject(value)
self.url = self.url.with_hostname(url.hostname)
def handle_path(self, value):
path = self.prepare_value(value)
self.url = self.url.with_path(path)
def handle_path_from(self, value):
url = URLObject(value)
self.url = self.url.with_path(url.path)
def handle_add_path(self, value):
path_to_add = self.prepare_value(value)
self.url = self.url.add_path(path_to_add)
def handle_add_path_from(self, value):
url = URLObject(value)
path_to_add = url.path
if path_to_add.startswith("/"):
path_to_add = path_to_add[1:]
self.url = self.url.add_path(path_to_add)
def handle_fragment(self, value):
fragment = self.prepare_value(value)
self.url = self.url.with_fragment(fragment)
def handle_fragment_from(self, value):
url = URLObject(value)
self.url = self.url.with_fragment(url.fragment)
def handle_port(self, value):
self.url = self.url.with_port(int(value))
def handle_port_from(self, value):
url = URLObject(value)
self.url = self.url.with_port(url.port)
def handle_autoescape(self, value):
self.autoescape = convert_to_boolean(value)
def set_sensible_defaults(self):
if self.url.hostname and not self.url.scheme:
self.url = self.url.with_scheme("http")
def prepare_value(self, value):
"""Prepare a value by unescaping embedded template tags
and rendering through Django's template system"""
if isinstance(value, str):
value = self.unescape_tags(value)
value = self.render_template(value)
return value
def unescape_tags(self, template_string):
"""Spurl allows the use of templatetags inside templatetags, if
the inner templatetags are escaped - {\% and %\}"""
return template_string.replace("{\%", "{%").replace("%\}", "%}")
def compile_string(self, template_string, origin):
"""Re-implementation of django.template.base.compile_string
that takes into account the tags and filter of the parser
that rendered the parent template"""
if TEMPLATE_DEBUG:
from django.template.debug import DebugLexer, DebugParser
lexer_class, parser_class = DebugLexer, DebugParser
else:
lexer_class, parser_class = Lexer, Parser
# TODO: investigate. in django 1.9 `Lexer` only takes one argument
try:
lexer = lexer_class(template_string, origin)
except TypeError:
lexer = lexer_class(template_string)
parser = parser_class(lexer.tokenize())
# Attach the tags and filters from the parent parser
parser.tags = self.tags
parser.filters = self.filters
return parser.parse()
def render_template(self, template_string):
"""Used to render an "inner" template, ie one which
is passed as an argument to spurl"""
original_autoescape = self.context.autoescape
self.context.autoescape = False
template = Template("")
if TEMPLATE_DEBUG:
origin = StringOrigin(template_string)
else:
origin = None
template.nodelist = self.compile_string(template_string, origin)
rendered = template.render(self.context)
self.context.autoescape = original_autoescape
return rendered
class SpurlNode(Node):
def __init__(self, args, tags, filters, asvar=None):
self.args = args
self.asvar = asvar
self.tags = tags
self.filters = filters
def render(self, context):
builder = SpurlURLBuilder(self.args, context, self.tags, self.filters)
url = builder.build()
if self.asvar:
context[self.asvar] = url
return ""
return url
@register.tag
def spurl(parser, token):
bits = token.split_contents()
if len(bits) < 2:
raise TemplateSyntaxError("'spurl' takes at least one argument")
args = []
asvar = None
bits = bits[1:]
if len(bits) >= 2 and bits[-2] == "as":
asvar = bits[-1]
bits = bits[:-2]
for bit in bits:
name, value = kwarg_re.match(bit).groups()
if not (name and value):
raise TemplateSyntaxError("Malformed arguments to spurl tag")
args.append((name, parser.compile_filter(value)))
return SpurlNode(args, parser.tags, parser.filters, asvar)
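# Illustrative template usage (a sketch: assumes the app providing this tag is
# installed and loaded with {% load spurl %}; all values below are made up):
#   {% spurl base="http://example.com/search" add_query="page=2" as next_url %}
#   {% spurl base=request.get_full_path toggle_query="sort=asc,desc" %}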
|
hzlf/openbroadcast.org
|
website/tools/spurl/templatetags/spurl.py
|
Python
|
gpl-3.0
| 12,763 | 0.00047 |
from sympy import AccumBounds, Symbol, floor, nan, oo, E, symbols, ceiling, pi, \
Rational, Float, I, sin, exp, log, factorial, frac
from sympy.utilities.pytest import XFAIL
x = Symbol('x')
i = Symbol('i', imaginary=True)
y = Symbol('y', real=True)
k, n = symbols('k,n', integer=True)
def test_floor():
assert floor(nan) == nan
assert floor(oo) == oo
assert floor(-oo) == -oo
assert floor(0) == 0
assert floor(1) == 1
assert floor(-1) == -1
assert floor(E) == 2
assert floor(-E) == -3
assert floor(2*E) == 5
assert floor(-2*E) == -6
assert floor(pi) == 3
assert floor(-pi) == -4
assert floor(Rational(1, 2)) == 0
assert floor(-Rational(1, 2)) == -1
assert floor(Rational(7, 3)) == 2
assert floor(-Rational(7, 3)) == -3
assert floor(Float(17.0)) == 17
assert floor(-Float(17.0)) == -17
assert floor(Float(7.69)) == 7
assert floor(-Float(7.69)) == -8
assert floor(I) == I
assert floor(-I) == -I
e = floor(i)
assert e.func is floor and e.args[0] == i
assert floor(oo*I) == oo*I
assert floor(-oo*I) == -oo*I
assert floor(2*I) == 2*I
assert floor(-2*I) == -2*I
assert floor(I/2) == 0
assert floor(-I/2) == -I
assert floor(E + 17) == 19
assert floor(pi + 2) == 5
assert floor(E + pi) == floor(E + pi)
assert floor(I + pi) == floor(I + pi)
assert floor(floor(pi)) == 3
assert floor(floor(y)) == floor(y)
assert floor(floor(x)) == floor(floor(x))
assert floor(x) == floor(x)
assert floor(2*x) == floor(2*x)
assert floor(k*x) == floor(k*x)
assert floor(k) == k
assert floor(2*k) == 2*k
assert floor(k*n) == k*n
assert floor(k/2) == floor(k/2)
assert floor(x + y) == floor(x + y)
assert floor(x + 3) == floor(x + 3)
assert floor(x + k) == floor(x + k)
assert floor(y + 3) == floor(y) + 3
assert floor(y + k) == floor(y) + k
assert floor(3 + I*y + pi) == 6 + floor(y)*I
assert floor(k + n) == k + n
assert floor(x*I) == floor(x*I)
assert floor(k*I) == k*I
assert floor(Rational(23, 10) - E*I) == 2 - 3*I
assert floor(sin(1)) == 0
assert floor(sin(-1)) == -1
assert floor(exp(2)) == 7
assert floor(log(8)/log(2)) != 2
assert int(floor(log(8)/log(2)).evalf(chop=True)) == 3
assert floor(factorial(50)/exp(1)) == \
11188719610782480504630258070757734324011354208865721592720336800
assert (floor(y) <= y) == True
assert (floor(y) > y) == False
assert (floor(x) <= x).is_Relational # x could be non-real
assert (floor(x) > x).is_Relational
assert (floor(x) <= y).is_Relational # arg is not same as rhs
assert (floor(x) > y).is_Relational
def test_ceiling():
assert ceiling(nan) == nan
assert ceiling(oo) == oo
assert ceiling(-oo) == -oo
assert ceiling(0) == 0
assert ceiling(1) == 1
assert ceiling(-1) == -1
assert ceiling(E) == 3
assert ceiling(-E) == -2
assert ceiling(2*E) == 6
assert ceiling(-2*E) == -5
assert ceiling(pi) == 4
assert ceiling(-pi) == -3
assert ceiling(Rational(1, 2)) == 1
assert ceiling(-Rational(1, 2)) == 0
assert ceiling(Rational(7, 3)) == 3
assert ceiling(-Rational(7, 3)) == -2
assert ceiling(Float(17.0)) == 17
assert ceiling(-Float(17.0)) == -17
assert ceiling(Float(7.69)) == 8
assert ceiling(-Float(7.69)) == -7
assert ceiling(I) == I
assert ceiling(-I) == -I
e = ceiling(i)
assert e.func is ceiling and e.args[0] == i
assert ceiling(oo*I) == oo*I
assert ceiling(-oo*I) == -oo*I
assert ceiling(2*I) == 2*I
assert ceiling(-2*I) == -2*I
assert ceiling(I/2) == I
assert ceiling(-I/2) == 0
assert ceiling(E + 17) == 20
assert ceiling(pi + 2) == 6
assert ceiling(E + pi) == ceiling(E + pi)
assert ceiling(I + pi) == ceiling(I + pi)
assert ceiling(ceiling(pi)) == 4
assert ceiling(ceiling(y)) == ceiling(y)
assert ceiling(ceiling(x)) == ceiling(ceiling(x))
assert ceiling(x) == ceiling(x)
assert ceiling(2*x) == ceiling(2*x)
assert ceiling(k*x) == ceiling(k*x)
assert ceiling(k) == k
assert ceiling(2*k) == 2*k
assert ceiling(k*n) == k*n
assert ceiling(k/2) == ceiling(k/2)
assert ceiling(x + y) == ceiling(x + y)
assert ceiling(x + 3) == ceiling(x + 3)
assert ceiling(x + k) == ceiling(x + k)
assert ceiling(y + 3) == ceiling(y) + 3
assert ceiling(y + k) == ceiling(y) + k
assert ceiling(3 + pi + y*I) == 7 + ceiling(y)*I
assert ceiling(k + n) == k + n
assert ceiling(x*I) == ceiling(x*I)
assert ceiling(k*I) == k*I
assert ceiling(Rational(23, 10) - E*I) == 3 - 2*I
assert ceiling(sin(1)) == 1
assert ceiling(sin(-1)) == 0
assert ceiling(exp(2)) == 8
assert ceiling(-log(8)/log(2)) != -2
assert int(ceiling(-log(8)/log(2)).evalf(chop=True)) == -3
assert ceiling(factorial(50)/exp(1)) == \
11188719610782480504630258070757734324011354208865721592720336801
assert (ceiling(y) >= y) == True
assert (ceiling(y) < y) == False
assert (ceiling(x) >= x).is_Relational # x could be non-real
assert (ceiling(x) < x).is_Relational
assert (ceiling(x) >= y).is_Relational # arg is not same as rhs
assert (ceiling(x) < y).is_Relational
def test_frac():
assert isinstance(frac(x), frac)
assert frac(oo) == AccumBounds(0, 1)
assert frac(-oo) == AccumBounds(0, 1)
assert frac(n) == 0
assert frac(nan) == nan
assert frac(Rational(4, 3)) == Rational(1, 3)
assert frac(-Rational(4, 3)) == Rational(2, 3)
r = Symbol('r', real=True)
assert frac(I*r) == I*frac(r)
assert frac(1 + I*r) == I*frac(r)
assert frac(0.5 + I*r) == 0.5 + I*frac(r)
assert frac(n + I*r) == I*frac(r)
assert frac(n + I*k) == 0
assert frac(x + I*x) == frac(x + I*x)
assert frac(x + I*n) == frac(x)
assert frac(x).rewrite(floor) == x - floor(x)
def test_series():
x, y = symbols('x,y')
assert floor(x).nseries(x, y, 100) == floor(y)
assert ceiling(x).nseries(x, y, 100) == ceiling(y)
assert floor(x).nseries(x, pi, 100) == 3
assert ceiling(x).nseries(x, pi, 100) == 4
assert floor(x).nseries(x, 0, 100) == 0
assert ceiling(x).nseries(x, 0, 100) == 1
assert floor(-x).nseries(x, 0, 100) == -1
assert ceiling(-x).nseries(x, 0, 100) == 0
@XFAIL
def test_issue_4149():
assert floor(3 + pi*I + y*I) == 3 + floor(pi + y)*I
assert floor(3*I + pi*I + y*I) == floor(3 + pi + y)*I
assert floor(3 + E + pi*I + y*I) == 5 + floor(pi + y)*I
def test_issue_11207():
assert floor(floor(x)) == floor(x)
assert floor(ceiling(x)) == ceiling(x)
assert ceiling(floor(x)) == floor(x)
assert ceiling(ceiling(x)) == ceiling(x)
|
drufat/sympy
|
sympy/functions/elementary/tests/test_integers.py
|
Python
|
bsd-3-clause
| 6,826 | 0.001025 |
# -*- coding: utf-8 -*-
#################################################################################################
import json
import xbmc
import xbmcgui
import clientinfo
import downloadutils
import embydb_functions as embydb
import playbackutils as pbutils
import utils
#################################################################################################
class KodiMonitor(xbmc.Monitor):
def __init__(self):
self.clientInfo = clientinfo.ClientInfo()
self.addonName = self.clientInfo.getAddonName()
self.doUtils = downloadutils.DownloadUtils()
self.logMsg("Kodi monitor started.", 1)
def logMsg(self, msg, lvl=1):
self.className = self.__class__.__name__
utils.logMsg("%s %s" % (self.addonName, self.className), msg, lvl)
def onScanStarted(self, library):
self.logMsg("Kodi library scan %s running." % library, 2)
if library == "video":
utils.window('emby_kodiScan', value="true")
def onScanFinished(self, library):
self.logMsg("Kodi library scan %s finished." % library, 2)
if library == "video":
utils.window('emby_kodiScan', clear=True)
def onSettingsChanged(self):
# Monitor emby settings
# Review reset setting at a later time, need to be adjusted to account for initial setup
# changes.
'''currentPath = utils.settings('useDirectPaths')
if utils.window('emby_pluginpath') != currentPath:
# Plugin path value changed. Offer to reset
self.logMsg("Changed to playback mode detected", 1)
utils.window('emby_pluginpath', value=currentPath)
resp = xbmcgui.Dialog().yesno(
heading="Playback mode change detected",
line1=(
"Detected the playback mode has changed. The database "
"needs to be recreated for the change to be applied. "
"Proceed?"))
if resp:
utils.reset()'''
currentLog = utils.settings('logLevel')
if utils.window('emby_logLevel') != currentLog:
# The log level changed, set new prop
self.logMsg("New log level: %s" % currentLog, 1)
utils.window('emby_logLevel', value=currentLog)
def onNotification(self, sender, method, data):
doUtils = self.doUtils
        if method not in ("Playlist.OnAdd",):
self.logMsg("Method: %s Data: %s" % (method, data), 1)
if data:
data = json.loads(data,'utf-8')
if method == "Player.OnPlay":
# Set up report progress for emby playback
item = data.get('item')
try:
kodiid = item['id']
type = item['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for playstate update.", 1)
else:
if ((utils.settings('useDirectPaths') == "1" and not type == "song") or
(type == "song" and utils.settings('enableMusic') == "true")):
# Set up properties for player
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("No kodiid returned.", 1)
else:
url = "{server}/emby/Users/{UserId}/Items/%s?format=json" % itemid
result = doUtils.downloadUrl(url)
self.logMsg("Item: %s" % result, 2)
playurl = None
count = 0
while not playurl and count < 2:
try:
playurl = xbmc.Player().getPlayingFile()
except RuntimeError:
count += 1
xbmc.sleep(200)
else:
listItem = xbmcgui.ListItem()
playback = pbutils.PlaybackUtils(result)
if type == "song" and utils.settings('streamMusic') == "true":
utils.window('emby_%s.playmethod' % playurl,
value="DirectStream")
else:
utils.window('emby_%s.playmethod' % playurl,
value="DirectPlay")
# Set properties for player.py
playback.setProperties(playurl, listItem)
finally:
embycursor.close()
elif method == "VideoLibrary.OnUpdate":
# Manually marking as watched/unwatched
playcount = data.get('playcount')
item = data.get('item')
try:
kodiid = item['id']
type = item['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for playstate update.", 1)
else:
# Send notification to the server.
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("Could not find itemid in emby database.", 1)
else:
# Stop from manually marking as watched unwatched, with actual playback.
if utils.window('emby_skipWatched%s' % itemid) == "true":
# property is set in player.py
utils.window('emby_skipWatched%s' % itemid, clear=True)
else:
# notify the server
url = "{server}/emby/Users/{UserId}/PlayedItems/%s?format=json" % itemid
if playcount != 0:
doUtils.downloadUrl(url, type="POST")
self.logMsg("Mark as watched for itemid: %s" % itemid, 1)
else:
doUtils.downloadUrl(url, type="DELETE")
self.logMsg("Mark as unwatched for itemid: %s" % itemid, 1)
finally:
embycursor.close()
elif method == "VideoLibrary.OnRemove":
# Removed function, because with plugin paths + clean library, it will wipe
# entire library if user has permissions. Instead, use the emby context menu available
# in Isengard and higher version
pass
'''try:
kodiid = data['id']
type = data['type']
except (KeyError, TypeError):
self.logMsg("Item is invalid for emby deletion.", 1)
else:
# Send the delete action to the server.
embyconn = utils.kodiSQL('emby')
embycursor = embyconn.cursor()
emby_db = embydb.Embydb_Functions(embycursor)
emby_dbitem = emby_db.getItem_byKodiId(kodiid, type)
try:
itemid = emby_dbitem[0]
except TypeError:
self.logMsg("Could not find itemid in emby database.", 1)
else:
if utils.settings('skipContextMenu') != "true":
resp = xbmcgui.Dialog().yesno(
heading="Confirm delete",
line1="Delete file on Emby Server?")
if not resp:
self.logMsg("User skipped deletion.", 1)
embycursor.close()
return
url = "{server}/emby/Items/%s?format=json" % itemid
self.logMsg("Deleting request: %s" % itemid)
doUtils.downloadUrl(url, type="DELETE")
finally:
embycursor.close()'''
elif method == "System.OnWake":
# Allow network to wake up
xbmc.sleep(10000)
utils.window('emby_onWake', value="true")
elif method == "Playlist.OnClear":
pass
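# Illustrative Kodi notification payload for Player.OnPlay (ids are made up);
# onNotification() above reads data['item']['id'] and data['item']['type']:
#   {"item": {"id": 123, "type": "movie"}, "player": {"playerid": 1, "speed": 1}}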
|
angelblue05/Embytest.Kodi
|
resources/lib/kodimonitor.py
|
Python
|
gpl-2.0
| 8,856 | 0.0035 |
#
# core.py
#
# Copyright (C) 2014 dredkin <dmitry.redkin@gmail.com>
#
# Basic plugin template created by:
# Copyright (C) 2008 Martijn Voncken <mvoncken@gmail.com>
# Copyright (C) 2007-2009 Andrew Resch <andrewresch@gmail.com>
# Copyright (C) 2009 Damien Churchill <damoxc@gmail.com>
#
# Deluge is free software.
#
# You may redistribute it and/or modify it under the terms of the
# GNU General Public License, as published by the Free Software
# Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# deluge is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with deluge. If not, write to:
# The Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor
# Boston, MA 02110-1301, USA.
#
# In addition, as a special exception, the copyright holders give
# permission to link the code of portions of this program with the OpenSSL
# library.
# You must obey the GNU General Public License in all respects for all of
# the code used other than OpenSSL. If you modify file(s) with this
# exception, you may extend this exception to your version of the file(s),
# but you are not obligated to do so. If you do not wish to do so, delete
# this exception statement from your version. If you delete this exception
# statement from all source files in the program, then also delete it here.
#
from deluge.log import LOG as log
from deluge.plugins.pluginbase import CorePluginBase
import deluge.component as component
import deluge.configmanager
from deluge.core.rpcserver import export
import os
import locale
import pkg_resources
import gettext
def windows():
return os.name == "nt"
if windows():
import win32api
DEFAULT_PREFS = {
#Default to empty to have no specified root dir.
"RootDirPath":"",
"DisableTraversal":"false"
}
UTF8 = 'UTF-8'
CURRENT_LOCALE = locale.getdefaultlocale()[1]
if CURRENT_LOCALE is None:
CURRENT_LOCALE = UTF8
class Core(CorePluginBase):
def enable(self):
self.config = deluge.configmanager.ConfigManager("browsebutton.conf", DEFAULT_PREFS)
def disable(self):
#self.config.save()
pass
def update(self):
pass
def drives_list(self):
if windows():
drives = win32api.GetLogicalDriveStrings()
return drives.split('\000')[:-1]
else:
return "/"
def subfolders_list(self, absolutepath):
subfolders = []
try:
list = os.listdir(absolutepath)
except:
list = []
for f in list:
if os.path.isdir(os.path.join(absolutepath,f)):
f2 = f.decode(CURRENT_LOCALE).encode(UTF8)
subfolders.append(f2)
return subfolders
def is_root_folder(self, folder):
return os.path.dirname(folder) == folder
@export
def save_config(self):
"""Saves the config"""
self.config.save()
log.debug("RBB: config saved")
@export
def set_config(self, config):
"""Sets the config dictionary"""
log.debug("RBB: set_config")
for key in config.keys():
self.config[key] = config[key]
log.debug("RBB: added history "+str(key)+"->"+str(config[key]))
self.save_config()
@export
def get_config(self):
"""Returns the config dictionary"""
log.debug("RBB: config assigned")
return self.config.config
@export
def serverlog(self, line):
log.debug(line)
@export
def get_folder_list(self, folder, subfolder):
"""Returns the list of subfolders for specified folder on server"""
error = ""
if folder == "":
folder = os.path.expanduser("~")
else:
folder = folder.encode(CURRENT_LOCALE)
log.debug("RBB:native folder"+folder)
log.debug("RBB:orig subfolder"+subfolder)
subfolder = subfolder.encode(CURRENT_LOCALE)
newfolder = os.path.join(folder,subfolder)
absolutepath = os.path.normpath(newfolder)
if not os.path.isdir(absolutepath):
log.info("RBB:NOT A FOLDER!:"+absolutepath+" (normalized from "+newfolder+")")
error = "Cannot List Contents of "+absolutepath
absolutepath = os.path.expanduser("~")
if windows():
isroot = self.is_root_folder(folder) and (subfolder == "..")
else:
isroot = self.is_root_folder(absolutepath)
if windows() and isroot:
subfolders = self.drives_list()
absolutepath = ""
else:
subfolders = self.subfolders_list(absolutepath)
return [absolutepath.decode(CURRENT_LOCALE).encode(UTF8), isroot, subfolders, error]
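# Illustrative return value of get_folder_list("/home/user", "Downloads") on a
# Linux box (paths are made up):
#   ['/home/user/Downloads', False, ['iso', 'torrents'], '']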
|
Mattie432/deluge-rbb
|
browsebutton/core.py
|
Python
|
gpl-2.0
| 5,012 | 0.003591 |
import random
from common import generalUtils
from common.log import logUtils as log
from constants import clientPackets
from constants import matchModModes
from constants import matchTeamTypes
from constants import matchTeams
from constants import slotStatuses
from objects import glob
def handle(userToken, packetData):
# Read new settings
packetData = clientPackets.changeMatchSettings(packetData)
# Get match ID
matchID = userToken.matchID
# Make sure the match exists
if matchID not in glob.matches.matches:
return
# Host check
with glob.matches.matches[matchID] as match:
if userToken.userID != match.hostUserID:
return
# Some dank memes easter egg
memeTitles = [
"RWC 2020",
"Fokabot is a duck",
"Dank memes",
"1337ms Ping",
"Iscriviti a Xenotoze",
"...e i marò?",
"Superman dies",
"The brace is on fire",
"print_foot()",
"#FREEZEBARKEZ",
"Ripple devs are actually cats",
"Thank Mr Shaural",
"NEVER GIVE UP",
"T I E D W I T H U N I T E D",
"HIGHEST HDHR LOBBY OF ALL TIME",
"This is gasoline and I set myself on fire",
"Everyone is cheating apparently",
"Kurwa mac",
"TATOE",
"This is not your drama landfill.",
"I like cheese",
"NYO IS NOT A CAT HE IS A DO(N)G",
"Datingu startuato"
]
# Set match name
match.matchName = packetData["matchName"] if packetData["matchName"] != "meme" else random.choice(memeTitles)
# Update match settings
match.inProgress = packetData["inProgress"]
if packetData["matchPassword"] != "":
match.matchPassword = generalUtils.stringMd5(packetData["matchPassword"])
else:
match.matchPassword = ""
match.beatmapName = packetData["beatmapName"]
match.beatmapID = packetData["beatmapID"]
match.hostUserID = packetData["hostUserID"]
match.gameMode = packetData["gameMode"]
oldBeatmapMD5 = match.beatmapMD5
oldMods = match.mods
oldMatchTeamType = match.matchTeamType
match.mods = packetData["mods"]
match.beatmapMD5 = packetData["beatmapMD5"]
match.matchScoringType = packetData["scoringType"]
match.matchTeamType = packetData["teamType"]
match.matchModMode = packetData["freeMods"]
# Reset ready if needed
if oldMods != match.mods or oldBeatmapMD5 != match.beatmapMD5:
match.resetReady()
# Reset mods if needed
if match.matchModMode == matchModModes.NORMAL:
# Reset slot mods if not freeMods
match.resetMods()
else:
# Reset match mods if freemod
match.mods = 0
# Initialize teams if team type changed
if match.matchTeamType != oldMatchTeamType:
match.initializeTeams()
# Force no freemods if tag coop
if match.matchTeamType == matchTeamTypes.TAG_COOP or match.matchTeamType == matchTeamTypes.TAG_TEAM_VS:
match.matchModMode = matchModModes.NORMAL
# Send updated settings
match.sendUpdates()
# Console output
log.info("MPROOM{}: Updated room settings".format(match.matchID))
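# Illustrative decoded packetData (made-up values) with the keys consumed above:
#   {"matchName": "my lobby", "inProgress": False, "matchPassword": "",
#    "beatmapName": "some map", "beatmapID": 123, "hostUserID": 1000,
#    "gameMode": 0, "mods": 0, "beatmapMD5": "abc...", "scoringType": 0,
#    "teamType": 0, "freeMods": 0}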
|
osuripple/pep.py
|
events/changeMatchSettingsEvent.py
|
Python
|
agpl-3.0
| 2,905 | 0.027893 |
from ..rerequest import TemplateRequest
init_req = TemplateRequest(
re = r'(http://)?(www\.)?(?P<domain>ur(play)?)\.se/(?P<req_url>.+)',
encode_vars = lambda v: { 'req_url': 'http://%(domain)s.se/%(req_url)s' % v } )
hls = { 'title': 'UR-play', 'url': 'http://urplay.se/', 'feed_url': 'http://urplay.se/rss',
'items': [init_req,
TemplateRequest(
re = r'file_html5":\s?"(?P<final_url>[^"]+)".*?"subtitles":\s?"(?P<subtitles>[^",]*)',
encode_vars = lambda v: { 'final_url': ('http://130.242.59.75/%(final_url)s/playlist.m3u8' % v).replace('\\', ''),
'suffix-hint': 'mp4',
'subtitles': v.get('subtitles', '').replace('\\', '') % v } )] }
rtmp = { 'items': [init_req,
TemplateRequest(
re = r'file_flash":\s?"(?P<final_url>[^"]+\.(?P<ext>mp[34]))".*?"subtitles":\s?"(?P<subtitles>[^",]*)',
encode_vars = lambda v: { 'final_url': ('rtmp://130.242.59.75/ondemand playpath=%(ext)s:/%(final_url)s app=ondemand' % v).replace('\\', ''),
'suffix-hint': 'flv',
'rtmpdump-realtime': True,
'subtitles': v.get('subtitles', '').replace('\\', '') % v } )] }
services = [hls, rtmp]
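# Illustrative (made-up match values): rtmp's encode_vars would turn
# {'final_url': 'path\\/to\\/clip.mp4', 'ext': 'mp4', 'subtitles': ''} into
# 'rtmp://130.242.59.75/ondemand playpath=mp4:/path/to/clip.mp4 app=ondemand'
# after the backslash stripping above.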
|
jackuess/pirateplay.se
|
lib/pirateplay/lib/services/ur.py
|
Python
|
gpl-3.0
| 1,161 | 0.046512 |
# Copyright (C) 2010 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of Launch Control.
#
# Launch Control is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Launch Control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Launch Control. If not, see <http://www.gnu.org/licenses/>.
"""
Tests for the Attachment model
"""
from django.contrib.contenttypes import generic
from django.core.files.base import ContentFile
from django.db import models
from django.test import TestCase
from dashboard_app.models import Attachment
class ModelWithAttachments(models.Model):
"""
Test model that uses attachments
"""
attachments = generic.GenericRelation(Attachment)
class Meta:
# This requires a bit of explanation. Traditionally we could add new
# models inside test modules and they would be picked up by django and
# synchronized (created in the test database) as a part of the test
# provisioning process.
# Since we started using south, synchronization is no longer occurring
# for the 'dashboard_app' application. This caused some test failures
# such as any tests that depended on the existence of this model.
# As a workaround we artificially "stick" this model into the only
# application that we can count on to exist _and_ not use south as well
# -- that is south itself.
# This way the test model gets synchronized when south is synchronized
# and all the test code below works as expected.
app_label = "south"
class AttachmentTestCase(TestCase):
_CONTENT = "text"
_FILENAME = "filename"
def setUp(self):
self.obj = ModelWithAttachments.objects.create()
def test_attachment_can_be_added_to_models(self):
attachment = self.obj.attachments.create(
content_filename=self._FILENAME, content=None)
self.assertEqual(attachment.content_object, self.obj)
def test_attachment_can_be_accessed_via_model(self):
self.obj.attachments.create(
content_filename=self._FILENAME, content=None)
self.assertEqual(self.obj.attachments.count(), 1)
retrieved_attachment = self.obj.attachments.all()[0]
self.assertEqual(retrieved_attachment.content_object, self.obj)
def test_attachment_stores_data(self):
attachment = self.obj.attachments.create(
content_filename=self._FILENAME, content=None)
attachment.content.save(
self._FILENAME,
ContentFile(self._CONTENT))
self.assertEqual(attachment.content_filename, self._FILENAME)
attachment.content.open()
try:
self.assertEqual(attachment.content.read(), self._CONTENT)
finally:
attachment.content.close()
attachment.content.delete(save=False)
def test_unicode(self):
obj = Attachment(content_filename="test.json")
self.assertEqual(unicode(obj), "test.json")
|
OSSystems/lava-server
|
dashboard_app/tests/models/attachment.py
|
Python
|
agpl-3.0
| 3,410 | 0 |
#!/usr/bin/env python3
import unittest
from tests import testfunctions
from dftintegrate.fourier import vaspdata
class TestExtractingVASPDataToDatFiles(unittest.TestCase,
testfunctions.TestFunctions):
def setUp(self):
print('Testing extracting VASP data to .dat files...')
self.cases = [str(x) for x in range(1, 3)]
self.root = './tests/fourier/extractvaspdata/'
def test_runtestcases(self):
for case in self.cases:
print(' Testing case '+case+'...')
vaspdata.VASPData(self.root+'tocheck/test'+case)
kpts_eigenvals_ans = self.readfile(case, 'answer',
'kpts_eigenvals')
kpts_eigenvals_tocheck = self.readfile(case, 'tocheck',
'kpts_eigenvals')
self.assertEqual(kpts_eigenvals_ans, kpts_eigenvals_tocheck,
msg='kpts_eigenvals case '+case)
symops_trans_ans = self.readfile(case, 'answer',
'symops_trans')
symops_trans_tocheck = self.readfile(case, 'tocheck',
'symops_trans')
self.assertEqual(symops_trans_ans, symops_trans_tocheck,
msg='symops_trans case '+case)
kmax_ans = self.readfile(case, 'answer', 'kmax')
kmax_tocheck = self.readfile(case, 'tocheck', 'kmax')
self.assertEqual(kmax_ans, kmax_tocheck, msg='kmax case '+case)
|
mmb90/dftintegrate
|
tests/fourier/extractvaspdata/test_extractvaspdata.py
|
Python
|
mit
| 1,597 | 0 |
"""Solve the Project Euler problems using functional Python.
https://projecteuler.net/archives
"""
from importlib import import_module
from os import listdir
from os.path import abspath, dirname
from re import match
SOLVED = set(
int(m.group(1))
for f in listdir(abspath(dirname(__file__)))
for m in (match(r"^p(\d{3})\.py$", f),) if m
)
def compute(problem: int):
"""Compute the answer to problem `problem`."""
assert problem in SOLVED, "Problem currently unsolved."
module = import_module("euler.p{:03d}".format(problem))
return module.compute()
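# Illustrative usage (assumes a solution module such as euler/p001.py exists
# and defines compute()):
#   >>> from euler import SOLVED, compute
#   >>> 1 in SOLVED and compute(1)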
|
2Cubed/ProjectEuler
|
euler/__init__.py
|
Python
|
mit
| 584 | 0 |
import random
import pymel.core as pm
from impress import models, register
def randomTransform( translate=False, translateAmount=1.0, translateAxis=(False,False,False),
rotate=False, rotateAmount=1.0, rotateAxis=(False,False,False),
scale=False, scaleAmount=1.0, scaleAxis=(False,False,False) ):
"""
Transforms selected objects with random values.
"""
objects = pm.ls( selection=True, type='transform')
assert len(objects), 'randomTransform requires at least 1 selected transform object.'
for object in objects:
if translate:
offset = map(lambda axis: random.uniform( -translateAmount, translateAmount )*float(axis), translateAxis)
object.setTranslation( offset, relative=True )
if rotate:
offset = map(lambda axis: random.uniform( -rotateAmount, rotateAmount )*float(axis), rotateAxis)
object.setRotation( offset, relative=True )
if scale:
offset = map(lambda axis: 1 + ( random.uniform( -scaleAmount, scaleAmount )*float(axis) ), scaleAxis)
object.setScale( offset )
    print '# Results: %i objects randomized. #' % len(objects)
class RandomTransformOptions( models.OptionModel ):
translate = models.CheckBox( default=1, ann='about the checkbox' )
translateAmount = models.FloatSlider( default=1, precision=3, requires=(translate, 1) )
translateAxis = models.CheckBox( labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(translate, 1) )
sep1 = models.Separator( style='in', height=14 )
rotate = models.CheckBox( default=1, ann='about the checkbox' )
rotateAmount = models.FloatSlider( default=1, precision=3, requires=(rotate, 1) )
rotateAxis = models.CheckBox( labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(rotate, 1) )
sep2 = models.Separator( style='in', height=14 )
scale = models.CheckBox( default=1, ann='about the checkbox' )
scaleAmount = models.FloatSlider( default=1, precision=3, requires=(scale, 1) )
scaleAxis = models.CheckBox( labels=['X', 'Y', 'Z'], default=[1, 1, 1], requires=(scale, 1) )
class Meta:
button_label = 'Randomize'
performRandomTransform = register.PerformCommand( randomTransform, RandomTransformOptions )
performRandomTransform(1)
|
kinetifex/maya-impress
|
examples/options_example.py
|
Python
|
bsd-3-clause
| 2,324 | 0.027539 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
import six
from cryptography import utils
def generate_parameters(key_size, backend):
return backend.generate_dsa_parameters(key_size)
def generate_private_key(key_size, backend):
return backend.generate_dsa_private_key_and_parameters(key_size)
def _check_dsa_parameters(parameters):
if utils.bit_length(parameters.p) not in [1024, 2048, 3072]:
raise ValueError("p must be exactly 1024, 2048, or 3072 bits long")
if utils.bit_length(parameters.q) not in [160, 256]:
raise ValueError("q must be exactly 160 or 256 bits long")
if not (1 < parameters.g < parameters.p):
raise ValueError("g, p don't satisfy 1 < g < p.")
def _check_dsa_private_numbers(numbers):
parameters = numbers.public_numbers.parameter_numbers
_check_dsa_parameters(parameters)
if numbers.x <= 0 or numbers.x >= parameters.q:
raise ValueError("x must be > 0 and < q.")
if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
raise ValueError("y must be equal to (g ** x % p).")
class DSAParameterNumbers(object):
def __init__(self, p, q, g):
if (
not isinstance(p, six.integer_types) or
not isinstance(q, six.integer_types) or
not isinstance(g, six.integer_types)
):
raise TypeError(
"DSAParameterNumbers p, q, and g arguments must be integers."
)
self._p = p
self._q = q
self._g = g
p = utils.read_only_property("_p")
q = utils.read_only_property("_q")
g = utils.read_only_property("_g")
def parameters(self, backend):
return backend.load_dsa_parameter_numbers(self)
class DSAPublicNumbers(object):
def __init__(self, y, parameter_numbers):
if not isinstance(y, six.integer_types):
raise TypeError("DSAPublicNumbers y argument must be an integer.")
if not isinstance(parameter_numbers, DSAParameterNumbers):
raise TypeError(
"parameter_numbers must be a DSAParameterNumbers instance."
)
self._y = y
self._parameter_numbers = parameter_numbers
y = utils.read_only_property("_y")
parameter_numbers = utils.read_only_property("_parameter_numbers")
def public_key(self, backend):
return backend.load_dsa_public_numbers(self)
class DSAPrivateNumbers(object):
def __init__(self, x, public_numbers):
if not isinstance(x, six.integer_types):
raise TypeError("DSAPrivateNumbers x argument must be an integer.")
if not isinstance(public_numbers, DSAPublicNumbers):
raise TypeError(
"public_numbers must be a DSAPublicNumbers instance."
)
self._public_numbers = public_numbers
self._x = x
x = utils.read_only_property("_x")
public_numbers = utils.read_only_property("_public_numbers")
def private_key(self, backend):
return backend.load_dsa_private_numbers(self)
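# Illustrative usage (a sketch assuming the package's default backend is
# available; key sizes must satisfy _check_dsa_parameters above):
#   from cryptography.hazmat.backends import default_backend
#   parameters = generate_parameters(key_size=2048, backend=default_backend())
#   private_key = generate_private_key(key_size=2048, backend=default_backend())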
|
viraptor/cryptography
|
cryptography/hazmat/primitives/asymmetric/dsa.py
|
Python
|
apache-2.0
| 3,615 | 0 |
###############################################################################
# Copyright 2016 - Climate Research Division
# Environment and Climate Change Canada
#
# This file is part of the "EC-CAS diags" package.
#
# "EC-CAS diags" is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# "EC-CAS diags" is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with "EC-CAS diags". If not, see <http://www.gnu.org/licenses/>.
###############################################################################
from .zonalmean import ZonalMean as Zonal
from .vinterp import VInterp
from . import TimeVaryingDiagnostic
class ZonalMean(Zonal,VInterp,TimeVaryingDiagnostic):
"""
Zonal mean (or standard deviation) of a field, animated in time.
"""
def __str__ (self):
return 'zonal'+self.typestat+'_'+self.zaxis
def do (self, inputs):
from .movie import ZonalMovie
prefix = '_'.join(inp.name for inp in inputs) + '_zonal'+self.typestat+'_'+self.fieldname+'_on_'+self.zaxis+self.suffix+self.end_suffix
title = 'Zonal %s %s (in %s)'%(self.typestat,self.fieldname,self.units)
aspect_ratio = 1.0
shape = (1,len(inputs))
subtitles = [inp.title for inp in inputs]
fields = [inp.datasets[0].vars[0] for inp in inputs]
cmaps = [inp.cmap for inp in inputs]
cap_extremes = [getattr(inp,'cap_extremes',False) for inp in inputs]
movie = ZonalMovie(fields, title=title, subtitles=subtitles, shape=shape, aspect_ratio=aspect_ratio, cmaps=cmaps, cap_extremes=cap_extremes)
movie.save (outdir=self.outdir, prefix=prefix)
from . import table
table['zonal-movie'] = ZonalMean
|
neishm/EC-CAS-diags
|
eccas_diags/diagnostics/movie_zonal.py
|
Python
|
lgpl-3.0
| 2,094 | 0.009551 |