| text (stringlengths 6–947k) | repo_name (stringlengths 5–100) | path (stringlengths 4–231) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 6–947k) | score (float64, 0–0.34) |
|---|---|---|---|---|---|---|
from django.conf.urls import url
from . import views
app_name = 'repo'
urlpatterns = [
url(r'^$', views.home, name='home'),
url(r'^home/$', views.home, name='home'),
url(r'^library/$', views.library, name='library'),
url(r'^login/$', views.login, name='login'),
url(r'^register/$', views.register, name='register'),
url(r'^results/(?P<form>[A-Za-z]+)/$', views.results, name='results'),
url(r'^(?P<sn>[-\/\d\w]{5,100})/borrow/$', views.borrow, name='borrow'),
#url(r'^(?P<sn>[.\D\d.]+)/borrow/$', views.borrow, name='borrow'),
]
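# --- Editor's illustrative sketch (not part of the original app) -----------
# How the named patterns above are typically consumed, assuming the root
# URLconf includes this module (e.g. url(r'^', include('repo.urls'))) so the
# 'repo' app_name acts as the namespace. On Django < 1.10 use
# django.core.urlresolvers.reverse instead of django.urls.reverse.
def _example_links():
    from django.urls import reverse
    results_url = reverse('repo:results', kwargs={'form': 'books'})  # '/results/books/'
    borrow_url = reverse('repo:borrow', kwargs={'sn': 'ABC-12345'})  # '/ABC-12345/borrow/'
    return results_url, borrow_url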
| giantas/elibrary | repo/urls.py | Python | mit | 535 | 0.018692 |
self.description = "Backup file relocation"
lp1 = pmpkg("bash")
lp1.files = ["etc/profile*"]
lp1.backup = ["etc/profile"]
self.addpkg2db("local", lp1)
p1 = pmpkg("bash", "1.0-2")
self.addpkg(p1)
lp2 = pmpkg("filesystem")
self.addpkg2db("local", lp2)
p2 = pmpkg("filesystem", "1.0-2")
p2.files = ["etc/profile**"]
p2.backup = ["etc/profile"]
p2.depends = [ "bash" ]
self.addpkg(p2)
self.args = "-U %s" % " ".join([p.filename() for p in (p1, p2)])
self.filesystem = ["etc/profile"]
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_VERSION=bash|1.0-2")
self.addrule("PKG_VERSION=filesystem|1.0-2")
self.addrule("!FILE_PACSAVE=etc/profile")
self.addrule("FILE_PACNEW=etc/profile")
self.addrule("FILE_EXIST=etc/profile")
| kylon/pacman-fakeroot | test/pacman/tests/upgrade042.py | Python | gpl-2.0 | 725 | 0.002759 |
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging
from odoo import api, SUPERUSER_ID
_logger = logging.getLogger(__name__)
def post_init_hook(cr, registry):
"""
Create a payment group for every existing payment
"""
env = api.Environment(cr, SUPERUSER_ID, {})
# payments = env['account.payment'].search(
# [('payment_type', '!=', 'transfer')])
# on v10, when reconciling from statements, if no partner is chosen, then
# a payment is created with no partner. We still make partners mandatory
# on payment groups. So, we don't create payment groups for payments
# without partner_id
payments = env['account.payment'].search(
[('partner_id', '!=', False)])
for payment in payments:
_logger.info('creating payment group for payment %s' % payment.id)
_state = payment.state in ['sent', 'reconciled'] and 'posted' or payment.state
_state = _state if _state != 'cancelled' else 'cancel'
env['account.payment.group'].create({
'company_id': payment.company_id.id,
'partner_type': payment.partner_type,
'partner_id': payment.partner_id.id,
'payment_date': payment.date,
'communication': payment.ref,
'payment_ids': [(4, payment.id, False)],
'state': _state,
})
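# --- Editor's illustrative sketch (not part of this module) ----------------
# A hook like the one above is wired up through the addon manifest; Odoo
# imports the callable named in 'post_init_hook' (exposed via the addon's
# __init__.py) and calls it once with (cr, registry) right after install.
# Hypothetical __manifest__.py excerpt:
_EXAMPLE_MANIFEST = {
    'name': 'Account Payment Group',
    'depends': ['account'],
    'post_init_hook': 'post_init_hook',
    'installable': True,
}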
| ingadhoc/account-payment | account_payment_group/hooks.py | Python | agpl-3.0 | 1,366 | 0.000732 |
import logging
import time
from collections import OrderedDict, defaultdict
from datetime import datetime, timedelta
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
from django.conf import settings
from django.db import connection
from django.db.models import F
from psycopg2.sql import SQL, Composable, Identifier, Literal
from analytics.models import (
BaseCount,
FillState,
InstallationCount,
RealmCount,
StreamCount,
UserCount,
installation_epoch,
last_successful_fill,
)
from zerver.lib.logging_util import log_to_file
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, floor_to_hour, verify_UTC
from zerver.models import (
Message,
Realm,
RealmAuditLog,
Stream,
UserActivityInterval,
UserProfile,
models,
)
## Logging setup ##
logger = logging.getLogger('zulip.management')
log_to_file(logger, settings.ANALYTICS_LOG_PATH)
# You can't subtract timedelta.max from a datetime, so use this instead
TIMEDELTA_MAX = timedelta(days=365*1000)
## Class definitions ##
class CountStat:
HOUR = 'hour'
DAY = 'day'
FREQUENCIES = frozenset([HOUR, DAY])
def __init__(self, property: str, data_collector: 'DataCollector', frequency: str,
interval: Optional[timedelta]=None) -> None:
self.property = property
self.data_collector = data_collector
# might have to do something different for bitfields
if frequency not in self.FREQUENCIES:
raise AssertionError(f"Unknown frequency: {frequency}")
self.frequency = frequency
if interval is not None:
self.interval = interval
elif frequency == CountStat.HOUR:
self.interval = timedelta(hours=1)
else: # frequency == CountStat.DAY
self.interval = timedelta(days=1)
def __str__(self) -> str:
return f"<CountStat: {self.property}>"
class LoggingCountStat(CountStat):
def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
CountStat.__init__(self, property, DataCollector(output_table, None), frequency)
class DependentCountStat(CountStat):
def __init__(self, property: str, data_collector: 'DataCollector', frequency: str,
interval: Optional[timedelta] = None, dependencies: Sequence[str] = []) -> None:
CountStat.__init__(self, property, data_collector, frequency, interval=interval)
self.dependencies = dependencies
class DataCollector:
def __init__(self, output_table: Type[BaseCount],
pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]]) -> None:
self.output_table = output_table
self.pull_function = pull_function
## CountStat-level operations ##
def process_count_stat(stat: CountStat, fill_to_time: datetime,
realm: Optional[Realm]=None) -> None:
# TODO: The realm argument is not yet supported, in that we don't
# have a solution for how to update FillState if it is passed. It
# exists solely as partial plumbing for when we do fully implement
# doing single-realm analytics runs for use cases like data import.
#
# Also, note that for the realm argument to be properly supported,
# the CountStat object passed in needs to have come from
# e.g. get_count_stats(realm), i.e. have the realm_id already
# entered into the SQL query defined by the CountStat object.
if stat.frequency == CountStat.HOUR:
time_increment = timedelta(hours=1)
elif stat.frequency == CountStat.DAY:
time_increment = timedelta(days=1)
else:
raise AssertionError(f"Unknown frequency: {stat.frequency}")
verify_UTC(fill_to_time)
if floor_to_hour(fill_to_time) != fill_to_time:
raise ValueError(f"fill_to_time must be on an hour boundary: {fill_to_time}")
fill_state = FillState.objects.filter(property=stat.property).first()
if fill_state is None:
currently_filled = installation_epoch()
fill_state = FillState.objects.create(property=stat.property,
end_time=currently_filled,
state=FillState.DONE)
logger.info("INITIALIZED %s %s", stat.property, currently_filled)
elif fill_state.state == FillState.STARTED:
logger.info("UNDO START %s %s", stat.property, fill_state.end_time)
do_delete_counts_at_hour(stat, fill_state.end_time)
currently_filled = fill_state.end_time - time_increment
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
logger.info("UNDO DONE %s", stat.property)
elif fill_state.state == FillState.DONE:
currently_filled = fill_state.end_time
else:
raise AssertionError(f"Unknown value for FillState.state: {fill_state.state}.")
if isinstance(stat, DependentCountStat):
for dependency in stat.dependencies:
dependency_fill_time = last_successful_fill(dependency)
if dependency_fill_time is None:
logger.warning("DependentCountStat %s run before dependency %s.",
stat.property, dependency)
return
fill_to_time = min(fill_to_time, dependency_fill_time)
currently_filled = currently_filled + time_increment
while currently_filled <= fill_to_time:
logger.info("START %s %s", stat.property, currently_filled)
start = time.time()
do_update_fill_state(fill_state, currently_filled, FillState.STARTED)
do_fill_count_stat_at_hour(stat, currently_filled, realm)
do_update_fill_state(fill_state, currently_filled, FillState.DONE)
end = time.time()
currently_filled = currently_filled + time_increment
logger.info("DONE %s (%dms)", stat.property, (end-start)*1000)
def do_update_fill_state(fill_state: FillState, end_time: datetime, state: int) -> None:
fill_state.end_time = end_time
fill_state.state = state
fill_state.save()
# We assume end_time is valid (e.g. is on a day or hour boundary as appropriate)
# and is timezone aware. It is the caller's responsibility to enforce this!
def do_fill_count_stat_at_hour(stat: CountStat, end_time: datetime, realm: Optional[Realm]=None) -> None:
start_time = end_time - stat.interval
if not isinstance(stat, LoggingCountStat):
timer = time.time()
assert(stat.data_collector.pull_function is not None)
rows_added = stat.data_collector.pull_function(stat.property, start_time, end_time, realm)
logger.info("%s run pull_function (%dms/%sr)",
stat.property, (time.time()-timer)*1000, rows_added)
do_aggregate_to_summary_table(stat, end_time, realm)
def do_delete_counts_at_hour(stat: CountStat, end_time: datetime) -> None:
if isinstance(stat, LoggingCountStat):
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
if stat.data_collector.output_table in [UserCount, StreamCount]:
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
else:
UserCount.objects.filter(property=stat.property, end_time=end_time).delete()
StreamCount.objects.filter(property=stat.property, end_time=end_time).delete()
RealmCount.objects.filter(property=stat.property, end_time=end_time).delete()
InstallationCount.objects.filter(property=stat.property, end_time=end_time).delete()
def do_aggregate_to_summary_table(stat: CountStat, end_time: datetime,
realm: Optional[Realm]=None) -> None:
cursor = connection.cursor()
# Aggregate into RealmCount
output_table = stat.data_collector.output_table
if realm is not None:
realm_clause = SQL("AND zerver_realm.id = {}").format(Literal(realm.id))
else:
realm_clause = SQL("")
if output_table in (UserCount, StreamCount):
realmcount_query = SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
zerver_realm.id, COALESCE(sum({output_table}.value), 0), %(property)s,
{output_table}.subgroup, %(end_time)s
FROM zerver_realm
JOIN {output_table}
ON
zerver_realm.id = {output_table}.realm_id
WHERE
{output_table}.property = %(property)s AND
{output_table}.end_time = %(end_time)s
{realm_clause}
GROUP BY zerver_realm.id, {output_table}.subgroup
""").format(
output_table=Identifier(output_table._meta.db_table),
realm_clause=realm_clause,
)
start = time.time()
cursor.execute(realmcount_query, {
'property': stat.property,
'end_time': end_time,
})
end = time.time()
logger.info(
"%s RealmCount aggregation (%dms/%sr)",
stat.property, (end - start) * 1000, cursor.rowcount,
)
if realm is None:
# Aggregate into InstallationCount. Only run if we just
# processed counts for all realms.
#
# TODO: Add support for updating installation data after
# changing an individual realm's values.
installationcount_query = SQL("""
INSERT INTO analytics_installationcount
(value, property, subgroup, end_time)
SELECT
sum(value), %(property)s, analytics_realmcount.subgroup, %(end_time)s
FROM analytics_realmcount
WHERE
property = %(property)s AND
end_time = %(end_time)s
GROUP BY analytics_realmcount.subgroup
""")
start = time.time()
cursor.execute(installationcount_query, {
'property': stat.property,
'end_time': end_time,
})
end = time.time()
logger.info(
"%s InstallationCount aggregation (%dms/%sr)",
stat.property, (end - start) * 1000, cursor.rowcount,
)
cursor.close()
## Utility functions called from outside counts.py ##
# called from zerver/lib/actions.py; should not throw any errors
def do_increment_logging_stat(zerver_object: Union[Realm, UserProfile, Stream], stat: CountStat,
subgroup: Optional[Union[str, int, bool]], event_time: datetime,
increment: int=1) -> None:
if not increment:
return
table = stat.data_collector.output_table
if table == RealmCount:
id_args = {'realm': zerver_object}
elif table == UserCount:
id_args = {'realm': zerver_object.realm, 'user': zerver_object}
else: # StreamCount
id_args = {'realm': zerver_object.realm, 'stream': zerver_object}
if stat.frequency == CountStat.DAY:
end_time = ceiling_to_day(event_time)
else: # CountStat.HOUR:
end_time = ceiling_to_hour(event_time)
row, created = table.objects.get_or_create(
property=stat.property, subgroup=subgroup, end_time=end_time,
defaults={'value': increment}, **id_args)
if not created:
row.value = F('value') + increment
row.save(update_fields=['value'])
def do_drop_all_analytics_tables() -> None:
UserCount.objects.all().delete()
StreamCount.objects.all().delete()
RealmCount.objects.all().delete()
InstallationCount.objects.all().delete()
FillState.objects.all().delete()
def do_drop_single_stat(property: str) -> None:
UserCount.objects.filter(property=property).delete()
StreamCount.objects.filter(property=property).delete()
RealmCount.objects.filter(property=property).delete()
InstallationCount.objects.filter(property=property).delete()
FillState.objects.filter(property=property).delete()
## DataCollector-level operations ##
QueryFn = Callable[[Dict[str, Composable]], Composable]
def do_pull_by_sql_query(
property: str,
start_time: datetime,
end_time: datetime,
query: QueryFn,
group_by: Optional[Tuple[models.Model, str]],
) -> int:
if group_by is None:
subgroup = SQL('NULL')
group_by_clause = SQL('')
else:
subgroup = Identifier(group_by[0]._meta.db_table, group_by[1])
group_by_clause = SQL(', {}').format(subgroup)
# We do string replacement here because cursor.execute will reject a
# group_by_clause given as a param.
# We pass in the datetimes as params to cursor.execute so that we don't have to
# think about how to convert python datetimes to SQL datetimes.
query_ = query({
'subgroup': subgroup,
'group_by_clause': group_by_clause,
})
cursor = connection.cursor()
cursor.execute(query_, {
'property': property,
'time_start': start_time,
'time_end': end_time,
})
rowcount = cursor.rowcount
cursor.close()
return rowcount
def sql_data_collector(
output_table: Type[BaseCount],
query: QueryFn,
group_by: Optional[Tuple[models.Model, str]],
) -> DataCollector:
def pull_function(property: str, start_time: datetime, end_time: datetime,
realm: Optional[Realm] = None) -> int:
# The pull function type needs to accept a Realm argument
# because the 'minutes_active::day' CountStat uses
# DataCollector directly for do_pull_minutes_active, which
# requires the realm argument. We ignore it here, because the
# realm should have been already encoded in the `query` we're
# passed.
return do_pull_by_sql_query(property, start_time, end_time, query, group_by)
return DataCollector(output_table, pull_function)
def do_pull_minutes_active(property: str, start_time: datetime, end_time: datetime,
realm: Optional[Realm] = None) -> int:
user_activity_intervals = UserActivityInterval.objects.filter(
end__gt=start_time, start__lt=end_time,
).select_related(
'user_profile',
).values_list(
'user_profile_id', 'user_profile__realm_id', 'start', 'end')
seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
if realm is None or realm.id == realm_id:
start = max(start_time, interval_start)
end = min(end_time, interval_end)
seconds_active[(user_id, realm_id)] += (end - start).total_seconds()
rows = [UserCount(user_id=ids[0], realm_id=ids[1], property=property,
end_time=end_time, value=int(seconds // 60))
for ids, seconds in seconds_active.items() if seconds >= 60]
UserCount.objects.bulk_create(rows)
return len(rows)
def count_message_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
zerver_userprofile.id, zerver_userprofile.realm_id, count(*),
%(property)s, {subgroup}, %(time_end)s
FROM zerver_userprofile
JOIN zerver_message
ON
zerver_userprofile.id = zerver_message.sender_id
WHERE
zerver_userprofile.date_joined < %(time_end)s AND
zerver_message.date_sent >= %(time_start)s AND
{realm_clause}
zerver_message.date_sent < %(time_end)s
GROUP BY zerver_userprofile.id {group_by_clause}
""").format(**kwargs, realm_clause=realm_clause)
# Note: ignores the group_by / group_by_clause.
def count_message_type_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(realm_id, user_id, value, property, subgroup, end_time)
SELECT realm_id, id, SUM(count) AS value, %(property)s, message_type, %(time_end)s
FROM
(
SELECT zerver_userprofile.realm_id, zerver_userprofile.id, count(*),
CASE WHEN
zerver_recipient.type = 1 THEN 'private_message'
WHEN
zerver_recipient.type = 3 THEN 'huddle_message'
WHEN
zerver_stream.invite_only = TRUE THEN 'private_stream'
ELSE 'public_stream'
END
message_type
FROM zerver_userprofile
JOIN zerver_message
ON
zerver_userprofile.id = zerver_message.sender_id AND
zerver_message.date_sent >= %(time_start)s AND
{realm_clause}
zerver_message.date_sent < %(time_end)s
JOIN zerver_recipient
ON
zerver_message.recipient_id = zerver_recipient.id
LEFT JOIN zerver_stream
ON
zerver_recipient.type_id = zerver_stream.id
GROUP BY
zerver_userprofile.realm_id, zerver_userprofile.id,
zerver_recipient.type, zerver_stream.invite_only
) AS subquery
GROUP BY realm_id, id, message_type
""").format(**kwargs, realm_clause=realm_clause)
# This query joins to the UserProfile table since all current queries that
# use this also subgroup on UserProfile.is_bot. If in the future there is a
# stat that counts messages by stream and doesn't need the UserProfile
# table, consider writing a new query for efficiency.
def count_message_by_stream_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_stream.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_streamcount
(stream_id, realm_id, value, property, subgroup, end_time)
SELECT
zerver_stream.id, zerver_stream.realm_id, count(*), %(property)s, {subgroup}, %(time_end)s
FROM zerver_stream
JOIN zerver_recipient
ON
zerver_stream.id = zerver_recipient.type_id
JOIN zerver_message
ON
zerver_recipient.id = zerver_message.recipient_id
JOIN zerver_userprofile
ON
zerver_message.sender_id = zerver_userprofile.id
WHERE
zerver_stream.date_created < %(time_end)s AND
zerver_recipient.type = 2 AND
zerver_message.date_sent >= %(time_start)s AND
{realm_clause}
zerver_message.date_sent < %(time_end)s
GROUP BY zerver_stream.id {group_by_clause}
""").format(**kwargs, realm_clause=realm_clause)
# Hardcodes the query needed by active_users:is_bot:day, since that is
# currently the only stat that uses this.
def count_user_by_realm_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
zerver_realm.id, count(*), %(property)s, {subgroup}, %(time_end)s
FROM zerver_realm
JOIN zerver_userprofile
ON
zerver_realm.id = zerver_userprofile.realm_id
WHERE
zerver_realm.date_created < %(time_end)s AND
zerver_userprofile.date_joined >= %(time_start)s AND
zerver_userprofile.date_joined < %(time_end)s AND
{realm_clause}
zerver_userprofile.is_active = TRUE
GROUP BY zerver_realm.id {group_by_clause}
""").format(**kwargs, realm_clause=realm_clause)
# Currently hardcodes the query needed for active_users_audit:is_bot:day.
# Assumes that a user cannot have two RealmAuditLog entries with the same event_time and
# event_type in [RealmAuditLog.USER_CREATED, USER_DEACTIVATED, etc].
# In particular, it's important to ensure that migrations don't cause that to happen.
def check_realmauditlog_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
ral1.modified_user_id, ral1.realm_id, 1, %(property)s, {subgroup}, %(time_end)s
FROM zerver_realmauditlog ral1
JOIN (
SELECT modified_user_id, max(event_time) AS max_event_time
FROM zerver_realmauditlog
WHERE
event_type in ({user_created}, {user_activated}, {user_deactivated}, {user_reactivated}) AND
{realm_clause}
event_time < %(time_end)s
GROUP BY modified_user_id
) ral2
ON
ral1.event_time = max_event_time AND
ral1.modified_user_id = ral2.modified_user_id
JOIN zerver_userprofile
ON
ral1.modified_user_id = zerver_userprofile.id
WHERE
ral1.event_type in ({user_created}, {user_activated}, {user_reactivated})
""").format(
**kwargs,
user_created=Literal(RealmAuditLog.USER_CREATED),
user_activated=Literal(RealmAuditLog.USER_ACTIVATED),
user_deactivated=Literal(RealmAuditLog.USER_DEACTIVATED),
user_reactivated=Literal(RealmAuditLog.USER_REACTIVATED),
realm_clause=realm_clause,
)
def check_useractivityinterval_by_user_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("zerver_userprofile.realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_usercount
(user_id, realm_id, value, property, subgroup, end_time)
SELECT
zerver_userprofile.id, zerver_userprofile.realm_id, 1, %(property)s, {subgroup}, %(time_end)s
FROM zerver_userprofile
JOIN zerver_useractivityinterval
ON
zerver_userprofile.id = zerver_useractivityinterval.user_profile_id
WHERE
zerver_useractivityinterval.end >= %(time_start)s AND
{realm_clause}
zerver_useractivityinterval.start < %(time_end)s
GROUP BY zerver_userprofile.id {group_by_clause}
""").format(**kwargs, realm_clause=realm_clause)
def count_realm_active_humans_query(realm: Optional[Realm]) -> QueryFn:
if realm is None:
realm_clause = SQL("")
else:
realm_clause = SQL("realm_id = {} AND").format(Literal(realm.id))
return lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
usercount1.realm_id, count(*), %(property)s, NULL, %(time_end)s
FROM (
SELECT realm_id, user_id
FROM analytics_usercount
WHERE
property = 'active_users_audit:is_bot:day' AND
subgroup = 'false' AND
{realm_clause}
end_time = %(time_end)s
) usercount1
JOIN (
SELECT realm_id, user_id
FROM analytics_usercount
WHERE
property = '15day_actives::day' AND
{realm_clause}
end_time = %(time_end)s
) usercount2
ON
usercount1.user_id = usercount2.user_id
GROUP BY usercount1.realm_id
""").format(**kwargs, realm_clause=realm_clause)
# Currently unused and untested
count_stream_by_realm_query = lambda kwargs: SQL("""
INSERT INTO analytics_realmcount
(realm_id, value, property, subgroup, end_time)
SELECT
zerver_realm.id, count(*), %(property)s, {subgroup}, %(time_end)s
FROM zerver_realm
JOIN zerver_stream
ON
zerver_realm.id = zerver_stream.realm_id
WHERE
zerver_realm.date_created < %(time_end)s AND
zerver_stream.date_created >= %(time_start)s AND
zerver_stream.date_created < %(time_end)s
GROUP BY zerver_realm.id {group_by_clause}
""").format(**kwargs)
def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
## CountStat declarations ##
count_stats_ = [
# Messages sent stats
# Stats that count the number of messages sent in various ways.
# These are also the set of stats that read from the Message table.
CountStat('messages_sent:is_bot:hour',
sql_data_collector(UserCount, count_message_by_user_query(
realm), (UserProfile, 'is_bot')),
CountStat.HOUR),
CountStat('messages_sent:message_type:day',
sql_data_collector(
UserCount, count_message_type_by_user_query(realm), None),
CountStat.DAY),
CountStat('messages_sent:client:day',
sql_data_collector(UserCount, count_message_by_user_query(realm),
(Message, 'sending_client_id')), CountStat.DAY),
CountStat('messages_in_stream:is_bot:day',
sql_data_collector(StreamCount, count_message_by_stream_query(realm),
(UserProfile, 'is_bot')), CountStat.DAY),
# Number of users stats
# Stats that count the number of active users in the UserProfile.is_active sense.
# 'active_users_audit:is_bot:day' is the canonical record of which users were
# active on which days (in the UserProfile.is_active sense).
# Important that this stay a daily stat, so that 'realm_active_humans::day' works as expected.
CountStat('active_users_audit:is_bot:day',
sql_data_collector(UserCount, check_realmauditlog_by_user_query(
realm), (UserProfile, 'is_bot')),
CountStat.DAY),
# Important note: LoggingCountStat objects aren't passed the
# Realm argument, because by nature they have a logging
# structure, not a pull-from-database structure, so there's no
# way to compute them for a single realm after the fact (the
# use case for passing a Realm argument).
# Sanity check on 'active_users_audit:is_bot:day', and an archetype for future LoggingCountStats.
# In RealmCount, 'active_users_audit:is_bot:day' should be the partial
# sum sequence of 'active_users_log:is_bot:day', for any realm that
# started after the latter stat was introduced.
LoggingCountStat('active_users_log:is_bot:day',
RealmCount, CountStat.DAY),
# Another sanity check on 'active_users_audit:is_bot:day'. Is only an
# approximation, e.g. if a user is deactivated between the end of the
# day and when this stat is run, they won't be counted. However, is the
# simplest of the three to inspect by hand.
CountStat('active_users:is_bot:day',
sql_data_collector(RealmCount, count_user_by_realm_query(realm), (UserProfile, 'is_bot')),
CountStat.DAY, interval=TIMEDELTA_MAX),
# Messages read stats. messages_read::hour is the total
# number of messages read, whereas
# messages_read_interactions::hour tries to count the total
# number of UI interactions resulting in messages being marked
# as read (imperfect because of batching of some request
# types, but less likely to be overwhelmed by a single bulk
# operation).
LoggingCountStat('messages_read::hour', UserCount, CountStat.HOUR),
LoggingCountStat('messages_read_interactions::hour', UserCount, CountStat.HOUR),
# User activity stats
# Stats that measure user activity in the UserActivityInterval sense.
CountStat('1day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=1)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('7day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=7)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('15day_actives::day',
sql_data_collector(
UserCount, check_useractivityinterval_by_user_query(realm), None),
CountStat.DAY, interval=timedelta(days=15)-UserActivityInterval.MIN_INTERVAL_LENGTH),
CountStat('minutes_active::day', DataCollector(
UserCount, do_pull_minutes_active), CountStat.DAY),
# Rate limiting stats
# Used to limit the number of invitation emails sent by a realm
LoggingCountStat('invites_sent::day', RealmCount, CountStat.DAY),
# Dependent stats
# Must come after their dependencies.
# Canonical account of the number of active humans in a realm on each day.
DependentCountStat('realm_active_humans::day',
sql_data_collector(
RealmCount, count_realm_active_humans_query(realm), None),
CountStat.DAY,
dependencies=['active_users_audit:is_bot:day', '15day_actives::day']),
]
return OrderedDict((stat.property, stat) for stat in count_stats_)
# To avoid refactoring for now COUNT_STATS can be used as before
COUNT_STATS = get_count_stats()
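# --- Editor's illustrative sketch (not part of Zulip) ----------------------
# Minimal driver showing how the stats registered in COUNT_STATS are meant to
# be filled up to the current hour; Zulip's real entry point is a management
# command, so this is only an illustration of the API above.
from datetime import timezone

def _fill_all_count_stats_example() -> None:
    # floor_to_hour() satisfies process_count_stat's hour-boundary check, and
    # an aware UTC datetime satisfies verify_UTC().
    fill_to_time = floor_to_hour(datetime.now(tz=timezone.utc))
    for stat in COUNT_STATS.values():
        process_count_stat(stat, fill_to_time)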
| showell/zulip | analytics/lib/counts.py | Python | apache-2.0 | 29,578 | 0.003719 |
'''
Testing class for database API's course related functions.
Authors: Ari Kairala, Petteri Ponsimaa
Originally adapted from Ivan's exercise 1 test class.
'''
import unittest, hashlib
import re, base64, copy, json, server
from database_api_test_common import BaseTestCase, db
from flask import json, jsonify
from exam_archive import ExamDatabaseErrorNotFound, ExamDatabaseErrorExists
from unittest import TestCase
from resources_common import COLLECTIONJSON, PROBLEMJSON, COURSE_PROFILE, API_VERSION
class RestCourseTestCase(BaseTestCase):
'''
RestCourseTestCase contains course related unit tests of the database API.
'''
# List of user credentials in exam_archive_data_dump.sql for testing purposes
super_user = "bigboss"
super_pw = hashlib.sha256("ultimatepw").hexdigest()
admin_user = "antti.admin"
admin_pw = hashlib.sha256("qwerty1234").hexdigest()
basic_user = "testuser"
basic_pw = hashlib.sha256("testuser").hexdigest()
wrong_pw = "wrong-pw"
test_course_template_1 = {"template": {
"data": [
{"name": "archiveId", "value": 1},
{"name": "courseCode", "value": "810136P"},
{"name": "name", "value": "Johdatus tietojenk\u00e4sittelytieteisiin"},
{"name": "description", "value": "Lorem ipsum"},
{"name": "inLanguage", "value": "fi"},
{"name": "creditPoints", "value": 4},
{"name": "teacherId", "value": 1}]
}
}
test_course_template_2 = {"template": {
"data": [
{"name": "archiveId", "value": 1},
{"name": "courseCode", "value": "810137P"},
{"name": "name", "value": "Introduction to Information Processing Sciences"},
{"name": "description", "value": "Aaa Bbbb"},
{"name": "inLanguage", "value": "en"},
{"name": "creditPoints", "value": 5},
{"name": "teacherId", "value": 2}]
}
}
course_resource_url = '/exam_archive/api/archives/1/courses/1/'
course_resource_not_allowed_url = '/exam_archive/api/archives/2/courses/1/'
courselist_resource_url = '/exam_archive/api/archives/1/courses/'
# Set a ready header for authorized admin user
header_auth = {'Authorization': 'Basic ' + base64.b64encode(super_user + ":" + super_pw)}
# Define a list of the sample contents of the database, so we can later compare it to the test results
@classmethod
def setUpClass(cls):
print "Testing ", cls.__name__
def test_user_not_authorized(self):
'''
Check that the user is not able to get the course list without authenticating.
'''
print '(' + self.test_user_not_authorized.__name__ + ')', \
self.test_user_not_authorized.__doc__
# Test CourseList/GET
rv = self.app.get(self.courselist_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Test CourseList/POST
rv = self.app.post(self.courselist_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Test Course/GET
rv = self.app.get(self.course_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Test Course/PUT
rv = self.app.put(self.course_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Test Course/DELETE
rv = self.app.put(self.course_resource_url)
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Try to Course/POST when not admin or super user
rv = self.app.post(self.courselist_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.basic_user + ":" + self.basic_pw)})
self.assertEquals(rv.status_code,403)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Try to delete course, when not admin or super user
rv = self.app.delete(self.course_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.basic_user + ":" + self.basic_pw)})
self.assertEquals(rv.status_code,403)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Try to get Course list as basic user from unallowed archive
rv = self.app.get(self.course_resource_not_allowed_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.basic_user + ":" + self.basic_pw)})
self.assertEquals(rv.status_code,403)
self.assertEquals(PROBLEMJSON,rv.mimetype)
# Try to get Course list as super user with wrong password
rv = self.app.get(self.courselist_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.super_user + ":" + self.wrong_pw)})
self.assertEquals(rv.status_code,401)
self.assertEquals(PROBLEMJSON,rv.mimetype)
def test_user_authorized(self):
'''
Check that authenticated user is able to get course list.
'''
print '(' + self.test_user_authorized.__name__ + ')', \
self.test_user_authorized.__doc__
# Try to get Course list as basic user from the correct archive
rv = self.app.get(self.course_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.basic_user + ":" + self.basic_pw)})
self.assertEquals(rv.status_code,200)
self.assertEquals(COLLECTIONJSON+";"+COURSE_PROFILE,rv.content_type)
# User authorized as super user
rv = self.app.get(self.courselist_resource_url, headers={'Authorization': 'Basic ' + \
base64.b64encode(self.super_user + ":" + self.super_pw)})
self.assertEquals(rv.status_code,200)
self.assertEquals(COLLECTIONJSON+";"+COURSE_PROFILE,rv.content_type)
def test_course_get(self):
'''
Check data consistency of Course/GET and CourseList/GET.
'''
print '(' + self.test_course_get.__name__ + ')', \
self.test_course_get.__doc__
# Test CourseList/GET
self._course_get(self.courselist_resource_url)
# Test single course Course/GET
self._course_get(self.course_resource_url)
def _course_get(self, resource_url):
'''
Check data consistency of CourseList/GET.
'''
# Get all the courses from database
courses = db.browse_courses(1)
# Get all the courses from API
rv = self.app.get(resource_url, headers=self.header_auth)
self.assertEquals(rv.status_code,200)
self.assertEquals(COLLECTIONJSON+";"+COURSE_PROFILE,rv.content_type)
input = json.loads(rv.data)
assert input
# Go through the data
data = input['collection']
items = data['items']
self.assertEquals(data['href'], resource_url)
self.assertEquals(data['version'], API_VERSION)
for item in items:
obj = self._create_dict(item['data'])
course = db.get_course(obj['courseId'])
assert self._isIdentical(obj, course)
def test_course_post(self):
'''
Check that a new course can be created.
'''
print '(' + self.test_course_post.__name__ + ')', \
self.test_course_post.__doc__
resource_url = self.courselist_resource_url
new_course = self.test_course_template_1.copy()
# Test CourseList/POST
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(new_course))
self.assertEquals(rv.status_code,201)
# Post returns the address of newly created resource URL in header, in 'location'. Get the identifier of
# the just created item, fetch it from database and compare.
location = rv.location
location_match = re.match('.*courses/([^/]+)/', location)
self.assertIsNotNone(location_match)
new_id = location_match.group(1)
# Fetch the item from database and set it to course_in_db, and convert the filled post template data above to
# similar format by replacing the keys with post data attributes.
course_in_db = db.get_course(new_id)
course_posted = self._convert(new_course)
# Compare the data in database and the post template above.
self.assertDictContainsSubset(course_posted, course_in_db)
# Next, try to add the same course twice - there should be conflict
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(new_course))
self.assertEquals(rv.status_code,409)
# Next check that by posting invalid JSON data we get status code 415
invalid_json = "INVALID " + json.dumps(new_course)
rv = self.app.post(resource_url, headers=self.header_auth, data=invalid_json)
self.assertEquals(rv.status_code,415)
# Check that template structure is validated
invalid_json = json.dumps(new_course['template'])
rv = self.app.post(resource_url, headers=self.header_auth, data=invalid_json)
self.assertEquals(rv.status_code,400)
# Check for the missing required field by removing the third row in array (course name)
invalid_template = copy.deepcopy(new_course)
invalid_template['template']['data'].pop(2)
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(invalid_template))
self.assertEquals(rv.status_code,400)
# Lastly, delete the item
rv = self.app.delete(location, headers=self.header_auth)
self.assertEquals(rv.status_code,204)
def test_course_put(self):
'''
Check that an existing course can be modified.
'''
print '(' + self.test_course_put.__name__ + ')', \
self.test_course_put.__doc__
resource_url = self.courselist_resource_url
new_course = self.test_course_template_1
edited_course = self.test_course_template_2
# First create the course
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(new_course))
self.assertEquals(rv.status_code,201)
location = rv.location
self.assertIsNotNone(location)
# Then try to edit the course
rv = self.app.put(location, headers=self.header_auth, data=json.dumps(edited_course))
self.assertEquals(rv.status_code,200)
location = rv.location
self.assertIsNotNone(location)
# Put returns the address of newly created resource URL in header, in 'location'. Get the identifier of
# the just created item, fetch it from database and compare.
location = rv.location
location_match = re.match('.*courses/([^/]+)/', location)
self.assertIsNotNone(location_match)
new_id = location_match.group(1)
# Fetch the item from database and set it to course_in_db, and convert the filled post template data above to
# similar format by replacing the keys with post data attributes.
course_in_db = db.get_course(new_id)
course_posted = self._convert(edited_course)
# Compare the data in database and the post template above.
self.assertDictContainsSubset(course_posted, course_in_db)
# Next check that by posting invalid JSON data we get status code 415
invalid_json = "INVALID " + json.dumps(new_course)
rv = self.app.put(location, headers=self.header_auth, data=invalid_json)
self.assertEquals(rv.status_code,415)
# Check that template structure is validated
invalid_json = json.dumps(new_course['template'])
rv = self.app.put(location, headers=self.header_auth, data=invalid_json)
self.assertEquals(rv.status_code,400)
# Lastly, we delete the course
rv = self.app.delete(location, headers=self.header_auth)
self.assertEquals(rv.status_code,204)
def test_course_delete(self):
'''
Check that an existing course can be deleted.
'''
print '(' + self.test_course_delete.__name__ + ')', \
self.test_course_delete.__doc__
# First create the course
resource_url = self.courselist_resource_url
rv = self.app.post(resource_url, headers=self.header_auth, data=json.dumps(self.test_course_template_2))
self.assertEquals(rv.status_code,201)
location = rv.location
self.assertIsNotNone(location)
# Get the identifier of the just created item, fetch it from database and compare.
location = rv.location
location_match = re.match('.*courses/([^/]+)/', location)
self.assertIsNotNone(location_match)
new_id = location_match.group(1)
# Then, we delete the course
rv = self.app.delete(location, headers=self.header_auth)
self.assertEquals(rv.status_code,204)
# Try to fetch the deleted course from database - expect to fail
self.assertIsNone(db.get_course(new_id))
def test_for_method_not_allowed(self):
'''
Check that unsupported methods return 405, method not allowed.
'''
print '(' + self.test_for_method_not_allowed.__name__ + ')', \
self.test_for_method_not_allowed.__doc__
# CourseList/PUT should not exist
rv = self.app.put(self.courselist_resource_url, headers=self.header_auth)
self.assertEquals(rv.status_code,405)
# CourseList/DELETE should not exist
rv = self.app.delete(self.courselist_resource_url, headers=self.header_auth)
self.assertEquals(rv.status_code,405)
# Course/POST should not exist
rv = self.app.post(self.course_resource_url, headers=self.header_auth)
self.assertEquals(rv.status_code,405)
def _isIdentical(self, api_item, db_item):
'''
Check whether template data corresponds to data stored in the database.
'''
return api_item['courseId'] == db_item['course_id'] and \
api_item['name'] == db_item['course_name'] and \
api_item['archiveId'] == db_item['archive_id'] and \
api_item['description'] == db_item['description'] and \
api_item['inLanguage'] == db_item['language_id'] and \
api_item['creditPoints'] == db_item['credit_points'] and \
api_item['courseCode'] == db_item['course_code']
def _convert(self, template_data):
'''
Convert template data to a dictionary representing the format the data is saved in the database.
'''
trans_table = {"name":"course_name", "url":"url", "archiveId":"archive_id", "courseCode":"course_code",
"dateModified": "modified_date", "modifierId":"modifier_id", "courseId":"course_id",
"description":"description", "inLanguage":"language_id", "creditPoints":"credit_points",
"teacherId":"teacher_id", "teacherName":"teacher_name"}
data = self._create_dict(template_data['template']['data'])
db_item = {}
for key, val in data.items():
db_item[trans_table[key]] = val
return db_item
def _create_dict(self,item):
'''
Create a dictionary from template data for easier handling.
'''
dict = {}
for f in item:
dict[f['name']] = f['value']
return dict
if __name__ == '__main__':
print 'Start running tests'
unittest.main()
| petterip/exam-archive | test/rest_api_test_course.py | Python | mit | 16,344 | 0.006975 |
import time
seen = set()
import_order = []
elapsed_times = {}
level = 0
parent = None
children = {}
def new_import(name, globals={}, locals={}, fromlist=[]):
global level, parent
if name in seen:
return old_import(name, globals, locals, fromlist)
seen.add(name)
import_order.append((name, level, parent))
t1 = time.time()
old_parent = parent
parent = name
level += 1
module = old_import(name, globals, locals, fromlist)
level -= 1
parent = old_parent
t2 = time.time()
elapsed_times[name] = t2-t1
return module
old_import = __builtins__.__import__
__builtins__.__import__ = new_import
from sympy import *
parents = {}
is_parent = {}
for name, level, parent in import_order:
parents[name] = parent
is_parent[parent] = True
print "== Tree =="
for name, level, parent in import_order:
print "%s%s: %.3f (%s)" % (" "*level, name, elapsed_times.get(name,0),
parent)
print "\n"
print "== Slowest (including children) =="
slowest = sorted((t, name) for (name, t) in elapsed_times.items())[-50:]
for elapsed_time, name in slowest[::-1]:
print "%.3f %s (%s)" % (elapsed_time, name, parents[name])
| hazelnusse/sympy-old | bin/sympy_time.py | Python | bsd-3-clause | 1,207 | 0.023198 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class DummyOperator(BaseOperator):
"""
Operator that does literally nothing. It can be used to group tasks in a
DAG.
"""
ui_color = '#e8f7e4'
@apply_defaults
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
def execute(self, context):
pass
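# --- Editor's illustrative sketch (not part of Airflow) --------------------
# Hypothetical DAG using DummyOperator as start/end anchors for a small
# fan-out; kept under a __main__ guard so importing this module is unaffected.
if __name__ == "__main__":
    from datetime import datetime
    from airflow import DAG

    with DAG("dummy_operator_example", start_date=datetime(2020, 1, 1),
             schedule_interval=None) as dag:
        start = DummyOperator(task_id="start")
        branches = [DummyOperator(task_id="branch_%d" % i) for i in range(3)]
        end = DummyOperator(task_id="end")
        start >> branches >> end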
| wileeam/airflow | airflow/operators/dummy_operator.py | Python | apache-2.0 | 1,203 | 0 |
#!/usr/bin/python
'''
Example of a ZMQ client.
Can be used to record test data on a
remote PC.
Nacho Mas January-2017
'''
import sys
import zmq
import time
import json
from config import *
# Socket to talk to server
context = zmq.Context()
socket = context.socket(zmq.SUB)
#socket.setsockopt(zmq.CONFLATE, 1)
socket.connect ("tcp://cronostamper:%s" % zmqShutterPort)
topicfilter = ShutterFlange
socket.setsockopt(zmq.SUBSCRIBE, topicfilter)
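# --- Editor's illustrative sketch (not part of the original project) -------
# Matching publisher side, using plain pyzmq instead of the project's own
# mogrify() helper; it frames messages as "topic json" so the prefix-based
# SUBSCRIBE filter above and demogrify() can presumably split them apart.
def example_publisher():
    pub_ctx = zmq.Context()
    pub = pub_ctx.socket(zmq.PUB)
    pub.bind("tcp://*:%s" % zmqShutterPort)
    while True:
        pub.send("%s %s" % (ShutterFlange, json.dumps({'unixUTC': time.time()})))
        time.sleep(1)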
# Process
while True:
topic, msg = demogrify(socket.recv())
print "%f" % msg['unixUTC']
#time.sleep(5)
| nachoplus/cronoStamper | zmqClient.py | Python | gpl-2.0 | 568 | 0.021127 |
import asyncio
import errno
import json
import logging
import os
import stat
import sys
from functools import partial
from pathlib import Path
from platform import system
from shutil import rmtree, which
from subprocess import CalledProcessError
from sys import version_info
from tempfile import TemporaryDirectory
from typing import (
Any,
Callable,
Dict,
List,
NamedTuple,
Optional,
Sequence,
Tuple,
Union,
)
from urllib.parse import urlparse
import click
TEN_MINUTES_SECONDS = 600
WINDOWS = system() == "Windows"
BLACK_BINARY = "black.exe" if WINDOWS else "black"
GIT_BINARY = "git.exe" if WINDOWS else "git"
LOG = logging.getLogger(__name__)
# Windows needs a ProactorEventLoop if you want to exec subprocesses
# Starting with 3.8 this is the default - can remove when Black >= 3.8
# mypy only respects sys.platform if directly in the evaluation
# https://mypy.readthedocs.io/en/latest/common_issues.html#python-version-and-system-platform-checks # noqa: B950
if sys.platform == "win32":
asyncio.set_event_loop(asyncio.ProactorEventLoop())
class Results(NamedTuple):
stats: Dict[str, int] = {}
failed_projects: Dict[str, CalledProcessError] = {}
async def _gen_check_output(
cmd: Sequence[str],
timeout: float = TEN_MINUTES_SECONDS,
env: Optional[Dict[str, str]] = None,
cwd: Optional[Path] = None,
stdin: Optional[bytes] = None,
) -> Tuple[bytes, bytes]:
process = await asyncio.create_subprocess_exec(
*cmd,
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.STDOUT,
env=env,
cwd=cwd,
)
try:
(stdout, stderr) = await asyncio.wait_for(process.communicate(stdin), timeout)
except asyncio.TimeoutError:
process.kill()
await process.wait()
raise
# A non-optional timeout was supplied to asyncio.wait_for, guaranteeing
# a timeout or completed process. A terminated Python process will have a
# non-empty returncode value.
assert process.returncode is not None
if process.returncode != 0:
cmd_str = " ".join(cmd)
raise CalledProcessError(
process.returncode, cmd_str, output=stdout, stderr=stderr
)
return (stdout, stderr)
def analyze_results(project_count: int, results: Results) -> int:
failed_pct = round(((results.stats["failed"] / project_count) * 100), 2)
success_pct = round(((results.stats["success"] / project_count) * 100), 2)
if results.failed_projects:
click.secho("\nFailed projects:\n", bold=True)
for project_name, project_cpe in results.failed_projects.items():
print(f"## {project_name}:")
print(f" - Returned {project_cpe.returncode}")
if project_cpe.stderr:
print(f" - stderr:\n{project_cpe.stderr.decode('utf8')}")
if project_cpe.stdout:
print(f" - stdout:\n{project_cpe.stdout.decode('utf8')}")
print("")
click.secho("-- primer results 📊 --\n", bold=True)
click.secho(
f"{results.stats['success']} / {project_count} succeeded ({success_pct}%) ✅",
bold=True,
fg="green",
)
click.secho(
f"{results.stats['failed']} / {project_count} FAILED ({failed_pct}%) 💩",
bold=bool(results.stats["failed"]),
fg="red",
)
s = "" if results.stats["disabled"] == 1 else "s"
click.echo(f" - {results.stats['disabled']} project{s} disabled by config")
s = "" if results.stats["wrong_py_ver"] == 1 else "s"
click.echo(
f" - {results.stats['wrong_py_ver']} project{s} skipped due to Python version"
)
click.echo(
f" - {results.stats['skipped_long_checkout']} skipped due to long checkout"
)
if results.failed_projects:
failed = ", ".join(results.failed_projects.keys())
click.secho(f"\nFailed projects: {failed}\n", bold=True)
return results.stats["failed"]
def _flatten_cli_args(cli_args: List[Union[Sequence[str], str]]) -> List[str]:
"""Allow a user to put long arguments into a list of strs
to make the JSON human readable"""
flat_args = []
for arg in cli_args:
if isinstance(arg, str):
flat_args.append(arg)
continue
args_as_str = "".join(arg)
flat_args.append(args_as_str)
return flat_args
async def black_run(
project_name: str,
repo_path: Optional[Path],
project_config: Dict[str, Any],
results: Results,
no_diff: bool = False,
) -> None:
"""Run Black and record failures"""
if not repo_path:
results.stats["failed"] += 1
results.failed_projects[project_name] = CalledProcessError(
69, [], f"{project_name} has no repo_path: {repo_path}".encode(), b""
)
return
stdin_test = project_name.upper() == "STDIN"
cmd = [str(which(BLACK_BINARY))]
if "cli_arguments" in project_config and project_config["cli_arguments"]:
cmd.extend(_flatten_cli_args(project_config["cli_arguments"]))
cmd.append("--check")
if not no_diff:
cmd.append("--diff")
# Work out if we should read in a python file or search from cwd
stdin = None
if stdin_test:
cmd.append("-")
stdin = repo_path.read_bytes()
elif "base_path" in project_config:
cmd.append(project_config["base_path"])
else:
cmd.append(".")
timeout = (
project_config["timeout_seconds"]
if "timeout_seconds" in project_config
else TEN_MINUTES_SECONDS
)
with TemporaryDirectory() as tmp_path:
# Prevent reading top-level user configs by manipulating environment variables
env = {
**os.environ,
"XDG_CONFIG_HOME": tmp_path, # Unix-like
"USERPROFILE": tmp_path, # Windows (changes `Path.home()` output)
}
cwd_path = repo_path.parent if stdin_test else repo_path
try:
LOG.debug(f"Running black for {project_name}: {' '.join(cmd)}")
_stdout, _stderr = await _gen_check_output(
cmd, cwd=cwd_path, env=env, stdin=stdin, timeout=timeout
)
except asyncio.TimeoutError:
results.stats["failed"] += 1
LOG.error(f"Running black for {repo_path} timed out ({cmd})")
except CalledProcessError as cpe:
# TODO: Tune for smarter for higher signal
# If any other return value than 1 we raise - can disable project in config
if cpe.returncode == 1:
if not project_config["expect_formatting_changes"]:
results.stats["failed"] += 1
results.failed_projects[repo_path.name] = cpe
else:
results.stats["success"] += 1
return
elif cpe.returncode > 1:
results.stats["failed"] += 1
results.failed_projects[repo_path.name] = cpe
return
LOG.error(f"Unknown error with {repo_path}")
raise
# If we get here and expect formatting changes something is up
if project_config["expect_formatting_changes"]:
results.stats["failed"] += 1
results.failed_projects[repo_path.name] = CalledProcessError(
0, cmd, b"Expected formatting changes but didn't get any!", b""
)
return
results.stats["success"] += 1
async def git_checkout_or_rebase(
work_path: Path,
project_config: Dict[str, Any],
rebase: bool = False,
*,
depth: int = 1,
) -> Optional[Path]:
"""git Clone project or rebase"""
git_bin = str(which(GIT_BINARY))
if not git_bin:
LOG.error("No git binary found")
return None
repo_url_parts = urlparse(project_config["git_clone_url"])
path_parts = repo_url_parts.path[1:].split("/", maxsplit=1)
repo_path: Path = work_path / path_parts[1].replace(".git", "")
cmd = [git_bin, "clone", "--depth", str(depth), project_config["git_clone_url"]]
cwd = work_path
if repo_path.exists() and rebase:
cmd = [git_bin, "pull", "--rebase"]
cwd = repo_path
elif repo_path.exists():
return repo_path
try:
_stdout, _stderr = await _gen_check_output(cmd, cwd=cwd)
except (asyncio.TimeoutError, CalledProcessError) as e:
LOG.error(f"Unable to git clone / pull {project_config['git_clone_url']}: {e}")
return None
return repo_path
def handle_PermissionError(
func: Callable[..., None], path: Path, exc: Tuple[Any, Any, Any]
) -> None:
"""
Handle PermissionError during shutil.rmtree.
This checks if the erroring function is either 'os.rmdir' or 'os.unlink', and that
the error was EACCES (i.e. Permission denied). If true, the path is set writable,
readable, and executable by everyone. Finally, it tries the error causing delete
operation again.
If the check is false, then the original error will be reraised as this function
can't handle it.
"""
excvalue = exc[1]
LOG.debug(f"Handling {excvalue} from {func.__name__}... ")
if func in (os.rmdir, os.unlink) and excvalue.errno == errno.EACCES:
LOG.debug(f"Setting {path} writable, readable, and executable by everyone... ")
os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO) # chmod 0777
func(path) # Try the error causing delete operation again
else:
raise
async def load_projects_queue(
config_path: Path,
projects_to_run: List[str],
) -> Tuple[Dict[str, Any], asyncio.Queue]:
"""Load project config and fill queue with all the project names"""
with config_path.open("r") as cfp:
config = json.load(cfp)
# TODO: Offer more options here
# e.g. Run on X random packages etc.
queue: asyncio.Queue = asyncio.Queue(maxsize=len(projects_to_run))
for project in projects_to_run:
await queue.put(project)
return config, queue
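# --- Editor's illustrative sketch (not part of Black) ----------------------
# Hypothetical shape of one entry in the JSON config read above, using only
# keys this module actually consumes; real primer configs live in the Black
# repository.
_EXAMPLE_PRIMER_CONFIG = {
    "projects": {
        "example_project": {
            "git_clone_url": "https://github.com/example/example_project.git",
            "expect_formatting_changes": False,
            "py_versions": ["all"],
            "long_checkout": False,
            "cli_arguments": ["--line-length", "88"],
            "timeout_seconds": 600,
        },
    },
}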
async def project_runner(
idx: int,
config: Dict[str, Any],
queue: asyncio.Queue,
work_path: Path,
results: Results,
long_checkouts: bool = False,
rebase: bool = False,
keep: bool = False,
no_diff: bool = False,
) -> None:
"""Check out project and run Black on it + record result"""
loop = asyncio.get_event_loop()
py_version = f"{version_info[0]}.{version_info[1]}"
while True:
try:
project_name = queue.get_nowait()
except asyncio.QueueEmpty:
LOG.debug(f"project_runner {idx} exiting")
return
LOG.debug(f"worker {idx} working on {project_name}")
project_config = config["projects"][project_name]
# Check if disabled by config
if "disabled" in project_config and project_config["disabled"]:
results.stats["disabled"] += 1
LOG.info(f"Skipping {project_name} as it's disabled via config")
continue
# Check if we should run on this version of Python
if (
"all" not in project_config["py_versions"]
and py_version not in project_config["py_versions"]
):
results.stats["wrong_py_ver"] += 1
LOG.debug(f"Skipping {project_name} as it's not enabled for {py_version}")
continue
# Check if we're doing big projects / long checkouts
if not long_checkouts and project_config["long_checkout"]:
results.stats["skipped_long_checkout"] += 1
LOG.debug(f"Skipping {project_name} as it's configured as a long checkout")
continue
repo_path: Optional[Path] = Path(__file__)
stdin_project = project_name.upper() == "STDIN"
if not stdin_project:
repo_path = await git_checkout_or_rebase(work_path, project_config, rebase)
if not repo_path:
continue
await black_run(project_name, repo_path, project_config, results, no_diff)
if not keep and not stdin_project:
LOG.debug(f"Removing {repo_path}")
rmtree_partial = partial(
rmtree, path=repo_path, onerror=handle_PermissionError
)
await loop.run_in_executor(None, rmtree_partial)
LOG.info(f"Finished {project_name}")
async def process_queue(
config_file: str,
work_path: Path,
workers: int,
projects_to_run: List[str],
keep: bool = False,
long_checkouts: bool = False,
rebase: bool = False,
no_diff: bool = False,
) -> int:
"""
Process the queue with X workers and evaluate results
- Success is gauged via the config "expect_formatting_changes"
Integer return equals the number of failed projects
"""
results = Results()
results.stats["disabled"] = 0
results.stats["failed"] = 0
results.stats["skipped_long_checkout"] = 0
results.stats["success"] = 0
results.stats["wrong_py_ver"] = 0
config, queue = await load_projects_queue(Path(config_file), projects_to_run)
project_count = queue.qsize()
s = "" if project_count == 1 else "s"
LOG.info(f"{project_count} project{s} to run Black over")
if project_count < 1:
return -1
s = "" if workers == 1 else "s"
LOG.debug(f"Using {workers} parallel worker{s} to run Black")
# Wait until we finish running all the projects before analyzing
await asyncio.gather(
*[
project_runner(
i,
config,
queue,
work_path,
results,
long_checkouts,
rebase,
keep,
no_diff,
)
for i in range(workers)
]
)
LOG.info("Analyzing results")
return analyze_results(project_count, results)
if __name__ == "__main__": # pragma: nocover
raise NotImplementedError("lib is a library, funnily enough.")
| psf/black | src/black_primer/lib.py | Python | mit | 13,941 | 0.001507 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for testing `OperatorPDBase` and related classes."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import numpy as np
import six
import tensorflow as tf
@six.add_metaclass(abc.ABCMeta) # pylint: disable=no-init
class OperatorPDDerivedClassTest(tf.test.TestCase):
"""Tests for derived classes.
Subclasses should implement every abstractmethod, and this will enable all
test methods to work.
"""
def setUp(self):
self._rng = np.random.RandomState(42)
def _compare_results(
self, expected, actual, static_shapes=True, atol=1e-5):
"""Compare expected value (array) to the actual value (Tensor)."""
if static_shapes:
self.assertEqual(expected.shape, actual.get_shape())
self.assertAllClose(expected, actual.eval(), atol=atol)
@abc.abstractmethod
def _build_operator_and_mat(self, batch_shape, k, dtype=np.float64):
"""Build a batch matrix and an Operator that should have similar behavior.
Every operator represents a (batch) matrix. This method returns both
together, and is used e.g. by tests.
Args:
batch_shape: List-like of Python integers giving batch shape of operator.
k: Python integer, the event size.
dtype: Numpy dtype. Data type of returned array/operator.
Returns:
operator: `OperatorPDBase` subclass.
mat: numpy array representing a (batch) matrix.
"""
# Create a matrix as a numpy array. Shape = batch_shape + [k, k].
# Create an OperatorPDDiag that should have the same behavior as the matrix.
    # All arguments are convertible to numpy arrays.
#
batch_shape = list(batch_shape)
mat_shape = batch_shape + [k, k]
# return operator, mat
raise NotImplementedError("Not implemented yet.")
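    # Hedged sketch of what a concrete subclass might return; the
    # OperatorPDSqrtDiag name is only an assumption drawn from the surrounding
    # contrib package, and any operator whose dense form equals `mat` would do:
    #
    #   def _build_operator_and_mat(self, batch_shape, k, dtype=np.float64):
    #     diag = self._rng.rand(*(list(batch_shape) + [k])).astype(dtype) + 1.0
    #     operator = operator_pd_diag.OperatorPDSqrtDiag(diag)
    #     mat = (diag[..., np.newaxis] ** 2) * np.eye(k, dtype=dtype)
    #     return operator, mat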
def testToDense(self):
with self.test_session():
for batch_shape in [(), (2, 3,)]:
for k in [1, 4]:
for dtype in [np.float32, np.float64]:
operator, mat = self._build_operator_and_mat(
batch_shape, k, dtype=dtype)
self._compare_results(
expected=mat,
actual=operator.to_dense())
def testSqrtToDense(self):
with self.test_session():
for batch_shape in [(), (2, 3,)]:
for k in [1, 4]:
operator, mat = self._build_operator_and_mat(batch_shape, k)
sqrt = operator.sqrt_to_dense()
self.assertEqual(mat.shape, sqrt.get_shape())
# Square roots are not unique, but SS^T should equal mat. In this
# case however, we should have S = S^T.
self._compare_results(
expected=mat,
actual=tf.batch_matmul(sqrt, sqrt))
def testDeterminants(self):
with self.test_session():
for batch_shape in [(), (2, 3,)]:
for k in [1, 4]:
operator, mat = self._build_operator_and_mat(batch_shape, k)
expected_det = tf.matrix_determinant(mat).eval()
self._compare_results(expected_det, operator.det())
self._compare_results(np.log(expected_det), operator.log_det())
def testMatmul(self):
with self.test_session():
for batch_shape in [(), (2, 3,)]:
for k in [1, 4]:
operator, mat = self._build_operator_and_mat(batch_shape, k)
# Work with 5 simultaneous systems. 5 is arbitrary.
x = self._rng.randn(*(batch_shape + (k, 5)))
self._compare_results(
expected=tf.batch_matmul(mat, x).eval(),
actual=operator.matmul(x))
def testSqrtMatmul(self):
# Square roots are not unique, but we should have SS^T x = Ax, and in our
# case, we should have S = S^T, so SSx = Ax.
with self.test_session():
for batch_shape in [(), (2, 3,)]:
for k in [1, 4]:
operator, mat = self._build_operator_and_mat(batch_shape, k)
# Work with 5 simultaneous systems. 5 is arbitrary.
x = self._rng.randn(*(batch_shape + (k, 5)))
self._compare_results(
expected=tf.batch_matmul(mat, x).eval(),
actual=operator.sqrt_matmul(operator.sqrt_matmul(x)))
def testSolve(self):
with self.test_session():
for batch_shape in [(), (2, 3,)]:
for k in [1, 4]:
operator, mat = self._build_operator_and_mat(batch_shape, k)
# Work with 5 simultaneous systems. 5 is arbitrary.
x = self._rng.randn(*(batch_shape + (k, 5)))
self._compare_results(
expected=tf.matrix_solve(mat, x).eval(), actual=operator.solve(x))
def testSqrtSolve(self):
# Square roots are not unique, but we should still have
# S^{-T} S^{-1} x = A^{-1} x.
# In our case, we should have S = S^T, so then S^{-1} S^{-1} x = A^{-1} x.
with self.test_session():
for batch_shape in [(), (2, 3,)]:
for k in [1, 4]:
operator, mat = self._build_operator_and_mat(batch_shape, k)
# Work with 5 simultaneous systems. 5 is arbitrary.
x = self._rng.randn(*(batch_shape + (k, 5)))
self._compare_results(
expected=tf.matrix_solve(mat, x).eval(),
actual=operator.sqrt_solve(operator.sqrt_solve(x)))
def testAddToTensor(self):
with self.test_session():
for batch_shape in [(), (2, 3,)]:
for k in [1, 4]:
operator, mat = self._build_operator_and_mat(batch_shape, k)
tensor = tf.ones_like(mat)
self._compare_results(
expected=(mat + tensor).eval(),
actual=operator.add_to_tensor(tensor))
|
cg31/tensorflow
|
tensorflow/contrib/distributions/python/ops/operator_test_util.py
|
Python
|
apache-2.0
| 6,295 | 0.008896 |
#!/F3/core/tweet_model.py
# A class for representing a tweet.
# Author : Ismail Sunni/@ismailsunni
# Created : 2012-03-30
from db_control import db_conn
from datetime import datetime, timedelta
import preprocess as pp
class tweet_model:
	'''A class for representing a tweet.'''
def __init__(self, id, time, text, sentiment = 0, negation = 0):
		'''Standard __init__ function'''
self.id = id
self.time = time
self.text = text
self.negation = negation
self.sentiment = sentiment
self.parsed_word = []
self.parsed = False
self.post_parsed_word = []
		self.post_parsed = False # this attribute indicates that the parsed_word has been preprocessed again
def print_tweet(self):
'''Print procedure'''
import unicodedata
print unicodedata.normalize('NFKD', self.text.decode('latin-1')).encode('ascii', 'ignore'), self.sentiment
def get_normal_text(self):
'''Return content of the tweet in normal form.'''
import unicodedata
return unicodedata.normalize('NFKD', self.text.decode('latin-1')).encode('ascii', 'ignore')
def preprocess(self, dict_param = None):
'''Preprocess a tweet and save the result in parsed_word and negation.'''
		self.negation, preprocessed_text = pp.preprocess_tweet(self.text, dict_param)
		self.parsed_word = preprocessed_text.split(' ')
		self.parsed = True
		temp_post_parsed_word = pp.postparsed_text(preprocessed_text)
self.post_parsed_word = temp_post_parsed_word.split(' ')
self.post_parsed = True
# public function
def get_dev_data():
	'''Retrieve data from the database for training and testing, as a list of tweet objects.'''
db = db_conn()
tweets = []
query = "SELECT * FROM " + db.test_table + " WHERE `dev_tweet` = 1"
retval = db.read(query)
for row in retval:
id = row[0]
time = row[2]
text = row[1]
sentiment = row[3]
negation = row[4]
tweets.append(tweet_model(id, time, text, sentiment, negation))
return tweets
def get_test_data(keyword = "", start_time = None, end_time = None):
	'''Retrieve data from the database for training and testing, as a list of tweet objects.'''
db = db_conn()
tweets = []
query = "SELECT * FROM " + db.test_table
where = " WHERE `tweet_text` LIKE '%" + keyword + "%' AND `dev_tweet` != 1"
if start_time != None:
where += " AND `created_at` >= '" + start_time.__str__() + "'"
if end_time != None:
where += " AND `created_at` <= '" + end_time.__str__() + "'"
order = " ORDER BY `created_at` ASC"
retval = db.read(query + where)
for row in retval:
id = row[0]
time = row[2]
text = row[1]
sentiment = row[3]
negation = row[4]
tweets.append(tweet_model(id, time, text, sentiment, negation))
return tweets
def get_test_data_by_duration(keyword = "", start_time = None, end_time = None, duration_hour = 1):
	'''Return test data divided into buckets of the given duration.'''
duration_second = duration_hour * 3600
delta_duration = timedelta(0, duration_second)
cur_time = start_time
retval = []
dur_times = []
while (cur_time + delta_duration < end_time):
retval.append(get_test_data(keyword, cur_time, cur_time + delta_duration))
dur_times.append(cur_time)
cur_time += delta_duration
if (cur_time < end_time):
dur_times.append(cur_time)
retval.append(get_test_data(keyword, cur_time, end_time))
return retval, dur_times
# main function for testing only
if __name__ == '__main__':
keyword = "foke"
start_time = datetime.strptime("10-4-2012 18:00:00", '%d-%m-%Y %H:%M:%S')
end_time = datetime.strptime("18-4-2012 12:00:00", '%d-%m-%Y %H:%M:%S')
duration_hour = 6
retval, dur_times = get_test_data_by_duration(keyword, start_time, end_time, duration_hour)
num_tweet = 0
for ret in retval:
print len(ret)
num_tweet += len(ret)
print num_tweet
# write in excel
from xlwt import Workbook
from tempfile import TemporaryFile
import util
book = Workbook()
try:
sheet_idx = 1
for list_tweet in retval:
activeSheet = book.add_sheet(str(sheet_idx))
activeSheet.write(0, 0, dur_times[sheet_idx - 1].__str__())
i = 1
activeSheet.write(i, 0, 'No')
activeSheet.write(i, 1, 'Tweet Id')
activeSheet.write(i, 2, 'Created')
activeSheet.write(i, 3, 'Text')
i += 1
for tweet in list_tweet:
activeSheet.write(i, 0, str(i - 1))
activeSheet.write(i, 1, str(tweet.id))
activeSheet.write(i, 2, tweet.time.__str__())
activeSheet.write(i, 3, pp.normalize_character(tweet.text))
i += 1
sheet_idx += 1
book.save('output.xls')
book.save(TemporaryFile())
except Exception, e:
util.debug(str(e))
print 'fin'
|
ismailsunni/f3-factor-finder
|
core/tweet_model.py
|
Python
|
gpl-2.0
| 4,719 | 0.042594 |
# -*- coding: utf-8 -*-
from orator.orm import Factory, Model, belongs_to, has_many
from orator.connections import SQLiteConnection
from orator.connectors import SQLiteConnector
from .. import OratorTestCase, mock
class FactoryTestCase(OratorTestCase):
@classmethod
def setUpClass(cls):
Model.set_connection_resolver(DatabaseConnectionResolver())
@classmethod
def tearDownClass(cls):
Model.unset_connection_resolver()
def connection(self):
return Model.get_connection_resolver().connection()
def schema(self):
return self.connection().get_schema_builder()
def setUp(self):
with self.schema().create("users") as table:
table.increments("id")
table.string("name").unique()
table.string("email").unique()
table.boolean("admin").default(True)
table.timestamps()
with self.schema().create("posts") as table:
table.increments("id")
table.integer("user_id")
table.string("title").unique()
table.text("content").unique()
table.timestamps()
table.foreign("user_id").references("id").on("users")
self.factory = Factory()
@self.factory.define(User)
def users_factory(faker):
return {"name": faker.name(), "email": faker.email(), "admin": False}
@self.factory.define(User, "admin")
def users_factory(faker):
attributes = self.factory.raw(User)
attributes.update({"admin": True})
return attributes
@self.factory.define(Post)
def posts_factory(faker):
return {"title": faker.sentence(), "content": faker.text()}
def tearDown(self):
self.schema().drop("posts")
self.schema().drop("users")
def test_factory_make(self):
user = self.factory.make(User)
self.assertIsInstance(user, User)
self.assertIsNotNone(user.name)
self.assertIsNotNone(user.email)
self.assertIsNone(User.where("name", user.name).first())
def test_factory_create(self):
user = self.factory.create(User)
self.assertIsInstance(user, User)
self.assertIsNotNone(user.name)
self.assertIsNotNone(user.email)
self.assertIsNotNone(User.where("name", user.name).first())
def test_factory_create_with_attributes(self):
user = self.factory.create(User, name="foo", email="foo@bar.com")
self.assertIsInstance(user, User)
self.assertEqual("foo", user.name)
self.assertEqual("foo@bar.com", user.email)
self.assertIsNotNone(User.where("name", user.name).first())
def test_factory_create_with_relations(self):
users = self.factory.build(User, 3)
users = users.create().each(lambda u: u.posts().save(self.factory.make(Post)))
self.assertEqual(3, len(users))
self.assertIsInstance(users[0], User)
self.assertEqual(3, User.count())
self.assertEqual(3, Post.count())
def test_factory_call(self):
user = self.factory(User).create()
self.assertFalse(user.admin)
users = self.factory(User, 3).create()
self.assertEqual(3, len(users))
self.assertFalse(users[0].admin)
admin = self.factory(User, "admin").create()
self.assertTrue(admin.admin)
admins = self.factory(User, "admin", 3).create()
self.assertEqual(3, len(admins))
self.assertTrue(admins[0].admin)
class User(Model):
__guarded__ = ["id"]
@has_many("user_id")
def posts(self):
return Post
class Post(Model):
__guarded__ = []
@belongs_to("user_id")
def user(self):
return User
class DatabaseConnectionResolver(object):
_connection = None
def connection(self, name=None):
if self._connection:
return self._connection
self._connection = SQLiteConnection(
SQLiteConnector().connect({"database": ":memory:"})
)
return self._connection
def get_default_connection(self):
return "default"
def set_default_connection(self, name):
pass
|
sdispater/orator
|
tests/orm/test_factory.py
|
Python
|
mit
| 4,197 | 0.000477 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
from oscdebug.tests import base
from oscdebug.v1 import auth
class TestAuthTypeShow(base.TestCommand):
def setUp(self):
super(TestAuthTypeShow, self).setUp()
# Get the command object to test
self.cmd = auth.ShowAuthType(self.app, None)
def test_auth_type_show(self):
arglist = [
'password',
]
verifylist = [
('auth_type', 'password'),
]
parsed_args = self.check_parser(self.cmd, arglist, verifylist)
# DisplayCommandBase.take_action() returns two tuples
columns, data = self.cmd.take_action(parsed_args)
collist = ('name', 'options')
self.assertEqual(collist, columns)
datalist = (
'password',
mock.ANY,
)
self.assertEqual(datalist, data)
|
dtroyer/osc-debug
|
oscdebug/tests/v1/test_auth.py
|
Python
|
apache-2.0
| 1,401 | 0 |
from UM.Scene.SceneNodeDecorator import SceneNodeDecorator
class GCodeListDecorator(SceneNodeDecorator):
def __init__(self):
super().__init__()
self._gcode_list = []
def getGCodeList(self):
return self._gcode_list
def setGCodeList(self, list):
self._gcode_list = list
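# Hedged usage sketch: addDecorator() and callDecoration() belong to Uranium's
# SceneNode API, so treat the exact call sites below as assumptions.
#
#   decorator = GCodeListDecorator()
#   decorator.setGCodeList(gcode_lines)
#   node.addDecorator(decorator)
#   gcode_lines = node.callDecoration("getGCodeList")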
|
alephobjects/Cura2
|
cura/Scene/GCodeListDecorator.py
|
Python
|
lgpl-3.0
| 316 | 0 |
from .ica import *
#from .ica_gpu import ica_gpu
|
alvarouc/ica
|
ica/__init__.py
|
Python
|
gpl-3.0
| 49 | 0.020408 |
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Tests for user handling.
"""
import tempfile
from unittest import TestCase as UnitTestCase
from django.test import TestCase
from unittest import SkipTest
from django.core.urlresolvers import reverse
from django.contrib.auth.models import AnonymousUser, User, Group
from django.core import mail
from django.test.utils import override_settings
from django.core.management import call_command
from django.http import HttpRequest, HttpResponseRedirect
from weblate.accounts.models import (
Profile,
notify_merge_failure,
notify_new_string,
notify_new_suggestion,
notify_new_comment,
notify_new_translation,
notify_new_contributor,
notify_new_language,
)
from weblate.accounts.captcha import (
hash_question, unhash_question, MathCaptcha
)
from weblate.accounts import avatar
from weblate.accounts.middleware import RequireLoginMiddleware
from weblate.accounts.models import VerifiedEmail
from weblate.trans.tests.test_views import ViewTestCase, RegistrationTestMixin
from weblate.trans.tests.utils import get_test_file
from weblate.trans.tests import OverrideSettings
from weblate.trans.models.unitdata import Suggestion, Comment
from weblate.lang.models import Language
REGISTRATION_DATA = {
'username': 'username',
'email': 'noreply@weblate.org',
'first_name': 'First Last',
'captcha_id': '00',
'captcha': '9999'
}
class RegistrationTest(TestCase, RegistrationTestMixin):
clear_cookie = False
def assert_registration(self, match=None):
url = self.assert_registration_mailbox(match)
if self.clear_cookie:
del self.client.cookies['sessionid']
# Confirm account
response = self.client.get(url, follow=True)
self.assertRedirects(
response,
reverse('password')
)
@OverrideSettings(REGISTRATION_CAPTCHA=True)
def test_register_captcha(self):
# Enable captcha
response = self.client.post(
reverse('register'),
REGISTRATION_DATA
)
self.assertContains(
response,
'Please check your math and try again.'
)
@OverrideSettings(REGISTRATION_OPEN=False)
def test_register_closed(self):
# Disable registration
response = self.client.post(
reverse('register'),
REGISTRATION_DATA
)
self.assertContains(
response,
'Sorry, but registrations on this site are disabled.'
)
@OverrideSettings(REGISTRATION_CAPTCHA=False)
def test_register(self):
# Disable captcha
response = self.client.post(
reverse('register'),
REGISTRATION_DATA
)
# Check we did succeed
self.assertRedirects(response, reverse('email-sent'))
# Confirm account
self.assert_registration()
# Set password
response = self.client.post(
reverse('password'),
{
'password1': 'password',
'password2': 'password',
}
)
self.assertRedirects(response, reverse('profile'))
# Check we can access home (was redirected to password change)
response = self.client.get(reverse('home'))
self.assertContains(response, 'First Last')
user = User.objects.get(username='username')
# Verify user is active
self.assertTrue(user.is_active)
# Verify stored first/last name
self.assertEqual(user.first_name, 'First Last')
@OverrideSettings(REGISTRATION_CAPTCHA=False)
def test_register_missing(self):
# Disable captcha
response = self.client.post(
reverse('register'),
REGISTRATION_DATA
)
# Check we did succeed
self.assertRedirects(response, reverse('email-sent'))
# Confirm account
url = self.assert_registration_mailbox()
# Remove session ID from URL
url = url.split('&id=')[0]
# Delete session ID from cookies
del self.client.cookies['sessionid']
# Confirm account
response = self.client.get(url, follow=True)
self.assertRedirects(response, reverse('login'))
self.assertContains(response, 'Failed to verify your registration')
def test_reset(self):
'''
Test for password reset.
'''
User.objects.create_user('testuser', 'test@example.com', 'x')
response = self.client.post(
reverse('password_reset'),
{
'email': 'test@example.com'
}
)
self.assertRedirects(response, reverse('email-sent'))
self.assert_registration('[Weblate] Password reset on Weblate')
def test_wrong_username(self):
data = REGISTRATION_DATA.copy()
data['username'] = ''
response = self.client.post(
reverse('register'),
data
)
self.assertContains(
response,
'This field is required.',
)
def test_wrong_mail(self):
data = REGISTRATION_DATA.copy()
data['email'] = 'x'
response = self.client.post(
reverse('register'),
data
)
self.assertContains(
response,
'Enter a valid email address.'
)
def test_spam(self):
data = REGISTRATION_DATA.copy()
data['content'] = 'x'
response = self.client.post(
reverse('register'),
data
)
self.assertContains(
response,
'Invalid value'
)
def test_add_mail(self):
# Create user
self.test_register()
mail.outbox.pop()
# Check adding email page
response = self.client.get(
reverse('email_login')
)
self.assertContains(response, 'Register email')
# Add email account
response = self.client.post(
reverse('social:complete', kwargs={'backend': 'email'}),
{'email': 'second@example.net'},
follow=True,
)
self.assertRedirects(response, reverse('email-sent'))
# Verify confirmation mail
url = self.assert_registration_mailbox()
response = self.client.get(url, follow=True)
self.assertRedirects(
response, '{0}#auth'.format(reverse('profile'))
)
# Check database models
user = User.objects.get(username='username')
self.assertEqual(
VerifiedEmail.objects.filter(social__user=user).count(), 2
)
self.assertTrue(
VerifiedEmail.objects.filter(
social__user=user, email='second@example.net'
).exists()
)
class NoCookieRegistrationTest(RegistrationTest):
clear_cookie = True
class CommandTest(TestCase):
'''
Tests for management commands.
'''
def test_createadmin(self):
call_command('createadmin')
user = User.objects.get(username='admin')
self.assertEqual(user.first_name, 'Weblate Admin')
self.assertEqual(user.last_name, '')
self.assertFalse(user.check_password('admin'))
def test_createadmin_password(self):
call_command('createadmin', password='admin')
user = User.objects.get(username='admin')
self.assertEqual(user.first_name, 'Weblate Admin')
self.assertEqual(user.last_name, '')
self.assertTrue(user.check_password('admin'))
def test_setupgroups(self):
call_command('setupgroups')
group = Group.objects.get(name='Users')
self.assertTrue(
group.permissions.filter(
codename='save_translation'
).exists()
)
call_command('setupgroups', move=True)
def test_importusers(self):
# First import
call_command('importusers', get_test_file('users.json'))
# Test that second import does not change anything
user = User.objects.get(username='weblate')
user.first_name = 'Weblate test user'
user.save()
call_command('importusers', get_test_file('users.json'))
user2 = User.objects.get(username='weblate')
self.assertEqual(user.first_name, user2.first_name)
def test_importdjangousers(self):
# First import
call_command('importusers', get_test_file('users-django.json'))
self.assertEqual(User.objects.count(), 2)
def test_userdata(self):
# Create test user
user = User.objects.create_user('testuser', 'test@example.com', 'x')
profile = Profile.objects.create(user=user)
profile.translated = 1000
profile.save()
with tempfile.NamedTemporaryFile() as output:
call_command('dumpuserdata', output.name)
call_command('importuserdata', output.name)
profile = Profile.objects.get(user__username='testuser')
self.assertEqual(profile.translated, 2000)
class ViewTest(TestCase):
'''
Test for views.
'''
def get_user(self):
user = User.objects.create_user(
username='testuser',
password='testpassword'
)
user.first_name = 'First Second'
user.email = 'noreply@weblate.org'
user.save()
Profile.objects.get_or_create(user=user)
return user
def test_contact(self):
'''
Test for contact form.
'''
# Basic get
response = self.client.get(reverse('contact'))
self.assertContains(response, 'id="id_message"')
# Sending message
response = self.client.post(
reverse('contact'),
{
'name': 'Test',
'email': 'noreply@weblate.org',
'subject': 'Message from dark side',
'message': 'Hi\n\nThis app looks really cool!',
}
)
self.assertRedirects(response, reverse('home'))
# Verify message
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] Message from dark side'
)
@OverrideSettings(OFFER_HOSTING=False)
def test_hosting_disabled(self):
'''
        Test for hosting form with disabled hosting.
'''
self.get_user()
self.client.login(username='testuser', password='testpassword')
response = self.client.get(reverse('hosting'))
self.assertRedirects(response, reverse('home'))
@OverrideSettings(OFFER_HOSTING=True)
def test_hosting(self):
'''
Test for hosting form with enabled hosting.
'''
self.get_user()
self.client.login(username='testuser', password='testpassword')
response = self.client.get(reverse('hosting'))
self.assertContains(response, 'id="id_message"')
# Sending message
response = self.client.post(
reverse('hosting'),
{
'name': 'Test',
'email': 'noreply@weblate.org',
'project': 'HOST',
'url': 'http://example.net',
'repo': 'git://github.com/nijel/weblate.git',
'mask': 'po/*.po',
'message': 'Hi\n\nI want to use it!',
}
)
self.assertRedirects(response, reverse('home'))
# Verify message
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] Hosting request for HOST'
)
def test_contact_subject(self):
# With set subject
response = self.client.get(
reverse('contact'),
{'subject': 'Weblate test message'}
)
self.assertContains(response, 'Weblate test message')
def test_contact_user(self):
self.get_user()
# Login
self.client.login(username='testuser', password='testpassword')
response = self.client.get(
reverse('contact'),
)
self.assertContains(response, 'value="First Second"')
self.assertContains(response, 'noreply@weblate.org')
def test_user(self):
'''
Test user pages.
'''
# Setup user
user = self.get_user()
# Login as user
self.client.login(username='testuser', password='testpassword')
# Get public profile
response = self.client.get(
reverse('user_page', kwargs={'user': user.username})
)
self.assertContains(response, '="/activity/')
def test_login(self):
self.get_user()
# Login
response = self.client.post(
reverse('login'),
{'username': 'testuser', 'password': 'testpassword'}
)
self.assertRedirects(response, reverse('home'))
# Login redirect
response = self.client.get(reverse('login'))
self.assertRedirects(response, reverse('profile'))
# Logout
response = self.client.get(reverse('logout'))
self.assertRedirects(response, reverse('login'))
def test_removal(self):
# Create user
self.get_user()
# Login
self.client.login(username='testuser', password='testpassword')
response = self.client.post(
reverse('remove')
)
self.assertRedirects(response, reverse('home'))
self.assertFalse(
User.objects.filter(username='testuser').exists()
)
def test_password(self):
# Create user
self.get_user()
# Login
self.client.login(username='testuser', password='testpassword')
# Change without data
response = self.client.post(
reverse('password')
)
self.assertContains(response, 'This field is required.')
# Change with wrong password
response = self.client.post(
reverse('password'),
{
'password': '123456',
'password1': '123456',
'password2': '123456'
}
)
self.assertContains(response, 'You have entered an invalid password.')
# Change
response = self.client.post(
reverse('password'),
{
'password': 'testpassword',
'password1': '123456',
'password2': '123456'
}
)
self.assertRedirects(response, reverse('profile'))
self.assertTrue(
User.objects.get(username='testuser').check_password('123456')
)
class ProfileTest(ViewTestCase):
def test_profile(self):
# Get profile page
response = self.client.get(reverse('profile'))
self.assertContains(response, 'action="/accounts/profile/"')
# Save profile
response = self.client.post(
reverse('profile'),
{
'language': 'cs',
'languages': Language.objects.get(code='cs').id,
'secondary_languages': Language.objects.get(code='cs').id,
'first_name': 'First Last',
'email': 'noreply@weblate.org',
'username': 'testik',
}
)
self.assertRedirects(response, reverse('profile'))
class NotificationTest(ViewTestCase):
def setUp(self):
super(NotificationTest, self).setUp()
self.user.email = 'noreply@weblate.org'
self.user.save()
profile = Profile.objects.get(user=self.user)
profile.subscribe_any_translation = True
profile.subscribe_new_string = True
profile.subscribe_new_suggestion = True
profile.subscribe_new_contributor = True
profile.subscribe_new_comment = True
profile.subscribe_new_language = True
profile.subscribe_merge_failure = True
profile.subscriptions.add(self.project)
profile.languages.add(
Language.objects.get(code='cs')
)
profile.save()
def second_user(self):
user = User.objects.create_user(
username='seconduser',
password='secondpassword'
)
Profile.objects.create(user=user)
return user
def test_notify_merge_failure(self):
notify_merge_failure(
self.subproject,
'Failed merge',
'Error\nstatus'
)
# Check mail (second one is for admin)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] Merge failure in Test/Test'
)
# Add project owner
self.subproject.project.owners.add(self.second_user())
notify_merge_failure(
self.subproject,
'Failed merge',
'Error\nstatus'
)
# Check mail (second one is for admin)
self.assertEqual(len(mail.outbox), 5)
def test_notify_new_string(self):
notify_new_string(self.get_translation())
# Check mail
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] New string to translate in Test/Test - Czech'
)
def test_notify_new_translation(self):
unit = self.get_unit()
unit2 = self.get_translation().unit_set.get(
source='Thank you for using Weblate.'
)
notify_new_translation(
unit,
unit2,
self.second_user()
)
# Check mail
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] New translation in Test/Test - Czech'
)
def test_notify_new_language(self):
second_user = self.second_user()
notify_new_language(
self.subproject,
Language.objects.filter(code='de'),
second_user
)
# Check mail (second one is for admin)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] New language request in Test/Test'
)
# Add project owner
self.subproject.project.owners.add(second_user)
notify_new_language(
self.subproject,
Language.objects.filter(code='de'),
second_user,
)
# Check mail (second one is for admin)
self.assertEqual(len(mail.outbox), 5)
def test_notify_new_contributor(self):
unit = self.get_unit()
notify_new_contributor(
unit,
self.second_user()
)
# Check mail
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] New contributor in Test/Test - Czech'
)
def test_notify_new_suggestion(self):
unit = self.get_unit()
notify_new_suggestion(
unit,
Suggestion.objects.create(
contentsum=unit.contentsum,
project=unit.translation.subproject.project,
language=unit.translation.language,
target='Foo'
),
self.second_user()
)
# Check mail
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] New suggestion in Test/Test - Czech'
)
def test_notify_new_comment(self):
unit = self.get_unit()
notify_new_comment(
unit,
Comment.objects.create(
contentsum=unit.contentsum,
project=unit.translation.subproject.project,
language=unit.translation.language,
comment='Foo'
),
self.second_user(),
''
)
# Check mail
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] New comment in Test/Test'
)
def test_notify_new_comment_report(self):
unit = self.get_unit()
notify_new_comment(
unit,
Comment.objects.create(
contentsum=unit.contentsum,
project=unit.translation.subproject.project,
language=None,
comment='Foo'
),
self.second_user(),
'noreply@weblate.org'
)
# Check mail
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(
mail.outbox[0].subject,
'[Weblate] New comment in Test/Test'
)
self.assertEqual(
mail.outbox[1].subject,
'[Weblate] New comment in Test/Test'
)
class CaptchaTest(UnitTestCase):
def test_decode(self):
question = '1 + 1'
timestamp = 1000
hashed = hash_question(question, timestamp)
self.assertEqual(
(question, timestamp),
unhash_question(hashed)
)
def test_tamper(self):
hashed = hash_question('', 0) + '00'
self.assertRaises(
ValueError,
unhash_question,
hashed
)
def test_invalid(self):
self.assertRaises(
ValueError,
unhash_question,
''
)
def test_object(self):
captcha = MathCaptcha('1 * 2')
self.assertFalse(
captcha.validate(1)
)
self.assertTrue(
captcha.validate(2)
)
restored = MathCaptcha.from_hash(captcha.hashed)
self.assertEqual(
captcha.question,
restored.question
)
self.assertRaises(
ValueError,
MathCaptcha.from_hash,
captcha.hashed[:40]
)
def test_generate(self):
'''
        Test generation of a captcha for every operator.
'''
captcha = MathCaptcha()
for operator in MathCaptcha.operators:
captcha.operators = (operator,)
self.assertIn(operator, captcha.generate_question())
class MiddlewareTest(TestCase):
def view_method(self):
return 'VIEW'
def test_disabled(self):
middleware = RequireLoginMiddleware()
request = HttpRequest()
self.assertIsNone(
middleware.process_view(request, self.view_method, (), {})
)
@override_settings(LOGIN_REQUIRED_URLS=(r'/project/(.*)$',))
def test_protect_project(self):
middleware = RequireLoginMiddleware()
request = HttpRequest()
request.user = User()
request.META['SERVER_NAME'] = 'server'
request.META['SERVER_PORT'] = '80'
# No protection for not protected path
self.assertIsNone(
middleware.process_view(request, self.view_method, (), {})
)
request.path = '/project/foo/'
# No protection for protected path and logged in user
self.assertIsNone(
middleware.process_view(request, self.view_method, (), {})
)
# Protection for protected path and not logged in user
request.user = AnonymousUser()
self.assertIsInstance(
middleware.process_view(request, self.view_method, (), {}),
HttpResponseRedirect
)
# No protection for login and not logged in user
request.path = '/accounts/login/'
self.assertIsNone(
middleware.process_view(request, self.view_method, (), {})
)
class AvatarTest(ViewTestCase):
def setUp(self):
super(AvatarTest, self).setUp()
self.user.email = 'test@example.com'
self.user.save()
def assert_url(self):
url = avatar.avatar_for_email(self.user.email)
self.assertEqual(
'https://seccdn.libravatar.org/avatar/'
'55502f40dc8b7c769880b10874abc9d0',
url.split('?')[0]
)
def test_avatar_for_email_own(self):
backup = avatar.HAS_LIBRAVATAR
try:
avatar.HAS_LIBRAVATAR = False
self.assert_url()
finally:
avatar.HAS_LIBRAVATAR = backup
def test_avatar_for_email_libravatar(self):
if not avatar.HAS_LIBRAVATAR:
raise SkipTest('Libravatar not installed')
self.assert_url()
def test_avatar(self):
# Real user
response = self.client.get(
reverse(
'user_avatar',
kwargs={'user': self.user.username, 'size': 32}
)
)
self.assertPNG(response)
# Test caching
response = self.client.get(
reverse(
'user_avatar',
kwargs={'user': self.user.username, 'size': 32}
)
)
self.assertPNG(response)
def test_anonymous_avatar(self):
anonymous = User.objects.get(username='anonymous')
# Anonymous user
response = self.client.get(
reverse(
'user_avatar',
kwargs={'user': anonymous.username, 'size': 32}
)
)
self.assertPNG(response)
|
electrolinux/weblate
|
weblate/accounts/tests.py
|
Python
|
gpl-3.0
| 26,044 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2007-2020 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import inspect
import itertools
import numpy as np
import pytest
from numpy.testing import assert_allclose
import hyperspy.api as hs
from hyperspy import components1d
from hyperspy.component import Component
from hyperspy.misc.test_utils import ignore_warning
from hyperspy.models.model1d import Model1D
TRUE_FALSE_2_TUPLE = [p for p in itertools.product((True, False), repeat=2)]
def get_components1d_name_list():
components1d_name_list = []
for c_name in dir(components1d):
obj = getattr(components1d, c_name)
if inspect.isclass(obj) and issubclass(obj, Component):
components1d_name_list.append(c_name)
# Remove EELSCLEdge, since it is tested elsewhere more appropriate
components1d_name_list.remove('EELSCLEdge')
return components1d_name_list
@pytest.mark.filterwarnings("ignore:invalid value encountered in true_divide:RuntimeWarning")
@pytest.mark.filterwarnings("ignore:divide by zero encountered in true_divide:RuntimeWarning")
@pytest.mark.filterwarnings("ignore:invalid value encountered in cos:RuntimeWarning")
@pytest.mark.filterwarnings("ignore:The API of the")
@pytest.mark.parametrize('component_name', get_components1d_name_list())
def test_creation_components1d(component_name):
s = hs.signals.Signal1D(np.zeros(1024))
s.axes_manager[0].offset = 100
s.axes_manager[0].scale = 0.01
kwargs = {}
if component_name == 'ScalableFixedPattern':
kwargs['signal1D'] = s
elif component_name == 'Expression':
kwargs.update({'expression': "a*x+b", "name": "linear"})
component = getattr(components1d, component_name)(**kwargs)
component.function(np.arange(1, 100))
m = s.create_model()
m.append(component)
class TestPowerLaw:
def setup_method(self, method):
s = hs.signals.Signal1D(np.zeros(1024))
s.axes_manager[0].offset = 100
s.axes_manager[0].scale = 0.01
m = s.create_model()
m.append(hs.model.components1D.PowerLaw())
m[0].A.value = 1000
m[0].r.value = 4
self.m = m
self.s = s
@pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
def test_estimate_parameters(self, only_current, binned):
self.m.signal.metadata.Signal.binned = binned
s = self.m.as_signal(parallel=False)
assert s.metadata.Signal.binned == binned
g = hs.model.components1D.PowerLaw()
g.estimate_parameters(s, None, None, only_current=only_current)
A_value = 1008.4913 if binned else 1006.4378
r_value = 4.001768 if binned else 4.001752
assert_allclose(g.A.value, A_value)
assert_allclose(g.r.value, r_value)
if only_current:
A_value, r_value = 0, 0
# Test that it all works when calling it with a different signal
s2 = hs.stack((s, s))
g.estimate_parameters(s2, None, None, only_current=only_current)
assert_allclose(g.A.map["values"][1], A_value)
assert_allclose(g.r.map["values"][1], r_value)
def test_EDS_missing_data(self):
g = hs.model.components1D.PowerLaw()
s = self.m.as_signal(parallel=False)
s2 = hs.signals.EDSTEMSpectrum(s.data)
g.estimate_parameters(s2, None, None)
def test_function_grad_cutoff(self):
pl = self.m[0]
pl.left_cutoff.value = 105.0
axis = self.s.axes_manager[0].axis
for attr in ['function', 'grad_A', 'grad_r', 'grad_origin']:
values = getattr(pl, attr)((axis))
assert_allclose(values[:501], np.zeros((501)))
assert getattr(pl, attr)((axis))[500] == 0
getattr(pl, attr)((axis))[502] > 0
def test_exception_gradient_calculation(self):
# if this doesn't warn, it means that sympy can compute the gradients
# and the power law component can be updated.
with pytest.warns(UserWarning):
hs.model.components1D.PowerLaw(compute_gradients=True)
class TestDoublePowerLaw:
def setup_method(self, method):
s = hs.signals.Signal1D(np.zeros(1024))
s.axes_manager[0].offset = 100
s.axes_manager[0].scale = 0.1
m = s.create_model()
m.append(hs.model.components1D.DoublePowerLaw())
m[0].A.value = 1000
m[0].r.value = 4
m[0].ratio.value = 200
self.m = m
@pytest.mark.parametrize(("binned"), (True, False))
def test_fit(self, binned):
self.m.signal.metadata.Signal.binned = binned
s = self.m.as_signal(parallel=False)
assert s.metadata.Signal.binned == binned
g = hs.model.components1D.DoublePowerLaw()
# Fix the ratio parameter to test the fit
g.ratio.free = False
g.ratio.value = 200
m = s.create_model()
m.append(g)
m.fit_component(g, signal_range=(None, None))
assert_allclose(g.A.value, 1000.0)
assert_allclose(g.r.value, 4.0)
assert_allclose(g.ratio.value, 200.)
class TestOffset:
def setup_method(self, method):
s = hs.signals.Signal1D(np.zeros(10))
s.axes_manager[0].scale = 0.01
m = s.create_model()
m.append(hs.model.components1D.Offset())
m[0].offset.value = 10
self.m = m
@pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
def test_estimate_parameters(self, only_current, binned):
self.m.signal.metadata.Signal.binned = binned
s = self.m.as_signal(parallel=False)
assert s.metadata.Signal.binned == binned
o = hs.model.components1D.Offset()
o.estimate_parameters(s, None, None, only_current=only_current)
assert_allclose(o.offset.value, 10)
def test_function_nd(self):
s = self.m.as_signal(parallel=False)
s = hs.stack([s] * 2)
o = hs.model.components1D.Offset()
o.estimate_parameters(s, None, None, only_current=False)
axis = s.axes_manager.signal_axes[0]
assert_allclose(o.function_nd(axis.axis), s.data)
@pytest.mark.filterwarnings("ignore:The API of the `Polynomial` component")
class TestDeprecatedPolynomial:
def setup_method(self, method):
s = hs.signals.Signal1D(np.zeros(1024))
s.axes_manager[0].offset = -5
s.axes_manager[0].scale = 0.01
m = s.create_model()
m.append(hs.model.components1D.Polynomial(order=2))
coeff_values = (0.5, 2, 3)
self.m = m
s_2d = hs.signals.Signal1D(np.arange(1000).reshape(10, 100))
self.m_2d = s_2d.create_model()
self.m_2d.append(hs.model.components1D.Polynomial(order=2))
s_3d = hs.signals.Signal1D(np.arange(1000).reshape(2, 5, 100))
self.m_3d = s_3d.create_model()
self.m_3d.append(hs.model.components1D.Polynomial(order=2))
        # if same component is passed, axes_managers get mixed up, tests
# sometimes randomly fail
for _m in [self.m, self.m_2d, self.m_3d]:
_m[0].coefficients.value = coeff_values
def test_gradient(self):
c = self.m[0]
np.testing.assert_array_almost_equal(c.grad_coefficients(1),
np.array([[6, ], [4.5], [3.5]]))
assert c.grad_coefficients(np.arange(10)).shape == (3, 10)
@pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
def test_estimate_parameters(self, only_current, binned):
self.m.signal.metadata.Signal.binned = binned
s = self.m.as_signal(parallel=False)
assert s.metadata.Signal.binned == binned
g = hs.model.components1D.Polynomial(order=2)
g.estimate_parameters(s, None, None, only_current=only_current)
assert_allclose(g.coefficients.value[0], 0.5)
assert_allclose(g.coefficients.value[1], 2)
assert_allclose(g.coefficients.value[2], 3)
def test_2d_signal(self):
        # This code should run smoothly; any exceptions should trigger failure
s = self.m_2d.as_signal(parallel=False)
model = Model1D(s)
p = hs.model.components1D.Polynomial(order=2)
model.append(p)
p.estimate_parameters(s, 0, 100, only_current=False)
np.testing.assert_allclose(p.coefficients.map['values'],
np.tile([0.5, 2, 3], (10, 1)))
@pytest.mark.filterwarnings("ignore:The API of the `Polynomial`")
def test_3d_signal(self):
        # This code should run smoothly; any exceptions should trigger failure
s = self.m_3d.as_signal(parallel=False)
model = Model1D(s)
p = hs.model.components1D.Polynomial(order=2)
model.append(p)
p.estimate_parameters(s, 0, 100, only_current=False)
np.testing.assert_allclose(p.coefficients.map['values'],
np.tile([0.5, 2, 3], (2, 5, 1)))
@pytest.mark.filterwarnings("ignore:The API of the")
def test_conversion_dictionary_to_polynomial2(self):
from hyperspy._components.polynomial import convert_to_polynomial
s = hs.signals.Signal1D(np.zeros(1024))
s.axes_manager[0].offset = -5
s.axes_manager[0].scale = 0.01
poly = hs.model.components1D.Polynomial(order=2, legacy=True)
        poly.coefficients.value = [1, 2, 3]
poly.coefficients._bounds = ((None, None), (10, 0.0), (None, None))
poly_dict = poly.as_dictionary(True)
poly2_dict = convert_to_polynomial(poly_dict)
poly2 = hs.model.components1D.Polynomial(order=2, legacy=False)
_ = poly2._load_dictionary(poly2_dict)
assert poly2.a2.value == 1
assert poly2.a2._bounds == (None, None)
assert poly2.a1.value == 2
assert poly2.a1._bounds == (10, 0.0)
assert poly2.a0.value == 3
class TestPolynomial:
def setup_method(self, method):
s = hs.signals.Signal1D(np.zeros(1024))
s.axes_manager[0].offset = -5
s.axes_manager[0].scale = 0.01
m = s.create_model()
m.append(hs.model.components1D.Polynomial(order=2, legacy=False))
coeff_values = (0.5, 2, 3)
self.m = m
s_2d = hs.signals.Signal1D(np.arange(1000).reshape(10, 100))
self.m_2d = s_2d.create_model()
self.m_2d.append(hs.model.components1D.Polynomial(order=2, legacy=False))
s_3d = hs.signals.Signal1D(np.arange(1000).reshape(2, 5, 100))
self.m_3d = s_3d.create_model()
self.m_3d.append(hs.model.components1D.Polynomial(order=2, legacy=False))
data = 50*np.ones(100)
s_offset = hs.signals.Signal1D(data)
self.m_offset = s_offset.create_model()
        # if same component is passed, axes_managers get mixed up, tests
# sometimes randomly fail
for _m in [self.m, self.m_2d, self.m_3d]:
_m[0].a2.value = coeff_values[0]
_m[0].a1.value = coeff_values[1]
_m[0].a0.value = coeff_values[2]
def test_gradient(self):
poly = self.m[0]
assert poly.a2.grad(1) == 1
assert poly.a1.grad(1) == 1
assert poly.a0.grad(1) == 1
assert poly.a2.grad(np.arange(10)).shape == (10,)
@pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
def test_estimate_parameters(self, only_current, binned):
self.m.signal.metadata.Signal.binned = binned
s = self.m.as_signal(parallel=False)
s.metadata.Signal.binned = binned
p = hs.model.components1D.Polynomial(order=2, legacy=False)
p.estimate_parameters(s, None, None, only_current=only_current)
assert_allclose(p.a2.value, 0.5)
assert_allclose(p.a1.value, 2)
assert_allclose(p.a0.value, 3)
def test_zero_order(self):
m = self.m_offset
with pytest.raises(ValueError):
m.append(hs.model.components1D.Polynomial(order=0, legacy=False))
def test_2d_signal(self):
        # This code should run smoothly; any exceptions should trigger failure
s = self.m_2d.as_signal(parallel=False)
model = Model1D(s)
p = hs.model.components1D.Polynomial(order=2, legacy=False)
model.append(p)
p.estimate_parameters(s, 0, 100, only_current=False)
np.testing.assert_allclose(p.a2.map['values'], 0.5)
np.testing.assert_allclose(p.a1.map['values'], 2)
np.testing.assert_allclose(p.a0.map['values'], 3)
def test_3d_signal(self):
        # This code should run smoothly; any exceptions should trigger failure
s = self.m_3d.as_signal(parallel=False)
model = Model1D(s)
p = hs.model.components1D.Polynomial(order=2, legacy=False)
model.append(p)
p.estimate_parameters(s, 0, 100, only_current=False)
np.testing.assert_allclose(p.a2.map['values'], 0.5)
np.testing.assert_allclose(p.a1.map['values'], 2)
np.testing.assert_allclose(p.a0.map['values'], 3)
def test_function_nd(self):
s = self.m.as_signal(parallel=False)
s = hs.stack([s]*2)
p = hs.model.components1D.Polynomial(order=2, legacy=False)
p.estimate_parameters(s, None, None, only_current=False)
axis = s.axes_manager.signal_axes[0]
assert_allclose(p.function_nd(axis.axis), s.data)
class TestGaussian:
def setup_method(self, method):
s = hs.signals.Signal1D(np.zeros(1024))
s.axes_manager[0].offset = -5
s.axes_manager[0].scale = 0.01
m = s.create_model()
m.append(hs.model.components1D.Gaussian())
m[0].sigma.value = 0.5
m[0].centre.value = 1
m[0].A.value = 2
self.m = m
@pytest.mark.parametrize(("only_current", "binned"), TRUE_FALSE_2_TUPLE)
def test_estimate_parameters_binned(self, only_current, binned):
self.m.signal.metadata.Signal.binned = binned
s = self.m.as_signal(parallel=False)
assert s.metadata.Signal.binned == binned
g = hs.model.components1D.Gaussian()
g.estimate_parameters(s, None, None, only_current=only_current)
assert_allclose(g.sigma.value, 0.5)
assert_allclose(g.A.value, 2)
assert_allclose(g.centre.value, 1)
@pytest.mark.parametrize("binned", (True, False))
def test_function_nd(self, binned):
self.m.signal.metadata.Signal.binned = binned
s = self.m.as_signal(parallel=False)
s2 = hs.stack([s] * 2)
g = hs.model.components1D.Gaussian()
g.estimate_parameters(s2, None, None, only_current=False)
assert g.binned == binned
axis = s.axes_manager.signal_axes[0]
factor = axis.scale if binned else 1
assert_allclose(g.function_nd(axis.axis) * factor, s2.data)
class TestExpression:
def setup_method(self, method):
self.g = hs.model.components1D.Expression(
expression="height * exp(-(x - x0) ** 2 * 4 * log(2)/ fwhm ** 2)",
name="Gaussian",
position="x0",
height=1,
fwhm=1,
x0=0,
module="numpy")
def test_name(self):
assert self.g.name == "Gaussian"
def test_position(self):
assert self.g._position is self.g.x0
def test_f(self):
assert self.g.function(0) == 1
def test_grad_height(self):
assert_allclose(
self.g.grad_height(2),
1.5258789062500007e-05)
def test_grad_x0(self):
assert_allclose(
self.g.grad_x0(2),
0.00016922538587889289)
def test_grad_fwhm(self):
assert_allclose(
self.g.grad_fwhm(2),
0.00033845077175778578)
def test_function_nd(self):
assert self.g.function_nd(0) == 1
def test_expression_symbols():
with pytest.raises(ValueError):
hs.model.components1D.Expression(expression="10.0", name="offset")
with pytest.raises(ValueError):
hs.model.components1D.Expression(expression="10", name="offset")
with pytest.raises(ValueError):
hs.model.components1D.Expression(expression="10*offset", name="Offset")
def test_expression_substitution():
expr = 'A / B; A = x+2; B = x-c'
comp = hs.model.components1D.Expression(expr, name='testcomp',
autodoc=True,
c=2)
assert ''.join(p.name for p in comp.parameters) == 'c'
assert comp.function(1) == -3
class TestScalableFixedPattern:
def setup_method(self, method):
s = hs.signals.Signal1D(np.linspace(0., 100., 10))
s1 = hs.signals.Signal1D(np.linspace(0., 1., 10))
s.axes_manager[0].scale = 0.1
s1.axes_manager[0].scale = 0.1
self.s = s
self.pattern = s1
def test_both_unbinned(self):
s = self.s
s1 = self.pattern
s.metadata.Signal.binned = False
s1.metadata.Signal.binned = False
m = s.create_model()
fp = hs.model.components1D.ScalableFixedPattern(s1)
m.append(fp)
with ignore_warning(message="invalid value encountered in sqrt",
category=RuntimeWarning):
m.fit()
assert abs(fp.yscale.value - 100) <= 0.1
def test_both_binned(self):
s = self.s
s1 = self.pattern
s.metadata.Signal.binned = True
s1.metadata.Signal.binned = True
m = s.create_model()
fp = hs.model.components1D.ScalableFixedPattern(s1)
m.append(fp)
with ignore_warning(message="invalid value encountered in sqrt",
category=RuntimeWarning):
m.fit()
assert abs(fp.yscale.value - 100) <= 0.1
def test_pattern_unbinned_signal_binned(self):
s = self.s
s1 = self.pattern
s.metadata.Signal.binned = True
s1.metadata.Signal.binned = False
m = s.create_model()
fp = hs.model.components1D.ScalableFixedPattern(s1)
m.append(fp)
with ignore_warning(message="invalid value encountered in sqrt",
category=RuntimeWarning):
m.fit()
assert abs(fp.yscale.value - 1000) <= 1
def test_pattern_binned_signal_unbinned(self):
s = self.s
s1 = self.pattern
s.metadata.Signal.binned = False
s1.metadata.Signal.binned = True
m = s.create_model()
fp = hs.model.components1D.ScalableFixedPattern(s1)
m.append(fp)
with ignore_warning(message="invalid value encountered in sqrt",
category=RuntimeWarning):
m.fit()
assert abs(fp.yscale.value - 10) <= .1
class TestHeavisideStep:
def setup_method(self, method):
self.c = hs.model.components1D.HeavisideStep()
def test_integer_values(self):
c = self.c
np.testing.assert_array_almost_equal(c.function([-1, 0, 2]),
[0, 1, 1])
def test_float_values(self):
c = self.c
np.testing.assert_array_almost_equal(c.function([-0.5, 0.5, 2]),
[0, 1, 1])
def test_not_sorted(self):
c = self.c
np.testing.assert_array_almost_equal(c.function([3, -0.1, 0]),
[1, 0, 1])
def test_gradients(self):
c = self.c
np.testing.assert_array_almost_equal(c.A.grad([3, -0.1, 0]),
[1, 1, 1])
np.testing.assert_array_almost_equal(c.n.grad([3, -0.1, 0]),
[1, 0, 1])
|
dnjohnstone/hyperspy
|
hyperspy/tests/component/test_components.py
|
Python
|
gpl-3.0
| 20,259 | 0.000346 |
"""
Models a GC-MS experiment represented by a list of signal peaks
"""
#############################################################################
# #
# PyMS software for processing of metabolomic mass-spectrometry data #
# Copyright (C) 2005-2012 Vladimir Likic #
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License version 2 as #
# published by the Free Software Foundation. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. #
# #
#############################################################################
from pyms.Utils.Error import error
from pyms.Utils.Utils import is_str
from pyms.Peak.Class import Peak
from pyms.Peak.List.Utils import is_peak_list, sele_peaks_by_rt
class Experiment:
"""
@summary: Models an experiment object
@author: Vladimir Likic
@author: Andrew Isaac
"""
def __init__(self, expr_code, peak_list):
"""
@summary: Models an experiment
@param expr_code: Unique identifier for the experiment
@type expr_code: StringType
@param peak_list: A list of peak objects
@type peak_list: ListType
"""
if not is_str(expr_code):
error("'expr_code' must be a string")
if not is_peak_list(peak_list):
error("'peak_list' must be a list of Peak objects")
self.__expr_code = expr_code
self.__peak_list = peak_list
def get_expr_code(self):
"""
@summary: Returns the expr_code of the experiment
@return: The expr_code of the experiment
@rtype: StringType
"""
return self.__expr_code
def get_peak_list(self):
"""
@summary: Returns the peak list
@return: A list of peak objects
@rtype: ListType
"""
return self.__peak_list
def sele_rt_range(self, rt_range):
"""
@summary: Discards all peaks which have the retention time outside
the specified range
@param rt_range: Min, max retention time given as a list [rt_min,rt_max]
@type rt_range: ListType
@return: none
@rtype: NoneType
"""
peaks_sele = sele_peaks_by_rt(self.__peak_list, rt_range)
self.__peak_list = peaks_sele
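# Hedged usage sketch: the peak list construction and the retention-time
# format accepted by sele_rt_range() are assumptions for illustration.
#
#   expr = Experiment("GC01", peak_list)      # peak_list: list of Peak objects
#   expr.sele_rt_range(["6.5m", "21m"])       # keep peaks between 6.5 and 21 min
#   peaks_in_range = expr.get_peak_list()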
|
thegodone/pyms
|
Experiment/Class.py
|
Python
|
gpl-2.0
| 3,288 | 0.012165 |
from django.apps import AppConfig
class PlayersConfig(AppConfig):
name = 'players'
|
kevinharvey/django-tourney
|
tourney/players/apps.py
|
Python
|
gpl-3.0
| 89 | 0 |
#!/usr/bin/python
#
# Copyright Friday Film Club. All Rights Reserved.
"""League unit tests."""
__author__ = 'adamjmcgrath@gmail.com (Adam McGrath)'
import unittest
import base
import helpers
import models
class LeagueTestCase(base.TestCase):
def testPostPutHook(self):
league_owner = helpers.user()
league_member_1 = helpers.user()
league_member_2 = helpers.user()
league = models.League(name='Foo',
owner=league_owner.put(),
users=[league_member_1.put(), league_member_2.put()])
league_key = league.put()
self.assertListEqual(league_owner.leagues, [league_key])
self.assertListEqual(league_member_1.leagues, [league_key])
self.assertListEqual(league_member_2.leagues, [league_key])
league.users = [league_member_2.key]
league.put()
self.assertListEqual(league_member_1.leagues, [])
self.assertListEqual(league_member_2.leagues, [league_key])
def testPostDeleteHook(self):
league_owner = helpers.user()
league_member_1 = helpers.user()
league_member_2 = helpers.user()
league = models.League(name='Foo',
owner=league_owner.put(),
users=[league_member_1.put(), league_member_2.put()])
league_key = league.put()
self.assertListEqual(league_owner.leagues, [league_key])
self.assertListEqual(league_member_1.leagues, [league_key])
self.assertListEqual(league_member_2.leagues, [league_key])
league.key.delete()
self.assertListEqual(league_owner.leagues, [])
self.assertListEqual(league_member_1.leagues, [])
self.assertListEqual(league_member_2.leagues, [])
def testGetByName(self):
league = models.League(name='Foo',
owner=helpers.user().put())
league.put()
self.assertEqual(models.League.get_by_name('foo'), league)
if __name__ == '__main__':
unittest.main()
|
adamjmcgrath/fridayfilmclub
|
src/tests/test_model_league.py
|
Python
|
mpl-2.0
| 1,925 | 0.002597 |
import ctypes
import os
import types
from platform_utils import paths
def load_library(libname):
if paths.is_frozen():
libfile = os.path.join(paths.embedded_data_path(), 'accessible_output2', 'lib', libname)
else:
libfile = os.path.join(paths.module_path(), 'lib', libname)
return ctypes.windll[libfile]
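# Hedged usage sketch: an output backend in this package might load a screen
# reader controller DLL like this (the DLL and exported function names are
# assumptions for illustration only).
#
#   lib = load_library('nvdaControllerClient32.dll')
#   lib.nvdaController_speakText(u'Hello from accessible_output2')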
def get_output_classes():
import outputs
module_type = types.ModuleType
classes = [m.output_class for m in outputs.__dict__.itervalues() if type(m) == module_type and hasattr(m, 'output_class')]
return sorted(classes, key=lambda c: c.priority)
def find_datafiles():
import os
import platform
from glob import glob
import accessible_output2
if platform.system() != 'Windows':
return []
path = os.path.join(accessible_output2.__path__[0], 'lib', '*.dll')
results = glob(path)
dest_dir = os.path.join('accessible_output2', 'lib')
return [(dest_dir, results)]
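# Hedged usage sketch: find_datafiles() returns (dest_dir, [paths]) pairs in the
# distutils data_files format, so a freezing setup script could pass its result
# straight through (the remaining setup() arguments are assumptions).
#
#   from distutils.core import setup
#   import accessible_output2
#
#   setup(name='myapp', data_files=accessible_output2.find_datafiles())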
|
codeofdusk/ProjectMagenta
|
src/accessible_output2/__init__.py
|
Python
|
gpl-2.0
| 885 | 0.027119 |
# $Id$
import copy
import logging
import time
import traceback
import types
from quixote import form2
from quixote.html import htmltext
import canary.context
from canary.gazeteer import Feature
from canary.qx_defs import MyForm
from canary.utils import DTable, render_capitalized
import dtuple
class ExposureRoute (DTable):
# A Methodology can have one to many ROUTEs
ROUTE = {
'-': -1,
'ingestion' : 1,
'inhalation' : 2,
'mucocutaneous' : 3,
'vector' : 4,
'other' : 5,
}
def __init__ (self):
self.uid = -1
self.study_id = -1
self.methodology_id = -1
self.route = self.ROUTE['-']
def __str__ (self):
out = []
out.append('<Route uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\troute=%s' % self.get_text_value(self.ROUTE, self.route))
out.append('\tmethodology_id=%s' % self.methodology_id)
out.append('/>')
return '\n'.join(out)
def get_text_value (self, lookup_table, value):
for k, v in lookup_table.iteritems():
if v == value:
return k
return ''
def set_route (self, route):
if type(route) is types.StringType:
if route in self.ROUTE.keys():
self.route = self.ROUTE[route]
elif type(route) is types.IntType:
if route in self.ROUTE.values():
self.route = route
def get_route (self, text=False):
if text:
return self.get_text_value(self.ROUTE, self.route)
else:
return self.route
def delete (self, context):
"""
Delete this route from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM exposure_routes
WHERE uid = %s
""", self.uid)
except Exception, e:
context.logger.error('ExposureRoute: %s (%s)', self.uid, e)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO exposure_routes
(uid, study_id, methodology_id, route)
VALUES
(NULL, %s, %s, %s)
""", (self.study_id, self.methodology_id, self.route)
)
self.uid = self.get_new_uid(context)
else:
# Assume all calls to save() are after all routes have been removed
# already by "DELETE FROM exposure_routes" in methodology.save()
try:
cursor.execute("""
INSERT INTO exposure_routes
(uid, study_id, methodology_id, route)
VALUES
(%s, %s, %s, %s)
""", (self.uid, self.study_id, self.methodology_id, self.route)
)
except Exception, e:
context.logger.error('ExposureRoute: %s (%s)', self.uid, e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
class Methodology (DTable):
TABLE_NAME = 'methodologies'
# A Methodology must have one TYPE
TYPES = {
'experimental' : 1,
'descriptive' : 2,
'aggregate' : 3,
'cross sectional' : 4,
'cohort' : 5,
'case control' : 6,
'disease model' : 7,
}
# A Methodology can have at most one TIMING
TIMING = {
'-': -1,
'unknown' : 0,
'historical' : 1,
'concurrent' : 2,
'repeated' : 3,
'mixed' : 4,
}
# A Methodology can have at most one SAMPLING
SAMPLING = {
'-': -1,
'unknown' : 0,
'exposure' : 1,
'outcome' : 2,
'both' : 3,
}
# A Methodology can have at most one CONTROLS
CONTROLS = {
'-': -1,
'no' : 0,
'yes' : 1,
'both' : 2,
}
def __init__ (self, uid=-1):
self.uid = uid
self.study_id = -1
self.study_type_id = -1
self.sample_size = ''
self.timing = -1
self.sampling = -1
self.controls = -1
self.is_mesocosm = False
self.is_enclosure = False
self.exposure_routes = []
self.comments = ''
self.date_modified = None
self.date_entered = None
def __str__ (self):
out = []
out.append('<Methodology uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\tstudy_type=%s' % self.get_text_value(self.TYPES, self.study_type_id))
out.append('\tsample_size=%s' % self.sample_size)
for item in ['timing', 'sampling', 'controls', 'exposure_routes']:
out.append('\t%s=%s' % (item, getattr(self, 'get_' + item)(text=True)))
out.append('\tis_mesocosm=%s, is_enclosure=%s' % (self.is_mesocosm, self.is_enclosure))
        out.append('\tcomments=%s' % (self.comments or ''))
out.append('/>')
return '\n'.join(out)
def evidence_level (self):
"""
Return the evidence level relative to the type of study
performed.
"""
text_value = self.get_text_value(self.TYPES, self.study_type_id)
if text_value in ['experimental', 'cohort']:
return 3
elif text_value in ['case control', 'cross sectional', 'aggregate']:
return 2
elif text_value in ['descriptive', 'disease model']:
return 1
else:
return 0
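    # For example, a methodology typed as 'cohort' reports evidence_level() == 3,
    # a 'cross sectional' study reports 2, and a 'descriptive' study reports 1.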
def get_text_value (self, lookup_table, value):
for k, v in lookup_table.iteritems():
if v == value:
return k
return ''
def set_timing (self, timing):
if type(timing) is types.StringType:
if timing in self.TIMING.keys():
self.timing = self.TIMING[timing]
elif type(timing) is types.IntType:
if timing in self.TIMING.values():
self.timing = timing
def get_timing (self, text=False):
if text:
return self.get_text_value(self.TIMING, self.timing)
else:
return self.timing
def set_sampling (self, sampling):
if type(sampling) is types.StringType:
if sampling in self.SAMPLING.keys():
self.sampling = self.SAMPLING[sampling]
elif type(sampling) is types.IntType:
if sampling in self.SAMPLING.values():
self.sampling = sampling
def get_sampling (self, text=False):
if text:
return self.get_text_value(self.SAMPLING, self.sampling)
else:
return self.sampling
def set_controls (self, controls):
if type(controls) is types.StringType:
if controls in self.CONTROLS.keys():
self.controls = self.CONTROLS[controls]
elif type(controls) is types.IntType:
if controls in self.CONTROLS.values():
self.controls = controls
def get_controls (self, text=False):
if text:
return self.get_text_value(self.CONTROLS, self.controls)
else:
return self.controls
def set_routes (self, routes):
for route in routes:
self.add_route(route)
# Remove routes no longer specified
for route in self.exposure_routes:
if not route.get_route() in [r.get_route() for r in routes]:
self.exposure_routes.remove(route)
def add_route (self, route):
if not route.get_route() in [r.get_route() for r in self.exposure_routes]:
route.methodology_id = self.uid
route.study_id = self.study_id
self.exposure_routes.append(route)
def get_routes (self, text=False):
if text:
return [r.get_text_value(r.ROUTE, r.route) for r in self.exposure_routes]
else:
return self.exposure_routes
def set_study_type (self, value):
"""
Each methodology has exactly one type.
"""
if type(value) is types.StringType:
if value in self.TYPES.keys():
self.study_type_id = self.TYPES[value]
elif type(value) == type(htmltext('a')):
str_value = str(value)
if str_value in self.TYPES.keys():
self.study_type_id = self.TYPES[str_value]
elif type(value) is types.IntType:
if value in self.TYPES.values():
self.study_type_id = value
self.update_values()
def get_study_type (self, text=False):
"""
Return the study design type.
"""
if text:
return self.get_text_value(self.TYPES, self.study_type_id)
else:
return self.study_type_id
def update_values (self):
"""
To keep values consistent with methodology type, "blank out"
inapplicable ones; called by set_study_type() on update.
"""
if self.get_study_type() in [
self.TYPES['experimental'],
self.TYPES['descriptive'],
self.TYPES['disease model'],
]:
self.set_timing('-')
if not self.get_study_type() in [
self.TYPES['cross sectional'],
self.TYPES['cohort'],
self.TYPES['case control']
]:
self.set_controls('-')
if not self.get_study_type() in [
self.TYPES['cross sectional']
]:
self.set_sampling('-')
def delete (self, context):
"""
Delete this methodology, and its exposure_routes, from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM methodologies
WHERE uid = %s
""", self.uid)
cursor.execute("""
DELETE FROM exposure_routes
where methodology_id = %s
""", self.uid)
except Exception, e:
context.logger.error('Methodology: %s (%s)', self.uid, e)
def load_routes (self, context):
cursor = context.get_cursor()
cursor.execute("""
SELECT * FROM exposure_routes
WHERE methodology_id = %s
""", (self.uid))
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
exp_route = ExposureRoute()
for field in fields:
exp_route.set(field, row[field])
self.add_route(exp_route)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO methodologies
(uid, study_id, study_type_id,
sample_size, timing,
sampling, controls, comments,
is_mesocosm, is_enclosure,
date_modified, date_entered)
VALUES
(NULL, %s, %s,
%s, %s,
%s, %s, %s,
%s, %s,
NOW(), NOW())
""", (self.study_id, self.study_type_id,
self.sample_size, self.timing,
self.sampling, self.controls, self.comments,
int(self.is_mesocosm), int(self.is_enclosure))
)
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE methodologies
SET study_id = %s, study_type_id = %s,
sample_size = %s, timing = %s,
sampling = %s, controls = %s, comments = %s,
is_mesocosm = %s, is_enclosure = %s,
date_modified = NOW()
WHERE uid = %s
""", (self.study_id, self.study_type_id,
self.sample_size, self.timing,
self.sampling, self.controls, self.comments,
int(self.is_mesocosm), int(self.is_enclosure),
self.uid)
)
except Exception, e:
context.logger.error('Methodology: %s (%s)', self.uid, e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
# Refill these values every time
cursor.execute("""
DELETE FROM exposure_routes
WHERE methodology_id = %s
""", self.uid)
for route in self.exposure_routes:
route.save(context)
def create_form (self, context):
form = MyForm(context)
# all methodology types get a sample size
form.add(form2.StringWidget, 'sample_size',
title='Sample size (study n)',
size=10, value=self.sample_size,
required=False)
# all methodology types get one or more routes
route_options = [(route, text, route) for text, route in ExposureRoute.ROUTE.items()]
# FIXME: what else to do about leaving out the default/empty?
route_options.remove((-1, '-', -1))
select_size = len(route_options)
form.add(form2.MultipleSelectWidget, 'exposure_routes',
title='Routes of exposure (ctrl-click to select or change multiple)',
value=[r.route for r in self.get_routes()],
options=route_options,
size=select_size,
sort=False,
required=True)
# experimental can be is_mesocosm=True
if self.get_study_type() == self.TYPES['experimental']:
form.add(form2.CheckboxWidget, 'is_mesocosm',
title='Is mesocosm?',
value=self.is_mesocosm)
# methodology types except experimental get timing
if not self.get_study_type() == self.TYPES['experimental']:
form.add(form2.SingleSelectWidget, 'timing',
title='Timing',
value=self.get_timing(),
options=[(val, name, val) for name, val in self.TIMING.items()],
sort=True,
required=True)
# all the 'c*' methodology types get controls
if self.get_study_type() in [
self.TYPES['cross sectional'],
self.TYPES['cohort'],
self.TYPES['case control']
]:
form.add(form2.SingleSelectWidget, 'controls',
title='Controls from same population?',
value=self.get_controls(),
options=[(val, name, val) for name, val in self.CONTROLS.items()],
sort=True,
required=True)
# cohort can be is_enclosure=True
if self.get_study_type() == self.TYPES['cohort']:
form.add(form2.CheckboxWidget, 'is_enclosure',
title='Is enclosure?',
value=self.is_enclosure)
# only cross sectional methodologies get sampling
if self.get_study_type() == self.TYPES['cross sectional']:
form.add(form2.SingleSelectWidget, 'sampling',
title='Sampling',
value=self.get_sampling(),
options=[(val, name, val) for name, val in self.SAMPLING.items()],
sort=True,
required=True)
# every methodology type has comments
form.add(form2.TextWidget, 'comments',
title='Comments',
rows='4', cols='60',
wrap='virtual',
value=self.comments)
form.add_submit('update', value='update')
form.add_submit('finish', value='finish')
return form
def process_form (self, form):
# all methodology types get a sample size
if form['sample_size']:
self.sample_size = form['sample_size']
# all methodology types get one or more routes
if form['exposure_routes']:
routes = []
for r in form['exposure_routes']:
route = ExposureRoute()
route.set_route(r)
routes.append(route)
self.set_routes(routes)
else:
form.set_error('exposure_routes', 'You must choose at least one route of exposure.')
# experimental can be is_mesocosm=True
if self.get_study_type() == self.TYPES['experimental']:
if form['is_mesocosm']:
self.is_mesocosm = True
else:
self.is_mesocosm = False
# all methodology types but experimental get timing
if not self.get_study_type() == self.TYPES['experimental']:
if form['timing'] == self.TIMING['-']:
                form.set_error('timing', 'You must specify the timing.')
else:
self.set_timing(form['timing'])
# all 'c*' methodology types get controls
if self.get_study_type() in [
self.TYPES['cross sectional'],
self.TYPES['cohort'],
self.TYPES['case control']
]:
if form['controls'] == self.CONTROLS['-']:
form.set_error('controls', 'You must specify the controls.')
else:
self.set_controls(form['controls'])
# cohort can be is_enclosure=True
if self.get_study_type() == self.TYPES['cohort']:
if form['is_enclosure']:
self.is_enclosure = True
else:
self.is_enclosure = False
# only cross sectional gets sampling
if self.get_study_type() == self.TYPES['cross sectional']:
if form['sampling'] == self.SAMPLING['-']:
form.set_error('sampling', 'You must specify the sampling.')
else:
self.set_sampling(form['sampling'])
# every methodology type can have comments
if form['comments']:
self.comments = form['comments']
def find_exposures (context, search_term):
exposures = {}
if search_term \
and len(search_term) > 0:
cursor = context.get_cursor()
query_term = search_term.strip().replace(' ', '% ') + '%'
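        # e.g. a search for 'west nile' becomes the LIKE pattern 'west% nile%',
        # so each word is matched as a prefix.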
cursor.execute("""
SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
FROM umls_terms, umls_concepts, umls_concepts_sources
WHERE term LIKE %s
AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
ORDER BY term, preferred_name
""", query_term)
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
if not exposures.has_key((row['umls_concept_id'], row['umls_source_id'])):
exp = Exposure()
exp.concept_source_id = row['umls_source_id']
exp.concept_id = row['umls_concept_id']
exp.term = row['preferred_name']
exp.synonyms.append(row['term'])
exposures[(exp.concept_id, exp.concept_source_id)] = exp
else:
exp = exposures[(row['umls_concept_id'], row['umls_source_id'])]
if not row['term'] in exp.synonyms:
exp.synonyms.append(row['term'])
exposures[(exp.concept_id, exp.concept_source_id)] = exp
# Try to bump up coarse "relevance" of exact matches
exposures_ranked = exposures.values()
for exp in exposures_ranked:
if exp.term.lower() == search_term.lower()\
or search_term.lower() in [syn.lower() for syn in exp.synonyms]:
exposures_ranked.remove(exp)
exposures_ranked.insert(0, exp)
return exposures_ranked
else:
return exposures.values()
class Exposure (DTable):
TABLE_NAME = 'exposures'
UMLS_SOURCES = {
75: 'MeSH',
85: 'NCBI Taxonomy',
501: 'ITIS',
}
def __init__ (self):
self.uid = -1
self.study_id = -1
self.concept_id = -1
self.concept_source_id = -1
self.term = ''
self.synonyms = []
def __str__ (self):
out = []
out.append('<Exposure uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\tconcept_id=%s (%s)' % (self.concept_id, self.concept_source_id))
out.append('\tterm=%s' % self.term)
out.append('/>')
return '\n'.join(out)
def delete (self, context):
"""
Delete this exposure from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM exposures
WHERE uid = %s
""", self.uid)
except Exception, e:
context.logger.error('Exposure: %s (%s)', self.uid, e)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO exposures
(uid, study_id, concept_id,
concept_source_id, term)
VALUES
(NULL, %s, %s,
%s, %s)
""", (self.study_id, self.concept_id,
self.concept_source_id, self.term)
)
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE exposures
SET study_id = %s, concept_id = %s,
concept_source_id = %s, term = %s
WHERE uid = %s
""", (self.study_id, self.concept_id,
self.concept_source_id, self.term,
self.uid)
)
except Exception, e:
context.logger.error('Exposure: %s (%s)', self.uid, e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
def find_outcomes (context, search_term):
# Note: for now, limit to only MeSH (umls_source_id==75)
outcomes = {}
if search_term \
and len(search_term) > 0:
cursor = context.get_cursor()
query_term = search_term.strip().replace(' ', '% ') + '%'
cursor.execute("""
SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
FROM umls_terms, umls_concepts, umls_concepts_sources
WHERE term LIKE %s
AND umls_source_id = %s
AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
ORDER BY term, preferred_name
""", (query_term, 75))
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
if not outcomes.has_key((row['umls_concept_id'], row['umls_source_id'])):
outcome = Outcome()
outcome.concept_source_id = row['umls_source_id']
outcome.concept_id = row['umls_concept_id']
outcome.term = row['preferred_name']
outcome.synonyms.append(row['term'])
outcomes[(outcome.concept_id, outcome.concept_source_id)] = outcome
else:
outcome = outcomes[(row['umls_concept_id'], row['umls_source_id'])]
if not row['term'] in outcome.synonyms:
outcome.synonyms.append(row['term'])
outcomes[(outcome.concept_id, outcome.concept_source_id)] = outcome
# Try to bump up coarse "relevance" of exact matches
outcomes_ranked = outcomes.values()
for outcome in outcomes_ranked:
if outcome.term.lower() == search_term.lower()\
or search_term.lower() in [syn.lower() for syn in outcome.synonyms]:
outcomes_ranked.remove(outcome)
outcomes_ranked.insert(0, outcome)
return outcomes_ranked
else:
return outcomes.values()
class Outcome (DTable):
TABLE_NAME = 'outcomes'
UMLS_SOURCES = {
75: 'MeSH',
85: 'NCBI Taxonomy',
501: 'ITIS',
}
def __init__ (self):
self.uid = -1
self.study_id = -1
self.concept_id = -1
self.concept_source_id = -1
self.term = ''
self.synonyms = []
def __str__ (self):
out = []
out.append('<Outcome uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\tconcept_id=%s (%s)' % (self.concept_id, self.concept_source_id))
out.append('\tterm=%s' % self.term)
out.append('/>')
return '\n'.join(out)
def delete (self, context):
"""
Delete this outcome from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM outcomes
WHERE uid = %s
""", self.uid)
except Exception, e:
context.logger.error('Outcome: %s (%s)', self.uid, e)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO outcomes
(uid, study_id, concept_id,
concept_source_id, term)
VALUES
(NULL, %s, %s,
%s, %s)
""", (self.study_id, self.concept_id,
self.concept_source_id, self.term)
)
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE outcomes
SET study_id = %s, concept_id = %s,
concept_source_id = %s, term = %s
WHERE uid = %s
""", (self.study_id, self.concept_id,
self.concept_source_id, self.term,
self.uid)
)
except Exception, e:
context.logger.error('Outcome: %s (%s)', self.uid, e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
def find_risk_factors (context, search_term):
# Note: for now, limit to only MeSH (umls_source_id==75)
risk_factors = {}
if search_term \
and len(search_term) > 0:
cursor = context.get_cursor()
query_term = search_term.strip().replace(' ', '% ') + '%'
cursor.execute("""
SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
FROM umls_terms, umls_concepts, umls_concepts_sources
WHERE term LIKE %s
AND umls_source_id = %s
AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
ORDER BY term, preferred_name
""", (query_term, 75))
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
if not risk_factors.has_key((row['umls_concept_id'], row['umls_source_id'])):
risk_factor = RiskFactor()
risk_factor.concept_source_id = row['umls_source_id']
risk_factor.concept_id = row['umls_concept_id']
risk_factor.term = row['preferred_name']
risk_factor.synonyms.append(row['term'])
risk_factors[(risk_factor.concept_id, risk_factor.concept_source_id)] = risk_factor
else:
risk_factor = risk_factors[(row['umls_concept_id'], row['umls_source_id'])]
if not row['term'] in risk_factor.synonyms:
risk_factor.synonyms.append(row['term'])
risk_factors[(risk_factor.concept_id, risk_factor.concept_source_id)] = risk_factor
# Try to bump up coarse "relevance" of exact matches
risk_factors_ranked = risk_factors.values()
for risk_factor in risk_factors_ranked:
if risk_factor.term.lower() == search_term.lower()\
or search_term.lower() in [syn.lower() for syn in risk_factor.synonyms]:
risk_factors_ranked.remove(risk_factor)
risk_factors_ranked.insert(0, risk_factor)
return risk_factors_ranked
else:
return risk_factors.values()
class RiskFactor (DTable):
UMLS_SOURCES = {
75: 'MeSH',
85: 'NCBI Taxonomy',
501: 'ITIS',
}
def __init__ (self):
self.uid = -1
self.study_id = -1
self.concept_id = -1
self.concept_source_id = -1
self.term = ''
self.synonyms = []
def __str__ (self):
out = []
out.append('<RiskFactor uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\tconcept_id=%s (%s)' % (self.concept_id, self.concept_source_id))
out.append('\tterm=%s' % self.term)
out.append('/>')
return '\n'.join(out)
def delete (self, context):
"""
Delete this risk_factor from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM risk_factors
WHERE uid = %s
""", self.uid)
except Exception, e:
context.logger.error('RiskFactor: %s (%s)', self.uid, e)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO risk_factors
(uid, study_id, concept_id,
concept_source_id, term)
VALUES
(NULL, %s, %s,
%s, %s)
""", (self.study_id, self.concept_id,
self.concept_source_id, self.term)
)
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE risk_factors
SET study_id = %s, concept_id = %s,
concept_source_id = %s, term = %s
WHERE uid = %s
""", (self.study_id, self.concept_id,
self.concept_source_id, self.term,
self.uid)
)
except Exception, e:
context.logger.error('RiskFactor: %s (%s)', self.uid, e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
def find_species (context,search_term):
species_map = {}
if search_term \
and len(search_term) > 0:
cursor = context.get_cursor()
query_term = search_term.strip().replace(' ', '% ') + '%'
cursor.execute("""
SELECT umls_terms.umls_concept_id, term, preferred_name, umls_source_id
FROM umls_terms, umls_concepts, umls_concepts_sources
WHERE term LIKE %s
AND umls_concepts.umls_concept_id = umls_terms.umls_concept_id
AND umls_concepts_sources.umls_concept_id = umls_concepts.umls_concept_id
ORDER BY term, preferred_name
""", query_term)
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
if not species_map.has_key((row['umls_concept_id'], row['umls_source_id'])):
spec = Species()
spec.concept_source_id = row['umls_source_id']
spec.concept_id = row['umls_concept_id']
spec.term = row['preferred_name']
spec.synonyms.append(row['term'])
species_map[(spec.concept_id, spec.concept_source_id)] = spec
else:
spec = species_map[(row['umls_concept_id'], row['umls_source_id'])]
if not row['term'] in spec.synonyms:
spec.synonyms.append(row['term'])
species_map[(spec.concept_id, spec.concept_source_id)] = spec
# Try to bump up coarse "relevance" of exact matches
species_ranked = species_map.values()
for spec in species_ranked:
if spec.term.lower() == search_term.lower()\
or search_term.lower() in [syn.lower() for syn in spec.synonyms]:
species_ranked.remove(spec)
species_ranked.insert(0, spec)
return species_ranked
else:
return species_map.values()
class Species (DTable):
TABLE_NAME = 'species'
UMLS_SOURCES = {
75: 'MeSH',
85: 'NCBI Taxonomy',
501: 'ITIS',
}
TYPES = [
'companion',
'livestock',
'wildlife',
'laboratory',
]
def __init__ (self):
self.uid = -1
self.study_id = -1
self.concept_id = -1
self.concept_source_id = -1
self.term = ''
self.synonyms = []
self.__dict__['types'] = []
def __str__ (self):
out = []
out.append('<Species uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\tconcept_id=%s (%s)' % (self.concept_id, self.concept_source_id))
out.append('\tterm=%s' % self.term)
out.append('\tsynonyms=%s' % '; '.join(self.synonyms))
out.append('\ttypes=%s' % '; '.join(self.types))
out.append('/>')
return '\n'.join(out)
def __setattr__ (self, name, value):
# self.types should be a list, but the auto-loader from Study
# will try to assign it a string. Catch here, and assume it
# will be the only time a direct assignment to self.types is
# called.
if name == 'types':
if value.__class__ == ''.__class__:
self.set_types(value)
else:
self.__dict__[name] = value
else:
self.__dict__[name] = value
def add_type (self, type):
if type in self.TYPES:
if not type in self.types:
self.types.append(type)
def clear_types (self):
self.__dict__['types'] = []
def set_types (self, types):
self.clear_types()
if types.__class__ == ''.__class__:
type_dict = dict(zip([t[0:2] for t in self.TYPES], self.TYPES))
# pass through every two chars in types
for i in range(0, len(types), 2):
type = types[i:i+2]
species_type = type_dict.get(type, None)
if species_type:
self.add_type(species_type)
elif types.__class__ == [].__class__:
for type in types:
if type in self.TYPES:
self.add_type(type)
def get_types (self, shorthand=False):
if shorthand:
sh = ''.join([type[0:2] for type in self.types])
return sh
else:
return self.types
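    # Shorthand round trip, given the TYPES list above: a Species with types
    # ['companion', 'wildlife'] serializes via get_types(shorthand=True) to
    # 'cowi', and set_types('cowi') restores the same two type names.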
def delete (self, context):
"""
Delete this species from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM species
WHERE uid = %s
""", self.uid)
except Exception, e:
context.logger.error('Species: %s (%s)', self.uid, e)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO species
(uid, study_id, concept_id,
concept_source_id, term, types)
VALUES
(NULL, %s, %s,
%s, %s, %s)
""", (self.study_id, self.concept_id,
self.concept_source_id, self.term, self.get_types(shorthand=True))
)
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE species
SET study_id = %s, concept_id = %s,
concept_source_id = %s, term = %s, types = %s
WHERE uid = %s
""", (self.study_id, self.concept_id,
self.concept_source_id, self.term, self.get_types(shorthand=True),
self.uid)
)
except Exception, e:
context.logger.error('Species: %s (%s)', self.uid, e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
class Location (DTable):
TABLE_NAME = 'locations'
def __init__ (self, uid=-1):
self.uid = uid
self.study_id = -1
self.feature_id = -1
self.name = ''
self.country = ''
self.designation = ''
def __str__ (self):
out = []
out.append('<Location uid=%s study_id=%s' % (self.uid, self.study_id))
out.append('\tfeature_id=%s' % self.feature_id)
out.append('/>')
return '\n'.join(out)
def delete (self, context):
"""
Delete this location from the database.
"""
cursor = context.get_cursor()
if not self.uid == -1:
try:
cursor.execute("""
DELETE FROM locations
WHERE uid = %s
""", self.uid)
except Exception, e:
context.logger.error('Location: %s (%s)', self.uid, e)
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
cursor.execute("""
INSERT INTO locations
(uid, study_id, feature_id)
VALUES
(NULL, %s, %s)
""", (self.study_id, self.feature_id))
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE locations
SET study_id = %s, feature_id = %s
WHERE uid = %s
""", (self.study_id, self.feature_id,
self.uid)
)
except Exception, e:
context.logger.error('Location: %s (%s)', self.uid, e)
class Study (canary.context.Cacheable, DTable):
TABLE_NAME = 'studies'
# FIXME: does this only belong here or on loader.QueuedRecord?
# A Study has only one STATUS_TYPE
STATUS_TYPES = {
'unclaimed' : 0,
'claimed' : 1,
'curated' : 2,
}
# A Study has only one ARTICLE_TYPE
ARTICLE_TYPES = {
'unknown' : 0,
'irrelevant' : 1,
'traditional' : 2,
'general' : 3,
'review' : 4,
'outcomes only' : 5,
'exposures only' : 6,
'curated' : 7,
'duplicate' : 8,
}
# For dynamic iteration over related tables
TABLES = {
'methodologies' : Methodology,
'exposures': Exposure,
'risk_factors': RiskFactor,
'outcomes': Outcome,
'species': Species,
'locations': Location,
}
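    # Every class in TABLES is a DTable keyed by study_id, which is what lets
    # load(), save() and delete() below iterate these child tables generically.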
CACHE_KEY = 'study'
def __init__ (self, context=None, uid=-1, record_id=-1):
try:
if self.record_id >= 0:
return
except AttributeError:
pass
self.uid = uid
self.record_id = -1
self.status = self.STATUS_TYPES['unclaimed']
self.article_type = self.ARTICLE_TYPES['unknown']
self.curator_user_id = ''
self.has_outcomes = False
self.has_exposures = False
self.has_relationships = False
self.has_interspecies = False
self.has_exposure_linkage = False
self.has_outcome_linkage = False
self.has_genomic = False
self.comments = ''
self.methodologies = []
self.exposures = []
self.risk_factors = []
self.outcomes = []
self.species = []
self.locations = []
self.date_modified = None
self.date_entered = None
self.date_curated = None
self.history = {}
def __str__ (self):
out = []
out.append('<Study uid=%s record_id=%s' % (self.uid, self.record_id))
out.append('\tstatus=%s' % self.get_text_value(self.STATUS_TYPES, self.status))
out.append('\tcurator_user_id=%s' % self.curator_user_id)
out.append('\tarticle_type=%s' % self.get_text_value(self.ARTICLE_TYPES, self.article_type))
out.append('\thas_outcomes=%s' % self.has_outcomes)
out.append('\thas_exposures=%s' % self.has_exposures)
out.append('\thas_relationships=%s' % self.has_relationships)
out.append('\thas_interspecies=%s' % self.has_interspecies)
out.append('\thas_exposure_linkage=%s' % self.has_exposure_linkage)
out.append('\thas_outcome_linkage=%s' % self.has_outcome_linkage)
out.append('\thas_genomic=%s' % self.has_genomic)
# What are you wanting here? TYPES is not like OUTCOMES, is it?
#for table_name in self.TABLES:
# if len(getattr(self, table_name)) > 0:
# out.append('\t%s=' % table_name + \
# ','.join(getattr(self, 'get_' + table_name)(text=True)))
#if len(self.types) > 0:
# out.append('\ttypes=' + ','.join(self.get_types(text=True)))
        out.append('\tcomments=%s' % (self.comments or ''))
out.append('/>')
return '\n'.join(out)
def get_text_value (self, lookup_table, value):
for k, v in lookup_table.iteritems():
if v == value:
return k
return ''
"""Simple accessors for basic study parameters."""
# FIXME: some of these could be parameterized.
def set_status (self, value):
if value in self.STATUS_TYPES.keys():
self.status = self.STATUS_TYPES[value]
def get_status (self, text=False):
if text:
return self.get_text_value(self.STATUS_TYPES, self.status)
else:
return self.status
def set_article_type (self, value):
try:
if str(value) in self.ARTICLE_TYPES.keys():
self.article_type = self.ARTICLE_TYPES[str(value)]
except:
# FIXME: proper error here
pass
def get_article_type (self, text=False):
if text:
return self.get_text_value(self.ARTICLE_TYPES, self.article_type)
else:
return self.article_type
def get_concept_from_concept (self, concept):
"""
For use in matching searches for exposure/species/outcome against
summary data.
NOTE: not checking 'risk_factor', but that should be refactored in
with a broader concept code refactoring.
"""
for concept_type in ('exposures', 'outcomes', 'species'):
for c in getattr(self, concept_type):
if c.concept_id == concept.uid:
# Eliminate trailing 's'
if concept_type in ('exposures', 'outcomes'):
concept_type = concept_type[:-1]
return c, concept_type
return None, None
def add_methodology (self, methodology):
for meth in self.methodologies:
if meth.uid == methodology.uid:
return
methodology.study_id = self.uid
self.methodologies.append(methodology)
def delete_methodology (self, context, methodology):
for meth in self.methodologies:
if meth.uid == methodology.uid:
self.methodologies.remove(meth)
meth.delete(context)
def get_methodology (self, id):
for methodology in self.methodologies:
if methodology.uid == id:
return methodology
return None
def has_exposure (self, exposure):
"""
Returns True if this exposure has already been added to this Study.
Note that has_exposure may be used before exposure is added,
hence it does not check exposure.uid.
"""
for exp in self.exposures:
if exp.concept_id == exposure.concept_id:
return True
return False
def add_exposure (self, exposure):
if not self.has_exposure(exposure):
exposure.study_id = self.uid
self.exposures.append(exposure)
def delete_exposure (self, context, exposure):
for exp in self.exposures:
if exp.concept_id == exposure.concept_id:
self.exposures.remove(exp)
exp.delete(context)
def get_exposure (self, id):
"""
Return the matching exposure, if added.
Note that get_exposure is for use in matching or deleting exposures,
i.e., only after an exposure has been added to the Study, so uid
matching is required.
"""
for exp in self.exposures:
if exp.uid == id:
return exp
return None
def get_exposure_from_exposure (self, exposure):
for exp in self.exposures:
if exp.concept_id == exposure.concept_id:
return exp
return None
def has_risk_factor (self, risk_factor):
"""
Returns True if this risk_factor has already been added to this Study.
Note that has_risk_factor may be used before risk_factor is added,
hence it does not check risk_factor.uid.
"""
for rf in self.risk_factors:
if rf.concept_id == risk_factor.concept_id:
return True
return False
def add_risk_factor (self, risk_factor):
if not self.has_risk_factor(risk_factor):
risk_factor.study_id = self.uid
self.risk_factors.append(risk_factor)
def delete_risk_factor (self, context, risk_factor):
for rf in self.risk_factors:
if rf.concept_id == risk_factor.concept_id:
self.risk_factors.remove(rf)
rf.delete(context)
def get_risk_factor (self, id):
"""
Return the matching risk_factor, if added.
Note that get_risk_factor is for use in matching or deleting risk_factors,
        i.e., only after a risk_factor has been added to the Study, so uid
matching is required.
"""
for risk_factor in self.risk_factors:
if risk_factor.uid == id:
return risk_factor
return None
def get_risk_factor_from_risk_factor (self, risk_factor):
for rf in self.risk_factors:
if rf.concept_id == risk_factor.concept_id:
return rf
return None
def has_outcome (self, outcome):
"""
Returns True if this outcome has already been added to this Study.
Note that has_outcome may be used before outcome is added,
hence it does not check outcome.uid.
"""
for outc in self.outcomes:
if outc.concept_id == outcome.concept_id:
return True
return False
def add_outcome (self, outcome):
if not self.has_outcome(outcome):
outcome.study_id = self.uid
self.outcomes.append(outcome)
def delete_outcome (self, context, outcome):
for outc in self.outcomes:
if outc.concept_id == outcome.concept_id:
self.outcomes.remove(outc)
outc.delete(context)
def get_outcome (self, id):
"""
Return the matching outcome, if added.
Note that get_outcome is for use in matching or deleting outcomes,
i.e., only after an outcome has been added to the Study, so uid
matching is required.
"""
for outcome in self.outcomes:
if outcome.uid == id:
return outcome
return None
def get_outcome_from_outcome (self, outcome):
for outc in self.outcomes:
if outc.concept_id == outcome.concept_id:
return outc
return None
def has_species (self, species):
"""
Returns True if this species has already been added to this Study.
Note that has_species may be used before species is added,
hence it does not check species.uid.
"""
for spec in self.species:
if spec.concept_id == species.concept_id:
return True
return False
def add_species (self, species):
if not self.has_species(species):
species.study_id = self.uid
self.species.append(species)
def delete_species (self, context, species):
for spec in self.species:
if spec.concept_id == species.concept_id:
self.species.remove(spec)
spec.delete(context)
def get_species (self, id):
"""
Return the matching species, if added.
Note that get_species is for use in matching or deleting species,
        i.e., only after a species has been added to the Study, so uid
matching is required.
"""
for species in self.species:
if species.uid == id:
return species
return None
def get_species_from_species (self, species):
for spec in self.species:
if spec.concept_id == species.concept_id:
return spec
return None
def has_location (self, location):
"""
Returns True if this location has already been added to this Study.
Note that has_location may be used before location is added,
hence it does not check location.uid.
"""
for loc in self.locations:
if loc.feature_id == location.feature_id:
return True
return False
def has_feature (self, feature):
"""
Returns True if this feature has already been added to this Study.
"""
for loc in self.locations:
if loc.feature_id == feature.uid:
return True
return False
def add_location (self, location):
if not self.has_location(location):
location.study_id = self.uid
self.locations.append(location)
def delete_location (self, context, location):
for loc in self.locations:
if loc.uid == location.uid:
self.locations.remove(loc)
loc.delete(context)
def get_location (self, id):
"""
Return the matching location, if added.
Note that get_location is for use in matching or deleting locations,
        i.e., only after a location has been added to the Study, so uid
matching is required.
"""
for loc in self.locations:
if loc.uid == id:
return loc
return None
def get_location_from_feature (self, feature):
for loc in self.locations:
if loc.feature_id == feature.uid:
return loc
return None
def get_locations_sorted (self, context):
"""
For a set of canary record locations, return them in sort order
by lower((country_name, region_name, feature_name)).
"""
gazeteer = context.get_gazeteer()
locs = []
for location in self.locations:
feature = Feature(uid=location.feature_id)
feature.load(context)
if gazeteer.fips_codes.has_key((feature.country_code, feature.adm1)):
region_name = gazeteer.fips_codes[(feature.country_code, feature.adm1)]
else:
region_name = ''
name = feature.name
type = gazeteer.feature_codes[feature.feature_type]
region_name = render_capitalized(region_name)
country_name = render_capitalized(gazeteer.country_codes[feature.country_code])
locs.append(
((country_name.lower(), region_name.lower(), name.lower()),
(name, type, region_name, country_name))
)
locs.sort()
return locs
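    # Each entry of the returned list pairs a lowercased (country, region, name)
    # tuple used only as the sort key with the (name, type, region, country)
    # tuple intended for display.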
def get_lat_longs (self, context, dms=False):
"""
For a set of canary record locations, return their latitudes
and longitudes as two lists.
"""
        # Build two separate lists; a chained assignment here would alias them
        # to the same list object.
        lats = []
        longs = []
for location in self.locations:
feature = Feature(uid=location.feature_id)
feature.load(context)
if dms:
lats.append(feature.dms_latitude)
longs.append(feature.dms_longitude)
else:
lats.append(feature.latitude)
longs.append(feature.longitude)
return lats, longs
def add_history (self, uid=-1, curator_user_id='', message='', modified=''):
"""
Add a history record; only one history record can be added to a
study_history at a time (because the key is set to -1). Maybe
that's bad design. :\
"""
# Convert w/str() in case htmltext is passed by mistake
curator_user_id = str(curator_user_id)
message = str(message)
new_history = {
'uid': uid,
'study_id': self.uid,
'curator_user_id': curator_user_id,
'message': message,
'modified': modified
}
self.history[new_history['uid']] = new_history
def load (self, context):
# Can't load a new study; it hasn't been saved yet.
if self.uid == -1:
return
        # Is it already loaded? Convenience check so client calls
        # don't need to verify loads from the cache.
if context.config.use_cache:
try:
if self.record_id >= 0:
# Already loaded
return
except AttributeError:
                # Not already loaded, so continue
pass
cursor = context.get_cursor()
cursor.execute("""
SELECT *
FROM studies
WHERE uid = %s
""", self.uid)
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
if rows and len(rows) > 0:
row = dtuple.DatabaseTuple(desc, rows[0])
for field in fields:
self.set(field, row[field])
# Every table_class is a DTable
for table_name, table_class in self.TABLES.items():
select_phrase = """SELECT * FROM %s """ % table_name
cursor.execute(select_phrase + """
WHERE study_id = %s
""", (self.uid))
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
table_class_instance = table_class()
for field in fields:
table_class_instance.set(field, row[field])
getattr(self, table_name).append(table_class_instance)
for meth in self.methodologies:
meth.load_routes(context)
cursor.execute("""
SELECT *
FROM study_history
WHERE study_id = %s
""", self.uid)
fields = [d[0] for d in cursor.description]
desc = dtuple.TupleDescriptor([[f] for f in fields])
rows = cursor.fetchall()
for row in rows:
row = dtuple.DatabaseTuple(desc, row)
history_record = {}
for field in fields:
history_record[field] = row[field]
self.add_history(uid=history_record['uid'],
curator_user_id=history_record['curator_user_id'],
message=history_record['message'],
modified=history_record['modified'])
def save (self, context):
cursor = context.get_cursor()
if self.uid == -1:
try:
cursor.execute("""
INSERT INTO studies
(uid,
record_id, status, article_type, curator_user_id,
has_outcomes, has_exposures,
has_relationships, has_interspecies,
has_exposure_linkage, has_outcome_linkage,
has_genomic, comments,
date_modified, date_entered, date_curated)
VALUES
(NULL,
%s, %s, %s, %s,
%s, %s,
%s, %s,
%s, %s,
%s, %s,
NOW(), NOW(), %s)
""", (self.record_id, self.status, self.article_type, self.curator_user_id,
int(self.has_outcomes), int(self.has_exposures),
int(self.has_relationships), int(self.has_interspecies),
int(self.has_exposure_linkage), int(self.has_outcome_linkage),
int(self.has_genomic), self.comments,
self.date_curated)
)
except Exception, e:
context.logger.error('Save study: %s (%s)', self.uid, e)
self.uid = self.get_new_uid(context)
else:
try:
cursor.execute("""
UPDATE studies
SET record_id = %s, status = %s, article_type = %s, curator_user_id = %s,
has_outcomes = %s, has_exposures = %s,
has_relationships = %s, has_interspecies = %s,
has_exposure_linkage = %s, has_outcome_linkage = %s,
has_genomic = %s, comments = %s,
date_modified = NOW(), date_curated = %s
WHERE uid = %s
""", (self.record_id, self.status, self.article_type, self.curator_user_id,
int(self.has_outcomes), int(self.has_exposures),
int(self.has_relationships), int(self.has_interspecies),
int(self.has_exposure_linkage), int(self.has_outcome_linkage),
int(self.has_genomic), self.comments,
self.date_curated,
self.uid)
)
except Exception, e:
context.logger.error('Update study: %s', e)
# FIXME: should this be set from the SQL?
self.date_modified = time.strftime(str('%Y-%m-%d'))
# update all the related table values
for table_name in self.TABLES.keys():
for item in getattr(self, table_name):
item.save(context)
# Save new history records; assume only one can be added at a time,
# new record will necessarily have uid == -1
if self.history:
new_history_record = self.history.get(-1, None)
if new_history_record:
try:
cursor.execute("""
INSERT INTO study_history
(uid, study_id, curator_user_id,
message, modified)
VALUES
(NULL, %s, %s,
%s, NOW())
""", (self.uid, new_history_record['curator_user_id'],
new_history_record['message']))
new_history_record_id = self.get_new_uid(context)
del(self.history[-1])
self.history[new_history_record_id] = new_history_record
except Exception, e:
context.logger.error('Save study history: %s (%s)', self.uid, e)
if context.config.use_cache:
# Force reload on next call to flush history times
context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
def delete (self, context):
cursor = context.get_cursor()
try:
for table_name in self.TABLES.keys():
for item in getattr(self, table_name):
item.delete(context)
cursor.execute("""
DELETE FROM studies
WHERE uid = %s
""", self.uid)
if context.config.use_cache:
context.cache_delete('%s:%s' % (self.CACHE_KEY, self.uid))
except Exception, e:
context.logger.error('Delete study: %s', e)
|
dchud/sentinel
|
canary/study.py
|
Python
|
mit
| 63,030 | 0.009107 |
#!/usr/bin/env python
#! -*- coding: utf-8 -*-
###
# Copyright (c) Rice University 2012-13
# This software is subject to
# the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
###
"""
This exists solely to provide less typing for a "leaf node"
in a simple relational schema (1:M and 1:M-N:1) when used with SQLAlchemy.
SA does not support class-based inheritance in the normal Python way for objects inheriting from Base. Thus we have those objects perform multiple inheritance...
"""
import json
import sqlalchemy.types
import datetime
class CNXBase():
def from_dict(self, userprofile_dict):
"""
        Should test for schema validity etc.
"""
d = userprofile_dict
for k in d:
setattr(self, k, d[k])
def to_dict(self):
"""Return self as a dict, suitable for jsonifying """
d = {}
for col in self.__table__.columns:
d[col.name] = self.safe_type_out(col)
return d
def jsonify(self):
"""Helper function that returns simple json repr """
selfd = self.to_dict()
jsonstr = json.dumps(selfd) # here use the Json ENcoder???
return jsonstr
def safe_type_out(self, col):
"""return the value of a coulmn field safely as something that
json can use This is essentially a JSONEncoder sublclass
inside this object.
"""
        # Check the type instance itself; wrapping it in type() would make this
        # isinstance test always fail, so DateTime columns would never be
        # converted to isoformat strings.
        if isinstance(col.type, sqlalchemy.types.DateTime):
outstr = getattr(self, col.name).isoformat()
else:
outstr = getattr(self, col.name)
return outstr
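# A minimal sketch of the intended usage (the model below is illustrative, not
# part of this module): a declarative class inherits from both Base and CNXBase
# so the mixin can walk its __table__ columns.
#
#     class UserProfile(Base, CNXBase):
#         __tablename__ = 'userprofile'
#         id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
#         created = sqlalchemy.Column(sqlalchemy.types.DateTime)
#
#     profile = UserProfile()
#     profile.from_dict({'id': 1, 'created': datetime.datetime.utcnow()})
#     print(profile.jsonify())   # DateTime columns are emitted via isoformat()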
|
jbarmash/rhaptos2.user
|
rhaptos2/user/cnxbase.py
|
Python
|
agpl-3.0
| 1,673 | 0.003586 |
from django.conf.urls import url
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='homepage.html')),
url(r'^remote.html$', TemplateView.as_view(template_name='remote.html'), name="remote.html"),
]
|
bashu/django-facebox
|
example/urls.py
|
Python
|
bsd-3-clause
| 266 | 0.003759 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for metric_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.contrib.metrics.python.ops import metric_ops
NAN = float('nan')
metrics = tf.contrib.metrics
def _enqueue_vector(sess, queue, values, shape=None):
if not shape:
shape = (1, len(values))
dtype = queue.dtypes[0]
sess.run(queue.enqueue(tf.constant(values, dtype=dtype, shape=shape)))
def _binary_2d_label_to_sparse_value(labels):
"""Convert dense 2D binary indicator tensor to sparse tensor.
  Only values equal to 1 in `labels` are included in the result.
Args:
labels: Dense 2D binary indicator tensor.
Returns:
`SparseTensorValue` whose values are indices along the last dimension of
`labels`.
"""
indices = []
values = []
batch = 0
for row in labels:
label = 0
xi = 0
for x in row:
if x == 1:
indices.append([batch, xi])
values.append(label)
xi += 1
else:
assert x == 0
label += 1
batch += 1
shape = [len(labels), len(labels[0])]
return tf.SparseTensorValue(
np.array(indices, np.int64),
np.array(values, np.int64),
np.array(shape, np.int64))
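# Hand-checked example (not part of the original tests): a one-hot batch such as
# [[0, 1, 0], [0, 0, 1]] converts to indices [[0, 0], [1, 0]], values [1, 2] and
# dense shape [2, 3] -- one (row, k) index and class id per 1 entry.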
def _binary_2d_label_to_sparse(labels):
"""Convert dense 2D binary indicator tensor to sparse tensor.
  Only values equal to 1 in `labels` are included in the result.
Args:
labels: Dense 2D binary indicator tensor.
Returns:
`SparseTensor` whose values are indices along the last dimension of
`labels`.
"""
return tf.SparseTensor.from_value(_binary_2d_label_to_sparse_value(labels))
def _binary_3d_label_to_sparse_value(labels):
"""Convert dense 3D binary indicator tensor to sparse tensor.
  Only values equal to 1 in `labels` are included in the result.
  Args:
    labels: Dense 3D binary indicator tensor.
Returns:
`SparseTensorValue` whose values are indices along the last dimension of
`labels`.
"""
indices = []
values = []
for d0, labels_d0 in enumerate(labels):
for d1, labels_d1 in enumerate(labels_d0):
d2 = 0
for class_id, label in enumerate(labels_d1):
if label == 1:
values.append(class_id)
indices.append([d0, d1, d2])
d2 += 1
else:
assert label == 0
shape = [len(labels), len(labels[0]), len(labels[0][0])]
return tf.SparseTensorValue(
np.array(indices, np.int64),
np.array(values, np.int64),
np.array(shape, np.int64))
def _binary_3d_label_to_sparse(labels):
"""Convert dense 3D binary indicator tensor to sparse tensor.
  Only values equal to 1 in `labels` are included in the result.
  Args:
    labels: Dense 3D binary indicator tensor.
Returns:
`SparseTensor` whose values are indices along the last dimension of
`labels`.
"""
return tf.SparseTensor.from_value(_binary_3d_label_to_sparse_value(labels))
def _assert_nan(test_case, actual):
test_case.assertTrue(math.isnan(actual), 'Expected NAN, got %s.' % actual)
class StreamingMeanTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_mean(
tf.ones([4, 3]),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_mean(
tf.ones([4, 3]),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testBasic(self):
with self.test_session() as sess:
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
mean, update_op = metrics.streaming_mean(values)
sess.run(tf.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAlmostEqual(1.65, sess.run(mean), 5)
def testUpdateOpsReturnsCurrentValue(self):
with self.test_session() as sess:
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
mean, update_op = metrics.streaming_mean(values)
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.5, sess.run(update_op), 5)
self.assertAlmostEqual(1.475, sess.run(update_op), 5)
self.assertAlmostEqual(12.4/6.0, sess.run(update_op), 5)
self.assertAlmostEqual(1.65, sess.run(update_op), 5)
self.assertAlmostEqual(1.65, sess.run(mean), 5)
def test1dWeightedValues(self):
with self.test_session() as sess:
# Create the queue that populates the values.
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
# Create the queue that populates the weighted labels.
weights_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [1])
_enqueue_vector(sess, weights_queue, [0])
_enqueue_vector(sess, weights_queue, [0])
_enqueue_vector(sess, weights_queue, [1])
weights = weights_queue.dequeue()
mean, update_op = metrics.streaming_mean(values, weights)
tf.local_variables_initializer().run()
for _ in range(4):
update_op.eval()
self.assertAlmostEqual((0 + 1 - 3.2 + 4.0) / 4.0, mean.eval(), 5)
def test1dWeightedValues_placeholders(self):
with self.test_session() as sess:
# Create the queue that populates the values.
feed_values = (
(0, 1),
(-4.2, 9.1),
(6.5, 0),
(-3.2, 4.0)
)
values = tf.placeholder(dtype=tf.float32)
# Create the queue that populates the weighted labels.
weights_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [1])
_enqueue_vector(sess, weights_queue, [0])
_enqueue_vector(sess, weights_queue, [0])
_enqueue_vector(sess, weights_queue, [1])
weights = weights_queue.dequeue()
mean, update_op = metrics.streaming_mean(values, weights)
tf.local_variables_initializer().run()
for i in range(4):
update_op.eval(feed_dict={values: feed_values[i]})
self.assertAlmostEqual((0 + 1 - 3.2 + 4.0) / 4.0, mean.eval(), 5)
def test2dWeightedValues(self):
with self.test_session() as sess:
# Create the queue that populates the values.
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
# Create the queue that populates the weighted labels.
weights_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, weights_queue, [1, 1])
_enqueue_vector(sess, weights_queue, [1, 0])
_enqueue_vector(sess, weights_queue, [0, 1])
_enqueue_vector(sess, weights_queue, [0, 0])
weights = weights_queue.dequeue()
mean, update_op = metrics.streaming_mean(values, weights)
tf.local_variables_initializer().run()
for _ in range(4):
update_op.eval()
self.assertAlmostEqual((0 + 1 - 4.2 + 0) / 4.0, mean.eval(), 5)
def test2dWeightedValues_placeholders(self):
with self.test_session() as sess:
# Create the queue that populates the values.
feed_values = (
(0, 1),
(-4.2, 9.1),
(6.5, 0),
(-3.2, 4.0)
)
values = tf.placeholder(dtype=tf.float32)
# Create the queue that populates the weighted labels.
weights_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, weights_queue, [1, 1])
_enqueue_vector(sess, weights_queue, [1, 0])
_enqueue_vector(sess, weights_queue, [0, 1])
_enqueue_vector(sess, weights_queue, [0, 0])
weights = weights_queue.dequeue()
mean, update_op = metrics.streaming_mean(values, weights)
tf.local_variables_initializer().run()
for i in range(4):
update_op.eval(feed_dict={values: feed_values[i]})
self.assertAlmostEqual((0 + 1 - 4.2 + 0) / 4.0, mean.eval(), 5)
class StreamingMeanTensorTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_mean_tensor(
tf.ones([4, 3]),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_mean_tensor(
tf.ones([4, 3]),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testBasic(self):
with self.test_session() as sess:
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
mean, update_op = metrics.streaming_mean_tensor(values)
sess.run(tf.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAllClose([[-0.9/4., 3.525]], sess.run(mean))
def testMultiDimensional(self):
with self.test_session() as sess:
values_queue = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(2, 2, 2))
_enqueue_vector(sess,
values_queue,
[[[1, 2], [1, 2]], [[1, 2], [1, 2]]],
shape=(2, 2, 2))
_enqueue_vector(sess,
values_queue,
[[[1, 2], [1, 2]], [[3, 4], [9, 10]]],
shape=(2, 2, 2))
values = values_queue.dequeue()
mean, update_op = metrics.streaming_mean_tensor(values)
sess.run(tf.local_variables_initializer())
for _ in range(2):
sess.run(update_op)
self.assertAllClose([[[1, 2], [1, 2]], [[2, 3], [5, 6]]],
sess.run(mean))
def testUpdateOpsReturnsCurrentValue(self):
with self.test_session() as sess:
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
mean, update_op = metrics.streaming_mean_tensor(values)
sess.run(tf.local_variables_initializer())
self.assertAllClose([[0, 1]], sess.run(update_op), 5)
self.assertAllClose([[-2.1, 5.05]], sess.run(update_op), 5)
self.assertAllClose([[2.3/3., 10.1/3.]], sess.run(update_op), 5)
self.assertAllClose([[-0.9/4., 3.525]], sess.run(update_op), 5)
self.assertAllClose([[-0.9/4., 3.525]], sess.run(mean), 5)
def testWeighted1d(self):
with self.test_session() as sess:
# Create the queue that populates the values.
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [[1]])
_enqueue_vector(sess, weights_queue, [[0]])
_enqueue_vector(sess, weights_queue, [[1]])
_enqueue_vector(sess, weights_queue, [[0]])
weights = weights_queue.dequeue()
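      # Weights [1], [0], [1], [0] select only the first and third batches,
      # so the streamed mean is ([0, 1] + [6.5, 0]) / 2 = [3.25, 0.5].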
mean, update_op = metrics.streaming_mean_tensor(values, weights)
sess.run(tf.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAllClose([[3.25, 0.5]], sess.run(mean), 5)
def testWeighted2d_1(self):
with self.test_session() as sess:
# Create the queue that populates the values.
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, weights_queue, [1, 1])
_enqueue_vector(sess, weights_queue, [1, 0])
_enqueue_vector(sess, weights_queue, [0, 1])
_enqueue_vector(sess, weights_queue, [0, 0])
weights = weights_queue.dequeue()
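      # The per-element weights keep column 0 of the first two batches (0 and
      # -4.2) and column 1 of the first and third batches (1 and 0), giving a
      # mean of [-2.1, 0.5].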
mean, update_op = metrics.streaming_mean_tensor(values, weights)
sess.run(tf.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAllClose([[-2.1, 0.5]], sess.run(mean), 5)
def testWeighted2d_2(self):
with self.test_session() as sess:
# Create the queue that populates the values.
values_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, values_queue, [0, 1])
_enqueue_vector(sess, values_queue, [-4.2, 9.1])
_enqueue_vector(sess, values_queue, [6.5, 0])
_enqueue_vector(sess, values_queue, [-3.2, 4.0])
values = values_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 2))
_enqueue_vector(sess, weights_queue, [0, 1])
_enqueue_vector(sess, weights_queue, [0, 0])
_enqueue_vector(sess, weights_queue, [0, 1])
_enqueue_vector(sess, weights_queue, [0, 0])
weights = weights_queue.dequeue()
mean, update_op = metrics.streaming_mean_tensor(values, weights)
sess.run(tf.local_variables_initializer())
for _ in range(4):
sess.run(update_op)
self.assertAllClose([[0, 0.5]], sess.run(mean), 5)
class StreamingAccuracyTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_accuracy(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_accuracy(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self):
predictions = tf.ones((10, 3))
labels = tf.ones((10, 4))
with self.assertRaises(ValueError):
metrics.streaming_accuracy(predictions, labels)
def testPredictionsAndWeightsOfDifferentSizeRaisesValueError(self):
predictions = tf.ones((10, 3))
labels = tf.ones((10, 3))
weights = tf.ones((9, 3))
with self.assertRaises(ValueError):
metrics.streaming_accuracy(predictions, labels, weights)
def testValueTensorIsIdempotent(self):
predictions = tf.random_uniform((10, 3), maxval=3, dtype=tf.int64, seed=1)
labels = tf.random_uniform((10, 3), maxval=3, dtype=tf.int64, seed=1)
accuracy, update_op = metrics.streaming_accuracy(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_accuracy = accuracy.eval()
for _ in range(10):
self.assertEqual(initial_accuracy, accuracy.eval())
def testMultipleUpdates(self):
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [2])
_enqueue_vector(sess, preds_queue, [1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [2])
labels = labels_queue.dequeue()
accuracy, update_op = metrics.streaming_accuracy(
predictions, labels)
sess.run(tf.local_variables_initializer())
for _ in xrange(3):
sess.run(update_op)
self.assertEqual(0.5, sess.run(update_op))
self.assertEqual(0.5, accuracy.eval())
def testEffectivelyEquivalentSizes(self):
predictions = tf.ones((40, 1))
labels = tf.ones((40,))
with self.test_session() as sess:
accuracy, update_op = metrics.streaming_accuracy(
predictions, labels)
sess.run(tf.local_variables_initializer())
self.assertEqual(1.0, update_op.eval())
self.assertEqual(1.0, accuracy.eval())
  def testEffectivelyEquivalentSizesWithStaticShapedWeight(self):
predictions = tf.convert_to_tensor([1, 1, 1]) # shape 3,
labels = tf.expand_dims(tf.convert_to_tensor([1, 0, 0]), 1) # shape 3, 1
weights = tf.expand_dims(tf.convert_to_tensor([100, 1, 1]), 1) # shape 3, 1
with self.test_session() as sess:
accuracy, update_op = metrics.streaming_accuracy(
predictions, labels, weights)
sess.run(tf.local_variables_initializer())
      # If streaming_accuracy did not flatten the weights, the accuracy would
      # be 0.33333334 due to an intended broadcast of the weights. Because
      # the weights are flattened, it will be higher than 0.95.
self.assertGreater(update_op.eval(), .95)
self.assertGreater(accuracy.eval(), .95)
def testEffectivelyEquivalentSizesWithDynamicallyShapedWeight(self):
predictions = tf.convert_to_tensor([1, 1, 1]) # shape 3,
labels = tf.expand_dims(tf.convert_to_tensor([1, 0, 0]), 1) # shape 3, 1
weights = [[100], [1], [1]] # shape 3, 1
weights_placeholder = tf.placeholder(dtype=tf.int32, name='weights')
feed_dict = {weights_placeholder: weights}
with self.test_session() as sess:
accuracy, update_op = metrics.streaming_accuracy(
predictions, labels, weights_placeholder)
sess.run(tf.local_variables_initializer())
      # If streaming_accuracy did not flatten the weights, the accuracy would
      # be 0.33333334 due to an intended broadcast of the weights. Because
      # the weights are flattened, it will be higher than 0.95.
self.assertGreater(update_op.eval(feed_dict=feed_dict), .95)
self.assertGreater(accuracy.eval(feed_dict=feed_dict), .95)
def testMultipleUpdatesWithWeightedValues(self):
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [2])
_enqueue_vector(sess, preds_queue, [1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = tf.FIFOQueue(4, dtypes=tf.float32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [2])
labels = labels_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = tf.FIFOQueue(4, dtypes=tf.int64, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [1])
_enqueue_vector(sess, weights_queue, [1])
_enqueue_vector(sess, weights_queue, [0])
_enqueue_vector(sess, weights_queue, [0])
weights = weights_queue.dequeue()
accuracy, update_op = metrics.streaming_accuracy(
predictions, labels, weights)
sess.run(tf.local_variables_initializer())
for _ in xrange(3):
sess.run(update_op)
self.assertEqual(1.0, sess.run(update_op))
self.assertEqual(1.0, accuracy.eval())
class StreamingPrecisionTest(tf.test.TestCase):
def setUp(self):
np.random.seed(1)
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_precision(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_precision(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_uniform((10, 3), maxval=1, dtype=tf.int64, seed=1)
labels = tf.random_uniform((10, 3), maxval=1, dtype=tf.int64, seed=1)
precision, update_op = metrics.streaming_precision(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_precision = precision.eval()
for _ in range(10):
self.assertEqual(initial_precision, precision.eval())
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
predictions = tf.constant(inputs)
labels = tf.constant(inputs)
precision, update_op = metrics.streaming_precision(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(1, sess.run(update_op))
self.assertAlmostEqual(1, precision.eval())
def testSomeCorrect(self):
predictions = tf.constant([1, 0, 1, 0], shape=(1, 4))
labels = tf.constant([0, 1, 1, 0], shape=(1, 4))
precision, update_op = metrics.streaming_precision(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.5, update_op.eval())
self.assertAlmostEqual(0.5, precision.eval())
def testWeighted1d(self):
predictions = tf.constant([[1, 0, 1, 0], [1, 0, 1, 0]])
labels = tf.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
precision, update_op = metrics.streaming_precision(
predictions, labels, weights=tf.constant([[2], [5]]))
with self.test_session():
tf.local_variables_initializer().run()
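      # Row 0 (weight 2) has its true positive at column 2; row 1 (weight 5)
      # has its true positive at column 0. Each row predicts two positives,
      # so the weighted counts below follow.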
weighted_tp = 2.0 + 5.0
weighted_positives = (2.0 + 2.0) + (5.0 + 5.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(expected_precision, update_op.eval())
self.assertAlmostEqual(expected_precision, precision.eval())
def testWeighted1d_placeholders(self):
predictions = tf.placeholder(dtype=tf.float32)
labels = tf.placeholder(dtype=tf.float32)
feed_dict = {
predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
labels: ((0, 1, 1, 0), (1, 0, 0, 1))
}
precision, update_op = metrics.streaming_precision(
predictions, labels, weights=tf.constant([[2], [5]]))
with self.test_session():
tf.local_variables_initializer().run()
weighted_tp = 2.0 + 5.0
weighted_positives = (2.0 + 2.0) + (5.0 + 5.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(
expected_precision, update_op.eval(feed_dict=feed_dict))
self.assertAlmostEqual(
expected_precision, precision.eval(feed_dict=feed_dict))
def testWeighted2d(self):
predictions = tf.constant([[1, 0, 1, 0], [1, 0, 1, 0]])
labels = tf.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
precision, update_op = metrics.streaming_precision(
predictions, labels, weights=tf.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))
with self.test_session():
tf.local_variables_initializer().run()
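      # True positives fall at row 0 / column 2 (weight 3) and row 1 /
      # column 0 (weight 4); predicted positives are columns 0 and 2 of both
      # rows, with weights 1 + 3 and 4 + 2.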
weighted_tp = 3.0 + 4.0
weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(expected_precision, update_op.eval())
self.assertAlmostEqual(expected_precision, precision.eval())
def testWeighted2d_placeholders(self):
predictions = tf.placeholder(dtype=tf.float32)
labels = tf.placeholder(dtype=tf.float32)
feed_dict = {
predictions: ((1, 0, 1, 0), (1, 0, 1, 0)),
labels: ((0, 1, 1, 0), (1, 0, 0, 1))
}
precision, update_op = metrics.streaming_precision(
predictions, labels, weights=tf.constant([[1, 2, 3, 4], [4, 3, 2, 1]]))
with self.test_session():
tf.local_variables_initializer().run()
weighted_tp = 3.0 + 4.0
weighted_positives = (1.0 + 3.0) + (4.0 + 2.0)
expected_precision = weighted_tp / weighted_positives
self.assertAlmostEqual(
expected_precision, update_op.eval(feed_dict=feed_dict))
self.assertAlmostEqual(
expected_precision, precision.eval(feed_dict=feed_dict))
def testAllIncorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
predictions = tf.constant(inputs)
labels = tf.constant(1 - inputs)
precision, update_op = metrics.streaming_precision(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
sess.run(update_op)
self.assertAlmostEqual(0, precision.eval())
def testZeroTrueAndFalsePositivesGivesZeroPrecision(self):
predictions = tf.constant([0, 0, 0, 0])
labels = tf.constant([0, 0, 0, 0])
precision, update_op = metrics.streaming_precision(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
sess.run(update_op)
self.assertEqual(0.0, precision.eval())
class StreamingRecallTest(tf.test.TestCase):
def setUp(self):
np.random.seed(1)
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_recall(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_recall(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_uniform((10, 3), maxval=1, dtype=tf.int64, seed=1)
labels = tf.random_uniform((10, 3), maxval=1, dtype=tf.int64, seed=1)
recall, update_op = metrics.streaming_recall(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_recall = recall.eval()
for _ in range(10):
self.assertEqual(initial_recall, recall.eval())
def testAllCorrect(self):
np_inputs = np.random.randint(0, 2, size=(100, 1))
predictions = tf.constant(np_inputs)
labels = tf.constant(np_inputs)
recall, update_op = metrics.streaming_recall(predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
sess.run(update_op)
self.assertEqual(1, recall.eval())
def testSomeCorrect(self):
predictions = tf.constant([1, 0, 1, 0], shape=(1, 4))
labels = tf.constant([0, 1, 1, 0], shape=(1, 4))
recall, update_op = metrics.streaming_recall(predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.5, update_op.eval())
self.assertAlmostEqual(0.5, recall.eval())
def testWeighted1d(self):
predictions = tf.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
labels = tf.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
weights = tf.constant([[2], [5]])
recall, update_op = metrics.streaming_recall(
predictions, labels, weights=weights)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
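      # Row 0 (weight 2) has its true positive at column 2; row 1 (weight 5)
      # has its true positive at column 3. Each row has two positive labels,
      # so the weighted counts below follow.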
weighted_tp = 2.0 + 5.0
weighted_t = (2.0 + 2.0) + (5.0 + 5.0)
expected_precision = weighted_tp / weighted_t
self.assertAlmostEqual(expected_precision, update_op.eval())
self.assertAlmostEqual(expected_precision, recall.eval())
def testWeighted2d(self):
predictions = tf.constant([[1, 0, 1, 0], [0, 1, 0, 1]])
labels = tf.constant([[0, 1, 1, 0], [1, 0, 0, 1]])
weights = tf.constant([[1, 2, 3, 4], [4, 3, 2, 1]])
recall, update_op = metrics.streaming_recall(
predictions, labels, weights=weights)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
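      # True positives fall at row 0 / column 2 (weight 3) and row 1 /
      # column 3 (weight 1); the positive labels carry weights 2 + 3 in row 0
      # and 4 + 1 in row 1.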
weighted_tp = 3.0 + 1.0
weighted_t = (2.0 + 3.0) + (4.0 + 1.0)
expected_precision = weighted_tp / weighted_t
self.assertAlmostEqual(expected_precision, update_op.eval())
self.assertAlmostEqual(expected_precision, recall.eval())
def testAllIncorrect(self):
np_inputs = np.random.randint(0, 2, size=(100, 1))
predictions = tf.constant(np_inputs)
labels = tf.constant(1 - np_inputs)
recall, update_op = metrics.streaming_recall(predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
sess.run(update_op)
self.assertEqual(0, recall.eval())
def testZeroTruePositivesAndFalseNegativesGivesZeroRecall(self):
predictions = tf.zeros((1, 4))
labels = tf.zeros((1, 4))
recall, update_op = metrics.streaming_recall(predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
sess.run(update_op)
self.assertEqual(0, recall.eval())
class StreamingAUCTest(tf.test.TestCase):
def setUp(self):
np.random.seed(1)
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_auc(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_auc(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_uniform((10, 3), maxval=1, dtype=tf.float32, seed=1)
labels = tf.random_uniform((10, 3), maxval=1, dtype=tf.int64, seed=1)
auc, update_op = metrics.streaming_auc(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_auc = auc.eval()
for _ in range(10):
self.assertAlmostEqual(initial_auc, auc.eval(), 5)
def testAllCorrect(self):
self.allCorrectAsExpected('ROC')
def allCorrectAsExpected(self, curve):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = tf.constant(inputs, dtype=tf.float32)
labels = tf.constant(inputs)
auc, update_op = metrics.streaming_auc(predictions, labels, curve=curve)
sess.run(tf.local_variables_initializer())
self.assertEqual(1, sess.run(update_op))
self.assertEqual(1, auc.eval())
def testSomeCorrect(self):
with self.test_session() as sess:
predictions = tf.constant([1, 0, 1, 0], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([0, 1, 1, 0], shape=(1, 4))
auc, update_op = metrics.streaming_auc(predictions, labels)
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.5, sess.run(update_op))
self.assertAlmostEqual(0.5, auc.eval())
def testWeighted1d(self):
with self.test_session() as sess:
predictions = tf.constant([1, 0, 1, 0], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([0, 1, 1, 0], shape=(1, 4))
weights = tf.constant([2], shape=(1, 1))
auc, update_op = metrics.streaming_auc(predictions, labels,
weights=weights)
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.5, sess.run(update_op), 5)
self.assertAlmostEqual(0.5, auc.eval(), 5)
def testWeighted2d(self):
with self.test_session() as sess:
predictions = tf.constant([1, 0, 1, 0], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([0, 1, 1, 0], shape=(1, 4))
weights = tf.constant([1, 2, 3, 4], shape=(1, 4))
auc, update_op = metrics.streaming_auc(predictions, labels,
weights=weights)
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.7, sess.run(update_op), 5)
self.assertAlmostEqual(0.7, auc.eval(), 5)
def testAUCPRSpecialCase(self):
with self.test_session() as sess:
predictions = tf.constant([0.1, 0.4, 0.35, 0.8],
shape=(1, 4), dtype=tf.float32)
labels = tf.constant([0, 0, 1, 1], shape=(1, 4))
auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.79166, sess.run(update_op), delta=1e-3)
self.assertAlmostEqual(0.79166, auc.eval(), delta=1e-3)
def testAnotherAUCPRSpecialCase(self):
with self.test_session() as sess:
predictions = tf.constant([0.1, 0.4, 0.35, 0.8, 0.1, 0.135, 0.81],
shape=(1, 7), dtype=tf.float32)
labels = tf.constant([0, 0, 1, 0, 1, 0, 1], shape=(1, 7))
auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.610317, sess.run(update_op), delta=1e-3)
self.assertAlmostEqual(0.610317, auc.eval(), delta=1e-3)
def testThirdAUCPRSpecialCase(self):
with self.test_session() as sess:
predictions = tf.constant([0.0, 0.1, 0.2, 0.33, 0.3, 0.4, 0.5],
shape=(1, 7), dtype=tf.float32)
labels = tf.constant([0, 0, 0, 0, 1, 1, 1], shape=(1, 7))
auc, update_op = metrics.streaming_auc(predictions, labels, curve='PR')
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.90277, sess.run(update_op), delta=1e-3)
self.assertAlmostEqual(0.90277, auc.eval(), delta=1e-3)
def testAllIncorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = tf.constant(inputs, dtype=tf.float32)
labels = tf.constant(1 - inputs, dtype=tf.float32)
auc, update_op = metrics.streaming_auc(predictions, labels)
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0, sess.run(update_op))
self.assertAlmostEqual(0, auc.eval())
def testZeroTruePositivesAndFalseNegativesGivesOneAUC(self):
with self.test_session() as sess:
predictions = tf.zeros([4], dtype=tf.float32)
labels = tf.zeros([4])
auc, update_op = metrics.streaming_auc(predictions, labels)
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(1, sess.run(update_op), 6)
self.assertAlmostEqual(1, auc.eval(), 6)
def testRecallOneAndPrecisionOneGivesOnePRAUC(self):
with self.test_session() as sess:
predictions = tf.ones([4], dtype=tf.float32)
labels = tf.ones([4])
auc, update_op = metrics.streaming_auc(predictions,
labels,
curve='PR')
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(1, sess.run(update_op), 6)
self.assertAlmostEqual(1, auc.eval(), 6)
def np_auc(self, predictions, labels, weights):
"""Computes the AUC explicitely using Numpy.
Args:
predictions: an ndarray with shape [N].
labels: an ndarray with shape [N].
weights: an ndarray with shape [N].
Returns:
the area under the ROC curve.
"""
if weights is None:
weights = np.ones(np.size(predictions))
is_positive = labels > 0
num_positives = np.sum(weights[is_positive])
num_negatives = np.sum(weights[~is_positive])
    # Sort by prediction score in descending order:
inds = np.argsort(-predictions)
sorted_labels = labels[inds]
sorted_weights = weights[inds]
is_positive = sorted_labels > 0
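    # With examples sorted by descending score, the cumulative weighted true
    # positive rate at each position, summed over the negative examples and
    # normalized by the total negative weight, yields the ROC AUC (the
    # probability that a random positive outranks a random negative).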
tp = np.cumsum(sorted_weights * is_positive) / num_positives
return np.sum((sorted_weights * tp)[~is_positive]) / num_negatives
def testWithMultipleUpdates(self):
num_samples = 1000
batch_size = 10
num_batches = int(num_samples / batch_size)
# Create the labels and data.
labels = np.random.randint(0, 2, size=num_samples)
noise = np.random.normal(0.0, scale=0.2, size=num_samples)
predictions = 0.4 + 0.2 * labels + noise
predictions[predictions > 1] = 1
predictions[predictions < 0] = 0
def _enqueue_as_batches(x, enqueue_ops):
x_batches = x.astype(np.float32).reshape((num_batches, batch_size))
x_queue = tf.FIFOQueue(num_batches, dtypes=tf.float32,
shapes=(batch_size,))
for i in range(num_batches):
enqueue_ops[i].append(x_queue.enqueue(x_batches[i, :]))
return x_queue.dequeue()
for weights in (None,
np.ones(num_samples),
np.random.exponential(scale=1.0, size=num_samples)):
expected_auc = self.np_auc(predictions, labels, weights)
with self.test_session() as sess:
enqueue_ops = [[] for i in range(num_batches)]
tf_predictions = _enqueue_as_batches(predictions, enqueue_ops)
tf_labels = _enqueue_as_batches(labels, enqueue_ops)
tf_weights = (_enqueue_as_batches(weights, enqueue_ops)
if weights is not None else None)
for i in range(num_batches):
sess.run(enqueue_ops[i])
auc, update_op = metrics.streaming_auc(
tf_predictions, tf_labels, curve='ROC', num_thresholds=500,
weights=tf_weights)
sess.run(tf.local_variables_initializer())
for i in range(num_batches):
sess.run(update_op)
        # Since this is only approximate, we can't expect a 6-digit match,
        # although with a higher number of samples/thresholds the accuracy
        # should improve.
self.assertAlmostEqual(expected_auc, auc.eval(), 2)
class StreamingSpecificityAtSensitivityTest(tf.test.TestCase):
def setUp(self):
np.random.seed(1)
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_specificity_at_sensitivity(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
sensitivity=0.7,
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_specificity_at_sensitivity(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
sensitivity=0.7,
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_uniform((10, 3), maxval=1, dtype=tf.float32, seed=1)
labels = tf.random_uniform((10, 3), maxval=2, dtype=tf.int64, seed=1)
specificity, update_op = metrics.streaming_specificity_at_sensitivity(
predictions, labels, sensitivity=0.7)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_specificity = specificity.eval()
for _ in range(10):
self.assertAlmostEqual(initial_specificity, specificity.eval(), 5)
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
predictions = tf.constant(inputs, dtype=tf.float32)
labels = tf.constant(inputs)
specificity, update_op = metrics.streaming_specificity_at_sensitivity(
predictions, labels, sensitivity=0.7)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(1, sess.run(update_op))
self.assertEqual(1, specificity.eval())
def testSomeCorrectHighSensitivity(self):
predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0,
0.1, 0.45, 0.5, 0.8, 0.9]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
predictions = tf.constant(predictions_values, dtype=tf.float32)
labels = tf.constant(labels_values)
specificity, update_op = metrics.streaming_specificity_at_sensitivity(
predictions, labels, sensitivity=0.8)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(1.0, sess.run(update_op))
self.assertAlmostEqual(1.0, specificity.eval())
def testSomeCorrectLowSensitivity(self):
predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0,
0.1, 0.2, 0.2, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
predictions = tf.constant(predictions_values, dtype=tf.float32)
labels = tf.constant(labels_values)
specificity, update_op = metrics.streaming_specificity_at_sensitivity(
predictions, labels, sensitivity=0.4)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.6, sess.run(update_op))
self.assertAlmostEqual(0.6, specificity.eval())
def testWeighted1d(self):
predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0,
0.1, 0.2, 0.2, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
weights_values = [3]
predictions = tf.constant(predictions_values, dtype=tf.float32)
labels = tf.constant(labels_values)
weights = tf.constant(weights_values)
specificity, update_op = metrics.streaming_specificity_at_sensitivity(
predictions, labels, weights=weights, sensitivity=0.4)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.6, sess.run(update_op))
self.assertAlmostEqual(0.6, specificity.eval())
def testWeighted2d(self):
predictions_values = [0.1, 0.2, 0.4, 0.3, 0.0,
0.1, 0.2, 0.2, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
weights_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
predictions = tf.constant(predictions_values, dtype=tf.float32)
labels = tf.constant(labels_values)
weights = tf.constant(weights_values)
specificity, update_op = metrics.streaming_specificity_at_sensitivity(
predictions, labels, weights=weights, sensitivity=0.4)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(8.0 / 15.0, sess.run(update_op))
self.assertAlmostEqual(8.0 / 15.0, specificity.eval())
class StreamingSensitivityAtSpecificityTest(tf.test.TestCase):
def setUp(self):
np.random.seed(1)
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_sensitivity_at_specificity(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
specificity=0.7,
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_sensitivity_at_specificity(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
specificity=0.7,
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_uniform((10, 3), maxval=1, dtype=tf.float32, seed=1)
labels = tf.random_uniform((10, 3), maxval=2, dtype=tf.int64, seed=1)
sensitivity, update_op = metrics.streaming_sensitivity_at_specificity(
predictions, labels, specificity=0.7)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_sensitivity = sensitivity.eval()
for _ in range(10):
self.assertAlmostEqual(initial_sensitivity, sensitivity.eval(), 5)
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
predictions = tf.constant(inputs, dtype=tf.float32)
labels = tf.constant(inputs)
specificity, update_op = metrics.streaming_sensitivity_at_specificity(
predictions, labels, specificity=0.7)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(1, sess.run(update_op))
self.assertEqual(1, specificity.eval())
def testSomeCorrectHighSpecificity(self):
predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4,
0.1, 0.45, 0.5, 0.8, 0.9]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
predictions = tf.constant(predictions_values, dtype=tf.float32)
labels = tf.constant(labels_values)
specificity, update_op = metrics.streaming_sensitivity_at_specificity(
predictions, labels, specificity=0.8)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.8, sess.run(update_op))
self.assertAlmostEqual(0.8, specificity.eval())
def testSomeCorrectLowSpecificity(self):
predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4,
0.01, 0.02, 0.25, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
predictions = tf.constant(predictions_values, dtype=tf.float32)
labels = tf.constant(labels_values)
specificity, update_op = metrics.streaming_sensitivity_at_specificity(
predictions, labels, specificity=0.4)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.6, sess.run(update_op))
self.assertAlmostEqual(0.6, specificity.eval())
def testWeighted(self):
predictions_values = [0.0, 0.1, 0.2, 0.3, 0.4,
0.01, 0.02, 0.25, 0.26, 0.26]
labels_values = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
weights_values = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
predictions = tf.constant(predictions_values, dtype=tf.float32)
labels = tf.constant(labels_values)
weights = tf.constant(weights_values)
specificity, update_op = metrics.streaming_sensitivity_at_specificity(
predictions, labels, weights=weights, specificity=0.4)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(0.675, sess.run(update_op))
self.assertAlmostEqual(0.675, specificity.eval())
# TODO(nsilberman): Break this up into two sets of tests.
class StreamingPrecisionRecallThresholdsTest(tf.test.TestCase):
def setUp(self):
np.random.seed(1)
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
prec, _ = metrics.streaming_precision_at_thresholds(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
thresholds=[0, 0.5, 1.0],
metrics_collections=[my_collection_name])
rec, _ = metrics.streaming_recall_at_thresholds(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
thresholds=[0, 0.5, 1.0],
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [prec, rec])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, precision_op = metrics.streaming_precision_at_thresholds(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
thresholds=[0, 0.5, 1.0],
updates_collections=[my_collection_name])
_, recall_op = metrics.streaming_recall_at_thresholds(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
thresholds=[0, 0.5, 1.0],
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name),
[precision_op, recall_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_uniform((10, 3), maxval=1, dtype=tf.float32, seed=1)
labels = tf.random_uniform((10, 3), maxval=1, dtype=tf.int64, seed=1)
thresholds = [0, 0.5, 1.0]
prec, prec_op = metrics.streaming_precision_at_thresholds(
predictions, labels, thresholds)
rec, rec_op = metrics.streaming_recall_at_thresholds(
predictions, labels, thresholds)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates, then verify idempotency.
sess.run([prec_op, rec_op])
initial_prec = prec.eval()
initial_rec = rec.eval()
for _ in range(10):
sess.run([prec_op, rec_op])
self.assertAllClose(initial_prec, prec.eval())
self.assertAllClose(initial_rec, rec.eval())
# TODO(nsilberman): fix tests (passing but incorrect).
def testAllCorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = tf.constant(inputs, dtype=tf.float32)
labels = tf.constant(inputs)
thresholds = [0.5]
prec, prec_op = metrics.streaming_precision_at_thresholds(
predictions, labels, thresholds)
rec, rec_op = metrics.streaming_recall_at_thresholds(
predictions, labels, thresholds)
sess.run(tf.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertEqual(1, prec.eval())
self.assertEqual(1, rec.eval())
def testSomeCorrect(self):
with self.test_session() as sess:
predictions = tf.constant([1, 0, 1, 0], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([0, 1, 1, 0], shape=(1, 4))
thresholds = [0.5]
prec, prec_op = metrics.streaming_precision_at_thresholds(
predictions, labels, thresholds)
rec, rec_op = metrics.streaming_recall_at_thresholds(
predictions, labels, thresholds)
sess.run(tf.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(0.5, prec.eval())
self.assertAlmostEqual(0.5, rec.eval())
def testAllIncorrect(self):
inputs = np.random.randint(0, 2, size=(100, 1))
with self.test_session() as sess:
predictions = tf.constant(inputs, dtype=tf.float32)
labels = tf.constant(1 - inputs, dtype=tf.float32)
thresholds = [0.5]
prec, prec_op = metrics.streaming_precision_at_thresholds(
predictions, labels, thresholds)
rec, rec_op = metrics.streaming_recall_at_thresholds(
predictions, labels, thresholds)
sess.run(tf.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(0, prec.eval())
self.assertAlmostEqual(0, rec.eval())
def testWeights1d(self):
with self.test_session() as sess:
predictions = tf.constant([[1, 0], [1, 0]], shape=(2, 2),
dtype=tf.float32)
labels = tf.constant([[0, 1], [1, 0]], shape=(2, 2))
weights = tf.constant([[0], [1]], shape=(2, 1), dtype=tf.float32)
thresholds = [0.5, 1.1]
prec, prec_op = metrics.streaming_precision_at_thresholds(
predictions, labels, thresholds, weights=weights)
rec, rec_op = metrics.streaming_recall_at_thresholds(
predictions, labels, thresholds, weights=weights)
[prec_low, prec_high] = tf.split(0, 2, prec)
prec_low = tf.reshape(prec_low, shape=())
prec_high = tf.reshape(prec_high, shape=())
[rec_low, rec_high] = tf.split(0, 2, rec)
rec_low = tf.reshape(rec_low, shape=())
rec_high = tf.reshape(rec_high, shape=())
sess.run(tf.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(1.0, prec_low.eval(), places=5)
self.assertAlmostEqual(0.0, prec_high.eval(), places=5)
self.assertAlmostEqual(1.0, rec_low.eval(), places=5)
self.assertAlmostEqual(0.0, rec_high.eval(), places=5)
def testWeights2d(self):
with self.test_session() as sess:
predictions = tf.constant([[1, 0], [1, 0]], shape=(2, 2),
dtype=tf.float32)
labels = tf.constant([[0, 1], [1, 0]], shape=(2, 2))
weights = tf.constant([[0, 0], [1, 1]], shape=(2, 2), dtype=tf.float32)
thresholds = [0.5, 1.1]
prec, prec_op = metrics.streaming_precision_at_thresholds(
predictions, labels, thresholds, weights=weights)
rec, rec_op = metrics.streaming_recall_at_thresholds(
predictions, labels, thresholds, weights=weights)
[prec_low, prec_high] = tf.split(0, 2, prec)
prec_low = tf.reshape(prec_low, shape=())
prec_high = tf.reshape(prec_high, shape=())
[rec_low, rec_high] = tf.split(0, 2, rec)
rec_low = tf.reshape(rec_low, shape=())
rec_high = tf.reshape(rec_high, shape=())
sess.run(tf.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(1.0, prec_low.eval(), places=5)
self.assertAlmostEqual(0.0, prec_high.eval(), places=5)
self.assertAlmostEqual(1.0, rec_low.eval(), places=5)
self.assertAlmostEqual(0.0, rec_high.eval(), places=5)
def testExtremeThresholds(self):
with self.test_session() as sess:
predictions = tf.constant([1, 0, 1, 0], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([0, 1, 1, 1], shape=(1, 4))
thresholds = [-1.0, 2.0] # lower/higher than any values
prec, prec_op = metrics.streaming_precision_at_thresholds(
predictions, labels, thresholds)
rec, rec_op = metrics.streaming_recall_at_thresholds(
predictions, labels, thresholds)
[prec_low, prec_high] = tf.split(0, 2, prec)
[rec_low, rec_high] = tf.split(0, 2, rec)
sess.run(tf.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(0.75, prec_low.eval())
self.assertAlmostEqual(0.0, prec_high.eval())
self.assertAlmostEqual(1.0, rec_low.eval())
self.assertAlmostEqual(0.0, rec_high.eval())
def testZeroLabelsPredictions(self):
with self.test_session() as sess:
predictions = tf.zeros([4], dtype=tf.float32)
labels = tf.zeros([4])
thresholds = [0.5]
prec, prec_op = metrics.streaming_precision_at_thresholds(
predictions, labels, thresholds)
rec, rec_op = metrics.streaming_recall_at_thresholds(
predictions, labels, thresholds)
sess.run(tf.local_variables_initializer())
sess.run([prec_op, rec_op])
self.assertAlmostEqual(0, prec.eval(), 6)
self.assertAlmostEqual(0, rec.eval(), 6)
def testWithMultipleUpdates(self):
num_samples = 1000
batch_size = 10
num_batches = int(num_samples / batch_size)
# Create the labels and data.
labels = np.random.randint(0, 2, size=(num_samples, 1))
noise = np.random.normal(0.0, scale=0.2, size=(num_samples, 1))
predictions = 0.4 + 0.2 * labels + noise
predictions[predictions > 1] = 1
predictions[predictions < 0] = 0
thresholds = [0.3]
tp = 0
fp = 0
fn = 0
tn = 0
for i in range(num_samples):
if predictions[i] > thresholds[0]:
if labels[i] == 1:
tp += 1
else:
fp += 1
else:
if labels[i] == 1:
fn += 1
else:
tn += 1
epsilon = 1e-7
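    # The small epsilon keeps the expected ratios finite when there are no
    # positives and, presumably, mirrors the smoothing the thresholded ops
    # apply internally, so the comparison below stays consistent.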
expected_prec = tp / (epsilon + tp + fp)
expected_rec = tp / (epsilon + tp + fn)
labels = labels.astype(np.float32)
predictions = predictions.astype(np.float32)
with self.test_session() as sess:
      # Reshape the data so it's easy to queue up:
predictions_batches = predictions.reshape((batch_size, num_batches))
labels_batches = labels.reshape((batch_size, num_batches))
# Enqueue the data:
predictions_queue = tf.FIFOQueue(num_batches, dtypes=tf.float32,
shapes=(batch_size,))
labels_queue = tf.FIFOQueue(num_batches, dtypes=tf.float32,
shapes=(batch_size,))
for i in range(int(num_batches)):
tf_prediction = tf.constant(predictions_batches[:, i])
tf_label = tf.constant(labels_batches[:, i])
sess.run([predictions_queue.enqueue(tf_prediction),
labels_queue.enqueue(tf_label)])
tf_predictions = predictions_queue.dequeue()
tf_labels = labels_queue.dequeue()
prec, prec_op = metrics.streaming_precision_at_thresholds(
tf_predictions, tf_labels, thresholds)
rec, rec_op = metrics.streaming_recall_at_thresholds(
tf_predictions, tf_labels, thresholds)
sess.run(tf.local_variables_initializer())
for _ in range(int(num_samples / batch_size)):
sess.run([prec_op, rec_op])
      # Since this is only approximate, we can't expect a 6-digit match,
      # although with a higher number of samples/thresholds the accuracy
      # should improve.
self.assertAlmostEqual(expected_prec, prec.eval(), 2)
self.assertAlmostEqual(expected_rec, rec.eval(), 2)
# TODO(ptucker): Remove when we remove `streaming_recall_at_k`.
# This op will be deprecated soon in favor of `streaming_sparse_recall_at_k`.
# Until then, this test validates that both ops yield the same results.
class StreamingRecallAtKTest(tf.test.TestCase):
def setUp(self):
np.random.seed(1)
tf.reset_default_graph()
self._batch_size = 4
self._num_classes = 3
self._np_predictions = np.matrix(('0.1 0.2 0.7;'
'0.6 0.2 0.2;'
'0.0 0.9 0.1;'
'0.2 0.0 0.8'))
self._np_labels = [0, 0, 0, 0]
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_recall_at_k(
predictions=tf.ones((self._batch_size, self._num_classes)),
labels=tf.ones((self._batch_size,), dtype=tf.int32),
k=1,
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_recall_at_k(
predictions=tf.ones((self._batch_size, self._num_classes)),
labels=tf.ones((self._batch_size,), dtype=tf.int32),
k=1,
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testSingleUpdateKIs1(self):
predictions = tf.constant(self._np_predictions,
shape=(self._batch_size, self._num_classes),
dtype=tf.float32)
labels = tf.constant(
self._np_labels, shape=(self._batch_size,), dtype=tf.int64)
recall, update_op = metrics.streaming_recall_at_k(
predictions, labels, k=1)
sp_recall, sp_update_op = metrics.streaming_sparse_recall_at_k(
predictions, tf.reshape(labels, (self._batch_size, 1)), k=1)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(0.25, sess.run(update_op))
self.assertEqual(0.25, recall.eval())
self.assertEqual(0.25, sess.run(sp_update_op))
self.assertEqual(0.25, sp_recall.eval())
def testSingleUpdateKIs2(self):
predictions = tf.constant(self._np_predictions,
shape=(self._batch_size, self._num_classes),
dtype=tf.float32)
labels = tf.constant(
self._np_labels, shape=(self._batch_size,), dtype=tf.int64)
recall, update_op = metrics.streaming_recall_at_k(
predictions, labels, k=2)
sp_recall, sp_update_op = metrics.streaming_sparse_recall_at_k(
predictions, tf.reshape(labels, (self._batch_size, 1)), k=2)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(0.5, sess.run(update_op))
self.assertEqual(0.5, recall.eval())
self.assertEqual(0.5, sess.run(sp_update_op))
self.assertEqual(0.5, sp_recall.eval())
def testSingleUpdateKIs3(self):
predictions = tf.constant(self._np_predictions,
shape=(self._batch_size, self._num_classes),
dtype=tf.float32)
labels = tf.constant(
self._np_labels, shape=(self._batch_size,), dtype=tf.int64)
recall, update_op = metrics.streaming_recall_at_k(
predictions, labels, k=3)
sp_recall, sp_update_op = metrics.streaming_sparse_recall_at_k(
predictions, tf.reshape(labels, (self._batch_size, 1)), k=3)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(1.0, sess.run(update_op))
self.assertEqual(1.0, recall.eval())
self.assertEqual(1.0, sess.run(sp_update_op))
self.assertEqual(1.0, sp_recall.eval())
def testSingleUpdateSomeMissingKIs2(self):
predictions = tf.constant(self._np_predictions,
shape=(self._batch_size, self._num_classes),
dtype=tf.float32)
labels = tf.constant(
self._np_labels, shape=(self._batch_size,), dtype=tf.int64)
weights = tf.constant([0, 1, 0, 1], shape=(self._batch_size,),
dtype=tf.float32)
recall, update_op = metrics.streaming_recall_at_k(
predictions, labels, k=2, weights=weights)
sp_recall, sp_update_op = metrics.streaming_sparse_recall_at_k(
predictions, tf.reshape(labels, (self._batch_size, 1)), k=2,
weights=weights)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(1.0, sess.run(update_op))
self.assertEqual(1.0, recall.eval())
self.assertEqual(1.0, sess.run(sp_update_op))
self.assertEqual(1.0, sp_recall.eval())
class StreamingSparsePrecisionTest(tf.test.TestCase):
def _test_streaming_sparse_precision_at_k(self,
predictions,
labels,
k,
expected,
class_id=None,
weights=None):
with tf.Graph().as_default() as g, self.test_session(g):
if weights is not None:
weights = tf.constant(weights, tf.float32)
metric, update = metrics.streaming_sparse_precision_at_k(
predictions=tf.constant(predictions, tf.float32), labels=labels,
k=k, class_id=class_id, weights=weights)
# Fails without initialized vars.
self.assertRaises(tf.OpError, metric.eval)
self.assertRaises(tf.OpError, update.eval)
tf.initialize_variables(tf.local_variables()).run()
# Run per-step op and assert expected values.
if math.isnan(expected):
_assert_nan(self, update.eval())
_assert_nan(self, metric.eval())
else:
self.assertEqual(expected, update.eval())
self.assertEqual(expected, metric.eval())
def _test_streaming_sparse_precision_at_top_k(self,
top_k_predictions,
labels,
expected,
class_id=None,
weights=None):
with tf.Graph().as_default() as g, self.test_session(g):
if weights is not None:
weights = tf.constant(weights, tf.float32)
metric, update = metrics.streaming_sparse_precision_at_top_k(
top_k_predictions=tf.constant(top_k_predictions, tf.int32),
labels=labels, class_id=class_id, weights=weights)
# Fails without initialized vars.
self.assertRaises(tf.OpError, metric.eval)
self.assertRaises(tf.OpError, update.eval)
tf.initialize_variables(tf.local_variables()).run()
# Run per-step op and assert expected values.
if math.isnan(expected):
self.assertTrue(math.isnan(update.eval()))
self.assertTrue(math.isnan(metric.eval()))
else:
self.assertEqual(expected, update.eval())
self.assertEqual(expected, metric.eval())
def _test_sparse_average_precision_at_k(self,
predictions,
labels,
k,
expected):
with tf.Graph().as_default() as g, self.test_session(g):
predictions = tf.constant(predictions, tf.float32)
metric = metric_ops.sparse_average_precision_at_k(
predictions, labels, k)
self.assertAllEqual(expected, metric.eval())
def _test_streaming_sparse_average_precision_at_k(
self, predictions, labels, k, expected, weights=None):
with tf.Graph().as_default() as g, self.test_session(g):
if weights is not None:
weights = tf.constant(weights, tf.float32)
predictions = tf.constant(predictions, tf.float32)
metric, update = metrics.streaming_sparse_average_precision_at_k(
predictions, labels, k, weights=weights)
# Fails without initialized vars.
self.assertRaises(tf.OpError, metric.eval)
self.assertRaises(tf.OpError, update.eval)
local_variables = tf.local_variables()
tf.initialize_variables(local_variables).run()
# Run per-step op and assert expected values.
if math.isnan(expected):
_assert_nan(self, update.eval())
_assert_nan(self, metric.eval())
else:
self.assertAlmostEqual(expected, update.eval())
self.assertAlmostEqual(expected, metric.eval())
def test_top_k_rank_invalid(self):
with self.test_session():
# top_k_predictions has rank < 2.
top_k_predictions = [9, 4, 6, 2, 0]
sp_labels = tf.SparseTensorValue(
indices=np.array([[0,], [1,], [2,]], np.int64),
values=np.array([2, 7, 8], np.int64),
shape=np.array([10,], np.int64))
with self.assertRaises(ValueError):
precision, _ = metrics.streaming_sparse_precision_at_top_k(
top_k_predictions=tf.constant(top_k_predictions, tf.int64),
labels=sp_labels)
tf.initialize_variables(tf.local_variables()).run()
precision.eval()
def test_average_precision(self):
# Example 1.
# Matches example here:
# fastml.com/what-you-wanted-to-know-about-mean-average-precision
labels_ex1 = (0, 1, 2, 3, 4)
labels = np.array([labels_ex1], dtype=np.int64)
predictions_ex1 = (0.2, 0.1, 0.0, 0.4, 0.0, 0.5, 0.3)
predictions = (predictions_ex1,)
predictions_top_k_ex1 = (5, 3, 6, 0, 1, 2)
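    # Relevant classes are {0, 1, 2, 3, 4}, so the ranked predictions hit at
    # positions 2 and 4. precision@k counts hits in the top k; the average
    # precision@k used here sums the precision at each hit position up to k
    # and divides by k.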
precision_ex1 = (
0.0 / 1,
1.0 / 2,
1.0 / 3,
2.0 / 4
)
avg_precision_ex1 = (
0.0 / 1,
precision_ex1[1] / 2,
precision_ex1[1] / 3,
(precision_ex1[1] + precision_ex1[3]) / 4
)
for i in xrange(4):
k = i + 1
self._test_streaming_sparse_precision_at_k(
predictions, labels, k, expected=precision_ex1[i])
self._test_streaming_sparse_precision_at_top_k(
(predictions_top_k_ex1[:k],), labels, expected=precision_ex1[i])
self._test_sparse_average_precision_at_k(
predictions, labels, k, expected=[avg_precision_ex1[i]])
self._test_streaming_sparse_average_precision_at_k(
predictions, labels, k, expected=avg_precision_ex1[i])
# Example 2.
labels_ex2 = (0, 2, 4, 5, 6)
labels = np.array([labels_ex2], dtype=np.int64)
predictions_ex2 = (0.3, 0.5, 0.0, 0.4, 0.0, 0.1, 0.2)
predictions = (predictions_ex2,)
predictions_top_k_ex2 = (1, 3, 0, 6, 5)
precision_ex2 = (
0.0 / 1,
0.0 / 2,
1.0 / 3,
2.0 / 4
)
avg_precision_ex2 = (
0.0 / 1,
0.0 / 2,
precision_ex2[2] / 3,
(precision_ex2[2] + precision_ex2[3]) / 4
)
for i in xrange(4):
k = i + 1
self._test_streaming_sparse_precision_at_k(
predictions, labels, k, expected=precision_ex2[i])
self._test_streaming_sparse_precision_at_top_k(
(predictions_top_k_ex2[:k],), labels, expected=precision_ex2[i])
self._test_sparse_average_precision_at_k(
predictions, labels, k, expected=[avg_precision_ex2[i]])
self._test_streaming_sparse_average_precision_at_k(
predictions, labels, k, expected=avg_precision_ex2[i])
# Both examples, we expect both precision and average precision to be the
# average of the 2 examples.
labels = np.array([labels_ex1, labels_ex2], dtype=np.int64)
predictions = (predictions_ex1, predictions_ex2)
average_precision = [
(ex1, ex2) for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2)]
streaming_precision = [
(ex1 + ex2) / 2
for ex1, ex2 in zip(precision_ex1, precision_ex2)]
streaming_average_precision = [
(ex1 + ex2) / 2
for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2)]
for i in xrange(4):
k = i + 1
self._test_streaming_sparse_precision_at_k(
predictions, labels, k, expected=streaming_precision[i])
predictions_top_k = (predictions_top_k_ex1[:k], predictions_top_k_ex2[:k])
self._test_streaming_sparse_precision_at_top_k(
predictions_top_k, labels, expected=streaming_precision[i])
self._test_sparse_average_precision_at_k(
predictions, labels, k, expected=average_precision[i])
self._test_streaming_sparse_average_precision_at_k(
predictions, labels, k, expected=streaming_average_precision[i])
# Weighted examples, we expect streaming average precision to be the
# weighted average of the 2 examples.
weights = (0.3, 0.6)
streaming_average_precision = [
(weights[0] * ex1 + weights[1] * ex2) / (weights[0] + weights[1])
for ex1, ex2 in zip(avg_precision_ex1, avg_precision_ex2)]
for i in xrange(4):
k = i + 1
self._test_streaming_sparse_average_precision_at_k(
predictions, labels, k, expected=streaming_average_precision[i],
weights=weights)
def test_average_precision_some_labels_out_of_range(self):
"""Tests that labels outside the [0, n_classes) range are ignored."""
labels_ex1 = (-1, 0, 1, 2, 3, 4, 7)
labels = np.array([labels_ex1], dtype=np.int64)
predictions_ex1 = (0.2, 0.1, 0.0, 0.4, 0.0, 0.5, 0.3)
predictions = (predictions_ex1,)
predictions_top_k_ex1 = (5, 3, 6, 0, 1, 2)
precision_ex1 = (
0.0 / 1,
1.0 / 2,
1.0 / 3,
2.0 / 4
)
avg_precision_ex1 = (
0.0 / 1,
precision_ex1[1] / 2,
precision_ex1[1] / 3,
(precision_ex1[1] + precision_ex1[3]) / 4
)
for i in xrange(4):
k = i + 1
self._test_streaming_sparse_precision_at_k(
predictions, labels, k, expected=precision_ex1[i])
self._test_streaming_sparse_precision_at_top_k(
(predictions_top_k_ex1[:k],), labels, expected=precision_ex1[i])
self._test_sparse_average_precision_at_k(
predictions, labels, k, expected=[avg_precision_ex1[i]])
self._test_streaming_sparse_average_precision_at_k(
predictions, labels, k, expected=avg_precision_ex1[i])
def test_one_label_at_k1_nan(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
top_k_predictions = [[3], [3]]
sparse_labels = _binary_2d_label_to_sparse_value(
[[0, 0, 0, 1], [0, 0, 1, 0]])
dense_labels = np.array([[3], [2]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Classes 0,1,2 have 0 predictions, classes -1 and 4 are out of range.
for class_id in (-1, 0, 1, 2, 4):
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=1, expected=NAN, class_id=class_id)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=NAN, class_id=class_id)
def test_one_label_at_k1(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
top_k_predictions = [[3], [3]]
sparse_labels = _binary_2d_label_to_sparse_value(
[[0, 0, 0, 1], [0, 0, 1, 0]])
dense_labels = np.array([[3], [2]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Class 3: 1 label, 2 predictions, 1 correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=1, expected=1.0 / 2, class_id=3)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=1.0 / 2, class_id=3)
# All classes: 2 labels, 2 predictions, 1 correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=1, expected=1.0 / 2)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=1.0 / 2)
def test_three_labels_at_k5_no_predictions(self):
predictions = [
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
]
top_k_predictions = [
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
]
sparse_labels = _binary_2d_label_to_sparse_value([
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Classes 1,3,8 have 0 predictions, classes -1 and 10 are out of range.
for class_id in (-1, 1, 3, 8, 10):
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=NAN, class_id=class_id)
def test_three_labels_at_k5_no_labels(self):
predictions = [
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
]
top_k_predictions = [
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
]
sparse_labels = _binary_2d_label_to_sparse_value([
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Classes 0,4,6,9: 0 labels, >=1 prediction.
for class_id in (0, 4, 6, 9):
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=0.0, class_id=class_id)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=0.0, class_id=class_id)
def test_three_labels_at_k5(self):
predictions = [
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
]
top_k_predictions = [
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
]
sparse_labels = _binary_2d_label_to_sparse_value([
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Class 2: 2 labels, 2 correct predictions.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=2.0 / 2,
class_id=2)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=2.0 / 2, class_id=2)
# Class 5: 1 label, 1 correct prediction.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=1.0 / 1, class_id=5)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=1.0 / 1, class_id=5)
# Class 7: 1 label, 1 incorrect prediction.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=0.0 / 1, class_id=7)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=0.0 / 1, class_id=7)
# All classes: 10 predictions, 3 correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=3.0 / 10)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=3.0 / 10)
def test_three_labels_at_k5_some_out_of_range(self):
"""Tests that labels outside the [0, n_classes) range are ignored."""
predictions = [
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
]
top_k_predictions = [
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
]
sp_labels = tf.SparseTensorValue(
indices=[[0, 0], [0, 1], [0, 2], [0, 3],
[1, 0], [1, 1], [1, 2], [1, 3]],
# values -1 and 10 are outside the [0, n_classes) range and are ignored.
values=np.array([2, 7, -1, 8,
1, 2, 5, 10], np.int64),
shape=[2, 4])
# Class 2: 2 labels, 2 correct predictions.
self._test_streaming_sparse_precision_at_k(
predictions, sp_labels, k=5, expected=2.0 / 2, class_id=2)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, sp_labels, expected=2.0 / 2, class_id=2)
# Class 5: 1 label, 1 correct prediction.
self._test_streaming_sparse_precision_at_k(
predictions, sp_labels, k=5, expected=1.0 / 1, class_id=5)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, sp_labels, expected=1.0 / 1, class_id=5)
# Class 7: 1 label, 1 incorrect prediction.
self._test_streaming_sparse_precision_at_k(
predictions, sp_labels, k=5, expected=0.0 / 1, class_id=7)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, sp_labels, expected=0.0 / 1, class_id=7)
# All classes: 10 predictions, 3 correct.
self._test_streaming_sparse_precision_at_k(
predictions, sp_labels, k=5, expected=3.0 / 10)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, sp_labels, expected=3.0 / 10)
def test_3d_nan(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
top_k_predictions = [[
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
], [
[5, 7, 2, 9, 6],
[9, 4, 6, 2, 0],
]]
labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0]
]])
# Classes 1,3,8 have 0 predictions, classes -1 and 10 are out of range.
for class_id in (-1, 1, 3, 8, 10):
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=NAN, class_id=class_id)
def test_3d_no_labels(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
top_k_predictions = [[
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
], [
[5, 7, 2, 9, 6],
[9, 4, 6, 2, 0],
]]
labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0]
]])
# Classes 0,4,6,9: 0 labels, >=1 prediction.
for class_id in (0, 4, 6, 9):
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=0.0, class_id=class_id)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=0.0, class_id=class_id)
def test_3d(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
top_k_predictions = [[
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
], [
[5, 7, 2, 9, 6],
[9, 4, 6, 2, 0],
]]
labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0]
]])
# Class 2: 4 predictions, all correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=4.0 / 4, class_id=2)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=4.0 / 4, class_id=2)
# Class 5: 2 predictions, both correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=2.0 / 2, class_id=5)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=2.0 / 2, class_id=5)
# Class 7: 2 predictions, 1 correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=1.0 / 2, class_id=7)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=1.0 / 2, class_id=7)
# All classes: 20 predictions, 7 correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=7.0 / 20)
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=7.0 / 20)
def test_3d_ignore_all(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
top_k_predictions = [[
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
], [
[5, 7, 2, 9, 6],
[9, 4, 6, 2, 0],
]]
labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0]
]])
for class_id in xrange(10):
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id,
weights=[[0], [0]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=NAN, class_id=class_id,
weights=[[0], [0]])
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id,
weights=[[0, 0], [0, 0]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=NAN, class_id=class_id,
weights=[[0, 0], [0, 0]])
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, weights=[[0], [0]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=NAN, weights=[[0], [0]])
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, weights=[[0, 0], [0, 0]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=NAN,
weights=[[0, 0], [0, 0]])
def test_3d_ignore_some(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
top_k_predictions = [[
[9, 4, 6, 2, 0],
[5, 7, 2, 9, 6],
], [
[5, 7, 2, 9, 6],
[9, 4, 6, 2, 0],
]]
labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0]
]])
# Class 2: 2 predictions, both correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=2.0 / 2.0, class_id=2,
weights=[[1], [0]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=2.0 / 2.0, class_id=2,
weights=[[1], [0]])
# Class 2: 2 predictions, both correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=2.0 / 2.0, class_id=2,
weights=[[0], [1]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=2.0 / 2.0, class_id=2,
weights=[[0], [1]])
# Class 7: 1 incorrect prediction.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=0.0 / 1.0, class_id=7,
weights=[[1], [0]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=0.0 / 1.0, class_id=7,
weights=[[1], [0]])
# Class 7: 1 correct prediction.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=1.0 / 1.0, class_id=7,
weights=[[0], [1]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=1.0 / 1.0, class_id=7,
weights=[[0], [1]])
# Class 7: no predictions.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=NAN, class_id=7,
weights=[[1, 0], [0, 1]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=NAN, class_id=7,
weights=[[1, 0], [0, 1]])
# Class 7: 2 predictions, 1 correct.
self._test_streaming_sparse_precision_at_k(
predictions, labels, k=5, expected=1.0 / 2.0, class_id=7,
weights=[[0, 1], [1, 0]])
self._test_streaming_sparse_precision_at_top_k(
top_k_predictions, labels, expected=1.0 / 2.0, class_id=7,
weights=[[0, 1], [1, 0]])
def test_sparse_tensor_value(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
labels = [[0, 0, 0, 1], [0, 0, 1, 0]]
expected_precision = 0.5
with self.test_session():
_, precision = metrics.streaming_sparse_precision_at_k(
predictions=tf.constant(predictions, tf.float32),
labels=_binary_2d_label_to_sparse_value(labels), k=1)
tf.initialize_variables(tf.local_variables()).run()
self.assertEqual(expected_precision, precision.eval())
class StreamingSparseRecallTest(tf.test.TestCase):
def _test_streaming_sparse_recall_at_k(self,
predictions,
labels,
k,
expected,
class_id=None,
weights=None):
with tf.Graph().as_default() as g, self.test_session(g):
if weights is not None:
weights = tf.constant(weights, tf.float32)
metric, update = metrics.streaming_sparse_recall_at_k(
predictions=tf.constant(predictions, tf.float32),
labels=labels, k=k, class_id=class_id, weights=weights)
# Fails without initialized vars.
self.assertRaises(tf.OpError, metric.eval)
self.assertRaises(tf.OpError, update.eval)
tf.initialize_variables(tf.local_variables()).run()
# Run per-step op and assert expected values.
if math.isnan(expected):
_assert_nan(self, update.eval())
_assert_nan(self, metric.eval())
else:
self.assertEqual(expected, update.eval())
self.assertEqual(expected, metric.eval())
def test_one_label_at_k1_nan(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
sparse_labels = _binary_2d_label_to_sparse_value(
[[0, 0, 0, 1], [0, 0, 1, 0]])
dense_labels = np.array([[3], [2]], dtype=np.int64)
# Classes 0,1 have 0 labels, 0 predictions, classes -1 and 4 are out of
# range.
for labels in (sparse_labels, dense_labels):
for class_id in (-1, 0, 1, 4):
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=NAN,
class_id=class_id)
def test_one_label_at_k1_no_predictions(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
sparse_labels = _binary_2d_label_to_sparse_value(
[[0, 0, 0, 1], [0, 0, 1, 0]])
dense_labels = np.array([[3], [2]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Class 2: 0 predictions.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=0.0,
class_id=2)
def test_one_label_at_k1(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
sparse_labels = _binary_2d_label_to_sparse_value(
[[0, 0, 0, 1], [0, 0, 1, 0]])
dense_labels = np.array([[3], [2]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Class 3: 1 label, 2 predictions, 1 correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 1,
class_id=3)
# All classes: 2 labels, 2 predictions, 1 correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 2)
def test_one_label_at_k1_weighted(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
sparse_labels = _binary_2d_label_to_sparse_value(
[[0, 0, 0, 1], [0, 0, 1, 0]])
dense_labels = np.array([[3], [2]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Class 3: 1 label, 2 predictions, 1 correct.
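# Weights are applied per example: both the true-positive count and the
# label count contributed by an example are scaled by its weight. E.g.
# weights=(2.0, 3.0) gives 2.0 / 2 for class 3 (its only label is in the
# first example) and 2.0 / 5 over all classes.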
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=NAN, class_id=3, weights=(0.0,))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 1, class_id=3,
weights=(1.0,))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 1, class_id=3,
weights=(2.0,))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=NAN, class_id=3,
weights=(0.0, 0.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=NAN, class_id=3,
weights=(0.0, 1.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 1, class_id=3,
weights=(1.0, 0.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 1, class_id=3,
weights=(1.0, 1.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=2.0 / 2, class_id=3,
weights=(2.0, 3.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=3.0 / 3, class_id=3,
weights=(3.0, 2.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=0.3 / 0.3, class_id=3,
weights=(0.3, 0.6))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=0.6 / 0.6, class_id=3,
weights=(0.6, 0.3))
# All classes: 2 labels, 2 predictions, 1 correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=NAN, weights=(0.0,))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 2, weights=(1.0,))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 2, weights=(2.0,))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 1, weights=(1.0, 0.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=0.0 / 1, weights=(0.0, 1.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=1.0 / 2, weights=(1.0, 1.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=2.0 / 5, weights=(2.0, 3.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=3.0 / 5, weights=(3.0, 2.0))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=0.3 / 0.9, weights=(0.3, 0.6))
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=1, expected=0.6 / 0.9, weights=(0.6, 0.3))
def test_three_labels_at_k5_nan(self):
predictions = [
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
sparse_labels = _binary_2d_label_to_sparse_value([
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Classes 0,3,4,6,9 have 0 labels, class 10 is out of range.
for class_id in (0, 3, 4, 6, 9, 10):
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id)
def test_three_labels_at_k5_no_predictions(self):
predictions = [
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
sparse_labels = _binary_2d_label_to_sparse_value([
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Class 8: 1 label, no predictions.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=0.0 / 1, class_id=8)
def test_three_labels_at_k5(self):
predictions = [
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
sparse_labels = _binary_2d_label_to_sparse_value([
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]])
dense_labels = np.array([[2, 7, 8], [1, 2, 5]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Class 2: 2 labels, both correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=2.0 / 2, class_id=2)
# Class 5: 1 label, correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=1.0 / 1, class_id=5)
# Class 7: 1 label, incorrect.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=0.0 / 1, class_id=7)
# All classes: 6 labels, 3 correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=3.0 / 6)
def test_three_labels_at_k5_some_out_of_range(self):
"""Tests that labels outside the [0, n_classes) count in denominator."""
predictions = [
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]]
sp_labels = tf.SparseTensorValue(
indices=[[0, 0], [0, 1], [0, 2], [0, 3],
[1, 0], [1, 1], [1, 2], [1, 3]],
# values -1 and 10 are outside the [0, n_classes) range.
values=np.array([2, 7, -1, 8,
1, 2, 5, 10], np.int64),
shape=[2, 4])
# Class 2: 2 labels, both correct.
self._test_streaming_sparse_recall_at_k(
predictions=predictions, labels=sp_labels, k=5, expected=2.0 / 2,
class_id=2)
# Class 5: 1 label, correct.
self._test_streaming_sparse_recall_at_k(
predictions=predictions, labels=sp_labels, k=5, expected=1.0 / 1,
class_id=5)
# Class 7: 1 label, incorrect.
self._test_streaming_sparse_recall_at_k(
predictions=predictions, labels=sp_labels, k=5, expected=0.0 / 1,
class_id=7)
# All classes: 8 labels, 3 correct.
self._test_streaming_sparse_recall_at_k(
predictions=predictions, labels=sp_labels, k=5, expected=3.0 / 8)
def test_3d_nan(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
sparse_labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0]
]])
dense_labels = np.array([[
[2, 7, 8],
[1, 2, 5]
], [
[1, 2, 5],
[2, 7, 8],
]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Classes 0,3,4,6,9 have 0 labels, class 10 is out of range.
for class_id in (0, 3, 4, 6, 9, 10):
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id)
def test_3d_no_predictions(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
sparse_labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0]
]])
dense_labels = np.array([[
[2, 7, 8],
[1, 2, 5]
], [
[1, 2, 5],
[2, 7, 8],
]], dtype=np.int64)
for labels in (sparse_labels, dense_labels):
# Classes 1,8 have 0 predictions, >=1 label.
for class_id in (1, 8):
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=0.0, class_id=class_id)
def test_3d(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0]
]])
# Class 2: 4 labels, all correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=4.0 / 4, class_id=2)
# Class 5: 2 labels, both correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=2.0 / 2, class_id=5)
# Class 7: 2 labels, 1 correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=1.0 / 2, class_id=7)
# All classes: 12 labels, 7 correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=7.0 / 12)
def test_3d_ignore_all(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0]
]])
for class_id in xrange(10):
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id,
weights=[[0], [0]])
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=NAN, class_id=class_id,
weights=[[0, 0], [0, 0]])
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=NAN, weights=[[0], [0]])
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=NAN, weights=[[0, 0], [0, 0]])
def test_3d_ignore_some(self):
predictions = [[
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9],
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6]
], [
[0.3, 0.0, 0.7, 0.2, 0.4, 0.9, 0.5, 0.8, 0.1, 0.6],
[0.5, 0.1, 0.6, 0.3, 0.8, 0.0, 0.7, 0.2, 0.4, 0.9]
]]
labels = _binary_3d_label_to_sparse_value([[
[0, 0, 1, 0, 0, 0, 0, 1, 1, 0],
[0, 1, 1, 0, 0, 1, 0, 0, 0, 0]
], [
[0, 1, 1, 0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 1, 0]
]])
# Class 2: 2 labels, both correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=2.0 / 2.0, class_id=2,
weights=[[1], [0]])
# Class 2: 2 labels, both correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=2.0 / 2.0, class_id=2,
weights=[[0], [1]])
# Class 7: 1 label, correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=1.0 / 1.0, class_id=7,
weights=[[0], [1]])
# Class 7: 1 label, incorrect.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=0.0 / 1.0, class_id=7,
weights=[[1], [0]])
# Class 7: 2 labels, 1 correct.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=1.0 / 2.0, class_id=7,
weights=[[1, 0], [1, 0]])
# Class 7: No labels.
self._test_streaming_sparse_recall_at_k(
predictions, labels, k=5, expected=NAN, class_id=7,
weights=[[0, 1], [0, 1]])
def test_sparse_tensor_value(self):
predictions = [[0.1, 0.3, 0.2, 0.4], [0.1, 0.2, 0.3, 0.4]]
labels = [[0, 0, 1, 0], [0, 0, 0, 1]]
expected_recall = 0.5
with self.test_session():
_, recall = metrics.streaming_sparse_recall_at_k(
predictions=tf.constant(predictions, tf.float32),
labels=_binary_2d_label_to_sparse_value(labels), k=1)
tf.initialize_variables(tf.local_variables()).run()
self.assertEqual(expected_recall, recall.eval())
class StreamingMeanAbsoluteErrorTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_mean_absolute_error(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_mean_absolute_error(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_normal((10, 3), seed=1)
labels = tf.random_normal((10, 3), seed=2)
error, update_op = metrics.streaming_mean_absolute_error(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateWithErrorAndWeights(self):
predictions = tf.constant([2, 4, 6, 8], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([1, 3, 2, 3], shape=(1, 4), dtype=tf.float32)
weights = tf.constant([0, 1, 0, 1], shape=(1, 4))
error, update_op = metrics.streaming_mean_absolute_error(
predictions, labels, weights)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(3, sess.run(update_op))
self.assertEqual(3, error.eval())
class StreamingMeanRelativeErrorTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_mean_relative_error(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
normalizer=tf.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(
tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_mean_relative_error(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
normalizer=tf.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_normal((10, 3), seed=1)
labels = tf.random_normal((10, 3), seed=2)
normalizer = tf.random_normal((10, 3), seed=3)
error, update_op = metrics.streaming_mean_relative_error(
predictions, labels, normalizer)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateNormalizedByLabels(self):
np_predictions = np.asarray([2, 4, 6, 8], dtype=np.float32)
np_labels = np.asarray([1, 3, 2, 3], dtype=np.float32)
expected_error = np.mean(
np.divide(np.absolute(np_predictions - np_labels),
np_labels))
predictions = tf.constant(np_predictions, shape=(1, 4), dtype=tf.float32)
labels = tf.constant(np_labels, shape=(1, 4))
error, update_op = metrics.streaming_mean_relative_error(
predictions, labels, normalizer=labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(expected_error, sess.run(update_op))
self.assertEqual(expected_error, error.eval())
def testSingleUpdateNormalizedByZeros(self):
np_predictions = np.asarray([2, 4, 6, 8], dtype=np.float32)
predictions = tf.constant(np_predictions, shape=(1, 4), dtype=tf.float32)
labels = tf.constant([1, 3, 2, 3], shape=(1, 4), dtype=tf.float32)
error, update_op = metrics.streaming_mean_relative_error(
predictions, labels, normalizer=tf.zeros_like(labels))
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(0.0, sess.run(update_op))
self.assertEqual(0.0, error.eval())
class StreamingMeanSquaredErrorTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_mean_squared_error(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_mean_squared_error(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_normal((10, 3), seed=1)
labels = tf.random_normal((10, 3), seed=2)
error, update_op = metrics.streaming_mean_squared_error(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateZeroError(self):
predictions = tf.zeros((1, 3), dtype=tf.float32)
labels = tf.zeros((1, 3), dtype=tf.float32)
error, update_op = metrics.streaming_mean_squared_error(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(0, sess.run(update_op))
self.assertEqual(0, error.eval())
def testSingleUpdateWithError(self):
predictions = tf.constant([2, 4, 6], shape=(1, 3), dtype=tf.float32)
labels = tf.constant([1, 3, 2], shape=(1, 3), dtype=tf.float32)
error, update_op = metrics.streaming_mean_squared_error(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(6, sess.run(update_op))
self.assertEqual(6, error.eval())
def testSingleUpdateWithErrorAndWeights(self):
predictions = tf.constant([2, 4, 6, 8], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([1, 3, 2, 3], shape=(1, 4), dtype=tf.float32)
weights = tf.constant([0, 1, 0, 1], shape=(1, 4))
error, update_op = metrics.streaming_mean_squared_error(
predictions, labels, weights)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(13, sess.run(update_op))
self.assertEqual(13, error.eval())
def testMultipleBatchesOfSizeOne(self):
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(1, 3))
_enqueue_vector(sess, preds_queue, [10, 8, 6])
_enqueue_vector(sess, preds_queue, [-4, 3, -1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(1, 3))
_enqueue_vector(sess, labels_queue, [1, 3, 2])
_enqueue_vector(sess, labels_queue, [2, 4, 6])
labels = labels_queue.dequeue()
error, update_op = metrics.streaming_mean_squared_error(
predictions, labels)
sess.run(tf.local_variables_initializer())
sess.run(update_op)
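# The assertions below check the cumulative MSE: batch 1 squared errors
# (10-1)^2 + (8-3)^2 + (6-2)^2 = 122, batch 2 squared errors
# (-4-2)^2 + (3-4)^2 + (-1-6)^2 = 86; 208 total over 6 elements.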
self.assertAlmostEqual(208.0 / 6, sess.run(update_op), 5)
self.assertAlmostEqual(208.0 / 6, error.eval(), 5)
def testMetricsComputedConcurrently(self):
with self.test_session() as sess:
# Create the queue that populates one set of predictions.
preds_queue0 = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(1, 3))
_enqueue_vector(sess, preds_queue0, [10, 8, 6])
_enqueue_vector(sess, preds_queue0, [-4, 3, -1])
predictions0 = preds_queue0.dequeue()
# Create the queue that populates another set of predictions.
preds_queue1 = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(1, 3))
_enqueue_vector(sess, preds_queue1, [0, 1, 1])
_enqueue_vector(sess, preds_queue1, [1, 1, 0])
predictions1 = preds_queue1.dequeue()
# Create the queue that populates one set of labels.
labels_queue0 = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(1, 3))
_enqueue_vector(sess, labels_queue0, [1, 3, 2])
_enqueue_vector(sess, labels_queue0, [2, 4, 6])
labels0 = labels_queue0.dequeue()
# Create the queue that populates another set of labels.
labels_queue1 = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(1, 3))
_enqueue_vector(sess, labels_queue1, [-5, -3, -1])
_enqueue_vector(sess, labels_queue1, [5, 4, 3])
labels1 = labels_queue1.dequeue()
mse0, update_op0 = metrics.streaming_mean_squared_error(
predictions0, labels0, name='msd0')
mse1, update_op1 = metrics.streaming_mean_squared_error(
predictions1, labels1, name='msd1')
sess.run(tf.local_variables_initializer())
sess.run([update_op0, update_op1])
sess.run([update_op0, update_op1])
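# msd0 sees the same data as the previous test (squared errors 122 + 86 =
# 208 over 6 elements); msd1 sees squared errors 45 + 34 = 79 over 6.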
mse0, mse1 = sess.run([mse0, mse1])
self.assertAlmostEqual(208.0 / 6, mse0, 5)
self.assertAlmostEqual(79.0 / 6, mse1, 5)
def testMultipleMetricsOnMultipleBatchesOfSizeOne(self):
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(1, 3))
_enqueue_vector(sess, preds_queue, [10, 8, 6])
_enqueue_vector(sess, preds_queue, [-4, 3, -1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = tf.FIFOQueue(2, dtypes=tf.float32, shapes=(1, 3))
_enqueue_vector(sess, labels_queue, [1, 3, 2])
_enqueue_vector(sess, labels_queue, [2, 4, 6])
labels = labels_queue.dequeue()
mae, ma_update_op = metrics.streaming_mean_absolute_error(
predictions, labels)
mse, ms_update_op = metrics.streaming_mean_squared_error(
predictions, labels)
sess.run(tf.local_variables_initializer())
sess.run([ma_update_op, ms_update_op])
sess.run([ma_update_op, ms_update_op])
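# Absolute errors: (9 + 5 + 4) + (6 + 1 + 7) = 32; squared errors:
# (81 + 25 + 16) + (36 + 1 + 49) = 208; both averaged over 6 elements.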
self.assertAlmostEqual(32.0 / 6, mae.eval(), 5)
self.assertAlmostEqual(208.0 / 6, mse.eval(), 5)
class StreamingRootMeanSquaredErrorTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_root_mean_squared_error(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_root_mean_squared_error(
predictions=tf.ones((10, 1)),
labels=tf.ones((10, 1)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_normal((10, 3), seed=1)
labels = tf.random_normal((10, 3), seed=2)
error, update_op = metrics.streaming_root_mean_squared_error(
predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateZeroError(self):
with self.test_session() as sess:
predictions = tf.constant(0.0, shape=(1, 3), dtype=tf.float32)
labels = tf.constant(0.0, shape=(1, 3), dtype=tf.float32)
rmse, update_op = metrics.streaming_root_mean_squared_error(
predictions, labels)
sess.run(tf.local_variables_initializer())
self.assertEqual(0, sess.run(update_op))
self.assertEqual(0, rmse.eval())
def testSingleUpdateWithError(self):
with self.test_session() as sess:
predictions = tf.constant([2, 4, 6], shape=(1, 3), dtype=tf.float32)
labels = tf.constant([1, 3, 2], shape=(1, 3), dtype=tf.float32)
rmse, update_op = metrics.streaming_root_mean_squared_error(
predictions, labels)
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(math.sqrt(6), update_op.eval(), 5)
self.assertAlmostEqual(math.sqrt(6), rmse.eval(), 5)
def testSingleUpdateWithErrorAndWeights(self):
with self.test_session() as sess:
predictions = tf.constant([2, 4, 6, 8], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([1, 3, 2, 3], shape=(1, 4), dtype=tf.float32)
weights = tf.constant([0, 1, 0, 1], shape=(1, 4))
rmse, update_op = metrics.streaming_root_mean_squared_error(
predictions, labels, weights)
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(math.sqrt(13), sess.run(update_op))
self.assertAlmostEqual(math.sqrt(13), rmse.eval(), 5)
def _reweight(predictions, labels, weights):
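"""Replicates each (prediction, label) pair int(weight) times.

Numpy statistics (np.cov, np.corrcoef) over the replicated arrays then
match the weighted streaming metrics, assuming integer-valued weights.
"""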
return (np.concatenate([[p] * int(w) for p, w in zip(predictions, weights)]),
np.concatenate([[l] * int(w) for l, w in zip(labels, weights)]))
class StreamingCovarianceTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
cov, _ = metrics.streaming_covariance(
predictions=tf.to_float(tf.range(10)) + tf.ones([10, 10]),
labels=tf.to_float(tf.range(10)) + tf.ones([10, 10]),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [cov])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_covariance(
predictions=tf.to_float(tf.range(10)) + tf.ones([10, 10]),
labels=tf.to_float(tf.range(10)) + tf.ones([10, 10]),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
labels = tf.random_normal((10, 3), seed=2)
predictions = labels * 0.5 + tf.random_normal((10, 3), seed=1) * 0.5
cov, update_op = metrics.streaming_covariance(predictions, labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_cov = cov.eval()
for _ in range(10):
self.assertEqual(initial_cov, cov.eval())
def testSingleUpdateIdentical(self):
with self.test_session() as sess:
predictions = tf.to_float(tf.range(10))
labels = tf.to_float(tf.range(10))
cov, update_op = metrics.streaming_covariance(predictions, labels)
expected_cov = np.cov(np.arange(10), np.arange(10))[0, 1]
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(expected_cov, sess.run(update_op), 5)
self.assertAlmostEqual(expected_cov, cov.eval(), 5)
def testSingleUpdateNonIdentical(self):
with self.test_session() as sess:
predictions = tf.constant([2, 4, 6], shape=(1, 3), dtype=tf.float32)
labels = tf.constant([1, 3, 2], shape=(1, 3), dtype=tf.float32)
cov, update_op = metrics.streaming_covariance(predictions, labels)
expected_cov = np.cov([2, 4, 6], [1, 3, 2])[0, 1]
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(expected_cov, update_op.eval())
self.assertAlmostEqual(expected_cov, cov.eval())
def testSingleUpdateWithErrorAndWeights(self):
with self.test_session() as sess:
predictions = tf.constant([2, 4, 6, 8], shape=(1, 4), dtype=tf.float32)
labels = tf.constant([1, 3, 2, 7], shape=(1, 4), dtype=tf.float32)
weights = tf.constant([0, 1, 3, 1], shape=(1, 4), dtype=tf.float32)
cov, update_op = metrics.streaming_covariance(
predictions, labels, weights=weights)
p, l = _reweight([2, 4, 6, 8], [1, 3, 2, 7], [0, 1, 3, 1])
expected_cov = np.cov(p, l)[0, 1]
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(expected_cov, sess.run(update_op))
self.assertAlmostEqual(expected_cov, cov.eval())
def testMultiUpdateWithErrorNoWeights(self):
with self.test_session() as sess:
np.random.seed(123)
n = 100
predictions = np.random.randn(n)
labels = 0.5 * predictions + np.random.randn(n)
stride = 10
predictions_t = tf.placeholder(tf.float32, [stride])
labels_t = tf.placeholder(tf.float32, [stride])
cov, update_op = metrics.streaming_covariance(predictions_t, labels_t)
sess.run(tf.local_variables_initializer())
prev_expected_cov = 0.
for i in range(n // stride):
feed_dict = {
predictions_t: predictions[stride * i:stride * (i + 1)],
labels_t: labels[stride * i:stride * (i + 1)]
}
self.assertAlmostEqual(
prev_expected_cov, sess.run(cov, feed_dict=feed_dict), 5)
expected_cov = np.cov(predictions[:stride * (i + 1)],
labels[:stride * (i + 1)])[0, 1]
self.assertAlmostEqual(
expected_cov, sess.run(update_op, feed_dict=feed_dict), 5)
self.assertAlmostEqual(
expected_cov, sess.run(cov, feed_dict=feed_dict), 5)
prev_expected_cov = expected_cov
def testMultiUpdateWithErrorAndWeights(self):
with self.test_session() as sess:
np.random.seed(123)
n = 100
predictions = np.random.randn(n)
labels = 0.5 * predictions + np.random.randn(n)
weights = np.tile(np.arange(n // 10), n // 10)
np.random.shuffle(weights)
stride = 10
predictions_t = tf.placeholder(tf.float32, [stride])
labels_t = tf.placeholder(tf.float32, [stride])
weights_t = tf.placeholder(tf.float32, [stride])
cov, update_op = metrics.streaming_covariance(
predictions_t, labels_t, weights=weights_t)
sess.run(tf.local_variables_initializer())
prev_expected_cov = 0.
for i in range(n // stride):
feed_dict = {
predictions_t: predictions[stride * i:stride * (i + 1)],
labels_t: labels[stride * i:stride * (i + 1)],
weights_t: weights[stride * i:stride * (i + 1)]
}
self.assertAlmostEqual(
prev_expected_cov, sess.run(cov, feed_dict=feed_dict), 5)
p, l = _reweight(predictions[:stride * (i + 1)],
labels[:stride * (i + 1)], weights[:stride * (i + 1)])
expected_cov = np.cov(p, l)[0, 1]
self.assertAlmostEqual(
expected_cov, sess.run(update_op, feed_dict=feed_dict), 5)
self.assertAlmostEqual(
expected_cov, sess.run(cov, feed_dict=feed_dict), 5)
prev_expected_cov = expected_cov
class StreamingPearsonRTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
pearson_r, _ = metrics.streaming_pearson_correlation(
predictions=tf.to_float(tf.range(10)) + tf.ones([10, 10]),
labels=tf.to_float(tf.range(10)) + tf.ones([10, 10]),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [pearson_r])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_pearson_correlation(
predictions=tf.to_float(tf.range(10)) + tf.ones([10, 10]),
labels=tf.to_float(tf.range(10)) + tf.ones([10, 10]),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
labels = tf.random_normal((10, 3), seed=2)
predictions = labels * 0.5 + tf.random_normal((10, 3), seed=1) * 0.5
pearson_r, update_op = metrics.streaming_pearson_correlation(predictions,
labels)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_r = pearson_r.eval()
for _ in range(10):
self.assertEqual(initial_r, pearson_r.eval())
def testSingleUpdateIdentical(self):
with self.test_session() as sess:
predictions = tf.to_float(tf.range(10))
labels = tf.to_float(tf.range(10))
pearson_r, update_op = metrics.streaming_pearson_correlation(predictions,
labels)
expected_r = np.corrcoef(np.arange(10), np.arange(10))[0, 1]
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(expected_r, sess.run(update_op), 5)
self.assertAlmostEqual(expected_r, pearson_r.eval(), 5)
def testSingleUpdateNonIdentical(self):
with self.test_session() as sess:
predictions = tf.constant([2, 4, 6], shape=(1, 3), dtype=tf.float32)
labels = tf.constant([1, 3, 2], shape=(1, 3), dtype=tf.float32)
pearson_r, update_op = metrics.streaming_pearson_correlation(predictions,
labels)
expected_r = np.corrcoef([2, 4, 6], [1, 3, 2])[0, 1]
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(expected_r, update_op.eval())
self.assertAlmostEqual(expected_r, pearson_r.eval())
def testSingleUpdateWithErrorAndWeights(self):
with self.test_session() as sess:
predictions = np.array([2, 4, 6, 8])
labels = np.array([1, 3, 2, 7])
weights = np.array([0, 1, 3, 1])
predictions_t = tf.constant(predictions, shape=(1, 4), dtype=tf.float32)
labels_t = tf.constant(labels, shape=(1, 4), dtype=tf.float32)
weights_t = tf.constant(weights, shape=(1, 4), dtype=tf.float32)
pearson_r, update_op = metrics.streaming_pearson_correlation(
predictions_t, labels_t, weights=weights_t)
p, l = _reweight(predictions, labels, weights)
cmat = np.cov(p, l)
expected_r = cmat[0, 1] / np.sqrt(cmat[0, 0] * cmat[1, 1])
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(expected_r, sess.run(update_op))
self.assertAlmostEqual(expected_r, pearson_r.eval())
def testMultiUpdateWithErrorNoWeights(self):
with self.test_session() as sess:
np.random.seed(123)
n = 100
predictions = np.random.randn(n)
labels = 0.5 * predictions + np.random.randn(n)
stride = 10
predictions_t = tf.placeholder(tf.float32, [stride])
labels_t = tf.placeholder(tf.float32, [stride])
pearson_r, update_op = metrics.streaming_pearson_correlation(
predictions_t, labels_t)
sess.run(tf.local_variables_initializer())
prev_expected_r = 0.
for i in range(n // stride):
feed_dict = {
predictions_t: predictions[stride * i:stride * (i + 1)],
labels_t: labels[stride * i:stride * (i + 1)]
}
self.assertAlmostEqual(
prev_expected_r, sess.run(pearson_r, feed_dict=feed_dict), 5)
expected_r = np.corrcoef(predictions[:stride * (i + 1)],
labels[:stride * (i + 1)])[0, 1]
self.assertAlmostEqual(
expected_r, sess.run(update_op, feed_dict=feed_dict), 5)
self.assertAlmostEqual(
expected_r, sess.run(pearson_r, feed_dict=feed_dict), 5)
prev_expected_r = expected_r
def testMultiUpdateWithErrorAndWeights(self):
with self.test_session() as sess:
np.random.seed(123)
n = 100
predictions = np.random.randn(n)
labels = 0.5 * predictions + np.random.randn(n)
weights = np.tile(np.arange(n // 10), n // 10)
np.random.shuffle(weights)
stride = 10
predictions_t = tf.placeholder(tf.float32, [stride])
labels_t = tf.placeholder(tf.float32, [stride])
weights_t = tf.placeholder(tf.float32, [stride])
pearson_r, update_op = metrics.streaming_pearson_correlation(
predictions_t, labels_t, weights=weights_t)
sess.run(tf.local_variables_initializer())
prev_expected_r = 0.
for i in range(n // stride):
feed_dict = {
predictions_t: predictions[stride * i:stride * (i + 1)],
labels_t: labels[stride * i:stride * (i + 1)],
weights_t: weights[stride * i:stride * (i + 1)]
}
self.assertAlmostEqual(
prev_expected_r, sess.run(pearson_r, feed_dict=feed_dict), 5)
p, l = _reweight(predictions[:stride * (i + 1)],
labels[:stride * (i + 1)], weights[:stride * (i + 1)])
cmat = np.cov(p, l)
expected_r = cmat[0, 1] / np.sqrt(cmat[0, 0] * cmat[1, 1])
self.assertAlmostEqual(
expected_r, sess.run(update_op, feed_dict=feed_dict), 5)
self.assertAlmostEqual(
expected_r, sess.run(pearson_r, feed_dict=feed_dict), 5)
prev_expected_r = expected_r
class StreamingMeanCosineDistanceTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_mean_cosine_distance(
predictions=tf.ones((10, 3)),
labels=tf.ones((10, 3)),
dim=1,
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_mean_cosine_distance(
predictions=tf.ones((10, 3)),
labels=tf.ones((10, 3)),
dim=1,
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testValueTensorIsIdempotent(self):
predictions = tf.random_normal((10, 3), seed=1)
labels = tf.random_normal((10, 3), seed=2)
error, update_op = metrics.streaming_mean_cosine_distance(
predictions, labels, dim=1)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_error = error.eval()
for _ in range(10):
self.assertEqual(initial_error, error.eval())
def testSingleUpdateZeroError(self):
np_labels = np.matrix(('1 0 0;'
'0 0 1;'
'0 1 0'))
predictions = tf.constant(np_labels, shape=(1, 3, 3), dtype=tf.float32)
labels = tf.constant(np_labels, shape=(1, 3, 3), dtype=tf.float32)
error, update_op = metrics.streaming_mean_cosine_distance(
predictions, labels, dim=2)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(0, sess.run(update_op))
self.assertEqual(0, error.eval())
def testSingleUpdateWithError1(self):
np_labels = np.matrix(('1 0 0;'
'0 0 1;'
'0 1 0'))
np_predictions = np.matrix(('1 0 0;'
'0 0 -1;'
'1 0 0'))
predictions = tf.constant(np_predictions, shape=(3, 1, 3), dtype=tf.float32)
labels = tf.constant(np_labels, shape=(3, 1, 3), dtype=tf.float32)
error, update_op = metrics.streaming_mean_cosine_distance(
predictions, labels, dim=2)
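# Per-row cosine distances (1 - cosine similarity): identical vectors -> 0,
# opposite vectors -> 2, orthogonal vectors -> 1; the mean is 1.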
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(1, sess.run(update_op), 5)
self.assertAlmostEqual(1, error.eval(), 5)
def testSingleUpdateWithError2(self):
np_predictions = np.matrix((
'0.819031913261206 0.567041924552012 0.087465312324590;'
'-0.665139432070255 -0.739487441769973 -0.103671883216994;'
'0.707106781186548 -0.707106781186548 0'))
np_labels = np.matrix((
'0.819031913261206 0.567041924552012 0.087465312324590;'
'0.665139432070255 0.739487441769973 0.103671883216994;'
'0.707106781186548 0.707106781186548 0'))
predictions = tf.constant(np_predictions, shape=(3, 1, 3), dtype=tf.float32)
labels = tf.constant(np_labels, shape=(3, 1, 3), dtype=tf.float32)
error, update_op = metrics.streaming_mean_cosine_distance(
predictions, labels, dim=2)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertAlmostEqual(1.0, sess.run(update_op), 5)
self.assertAlmostEqual(1.0, error.eval(), 5)
def testSingleUpdateWithErrorAndWeights1(self):
np_predictions = np.matrix(('1 0 0;'
'0 0 -1;'
'1 0 0'))
np_labels = np.matrix(('1 0 0;'
'0 0 1;'
'0 1 0'))
predictions = tf.constant(np_predictions, shape=(3, 1, 3), dtype=tf.float32)
labels = tf.constant(np_labels, shape=(3, 1, 3), dtype=tf.float32)
weights = tf.constant([1, 0, 0], shape=(3, 1, 1), dtype=tf.float32)
error, update_op = metrics.streaming_mean_cosine_distance(
predictions, labels, dim=2, weights=weights)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(0, sess.run(update_op))
self.assertEqual(0, error.eval())
def testSingleUpdateWithErrorAndWeights2(self):
np_predictions = np.matrix(('1 0 0;'
'0 0 -1;'
'1 0 0'))
np_labels = np.matrix(('1 0 0;'
'0 0 1;'
'0 1 0'))
predictions = tf.constant(np_predictions, shape=(3, 1, 3), dtype=tf.float32)
labels = tf.constant(np_labels, shape=(3, 1, 3), dtype=tf.float32)
weights = tf.constant([0, 1, 1], shape=(3, 1, 1), dtype=tf.float32)
error, update_op = metrics.streaming_mean_cosine_distance(
predictions, labels, dim=2, weights=weights)
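# Only the last two rows are weighted in: distances 2 (opposite) and 1
# (orthogonal), so the weighted mean is 3 / 2 = 1.5.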
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(1.5, update_op.eval())
self.assertEqual(1.5, error.eval())
class PcntBelowThreshTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
mean, _ = metrics.streaming_percentage_less(
values=tf.ones((10,)),
threshold=2,
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_percentage_less(
values=tf.ones((10,)),
threshold=2,
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testOneUpdate(self):
with self.test_session() as sess:
values = tf.constant([2, 4, 6, 8], shape=(1, 4), dtype=tf.float32)
pcnt0, update_op0 = metrics.streaming_percentage_less(
values, 100, name='high')
pcnt1, update_op1 = metrics.streaming_percentage_less(
values, 7, name='medium')
pcnt2, update_op2 = metrics.streaming_percentage_less(
values, 1, name='low')
sess.run(tf.local_variables_initializer())
sess.run([update_op0, update_op1, update_op2])
pcnt0, pcnt1, pcnt2 = sess.run([pcnt0, pcnt1, pcnt2])
self.assertAlmostEqual(1.0, pcnt0, 5)
self.assertAlmostEqual(0.75, pcnt1, 5)
self.assertAlmostEqual(0.0, pcnt2, 5)
def testSomePresentOneUpdate(self):
with self.test_session() as sess:
values = tf.constant([2, 4, 6, 8], shape=(1, 4), dtype=tf.float32)
weights = tf.constant([1, 0, 0, 1], shape=(1, 4), dtype=tf.float32)
pcnt0, update_op0 = metrics.streaming_percentage_less(
values, 100, weights=weights, name='high')
pcnt1, update_op1 = metrics.streaming_percentage_less(
values, 7, weights=weights, name='medium')
pcnt2, update_op2 = metrics.streaming_percentage_less(
values, 1, weights=weights, name='low')
sess.run(tf.local_variables_initializer())
self.assertListEqual([1.0, 0.5, 0.0],
sess.run([update_op0, update_op1, update_op2]))
pcnt0, pcnt1, pcnt2 = sess.run([pcnt0, pcnt1, pcnt2])
self.assertAlmostEqual(1.0, pcnt0, 5)
self.assertAlmostEqual(0.5, pcnt1, 5)
self.assertAlmostEqual(0.0, pcnt2, 5)
class StreamingMeanIOUTest(tf.test.TestCase):
def setUp(self):
np.random.seed(1)
tf.reset_default_graph()
def testMetricsCollections(self):
my_collection_name = '__metrics__'
mean_iou, _ = metrics.streaming_mean_iou(
predictions=tf.ones([10, 1]),
labels=tf.ones([10, 1]),
num_classes=2,
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [mean_iou])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_mean_iou(
predictions=tf.ones([10, 1]),
labels=tf.ones([10, 1]),
num_classes=2,
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testPredictionsAndLabelsOfDifferentSizeRaisesValueError(self):
predictions = tf.ones([10, 3])
labels = tf.ones([10, 4])
with self.assertRaises(ValueError):
metrics.streaming_mean_iou(
predictions, labels, num_classes=2)
def testLabelsAndWeightsOfDifferentSizeRaisesValueError(self):
predictions = tf.ones([10])
labels = tf.ones([10])
weights = tf.zeros([9])
with self.assertRaises(ValueError):
metrics.streaming_mean_iou(
predictions, labels, num_classes=2, weights=weights)
def testValueTensorIsIdempotent(self):
num_classes = 3
predictions = tf.random_uniform([10], maxval=num_classes,
dtype=tf.int64, seed=1)
labels = tf.random_uniform([10], maxval=num_classes,
dtype=tf.int64, seed=1)
miou, update_op = metrics.streaming_mean_iou(
predictions, labels, num_classes=num_classes)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
# Run several updates.
for _ in range(10):
sess.run(update_op)
# Then verify idempotency.
initial_miou = miou.eval()
for _ in range(10):
self.assertEqual(initial_miou, miou.eval())
def testMultipleUpdates(self):
num_classes = 3
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = tf.FIFOQueue(5, dtypes=tf.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [2])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = tf.FIFOQueue(5, dtypes=tf.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [2])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
miou, update_op = metrics.streaming_mean_iou(
predictions, labels, num_classes)
sess.run(tf.local_variables_initializer())
for _ in range(5):
sess.run(update_op)
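# Per-class IoU = true positives / (predictions + labels - true positives):
# class 0: 1 / (2 + 1 - 1) = 1/2, class 1: 1 / (2 + 3 - 1) = 1/4,
# class 2: 0 / (1 + 1 - 0) = 0.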
desired_output = np.mean([1.0/2.0, 1.0/4.0, 0.])
self.assertEqual(desired_output, miou.eval())
def testMultipleUpdatesWithWeights(self):
num_classes = 2
with self.test_session() as sess:
# Create the queue that populates the predictions.
preds_queue = tf.FIFOQueue(6, dtypes=tf.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
labels_queue = tf.FIFOQueue(6, dtypes=tf.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
# Create the queue that populates the weights.
weights_queue = tf.FIFOQueue(6, dtypes=tf.float32, shapes=(1, 1))
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [0.0])
_enqueue_vector(sess, weights_queue, [1.0])
_enqueue_vector(sess, weights_queue, [0.0])
weights = weights_queue.dequeue()
miou, update_op = metrics.streaming_mean_iou(
predictions, labels, num_classes, weights=weights)
sess.run(tf.local_variables_initializer())
for _ in range(6):
sess.run(update_op)
desired_output = np.mean([2.0/3.0, 1.0/2.0])
self.assertAlmostEqual(desired_output, miou.eval())
def testMultipleUpdatesWithMissingClass(self):
# Test the case where there are no predictions or labels for
# one class, and thus there is one row and one column with
# zero entries in the confusion matrix.
num_classes = 3
with self.test_session() as sess:
# Create the queue that populates the predictions.
# There is no prediction for class 2.
preds_queue = tf.FIFOQueue(5, dtypes=tf.int32, shapes=(1, 1))
_enqueue_vector(sess, preds_queue, [0])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [1])
_enqueue_vector(sess, preds_queue, [0])
predictions = preds_queue.dequeue()
# Create the queue that populates the labels.
# There is no label for class 2.
labels_queue = tf.FIFOQueue(5, dtypes=tf.int32, shapes=(1, 1))
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [1])
_enqueue_vector(sess, labels_queue, [0])
_enqueue_vector(sess, labels_queue, [1])
labels = labels_queue.dequeue()
miou, update_op = metrics.streaming_mean_iou(
predictions, labels, num_classes)
sess.run(tf.local_variables_initializer())
for _ in range(5):
sess.run(update_op)
desired_output = np.mean([1.0/3.0, 2.0/4.0, 0.])
self.assertAlmostEqual(desired_output, miou.eval())
def testUpdateOpEvalIsAccumulatedConfusionMatrix(self):
predictions = tf.concat(0,
[tf.constant(0, shape=[5]),
tf.constant(1, shape=[5])])
labels = tf.concat(0,
[tf.constant(0, shape=[3]),
tf.constant(1, shape=[7])])
num_classes = 2
with self.test_session() as sess:
miou, update_op = metrics.streaming_mean_iou(
predictions, labels, num_classes)
sess.run(tf.local_variables_initializer())
confusion_matrix = update_op.eval()
self.assertAllEqual([[3, 2], [0, 5]], confusion_matrix)
desired_miou = np.mean([3./5., 5./7.])
self.assertAlmostEqual(desired_miou, miou.eval())
def testAllCorrect(self):
predictions = tf.zeros([40])
labels = tf.zeros([40])
num_classes = 1
with self.test_session() as sess:
miou, update_op = metrics.streaming_mean_iou(
predictions, labels, num_classes)
sess.run(tf.local_variables_initializer())
self.assertEqual(40, update_op.eval()[0])
self.assertEqual(1.0, miou.eval())
def testAllWrong(self):
predictions = tf.zeros([40])
labels = tf.ones([40])
num_classes = 2
with self.test_session() as sess:
miou, update_op = metrics.streaming_mean_iou(
predictions, labels, num_classes)
sess.run(tf.local_variables_initializer())
self.assertAllEqual([[0, 40], [0, 0]], update_op.eval())
self.assertEqual(0., miou.eval())
def testResultsWithSomeMissing(self):
predictions = tf.concat(0, [tf.constant(0, shape=[5]),
tf.constant(1, shape=[5])])
labels = tf.concat(0, [tf.constant(0, shape=[3]),
tf.constant(1, shape=[7])])
num_classes = 2
weights = tf.concat(0, [tf.constant(0, shape=[1]),
tf.constant(1, shape=[8]),
tf.constant(0, shape=[1])])
with self.test_session() as sess:
miou, update_op = metrics.streaming_mean_iou(
predictions, labels, num_classes, weights=weights)
sess.run(tf.local_variables_initializer())
self.assertAllEqual([[2, 2], [0, 4]], update_op.eval())
desired_miou = np.mean([2./4., 4./6.])
self.assertAlmostEqual(desired_miou, miou.eval())
class StreamingConcatTest(tf.test.TestCase):
def setUp(self):
tf.reset_default_graph()
def testMetricsCollection(self):
my_collection_name = '__metrics__'
value, _ = metrics.streaming_concat(
values=tf.ones((10,)),
metrics_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [value])
def testUpdatesCollection(self):
my_collection_name = '__updates__'
_, update_op = metrics.streaming_concat(
values=tf.ones((10,)),
updates_collections=[my_collection_name])
self.assertListEqual(tf.get_collection(my_collection_name), [update_op])
def testNextArraySize(self):
next_array_size = metrics.python.ops.metric_ops._next_array_size
with self.test_session():
self.assertEqual(next_array_size(2, growth_factor=2).eval(), 2)
self.assertEqual(next_array_size(3, growth_factor=2).eval(), 4)
self.assertEqual(next_array_size(4, growth_factor=2).eval(), 4)
self.assertEqual(next_array_size(5, growth_factor=2).eval(), 8)
self.assertEqual(next_array_size(6, growth_factor=2).eval(), 8)
def testStreamingConcat(self):
with self.test_session() as sess:
values = tf.placeholder(tf.int32, [None])
concatenated, update_op = metrics.streaming_concat(values)
sess.run(tf.local_variables_initializer())
self.assertAllEqual([], concatenated.eval())
sess.run([update_op], feed_dict={values: [0, 1, 2]})
self.assertAllEqual([0, 1, 2], concatenated.eval())
sess.run([update_op], feed_dict={values: [3, 4]})
self.assertAllEqual([0, 1, 2, 3, 4], concatenated.eval())
sess.run([update_op], feed_dict={values: [5, 6, 7, 8, 9]})
self.assertAllEqual(np.arange(10), concatenated.eval())
def testStreamingConcatMaxSize(self):
with self.test_session() as sess:
values = tf.range(3)
concatenated, update_op = metrics.streaming_concat(values, max_size=5)
sess.run(tf.local_variables_initializer())
self.assertAllEqual([], concatenated.eval())
sess.run([update_op])
self.assertAllEqual([0, 1, 2], concatenated.eval())
sess.run([update_op])
self.assertAllEqual([0, 1, 2, 0, 1], concatenated.eval())
sess.run([update_op])
self.assertAllEqual([0, 1, 2, 0, 1], concatenated.eval())
def testStreamingConcat2D(self):
with self.test_session() as sess:
values = tf.reshape(tf.range(3), (3, 1))
concatenated, update_op = metrics.streaming_concat(values, axis=-1)
sess.run(tf.local_variables_initializer())
for _ in range(10):
sess.run([update_op])
self.assertAllEqual([[0] * 10, [1] * 10, [2] * 10],
concatenated.eval())
def testStreamingConcatErrors(self):
with self.assertRaises(ValueError):
metrics.streaming_concat(tf.placeholder(tf.float32))
values = tf.zeros((2, 3))
with self.assertRaises(ValueError):
metrics.streaming_concat(values, axis=-3, max_size=3)
with self.assertRaises(ValueError):
metrics.streaming_concat(values, axis=2, max_size=3)
with self.assertRaises(ValueError):
metrics.streaming_concat(tf.placeholder(tf.float32, [None, None]))
def testStreamingConcatReset(self):
with self.test_session() as sess:
values = tf.placeholder(tf.int32, [None])
concatenated, update_op = metrics.streaming_concat(values)
sess.run(tf.local_variables_initializer())
self.assertAllEqual([], concatenated.eval())
sess.run([update_op], feed_dict={values: [0, 1, 2]})
self.assertAllEqual([0, 1, 2], concatenated.eval())
sess.run(tf.local_variables_initializer())
sess.run([update_op], feed_dict={values: [3, 4]})
self.assertAllEqual([3, 4], concatenated.eval())
class AggregateMetricsTest(tf.test.TestCase):
def testAggregateNoMetricsRaisesValueError(self):
with self.assertRaises(ValueError):
metrics.aggregate_metrics()
def testAggregateSingleMetricReturnsOneItemLists(self):
values = tf.ones((10, 4))
value_tensors, update_ops = metrics.aggregate_metrics(
metrics.streaming_mean(values))
self.assertEqual(len(value_tensors), 1)
self.assertEqual(len(update_ops), 1)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(1, update_ops[0].eval())
self.assertEqual(1, value_tensors[0].eval())
def testAggregateMultipleMetricsReturnsListsInOrder(self):
predictions = tf.ones((10, 4))
labels = tf.ones((10, 4)) * 3
value_tensors, update_ops = metrics.aggregate_metrics(
metrics.streaming_mean_absolute_error(
predictions, labels),
metrics.streaming_mean_squared_error(
predictions, labels))
self.assertEqual(len(value_tensors), 2)
self.assertEqual(len(update_ops), 2)
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(2, update_ops[0].eval())
self.assertEqual(4, update_ops[1].eval())
self.assertEqual(2, value_tensors[0].eval())
self.assertEqual(4, value_tensors[1].eval())
class AggregateMetricMapTest(tf.test.TestCase):
def testAggregateMultipleMetricsReturnsListsInOrder(self):
predictions = tf.ones((10, 4))
labels = tf.ones((10, 4)) * 3
names_to_values, names_to_updates = metrics.aggregate_metric_map(
{
'm1': metrics.streaming_mean_absolute_error(
predictions, labels),
'm2': metrics.streaming_mean_squared_error(
predictions, labels),
})
self.assertEqual(2, len(names_to_values))
self.assertEqual(2, len(names_to_updates))
with self.test_session() as sess:
sess.run(tf.local_variables_initializer())
self.assertEqual(2, names_to_updates['m1'].eval())
self.assertEqual(4, names_to_updates['m2'].eval())
self.assertEqual(2, names_to_values['m1'].eval())
self.assertEqual(4, names_to_values['m2'].eval())
class NumRelevantTest(tf.test.TestCase):
def testNumRelevantInvalidArgs(self):
labels = tf.random_uniform(
shape=(3, 3, 3), minval=0, maxval=100, dtype=tf.int32)
with self.assertRaisesRegexp(ValueError, 'nvalid k'):
metric_ops.num_relevant(labels, k=0)
with self.assertRaisesRegexp(ValueError, 'nvalid k'):
metric_ops.num_relevant(labels, k=-1)
def testNumRelevantDense(self):
with self.test_session():
labels = tf.random_uniform(
shape=(3, 3, 3), minval=0, maxval=100, dtype=tf.int32)
ones = np.ones(shape=(3, 3))
self.assertAllEqual(ones, metric_ops.num_relevant(labels, k=1).eval())
twos = ones * 2
self.assertAllEqual(twos, metric_ops.num_relevant(labels, k=2).eval())
threes = ones * 3
self.assertAllEqual(threes, metric_ops.num_relevant(labels, k=3).eval())
self.assertAllEqual(threes, metric_ops.num_relevant(labels, k=4).eval())
self.assertAllEqual(threes, metric_ops.num_relevant(labels, k=999).eval())
def testNumRelevantSparse(self):
with self.test_session():
labels = tf.SparseTensorValue(
indices=(
(0, 0, 0), (0, 0, 1),
(0, 1, 0), (0, 1, 1), (0, 1, 2),
# (0, 2) missing
(1, 0, 0), (1, 0, 1), (1, 0, 2),
(1, 1, 0),
(1, 2, 0),
# (2, 0) missing
(2, 1, 0), (2, 1, 1),
(2, 2, 0)),
values=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13),
shape=(3, 3, 3))
self.assertAllEqual(
((1, 1, 0), (1, 1, 1), (0, 1, 1)),
metric_ops.num_relevant(labels, k=1).eval())
self.assertAllEqual(
((2, 2, 0), (2, 1, 1), (0, 2, 1)),
metric_ops.num_relevant(labels, k=2).eval())
label_lengths = ((2, 3, 0), (3, 1, 1), (0, 2, 1))
self.assertAllEqual(
label_lengths, metric_ops.num_relevant(labels, k=3).eval())
self.assertAllEqual(
label_lengths, metric_ops.num_relevant(labels, k=999).eval())
class ExpandAndTileTest(tf.test.TestCase):
def testExpandAndTileInvalidArgs(self):
x = tf.ones(shape=(3, 3, 3))
with self.assertRaisesRegexp(ValueError, 'nvalid multiple'):
metric_ops.expand_and_tile(x, multiple=0)
with self.test_session():
with self.assertRaises(ValueError):
metric_ops.expand_and_tile(x, multiple=1, dim=-4).eval()
with self.assertRaises(ValueError):
metric_ops.expand_and_tile(x, multiple=1, dim=4).eval()
def testSparseExpandAndTileInvalidArgs(self):
x = tf.SparseTensorValue(
indices=[
(i, j, k) for i in range(3) for j in range(3) for k in range(3)],
values=[1] * 27,
shape=[3, 3, 3])
with self.assertRaisesRegexp(ValueError, 'nvalid multiple'):
metric_ops.expand_and_tile(x, multiple=0)
with self.test_session():
with self.assertRaises(tf.OpError):
metric_ops.expand_and_tile(x, multiple=1, dim=-4).eval()
with self.assertRaises(ValueError):
metric_ops.expand_and_tile(x, multiple=1, dim=4).eval()
def _test_expand_and_tile(
self, expected_shape, expected_value, tensor, multiple, dim=None):
with tf.Graph().as_default() as g, self.test_session(g):
if dim is None:
op = metric_ops.expand_and_tile(tensor=tensor, multiple=multiple)
else:
op = metric_ops.expand_and_tile(
tensor=tensor, multiple=multiple, dim=dim)
self.assertAllEqual(expected_shape, tf.shape(op).eval())
self.assertAllEqual(expected_value, op.eval())
# TODO(ptucker): Use @parameterized when it's available in tf.
def testExpandAndTile1x(self):
# Shape (3,3,3).
x = ((
(1, 2, 3),
(4, 5, 6),
(7, 8, 9)
), (
(10, 11, 12),
(13, 14, 15),
(16, 17, 18)
), (
(19, 20, 21),
(22, 23, 24),
(25, 26, 26)
))
for dim in (None, -3, 0):
self._test_expand_and_tile(
expected_shape=(1, 3, 3, 3),
expected_value=[x],
tensor=x, multiple=1, dim=dim)
for dim in (-2, 1):
self._test_expand_and_tile(
expected_shape=(3, 1, 3, 3),
expected_value=[[x1] for x1 in x],
tensor=x, multiple=1, dim=dim)
for dim in (-1, 2):
self._test_expand_and_tile(
expected_shape=(3, 3, 1, 3),
expected_value=[[[x2] for x2 in x1] for x1 in x],
tensor=x, multiple=1, dim=dim)
self._test_expand_and_tile(
expected_shape=(3, 3, 3, 1),
expected_value=[[[[x3] for x3 in x2] for x2 in x1] for x1 in x],
tensor=x, multiple=1, dim=3)
# TODO(ptucker): Use @parameterized when it's available in tf.
def testExpandAndTile5x(self):
# Shape (3,3,3).
x = ((
(1, 2, 3),
(4, 5, 6),
(7, 8, 9)
), (
(10, 11, 12),
(13, 14, 15),
(16, 17, 18)
), (
(19, 20, 21),
(22, 23, 24),
(25, 26, 26)
))
with self.test_session():
for dim in (None, -3, 0):
self._test_expand_and_tile(
expected_shape=(5, 3, 3, 3),
expected_value=[x] * 5,
tensor=x, multiple=5, dim=dim)
for dim in (-2, 1):
self._test_expand_and_tile(
expected_shape=(3, 5, 3, 3),
expected_value=[[x1] * 5 for x1 in x],
tensor=x, multiple=5, dim=dim)
for dim in (-1, 2):
self._test_expand_and_tile(
expected_shape=(3, 3, 5, 3),
expected_value=[[[x2] * 5 for x2 in x1] for x1 in x],
tensor=x, multiple=5, dim=dim)
self._test_expand_and_tile(
expected_shape=(3, 3, 3, 5),
expected_value=[[[[x3] * 5 for x3 in x2] for x2 in x1] for x1 in x],
tensor=x, multiple=5, dim=3)
def _assert_sparse_tensors_equal(self, expected, actual):
self.assertAllEqual(expected.indices, actual.indices)
self.assertAllEqual(expected.values, actual.values)
self.assertAllEqual(expected.shape, actual.shape)
# TODO(ptucker): Use @parameterized when it's available in tf.
def testSparseExpandAndTile1x(self):
# Shape (3,3).
x = tf.SparseTensorValue(
indices=[
[0, 0], [0, 1],
[1, 0], [1, 1], [1, 2],
[2, 0]],
values=[
1, 2,
3, 4, 5,
6],
shape=[3, 3])
with self.test_session():
expected_result_dim0 = tf.SparseTensorValue(
indices=[[0, i[0], i[1]] for i in x.indices], values=x.values,
shape=[1, 3, 3])
self._assert_sparse_tensors_equal(
expected_result_dim0,
metric_ops.expand_and_tile(x, multiple=1).eval())
for dim in (-2, 0):
self._assert_sparse_tensors_equal(
expected_result_dim0,
metric_ops.expand_and_tile(x, multiple=1, dim=dim).eval())
expected_result_dim1 = tf.SparseTensorValue(
indices=[[i[0], 0, i[1]] for i in x.indices], values=x.values,
shape=[3, 1, 3])
for dim in (-1, 1):
self._assert_sparse_tensors_equal(
expected_result_dim1,
metric_ops.expand_and_tile(x, multiple=1, dim=dim).eval())
expected_result_dim2 = tf.SparseTensorValue(
indices=[[i[0], i[1], 0] for i in x.indices], values=x.values,
shape=[3, 3, 1])
self._assert_sparse_tensors_equal(
expected_result_dim2,
metric_ops.expand_and_tile(x, multiple=1, dim=2).eval())
# TODO(ptucker): Use @parameterized when it's available in tf.
def testSparseExpandAndTile5x(self):
# Shape (3,3).
x = tf.SparseTensorValue(
indices=(
(0, 0), (0, 1),
(1, 0), (1, 1), (1, 2),
(2, 0)),
values=(
1, 2,
3, 4, 5,
6),
shape=(3, 3))
with self.test_session():
expected_result_dim0 = tf.SparseTensorValue(
indices=[(d0, i[0], i[1]) for d0 in range(5) for i in x.indices],
values=[v for _ in range(5) for v in x.values],
shape=(5, 3, 3))
self._assert_sparse_tensors_equal(
expected_result_dim0,
metric_ops.expand_and_tile(x, multiple=5).eval())
for dim in (-2, 0):
self._assert_sparse_tensors_equal(
expected_result_dim0,
metric_ops.expand_and_tile(x, multiple=5, dim=dim).eval())
expected_result_dim1 = tf.SparseTensorValue(
indices=[
(d0, d1, i[1])
for d0 in range(3)
for d1 in range(5)
for i in x.indices if i[0] == d0],
values=x.values[0:2] * 5 + x.values[2:5] * 5 + x.values[5:] * 5,
shape=(3, 5, 3))
for dim in (-1, 1):
self._assert_sparse_tensors_equal(
expected_result_dim1,
metric_ops.expand_and_tile(x, multiple=5, dim=dim).eval())
expected_result_dim2 = tf.SparseTensorValue(
indices=[(i[0], i[1], d2) for i in x.indices for d2 in range(5)],
values=[v for v in x.values for _ in range(5)],
shape=(3, 3, 5))
self._assert_sparse_tensors_equal(
expected_result_dim2,
metric_ops.expand_and_tile(x, multiple=5, dim=2).eval())
if __name__ == '__main__':
tf.test.main()
|
nanditav/15712-TensorFlow
|
tensorflow/contrib/metrics/python/ops/metric_ops_test.py
|
Python
|
apache-2.0
| 163,728 | 0.009143 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp import api, fields, models
class CrmActivity(models.Model):
''' CrmActivity is a model introduced in Odoo v9 that models activities
performed in CRM, like phonecalls, sending emails, making demonstrations,
... Users are able to configure their custom activities.
    Each activity has up to three next activities. This allows modelling light
    custom workflows. This way sales managers can configure the crm workflow
    that salespersons will use in their daily job.
CrmActivity inherits from mail.message.subtype. This allows users to follow
some activities through subtypes. Each activity will generate messages with
the matching subtypes, allowing reporting and statistics computation based
on mail.message.subtype model. '''
_name = 'crm.activity'
_description = 'CRM Activity'
_inherits = {'mail.message.subtype': 'subtype_id'}
_rec_name = 'name'
_order = "sequence"
days = fields.Integer('Number of days', default=0,
        help='Number of days before fulfilling the action, allowing to plan the action date.')
sequence = fields.Integer('Sequence', default=0)
team_id = fields.Many2one('crm.team', string='Sales Team')
subtype_id = fields.Many2one('mail.message.subtype', string='Message Subtype', required=True, ondelete='cascade')
activity_1_id = fields.Many2one('crm.activity', string="Next Activity 1")
activity_2_id = fields.Many2one('crm.activity', string="Next Activity 2")
activity_3_id = fields.Many2one('crm.activity', string="Next Activity 3")
@api.model
def create(self, values):
''' Override to set the res_model of inherited subtype to crm.lead.
This cannot be achieved using a default on res_model field because
of the inherits. Indeed a new field would be created. However the
field on the subtype would still exist. Being void, the subtype
will be present for every model in Odoo. That's quite an issue. '''
if not values.get('res_model') and 'default_res_model' not in self._context:
values['res_model'] = 'crm.lead'
if 'internal' not in values and 'default_internal' not in self._context:
values['internal'] = True
return super(CrmActivity, self).create(values)
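# --- Illustrative usage sketch (assumption, not part of the original module) ---
# A light activity chain could be configured roughly as below; the record
# values are made up for illustration only:
#
#   call = env['crm.activity'].create({'name': 'Call', 'days': 1})
#   env['crm.activity'].create({'name': 'Demo', 'days': 3,
#                               'activity_1_id': call.id})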
|
tvtsoft/odoo8
|
addons/crm/models/crm_activity.py
|
Python
|
agpl-3.0
| 2,406 | 0.001663 |
# coding: utf-8
'''
Created on 2012-8-30
@author: shanfeng
'''
import smtplib
from email.mime.text import MIMEText
import urllib
import web
class XWJemail:
'''
classdocs
'''
def __init__(self, params):
'''
Constructor
'''
pass
@staticmethod
def sendfindpass(user,hash):
link = "%s/account/newpass?%s" %(web.ctx.sitehost,urllib.urlencode({'email':user.u_email,"v":hash}))
mail_body = """
<html>
<head></head>
<body>
<h4>%s,你好</h4>
您刚才在 liulin.info 申请了找回密码。<br>
请点击下面的链接来重置密码:<br>
<a href="%s">%s</a><br>
如果无法点击上面的链接,您可以复制该地址,并粘帖在浏览器的地址栏中访问。<br>
</body>
</html>
""" % (web.utf8(user.u_name),link,link)
#mail_body = web.utf8(mail_body)
if isinstance(mail_body,unicode):
mail_body = str(mail_body)
mail_from = "liulin.info<wukong10086@163.com>"
mail_to = user.u_email
mail_subject = 'liulin.info重置密码邮件'
msg = MIMEText(mail_body,'html','utf-8')
#msg=MIMEText(mail_body,'html')
if not isinstance(mail_subject,unicode):
mail_subject = unicode(mail_subject)
msg['Subject']= mail_subject
msg['From']=mail_from
msg['To'] = mail_to
msg["Accept-Language"]="zh-CN"
msg["Accept-Charset"]="ISO-8859-1,utf-8"
smtp=smtplib.SMTP()
smtp.connect('smtp.163.com')
smtp.login('wukong10086@163.com','831112')
smtp.sendmail(mail_from,mail_to,msg.as_string())
smtp.quit()
def sendMail(mailto,subject,body,format='plain'):
if isinstance(body,unicode):
body = str(body)
me= ("%s<"+fromMail+">") % (Header(_mailFrom,'utf-8'),)
msg = MIMEText(body,format,'utf-8')
if not isinstance(subject,unicode):
subject = unicode(subject)
msg['Subject'] = subject
msg['From'] = me
msg['To'] = mailto
msg["Accept-Language"]="zh-CN"
msg["Accept-Charset"]="ISO-8859-1,utf-8"
try:
s = smtplib.SMTP()
s.connect(host)
s.login(user,password)
s.sendmail(me, mailto, msg.as_string())
s.close()
return True
except Exception, e:
print str(e)
return False
|
waile23/todo
|
utils/xwjemail.py
|
Python
|
mit
| 2,175 | 0.063391 |
#!/usr/bin/python
# This script reads through a genotype likelihood file and the respective mean genotype likelihood file. It writes a nexus file for all individuals and the given genotypes, with '0' for ref homozygote, '1' for heterozygote, and '2' for alt homozygote.
# Usage: ~/vcf2nex012.py pubRetStriUG_unlnkd.gl pntest_pubRetStriUG_unlnkd.txt
from sys import argv
# read genotype likelihood file to get scaffold:bp (which is not in the same order as the vcf file, resulting from vcf2gl.py)
with open(argv[1], 'rb') as gl_file:
scafPos_gl = list()
for line in gl_file:
if line.split(' ')[0] == '65':
continue
elif line.split(' ')[0] == 'CR1043':
ind_id = line.split(' ')
ind_id[len(ind_id)-1] = ind_id[len(ind_id)-1].split('\n')[0]
else:
scafPos_gl.append(line.split(' ')[0])
# read the file with mean genotypes
with open(argv[2], 'rb') as mean_gt_file:
ind_dict = dict()
for line in mean_gt_file:
gt_line = line.split(' ')
for i, ind in enumerate(ind_id):
if not ind in ind_dict:
ind_dict[ind] = [float(gt_line[i])]
else:
ind_dict[ind].append(float(gt_line[i]))
# parse the mean genotypes and write the proper bases
for key, value in ind_dict.iteritems():
newline = list()
for i, pos in enumerate(scafPos_gl):
if round(float(value[i])) == 0:
newline.append(str(0))
elif round(float(value[i])) == 1:
newline.append(str(1))
elif round(float(value[i])) == 2:
newline.append(str(2))
else:
continue
print str(key + '\t' + ''.join(newline))
#print scafPos_gl
#for key, value in iter(refp_dict.iteritems()):
# print key, ''.join(value)
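# Illustrative output (assumed example, for clarity only): each printed line is
# an individual ID, a tab, and one 0/1/2 character per retained site, e.g.
#   CR1043    0120210...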
|
schimar/hts_tools
|
vcf2nex012.py
|
Python
|
gpl-2.0
| 1,837 | 0.004355 |
""" ListCompToMap transforms list comprehension into intrinsics. """
from pythran.analyses import OptimizableComprehension
from pythran.passmanager import Transformation
from pythran.transformations import NormalizeTuples
import ast
class ListCompToMap(Transformation):
'''
Transforms list comprehension into intrinsics.
>>> import ast
>>> from pythran import passmanager, backend
>>> node = ast.parse("[x*x for x in range(10)]")
>>> pm = passmanager.PassManager("test")
>>> _, node = pm.apply(ListCompToMap, node)
>>> print pm.dump(backend.Python, node)
__builtin__.map((lambda x: (x * x)), range(10))
'''
def __init__(self):
Transformation.__init__(self, NormalizeTuples,
OptimizableComprehension)
def make_Iterator(self, gen):
if gen.ifs:
ldFilter = ast.Lambda(
ast.arguments([ast.Name(gen.target.id, ast.Param())],
None, None, []), ast.BoolOp(ast.And(), gen.ifs))
ifilterName = ast.Attribute(
value=ast.Name(id='itertools', ctx=ast.Load()),
attr='ifilter', ctx=ast.Load())
return ast.Call(ifilterName, [ldFilter, gen.iter], [], None, None)
else:
return gen.iter
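    # Illustrative sketch (not a doctest): when a generator carries guards,
    # make_Iterator wraps the iterable in itertools.ifilter, so roughly
    #   [x for x in range(10) if x > 2]
    # becomes
    #   __builtin__.map((lambda x: x), itertools.ifilter((lambda x: (x > 2)), range(10)))
    # (exact formatting depends on the backend).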
def visit_ListComp(self, node):
if node in self.optimizable_comprehension:
self.update = True
self.generic_visit(node)
iterList = []
varList = []
for gen in node.generators:
iterList.append(self.make_Iterator(gen))
varList.append(ast.Name(gen.target.id, ast.Param()))
# If dim = 1, product is useless
if len(iterList) == 1:
iterAST = iterList[0]
varAST = ast.arguments([varList[0]], None, None, [])
else:
prodName = ast.Attribute(
value=ast.Name(id='itertools', ctx=ast.Load()),
attr='product', ctx=ast.Load())
iterAST = ast.Call(prodName, iterList, [], None, None)
varAST = ast.arguments([ast.Tuple(varList, ast.Store())],
None, None, [])
mapName = ast.Attribute(
value=ast.Name(id='__builtin__', ctx=ast.Load()),
attr='map', ctx=ast.Load())
ldBodymap = node.elt
ldmap = ast.Lambda(varAST, ldBodymap)
return ast.Call(mapName, [ldmap, iterAST], [], None, None)
else:
return self.generic_visit(node)
|
hainm/pythran
|
pythran/optimizations/list_comp_to_map.py
|
Python
|
bsd-3-clause
| 2,611 | 0 |
#!/usr/bin/env python
#
# Copyright 2008 Jose Fonseca
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Generate a dot graph from the output of several profilers."""
__author__ = "Jose Fonseca"
__version__ = "1.0"
import sys
import math
import os.path
import re
import textwrap
import optparse
try:
# Debugging helper module
import debug
except ImportError:
pass
def percentage(p):
return "%.02f%%" % (p*100.0,)
def add(a, b):
return a + b
def equal(a, b):
if a == b:
return a
else:
return None
def fail(a, b):
assert False
def ratio(numerator, denominator):
numerator = float(numerator)
denominator = float(denominator)
assert 0.0 <= numerator
assert numerator <= denominator
try:
return numerator/denominator
except ZeroDivisionError:
# 0/0 is undefined, but 1.0 yields more useful results
return 1.0
class UndefinedEvent(Exception):
"""Raised when attempting to get an event which is undefined."""
def __init__(self, event):
Exception.__init__(self)
self.event = event
def __str__(self):
return 'unspecified event %s' % self.event.name
class Event(object):
"""Describe a kind of event, and its basic operations."""
def __init__(self, name, null, aggregator, formatter = str):
self.name = name
self._null = null
self._aggregator = aggregator
self._formatter = formatter
def __eq__(self, other):
return self is other
def __hash__(self):
return id(self)
def null(self):
return self._null
def aggregate(self, val1, val2):
"""Aggregate two event values."""
assert val1 is not None
assert val2 is not None
return self._aggregator(val1, val2)
def format(self, val):
"""Format an event value."""
assert val is not None
return self._formatter(val)
MODULE = Event("Module", None, equal)
PROCESS = Event("Process", None, equal)
CALLS = Event("Calls", 0, add)
SAMPLES = Event("Samples", 0, add)
TIME = Event("Time", 0.0, add, lambda x: '(' + str(x) + ')')
TIME_RATIO = Event("Time ratio", 0.0, add, lambda x: '(' + percentage(x) + ')')
TOTAL_TIME = Event("Total time", 0.0, fail)
TOTAL_TIME_RATIO = Event("Total time ratio", 0.0, fail, percentage)
CALL_RATIO = Event("Call ratio", 0.0, add, percentage)
PRUNE_RATIO = Event("Prune ratio", 0.0, add, percentage)
class Object(object):
"""Base class for all objects in profile which can store events."""
def __init__(self, events=None):
if events is None:
self.events = {}
else:
self.events = events
def __hash__(self):
return id(self)
def __eq__(self, other):
return self is other
def __contains__(self, event):
return event in self.events
def __getitem__(self, event):
try:
return self.events[event]
except KeyError:
raise UndefinedEvent(event)
def __setitem__(self, event, value):
if value is None:
if event in self.events:
del self.events[event]
else:
self.events[event] = value
class Call(Object):
"""A call between functions.
There should be at most one call object for every pair of functions.
"""
def __init__(self, callee_id):
Object.__init__(self)
self.callee_id = callee_id
class Function(Object):
"""A function."""
def __init__(self, id, name):
Object.__init__(self)
self.id = id
self.name = name
self.calls = {}
self.cycle = None
def add_call(self, call):
if call.callee_id in self.calls:
sys.stderr.write('warning: overwriting call from function %s to %s\n' % (str(self.id), str(call.callee_id)))
self.calls[call.callee_id] = call
# TODO: write utility functions
def __repr__(self):
return self.name
class Cycle(Object):
"""A cycle made from recursive function calls."""
def __init__(self):
Object.__init__(self)
# XXX: Do cycles need an id?
self.functions = set()
def add_function(self, function):
assert function not in self.functions
self.functions.add(function)
# XXX: Aggregate events?
if function.cycle is not None:
for other in function.cycle.functions:
if function not in self.functions:
self.add_function(other)
function.cycle = self
class Profile(Object):
"""The whole profile."""
def __init__(self):
Object.__init__(self)
self.functions = {}
self.cycles = []
def add_function(self, function):
if function.id in self.functions:
sys.stderr.write('warning: overwriting function %s (id %s)\n' % (function.name, str(function.id)))
self.functions[function.id] = function
def add_cycle(self, cycle):
self.cycles.append(cycle)
def validate(self):
"""Validate the edges."""
for function in self.functions.itervalues():
for callee_id in function.calls.keys():
assert function.calls[callee_id].callee_id == callee_id
if callee_id not in self.functions:
sys.stderr.write('warning: call to undefined function %s from function %s\n' % (str(callee_id), function.name))
del function.calls[callee_id]
def find_cycles(self):
"""Find cycles using Tarjan's strongly connected components algorithm."""
# Apply the Tarjan's algorithm successively until all functions are visited
visited = set()
for function in self.functions.itervalues():
if function not in visited:
self._tarjan(function, 0, [], {}, {}, visited)
cycles = []
for function in self.functions.itervalues():
if function.cycle is not None and function.cycle not in cycles:
cycles.append(function.cycle)
self.cycles = cycles
if 0:
for cycle in cycles:
sys.stderr.write("Cycle:\n")
for member in cycle.functions:
sys.stderr.write("\t%s\n" % member.name)
def _tarjan(self, function, order, stack, orders, lowlinks, visited):
"""Tarjan's strongly connected components algorithm.
See also:
- http://en.wikipedia.org/wiki/Tarjan's_strongly_connected_components_algorithm
"""
visited.add(function)
orders[function] = order
lowlinks[function] = order
order += 1
pos = len(stack)
stack.append(function)
for call in function.calls.itervalues():
callee = self.functions[call.callee_id]
# TODO: use a set to optimize lookup
if callee not in orders:
order = self._tarjan(callee, order, stack, orders, lowlinks, visited)
lowlinks[function] = min(lowlinks[function], lowlinks[callee])
elif callee in stack:
lowlinks[function] = min(lowlinks[function], orders[callee])
if lowlinks[function] == orders[function]:
# Strongly connected component found
members = stack[pos:]
del stack[pos:]
if len(members) > 1:
cycle = Cycle()
for member in members:
cycle.add_function(member)
return order
def call_ratios(self, event):
# Aggregate for incoming calls
cycle_totals = {}
for cycle in self.cycles:
cycle_totals[cycle] = 0.0
function_totals = {}
for function in self.functions.itervalues():
function_totals[function] = 0.0
for function in self.functions.itervalues():
for call in function.calls.itervalues():
if call.callee_id != function.id:
callee = self.functions[call.callee_id]
function_totals[callee] += call[event]
if callee.cycle is not None and callee.cycle is not function.cycle:
cycle_totals[callee.cycle] += call[event]
# Compute the ratios
for function in self.functions.itervalues():
for call in function.calls.itervalues():
assert CALL_RATIO not in call
if call.callee_id != function.id:
callee = self.functions[call.callee_id]
if callee.cycle is not None and callee.cycle is not function.cycle:
total = cycle_totals[callee.cycle]
else:
total = function_totals[callee]
call[CALL_RATIO] = ratio(call[event], total)
def integrate(self, outevent, inevent):
"""Propagate function time ratio allong the function calls.
Must be called after finding the cycles.
See also:
- http://citeseer.ist.psu.edu/graham82gprof.html
"""
# Sanity checking
assert outevent not in self
for function in self.functions.itervalues():
assert outevent not in function
assert inevent in function
for call in function.calls.itervalues():
assert outevent not in call
if call.callee_id != function.id:
assert CALL_RATIO in call
# Aggregate the input for each cycle
for cycle in self.cycles:
total = inevent.null()
for function in self.functions.itervalues():
total = inevent.aggregate(total, function[inevent])
self[inevent] = total
# Integrate along the edges
total = inevent.null()
for function in self.functions.itervalues():
total = inevent.aggregate(total, function[inevent])
self._integrate_function(function, outevent, inevent)
self[outevent] = total
def _integrate_function(self, function, outevent, inevent):
if function.cycle is not None:
return self._integrate_cycle(function.cycle, outevent, inevent)
else:
if outevent not in function:
total = function[inevent]
for call in function.calls.itervalues():
if call.callee_id != function.id:
total += self._integrate_call(call, outevent, inevent)
function[outevent] = total
return function[outevent]
def _integrate_call(self, call, outevent, inevent):
assert outevent not in call
assert CALL_RATIO in call
callee = self.functions[call.callee_id]
subtotal = call[CALL_RATIO]*self._integrate_function(callee, outevent, inevent)
call[outevent] = subtotal
return subtotal
def _integrate_cycle(self, cycle, outevent, inevent):
if outevent not in cycle:
total = inevent.null()
for member in cycle.functions:
subtotal = member[inevent]
for call in member.calls.itervalues():
callee = self.functions[call.callee_id]
if callee.cycle is not cycle:
subtotal += self._integrate_call(call, outevent, inevent)
total += subtotal
cycle[outevent] = total
callees = {}
for function in self.functions.itervalues():
if function.cycle is not cycle:
for call in function.calls.itervalues():
callee = self.functions[call.callee_id]
if callee.cycle is cycle:
try:
callees[callee] += call[CALL_RATIO]
except KeyError:
callees[callee] = call[CALL_RATIO]
for callee, call_ratio in callees.iteritems():
ranks = {}
call_ratios = {}
partials = {}
self._rank_cycle_function(cycle, callee, 0, ranks)
self._call_ratios_cycle(cycle, callee, ranks, call_ratios, set())
partial = self._integrate_cycle_function(cycle, callee, call_ratio, partials, ranks, call_ratios, outevent, inevent)
assert partial == max(partials.values())
assert not total or abs(1.0 - partial/(call_ratio*total)) <= 0.001
return cycle[outevent]
def _rank_cycle_function(self, cycle, function, rank, ranks):
if function not in ranks or ranks[function] > rank:
ranks[function] = rank
for call in function.calls.itervalues():
if call.callee_id != function.id:
callee = self.functions[call.callee_id]
if callee.cycle is cycle:
self._rank_cycle_function(cycle, callee, rank + 1, ranks)
def _call_ratios_cycle(self, cycle, function, ranks, call_ratios, visited):
if function not in visited:
visited.add(function)
for call in function.calls.itervalues():
if call.callee_id != function.id:
callee = self.functions[call.callee_id]
if callee.cycle is cycle:
if ranks[callee] > ranks[function]:
call_ratios[callee] = call_ratios.get(callee, 0.0) + call[CALL_RATIO]
self._call_ratios_cycle(cycle, callee, ranks, call_ratios, visited)
def _integrate_cycle_function(self, cycle, function, partial_ratio, partials, ranks, call_ratios, outevent, inevent):
if function not in partials:
partial = partial_ratio*function[inevent]
for call in function.calls.itervalues():
if call.callee_id != function.id:
callee = self.functions[call.callee_id]
if callee.cycle is not cycle:
assert outevent in call
partial += partial_ratio*call[outevent]
else:
if ranks[callee] > ranks[function]:
callee_partial = self._integrate_cycle_function(cycle, callee, partial_ratio, partials, ranks, call_ratios, outevent, inevent)
call_ratio = ratio(call[CALL_RATIO], call_ratios[callee])
call_partial = call_ratio*callee_partial
try:
call[outevent] += call_partial
except UndefinedEvent:
call[outevent] = call_partial
partial += call_partial
partials[function] = partial
try:
function[outevent] += partial
except UndefinedEvent:
function[outevent] = partial
return partials[function]
def aggregate(self, event):
"""Aggregate an event for the whole profile."""
total = event.null()
for function in self.functions.itervalues():
try:
total = event.aggregate(total, function[event])
except UndefinedEvent:
return
self[event] = total
def ratio(self, outevent, inevent):
assert outevent not in self
assert inevent in self
for function in self.functions.itervalues():
assert outevent not in function
assert inevent in function
function[outevent] = ratio(function[inevent], self[inevent])
for call in function.calls.itervalues():
assert outevent not in call
if inevent in call:
call[outevent] = ratio(call[inevent], self[inevent])
self[outevent] = 1.0
def prune(self, node_thres, edge_thres):
"""Prune the profile"""
# compute the prune ratios
for function in self.functions.itervalues():
try:
function[PRUNE_RATIO] = function[TOTAL_TIME_RATIO]
except UndefinedEvent:
pass
for call in function.calls.itervalues():
callee = self.functions[call.callee_id]
if TOTAL_TIME_RATIO in call:
# handle exact cases first
call[PRUNE_RATIO] = call[TOTAL_TIME_RATIO]
else:
try:
# make a safe estimate
call[PRUNE_RATIO] = min(function[TOTAL_TIME_RATIO], callee[TOTAL_TIME_RATIO])
except UndefinedEvent:
pass
# prune the nodes
for function_id in self.functions.keys():
function = self.functions[function_id]
try:
if function[PRUNE_RATIO] < node_thres:
del self.functions[function_id]
except UndefinedEvent:
pass
        # prune the edges
for function in self.functions.itervalues():
for callee_id in function.calls.keys():
call = function.calls[callee_id]
try:
if callee_id not in self.functions or call[PRUNE_RATIO] < edge_thres:
del function.calls[callee_id]
except UndefinedEvent:
pass
def dump(self):
for function in self.functions.itervalues():
sys.stderr.write('Function %s:\n' % (function.name,))
self._dump_events(function.events)
for call in function.calls.itervalues():
callee = self.functions[call.callee_id]
sys.stderr.write(' Call %s:\n' % (callee.name,))
self._dump_events(call.events)
def _dump_events(self, events):
for event, value in events.iteritems():
sys.stderr.write(' %s: %s\n' % (event.name, event.format(value)))
class Struct:
"""Masquerade a dictionary with a structure-like behavior."""
def __init__(self, attrs = None):
if attrs is None:
attrs = {}
self.__dict__['_attrs'] = attrs
def __getattr__(self, name):
try:
return self._attrs[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
self._attrs[name] = value
def __str__(self):
return str(self._attrs)
def __repr__(self):
return repr(self._attrs)
class ParseError(Exception):
"""Raised when parsing to signal mismatches."""
def __init__(self, msg, line):
self.msg = msg
# TODO: store more source line information
self.line = line
def __str__(self):
return '%s: %r' % (self.msg, self.line)
class Parser:
"""Parser interface."""
def __init__(self):
pass
def parse(self):
raise NotImplementedError
class LineParser(Parser):
"""Base class for parsers that read line-based formats."""
def __init__(self, file):
Parser.__init__(self)
self._file = file
self.__line = None
self.__eof = False
def readline(self):
line = self._file.readline()
if not line:
self.__line = ''
self.__eof = True
self.__line = line.rstrip('\r\n')
def lookahead(self):
assert self.__line is not None
return self.__line
def consume(self):
assert self.__line is not None
line = self.__line
self.readline()
return line
def eof(self):
assert self.__line is not None
return self.__eof
class GprofParser(Parser):
"""Parser for GNU gprof output.
See also:
- Chapter "Interpreting gprof's Output" from the GNU gprof manual
http://sourceware.org/binutils/docs-2.18/gprof/Call-Graph.html#Call-Graph
- File "cg_print.c" from the GNU gprof source code
http://sourceware.org/cgi-bin/cvsweb.cgi/~checkout~/src/gprof/cg_print.c?rev=1.12&cvsroot=src
"""
def __init__(self, fp):
Parser.__init__(self)
self.fp = fp
self.functions = {}
self.cycles = {}
def readline(self):
line = self.fp.readline()
if not line:
sys.stderr.write('error: unexpected end of file\n')
sys.exit(1)
line = line.rstrip('\r\n')
return line
_int_re = re.compile(r'^\d+$')
_float_re = re.compile(r'^\d+\.\d+$')
def translate(self, mo):
"""Extract a structure from a match object, while translating the types in the process."""
attrs = {}
groupdict = mo.groupdict()
for name, value in groupdict.iteritems():
if value is None:
value = None
elif self._int_re.match(value):
value = int(value)
elif self._float_re.match(value):
value = float(value)
attrs[name] = (value)
return Struct(attrs)
_cg_header_re = re.compile(
# original gprof header
r'^\s+called/total\s+parents\s*$|' +
r'^index\s+%time\s+self\s+descendents\s+called\+self\s+name\s+index\s*$|' +
r'^\s+called/total\s+children\s*$|' +
# GNU gprof header
r'^index\s+%\s+time\s+self\s+children\s+called\s+name\s*$'
)
_cg_ignore_re = re.compile(
# spontaneous
r'^\s+<spontaneous>\s*$|'
# internal calls (such as "mcount")
r'^.*\((\d+)\)$'
)
_cg_primary_re = re.compile(
r'^\[(?P<index>\d+)\]' +
r'\s+(?P<percentage_time>\d+\.\d+)' +
r'\s+(?P<self>\d+\.\d+)' +
r'\s+(?P<descendants>\d+\.\d+)' +
r'\s+(?:(?P<called>\d+)(?:\+(?P<called_self>\d+))?)?' +
r'\s+(?P<name>\S.*?)' +
r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' +
r'\s\[(\d+)\]$'
)
_cg_parent_re = re.compile(
r'^\s+(?P<self>\d+\.\d+)?' +
r'\s+(?P<descendants>\d+\.\d+)?' +
r'\s+(?P<called>\d+)(?:/(?P<called_total>\d+))?' +
r'\s+(?P<name>\S.*?)' +
r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' +
r'\s\[(?P<index>\d+)\]$'
)
_cg_child_re = _cg_parent_re
_cg_cycle_header_re = re.compile(
r'^\[(?P<index>\d+)\]' +
r'\s+(?P<percentage_time>\d+\.\d+)' +
r'\s+(?P<self>\d+\.\d+)' +
r'\s+(?P<descendants>\d+\.\d+)' +
r'\s+(?:(?P<called>\d+)(?:\+(?P<called_self>\d+))?)?' +
r'\s+<cycle\s(?P<cycle>\d+)\sas\sa\swhole>' +
r'\s\[(\d+)\]$'
)
_cg_cycle_member_re = re.compile(
r'^\s+(?P<self>\d+\.\d+)?' +
r'\s+(?P<descendants>\d+\.\d+)?' +
r'\s+(?P<called>\d+)(?:\+(?P<called_self>\d+))?' +
r'\s+(?P<name>\S.*?)' +
r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' +
r'\s\[(?P<index>\d+)\]$'
)
_cg_sep_re = re.compile(r'^--+$')
def parse_function_entry(self, lines):
parents = []
children = []
while True:
if not lines:
sys.stderr.write('warning: unexpected end of entry\n')
line = lines.pop(0)
if line.startswith('['):
break
# read function parent line
mo = self._cg_parent_re.match(line)
if not mo:
if self._cg_ignore_re.match(line):
continue
sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
else:
parent = self.translate(mo)
parents.append(parent)
# read primary line
mo = self._cg_primary_re.match(line)
if not mo:
sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
return
else:
function = self.translate(mo)
while lines:
line = lines.pop(0)
# read function subroutine line
mo = self._cg_child_re.match(line)
if not mo:
if self._cg_ignore_re.match(line):
continue
sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
else:
child = self.translate(mo)
children.append(child)
function.parents = parents
function.children = children
self.functions[function.index] = function
def parse_cycle_entry(self, lines):
# read cycle header line
line = lines[0]
mo = self._cg_cycle_header_re.match(line)
if not mo:
sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
return
cycle = self.translate(mo)
# read cycle member lines
cycle.functions = []
for line in lines[1:]:
mo = self._cg_cycle_member_re.match(line)
if not mo:
sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
continue
call = self.translate(mo)
cycle.functions.append(call)
self.cycles[cycle.cycle] = cycle
def parse_cg_entry(self, lines):
if lines[0].startswith("["):
self.parse_cycle_entry(lines)
else:
self.parse_function_entry(lines)
def parse_cg(self):
"""Parse the call graph."""
# skip call graph header
while not self._cg_header_re.match(self.readline()):
pass
line = self.readline()
while self._cg_header_re.match(line):
line = self.readline()
# process call graph entries
entry_lines = []
while line != '\014': # form feed
if line and not line.isspace():
if self._cg_sep_re.match(line):
self.parse_cg_entry(entry_lines)
entry_lines = []
else:
entry_lines.append(line)
line = self.readline()
def parse(self):
self.parse_cg()
self.fp.close()
profile = Profile()
profile[TIME] = 0.0
cycles = {}
for index in self.cycles.iterkeys():
cycles[index] = Cycle()
for entry in self.functions.itervalues():
# populate the function
function = Function(entry.index, entry.name)
function[TIME] = entry.self
if entry.called is not None:
function[CALLS] = entry.called
if entry.called_self is not None:
call = Call(entry.index)
call[CALLS] = entry.called_self
function[CALLS] += entry.called_self
# populate the function calls
for child in entry.children:
call = Call(child.index)
assert child.called is not None
call[CALLS] = child.called
if child.index not in self.functions:
# NOTE: functions that were never called but were discovered by gprof's
                # static call graph analysis don't have a call graph entry so we need
# to add them here
missing = Function(child.index, child.name)
                missing[TIME] = 0.0
                missing[CALLS] = 0
profile.add_function(missing)
function.add_call(call)
profile.add_function(function)
if entry.cycle is not None:
cycles[entry.cycle].add_function(function)
profile[TIME] = profile[TIME] + function[TIME]
for cycle in cycles.itervalues():
profile.add_cycle(cycle)
# Compute derived events
profile.validate()
profile.ratio(TIME_RATIO, TIME)
profile.call_ratios(CALLS)
profile.integrate(TOTAL_TIME, TIME)
profile.ratio(TOTAL_TIME_RATIO, TOTAL_TIME)
return profile
class OprofileParser(LineParser):
"""Parser for oprofile callgraph output.
See also:
- http://oprofile.sourceforge.net/doc/opreport.html#opreport-callgraph
"""
_fields_re = {
'samples': r'(?P<samples>\d+)',
'%': r'(?P<percentage>\S+)',
'linenr info': r'(?P<source>\(no location information\)|\S+:\d+)',
'image name': r'(?P<image>\S+(?:\s\(tgid:[^)]*\))?)',
'app name': r'(?P<application>\S+)',
'symbol name': r'(?P<symbol>\(no symbols\)|.+?)',
}
def __init__(self, infile):
LineParser.__init__(self, infile)
self.entries = {}
self.entry_re = None
def add_entry(self, callers, function, callees):
try:
entry = self.entries[function.id]
except KeyError:
self.entries[function.id] = (callers, function, callees)
else:
callers_total, function_total, callees_total = entry
self.update_subentries_dict(callers_total, callers)
function_total.samples += function.samples
self.update_subentries_dict(callees_total, callees)
def update_subentries_dict(self, totals, partials):
for partial in partials.itervalues():
try:
total = totals[partial.id]
except KeyError:
totals[partial.id] = partial
else:
total.samples += partial.samples
def parse(self):
# read lookahead
self.readline()
self.parse_header()
while self.lookahead():
self.parse_entry()
profile = Profile()
reverse_call_samples = {}
# populate the profile
profile[SAMPLES] = 0
for _callers, _function, _callees in self.entries.itervalues():
function = Function(_function.id, _function.name)
function[SAMPLES] = _function.samples
profile.add_function(function)
profile[SAMPLES] += _function.samples
if _function.application:
function[PROCESS] = os.path.basename(_function.application)
if _function.image:
function[MODULE] = os.path.basename(_function.image)
total_callee_samples = 0
for _callee in _callees.itervalues():
total_callee_samples += _callee.samples
for _callee in _callees.itervalues():
if not _callee.self:
call = Call(_callee.id)
call[SAMPLES] = _callee.samples
function.add_call(call)
# compute derived data
profile.validate()
profile.find_cycles()
profile.ratio(TIME_RATIO, SAMPLES)
profile.call_ratios(SAMPLES)
profile.integrate(TOTAL_TIME_RATIO, TIME_RATIO)
return profile
def parse_header(self):
while not self.match_header():
self.consume()
line = self.lookahead()
fields = re.split(r'\s\s+', line)
entry_re = r'^\s*' + r'\s+'.join([self._fields_re[field] for field in fields]) + r'(?P<self>\s+\[self\])?$'
self.entry_re = re.compile(entry_re)
self.skip_separator()
def parse_entry(self):
callers = self.parse_subentries()
if self.match_primary():
function = self.parse_subentry()
if function is not None:
callees = self.parse_subentries()
self.add_entry(callers, function, callees)
self.skip_separator()
def parse_subentries(self):
subentries = {}
while self.match_secondary():
subentry = self.parse_subentry()
subentries[subentry.id] = subentry
return subentries
def parse_subentry(self):
entry = Struct()
line = self.consume()
mo = self.entry_re.match(line)
if not mo:
raise ParseError('failed to parse', line)
fields = mo.groupdict()
entry.samples = int(fields.get('samples', 0))
entry.percentage = float(fields.get('percentage', 0.0))
if 'source' in fields and fields['source'] != '(no location information)':
source = fields['source']
filename, lineno = source.split(':')
entry.filename = filename
entry.lineno = int(lineno)
else:
source = ''
entry.filename = None
entry.lineno = None
entry.image = fields.get('image', '')
entry.application = fields.get('application', '')
if 'symbol' in fields and fields['symbol'] != '(no symbols)':
entry.symbol = fields['symbol']
else:
entry.symbol = ''
if entry.symbol.startswith('"') and entry.symbol.endswith('"'):
entry.symbol = entry.symbol[1:-1]
entry.id = ':'.join((entry.application, entry.image, source, entry.symbol))
entry.self = fields.get('self', None) != None
if entry.self:
entry.id += ':self'
if entry.symbol:
entry.name = entry.symbol
else:
entry.name = entry.image
return entry
def skip_separator(self):
while not self.match_separator():
self.consume()
self.consume()
def match_header(self):
line = self.lookahead()
return line.startswith('samples')
def match_separator(self):
line = self.lookahead()
return line == '-'*len(line)
def match_primary(self):
line = self.lookahead()
return not line[:1].isspace()
def match_secondary(self):
line = self.lookahead()
return line[:1].isspace()
class SharkParser(LineParser):
"""Parser for MacOSX Shark output.
Author: tom@dbservice.com
"""
def __init__(self, infile):
LineParser.__init__(self, infile)
self.stack = []
self.entries = {}
def add_entry(self, function):
try:
entry = self.entries[function.id]
except KeyError:
self.entries[function.id] = (function, { })
else:
function_total, callees_total = entry
function_total.samples += function.samples
def add_callee(self, function, callee):
func, callees = self.entries[function.id]
try:
entry = callees[callee.id]
except KeyError:
callees[callee.id] = callee
else:
entry.samples += callee.samples
def parse(self):
self.readline()
self.readline()
self.readline()
self.readline()
match = re.compile(r'(?P<prefix>[|+ ]*)(?P<samples>\d+), (?P<symbol>[^,]+), (?P<image>.*)')
while self.lookahead():
line = self.consume()
mo = match.match(line)
if not mo:
raise ParseError('failed to parse', line)
fields = mo.groupdict()
prefix = len(fields.get('prefix', 0)) / 2 - 1
symbol = str(fields.get('symbol', 0))
image = str(fields.get('image', 0))
entry = Struct()
entry.id = ':'.join([symbol, image])
entry.samples = int(fields.get('samples', 0))
entry.name = symbol
entry.image = image
# adjust the callstack
if prefix < len(self.stack):
del self.stack[prefix:]
if prefix == len(self.stack):
self.stack.append(entry)
            # if the callstack already has an entry, it's this function's caller
if prefix > 0:
self.add_callee(self.stack[prefix - 1], entry)
self.add_entry(entry)
profile = Profile()
profile[SAMPLES] = 0
for _function, _callees in self.entries.itervalues():
function = Function(_function.id, _function.name)
function[SAMPLES] = _function.samples
profile.add_function(function)
profile[SAMPLES] += _function.samples
if _function.image:
function[MODULE] = os.path.basename(_function.image)
for _callee in _callees.itervalues():
call = Call(_callee.id)
call[SAMPLES] = _callee.samples
function.add_call(call)
# compute derived data
profile.validate()
profile.find_cycles()
profile.ratio(TIME_RATIO, SAMPLES)
profile.call_ratios(SAMPLES)
profile.integrate(TOTAL_TIME_RATIO, TIME_RATIO)
return profile
class PstatsParser:
"""Parser python profiling statistics saved with te pstats module."""
def __init__(self, *filename):
import pstats
self.stats = pstats.Stats(*filename)
self.profile = Profile()
self.function_ids = {}
def get_function_name(self, (filename, line, name)):
module = os.path.splitext(filename)[0]
module = os.path.basename(module)
return "%s:%d:%s" % (module, line, name)
def get_function(self, key):
try:
id = self.function_ids[key]
except KeyError:
id = len(self.function_ids)
name = self.get_function_name(key)
function = Function(id, name)
self.profile.functions[id] = function
self.function_ids[key] = id
else:
function = self.profile.functions[id]
return function
def parse(self):
self.profile[TIME] = 0.0
self.profile[TOTAL_TIME] = self.stats.total_tt
for fn, (cc, nc, tt, ct, callers) in self.stats.stats.iteritems():
callee = self.get_function(fn)
callee[CALLS] = nc
callee[TOTAL_TIME] = ct
callee[TIME] = tt
self.profile[TIME] += tt
self.profile[TOTAL_TIME] = max(self.profile[TOTAL_TIME], ct)
for fn, value in callers.iteritems():
caller = self.get_function(fn)
call = Call(callee.id)
if isinstance(value, tuple):
for i in xrange(0, len(value), 4):
nc, cc, tt, ct = value[i:i+4]
if CALLS in call:
call[CALLS] += cc
else:
call[CALLS] = cc
if TOTAL_TIME in call:
call[TOTAL_TIME] += ct
else:
call[TOTAL_TIME] = ct
else:
call[CALLS] = value
call[TOTAL_TIME] = ratio(value, nc)*ct
caller.add_call(call)
#self.stats.print_stats()
#self.stats.print_callees()
# Compute derived events
self.profile.validate()
self.profile.ratio(TIME_RATIO, TIME)
self.profile.ratio(TOTAL_TIME_RATIO, TOTAL_TIME)
return self.profile
class Theme:
def __init__(self,
bgcolor = (0.0, 0.0, 1.0),
mincolor = (0.0, 0.0, 0.0),
maxcolor = (0.0, 0.0, 1.0),
fontname = "Arial",
minfontsize = 10.0,
maxfontsize = 10.0,
minpenwidth = 0.5,
maxpenwidth = 4.0,
gamma = 2.2):
self.bgcolor = bgcolor
self.mincolor = mincolor
self.maxcolor = maxcolor
self.fontname = fontname
self.minfontsize = minfontsize
self.maxfontsize = maxfontsize
self.minpenwidth = minpenwidth
self.maxpenwidth = maxpenwidth
self.gamma = gamma
def graph_bgcolor(self):
return self.hsl_to_rgb(*self.bgcolor)
def graph_fontname(self):
return self.fontname
def graph_fontsize(self):
return self.minfontsize
def node_bgcolor(self, weight):
return self.color(weight)
def node_fgcolor(self, weight):
return self.graph_bgcolor()
def node_fontsize(self, weight):
return self.fontsize(weight)
def edge_color(self, weight):
return self.color(weight)
def edge_fontsize(self, weight):
return self.fontsize(weight)
def edge_penwidth(self, weight):
return max(weight*self.maxpenwidth, self.minpenwidth)
def edge_arrowsize(self, weight):
return 0.5 * math.sqrt(self.edge_penwidth(weight))
def fontsize(self, weight):
return max(weight**2 * self.maxfontsize, self.minfontsize)
def color(self, weight):
weight = min(max(weight, 0.0), 1.0)
hmin, smin, lmin = self.mincolor
hmax, smax, lmax = self.maxcolor
h = hmin + weight*(hmax - hmin)
s = smin + weight*(smax - smin)
l = lmin + weight*(lmax - lmin)
return self.hsl_to_rgb(h, s, l)
def hsl_to_rgb(self, h, s, l):
"""Convert a color from HSL color-model to RGB.
See also:
- http://www.w3.org/TR/css3-color/#hsl-color
"""
h = h % 1.0
s = min(max(s, 0.0), 1.0)
l = min(max(l, 0.0), 1.0)
if l <= 0.5:
m2 = l*(s + 1.0)
else:
m2 = l + s - l*s
m1 = l*2.0 - m2
r = self._hue_to_rgb(m1, m2, h + 1.0/3.0)
g = self._hue_to_rgb(m1, m2, h)
b = self._hue_to_rgb(m1, m2, h - 1.0/3.0)
# Apply gamma correction
r **= self.gamma
g **= self.gamma
b **= self.gamma
return (r, g, b)
def _hue_to_rgb(self, m1, m2, h):
if h < 0.0:
h += 1.0
elif h > 1.0:
h -= 1.0
if h*6 < 1.0:
return m1 + (m2 - m1)*h*6.0
elif h*2 < 1.0:
return m2
elif h*3 < 2.0:
return m1 + (m2 - m1)*(2.0/3.0 - h)*6.0
else:
return m1
TEMPERATURE_COLORMAP = Theme(
mincolor = (2.0/3.0, 0.80, 0.25), # dark blue
    maxcolor = (0.0, 1.0, 0.5), # saturated red
gamma = 1.0
)
PINK_COLORMAP = Theme(
mincolor = (0.0, 1.0, 0.90), # pink
    maxcolor = (0.0, 1.0, 0.5), # saturated red
)
GRAY_COLORMAP = Theme(
mincolor = (0.0, 0.0, 0.85), # light gray
maxcolor = (0.0, 0.0, 0.0), # black
)
BW_COLORMAP = Theme(
minfontsize = 8.0,
maxfontsize = 24.0,
mincolor = (0.0, 0.0, 0.0), # black
maxcolor = (0.0, 0.0, 0.0), # black
minpenwidth = 0.1,
maxpenwidth = 8.0,
)
class DotWriter:
"""Writer for the DOT language.
See also:
- "The DOT Language" specification
http://www.graphviz.org/doc/info/lang.html
"""
def __init__(self, fp):
self.fp = fp
def graph(self, profile, theme):
self.begin_graph()
fontname = theme.graph_fontname()
self.attr('graph', fontname=fontname, ranksep=0.25, nodesep=0.125)
self.attr('node', fontname=fontname, shape="box", style="filled,rounded", fontcolor="white", width=0, height=0)
self.attr('edge', fontname=fontname)
for function in profile.functions.itervalues():
labels = []
for event in PROCESS, MODULE:
if event in function.events:
label = event.format(function[event])
labels.append(label)
labels.append(function.name)
for event in TOTAL_TIME_RATIO, TIME_RATIO, CALLS:
if event in function.events:
label = event.format(function[event])
labels.append(label)
try:
weight = function[PRUNE_RATIO]
except UndefinedEvent:
weight = 0.0
label = '\n'.join(labels)
self.node(function.id,
label = label,
color = self.color(theme.node_bgcolor(weight)),
fontcolor = self.color(theme.node_fgcolor(weight)),
fontsize = "%.2f" % theme.node_fontsize(weight),
)
for call in function.calls.itervalues():
callee = profile.functions[call.callee_id]
labels = []
for event in TOTAL_TIME_RATIO, CALLS:
if event in call.events:
label = event.format(call[event])
labels.append(label)
try:
weight = call[PRUNE_RATIO]
except UndefinedEvent:
try:
weight = callee[PRUNE_RATIO]
except UndefinedEvent:
weight = 0.0
label = '\n'.join(labels)
self.edge(function.id, call.callee_id,
label = label,
color = self.color(theme.edge_color(weight)),
fontcolor = self.color(theme.edge_color(weight)),
fontsize = "%.2f" % theme.edge_fontsize(weight),
penwidth = "%.2f" % theme.edge_penwidth(weight),
labeldistance = "%.2f" % theme.edge_penwidth(weight),
arrowsize = "%.2f" % theme.edge_arrowsize(weight),
)
self.end_graph()
def begin_graph(self):
self.write('digraph {\n')
def end_graph(self):
self.write('}\n')
def attr(self, what, **attrs):
self.write("\t")
self.write(what)
self.attr_list(attrs)
self.write(";\n")
def node(self, node, **attrs):
self.write("\t")
self.id(node)
self.attr_list(attrs)
self.write(";\n")
def edge(self, src, dst, **attrs):
self.write("\t")
self.id(src)
self.write(" -> ")
self.id(dst)
self.attr_list(attrs)
self.write(";\n")
def attr_list(self, attrs):
if not attrs:
return
self.write(' [')
first = True
for name, value in attrs.iteritems():
if first:
first = False
else:
self.write(", ")
self.id(name)
self.write('=')
self.id(value)
self.write(']')
def id(self, id):
if isinstance(id, (int, float)):
s = str(id)
elif isinstance(id, str):
if id.isalnum():
s = id
else:
s = self.escape(id)
else:
raise TypeError
self.write(s)
def color(self, (r, g, b)):
def float2int(f):
if f <= 0.0:
return 0
if f >= 1.0:
return 255
return int(255.0*f + 0.5)
return "#" + "".join(["%02x" % float2int(c) for c in (r, g, b)])
def escape(self, s):
s = s.encode('utf-8')
s = s.replace('\\', r'\\')
s = s.replace('\n', r'\n')
s = s.replace('\t', r'\t')
s = s.replace('"', r'\"')
return '"' + s + '"'
def write(self, s):
self.fp.write(s)
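    # Illustrative usage (sketch added for clarity, not part of the original
    # module). Writing a tiny graph by hand would emit DOT text along these
    # lines (attribute order follows dict iteration order, so it may vary):
    #
    #   w = DotWriter(sys.stdout)
    #   w.begin_graph()                      # digraph {
    #   w.node('main', label='main 100%')    #     main [label="main 100%"];
    #   w.edge('main', 'foo', label='50%')   #     main -> foo [label="50%"];
    #   w.end_graph()                        # }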
class Main:
"""Main program."""
themes = {
"color": TEMPERATURE_COLORMAP,
"pink": PINK_COLORMAP,
"gray": GRAY_COLORMAP,
"bw": BW_COLORMAP,
}
def main(self):
"""Main program."""
parser = optparse.OptionParser(
usage="\n\t%prog [options] [file] ...",
version="%%prog %s" % __version__)
parser.add_option(
'-o', '--output', metavar='FILE',
type="string", dest="output",
help="output filename [stdout]")
parser.add_option(
'-n', '--node-thres', metavar='PERCENTAGE',
type="float", dest="node_thres", default=0.5,
help="eliminate nodes below this threshold [default: %default]")
parser.add_option(
'-e', '--edge-thres', metavar='PERCENTAGE',
type="float", dest="edge_thres", default=0.1,
help="eliminate edges below this threshold [default: %default]")
parser.add_option(
'-f', '--format',
type="choice", choices=('prof', 'oprofile', 'pstats', 'shark'),
dest="format", default="prof",
help="profile format: prof, oprofile, or pstats [default: %default]")
parser.add_option(
'-c', '--colormap',
type="choice", choices=('color', 'pink', 'gray', 'bw'),
dest="theme", default="color",
help="color map: color, pink, gray, or bw [default: %default]")
parser.add_option(
'-s', '--strip',
action="store_true",
dest="strip", default=False,
help="strip function parameters, template parameters, and const modifiers from demangled C++ function names")
parser.add_option(
'-w', '--wrap',
action="store_true",
dest="wrap", default=False,
help="wrap function names")
(self.options, self.args) = parser.parse_args(sys.argv[1:])
if len(self.args) > 1 and self.options.format != 'pstats':
parser.error('incorrect number of arguments')
try:
self.theme = self.themes[self.options.theme]
except KeyError:
parser.error('invalid colormap \'%s\'' % self.options.theme)
if self.options.format == 'prof':
if not self.args:
fp = sys.stdin
else:
fp = open(self.args[0], 'rt')
parser = GprofParser(fp)
elif self.options.format == 'oprofile':
if not self.args:
fp = sys.stdin
else:
fp = open(self.args[0], 'rt')
parser = OprofileParser(fp)
elif self.options.format == 'pstats':
if not self.args:
parser.error('at least a file must be specified for pstats input')
parser = PstatsParser(*self.args)
elif self.options.format == 'shark':
if not self.args:
fp = sys.stdin
else:
fp = open(self.args[0], 'rt')
parser = SharkParser(fp)
else:
parser.error('invalid format \'%s\'' % self.options.format)
self.profile = parser.parse()
if self.options.output is None:
self.output = sys.stdout
else:
self.output = open(self.options.output, 'wt')
self.write_graph()
_parenthesis_re = re.compile(r'\([^()]*\)')
_angles_re = re.compile(r'<[^<>]*>')
_const_re = re.compile(r'\s+const$')
def strip_function_name(self, name):
"""Remove extraneous information from C++ demangled function names."""
# Strip function parameters from name by recursively removing paired parenthesis
while True:
name, n = self._parenthesis_re.subn('', name)
if not n:
break
# Strip const qualifier
name = self._const_re.sub('', name)
# Strip template parameters from name by recursively removing paired angles
while True:
name, n = self._angles_re.subn('', name)
if not n:
break
return name
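    # Worked example (added note, not in the original file): for the demangled
    # name "std::vector<int>::push_back(int const&) const" the parenthesis pass
    # yields "std::vector<int>::push_back const", the const pass drops the
    # trailing qualifier, and the angle-bracket pass leaves "std::vector::push_back".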
def wrap_function_name(self, name):
"""Split the function name on multiple lines."""
if len(name) > 32:
ratio = 2.0/3.0
height = max(int(len(name)/(1.0 - ratio) + 0.5), 1)
width = max(len(name)/height, 32)
# TODO: break lines in symbols
name = textwrap.fill(name, width, break_long_words=False)
# Take away spaces
name = name.replace(", ", ",")
name = name.replace("> >", ">>")
name = name.replace("> >", ">>") # catch consecutive
return name
def compress_function_name(self, name):
"""Compress function name according to the user preferences."""
if self.options.strip:
name = self.strip_function_name(name)
if self.options.wrap:
name = self.wrap_function_name(name)
# TODO: merge functions with same resulting name
return name
def write_graph(self):
dot = DotWriter(self.output)
profile = self.profile
profile.prune(self.options.node_thres/100.0, self.options.edge_thres/100.0)
for function in profile.functions.itervalues():
function.name = self.compress_function_name(function.name)
dot.graph(profile, self.theme)
if __name__ == '__main__':
Main().main()
|
dpimenov/tvdb_api
|
tests/gprof2dot.py
|
Python
|
unlicense
| 53,218 | 0.004209 |
from HSM_Reactions import *
########## RIGHT-HAND SIDES OF THE ODEs, rewritten with only 10 equations to isolate those that are independent ##############
def f10eqs(t, y, ksetDict, TparamSet, REACparamSet, DirectControlnuPp, IC_PplusPp, IC_SplusSs):
#P = y[0]
Ph = y[0]
#S = y[2]
Ss = y[1]
F = y[2]
Fs = y[3]
G = y[4]
FsG = y[5]
FG = y[6]
RF = y[7]
RHP = y[8]
HP = y[9]
kP0 = ksetDict["kP0"]
kP0p = ksetDict["kP0p"]
kS = ksetDict["kS"]
kSp0 = ksetDict["kSp0"]
kFp0 = ksetDict["kFp0"]
kF0 = ksetDict["kF0"]
kFpi0 = ksetDict["kFpi0"]
kFGp = ksetDict["kFGp"]
kFG = ksetDict["kFG"]
ketaF = ksetDict["ketaF"]
kFsG = ksetDict["kFsG"]
kFsGp = ksetDict["kFsGp"]
kFsp = ksetDict["kFsp"]
kFs = ksetDict["kFs"]
kpiRF = ksetDict["kpiRF"]
kpiRH = ksetDict["kpiRH"]
kpiHP = ksetDict["kpiHP"]
ketaHP = ksetDict["ketaHP"]
ketaRF = ksetDict["ketaRF"]
ketaRHP = ksetDict["ketaRHP"]
n1 = REACparamSet["n1"]
n2 = REACparamSet["n2"]
P0const = REACparamSet["P0const"]
I = REACparamSet["I"]
T0const = REACparamSet["T0const"]
piRFconst = REACparamSet["piRFconst"]
piRHPconst = REACparamSet["piRHPconst"]
PplusPpCONST = IC_PplusPp # (microM) Initial Condition protein P
    SplusSsCONST = IC_SplusSs # (microM) Initial Condition stress kinase S
system = [
#nuP(Ph, HP, kP0) - nuPp(P, t, kP0p, n1, T0const, TparamSet, DirectControlnuPp), # P
- nuP(Ph, HP, kP0) + nuPp(PplusPpCONST - Ph, t, kP0p, n1, T0const, TparamSet, DirectControlnuPp), # Ph
#nuS(Ss, kS) - nuSp(S, Ph, kSp0, n2, P0const), # S
- nuS(Ss, kS) + nuSp(SplusSsCONST - Ss, Ph, kSp0, n2, P0const), # Ss
nuF(I, Fs, kF0) + piF(RF, kFpi0) + nuFGp(FG, kFGp) - nuFG(G, F, kFG) - nuFp(F, Ss, kFp0) - etaF(F, ketaF), # F
- nuF(I, Fs, kF0) + nuFp(F, Ss, kFp0) + nuFsGp(FsG, kFsGp) - nuFsG(G, Fs, kFsG), # Fs
nuFsGp(FsG, kFsGp) + nuFGp(FG, kFGp) - nuFG(G, F, kFG) - nuFsG(G, Fs, kFsG), # G
nuFsG(G, Fs, kFsG) + nuFs(FG, kFs) - nuFsp(FsG, I, kFsp) - nuFsGp(FsG, kFsGp), # FsG
nuFsp(FsG, I, kFsp) + nuFG(G, F, kFG) - nuFGp(FG, kFGp) - nuFs(FG, kFs), # FG
piRF(FsG, kpiRF) + piRFAddConst(piRFconst) - etaRF(RF, ketaRF), # RF Added const to Alex model
        piRHP(FsG, kpiRH) + piRHPAddConst(piRHPconst) - etaRHP(RHP, ketaRHP), # RHP Added const to Alex model
piHP(RHP, kpiHP) - etaHP(HP, ketaHP)] # HP
    # Note the presence of nuFG() in the equation for F, of nuFsG() in that for Fs, and the absence of pi in that for FsG.
return system
########## RIGHT-HAND SIDES OF THE ODEs, rewritten with only 9 equations to isolate those that are independent ##############
def f9eqs(t, y, ksetDict, TparamSet, REACparamSet, DirectControlnuPp, IC_PplusPp, IC_SplusSs, IC_GplusFsGplusFG):
#P = y[0]
Ph = y[0]
#S = y[2]
Ss = y[1]
F = y[2]
Fs = y[3]
#G = y[4]
FsG = y[4]
FG = y[5]
RF = y[6]
RHP = y[7]
HP = y[8]
kP0 = ksetDict["kP0"]
kP0p = ksetDict["kP0p"]
kS = ksetDict["kS"]
kSp0 = ksetDict["kSp0"]
kFp0 = ksetDict["kFp0"]
kF0 = ksetDict["kF0"]
kFpi0 = ksetDict["kFpi0"]
kFGp = ksetDict["kFGp"]
kFG = ksetDict["kFG"]
ketaF = ksetDict["ketaF"]
kFsG = ksetDict["kFsG"]
kFsGp = ksetDict["kFsGp"]
kFsp = ksetDict["kFsp"]
kFs = ksetDict["kFs"]
kpiRF = ksetDict["kpiRF"]
kpiRH = ksetDict["kpiRH"]
kpiHP = ksetDict["kpiHP"]
ketaHP = ksetDict["ketaHP"]
ketaRF = ksetDict["ketaRF"]
ketaRHP = ksetDict["ketaRHP"]
n1 = REACparamSet["n1"]
n2 = REACparamSet["n2"]
P0const = REACparamSet["P0const"]
I = REACparamSet["I"]
T0const = REACparamSet["T0const"]
piRFconst = REACparamSet["piRFconst"]
piRHPconst = REACparamSet["piRHPconst"]
PplusPpCONST = IC_PplusPp # (microM) Initial Condition protein P
    SplusSsCONST = IC_SplusSs # (microM) Initial Condition stress kinase S
GplusFsGplusFG = IC_GplusFsGplusFG # (microM) Initial Condition gene G
G = GplusFsGplusFG - FsG - FG
system = [
#nuP(Ph, HP, kP0) - nuPp(P, t, kP0p, n1, T0const, TparamSet, DirectControlnuPp), # P
- nuP(Ph, HP, kP0) + nuPp(PplusPpCONST - Ph, t, kP0p, n1, T0const, TparamSet, DirectControlnuPp), # Ph
#nuS(Ss, kS) - nuSp(S, Ph, kSp0, n2, P0const), # S
- nuS(Ss, kS) + nuSp(SplusSsCONST - Ss, Ph, kSp0, n2, P0const), # Ss
nuF(I, Fs, kF0) + piF(RF, kFpi0) + nuFGp(FG, kFGp) - nuFG(G, F, kFG) - nuFp(F, Ss, kFp0) - etaF(F, ketaF), # F
- nuF(I, Fs, kF0) + nuFp(F, Ss, kFp0) + nuFsGp(FsG, kFsGp) - nuFsG(G, Fs, kFsG), # Fs
#nuFsGp(FsG, kFsGp) + nuFGp(FG, kFGp) - nuFG(G, F, kFG) - nuFsG(G, Fs, kFsG), # G
nuFsG(G, Fs, kFsG) + nuFs(FG, kFs) - nuFsp(FsG, I, kFsp) - nuFsGp(FsG, kFsGp), # FsG
nuFsp(FsG, I, kFsp) + nuFG(G, F, kFG) - nuFGp(FG, kFGp) - nuFs(FG, kFs), # FG
piRF(FsG, kpiRF) + piRFAddConst(piRFconst) - etaRF(RF, ketaRF), # RF Added const to Alex model
        piRHP(FsG, kpiRH) + piRHPAddConst(piRHPconst) - etaRHP(RHP, ketaRHP), # RHP Added const to Alex model
piHP(RHP, kpiHP) - etaHP(HP, ketaHP)] # HP
    # Note the presence of nuFG() in the equation for F, of nuFsG() in that for Fs, and the absence of pi in that for FsG.
return system
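# Integration sketch (assumption added for clarity, not part of the original
# model code): both right-hand-side functions follow the f(t, y, ...) convention
# used by SciPy solvers, so a run could look roughly like
#
#   from scipy.integrate import solve_ivp
#   rhs = lambda t, y: f9eqs(t, y, ksetDict, TparamSet, REACparamSet,
#                            DirectControlnuPp, IC_PplusPp, IC_SplusSs,
#                            IC_GplusFsGplusFG)
#   sol = solve_ivp(rhs, (0.0, t_end), y0, method="LSODA")
#
# where ksetDict/REACparamSet hold the rate constants listed above and y0 is the
# 9-component initial state (Ph, Ss, F, Fs, FsG, FG, RF, RHP, HP).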
|
QTB-HHU/ModelHeatShock
|
HSM_ODEsSystem10or9eqs.py
|
Python
|
gpl-3.0
| 5,759 | 0.009029 |
#!/usr/bin/env python3
#########################################################################
# File Name: mthreading.py
# Author: ly
# Created Time: Wed 05 Jul 2017 08:46:57 PM CST
# Description:
#########################################################################
# -*- coding: utf-8 -*-
import time
import threading
def play(name,count):
for i in range(1,count):
print('%s %d in %d' %(name, i, count))
time.sleep(1)
return
if __name__=='__main__':
t1=threading.Thread(target=play, args=('t1',10))
# 设置为守护线程
t1.setDaemon(True)
t1.start()
print("main")
    # Wait for the child thread to finish
t1.join()
exit(1)
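# Note (added observation, not in the original script): because t1.join() is
# called, the main thread waits for the worker to finish anyway; the daemon
# flag only matters if the join is removed, in which case the interpreter may
# exit while play() is still counting.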
|
LingyuGitHub/codingofly
|
python/threading/mthreading.py
|
Python
|
gpl-3.0
| 699 | 0.013413 |
# coding=utf-8
"""TV base class."""
from __future__ import unicode_literals
import threading
from builtins import object
from medusa.indexers.config import INDEXER_TVDBV2
class Identifier(object):
"""Base identifier class."""
def __bool__(self):
"""Magic method."""
raise NotImplementedError
def __ne__(self, other):
"""Magic method."""
return not self == other
class TV(object):
"""Base class for Series and Episode."""
def __init__(self, indexer, indexerid, ignored_properties):
"""Initialize class.
:param indexer:
:type indexer: int
:param indexerid:
:type indexerid: int
:param ignored_properties:
:type ignored_properties: set(str)
"""
self.__dirty = True
self.__ignored_properties = ignored_properties | {'lock'}
self.indexer = int(indexer)
self.indexerid = int(indexerid)
self.lock = threading.Lock()
@property
def series_id(self):
"""To make a clear distinction between an indexer and the id for the series. You can now also use series_id."""
return self.indexerid
def __setattr__(self, key, value):
"""Set the corresponding attribute and use the dirty flag if the new value is different from the old value.
:param key:
:type key: str
:param value:
"""
if key == '_location' or (not key.startswith('_') and key not in self.__ignored_properties):
self.__dirty |= self.__dict__.get(key) != value
super(TV, self).__setattr__(key, value)
@property
def dirty(self):
"""Return the dirty flag.
:return:
:rtype: bool
"""
return self.__dirty
def reset_dirty(self):
"""Reset the dirty flag."""
self.__dirty = False
@property
def tvdb_id(self):
"""Get the item's tvdb_id."""
if self.indexerid and self.indexer == INDEXER_TVDBV2:
return self.indexerid
def __getstate__(self):
"""Make object serializable."""
d = dict(self.__dict__)
del d['lock']
return d
def __setstate__(self, d):
"""Un-serialize the object."""
d['lock'] = threading.Lock()
self.__dict__.update(d)
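# Illustrative behaviour of the dirty flag (sketch added for clarity, not part
# of the original module; the attribute names below are made up):
#
#   tv = TV(INDEXER_TVDBV2, 12345, ignored_properties={'cache'})
#   tv.reset_dirty()
#   tv.name = 'Some Show'   # tracked attribute changed -> tv.dirty becomes True
#   tv.cache = object()     # ignored property          -> dirty flag untouched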
|
pymedusa/SickRage
|
medusa/tv/base.py
|
Python
|
gpl-3.0
| 2,303 | 0.001303 |
from astropy.io import ascii
from astropy.table import MaskedColumn, Table, Column
import logging
import math
import numpy
import os
from .downloads.cutouts.downloader import ImageDownloader
from . import util
from .downloads.cutouts.source import SourceCutout
from astropy.time import Time
from .astrom import Observation
from . import storage
BRIGHT_LIMIT = 23.0
OBJECT_PLANTED = "Object.planted"
MINIMUM_BRIGHT_DETECTIONS = 5
MINIMUM_BRIGHT_FRACTION = 0.5
def match_mopfiles(mopfile1, mopfile2):
"""
    Given an input list of 'real' detections and candidate detections, produce a result file that contains
    the measured values from the candidate detections with a flag indicating whether they are real or false.
    @rtype MOPFile
    @return mopfile1 with a new 'real' column containing the index of the matching entry in mopfile2, or -1 if unmatched
"""
pos1 = pos2 = numpy.array([])
if len(mopfile1.data) > 0:
X_COL = "X_{}".format(mopfile1.header.file_ids[0])
Y_COL = "Y_{}".format(mopfile1.header.file_ids[0])
pos1 = numpy.array([mopfile1.data[X_COL].data, mopfile1.data[Y_COL].data]).transpose()
if len(mopfile2.data) > 0:
X_COL = "X_{}".format(mopfile2.header.file_ids[0])
Y_COL = "Y_{}".format(mopfile2.header.file_ids[0])
pos2 = numpy.array([mopfile2.data[X_COL].data, mopfile2.data[Y_COL].data]).transpose()
# match_idx is an order list. The list is in the order of the first list of positions and each entry
# is the index of the matching position from the second list.
match_idx1, match_idx2 = util.match_lists(pos1, pos2)
mopfile1.data.add_column(Column(data=match_idx1.filled(-1), name="real", length=len(mopfile1.data)))
idx = 0
for file_id in mopfile1.header.file_ids:
idx += 1
mopfile1.data.add_column(Column(data=[file_id]*len(mopfile1.data), name="ID_{}".format(idx)))
return mopfile1
def measure_mags(measures):
"""
Given a list of readings compute the magnitudes for all sources in each reading.
@param measures: list of readings
@return: None
"""
from . import daophot
image_downloader = ImageDownloader()
observations = {}
for measure in measures:
for reading in measure:
if reading.obs not in observations:
observations[reading.obs] = {'x': [],
'y': [],
'source': image_downloader.download(reading, needs_apcor=True)}
assert isinstance(reading.obs, Observation)
observations[reading.obs]['x'].append(reading.x)
observations[reading.obs]['y'].append(reading.y)
for observation in observations:
source = observations[observation]['source']
assert isinstance(source, SourceCutout)
hdulist_index = source.get_hdulist_idx(observation.ccdnum)
#source.update_pixel_location((observations[observation]['x'],
# observations[observation]['y']), hdulist_index)
observations[observation]['mags'] = daophot.phot(source._hdu_on_disk(hdulist_index),
observations[observation]['x'],
observations[observation]['y'],
aperture=source.apcor.aperture,
sky=source.apcor.sky,
swidth=source.apcor.swidth,
apcor=source.apcor.apcor,
zmag=source.zmag,
maxcount=30000,
extno=0)
return observations
def match_planted(fk_candidate_observations, match_filename, bright_limit=BRIGHT_LIMIT, object_planted=OBJECT_PLANTED,
minimum_bright_detections=MINIMUM_BRIGHT_DETECTIONS, bright_fraction=MINIMUM_BRIGHT_FRACTION):
"""
Using the fk_candidate_observations as input get the Object.planted file from VOSpace and match
planted sources with found sources.
The Object.planted list is pulled from VOSpace based on the standard file-layout and name of the
first exposure as read from the .astrom file.
:param fk_candidate_observations: name of the fk*reals.astrom file to check against Object.planted
:param match_filename: a file that will contain a list of all planted sources and the matched found source
@param minimum_bright_detections: if there are too few bright detections we raise an error.
"""
found_pos = []
detections = fk_candidate_observations.get_sources()
for detection in detections:
reading = detection.get_reading(0)
# create a list of positions, to be used later by match_lists
found_pos.append([reading.x, reading.y])
# Now get the Object.planted file, either from the local FS or from VOSpace.
objects_planted_uri = object_planted
if not os.access(objects_planted_uri, os.F_OK):
objects_planted_uri = fk_candidate_observations.observations[0].get_object_planted_uri()
try:
lines = storage.open_vos_or_local(objects_planted_uri)
lines = lines.read().decode('utf-8')
except Exception as ex:
logging.critical(f'{ex}')
print(lines)
raise ex
# we are changing the format of the Object.planted header to be compatible with astropy.io.ascii but
# there are some old Object.planted files out there so we do these string/replace calls to reset those.
new_lines = lines.replace("pix rate", "pix_rate")
new_lines = new_lines.replace("""''/h rate""", "sky_rate")
planted_objects_table = ascii.read(new_lines, header_start=-1, data_start=0)
planted_objects_table.meta = None
# The match_list method expects a list that contains a position, not an x and a y vector, so we transpose.
planted_pos = numpy.transpose([planted_objects_table['x'].data, planted_objects_table['y'].data])
# match_idx is an order list. The list is in the order of the first list of positions and each entry
# is the index of the matching position from the second list.
(match_idx, match_fnd) = util.match_lists(numpy.array(planted_pos), numpy.array(found_pos))
assert isinstance(match_idx, numpy.ma.MaskedArray)
assert isinstance(match_fnd, numpy.ma.MaskedArray)
false_positives_table = Table()
# Once we've matched the two lists we'll need some new columns to store the information in.
# these are masked columns so that object.planted entries that have no detected match are left 'blank'.
new_columns = [MaskedColumn(name="measure_x", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_y", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_rate", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_angle", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_mag1", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_merr1", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_mag2", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_merr2", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_mag3", length=len(planted_objects_table), mask=True),
MaskedColumn(name="measure_merr3", length=len(planted_objects_table), mask=True)]
planted_objects_table.add_columns(new_columns)
tlength = 0
new_columns = [MaskedColumn(name="measure_x", length=tlength, mask=True),
MaskedColumn(name="measure_y", length=tlength, mask=True),
MaskedColumn(name="measure_rate", length=0, mask=True),
MaskedColumn(name="measure_angle", length=0, mask=True),
MaskedColumn(name="measure_mag1", length=0, mask=True),
MaskedColumn(name="measure_merr1", length=0, mask=True),
MaskedColumn(name="measure_mag2", length=0, mask=True),
MaskedColumn(name="measure_merr2", length=0, mask=True),
MaskedColumn(name="measure_mag3", length=tlength, mask=True),
MaskedColumn(name="measure_merr3", length=tlength, mask=True)]
false_positives_table.add_columns(new_columns)
# We do some 'checks' on the Object.planted match to diagnose pipeline issues. Those checks are made using just
# those planted sources we should have detected.
bright = planted_objects_table['mag'] < bright_limit
n_bright_planted = numpy.count_nonzero(planted_objects_table['mag'][bright])
measures = []
idxs = []
for idx in range(len(match_idx)):
# The match_idx value is False if nothing was found.
if not match_idx.mask[idx]:
# Each 'source' has multiple 'readings'
measures.append(detections[match_idx[idx]].get_readings())
idxs.append(idx)
observations = measure_mags(measures)
for oidx in range(len(measures)):
idx = idxs[oidx]
readings = measures[oidx]
start_jd = Time(readings[0].obs.header['MJD_OBS_CENTER'], format='mpc', scale='utc').jd
end_jd = Time(readings[-1].obs.header['MJD_OBS_CENTER'], format='mpc', scale='utc').jd
rate = math.sqrt((readings[-1].x - readings[0].x) ** 2 + (readings[-1].y - readings[0].y) ** 2) / (
24 * (end_jd - start_jd))
rate = int(rate * 100) / 100.0
angle = math.degrees(math.atan2(readings[-1].y - readings[0].y, readings[-1].x - readings[0].x))
angle = int(angle * 100) / 100.0
planted_objects_table[idx]['measure_rate'] = rate
planted_objects_table[idx]['measure_angle'] = angle
planted_objects_table[idx]['measure_x'] = observations[readings[0].obs]['mags']["XCENTER"][oidx]
planted_objects_table[idx]['measure_y'] = observations[readings[0].obs]['mags']["YCENTER"][oidx]
for ridx in range(len(readings)):
reading = readings[ridx]
mags = observations[reading.obs]['mags']
planted_objects_table[idx]['measure_mag{}'.format(ridx+1)] = mags["MAG"][oidx]
planted_objects_table[idx]['measure_merr{}'.format(ridx+1)] = mags["MERR"][oidx]
# for idx in range(len(match_fnd)):
# if match_fnd.mask[idx]:
# measures = detections[idx].get_readings()
# false_positives_table.add_row()
# false_positives_table[-1] = measure_mags(measures, false_positives_table[-1])
# Count an object as detected if it has a measured magnitude in the first frame of the triplet.
n_bright_found = numpy.count_nonzero(planted_objects_table['measure_mag1'][bright])
# Also compute the offset and standard deviation of the measured magnitude from that planted ones.
offset = numpy.mean(planted_objects_table['mag'][bright] - planted_objects_table['measure_mag1'][bright])
try:
offset = "{:5.2f}".format(offset)
except:
offset = "indef"
std = numpy.std(planted_objects_table['mag'][bright] - planted_objects_table['measure_mag1'][bright])
try:
std = "{:5.2f}".format(std)
except:
std = "indef"
if os.access(match_filename, os.R_OK):
fout = open(match_filename, 'a')
else:
fout = open(match_filename, 'w')
fout.write("#K {:10s} {:10s}\n".format("EXPNUM", "FWHM"))
for measure in detections[0].get_readings():
fout.write('#V {:10s} {:10s}\n'.format(measure.obs.header['EXPNUM'], measure.obs.header['FWHM']))
fout.write("#K ")
for keyword in ["RMIN", "RMAX", "ANGLE", "AWIDTH"]:
fout.write("{:10s} ".format(keyword))
fout.write("\n")
fout.write("#V ")
for keyword in ["RMIN", "RMAX", "ANGLE", "AWIDTH"]:
fout.write("{:10s} ".format(fk_candidate_observations.sys_header[keyword]))
fout.write("\n")
fout.write("#K ")
for keyword in ["NBRIGHT", "NFOUND", "OFFSET", "STDEV"]:
fout.write("{:10s} ".format(keyword))
fout.write("\n")
fout.write("#V {:<10} {:<10} {:<10} {:<10}\n".format(n_bright_planted,
n_bright_found,
offset,
std))
try:
writer = ascii.FixedWidth
# add a hash to the start of line that will have header columns: for JMP
fout.write("# ")
fout.flush()
ascii.write(planted_objects_table, output=fout, Writer=writer, delimiter=None)
if len(false_positives_table) > 0:
with open(match_filename+".fp", 'a') as fpout:
fpout.write("#")
ascii.write(false_positives_table, output=fpout, Writer=writer, delimiter=None)
except Exception as e:
logging.error(str(e))
raise e
finally:
fout.close()
    # Some simple checks on how we did; raise a failure if too few bright objects were planted or found.
if n_bright_planted < minimum_bright_detections:
raise RuntimeError(1, "Too few bright objects planted.")
if n_bright_found / float(n_bright_planted) < bright_fraction:
raise RuntimeError(2, "Too few bright objects found.")
return "{} {} {} {}".format(n_bright_planted, n_bright_found, offset, std)
|
OSSOS/MOP
|
src/ossos/core/ossos/match.py
|
Python
|
gpl-3.0
| 13,649 | 0.005495 |
import sys
import os
import platform
import re
import imp
from Tkinter import *
import tkSimpleDialog
import tkMessageBox
import webbrowser
from idlelib.MultiCall import MultiCallCreator
from idlelib import idlever
from idlelib import WindowList
from idlelib import SearchDialog
from idlelib import GrepDialog
from idlelib import ReplaceDialog
from idlelib import PyParse
from idlelib.configHandler import idleConf
from idlelib import aboutDialog, textView, configDialog
from idlelib import macosxSupport
# The default tab setting for a Text widget, in average-width characters.
TK_TABWIDTH_DEFAULT = 8
_py_version = ' (%s)' % platform.python_version()
def _sphinx_version():
"Format sys.version_info to produce the Sphinx version string used to install the chm docs"
major, minor, micro, level, serial = sys.version_info
release = '%s%s' % (major, minor)
if micro:
release += '%s' % (micro,)
if level == 'candidate':
release += 'rc%s' % (serial,)
elif level != 'final':
release += '%s%s' % (level[0], serial)
return release
def _find_module(fullname, path=None):
"""Version of imp.find_module() that handles hierarchical module names"""
file = None
for tgt in fullname.split('.'):
if file is not None:
file.close() # close intermediate files
(file, filename, descr) = imp.find_module(tgt, path)
if descr[2] == imp.PY_SOURCE:
break # find but not load the source file
module = imp.load_module(tgt, file, filename, descr)
try:
path = module.__path__
except AttributeError:
raise ImportError, 'No source for module ' + module.__name__
if descr[2] != imp.PY_SOURCE:
# If all of the above fails and didn't raise an exception,fallback
# to a straight import which can find __init__.py in a package.
m = __import__(fullname)
try:
filename = m.__file__
except AttributeError:
pass
else:
file = None
base, ext = os.path.splitext(filename)
if ext == '.pyc':
ext = '.py'
filename = base + ext
descr = filename, None, imp.PY_SOURCE
return file, filename, descr
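# Illustrative call (comment added for clarity, not in the original source):
# _find_module('idlelib.EditorWindow') first locates and loads the 'idlelib'
# package to obtain its __path__, then finds 'EditorWindow' inside that path
# and returns the still-open source file, its filename and the imp description
# tuple without importing the target module itself.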
class HelpDialog(object):
def __init__(self):
self.parent = None # parent of help window
        self.dlg = None # the help window itself
def display(self, parent, near=None):
""" Display the help dialog.
parent - parent widget for the help window
near - a Toplevel widget (e.g. EditorWindow or PyShell)
to use as a reference for placing the help window
"""
if self.dlg is None:
self.show_dialog(parent)
if near:
self.nearwindow(near)
def show_dialog(self, parent):
self.parent = parent
fn=os.path.join(os.path.abspath(os.path.dirname(__file__)),'help.txt')
self.dlg = dlg = textView.view_file(parent,'Help',fn, modal=False)
dlg.bind('<Destroy>', self.destroy, '+')
def nearwindow(self, near):
# Place the help dialog near the window specified by parent.
# Note - this may not reposition the window in Metacity
# if "/apps/metacity/general/disable_workarounds" is enabled
dlg = self.dlg
geom = (near.winfo_rootx() + 10, near.winfo_rooty() + 10)
dlg.withdraw()
dlg.geometry("=+%d+%d" % geom)
dlg.deiconify()
dlg.lift()
def destroy(self, ev=None):
self.dlg = None
self.parent = None
helpDialog = HelpDialog() # singleton instance
def _help_dialog(parent): # wrapper for htest
helpDialog.show_dialog(parent)
class EditorWindow(object):
from idlelib.Percolator import Percolator
from idlelib.ColorDelegator import ColorDelegator
from idlelib.UndoDelegator import UndoDelegator
from idlelib.IOBinding import IOBinding, filesystemencoding, encoding
from idlelib import Bindings
from Tkinter import Toplevel
from idlelib.MultiStatusBar import MultiStatusBar
help_url = None
def __init__(self, flist=None, filename=None, key=None, root=None):
if EditorWindow.help_url is None:
dochome = os.path.join(sys.prefix, 'Doc', 'index.html')
if sys.platform.count('linux'):
# look for html docs in a couple of standard places
pyver = 'python-docs-' + '%s.%s.%s' % sys.version_info[:3]
if os.path.isdir('/var/www/html/python/'): # "python2" rpm
dochome = '/var/www/html/python/index.html'
else:
basepath = '/usr/share/doc/' # standard location
dochome = os.path.join(basepath, pyver,
'Doc', 'index.html')
elif sys.platform[:3] == 'win':
chmfile = os.path.join(sys.prefix, 'Doc',
'Python%s.chm' % _sphinx_version())
if os.path.isfile(chmfile):
dochome = chmfile
elif sys.platform == 'darwin':
# documentation may be stored inside a python framework
dochome = os.path.join(sys.prefix,
'Resources/English.lproj/Documentation/index.html')
dochome = os.path.normpath(dochome)
if os.path.isfile(dochome):
EditorWindow.help_url = dochome
if sys.platform == 'darwin':
# Safari requires real file:-URLs
EditorWindow.help_url = 'file://' + EditorWindow.help_url
else:
EditorWindow.help_url = "https://docs.python.org/%d.%d/" % sys.version_info[:2]
currentTheme=idleConf.CurrentTheme()
self.flist = flist
root = root or flist.root
self.root = root
try:
sys.ps1
except AttributeError:
sys.ps1 = '>>> '
self.menubar = Menu(root)
self.top = top = WindowList.ListedToplevel(root, menu=self.menubar)
if flist:
self.tkinter_vars = flist.vars
#self.top.instance_dict makes flist.inversedict available to
#configDialog.py so it can access all EditorWindow instances
self.top.instance_dict = flist.inversedict
else:
self.tkinter_vars = {} # keys: Tkinter event names
# values: Tkinter variable instances
self.top.instance_dict = {}
self.recent_files_path = os.path.join(idleConf.GetUserCfgDir(),
'recent-files.lst')
self.text_frame = text_frame = Frame(top)
self.vbar = vbar = Scrollbar(text_frame, name='vbar')
self.width = idleConf.GetOption('main','EditorWindow','width', type='int')
text_options = {
'name': 'text',
'padx': 5,
'wrap': 'none',
'width': self.width,
'height': idleConf.GetOption('main', 'EditorWindow', 'height', type='int')}
if TkVersion >= 8.5:
# Starting with tk 8.5 we have to set the new tabstyle option
# to 'wordprocessor' to achieve the same display of tabs as in
# older tk versions.
text_options['tabstyle'] = 'wordprocessor'
self.text = text = MultiCallCreator(Text)(text_frame, **text_options)
self.top.focused_widget = self.text
self.createmenubar()
self.apply_bindings()
self.top.protocol("WM_DELETE_WINDOW", self.close)
self.top.bind("<<close-window>>", self.close_event)
if macosxSupport.isAquaTk():
# Command-W on editorwindows doesn't work without this.
text.bind('<<close-window>>', self.close_event)
# Some OS X systems have only one mouse button,
# so use control-click for pulldown menus there.
# (Note, AquaTk defines <2> as the right button if
# present and the Tk Text widget already binds <2>.)
text.bind("<Control-Button-1>",self.right_menu_event)
else:
# Elsewhere, use right-click for pulldown menus.
text.bind("<3>",self.right_menu_event)
text.bind("<<cut>>", self.cut)
text.bind("<<copy>>", self.copy)
text.bind("<<paste>>", self.paste)
text.bind("<<center-insert>>", self.center_insert_event)
text.bind("<<help>>", self.help_dialog)
text.bind("<<python-docs>>", self.python_docs)
text.bind("<<about-idle>>", self.about_dialog)
text.bind("<<open-config-dialog>>", self.config_dialog)
text.bind("<<open-config-extensions-dialog>>",
self.config_extensions_dialog)
text.bind("<<open-module>>", self.open_module)
text.bind("<<do-nothing>>", lambda event: "break")
text.bind("<<select-all>>", self.select_all)
text.bind("<<remove-selection>>", self.remove_selection)
text.bind("<<find>>", self.find_event)
text.bind("<<find-again>>", self.find_again_event)
text.bind("<<find-in-files>>", self.find_in_files_event)
text.bind("<<find-selection>>", self.find_selection_event)
text.bind("<<replace>>", self.replace_event)
text.bind("<<goto-line>>", self.goto_line_event)
text.bind("<<smart-backspace>>",self.smart_backspace_event)
text.bind("<<newline-and-indent>>",self.newline_and_indent_event)
text.bind("<<smart-indent>>",self.smart_indent_event)
text.bind("<<indent-region>>",self.indent_region_event)
text.bind("<<dedent-region>>",self.dedent_region_event)
text.bind("<<comment-region>>",self.comment_region_event)
text.bind("<<uncomment-region>>",self.uncomment_region_event)
text.bind("<<tabify-region>>",self.tabify_region_event)
text.bind("<<untabify-region>>",self.untabify_region_event)
text.bind("<<toggle-tabs>>",self.toggle_tabs_event)
text.bind("<<change-indentwidth>>",self.change_indentwidth_event)
text.bind("<Left>", self.move_at_edge_if_selection(0))
text.bind("<Right>", self.move_at_edge_if_selection(1))
text.bind("<<del-word-left>>", self.del_word_left)
text.bind("<<del-word-right>>", self.del_word_right)
text.bind("<<beginning-of-line>>", self.home_callback)
if flist:
flist.inversedict[self] = key
if key:
flist.dict[key] = self
text.bind("<<open-new-window>>", self.new_callback)
text.bind("<<close-all-windows>>", self.flist.close_all_callback)
text.bind("<<open-class-browser>>", self.open_class_browser)
text.bind("<<open-path-browser>>", self.open_path_browser)
self.set_status_bar()
vbar['command'] = text.yview
vbar.pack(side=RIGHT, fill=Y)
text['yscrollcommand'] = vbar.set
fontWeight = 'normal'
if idleConf.GetOption('main', 'EditorWindow', 'font-bold', type='bool'):
fontWeight='bold'
text.config(font=(idleConf.GetOption('main', 'EditorWindow', 'font'),
idleConf.GetOption('main', 'EditorWindow',
'font-size', type='int'),
fontWeight))
text_frame.pack(side=LEFT, fill=BOTH, expand=1)
text.pack(side=TOP, fill=BOTH, expand=1)
text.focus_set()
# usetabs true -> literal tab characters are used by indent and
# dedent cmds, possibly mixed with spaces if
# indentwidth is not a multiple of tabwidth,
# which will cause Tabnanny to nag!
# false -> tab characters are converted to spaces by indent
# and dedent cmds, and ditto TAB keystrokes
# Although use-spaces=0 can be configured manually in config-main.def,
# configuration of tabs v. spaces is not supported in the configuration
# dialog. IDLE promotes the preferred Python indentation: use spaces!
usespaces = idleConf.GetOption('main', 'Indent', 'use-spaces', type='bool')
self.usetabs = not usespaces
# tabwidth is the display width of a literal tab character.
# CAUTION: telling Tk to use anything other than its default
# tab setting causes it to use an entirely different tabbing algorithm,
# treating tab stops as fixed distances from the left margin.
# Nobody expects this, so for now tabwidth should never be changed.
self.tabwidth = 8 # must remain 8 until Tk is fixed.
# indentwidth is the number of screen characters per indent level.
# The recommended Python indentation is four spaces.
self.indentwidth = self.tabwidth
self.set_notabs_indentwidth()
# If context_use_ps1 is true, parsing searches back for a ps1 line;
# else searches for a popular (if, def, ...) Python stmt.
self.context_use_ps1 = False
# When searching backwards for a reliable place to begin parsing,
# first start num_context_lines[0] lines back, then
# num_context_lines[1] lines back if that didn't work, and so on.
# The last value should be huge (larger than the # of lines in a
# conceivable file).
# Making the initial values larger slows things down more often.
self.num_context_lines = 50, 500, 5000000
self.per = per = self.Percolator(text)
self.undo = undo = self.UndoDelegator()
per.insertfilter(undo)
text.undo_block_start = undo.undo_block_start
text.undo_block_stop = undo.undo_block_stop
undo.set_saved_change_hook(self.saved_change_hook)
# IOBinding implements file I/O and printing functionality
self.io = io = self.IOBinding(self)
io.set_filename_change_hook(self.filename_change_hook)
# Create the recent files submenu
self.recent_files_menu = Menu(self.menubar)
self.menudict['file'].insert_cascade(3, label='Recent Files',
underline=0,
menu=self.recent_files_menu)
self.update_recent_files_list()
self.color = None # initialized below in self.ResetColorizer
if filename:
if os.path.exists(filename) and not os.path.isdir(filename):
io.loadfile(filename)
else:
io.set_filename(filename)
self.ResetColorizer()
self.saved_change_hook()
self.set_indentation_params(self.ispythonsource(filename))
self.load_extensions()
menu = self.menudict.get('windows')
if menu:
end = menu.index("end")
if end is None:
end = -1
if end >= 0:
menu.add_separator()
end = end + 1
self.wmenu_end = end
WindowList.register_callback(self.postwindowsmenu)
# Some abstractions so IDLE extensions are cross-IDE
self.askyesno = tkMessageBox.askyesno
self.askinteger = tkSimpleDialog.askinteger
self.showerror = tkMessageBox.showerror
self._highlight_workaround() # Fix selection tags on Windows
def _highlight_workaround(self):
# On Windows, Tk removes painting of the selection
# tags which is different behavior than on Linux and Mac.
# See issue14146 for more information.
if not sys.platform.startswith('win'):
return
text = self.text
text.event_add("<<Highlight-FocusOut>>", "<FocusOut>")
text.event_add("<<Highlight-FocusIn>>", "<FocusIn>")
def highlight_fix(focus):
sel_range = text.tag_ranges("sel")
if sel_range:
if focus == 'out':
HILITE_CONFIG = idleConf.GetHighlight(
idleConf.CurrentTheme(), 'hilite')
text.tag_config("sel_fix", HILITE_CONFIG)
text.tag_raise("sel_fix")
text.tag_add("sel_fix", *sel_range)
elif focus == 'in':
text.tag_remove("sel_fix", "1.0", "end")
text.bind("<<Highlight-FocusOut>>",
lambda ev: highlight_fix("out"))
text.bind("<<Highlight-FocusIn>>",
lambda ev: highlight_fix("in"))
def _filename_to_unicode(self, filename):
"""convert filename to unicode in order to display it in Tk"""
if isinstance(filename, unicode) or not filename:
return filename
else:
try:
return filename.decode(self.filesystemencoding)
except UnicodeDecodeError:
# XXX
try:
return filename.decode(self.encoding)
except UnicodeDecodeError:
# byte-to-byte conversion
return filename.decode('iso8859-1')
def new_callback(self, event):
dirname, basename = self.io.defaultfilename()
self.flist.new(dirname)
return "break"
def home_callback(self, event):
if (event.state & 4) != 0 and event.keysym == "Home":
# state&4==Control. If <Control-Home>, use the Tk binding.
return
if self.text.index("iomark") and \
self.text.compare("iomark", "<=", "insert lineend") and \
self.text.compare("insert linestart", "<=", "iomark"):
# In Shell on input line, go to just after prompt
insertpt = int(self.text.index("iomark").split(".")[1])
else:
line = self.text.get("insert linestart", "insert lineend")
for insertpt in xrange(len(line)):
if line[insertpt] not in (' ','\t'):
break
else:
insertpt=len(line)
lineat = int(self.text.index("insert").split('.')[1])
if insertpt == lineat:
insertpt = 0
dest = "insert linestart+"+str(insertpt)+"c"
if (event.state&1) == 0:
# shift was not pressed
self.text.tag_remove("sel", "1.0", "end")
else:
if not self.text.index("sel.first"):
self.text.mark_set("my_anchor", "insert") # there was no previous selection
else:
if self.text.compare(self.text.index("sel.first"), "<", self.text.index("insert")):
self.text.mark_set("my_anchor", "sel.first") # extend back
else:
self.text.mark_set("my_anchor", "sel.last") # extend forward
first = self.text.index(dest)
last = self.text.index("my_anchor")
if self.text.compare(first,">",last):
first,last = last,first
self.text.tag_remove("sel", "1.0", "end")
self.text.tag_add("sel", first, last)
self.text.mark_set("insert", dest)
self.text.see("insert")
return "break"
def set_status_bar(self):
self.status_bar = self.MultiStatusBar(self.top)
if sys.platform == "darwin":
# Insert some padding to avoid obscuring some of the statusbar
# by the resize widget.
self.status_bar.set_label('_padding1', ' ', side=RIGHT)
self.status_bar.set_label('column', 'Col: ?', side=RIGHT)
self.status_bar.set_label('line', 'Ln: ?', side=RIGHT)
self.status_bar.pack(side=BOTTOM, fill=X)
self.text.bind("<<set-line-and-column>>", self.set_line_and_column)
self.text.event_add("<<set-line-and-column>>",
"<KeyRelease>", "<ButtonRelease>")
self.text.after_idle(self.set_line_and_column)
def set_line_and_column(self, event=None):
line, column = self.text.index(INSERT).split('.')
self.status_bar.set_label('column', 'Col: %s' % column)
self.status_bar.set_label('line', 'Ln: %s' % line)
menu_specs = [
("file", "_File"),
("edit", "_Edit"),
("format", "F_ormat"),
("run", "_Run"),
("options", "_Options"),
("windows", "_Windows"),
("help", "_Help"),
]
if sys.platform == "darwin":
menu_specs[-2] = ("windows", "_Window")
def createmenubar(self):
mbar = self.menubar
self.menudict = menudict = {}
for name, label in self.menu_specs:
underline, label = prepstr(label)
menudict[name] = menu = Menu(mbar, name=name)
mbar.add_cascade(label=label, menu=menu, underline=underline)
if macosxSupport.isCarbonTk():
# Insert the application menu
menudict['application'] = menu = Menu(mbar, name='apple')
mbar.add_cascade(label='IDLE', menu=menu)
self.fill_menus()
self.base_helpmenu_length = self.menudict['help'].index(END)
self.reset_help_menu_entries()
def postwindowsmenu(self):
# Only called when Windows menu exists
menu = self.menudict['windows']
end = menu.index("end")
if end is None:
end = -1
if end > self.wmenu_end:
menu.delete(self.wmenu_end+1, end)
WindowList.add_windows_to_menu(menu)
rmenu = None
def right_menu_event(self, event):
self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
if not self.rmenu:
self.make_rmenu()
rmenu = self.rmenu
self.event = event
iswin = sys.platform[:3] == 'win'
if iswin:
self.text.config(cursor="arrow")
for item in self.rmenu_specs:
try:
label, eventname, verify_state = item
except ValueError: # see issue1207589
continue
if verify_state is None:
continue
state = getattr(self, verify_state)()
rmenu.entryconfigure(label, state=state)
rmenu.tk_popup(event.x_root, event.y_root)
if iswin:
self.text.config(cursor="ibeam")
rmenu_specs = [
# ("Label", "<<virtual-event>>", "statefuncname"), ...
("Close", "<<close-window>>", None), # Example
]
def make_rmenu(self):
rmenu = Menu(self.text, tearoff=0)
for item in self.rmenu_specs:
label, eventname = item[0], item[1]
if label is not None:
def command(text=self.text, eventname=eventname):
text.event_generate(eventname)
rmenu.add_command(label=label, command=command)
else:
rmenu.add_separator()
self.rmenu = rmenu
def rmenu_check_cut(self):
return self.rmenu_check_copy()
def rmenu_check_copy(self):
try:
indx = self.text.index('sel.first')
except TclError:
return 'disabled'
else:
return 'normal' if indx else 'disabled'
def rmenu_check_paste(self):
try:
self.text.tk.call('tk::GetSelection', self.text, 'CLIPBOARD')
except TclError:
return 'disabled'
else:
return 'normal'
def about_dialog(self, event=None):
aboutDialog.AboutDialog(self.top,'About IDLE')
def config_dialog(self, event=None):
configDialog.ConfigDialog(self.top,'Settings')
def config_extensions_dialog(self, event=None):
configDialog.ConfigExtensionsDialog(self.top)
def help_dialog(self, event=None):
if self.root:
parent = self.root
else:
parent = self.top
helpDialog.display(parent, near=self.top)
def python_docs(self, event=None):
if sys.platform[:3] == 'win':
try:
os.startfile(self.help_url)
except WindowsError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
webbrowser.open(self.help_url)
return "break"
def cut(self,event):
self.text.event_generate("<<Cut>>")
return "break"
def copy(self,event):
if not self.text.tag_ranges("sel"):
# There is no selection, so do nothing and maybe interrupt.
return
self.text.event_generate("<<Copy>>")
return "break"
def paste(self,event):
self.text.event_generate("<<Paste>>")
self.text.see("insert")
return "break"
def select_all(self, event=None):
self.text.tag_add("sel", "1.0", "end-1c")
self.text.mark_set("insert", "1.0")
self.text.see("insert")
return "break"
def remove_selection(self, event=None):
self.text.tag_remove("sel", "1.0", "end")
self.text.see("insert")
def move_at_edge_if_selection(self, edge_index):
"""Cursor move begins at start or end of selection
When a left/right cursor key is pressed create and return to Tkinter a
function which causes a cursor move from the associated edge of the
selection.
"""
self_text_index = self.text.index
self_text_mark_set = self.text.mark_set
edges_table = ("sel.first+1c", "sel.last-1c")
def move_at_edge(event):
if (event.state & 5) == 0: # no shift(==1) or control(==4) pressed
try:
self_text_index("sel.first")
self_text_mark_set("insert", edges_table[edge_index])
except TclError:
pass
return move_at_edge
def del_word_left(self, event):
self.text.event_generate('<Meta-Delete>')
return "break"
def del_word_right(self, event):
self.text.event_generate('<Meta-d>')
return "break"
def find_event(self, event):
SearchDialog.find(self.text)
return "break"
def find_again_event(self, event):
SearchDialog.find_again(self.text)
return "break"
def find_selection_event(self, event):
SearchDialog.find_selection(self.text)
return "break"
def find_in_files_event(self, event):
GrepDialog.grep(self.text, self.io, self.flist)
return "break"
def replace_event(self, event):
ReplaceDialog.replace(self.text)
return "break"
def goto_line_event(self, event):
text = self.text
lineno = tkSimpleDialog.askinteger("Goto",
"Go to line number:",parent=text)
if lineno is None:
return "break"
if lineno <= 0:
text.bell()
return "break"
text.mark_set("insert", "%d.0" % lineno)
text.see("insert")
def open_module(self, event=None):
# XXX Shouldn't this be in IOBinding or in FileList?
try:
name = self.text.get("sel.first", "sel.last")
except TclError:
name = ""
else:
name = name.strip()
name = tkSimpleDialog.askstring("Module",
"Enter the name of a Python module\n"
"to search on sys.path and open:",
parent=self.text, initialvalue=name)
if name:
name = name.strip()
if not name:
return
# XXX Ought to insert current file's directory in front of path
try:
(f, file_path, (suffix, mode, mtype)) = _find_module(name)
except (NameError, ImportError) as msg:
tkMessageBox.showerror("Import error", str(msg), parent=self.text)
return
if mtype != imp.PY_SOURCE:
tkMessageBox.showerror("Unsupported type",
"%s is not a source module" % name, parent=self.text)
return
if f:
f.close()
if self.flist:
self.flist.open(file_path)
else:
self.io.loadfile(file_path)
return file_path
def open_class_browser(self, event=None):
filename = self.io.filename
if not (self.__class__.__name__ == 'PyShellEditorWindow'
and filename):
filename = self.open_module()
if filename is None:
return
head, tail = os.path.split(filename)
base, ext = os.path.splitext(tail)
from idlelib import ClassBrowser
ClassBrowser.ClassBrowser(self.flist, base, [head])
def open_path_browser(self, event=None):
from idlelib import PathBrowser
PathBrowser.PathBrowser(self.flist)
def gotoline(self, lineno):
if lineno is not None and lineno > 0:
self.text.mark_set("insert", "%d.0" % lineno)
self.text.tag_remove("sel", "1.0", "end")
self.text.tag_add("sel", "insert", "insert +1l")
self.center()
def ispythonsource(self, filename):
if not filename or os.path.isdir(filename):
return True
base, ext = os.path.splitext(os.path.basename(filename))
if os.path.normcase(ext) in (".py", ".pyw"):
return True
try:
f = open(filename)
line = f.readline()
f.close()
except IOError:
return False
return line.startswith('#!') and line.find('python') >= 0
def close_hook(self):
if self.flist:
self.flist.unregister_maybe_terminate(self)
self.flist = None
def set_close_hook(self, close_hook):
self.close_hook = close_hook
def filename_change_hook(self):
if self.flist:
self.flist.filename_changed_edit(self)
self.saved_change_hook()
self.top.update_windowlist_registry(self)
self.ResetColorizer()
def _addcolorizer(self):
if self.color:
return
if self.ispythonsource(self.io.filename):
self.color = self.ColorDelegator()
# can add more colorizers here...
if self.color:
self.per.removefilter(self.undo)
self.per.insertfilter(self.color)
self.per.insertfilter(self.undo)
def _rmcolorizer(self):
if not self.color:
return
self.color.removecolors()
self.per.removefilter(self.color)
self.color = None
def ResetColorizer(self):
"Update the color theme"
# Called from self.filename_change_hook and from configDialog.py
self._rmcolorizer()
self._addcolorizer()
theme = idleConf.GetOption('main','Theme','name')
normal_colors = idleConf.GetHighlight(theme, 'normal')
cursor_color = idleConf.GetHighlight(theme, 'cursor', fgBg='fg')
select_colors = idleConf.GetHighlight(theme, 'hilite')
self.text.config(
foreground=normal_colors['foreground'],
background=normal_colors['background'],
insertbackground=cursor_color,
selectforeground=select_colors['foreground'],
selectbackground=select_colors['background'],
)
def ResetFont(self):
"Update the text widgets' font if it is changed"
# Called from configDialog.py
fontWeight='normal'
if idleConf.GetOption('main','EditorWindow','font-bold',type='bool'):
fontWeight='bold'
self.text.config(font=(idleConf.GetOption('main','EditorWindow','font'),
idleConf.GetOption('main','EditorWindow','font-size',
type='int'),
fontWeight))
def RemoveKeybindings(self):
"Remove the keybindings before they are changed."
# Called from configDialog.py
self.Bindings.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
for event, keylist in keydefs.items():
self.text.event_delete(event, *keylist)
for extensionName in self.get_standard_extension_names():
xkeydefs = idleConf.GetExtensionBindings(extensionName)
if xkeydefs:
for event, keylist in xkeydefs.items():
self.text.event_delete(event, *keylist)
def ApplyKeybindings(self):
"Update the keybindings after they are changed"
# Called from configDialog.py
self.Bindings.default_keydefs = keydefs = idleConf.GetCurrentKeySet()
self.apply_bindings()
for extensionName in self.get_standard_extension_names():
xkeydefs = idleConf.GetExtensionBindings(extensionName)
if xkeydefs:
self.apply_bindings(xkeydefs)
#update menu accelerators
menuEventDict = {}
for menu in self.Bindings.menudefs:
menuEventDict[menu[0]] = {}
for item in menu[1]:
if item:
menuEventDict[menu[0]][prepstr(item[0])[1]] = item[1]
for menubarItem in self.menudict.keys():
menu = self.menudict[menubarItem]
end = menu.index(END)
if end is None:
# Skip empty menus
continue
end += 1
for index in range(0, end):
if menu.type(index) == 'command':
accel = menu.entrycget(index, 'accelerator')
if accel:
itemName = menu.entrycget(index, 'label')
event = ''
if menubarItem in menuEventDict:
if itemName in menuEventDict[menubarItem]:
event = menuEventDict[menubarItem][itemName]
if event:
accel = get_accelerator(keydefs, event)
menu.entryconfig(index, accelerator=accel)
def set_notabs_indentwidth(self):
"Update the indentwidth if changed and not using tabs in this window"
# Called from configDialog.py
if not self.usetabs:
self.indentwidth = idleConf.GetOption('main', 'Indent','num-spaces',
type='int')
def reset_help_menu_entries(self):
"Update the additional help entries on the Help menu"
help_list = idleConf.GetAllExtraHelpSourcesList()
helpmenu = self.menudict['help']
# first delete the extra help entries, if any
helpmenu_length = helpmenu.index(END)
if helpmenu_length > self.base_helpmenu_length:
helpmenu.delete((self.base_helpmenu_length + 1), helpmenu_length)
# then rebuild them
if help_list:
helpmenu.add_separator()
for entry in help_list:
cmd = self.__extra_help_callback(entry[1])
helpmenu.add_command(label=entry[0], command=cmd)
# and update the menu dictionary
self.menudict['help'] = helpmenu
def __extra_help_callback(self, helpfile):
"Create a callback with the helpfile value frozen at definition time"
def display_extra_help(helpfile=helpfile):
if not helpfile.startswith(('www', 'http')):
helpfile = os.path.normpath(helpfile)
if sys.platform[:3] == 'win':
try:
os.startfile(helpfile)
except WindowsError as why:
tkMessageBox.showerror(title='Document Start Failure',
message=str(why), parent=self.text)
else:
webbrowser.open(helpfile)
return display_extra_help
def update_recent_files_list(self, new_file=None):
"Load and update the recent files list and menus"
rf_list = []
if os.path.exists(self.recent_files_path):
with open(self.recent_files_path, 'r') as rf_list_file:
rf_list = rf_list_file.readlines()
if new_file:
new_file = os.path.abspath(new_file) + '\n'
if new_file in rf_list:
rf_list.remove(new_file) # move to top
rf_list.insert(0, new_file)
# clean and save the recent files list
bad_paths = []
for path in rf_list:
if '\0' in path or not os.path.exists(path[0:-1]):
bad_paths.append(path)
rf_list = [path for path in rf_list if path not in bad_paths]
ulchars = "1234567890ABCDEFGHIJK"
rf_list = rf_list[0:len(ulchars)]
try:
with open(self.recent_files_path, 'w') as rf_file:
rf_file.writelines(rf_list)
except IOError as err:
if not getattr(self.root, "recentfilelist_error_displayed", False):
self.root.recentfilelist_error_displayed = True
tkMessageBox.showerror(title='IDLE Error',
message='Unable to update Recent Files list:\n%s'
% str(err),
parent=self.text)
# for each edit window instance, construct the recent files menu
for instance in self.top.instance_dict.keys():
menu = instance.recent_files_menu
menu.delete(0, END) # clear, and rebuild:
for i, file_name in enumerate(rf_list):
file_name = file_name.rstrip() # zap \n
# make unicode string to display non-ASCII chars correctly
ufile_name = self._filename_to_unicode(file_name)
callback = instance.__recent_file_callback(file_name)
menu.add_command(label=ulchars[i] + " " + ufile_name,
command=callback,
underline=0)
def __recent_file_callback(self, file_name):
def open_recent_file(fn_closure=file_name):
self.io.open(editFile=fn_closure)
return open_recent_file
def saved_change_hook(self):
short = self.short_title()
long = self.long_title()
if short and long:
title = short + " - " + long + _py_version
elif short:
title = short
elif long:
title = long
else:
title = "Untitled"
icon = short or long or title
if not self.get_saved():
title = "*%s*" % title
icon = "*%s" % icon
self.top.wm_title(title)
self.top.wm_iconname(icon)
def get_saved(self):
return self.undo.get_saved()
def set_saved(self, flag):
self.undo.set_saved(flag)
def reset_undo(self):
self.undo.reset_undo()
def short_title(self):
filename = self.io.filename
if filename:
filename = os.path.basename(filename)
else:
filename = "Untitled"
# return unicode string to display non-ASCII chars correctly
return self._filename_to_unicode(filename)
def long_title(self):
# return unicode string to display non-ASCII chars correctly
return self._filename_to_unicode(self.io.filename or "")
def center_insert_event(self, event):
self.center()
def center(self, mark="insert"):
text = self.text
top, bot = self.getwindowlines()
lineno = self.getlineno(mark)
height = bot - top
newtop = max(1, lineno - height//2)
text.yview(float(newtop))
def getwindowlines(self):
text = self.text
top = self.getlineno("@0,0")
bot = self.getlineno("@0,65535")
if top == bot and text.winfo_height() == 1:
# Geometry manager hasn't run yet
height = int(text['height'])
bot = top + height - 1
return top, bot
def getlineno(self, mark="insert"):
text = self.text
return int(float(text.index(mark)))
def get_geometry(self):
"Return (width, height, x, y)"
geom = self.top.wm_geometry()
m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
tuple = (map(int, m.groups()))
return tuple
def close_event(self, event):
self.close()
def maybesave(self):
if self.io:
if not self.get_saved():
if self.top.state()!='normal':
self.top.deiconify()
self.top.lower()
self.top.lift()
return self.io.maybesave()
def close(self):
reply = self.maybesave()
if str(reply) != "cancel":
self._close()
return reply
def _close(self):
if self.io.filename:
self.update_recent_files_list(new_file=self.io.filename)
WindowList.unregister_callback(self.postwindowsmenu)
self.unload_extensions()
self.io.close()
self.io = None
self.undo = None
if self.color:
self.color.close(False)
self.color = None
self.text = None
self.tkinter_vars = None
self.per.close()
self.per = None
self.top.destroy()
if self.close_hook:
# unless override: unregister from flist, terminate if last window
self.close_hook()
def load_extensions(self):
self.extensions = {}
self.load_standard_extensions()
def unload_extensions(self):
for ins in self.extensions.values():
if hasattr(ins, "close"):
ins.close()
self.extensions = {}
def load_standard_extensions(self):
for name in self.get_standard_extension_names():
try:
self.load_extension(name)
except:
print "Failed to load extension", repr(name)
import traceback
traceback.print_exc()
def get_standard_extension_names(self):
return idleConf.GetExtensions(editor_only=True)
def load_extension(self, name):
try:
mod = __import__(name, globals(), locals(), [])
except ImportError:
print "\nFailed to import extension: ", name
return
cls = getattr(mod, name)
keydefs = idleConf.GetExtensionBindings(name)
if hasattr(cls, "menudefs"):
self.fill_menus(cls.menudefs, keydefs)
ins = cls(self)
self.extensions[name] = ins
if keydefs:
self.apply_bindings(keydefs)
for vevent in keydefs.keys():
methodname = vevent.replace("-", "_")
while methodname[:1] == '<':
methodname = methodname[1:]
while methodname[-1:] == '>':
methodname = methodname[:-1]
methodname = methodname + "_event"
if hasattr(ins, methodname):
self.text.bind(vevent, getattr(ins, methodname))
def apply_bindings(self, keydefs=None):
if keydefs is None:
keydefs = self.Bindings.default_keydefs
text = self.text
text.keydefs = keydefs
for event, keylist in keydefs.items():
if keylist:
text.event_add(event, *keylist)
def fill_menus(self, menudefs=None, keydefs=None):
"""Add appropriate entries to the menus and submenus
Menus that are absent or None in self.menudict are ignored.
"""
if menudefs is None:
menudefs = self.Bindings.menudefs
if keydefs is None:
keydefs = self.Bindings.default_keydefs
menudict = self.menudict
text = self.text
for mname, entrylist in menudefs:
menu = menudict.get(mname)
if not menu:
continue
for entry in entrylist:
if not entry:
menu.add_separator()
else:
label, eventname = entry
checkbutton = (label[:1] == '!')
if checkbutton:
label = label[1:]
underline, label = prepstr(label)
accelerator = get_accelerator(keydefs, eventname)
def command(text=text, eventname=eventname):
text.event_generate(eventname)
if checkbutton:
var = self.get_var_obj(eventname, BooleanVar)
menu.add_checkbutton(label=label, underline=underline,
command=command, accelerator=accelerator,
variable=var)
else:
menu.add_command(label=label, underline=underline,
command=command,
accelerator=accelerator)
def getvar(self, name):
var = self.get_var_obj(name)
if var:
value = var.get()
return value
else:
raise NameError, name
def setvar(self, name, value, vartype=None):
var = self.get_var_obj(name, vartype)
if var:
var.set(value)
else:
raise NameError, name
def get_var_obj(self, name, vartype=None):
var = self.tkinter_vars.get(name)
if not var and vartype:
# create a Tkinter variable object with self.text as master:
self.tkinter_vars[name] = var = vartype(self.text)
return var
# Tk implementations of "virtual text methods" -- each platform
# reusing IDLE's support code needs to define these for its GUI's
# flavor of widget.
# Is character at text_index in a Python string? Return 0 for
# "guaranteed no", true for anything else. This info is expensive
# to compute ab initio, but is probably already known by the
# platform's colorizer.
def is_char_in_string(self, text_index):
if self.color:
# Return true iff colorizer hasn't (re)gotten this far
# yet, or the character is tagged as being in a string
return self.text.tag_prevrange("TODO", text_index) or \
"STRING" in self.text.tag_names(text_index)
else:
# The colorizer is missing: assume the worst
return 1
# If a selection is defined in the text widget, return (start,
# end) as Tkinter text indices, otherwise return (None, None)
def get_selection_indices(self):
try:
first = self.text.index("sel.first")
last = self.text.index("sel.last")
return first, last
except TclError:
return None, None
# Return the text widget's current view of what a tab stop means
# (equivalent width in spaces).
def get_tabwidth(self):
current = self.text['tabs'] or TK_TABWIDTH_DEFAULT
return int(current)
# Set the text widget's current view of what a tab stop means.
def set_tabwidth(self, newtabwidth):
text = self.text
if self.get_tabwidth() != newtabwidth:
pixels = text.tk.call("font", "measure", text["font"],
"-displayof", text.master,
"n" * newtabwidth)
text.configure(tabs=pixels)
# If ispythonsource and guess are true, guess a good value for
# indentwidth based on file content (if possible), and if
# indentwidth != tabwidth set usetabs false.
# In any case, adjust the Text widget's view of what a tab
# character means.
def set_indentation_params(self, ispythonsource, guess=True):
if guess and ispythonsource:
i = self.guess_indent()
if 2 <= i <= 8:
self.indentwidth = i
if self.indentwidth != self.tabwidth:
self.usetabs = False
self.set_tabwidth(self.tabwidth)
def smart_backspace_event(self, event):
text = self.text
first, last = self.get_selection_indices()
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
return "break"
# Delete whitespace left, until hitting a real char or closest
# preceding virtual tab stop.
chars = text.get("insert linestart", "insert")
if chars == '':
if text.compare("insert", ">", "1.0"):
# easy: delete preceding newline
text.delete("insert-1c")
else:
text.bell() # at start of buffer
return "break"
if chars[-1] not in " \t":
# easy: delete preceding real char
text.delete("insert-1c")
return "break"
# Ick. It may require *inserting* spaces if we back up over a
# tab character! This is written to be clear, not fast.
tabwidth = self.tabwidth
have = len(chars.expandtabs(tabwidth))
assert have > 0
want = ((have - 1) // self.indentwidth) * self.indentwidth
        # The debug prompt can span multiple lines....
if self.context_use_ps1:
last_line_of_prompt = sys.ps1.split('\n')[-1]
else:
last_line_of_prompt = ''
ncharsdeleted = 0
while 1:
if chars == last_line_of_prompt:
break
chars = chars[:-1]
ncharsdeleted = ncharsdeleted + 1
have = len(chars.expandtabs(tabwidth))
if have <= want or chars[-1] not in " \t":
break
text.undo_block_start()
text.delete("insert-%dc" % ncharsdeleted, "insert")
if have < want:
text.insert("insert", ' ' * (want - have))
text.undo_block_stop()
return "break"
def smart_indent_event(self, event):
# if intraline selection:
# delete it
# elif multiline selection:
# do indent-region
# else:
# indent one level
text = self.text
first, last = self.get_selection_indices()
text.undo_block_start()
try:
if first and last:
if index2line(first) != index2line(last):
return self.indent_region_event(event)
text.delete(first, last)
text.mark_set("insert", first)
prefix = text.get("insert linestart", "insert")
raw, effective = classifyws(prefix, self.tabwidth)
if raw == len(prefix):
# only whitespace to the left
self.reindent_to(effective + self.indentwidth)
else:
# tab to the next 'stop' within or to right of line's text:
if self.usetabs:
pad = '\t'
else:
effective = len(prefix.expandtabs(self.tabwidth))
n = self.indentwidth
pad = ' ' * (n - effective % n)
text.insert("insert", pad)
text.see("insert")
return "break"
finally:
text.undo_block_stop()
def newline_and_indent_event(self, event):
text = self.text
first, last = self.get_selection_indices()
text.undo_block_start()
try:
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
line = text.get("insert linestart", "insert")
i, n = 0, len(line)
while i < n and line[i] in " \t":
i = i+1
if i == n:
# the cursor is in or at leading indentation in a continuation
# line; just inject an empty line at the start
text.insert("insert linestart", '\n')
return "break"
indent = line[:i]
# strip whitespace before insert point unless it's in the prompt
i = 0
last_line_of_prompt = sys.ps1.split('\n')[-1]
while line and line[-1] in " \t" and line != last_line_of_prompt:
line = line[:-1]
i = i+1
if i:
text.delete("insert - %d chars" % i, "insert")
# strip whitespace after insert point
while text.get("insert") in " \t":
text.delete("insert")
# start new line
text.insert("insert", '\n')
            # adjust indentation for continuations and block open/close;
            # first we need to find the last stmt
lno = index2line(text.index('insert'))
y = PyParse.Parser(self.indentwidth, self.tabwidth)
if not self.context_use_ps1:
for context in self.num_context_lines:
startat = max(lno - context, 1)
startatindex = repr(startat) + ".0"
rawtext = text.get(startatindex, "insert")
y.set_str(rawtext)
bod = y.find_good_parse_start(
self.context_use_ps1,
self._build_char_in_string_func(startatindex))
if bod is not None or startat == 1:
break
y.set_lo(bod or 0)
else:
r = text.tag_prevrange("console", "insert")
if r:
startatindex = r[1]
else:
startatindex = "1.0"
rawtext = text.get(startatindex, "insert")
y.set_str(rawtext)
y.set_lo(0)
c = y.get_continuation_type()
if c != PyParse.C_NONE:
# The current stmt hasn't ended yet.
if c == PyParse.C_STRING_FIRST_LINE:
# after the first line of a string; do not indent at all
pass
elif c == PyParse.C_STRING_NEXT_LINES:
# inside a string which started before this line;
# just mimic the current indent
text.insert("insert", indent)
elif c == PyParse.C_BRACKET:
# line up with the first (if any) element of the
# last open bracket structure; else indent one
# level beyond the indent of the line with the
# last open bracket
self.reindent_to(y.compute_bracket_indent())
elif c == PyParse.C_BACKSLASH:
# if more than one line in this stmt already, just
# mimic the current indent; else if initial line
# has a start on an assignment stmt, indent to
# beyond leftmost =; else to beyond first chunk of
# non-whitespace on initial line
if y.get_num_lines_in_stmt() > 1:
text.insert("insert", indent)
else:
self.reindent_to(y.compute_backslash_indent())
else:
assert 0, "bogus continuation type %r" % (c,)
return "break"
# This line starts a brand new stmt; indent relative to
# indentation of initial line of closest preceding
# interesting stmt.
indent = y.get_base_indent_string()
text.insert("insert", indent)
if y.is_block_opener():
self.smart_indent_event(event)
elif indent and y.is_block_closer():
self.smart_backspace_event(event)
return "break"
finally:
text.see("insert")
text.undo_block_stop()
# Our editwin provides a is_char_in_string function that works
# with a Tk text index, but PyParse only knows about offsets into
# a string. This builds a function for PyParse that accepts an
# offset.
def _build_char_in_string_func(self, startindex):
def inner(offset, _startindex=startindex,
_icis=self.is_char_in_string):
return _icis(_startindex + "+%dc" % offset)
return inner
def indent_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, self.tabwidth)
effective = effective + self.indentwidth
lines[pos] = self._make_blanks(effective) + line[raw:]
self.set_region(head, tail, chars, lines)
return "break"
def dedent_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, self.tabwidth)
effective = max(effective - self.indentwidth, 0)
lines[pos] = self._make_blanks(effective) + line[raw:]
self.set_region(head, tail, chars, lines)
return "break"
def comment_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines) - 1):
line = lines[pos]
lines[pos] = '##' + line
self.set_region(head, tail, chars, lines)
def uncomment_region_event(self, event):
head, tail, chars, lines = self.get_region()
for pos in range(len(lines)):
line = lines[pos]
if not line:
continue
if line[:2] == '##':
line = line[2:]
elif line[:1] == '#':
line = line[1:]
lines[pos] = line
self.set_region(head, tail, chars, lines)
def tabify_region_event(self, event):
head, tail, chars, lines = self.get_region()
tabwidth = self._asktabwidth()
if tabwidth is None: return
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, tabwidth)
ntabs, nspaces = divmod(effective, tabwidth)
lines[pos] = '\t' * ntabs + ' ' * nspaces + line[raw:]
self.set_region(head, tail, chars, lines)
def untabify_region_event(self, event):
head, tail, chars, lines = self.get_region()
tabwidth = self._asktabwidth()
if tabwidth is None: return
for pos in range(len(lines)):
lines[pos] = lines[pos].expandtabs(tabwidth)
self.set_region(head, tail, chars, lines)
def toggle_tabs_event(self, event):
if self.askyesno(
"Toggle tabs",
"Turn tabs " + ("on", "off")[self.usetabs] +
"?\nIndent width " +
("will be", "remains at")[self.usetabs] + " 8." +
"\n Note: a tab is always 8 columns",
parent=self.text):
self.usetabs = not self.usetabs
# Try to prevent inconsistent indentation.
# User must change indent width manually after using tabs.
self.indentwidth = 8
return "break"
# XXX this isn't bound to anything -- see tabwidth comments
## def change_tabwidth_event(self, event):
## new = self._asktabwidth()
## if new != self.tabwidth:
## self.tabwidth = new
## self.set_indentation_params(0, guess=0)
## return "break"
def change_indentwidth_event(self, event):
new = self.askinteger(
"Indent width",
"New indent width (2-16)\n(Always use 8 when using tabs)",
parent=self.text,
initialvalue=self.indentwidth,
minvalue=2,
maxvalue=16)
if new and new != self.indentwidth and not self.usetabs:
self.indentwidth = new
return "break"
def get_region(self):
text = self.text
first, last = self.get_selection_indices()
if first and last:
head = text.index(first + " linestart")
tail = text.index(last + "-1c lineend +1c")
else:
head = text.index("insert linestart")
tail = text.index("insert lineend +1c")
chars = text.get(head, tail)
lines = chars.split("\n")
return head, tail, chars, lines
def set_region(self, head, tail, chars, lines):
text = self.text
newchars = "\n".join(lines)
if newchars == chars:
text.bell()
return
text.tag_remove("sel", "1.0", "end")
text.mark_set("insert", head)
text.undo_block_start()
text.delete(head, tail)
text.insert(head, newchars)
text.undo_block_stop()
text.tag_add("sel", head, "insert")
# Make string that displays as n leading blanks.
def _make_blanks(self, n):
if self.usetabs:
ntabs, nspaces = divmod(n, self.tabwidth)
return '\t' * ntabs + ' ' * nspaces
else:
return ' ' * n
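    # Example: with self.tabwidth == 8, _make_blanks(10) returns '\t' plus two
    # spaces when usetabs is true, and ten spaces when it is false.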
# Delete from beginning of line to insert point, then reinsert
# column logical (meaning use tabs if appropriate) spaces.
def reindent_to(self, column):
text = self.text
text.undo_block_start()
if text.compare("insert linestart", "!=", "insert"):
text.delete("insert linestart", "insert")
if column:
text.insert("insert", self._make_blanks(column))
text.undo_block_stop()
def _asktabwidth(self):
return self.askinteger(
"Tab width",
"Columns per tab? (2-16)",
parent=self.text,
initialvalue=self.indentwidth,
minvalue=2,
maxvalue=16)
# Guess indentwidth from text content.
# Return guessed indentwidth. This should not be believed unless
# it's in a reasonable range (e.g., it will be 0 if no indented
# blocks are found).
def guess_indent(self):
opener, indented = IndentSearcher(self.text, self.tabwidth).run()
if opener and indented:
raw, indentsmall = classifyws(opener, self.tabwidth)
raw, indentlarge = classifyws(indented, self.tabwidth)
else:
indentsmall = indentlarge = 0
return indentlarge - indentsmall
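    # Example: if the first block found is "if x:" followed by "    y = 1",
    # indentsmall is 0 and indentlarge is 4, so guess_indent() returns 4.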
# "line.col" -> line, as an int
def index2line(index):
return int(float(index))
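# e.g. index2line("12.4") -> 12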
# Look at the leading whitespace in s.
# Return pair (# of leading ws characters,
# effective # of leading blanks after expanding
# tabs to width tabwidth)
def classifyws(s, tabwidth):
raw = effective = 0
for ch in s:
if ch == ' ':
raw = raw + 1
effective = effective + 1
elif ch == '\t':
raw = raw + 1
effective = (effective // tabwidth + 1) * tabwidth
else:
break
return raw, effective
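# Example: classifyws("\t  x", 8) returns (3, 10): a tab plus two spaces is
# three raw characters and ten effective columns of indentation.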
import tokenize
_tokenize = tokenize
del tokenize
class IndentSearcher(object):
# .run() chews over the Text widget, looking for a block opener
# and the stmt following it. Returns a pair,
# (line containing block opener, line containing stmt)
# Either or both may be None.
def __init__(self, text, tabwidth):
self.text = text
self.tabwidth = tabwidth
self.i = self.finished = 0
self.blkopenline = self.indentedline = None
def readline(self):
if self.finished:
return ""
i = self.i = self.i + 1
mark = repr(i) + ".0"
if self.text.compare(mark, ">=", "end"):
return ""
return self.text.get(mark, mark + " lineend+1c")
def tokeneater(self, type, token, start, end, line,
INDENT=_tokenize.INDENT,
NAME=_tokenize.NAME,
OPENERS=('class', 'def', 'for', 'if', 'try', 'while')):
if self.finished:
pass
elif type == NAME and token in OPENERS:
self.blkopenline = line
elif type == INDENT and self.blkopenline:
self.indentedline = line
self.finished = 1
def run(self):
save_tabsize = _tokenize.tabsize
_tokenize.tabsize = self.tabwidth
try:
try:
_tokenize.tokenize(self.readline, self.tokeneater)
except (_tokenize.TokenError, SyntaxError):
# since we cut off the tokenizer early, we can trigger
# spurious errors
pass
finally:
_tokenize.tabsize = save_tabsize
return self.blkopenline, self.indentedline
### end autoindent code ###
def prepstr(s):
# Helper to extract the underscore from a string, e.g.
# prepstr("Co_py") returns (2, "Copy").
i = s.find('_')
if i >= 0:
s = s[:i] + s[i+1:]
return i, s
keynames = {
'bracketleft': '[',
'bracketright': ']',
'slash': '/',
}
def get_accelerator(keydefs, eventname):
keylist = keydefs.get(eventname)
# issue10940: temporary workaround to prevent hang with OS X Cocoa Tk 8.5
# if not keylist:
if (not keylist) or (macosxSupport.isCocoaTk() and eventname in {
"<<open-module>>",
"<<goto-line>>",
"<<change-indentwidth>>"}):
return ""
s = keylist[0]
s = re.sub(r"-[a-z]\b", lambda m: m.group().upper(), s)
s = re.sub(r"\b\w+\b", lambda m: keynames.get(m.group(), m.group()), s)
s = re.sub("Key-", "", s)
s = re.sub("Cancel","Ctrl-Break",s) # dscherer@cmu.edu
s = re.sub("Control-", "Ctrl-", s)
s = re.sub("-", "+", s)
s = re.sub("><", " ", s)
s = re.sub("<", "", s)
s = re.sub(">", "", s)
return s
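# Example: the key sequence "<Control-Key-z>" is rendered as "Ctrl+Z".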
def fixwordbreaks(root):
# Make sure that Tk's double-click and next/previous word
# operations use our definition of a word (i.e. an identifier)
tk = root.tk
tk.call('tcl_wordBreakAfter', 'a b', 0) # make sure word.tcl is loaded
tk.call('set', 'tcl_wordchars', '[a-zA-Z0-9_]')
tk.call('set', 'tcl_nonwordchars', '[^a-zA-Z0-9_]')
def _editor_window(parent): # htest #
# error if close master window first - timer event, after script
root = parent
fixwordbreaks(root)
if sys.argv[1:]:
filename = sys.argv[1]
else:
filename = None
macosxSupport.setupApp(root, None)
edit = EditorWindow(root=root, filename=filename)
edit.text.bind("<<close-all-windows>>", edit.close_event)
    # Does not stop the error; neither does the following
# edit.text.bind("<<close-window>>", edit.close_event)
if __name__ == '__main__':
from idlelib.idle_test.htest import run
run(_help_dialog, _editor_window)
| sdlBasic/sdlbrt | win32/mingw/opt/lib/python2.7/idlelib/EditorWindow.py | Python | lgpl-2.1 | 66,626 | 0.001816 |
"""Implements a HD44780 character LCD connected via PCF8574 on I2C.
This was tested with: https://www.wemos.cc/product/d1-mini.html"""
from time import sleep_ms, ticks_ms
from machine import I2C, Pin
from esp8266_i2c_lcd import I2cLcd
# The PCF8574 has a jumper selectable address: 0x20 - 0x27
DEFAULT_I2C_ADDR = 0x27
def test_main():
"""Test function for verifying basic functionality."""
print("Running test_main")
i2c = I2C(scl=Pin(5), sda=Pin(4), freq=100000)
lcd = I2cLcd(i2c, DEFAULT_I2C_ADDR, 2, 16)
lcd.putstr("It Works!\nSecond Line")
sleep_ms(3000)
lcd.clear()
count = 0
while True:
lcd.move_to(0, 0)
lcd.putstr("%7d" % (ticks_ms() // 1000))
sleep_ms(1000)
count += 1
if count % 10 == 3:
print("Turning backlight off")
lcd.backlight_off()
if count % 10 == 4:
print("Turning backlight on")
lcd.backlight_on()
if count % 10 == 5:
print("Turning display off")
lcd.display_off()
if count % 10 == 6:
print("Turning display on")
lcd.display_on()
if count % 10 == 7:
print("Turning display & backlight off")
lcd.backlight_off()
lcd.display_off()
if count % 10 == 8:
print("Turning display & backlight on")
lcd.backlight_on()
lcd.display_on()
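# Note: the "__main__" guard below is left commented out, so test_main() runs
# as soon as this file is imported or executed on the board.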
#if __name__ == "__main__":
test_main()
| dhylands/python_lcd | lcd/esp8266_i2c_lcd_test.py | Python | mit | 1,476 | 0.002033 |
from jinja2 import Markup
class momentjs(object):
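    # Wraps a datetime for use in Jinja2 templates: each helper returns a
    # Markup <script> block that formats self.timestamp in the browser with
    # the corresponding moment.js call (format, calendar, fromNow).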
def __init__(self, timestamp):
self.timestamp = timestamp
def render(self, format):
return Markup("<script>\ndocument.write(moment(\"%s\").%s);\n</script>" % (self.timestamp.strftime("%Y-%m-%dT%H:%M:%S Z"), format))
def format(self, fmt):
return self.render("format(\"%s\")" % fmt)
def calendar(self):
return self.render("calendar()")
def fromNow(self):
return self.render("fromNow()")
| mikkqu/rc-chrysalis | scapp/moment.py | Python | bsd-2-clause | 500 | 0.006 |
# -*- coding: utf-8 -*-
"""Parser related functions and classes for testing."""
import heapq
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver as path_spec_resolver
from plaso.containers import sessions
from plaso.engine import knowledge_base
from plaso.formatters import manager as formatters_manager
from plaso.formatters import mediator as formatters_mediator
from plaso.parsers import interface
from plaso.parsers import mediator
from plaso.storage import fake_storage
from tests import test_lib as shared_test_lib
class _EventsHeap(object):
"""Events heap."""
def __init__(self):
"""Initializes an events heap."""
super(_EventsHeap, self).__init__()
self._heap = []
def PopEvent(self):
"""Pops an event from the heap.
Returns:
EventObject: event.
"""
try:
_, _, _, event = heapq.heappop(self._heap)
return event
except IndexError:
return None
def PopEvents(self):
"""Pops events from the heap.
Yields:
EventObject: event.
"""
event = self.PopEvent()
while event:
yield event
event = self.PopEvent()
def PushEvent(self, event):
"""Pushes an event onto the heap.
Args:
event (EventObject): event.
"""
# TODO: replace this work-around for an event "comparable".
event_values = event.CopyToDict()
attributes = []
for attribute_name, attribute_value in sorted(event_values.items()):
if isinstance(attribute_value, dict):
attribute_value = sorted(attribute_value.items())
comparable = u'{0:s}: {1!s}'.format(attribute_name, attribute_value)
attributes.append(comparable)
comparable = u', '.join(attributes)
event_values = sorted(event.CopyToDict().items())
heap_values = (event.timestamp, event.timestamp_desc, comparable, event)
heapq.heappush(self._heap, heap_values)
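    # Heap entries sort on (timestamp, timestamp_desc, comparable), so events
    # with identical timestamps still pop in a deterministic order.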
def PushEvents(self, events):
"""Pushes events onto the heap.
Args:
      events (list[EventObject]): events.
"""
for event in events:
self.PushEvent(event)
class ParserTestCase(shared_test_lib.BaseTestCase):
"""Parser test case."""
def _CreateParserMediator(
self, storage_writer, file_entry=None, knowledge_base_values=None,
parser_chain=None, timezone=u'UTC'):
"""Creates a parser mediator.
Args:
storage_writer (StorageWriter): storage writer.
file_entry (Optional[dfvfs.FileEntry]): file entry object being parsed.
knowledge_base_values (Optional[dict]): knowledge base values.
parser_chain (Optional[str]): parsing chain up to this point.
timezone (str): timezone.
Returns:
ParserMediator: parser mediator.
"""
knowledge_base_object = knowledge_base.KnowledgeBase()
if knowledge_base_values:
for identifier, value in iter(knowledge_base_values.items()):
knowledge_base_object.SetValue(identifier, value)
knowledge_base_object.SetTimezone(timezone)
parser_mediator = mediator.ParserMediator(
storage_writer, knowledge_base_object)
if file_entry:
parser_mediator.SetFileEntry(file_entry)
if parser_chain:
parser_mediator.parser_chain = parser_chain
return parser_mediator
def _CreateStorageWriter(self):
"""Creates a storage writer object.
Returns:
FakeStorageWriter: storage writer.
"""
session = sessions.Session()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
return storage_writer
def _GetSortedEvents(self, events):
"""Retrieves events sorted in a deterministic order.
Args:
events (list[EventObject]): events.
Returns:
list[EventObject]: sorted events.
"""
events_heap = _EventsHeap()
events_heap.PushEvents(events)
return list(events_heap.PopEvents())
def _GetShortMessage(self, message_string):
"""Shortens a message string to a maximum of 80 character width.
Args:
message_string (str): message string.
Returns:
      str: short message string; if it is longer than 80 characters it will
           be shortened to its first 77 characters followed by a "...".
"""
if len(message_string) > 80:
return u'{0:s}...'.format(message_string[0:77])
return message_string
def _ParseFile(
self, path_segments, parser, knowledge_base_values=None,
timezone=u'UTC'):
"""Parses a file with a parser and writes results to a storage writer.
Args:
path_segments (list[str]): path segments inside the test data directory.
parser (BaseParser): parser.
knowledge_base_values (Optional[dict]): knowledge base values.
timezone (str): timezone.
Returns:
FakeStorageWriter: storage writer.
"""
path = self._GetTestFilePath(path_segments)
path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_OS, location=path)
return self._ParseFileByPathSpec(
path_spec, parser, knowledge_base_values=knowledge_base_values,
timezone=timezone)
def _ParseFileByPathSpec(
self, path_spec, parser, knowledge_base_values=None, timezone=u'UTC'):
"""Parses a file with a parser and writes results to a storage writer.
Args:
path_spec (dfvfs.PathSpec): path specification.
parser (BaseParser): parser.
knowledge_base_values (Optional[dict]): knowledge base values.
timezone (str): timezone.
Returns:
FakeStorageWriter: storage writer.
"""
storage_writer = self._CreateStorageWriter()
file_entry = path_spec_resolver.Resolver.OpenFileEntry(path_spec)
parser_mediator = self._CreateParserMediator(
storage_writer,
file_entry=file_entry,
knowledge_base_values=knowledge_base_values,
timezone=timezone)
if isinstance(parser, interface.FileEntryParser):
parser.Parse(parser_mediator)
elif isinstance(parser, interface.FileObjectParser):
file_object = file_entry.GetFileObject()
try:
parser.Parse(parser_mediator, file_object)
finally:
file_object.close()
else:
self.fail(u'Got unsupported parser type: {0:s}'.format(type(parser)))
return storage_writer
def _TestGetMessageStrings(
self, event, expected_message, expected_message_short):
"""Tests the formatting of the message strings.
This function invokes the GetMessageStrings function of the event
formatter on the event object and compares the resulting messages
strings with those expected.
Args:
event (EventObject): event.
expected_message (str): expected message string.
expected_message_short (str): expected short message string.
"""
formatter_mediator = formatters_mediator.FormatterMediator(
data_location=self._DATA_PATH)
message, message_short = (
formatters_manager.FormattersManager.GetMessageStrings(
formatter_mediator, event))
self.assertEqual(message, expected_message)
self.assertEqual(message_short, expected_message_short)
def _TestGetSourceStrings(
self, event, expected_source, expected_source_short):
"""Tests the formatting of the source strings.
This function invokes the GetSourceStrings function of the event
formatter on the event object and compares the resulting source
strings with those expected.
Args:
event (EventObject): event.
expected_source (str): expected source string.
expected_source_short (str): expected short source string.
"""
# TODO: change this to return the long variant first so it is consistent
# with GetMessageStrings.
source_short, source = (
formatters_manager.FormattersManager.GetSourceStrings(event))
self.assertEqual(source, expected_source)
self.assertEqual(source_short, expected_source_short)
def assertDictContains(self, received, expected):
"""Asserts if a dictionary contains every key-value pair as expected.
Recieved can contain new keys. If any value is a dict, this function is
called recursively.
Args:
received (dict): received dictionary.
expected (dict): expected dictionary.
"""
for key, value in expected.items():
self.assertIn(key, received)
if isinstance(value, dict):
self.assertDictEqual(received[key], expected[key])
else:
        self.assertEqual(received[key], value)
| dc3-plaso/plaso | tests/parsers/test_lib.py | Python | apache-2.0 | 8,486 | 0.004949 |
from setuptools import setup, find_packages
setup(name='MODEL1201230000',
version=20140916,
description='MODEL1201230000 from BioModels',
url='http://www.ebi.ac.uk/biomodels-main/MODEL1201230000',
maintainer='Stanley Gu',
maintainer_url='stanleygu@gmail.com',
packages=find_packages(),
package_data={'': ['*.xml', 'README.md']},
)
| biomodels/MODEL1201230000 | setup.py | Python | cc0-1.0 | 377 | 0.005305 |
# -*- coding: utf-8 -*-
"""
pygments.lexers.ncl
~~~~~~~~~~~~~~~~~~~
Lexers for NCAR Command Language.
:copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['NCLLexer']
class NCLLexer(RegexLexer):
"""
Lexer for NCL code.
.. versionadded:: 2.2
"""
name = 'NCL'
aliases = ['ncl']
filenames = ['*.ncl']
mimetypes = ['text/ncl']
flags = re.MULTILINE
tokens = {
'root': [
(r';.*\n', Comment),
include('strings'),
include('core'),
(r'[a-zA-Z_]\w*', Name),
include('nums'),
(r'[\s]+', Text),
],
'core': [
# Statements
(words((
'begin', 'break', 'continue', 'create', 'defaultapp', 'do',
'else', 'end', 'external', 'exit', 'True', 'False', 'file', 'function',
'getvalues', 'graphic', 'group', 'if', 'list', 'load', 'local',
'new', '_Missing', 'Missing', 'noparent', 'procedure',
'quit', 'QUIT', 'Quit', 'record', 'return', 'setvalues', 'stop',
'then', 'while'), prefix=r'\b', suffix=r'\s*\b'),
Keyword),
# Data Types
(words((
'ubyte', 'uint', 'uint64', 'ulong', 'string', 'byte',
'character', 'double', 'float', 'integer', 'int64', 'logical',
'long', 'short', 'ushort', 'enumeric', 'numeric', 'snumeric'),
prefix=r'\b', suffix=r'\s*\b'),
Keyword.Type),
# Operators
(r'[\%^*+\-/<>]', Operator),
# punctuation:
(r'[\[\]():@$!&|.,\\{}]', Punctuation),
(r'[=:]', Punctuation),
# Intrinsics
(words((
'abs', 'acos', 'addfile', 'addfiles', 'all', 'angmom_atm', 'any',
'area_conserve_remap', 'area_hi2lores', 'area_poly_sphere',
'asciiread', 'asciiwrite', 'asin', 'atan', 'atan2', 'attsetvalues',
'avg', 'betainc', 'bin_avg', 'bin_sum', 'bw_bandpass_filter',
'cancor', 'cbinread', 'cbinwrite', 'cd_calendar', 'cd_inv_calendar',
'cdfbin_p', 'cdfbin_pr', 'cdfbin_s', 'cdfbin_xn', 'cdfchi_p',
'cdfchi_x', 'cdfgam_p', 'cdfgam_x', 'cdfnor_p', 'cdfnor_x',
'cdft_p', 'cdft_t', 'ceil', 'center_finite_diff',
'center_finite_diff_n', 'cfftb', 'cfftf', 'cfftf_frq_reorder',
'charactertodouble', 'charactertofloat', 'charactertointeger',
'charactertolong', 'charactertoshort', 'charactertostring',
'chartodouble', 'chartofloat', 'chartoint', 'chartointeger',
'chartolong', 'chartoshort', 'chartostring', 'chiinv', 'clear',
'color_index_to_rgba', 'conform', 'conform_dims', 'cos', 'cosh',
'count_unique_values', 'covcorm', 'covcorm_xy', 'craybinnumrec',
'craybinrecread', 'create_graphic', 'csa1', 'csa1d', 'csa1s',
'csa1x', 'csa1xd', 'csa1xs', 'csa2', 'csa2d', 'csa2l', 'csa2ld',
'csa2ls', 'csa2lx', 'csa2lxd', 'csa2lxs', 'csa2s', 'csa2x',
'csa2xd', 'csa2xs', 'csa3', 'csa3d', 'csa3l', 'csa3ld', 'csa3ls',
'csa3lx', 'csa3lxd', 'csa3lxs', 'csa3s', 'csa3x', 'csa3xd',
'csa3xs', 'csc2s', 'csgetp', 'css2c', 'cssetp', 'cssgrid', 'csstri',
'csvoro', 'cumsum', 'cz2ccm', 'datatondc', 'day_of_week',
'day_of_year', 'days_in_month', 'default_fillvalue', 'delete',
'depth_to_pres', 'destroy', 'determinant', 'dewtemp_trh',
'dgeevx_lapack', 'dim_acumrun_n', 'dim_avg', 'dim_avg_n',
'dim_avg_wgt', 'dim_avg_wgt_n', 'dim_cumsum', 'dim_cumsum_n',
'dim_gamfit_n', 'dim_gbits', 'dim_max', 'dim_max_n', 'dim_median',
'dim_median_n', 'dim_min', 'dim_min_n', 'dim_num', 'dim_num_n',
'dim_numrun_n', 'dim_pqsort', 'dim_pqsort_n', 'dim_product',
'dim_product_n', 'dim_rmsd', 'dim_rmsd_n', 'dim_rmvmean',
'dim_rmvmean_n', 'dim_rmvmed', 'dim_rmvmed_n', 'dim_spi_n',
'dim_standardize', 'dim_standardize_n', 'dim_stat4', 'dim_stat4_n',
'dim_stddev', 'dim_stddev_n', 'dim_sum', 'dim_sum_n', 'dim_sum_wgt',
'dim_sum_wgt_n', 'dim_variance', 'dim_variance_n', 'dimsizes',
'doubletobyte', 'doubletochar', 'doubletocharacter',
'doubletofloat', 'doubletoint', 'doubletointeger', 'doubletolong',
'doubletoshort', 'dpres_hybrid_ccm', 'dpres_plevel', 'draw',
'draw_color_palette', 'dsgetp', 'dsgrid2', 'dsgrid2d', 'dsgrid2s',
'dsgrid3', 'dsgrid3d', 'dsgrid3s', 'dspnt2', 'dspnt2d', 'dspnt2s',
'dspnt3', 'dspnt3d', 'dspnt3s', 'dssetp', 'dtrend', 'dtrend_msg',
'dtrend_msg_n', 'dtrend_n', 'dtrend_quadratic',
'dtrend_quadratic_msg_n', 'dv2uvf', 'dv2uvg', 'dz_height',
'echo_off', 'echo_on', 'eof2data', 'eof_varimax', 'eofcor',
'eofcor_pcmsg', 'eofcor_ts', 'eofcov', 'eofcov_pcmsg', 'eofcov_ts',
'eofunc', 'eofunc_ts', 'eofunc_varimax', 'equiv_sample_size', 'erf',
'erfc', 'esacr', 'esacv', 'esccr', 'esccv', 'escorc', 'escorc_n',
'escovc', 'exit', 'exp', 'exp_tapersh', 'exp_tapersh_wgts',
'exp_tapershC', 'ezfftb', 'ezfftb_n', 'ezfftf', 'ezfftf_n',
'f2fosh', 'f2foshv', 'f2fsh', 'f2fshv', 'f2gsh', 'f2gshv', 'fabs',
'fbindirread', 'fbindirwrite', 'fbinnumrec', 'fbinread',
'fbinrecread', 'fbinrecwrite', 'fbinwrite', 'fft2db', 'fft2df',
'fftshift', 'fileattdef', 'filechunkdimdef', 'filedimdef',
'fileexists', 'filegrpdef', 'filevarattdef', 'filevarchunkdef',
'filevarcompressleveldef', 'filevardef', 'filevardimsizes',
'filwgts_lancos', 'filwgts_lanczos', 'filwgts_normal',
'floattobyte', 'floattochar', 'floattocharacter', 'floattoint',
'floattointeger', 'floattolong', 'floattoshort', 'floor',
'fluxEddy', 'fo2fsh', 'fo2fshv', 'fourier_info', 'frame', 'fspan',
'ftcurv', 'ftcurvd', 'ftcurvi', 'ftcurvp', 'ftcurvpi', 'ftcurvps',
'ftcurvs', 'ftest', 'ftgetp', 'ftkurv', 'ftkurvd', 'ftkurvp',
'ftkurvpd', 'ftsetp', 'ftsurf', 'g2fsh', 'g2fshv', 'g2gsh',
'g2gshv', 'gamma', 'gammainc', 'gaus', 'gaus_lobat',
'gaus_lobat_wgt', 'gc_aangle', 'gc_clkwise', 'gc_dangle',
'gc_inout', 'gc_latlon', 'gc_onarc', 'gc_pnt2gc', 'gc_qarea',
'gc_tarea', 'generate_2d_array', 'get_color_index',
'get_color_rgba', 'get_cpu_time', 'get_isolines', 'get_ncl_version',
'get_script_name', 'get_script_prefix_name', 'get_sphere_radius',
'get_unique_values', 'getbitsone', 'getenv', 'getfiledimsizes',
'getfilegrpnames', 'getfilepath', 'getfilevaratts',
'getfilevarchunkdimsizes', 'getfilevardims', 'getfilevardimsizes',
'getfilevarnames', 'getfilevartypes', 'getvaratts', 'getvardims',
'gradsf', 'gradsg', 'greg2jul', 'grid2triple', 'hlsrgb', 'hsvrgb',
'hydro', 'hyi2hyo', 'idsfft', 'igradsf', 'igradsg', 'ilapsf',
'ilapsg', 'ilapvf', 'ilapvg', 'ind', 'ind_resolve', 'int2p',
'int2p_n', 'integertobyte', 'integertochar', 'integertocharacter',
'integertoshort', 'inttobyte', 'inttochar', 'inttoshort',
'inverse_matrix', 'isatt', 'isbigendian', 'isbyte', 'ischar',
'iscoord', 'isdefined', 'isdim', 'isdimnamed', 'isdouble',
'isenumeric', 'isfile', 'isfilepresent', 'isfilevar',
'isfilevaratt', 'isfilevarcoord', 'isfilevardim', 'isfloat',
'isfunc', 'isgraphic', 'isint', 'isint64', 'isinteger',
'isleapyear', 'islogical', 'islong', 'ismissing', 'isnan_ieee',
'isnumeric', 'ispan', 'isproc', 'isshort', 'issnumeric', 'isstring',
'isubyte', 'isuint', 'isuint64', 'isulong', 'isunlimited',
'isunsigned', 'isushort', 'isvar', 'jul2greg', 'kmeans_as136',
'kolsm2_n', 'kron_product', 'lapsf', 'lapsg', 'lapvf', 'lapvg',
'latlon2utm', 'lclvl', 'lderuvf', 'lderuvg', 'linint1', 'linint1_n',
'linint2', 'linint2_points', 'linmsg', 'linmsg_n', 'linrood_latwgt',
'linrood_wgt', 'list_files', 'list_filevars', 'list_hlus',
'list_procfuncs', 'list_vars', 'ListAppend', 'ListCount',
'ListGetType', 'ListIndex', 'ListIndexFromName', 'ListPop',
'ListPush', 'ListSetType', 'loadscript', 'local_max', 'local_min',
'log', 'log10', 'longtobyte', 'longtochar', 'longtocharacter',
'longtoint', 'longtointeger', 'longtoshort', 'lspoly', 'lspoly_n',
'mask', 'max', 'maxind', 'min', 'minind', 'mixed_layer_depth',
'mixhum_ptd', 'mixhum_ptrh', 'mjo_cross_coh2pha',
'mjo_cross_segment', 'moc_globe_atl', 'monthday', 'natgrid',
'natgridd', 'natgrids', 'ncargpath', 'ncargversion', 'ndctodata',
'ndtooned', 'new', 'NewList', 'ngezlogo', 'nggcog', 'nggetp',
'nglogo', 'ngsetp', 'NhlAddAnnotation', 'NhlAddData',
'NhlAddOverlay', 'NhlAddPrimitive', 'NhlAppGetDefaultParentId',
'NhlChangeWorkstation', 'NhlClassName', 'NhlClearWorkstation',
'NhlDataPolygon', 'NhlDataPolyline', 'NhlDataPolymarker',
'NhlDataToNDC', 'NhlDestroy', 'NhlDraw', 'NhlFrame', 'NhlFreeColor',
'NhlGetBB', 'NhlGetClassResources', 'NhlGetErrorObjectId',
'NhlGetNamedColorIndex', 'NhlGetParentId',
'NhlGetParentWorkstation', 'NhlGetWorkspaceObjectId',
'NhlIsAllocatedColor', 'NhlIsApp', 'NhlIsDataComm', 'NhlIsDataItem',
'NhlIsDataSpec', 'NhlIsTransform', 'NhlIsView', 'NhlIsWorkstation',
'NhlName', 'NhlNDCPolygon', 'NhlNDCPolyline', 'NhlNDCPolymarker',
'NhlNDCToData', 'NhlNewColor', 'NhlNewDashPattern', 'NhlNewMarker',
'NhlPalGetDefined', 'NhlRemoveAnnotation', 'NhlRemoveData',
'NhlRemoveOverlay', 'NhlRemovePrimitive', 'NhlSetColor',
'NhlSetDashPattern', 'NhlSetMarker', 'NhlUpdateData',
'NhlUpdateWorkstation', 'nice_mnmxintvl', 'nngetaspectd',
'nngetaspects', 'nngetp', 'nngetsloped', 'nngetslopes', 'nngetwts',
'nngetwtsd', 'nnpnt', 'nnpntd', 'nnpntend', 'nnpntendd',
'nnpntinit', 'nnpntinitd', 'nnpntinits', 'nnpnts', 'nnsetp', 'num',
'obj_anal_ic', 'omega_ccm', 'onedtond', 'overlay', 'paleo_outline',
'pdfxy_bin', 'poisson_grid_fill', 'pop_remap', 'potmp_insitu_ocn',
'prcwater_dp', 'pres2hybrid', 'pres_hybrid_ccm', 'pres_sigma',
'print', 'print_table', 'printFileVarSummary', 'printVarSummary',
'product', 'pslec', 'pslhor', 'pslhyp', 'qsort', 'rand',
'random_chi', 'random_gamma', 'random_normal', 'random_setallseed',
'random_uniform', 'rcm2points', 'rcm2rgrid', 'rdsstoi',
'read_colormap_file', 'reg_multlin', 'regcoef', 'regCoef_n',
'regline', 'relhum', 'replace_ieeenan', 'reshape', 'reshape_ind',
'rgba_to_color_index', 'rgbhls', 'rgbhsv', 'rgbyiq', 'rgrid2rcm',
'rhomb_trunc', 'rip_cape_2d', 'rip_cape_3d', 'round', 'rtest',
'runave', 'runave_n', 'set_default_fillvalue', 'set_sphere_radius',
'setfileoption', 'sfvp2uvf', 'sfvp2uvg', 'shaec', 'shagc',
'shgetnp', 'shgetp', 'shgrid', 'shorttobyte', 'shorttochar',
'shorttocharacter', 'show_ascii', 'shsec', 'shsetp', 'shsgc',
'shsgc_R42', 'sigma2hybrid', 'simpeq', 'simpne', 'sin',
'sindex_yrmo', 'sinh', 'sizeof', 'sleep', 'smth9', 'snindex_yrmo',
'solve_linsys', 'span_color_indexes', 'span_color_rgba',
'sparse_matrix_mult', 'spcorr', 'spcorr_n', 'specx_anal',
'specxy_anal', 'spei', 'sprintf', 'sprinti', 'sqrt', 'sqsort',
'srand', 'stat2', 'stat4', 'stat_medrng', 'stat_trim',
'status_exit', 'stdatmus_p2tdz', 'stdatmus_z2tdp', 'stddev',
'str_capital', 'str_concat', 'str_fields_count', 'str_get_cols',
'str_get_dq', 'str_get_field', 'str_get_nl', 'str_get_sq',
'str_get_tab', 'str_index_of_substr', 'str_insert', 'str_is_blank',
'str_join', 'str_left_strip', 'str_lower', 'str_match',
'str_match_ic', 'str_match_ic_regex', 'str_match_ind',
'str_match_ind_ic', 'str_match_ind_ic_regex', 'str_match_ind_regex',
'str_match_regex', 'str_right_strip', 'str_split',
'str_split_by_length', 'str_split_csv', 'str_squeeze', 'str_strip',
'str_sub_str', 'str_switch', 'str_upper', 'stringtochar',
'stringtocharacter', 'stringtodouble', 'stringtofloat',
'stringtoint', 'stringtointeger', 'stringtolong', 'stringtoshort',
'strlen', 'student_t', 'sum', 'svd_lapack', 'svdcov', 'svdcov_sv',
'svdstd', 'svdstd_sv', 'system', 'systemfunc', 'tan', 'tanh',
'taper', 'taper_n', 'tdclrs', 'tdctri', 'tdcudp', 'tdcurv',
'tddtri', 'tdez2d', 'tdez3d', 'tdgetp', 'tdgrds', 'tdgrid',
'tdgtrs', 'tdinit', 'tditri', 'tdlbla', 'tdlblp', 'tdlbls',
'tdline', 'tdlndp', 'tdlnpa', 'tdlpdp', 'tdmtri', 'tdotri',
'tdpara', 'tdplch', 'tdprpa', 'tdprpi', 'tdprpt', 'tdsetp',
'tdsort', 'tdstri', 'tdstrs', 'tdttri', 'thornthwaite', 'tobyte',
'tochar', 'todouble', 'tofloat', 'toint', 'toint64', 'tointeger',
'tolong', 'toshort', 'tosigned', 'tostring', 'tostring_with_format',
'totype', 'toubyte', 'touint', 'touint64', 'toulong', 'tounsigned',
'toushort', 'trend_manken', 'tri_trunc', 'triple2grid',
'triple2grid2d', 'trop_wmo', 'ttest', 'typeof', 'undef',
'unique_string', 'update', 'ushorttoint', 'ut_calendar',
'ut_inv_calendar', 'utm2latlon', 'uv2dv_cfd', 'uv2dvf', 'uv2dvg',
'uv2sfvpf', 'uv2sfvpg', 'uv2vr_cfd', 'uv2vrdvf', 'uv2vrdvg',
'uv2vrf', 'uv2vrg', 'v5d_close', 'v5d_create', 'v5d_setLowLev',
'v5d_setUnits', 'v5d_write', 'v5d_write_var', 'variance', 'vhaec',
'vhagc', 'vhsec', 'vhsgc', 'vibeta', 'vinth2p', 'vinth2p_ecmwf',
'vinth2p_ecmwf_nodes', 'vinth2p_nodes', 'vintp2p_ecmwf', 'vr2uvf',
'vr2uvg', 'vrdv2uvf', 'vrdv2uvg', 'wavelet', 'wavelet_default',
'weibull', 'wgt_area_smooth', 'wgt_areaave', 'wgt_areaave2',
'wgt_arearmse', 'wgt_arearmse2', 'wgt_areasum2', 'wgt_runave',
'wgt_runave_n', 'wgt_vert_avg_beta', 'wgt_volave', 'wgt_volave_ccm',
'wgt_volrmse', 'wgt_volrmse_ccm', 'where', 'wk_smooth121', 'wmbarb',
'wmbarbmap', 'wmdrft', 'wmgetp', 'wmlabs', 'wmsetp', 'wmstnm',
'wmvect', 'wmvectmap', 'wmvlbl', 'wrf_avo', 'wrf_cape_2d',
'wrf_cape_3d', 'wrf_dbz', 'wrf_eth', 'wrf_helicity', 'wrf_ij_to_ll',
'wrf_interp_1d', 'wrf_interp_2d_xy', 'wrf_interp_3d_z',
'wrf_latlon_to_ij', 'wrf_ll_to_ij', 'wrf_omega', 'wrf_pvo',
'wrf_rh', 'wrf_slp', 'wrf_smooth_2d', 'wrf_td', 'wrf_tk',
'wrf_updraft_helicity', 'wrf_uvmet', 'wrf_virtual_temp',
'wrf_wetbulb', 'wrf_wps_close_int', 'wrf_wps_open_int',
'wrf_wps_rddata_int', 'wrf_wps_rdhead_int', 'wrf_wps_read_int',
'wrf_wps_write_int', 'write_matrix', 'write_table', 'yiqrgb',
'z2geouv', 'zonal_mpsi', 'addfiles_GetVar', 'advect_variable',
'area_conserve_remap_Wrap', 'area_hi2lores_Wrap',
'array_append_record', 'assignFillValue', 'byte2flt',
'byte2flt_hdf', 'calcDayAnomTLL', 'calcMonAnomLLLT',
'calcMonAnomLLT', 'calcMonAnomTLL', 'calcMonAnomTLLL',
'calculate_monthly_values', 'cd_convert', 'changeCase',
'changeCaseChar', 'clmDayTLL', 'clmDayTLLL', 'clmMon2clmDay',
'clmMonLLLT', 'clmMonLLT', 'clmMonTLL', 'clmMonTLLL', 'closest_val',
'copy_VarAtts', 'copy_VarCoords', 'copy_VarCoords_1',
'copy_VarCoords_2', 'copy_VarMeta', 'copyatt', 'crossp3',
'cshstringtolist', 'cssgrid_Wrap', 'dble2flt', 'decimalPlaces',
'delete_VarAtts', 'dim_avg_n_Wrap', 'dim_avg_wgt_n_Wrap',
'dim_avg_wgt_Wrap', 'dim_avg_Wrap', 'dim_cumsum_n_Wrap',
'dim_cumsum_Wrap', 'dim_max_n_Wrap', 'dim_min_n_Wrap',
'dim_rmsd_n_Wrap', 'dim_rmsd_Wrap', 'dim_rmvmean_n_Wrap',
'dim_rmvmean_Wrap', 'dim_rmvmed_n_Wrap', 'dim_rmvmed_Wrap',
'dim_standardize_n_Wrap', 'dim_standardize_Wrap',
'dim_stddev_n_Wrap', 'dim_stddev_Wrap', 'dim_sum_n_Wrap',
'dim_sum_wgt_n_Wrap', 'dim_sum_wgt_Wrap', 'dim_sum_Wrap',
'dim_variance_n_Wrap', 'dim_variance_Wrap', 'dpres_plevel_Wrap',
'dtrend_leftdim', 'dv2uvF_Wrap', 'dv2uvG_Wrap', 'eof_north',
'eofcor_Wrap', 'eofcov_Wrap', 'eofunc_north', 'eofunc_ts_Wrap',
'eofunc_varimax_reorder', 'eofunc_varimax_Wrap', 'eofunc_Wrap',
'epsZero', 'f2fosh_Wrap', 'f2foshv_Wrap', 'f2fsh_Wrap',
'f2fshv_Wrap', 'f2gsh_Wrap', 'f2gshv_Wrap', 'fbindirSwap',
'fbinseqSwap1', 'fbinseqSwap2', 'flt2dble', 'flt2string',
'fo2fsh_Wrap', 'fo2fshv_Wrap', 'g2fsh_Wrap', 'g2fshv_Wrap',
'g2gsh_Wrap', 'g2gshv_Wrap', 'generate_resample_indices',
'generate_sample_indices', 'generate_unique_indices',
'genNormalDist', 'get1Dindex', 'get1Dindex_Collapse',
'get1Dindex_Exclude', 'get_file_suffix', 'GetFillColor',
'GetFillColorIndex', 'getFillValue', 'getind_latlon2d',
'getVarDimNames', 'getVarFillValue', 'grib_stime2itime',
'hyi2hyo_Wrap', 'ilapsF_Wrap', 'ilapsG_Wrap', 'ind_nearest_coord',
'indStrSubset', 'int2dble', 'int2flt', 'int2p_n_Wrap', 'int2p_Wrap',
'isMonotonic', 'isStrSubset', 'latGau', 'latGauWgt', 'latGlobeF',
'latGlobeFo', 'latRegWgt', 'linint1_n_Wrap', 'linint1_Wrap',
'linint2_points_Wrap', 'linint2_Wrap', 'local_max_1d',
'local_min_1d', 'lonFlip', 'lonGlobeF', 'lonGlobeFo', 'lonPivot',
'merge_levels_sfc', 'mod', 'month_to_annual',
'month_to_annual_weighted', 'month_to_season', 'month_to_season12',
'month_to_seasonN', 'monthly_total_to_daily_mean', 'nameDim',
'natgrid_Wrap', 'NewCosWeight', 'niceLatLon2D', 'NormCosWgtGlobe',
'numAsciiCol', 'numAsciiRow', 'numeric2int',
'obj_anal_ic_deprecated', 'obj_anal_ic_Wrap', 'omega_ccm_driver',
'omega_to_w', 'oneDtostring', 'pack_values', 'pattern_cor', 'pdfx',
'pdfxy', 'pdfxy_conform', 'pot_temp', 'pot_vort_hybrid',
'pot_vort_isobaric', 'pres2hybrid_Wrap', 'print_clock',
'printMinMax', 'quadroots', 'rcm2points_Wrap', 'rcm2rgrid_Wrap',
'readAsciiHead', 'readAsciiTable', 'reg_multlin_stats',
'region_ind', 'regline_stats', 'relhum_ttd', 'replaceSingleChar',
'RGBtoCmap', 'rgrid2rcm_Wrap', 'rho_mwjf', 'rm_single_dims',
'rmAnnCycle1D', 'rmInsufData', 'rmMonAnnCycLLLT', 'rmMonAnnCycLLT',
'rmMonAnnCycTLL', 'runave_n_Wrap', 'runave_Wrap', 'short2flt',
'short2flt_hdf', 'shsgc_R42_Wrap', 'sign_f90', 'sign_matlab',
'smth9_Wrap', 'smthClmDayTLL', 'smthClmDayTLLL', 'SqrtCosWeight',
'stat_dispersion', 'static_stability', 'stdMonLLLT', 'stdMonLLT',
'stdMonTLL', 'stdMonTLLL', 'symMinMaxPlt', 'table_attach_columns',
'table_attach_rows', 'time_to_newtime', 'transpose',
'triple2grid_Wrap', 'ut_convert', 'uv2dvF_Wrap', 'uv2dvG_Wrap',
'uv2vrF_Wrap', 'uv2vrG_Wrap', 'vr2uvF_Wrap', 'vr2uvG_Wrap',
'w_to_omega', 'wallClockElapseTime', 'wave_number_spc',
'wgt_areaave_Wrap', 'wgt_runave_leftdim', 'wgt_runave_n_Wrap',
'wgt_runave_Wrap', 'wgt_vertical_n', 'wind_component',
'wind_direction', 'yyyyddd_to_yyyymmdd', 'yyyymm_time',
'yyyymm_to_yyyyfrac', 'yyyymmdd_time', 'yyyymmdd_to_yyyyddd',
'yyyymmdd_to_yyyyfrac', 'yyyymmddhh_time', 'yyyymmddhh_to_yyyyfrac',
'zonal_mpsi_Wrap', 'zonalAve', 'calendar_decode2', 'cd_string',
'kf_filter', 'run_cor', 'time_axis_labels', 'ut_string',
'wrf_contour', 'wrf_map', 'wrf_map_overlay', 'wrf_map_overlays',
'wrf_map_resources', 'wrf_map_zoom', 'wrf_overlay', 'wrf_overlays',
'wrf_user_getvar', 'wrf_user_ij_to_ll', 'wrf_user_intrp2d',
'wrf_user_intrp3d', 'wrf_user_latlon_to_ij', 'wrf_user_list_times',
'wrf_user_ll_to_ij', 'wrf_user_unstagger', 'wrf_user_vert_interp',
'wrf_vector', 'gsn_add_annotation', 'gsn_add_polygon',
'gsn_add_polyline', 'gsn_add_polymarker',
'gsn_add_shapefile_polygons', 'gsn_add_shapefile_polylines',
'gsn_add_shapefile_polymarkers', 'gsn_add_text', 'gsn_attach_plots',
'gsn_blank_plot', 'gsn_contour', 'gsn_contour_map',
'gsn_contour_shade', 'gsn_coordinates', 'gsn_create_labelbar',
'gsn_create_legend', 'gsn_create_text',
'gsn_csm_attach_zonal_means', 'gsn_csm_blank_plot',
'gsn_csm_contour', 'gsn_csm_contour_map', 'gsn_csm_contour_map_ce',
'gsn_csm_contour_map_overlay', 'gsn_csm_contour_map_polar',
'gsn_csm_hov', 'gsn_csm_lat_time', 'gsn_csm_map', 'gsn_csm_map_ce',
'gsn_csm_map_polar', 'gsn_csm_pres_hgt',
'gsn_csm_pres_hgt_streamline', 'gsn_csm_pres_hgt_vector',
'gsn_csm_streamline', 'gsn_csm_streamline_contour_map',
'gsn_csm_streamline_contour_map_ce',
'gsn_csm_streamline_contour_map_polar', 'gsn_csm_streamline_map',
'gsn_csm_streamline_map_ce', 'gsn_csm_streamline_map_polar',
'gsn_csm_streamline_scalar', 'gsn_csm_streamline_scalar_map',
'gsn_csm_streamline_scalar_map_ce',
'gsn_csm_streamline_scalar_map_polar', 'gsn_csm_time_lat',
'gsn_csm_vector', 'gsn_csm_vector_map', 'gsn_csm_vector_map_ce',
'gsn_csm_vector_map_polar', 'gsn_csm_vector_scalar',
'gsn_csm_vector_scalar_map', 'gsn_csm_vector_scalar_map_ce',
'gsn_csm_vector_scalar_map_polar', 'gsn_csm_x2y', 'gsn_csm_x2y2',
'gsn_csm_xy', 'gsn_csm_xy2', 'gsn_csm_xy3', 'gsn_csm_y',
'gsn_define_colormap', 'gsn_draw_colormap', 'gsn_draw_named_colors',
'gsn_histogram', 'gsn_labelbar_ndc', 'gsn_legend_ndc', 'gsn_map',
'gsn_merge_colormaps', 'gsn_open_wks', 'gsn_panel', 'gsn_polygon',
'gsn_polygon_ndc', 'gsn_polyline', 'gsn_polyline_ndc',
'gsn_polymarker', 'gsn_polymarker_ndc', 'gsn_retrieve_colormap',
'gsn_reverse_colormap', 'gsn_streamline', 'gsn_streamline_map',
'gsn_streamline_scalar', 'gsn_streamline_scalar_map', 'gsn_table',
'gsn_text', 'gsn_text_ndc', 'gsn_vector', 'gsn_vector_map',
'gsn_vector_scalar', 'gsn_vector_scalar_map', 'gsn_xy', 'gsn_y',
'hsv2rgb', 'maximize_output', 'namedcolor2rgb', 'namedcolor2rgba',
'reset_device_coordinates', 'span_named_colors'), prefix=r'\b'),
Name.Builtin),
# Resources
(words((
'amDataXF', 'amDataYF', 'amJust', 'amOn', 'amOrthogonalPosF',
'amParallelPosF', 'amResizeNotify', 'amSide', 'amTrackData',
'amViewId', 'amZone', 'appDefaultParent', 'appFileSuffix',
'appResources', 'appSysDir', 'appUsrDir', 'caCopyArrays',
'caXArray', 'caXCast', 'caXMaxV', 'caXMinV', 'caXMissingV',
'caYArray', 'caYCast', 'caYMaxV', 'caYMinV', 'caYMissingV',
'cnCellFillEdgeColor', 'cnCellFillMissingValEdgeColor',
'cnConpackParams', 'cnConstFEnableFill', 'cnConstFLabelAngleF',
'cnConstFLabelBackgroundColor', 'cnConstFLabelConstantSpacingF',
'cnConstFLabelFont', 'cnConstFLabelFontAspectF',
'cnConstFLabelFontColor', 'cnConstFLabelFontHeightF',
'cnConstFLabelFontQuality', 'cnConstFLabelFontThicknessF',
'cnConstFLabelFormat', 'cnConstFLabelFuncCode', 'cnConstFLabelJust',
'cnConstFLabelOn', 'cnConstFLabelOrthogonalPosF',
'cnConstFLabelParallelPosF', 'cnConstFLabelPerimColor',
'cnConstFLabelPerimOn', 'cnConstFLabelPerimSpaceF',
'cnConstFLabelPerimThicknessF', 'cnConstFLabelSide',
'cnConstFLabelString', 'cnConstFLabelTextDirection',
'cnConstFLabelZone', 'cnConstFUseInfoLabelRes',
'cnExplicitLabelBarLabelsOn', 'cnExplicitLegendLabelsOn',
'cnExplicitLineLabelsOn', 'cnFillBackgroundColor', 'cnFillColor',
'cnFillColors', 'cnFillDotSizeF', 'cnFillDrawOrder', 'cnFillMode',
'cnFillOn', 'cnFillOpacityF', 'cnFillPalette', 'cnFillPattern',
'cnFillPatterns', 'cnFillScaleF', 'cnFillScales', 'cnFixFillBleed',
'cnGridBoundFillColor', 'cnGridBoundFillPattern',
'cnGridBoundFillScaleF', 'cnGridBoundPerimColor',
'cnGridBoundPerimDashPattern', 'cnGridBoundPerimOn',
'cnGridBoundPerimThicknessF', 'cnHighLabelAngleF',
'cnHighLabelBackgroundColor', 'cnHighLabelConstantSpacingF',
'cnHighLabelCount', 'cnHighLabelFont', 'cnHighLabelFontAspectF',
'cnHighLabelFontColor', 'cnHighLabelFontHeightF',
'cnHighLabelFontQuality', 'cnHighLabelFontThicknessF',
'cnHighLabelFormat', 'cnHighLabelFuncCode', 'cnHighLabelPerimColor',
'cnHighLabelPerimOn', 'cnHighLabelPerimSpaceF',
'cnHighLabelPerimThicknessF', 'cnHighLabelString', 'cnHighLabelsOn',
'cnHighLowLabelOverlapMode', 'cnHighUseLineLabelRes',
'cnInfoLabelAngleF', 'cnInfoLabelBackgroundColor',
'cnInfoLabelConstantSpacingF', 'cnInfoLabelFont',
'cnInfoLabelFontAspectF', 'cnInfoLabelFontColor',
'cnInfoLabelFontHeightF', 'cnInfoLabelFontQuality',
'cnInfoLabelFontThicknessF', 'cnInfoLabelFormat',
'cnInfoLabelFuncCode', 'cnInfoLabelJust', 'cnInfoLabelOn',
'cnInfoLabelOrthogonalPosF', 'cnInfoLabelParallelPosF',
'cnInfoLabelPerimColor', 'cnInfoLabelPerimOn',
'cnInfoLabelPerimSpaceF', 'cnInfoLabelPerimThicknessF',
'cnInfoLabelSide', 'cnInfoLabelString', 'cnInfoLabelTextDirection',
'cnInfoLabelZone', 'cnLabelBarEndLabelsOn', 'cnLabelBarEndStyle',
'cnLabelDrawOrder', 'cnLabelMasking', 'cnLabelScaleFactorF',
'cnLabelScaleValueF', 'cnLabelScalingMode', 'cnLegendLevelFlags',
'cnLevelCount', 'cnLevelFlag', 'cnLevelFlags', 'cnLevelSelectionMode',
'cnLevelSpacingF', 'cnLevels', 'cnLineColor', 'cnLineColors',
'cnLineDashPattern', 'cnLineDashPatterns', 'cnLineDashSegLenF',
'cnLineDrawOrder', 'cnLineLabelAngleF', 'cnLineLabelBackgroundColor',
'cnLineLabelConstantSpacingF', 'cnLineLabelCount',
'cnLineLabelDensityF', 'cnLineLabelFont', 'cnLineLabelFontAspectF',
'cnLineLabelFontColor', 'cnLineLabelFontColors',
'cnLineLabelFontHeightF', 'cnLineLabelFontQuality',
'cnLineLabelFontThicknessF', 'cnLineLabelFormat',
'cnLineLabelFuncCode', 'cnLineLabelInterval', 'cnLineLabelPerimColor',
'cnLineLabelPerimOn', 'cnLineLabelPerimSpaceF',
'cnLineLabelPerimThicknessF', 'cnLineLabelPlacementMode',
'cnLineLabelStrings', 'cnLineLabelsOn', 'cnLinePalette',
'cnLineThicknessF', 'cnLineThicknesses', 'cnLinesOn',
'cnLowLabelAngleF', 'cnLowLabelBackgroundColor',
'cnLowLabelConstantSpacingF', 'cnLowLabelCount', 'cnLowLabelFont',
'cnLowLabelFontAspectF', 'cnLowLabelFontColor',
'cnLowLabelFontHeightF', 'cnLowLabelFontQuality',
'cnLowLabelFontThicknessF', 'cnLowLabelFormat', 'cnLowLabelFuncCode',
'cnLowLabelPerimColor', 'cnLowLabelPerimOn', 'cnLowLabelPerimSpaceF',
'cnLowLabelPerimThicknessF', 'cnLowLabelString', 'cnLowLabelsOn',
'cnLowUseHighLabelRes', 'cnMaxDataValueFormat', 'cnMaxLevelCount',
'cnMaxLevelValF', 'cnMaxPointDistanceF', 'cnMinLevelValF',
'cnMissingValFillColor', 'cnMissingValFillPattern',
'cnMissingValFillScaleF', 'cnMissingValPerimColor',
'cnMissingValPerimDashPattern', 'cnMissingValPerimGridBoundOn',
'cnMissingValPerimOn', 'cnMissingValPerimThicknessF',
'cnMonoFillColor', 'cnMonoFillPattern', 'cnMonoFillScale',
'cnMonoLevelFlag', 'cnMonoLineColor', 'cnMonoLineDashPattern',
'cnMonoLineLabelFontColor', 'cnMonoLineThickness', 'cnNoDataLabelOn',
'cnNoDataLabelString', 'cnOutOfRangeFillColor',
'cnOutOfRangeFillPattern', 'cnOutOfRangeFillScaleF',
'cnOutOfRangePerimColor', 'cnOutOfRangePerimDashPattern',
'cnOutOfRangePerimOn', 'cnOutOfRangePerimThicknessF',
'cnRasterCellSizeF', 'cnRasterMinCellSizeF', 'cnRasterModeOn',
'cnRasterSampleFactorF', 'cnRasterSmoothingOn', 'cnScalarFieldData',
'cnSmoothingDistanceF', 'cnSmoothingOn', 'cnSmoothingTensionF',
'cnSpanFillPalette', 'cnSpanLinePalette', 'ctCopyTables',
'ctXElementSize', 'ctXMaxV', 'ctXMinV', 'ctXMissingV', 'ctXTable',
'ctXTableLengths', 'ctXTableType', 'ctYElementSize', 'ctYMaxV',
'ctYMinV', 'ctYMissingV', 'ctYTable', 'ctYTableLengths',
'ctYTableType', 'dcDelayCompute', 'errBuffer',
'errFileName', 'errFilePtr', 'errLevel', 'errPrint', 'errUnitNumber',
'gsClipOn', 'gsColors', 'gsEdgeColor', 'gsEdgeDashPattern',
'gsEdgeDashSegLenF', 'gsEdgeThicknessF', 'gsEdgesOn',
'gsFillBackgroundColor', 'gsFillColor', 'gsFillDotSizeF',
'gsFillIndex', 'gsFillLineThicknessF', 'gsFillOpacityF',
'gsFillScaleF', 'gsFont', 'gsFontAspectF', 'gsFontColor',
'gsFontHeightF', 'gsFontOpacityF', 'gsFontQuality',
'gsFontThicknessF', 'gsLineColor', 'gsLineDashPattern',
'gsLineDashSegLenF', 'gsLineLabelConstantSpacingF', 'gsLineLabelFont',
'gsLineLabelFontAspectF', 'gsLineLabelFontColor',
'gsLineLabelFontHeightF', 'gsLineLabelFontQuality',
'gsLineLabelFontThicknessF', 'gsLineLabelFuncCode',
'gsLineLabelString', 'gsLineOpacityF', 'gsLineThicknessF',
'gsMarkerColor', 'gsMarkerIndex', 'gsMarkerOpacityF', 'gsMarkerSizeF',
'gsMarkerThicknessF', 'gsSegments', 'gsTextAngleF',
'gsTextConstantSpacingF', 'gsTextDirection', 'gsTextFuncCode',
'gsTextJustification', 'gsnAboveYRefLineBarColors',
'gsnAboveYRefLineBarFillScales', 'gsnAboveYRefLineBarPatterns',
'gsnAboveYRefLineColor', 'gsnAddCyclic', 'gsnAttachBorderOn',
'gsnAttachPlotsXAxis', 'gsnBelowYRefLineBarColors',
'gsnBelowYRefLineBarFillScales', 'gsnBelowYRefLineBarPatterns',
'gsnBelowYRefLineColor', 'gsnBoxMargin', 'gsnCenterString',
'gsnCenterStringFontColor', 'gsnCenterStringFontHeightF',
'gsnCenterStringFuncCode', 'gsnCenterStringOrthogonalPosF',
'gsnCenterStringParallelPosF', 'gsnContourLineThicknessesScale',
'gsnContourNegLineDashPattern', 'gsnContourPosLineDashPattern',
'gsnContourZeroLineThicknessF', 'gsnDebugWriteFileName', 'gsnDraw',
'gsnFrame', 'gsnHistogramBarWidthPercent', 'gsnHistogramBinIntervals',
'gsnHistogramBinMissing', 'gsnHistogramBinWidth',
'gsnHistogramClassIntervals', 'gsnHistogramCompare',
'gsnHistogramComputePercentages',
'gsnHistogramComputePercentagesNoMissing',
'gsnHistogramDiscreteBinValues', 'gsnHistogramDiscreteClassValues',
'gsnHistogramHorizontal', 'gsnHistogramMinMaxBinsOn',
'gsnHistogramNumberOfBins', 'gsnHistogramPercentSign',
'gsnHistogramSelectNiceIntervals', 'gsnLeftString',
'gsnLeftStringFontColor', 'gsnLeftStringFontHeightF',
'gsnLeftStringFuncCode', 'gsnLeftStringOrthogonalPosF',
'gsnLeftStringParallelPosF', 'gsnMajorLatSpacing',
'gsnMajorLonSpacing', 'gsnMaskLambertConformal',
'gsnMaskLambertConformalOutlineOn', 'gsnMaximize',
'gsnMinorLatSpacing', 'gsnMinorLonSpacing', 'gsnPanelBottom',
'gsnPanelCenter', 'gsnPanelDebug', 'gsnPanelFigureStrings',
'gsnPanelFigureStringsBackgroundFillColor',
'gsnPanelFigureStringsFontHeightF', 'gsnPanelFigureStringsJust',
'gsnPanelFigureStringsPerimOn', 'gsnPanelLabelBar', 'gsnPanelLeft',
'gsnPanelMainFont', 'gsnPanelMainFontColor',
'gsnPanelMainFontHeightF', 'gsnPanelMainString', 'gsnPanelRight',
'gsnPanelRowSpec', 'gsnPanelScalePlotIndex', 'gsnPanelTop',
'gsnPanelXF', 'gsnPanelXWhiteSpacePercent', 'gsnPanelYF',
'gsnPanelYWhiteSpacePercent', 'gsnPaperHeight', 'gsnPaperMargin',
'gsnPaperOrientation', 'gsnPaperWidth', 'gsnPolar',
'gsnPolarLabelDistance', 'gsnPolarLabelFont',
'gsnPolarLabelFontHeightF', 'gsnPolarLabelSpacing', 'gsnPolarTime',
'gsnPolarUT', 'gsnRightString', 'gsnRightStringFontColor',
'gsnRightStringFontHeightF', 'gsnRightStringFuncCode',
'gsnRightStringOrthogonalPosF', 'gsnRightStringParallelPosF',
'gsnScalarContour', 'gsnScale', 'gsnShape', 'gsnSpreadColorEnd',
'gsnSpreadColorStart', 'gsnSpreadColors', 'gsnStringFont',
'gsnStringFontColor', 'gsnStringFontHeightF', 'gsnStringFuncCode',
'gsnTickMarksOn', 'gsnXAxisIrregular2Linear', 'gsnXAxisIrregular2Log',
'gsnXRefLine', 'gsnXRefLineColor', 'gsnXRefLineDashPattern',
'gsnXRefLineThicknessF', 'gsnXYAboveFillColors', 'gsnXYBarChart',
'gsnXYBarChartBarWidth', 'gsnXYBarChartColors',
'gsnXYBarChartColors2', 'gsnXYBarChartFillDotSizeF',
'gsnXYBarChartFillLineThicknessF', 'gsnXYBarChartFillOpacityF',
'gsnXYBarChartFillScaleF', 'gsnXYBarChartOutlineOnly',
'gsnXYBarChartOutlineThicknessF', 'gsnXYBarChartPatterns',
'gsnXYBarChartPatterns2', 'gsnXYBelowFillColors', 'gsnXYFillColors',
'gsnXYFillOpacities', 'gsnXYLeftFillColors', 'gsnXYRightFillColors',
'gsnYAxisIrregular2Linear', 'gsnYAxisIrregular2Log', 'gsnYRefLine',
'gsnYRefLineColor', 'gsnYRefLineColors', 'gsnYRefLineDashPattern',
'gsnYRefLineDashPatterns', 'gsnYRefLineThicknessF',
'gsnYRefLineThicknesses', 'gsnZonalMean', 'gsnZonalMeanXMaxF',
'gsnZonalMeanXMinF', 'gsnZonalMeanYRefLine', 'lbAutoManage',
'lbBottomMarginF', 'lbBoxCount', 'lbBoxEndCapStyle', 'lbBoxFractions',
'lbBoxLineColor', 'lbBoxLineDashPattern', 'lbBoxLineDashSegLenF',
'lbBoxLineThicknessF', 'lbBoxLinesOn', 'lbBoxMajorExtentF',
'lbBoxMinorExtentF', 'lbBoxSeparatorLinesOn', 'lbBoxSizing',
'lbFillBackground', 'lbFillColor', 'lbFillColors', 'lbFillDotSizeF',
'lbFillLineThicknessF', 'lbFillPattern', 'lbFillPatterns',
'lbFillScaleF', 'lbFillScales', 'lbJustification', 'lbLabelAlignment',
'lbLabelAngleF', 'lbLabelAutoStride', 'lbLabelBarOn',
'lbLabelConstantSpacingF', 'lbLabelDirection', 'lbLabelFont',
'lbLabelFontAspectF', 'lbLabelFontColor', 'lbLabelFontHeightF',
'lbLabelFontQuality', 'lbLabelFontThicknessF', 'lbLabelFuncCode',
'lbLabelJust', 'lbLabelOffsetF', 'lbLabelPosition', 'lbLabelStride',
'lbLabelStrings', 'lbLabelsOn', 'lbLeftMarginF', 'lbMaxLabelLenF',
'lbMinLabelSpacingF', 'lbMonoFillColor', 'lbMonoFillPattern',
'lbMonoFillScale', 'lbOrientation', 'lbPerimColor',
'lbPerimDashPattern', 'lbPerimDashSegLenF', 'lbPerimFill',
'lbPerimFillColor', 'lbPerimOn', 'lbPerimThicknessF',
'lbRasterFillOn', 'lbRightMarginF', 'lbTitleAngleF',
'lbTitleConstantSpacingF', 'lbTitleDirection', 'lbTitleExtentF',
'lbTitleFont', 'lbTitleFontAspectF', 'lbTitleFontColor',
'lbTitleFontHeightF', 'lbTitleFontQuality', 'lbTitleFontThicknessF',
'lbTitleFuncCode', 'lbTitleJust', 'lbTitleOffsetF', 'lbTitleOn',
'lbTitlePosition', 'lbTitleString', 'lbTopMarginF', 'lgAutoManage',
'lgBottomMarginF', 'lgBoxBackground', 'lgBoxLineColor',
'lgBoxLineDashPattern', 'lgBoxLineDashSegLenF', 'lgBoxLineThicknessF',
'lgBoxLinesOn', 'lgBoxMajorExtentF', 'lgBoxMinorExtentF',
'lgDashIndex', 'lgDashIndexes', 'lgItemCount', 'lgItemOrder',
'lgItemPlacement', 'lgItemPositions', 'lgItemType', 'lgItemTypes',
'lgJustification', 'lgLabelAlignment', 'lgLabelAngleF',
'lgLabelAutoStride', 'lgLabelConstantSpacingF', 'lgLabelDirection',
'lgLabelFont', 'lgLabelFontAspectF', 'lgLabelFontColor',
'lgLabelFontHeightF', 'lgLabelFontQuality', 'lgLabelFontThicknessF',
'lgLabelFuncCode', 'lgLabelJust', 'lgLabelOffsetF', 'lgLabelPosition',
'lgLabelStride', 'lgLabelStrings', 'lgLabelsOn', 'lgLeftMarginF',
'lgLegendOn', 'lgLineColor', 'lgLineColors', 'lgLineDashSegLenF',
'lgLineDashSegLens', 'lgLineLabelConstantSpacingF', 'lgLineLabelFont',
'lgLineLabelFontAspectF', 'lgLineLabelFontColor',
'lgLineLabelFontColors', 'lgLineLabelFontHeightF',
'lgLineLabelFontHeights', 'lgLineLabelFontQuality',
'lgLineLabelFontThicknessF', 'lgLineLabelFuncCode',
'lgLineLabelStrings', 'lgLineLabelsOn', 'lgLineThicknessF',
'lgLineThicknesses', 'lgMarkerColor', 'lgMarkerColors',
'lgMarkerIndex', 'lgMarkerIndexes', 'lgMarkerSizeF', 'lgMarkerSizes',
'lgMarkerThicknessF', 'lgMarkerThicknesses', 'lgMonoDashIndex',
'lgMonoItemType', 'lgMonoLineColor', 'lgMonoLineDashSegLen',
'lgMonoLineLabelFontColor', 'lgMonoLineLabelFontHeight',
'lgMonoLineThickness', 'lgMonoMarkerColor', 'lgMonoMarkerIndex',
'lgMonoMarkerSize', 'lgMonoMarkerThickness', 'lgOrientation',
'lgPerimColor', 'lgPerimDashPattern', 'lgPerimDashSegLenF',
'lgPerimFill', 'lgPerimFillColor', 'lgPerimOn', 'lgPerimThicknessF',
'lgRightMarginF', 'lgTitleAngleF', 'lgTitleConstantSpacingF',
'lgTitleDirection', 'lgTitleExtentF', 'lgTitleFont',
'lgTitleFontAspectF', 'lgTitleFontColor', 'lgTitleFontHeightF',
'lgTitleFontQuality', 'lgTitleFontThicknessF', 'lgTitleFuncCode',
'lgTitleJust', 'lgTitleOffsetF', 'lgTitleOn', 'lgTitlePosition',
'lgTitleString', 'lgTopMarginF', 'mpAreaGroupCount',
'mpAreaMaskingOn', 'mpAreaNames', 'mpAreaTypes', 'mpBottomAngleF',
'mpBottomMapPosF', 'mpBottomNDCF', 'mpBottomNPCF',
'mpBottomPointLatF', 'mpBottomPointLonF', 'mpBottomWindowF',
'mpCenterLatF', 'mpCenterLonF', 'mpCenterRotF', 'mpCountyLineColor',
'mpCountyLineDashPattern', 'mpCountyLineDashSegLenF',
'mpCountyLineThicknessF', 'mpDataBaseVersion', 'mpDataResolution',
'mpDataSetName', 'mpDefaultFillColor', 'mpDefaultFillPattern',
'mpDefaultFillScaleF', 'mpDynamicAreaGroups', 'mpEllipticalBoundary',
'mpFillAreaSpecifiers', 'mpFillBoundarySets', 'mpFillColor',
'mpFillColors', 'mpFillColors-default', 'mpFillDotSizeF',
'mpFillDrawOrder', 'mpFillOn', 'mpFillPatternBackground',
'mpFillPattern', 'mpFillPatterns', 'mpFillPatterns-default',
'mpFillScaleF', 'mpFillScales', 'mpFillScales-default',
'mpFixedAreaGroups', 'mpGeophysicalLineColor',
'mpGeophysicalLineDashPattern', 'mpGeophysicalLineDashSegLenF',
'mpGeophysicalLineThicknessF', 'mpGreatCircleLinesOn',
'mpGridAndLimbDrawOrder', 'mpGridAndLimbOn', 'mpGridLatSpacingF',
'mpGridLineColor', 'mpGridLineDashPattern', 'mpGridLineDashSegLenF',
'mpGridLineThicknessF', 'mpGridLonSpacingF', 'mpGridMaskMode',
'mpGridMaxLatF', 'mpGridPolarLonSpacingF', 'mpGridSpacingF',
'mpInlandWaterFillColor', 'mpInlandWaterFillPattern',
'mpInlandWaterFillScaleF', 'mpLabelDrawOrder', 'mpLabelFontColor',
'mpLabelFontHeightF', 'mpLabelsOn', 'mpLambertMeridianF',
'mpLambertParallel1F', 'mpLambertParallel2F', 'mpLandFillColor',
'mpLandFillPattern', 'mpLandFillScaleF', 'mpLeftAngleF',
'mpLeftCornerLatF', 'mpLeftCornerLonF', 'mpLeftMapPosF',
'mpLeftNDCF', 'mpLeftNPCF', 'mpLeftPointLatF',
'mpLeftPointLonF', 'mpLeftWindowF', 'mpLimbLineColor',
'mpLimbLineDashPattern', 'mpLimbLineDashSegLenF',
'mpLimbLineThicknessF', 'mpLimitMode', 'mpMaskAreaSpecifiers',
'mpMaskOutlineSpecifiers', 'mpMaxLatF', 'mpMaxLonF',
'mpMinLatF', 'mpMinLonF', 'mpMonoFillColor', 'mpMonoFillPattern',
'mpMonoFillScale', 'mpNationalLineColor', 'mpNationalLineDashPattern',
'mpNationalLineThicknessF', 'mpOceanFillColor', 'mpOceanFillPattern',
'mpOceanFillScaleF', 'mpOutlineBoundarySets', 'mpOutlineDrawOrder',
'mpOutlineMaskingOn', 'mpOutlineOn', 'mpOutlineSpecifiers',
'mpPerimDrawOrder', 'mpPerimLineColor', 'mpPerimLineDashPattern',
'mpPerimLineDashSegLenF', 'mpPerimLineThicknessF', 'mpPerimOn',
'mpPolyMode', 'mpProjection', 'mpProvincialLineColor',
'mpProvincialLineDashPattern', 'mpProvincialLineDashSegLenF',
'mpProvincialLineThicknessF', 'mpRelativeCenterLat',
'mpRelativeCenterLon', 'mpRightAngleF', 'mpRightCornerLatF',
'mpRightCornerLonF', 'mpRightMapPosF', 'mpRightNDCF',
'mpRightNPCF', 'mpRightPointLatF', 'mpRightPointLonF',
'mpRightWindowF', 'mpSatelliteAngle1F', 'mpSatelliteAngle2F',
'mpSatelliteDistF', 'mpShapeMode', 'mpSpecifiedFillColors',
'mpSpecifiedFillDirectIndexing', 'mpSpecifiedFillPatterns',
'mpSpecifiedFillPriority', 'mpSpecifiedFillScales',
'mpTopAngleF', 'mpTopMapPosF', 'mpTopNDCF', 'mpTopNPCF',
'mpTopPointLatF', 'mpTopPointLonF', 'mpTopWindowF',
'mpUSStateLineColor', 'mpUSStateLineDashPattern',
'mpUSStateLineDashSegLenF', 'mpUSStateLineThicknessF',
'pmAnnoManagers', 'pmAnnoViews', 'pmLabelBarDisplayMode',
'pmLabelBarHeightF', 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
'pmLabelBarParallelPosF', 'pmLabelBarSide', 'pmLabelBarWidthF',
'pmLabelBarZone', 'pmLegendDisplayMode', 'pmLegendHeightF',
'pmLegendKeepAspect', 'pmLegendOrthogonalPosF',
'pmLegendParallelPosF', 'pmLegendSide', 'pmLegendWidthF',
'pmLegendZone', 'pmOverlaySequenceIds', 'pmTickMarkDisplayMode',
'pmTickMarkZone', 'pmTitleDisplayMode', 'pmTitleZone',
'prGraphicStyle', 'prPolyType', 'prXArray', 'prYArray',
'sfCopyData', 'sfDataArray', 'sfDataMaxV', 'sfDataMinV',
'sfElementNodes', 'sfExchangeDimensions', 'sfFirstNodeIndex',
'sfMissingValueV', 'sfXArray', 'sfXCActualEndF', 'sfXCActualStartF',
'sfXCEndIndex', 'sfXCEndSubsetV', 'sfXCEndV', 'sfXCStartIndex',
'sfXCStartSubsetV', 'sfXCStartV', 'sfXCStride', 'sfXCellBounds',
'sfYArray', 'sfYCActualEndF', 'sfYCActualStartF', 'sfYCEndIndex',
'sfYCEndSubsetV', 'sfYCEndV', 'sfYCStartIndex', 'sfYCStartSubsetV',
'sfYCStartV', 'sfYCStride', 'sfYCellBounds', 'stArrowLengthF',
'stArrowStride', 'stCrossoverCheckCount',
'stExplicitLabelBarLabelsOn', 'stLabelBarEndLabelsOn',
'stLabelFormat', 'stLengthCheckCount', 'stLevelColors',
'stLevelCount', 'stLevelPalette', 'stLevelSelectionMode',
'stLevelSpacingF', 'stLevels', 'stLineColor', 'stLineOpacityF',
'stLineStartStride', 'stLineThicknessF', 'stMapDirection',
'stMaxLevelCount', 'stMaxLevelValF', 'stMinArrowSpacingF',
'stMinDistanceF', 'stMinLevelValF', 'stMinLineSpacingF',
'stMinStepFactorF', 'stMonoLineColor', 'stNoDataLabelOn',
'stNoDataLabelString', 'stScalarFieldData', 'stScalarMissingValColor',
'stSpanLevelPalette', 'stStepSizeF', 'stStreamlineDrawOrder',
'stUseScalarArray', 'stVectorFieldData', 'stZeroFLabelAngleF',
'stZeroFLabelBackgroundColor', 'stZeroFLabelConstantSpacingF',
'stZeroFLabelFont', 'stZeroFLabelFontAspectF',
'stZeroFLabelFontColor', 'stZeroFLabelFontHeightF',
'stZeroFLabelFontQuality', 'stZeroFLabelFontThicknessF',
'stZeroFLabelFuncCode', 'stZeroFLabelJust', 'stZeroFLabelOn',
'stZeroFLabelOrthogonalPosF', 'stZeroFLabelParallelPosF',
'stZeroFLabelPerimColor', 'stZeroFLabelPerimOn',
'stZeroFLabelPerimSpaceF', 'stZeroFLabelPerimThicknessF',
'stZeroFLabelSide', 'stZeroFLabelString', 'stZeroFLabelTextDirection',
'stZeroFLabelZone', 'tfDoNDCOverlay', 'tfPlotManagerOn',
'tfPolyDrawList', 'tfPolyDrawOrder', 'tiDeltaF', 'tiMainAngleF',
'tiMainConstantSpacingF', 'tiMainDirection', 'tiMainFont',
'tiMainFontAspectF', 'tiMainFontColor', 'tiMainFontHeightF',
'tiMainFontQuality', 'tiMainFontThicknessF', 'tiMainFuncCode',
'tiMainJust', 'tiMainOffsetXF', 'tiMainOffsetYF', 'tiMainOn',
'tiMainPosition', 'tiMainSide', 'tiMainString', 'tiUseMainAttributes',
'tiXAxisAngleF', 'tiXAxisConstantSpacingF', 'tiXAxisDirection',
'tiXAxisFont', 'tiXAxisFontAspectF', 'tiXAxisFontColor',
'tiXAxisFontHeightF', 'tiXAxisFontQuality', 'tiXAxisFontThicknessF',
'tiXAxisFuncCode', 'tiXAxisJust', 'tiXAxisOffsetXF',
'tiXAxisOffsetYF', 'tiXAxisOn', 'tiXAxisPosition', 'tiXAxisSide',
'tiXAxisString', 'tiYAxisAngleF', 'tiYAxisConstantSpacingF',
'tiYAxisDirection', 'tiYAxisFont', 'tiYAxisFontAspectF',
'tiYAxisFontColor', 'tiYAxisFontHeightF', 'tiYAxisFontQuality',
'tiYAxisFontThicknessF', 'tiYAxisFuncCode', 'tiYAxisJust',
'tiYAxisOffsetXF', 'tiYAxisOffsetYF', 'tiYAxisOn', 'tiYAxisPosition',
'tiYAxisSide', 'tiYAxisString', 'tmBorderLineColor',
'tmBorderThicknessF', 'tmEqualizeXYSizes', 'tmLabelAutoStride',
'tmSciNoteCutoff', 'tmXBAutoPrecision', 'tmXBBorderOn',
'tmXBDataLeftF', 'tmXBDataRightF', 'tmXBFormat', 'tmXBIrrTensionF',
'tmXBIrregularPoints', 'tmXBLabelAngleF', 'tmXBLabelConstantSpacingF',
'tmXBLabelDeltaF', 'tmXBLabelDirection', 'tmXBLabelFont',
'tmXBLabelFontAspectF', 'tmXBLabelFontColor', 'tmXBLabelFontHeightF',
'tmXBLabelFontQuality', 'tmXBLabelFontThicknessF',
'tmXBLabelFuncCode', 'tmXBLabelJust', 'tmXBLabelStride', 'tmXBLabels',
'tmXBLabelsOn', 'tmXBMajorLengthF', 'tmXBMajorLineColor',
'tmXBMajorOutwardLengthF', 'tmXBMajorThicknessF', 'tmXBMaxLabelLenF',
'tmXBMaxTicks', 'tmXBMinLabelSpacingF', 'tmXBMinorLengthF',
'tmXBMinorLineColor', 'tmXBMinorOn', 'tmXBMinorOutwardLengthF',
'tmXBMinorPerMajor', 'tmXBMinorThicknessF', 'tmXBMinorValues',
'tmXBMode', 'tmXBOn', 'tmXBPrecision', 'tmXBStyle', 'tmXBTickEndF',
'tmXBTickSpacingF', 'tmXBTickStartF', 'tmXBValues', 'tmXMajorGrid',
'tmXMajorGridLineColor', 'tmXMajorGridLineDashPattern',
'tmXMajorGridThicknessF', 'tmXMinorGrid', 'tmXMinorGridLineColor',
'tmXMinorGridLineDashPattern', 'tmXMinorGridThicknessF',
'tmXTAutoPrecision', 'tmXTBorderOn', 'tmXTDataLeftF',
'tmXTDataRightF', 'tmXTFormat', 'tmXTIrrTensionF',
'tmXTIrregularPoints', 'tmXTLabelAngleF', 'tmXTLabelConstantSpacingF',
'tmXTLabelDeltaF', 'tmXTLabelDirection', 'tmXTLabelFont',
'tmXTLabelFontAspectF', 'tmXTLabelFontColor', 'tmXTLabelFontHeightF',
'tmXTLabelFontQuality', 'tmXTLabelFontThicknessF',
'tmXTLabelFuncCode', 'tmXTLabelJust', 'tmXTLabelStride', 'tmXTLabels',
'tmXTLabelsOn', 'tmXTMajorLengthF', 'tmXTMajorLineColor',
'tmXTMajorOutwardLengthF', 'tmXTMajorThicknessF', 'tmXTMaxLabelLenF',
'tmXTMaxTicks', 'tmXTMinLabelSpacingF', 'tmXTMinorLengthF',
'tmXTMinorLineColor', 'tmXTMinorOn', 'tmXTMinorOutwardLengthF',
'tmXTMinorPerMajor', 'tmXTMinorThicknessF', 'tmXTMinorValues',
'tmXTMode', 'tmXTOn', 'tmXTPrecision', 'tmXTStyle', 'tmXTTickEndF',
'tmXTTickSpacingF', 'tmXTTickStartF', 'tmXTValues', 'tmXUseBottom',
'tmYLAutoPrecision', 'tmYLBorderOn', 'tmYLDataBottomF',
'tmYLDataTopF', 'tmYLFormat', 'tmYLIrrTensionF',
'tmYLIrregularPoints', 'tmYLLabelAngleF', 'tmYLLabelConstantSpacingF',
'tmYLLabelDeltaF', 'tmYLLabelDirection', 'tmYLLabelFont',
'tmYLLabelFontAspectF', 'tmYLLabelFontColor', 'tmYLLabelFontHeightF',
'tmYLLabelFontQuality', 'tmYLLabelFontThicknessF',
'tmYLLabelFuncCode', 'tmYLLabelJust', 'tmYLLabelStride', 'tmYLLabels',
'tmYLLabelsOn', 'tmYLMajorLengthF', 'tmYLMajorLineColor',
'tmYLMajorOutwardLengthF', 'tmYLMajorThicknessF', 'tmYLMaxLabelLenF',
'tmYLMaxTicks', 'tmYLMinLabelSpacingF', 'tmYLMinorLengthF',
'tmYLMinorLineColor', 'tmYLMinorOn', 'tmYLMinorOutwardLengthF',
'tmYLMinorPerMajor', 'tmYLMinorThicknessF', 'tmYLMinorValues',
'tmYLMode', 'tmYLOn', 'tmYLPrecision', 'tmYLStyle', 'tmYLTickEndF',
'tmYLTickSpacingF', 'tmYLTickStartF', 'tmYLValues', 'tmYMajorGrid',
'tmYMajorGridLineColor', 'tmYMajorGridLineDashPattern',
'tmYMajorGridThicknessF', 'tmYMinorGrid', 'tmYMinorGridLineColor',
'tmYMinorGridLineDashPattern', 'tmYMinorGridThicknessF',
'tmYRAutoPrecision', 'tmYRBorderOn', 'tmYRDataBottomF',
'tmYRDataTopF', 'tmYRFormat', 'tmYRIrrTensionF',
'tmYRIrregularPoints', 'tmYRLabelAngleF', 'tmYRLabelConstantSpacingF',
'tmYRLabelDeltaF', 'tmYRLabelDirection', 'tmYRLabelFont',
'tmYRLabelFontAspectF', 'tmYRLabelFontColor', 'tmYRLabelFontHeightF',
'tmYRLabelFontQuality', 'tmYRLabelFontThicknessF',
'tmYRLabelFuncCode', 'tmYRLabelJust', 'tmYRLabelStride', 'tmYRLabels',
'tmYRLabelsOn', 'tmYRMajorLengthF', 'tmYRMajorLineColor',
'tmYRMajorOutwardLengthF', 'tmYRMajorThicknessF', 'tmYRMaxLabelLenF',
'tmYRMaxTicks', 'tmYRMinLabelSpacingF', 'tmYRMinorLengthF',
'tmYRMinorLineColor', 'tmYRMinorOn', 'tmYRMinorOutwardLengthF',
'tmYRMinorPerMajor', 'tmYRMinorThicknessF', 'tmYRMinorValues',
'tmYRMode', 'tmYROn', 'tmYRPrecision', 'tmYRStyle', 'tmYRTickEndF',
'tmYRTickSpacingF', 'tmYRTickStartF', 'tmYRValues', 'tmYUseLeft',
'trGridType', 'trLineInterpolationOn',
'trXAxisType', 'trXCoordPoints', 'trXInterPoints', 'trXLog',
'trXMaxF', 'trXMinF', 'trXReverse', 'trXSamples', 'trXTensionF',
'trYAxisType', 'trYCoordPoints', 'trYInterPoints', 'trYLog',
'trYMaxF', 'trYMinF', 'trYReverse', 'trYSamples', 'trYTensionF',
'txAngleF', 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
'txFont', 'HLU-Fonts', 'txFontAspectF', 'txFontColor',
'txFontHeightF', 'txFontOpacityF', 'txFontQuality',
'txFontThicknessF', 'txFuncCode', 'txJust', 'txPerimColor',
'txPerimDashLengthF', 'txPerimDashPattern', 'txPerimOn',
'txPerimSpaceF', 'txPerimThicknessF', 'txPosXF', 'txPosYF',
'txString', 'vcExplicitLabelBarLabelsOn', 'vcFillArrowEdgeColor',
'vcFillArrowEdgeThicknessF', 'vcFillArrowFillColor',
'vcFillArrowHeadInteriorXF', 'vcFillArrowHeadMinFracXF',
'vcFillArrowHeadMinFracYF', 'vcFillArrowHeadXF', 'vcFillArrowHeadYF',
'vcFillArrowMinFracWidthF', 'vcFillArrowWidthF', 'vcFillArrowsOn',
'vcFillOverEdge', 'vcGlyphOpacityF', 'vcGlyphStyle',
'vcLabelBarEndLabelsOn', 'vcLabelFontColor', 'vcLabelFontHeightF',
'vcLabelsOn', 'vcLabelsUseVectorColor', 'vcLevelColors',
'vcLevelCount', 'vcLevelPalette', 'vcLevelSelectionMode',
'vcLevelSpacingF', 'vcLevels', 'vcLineArrowColor',
'vcLineArrowHeadMaxSizeF', 'vcLineArrowHeadMinSizeF',
'vcLineArrowThicknessF', 'vcMagnitudeFormat',
'vcMagnitudeScaleFactorF', 'vcMagnitudeScaleValueF',
'vcMagnitudeScalingMode', 'vcMapDirection', 'vcMaxLevelCount',
'vcMaxLevelValF', 'vcMaxMagnitudeF', 'vcMinAnnoAngleF',
'vcMinAnnoArrowAngleF', 'vcMinAnnoArrowEdgeColor',
'vcMinAnnoArrowFillColor', 'vcMinAnnoArrowLineColor',
'vcMinAnnoArrowMinOffsetF', 'vcMinAnnoArrowSpaceF',
'vcMinAnnoArrowUseVecColor', 'vcMinAnnoBackgroundColor',
'vcMinAnnoConstantSpacingF', 'vcMinAnnoExplicitMagnitudeF',
'vcMinAnnoFont', 'vcMinAnnoFontAspectF', 'vcMinAnnoFontColor',
'vcMinAnnoFontHeightF', 'vcMinAnnoFontQuality',
'vcMinAnnoFontThicknessF', 'vcMinAnnoFuncCode', 'vcMinAnnoJust',
'vcMinAnnoOn', 'vcMinAnnoOrientation', 'vcMinAnnoOrthogonalPosF',
'vcMinAnnoParallelPosF', 'vcMinAnnoPerimColor', 'vcMinAnnoPerimOn',
'vcMinAnnoPerimSpaceF', 'vcMinAnnoPerimThicknessF', 'vcMinAnnoSide',
'vcMinAnnoString1', 'vcMinAnnoString1On', 'vcMinAnnoString2',
'vcMinAnnoString2On', 'vcMinAnnoTextDirection', 'vcMinAnnoZone',
'vcMinDistanceF', 'vcMinFracLengthF', 'vcMinLevelValF',
'vcMinMagnitudeF', 'vcMonoFillArrowEdgeColor',
'vcMonoFillArrowFillColor', 'vcMonoLineArrowColor',
'vcMonoWindBarbColor', 'vcNoDataLabelOn', 'vcNoDataLabelString',
'vcPositionMode', 'vcRefAnnoAngleF', 'vcRefAnnoArrowAngleF',
'vcRefAnnoArrowEdgeColor', 'vcRefAnnoArrowFillColor',
'vcRefAnnoArrowLineColor', 'vcRefAnnoArrowMinOffsetF',
'vcRefAnnoArrowSpaceF', 'vcRefAnnoArrowUseVecColor',
'vcRefAnnoBackgroundColor', 'vcRefAnnoConstantSpacingF',
'vcRefAnnoExplicitMagnitudeF', 'vcRefAnnoFont',
'vcRefAnnoFontAspectF', 'vcRefAnnoFontColor', 'vcRefAnnoFontHeightF',
'vcRefAnnoFontQuality', 'vcRefAnnoFontThicknessF',
'vcRefAnnoFuncCode', 'vcRefAnnoJust', 'vcRefAnnoOn',
'vcRefAnnoOrientation', 'vcRefAnnoOrthogonalPosF',
'vcRefAnnoParallelPosF', 'vcRefAnnoPerimColor', 'vcRefAnnoPerimOn',
'vcRefAnnoPerimSpaceF', 'vcRefAnnoPerimThicknessF', 'vcRefAnnoSide',
'vcRefAnnoString1', 'vcRefAnnoString1On', 'vcRefAnnoString2',
'vcRefAnnoString2On', 'vcRefAnnoTextDirection', 'vcRefAnnoZone',
'vcRefLengthF', 'vcRefMagnitudeF', 'vcScalarFieldData',
'vcScalarMissingValColor', 'vcScalarValueFormat',
'vcScalarValueScaleFactorF', 'vcScalarValueScaleValueF',
'vcScalarValueScalingMode', 'vcSpanLevelPalette', 'vcUseRefAnnoRes',
'vcUseScalarArray', 'vcVectorDrawOrder', 'vcVectorFieldData',
'vcWindBarbCalmCircleSizeF', 'vcWindBarbColor',
'vcWindBarbLineThicknessF', 'vcWindBarbScaleFactorF',
'vcWindBarbTickAngleF', 'vcWindBarbTickLengthF',
'vcWindBarbTickSpacingF', 'vcZeroFLabelAngleF',
'vcZeroFLabelBackgroundColor', 'vcZeroFLabelConstantSpacingF',
'vcZeroFLabelFont', 'vcZeroFLabelFontAspectF',
'vcZeroFLabelFontColor', 'vcZeroFLabelFontHeightF',
'vcZeroFLabelFontQuality', 'vcZeroFLabelFontThicknessF',
'vcZeroFLabelFuncCode', 'vcZeroFLabelJust', 'vcZeroFLabelOn',
'vcZeroFLabelOrthogonalPosF', 'vcZeroFLabelParallelPosF',
'vcZeroFLabelPerimColor', 'vcZeroFLabelPerimOn',
'vcZeroFLabelPerimSpaceF', 'vcZeroFLabelPerimThicknessF',
'vcZeroFLabelSide', 'vcZeroFLabelString', 'vcZeroFLabelTextDirection',
'vcZeroFLabelZone', 'vfCopyData', 'vfDataArray',
'vfExchangeDimensions', 'vfExchangeUVData', 'vfMagMaxV', 'vfMagMinV',
'vfMissingUValueV', 'vfMissingVValueV', 'vfPolarData',
'vfSingleMissingValue', 'vfUDataArray', 'vfUMaxV', 'vfUMinV',
'vfVDataArray', 'vfVMaxV', 'vfVMinV', 'vfXArray', 'vfXCActualEndF',
'vfXCActualStartF', 'vfXCEndIndex', 'vfXCEndSubsetV', 'vfXCEndV',
'vfXCStartIndex', 'vfXCStartSubsetV', 'vfXCStartV', 'vfXCStride',
'vfYArray', 'vfYCActualEndF', 'vfYCActualStartF', 'vfYCEndIndex',
'vfYCEndSubsetV', 'vfYCEndV', 'vfYCStartIndex', 'vfYCStartSubsetV',
'vfYCStartV', 'vfYCStride', 'vpAnnoManagerId', 'vpClipOn',
'vpHeightF', 'vpKeepAspect', 'vpOn', 'vpUseSegments', 'vpWidthF',
'vpXF', 'vpYF', 'wkAntiAlias', 'wkBackgroundColor', 'wkBackgroundOpacityF',
'wkColorMapLen', 'wkColorMap', 'wkColorModel', 'wkDashTableLength',
'wkDefGraphicStyleId', 'wkDeviceLowerX', 'wkDeviceLowerY',
'wkDeviceUpperX', 'wkDeviceUpperY', 'wkFileName', 'wkFillTableLength',
'wkForegroundColor', 'wkFormat', 'wkFullBackground', 'wkGksWorkId',
'wkHeight', 'wkMarkerTableLength', 'wkMetaName', 'wkOrientation',
'wkPDFFileName', 'wkPDFFormat', 'wkPDFResolution', 'wkPSFileName',
'wkPSFormat', 'wkPSResolution', 'wkPaperHeightF', 'wkPaperSize',
'wkPaperWidthF', 'wkPause', 'wkTopLevelViews', 'wkViews',
'wkVisualType', 'wkWidth', 'wkWindowId', 'wkXColorMode', 'wsCurrentSize',
'wsMaximumSize', 'wsThresholdSize', 'xyComputeXMax',
'xyComputeXMin', 'xyComputeYMax', 'xyComputeYMin', 'xyCoordData',
'xyCoordDataSpec', 'xyCurveDrawOrder', 'xyDashPattern',
'xyDashPatterns', 'xyExplicitLabels', 'xyExplicitLegendLabels',
'xyLabelMode', 'xyLineColor', 'xyLineColors', 'xyLineDashSegLenF',
'xyLineLabelConstantSpacingF', 'xyLineLabelFont',
'xyLineLabelFontAspectF', 'xyLineLabelFontColor',
'xyLineLabelFontColors', 'xyLineLabelFontHeightF',
'xyLineLabelFontQuality', 'xyLineLabelFontThicknessF',
'xyLineLabelFuncCode', 'xyLineThicknessF', 'xyLineThicknesses',
'xyMarkLineMode', 'xyMarkLineModes', 'xyMarker', 'xyMarkerColor',
'xyMarkerColors', 'xyMarkerSizeF', 'xyMarkerSizes',
'xyMarkerThicknessF', 'xyMarkerThicknesses', 'xyMarkers',
'xyMonoDashPattern', 'xyMonoLineColor', 'xyMonoLineLabelFontColor',
'xyMonoLineThickness', 'xyMonoMarkLineMode', 'xyMonoMarker',
'xyMonoMarkerColor', 'xyMonoMarkerSize', 'xyMonoMarkerThickness',
'xyXIrrTensionF', 'xyXIrregularPoints', 'xyXStyle', 'xyYIrrTensionF',
'xyYIrregularPoints', 'xyYStyle'), prefix=r'\b'),
Name.Builtin),
# Booleans
(r'\.(True|False)\.', Name.Builtin),
# Comparing Operators
(r'\.(eq|ne|lt|le|gt|ge|not|and|or|xor)\.', Operator.Word),
],
'strings': [
(r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
],
'nums': [
(r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
(r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
(r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
],
}
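# --- illustrative usage sketch (added; not part of the original lexer) ---
# A minimal example of running this lexer through Pygments' highlight().
# The class name NCLLexer is assumed to be the lexer defined earlier in this
# module, and the NCL snippet is a placeholder.
def _example_highlight_ncl():
    from pygments import highlight
    from pygments.formatters import NullFormatter
    code = 'x = 1.5\nif (x .gt. 1.0) then\n  print(x)\nend if\n'
    return highlight(code, NCLLexer(), NullFormatter())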
|
wakatime/wakatime
|
wakatime/packages/py27/pygments/lexers/ncl.py
|
Python
|
bsd-3-clause
| 63,986 | 0.004095 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides the tools used to internally run the astropy test suite
from the installed astropy. It makes use of the `pytest` testing framework.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import functools
import os
import sys
import types
import warnings
import pytest
from ..extern import six
from ..extern.six.moves import cPickle as pickle
try:
# Import pkg_resources to prevent it from issuing warnings upon being
# imported from within py.test. See
# https://github.com/astropy/astropy/pull/537 for a detailed explanation.
import pkg_resources # pylint: disable=W0611
except ImportError:
pass
from ..utils.exceptions import (AstropyDeprecationWarning,
AstropyPendingDeprecationWarning)
# For backward-compatibility with affiliated packages
from .runner import TestRunner # pylint: disable=W0611
__all__ = ['raises', 'enable_deprecations_as_exceptions', 'remote_data',
'treat_deprecations_as_exceptions', 'catch_warnings',
'assert_follows_unicode_guidelines', 'quantity_allclose',
'assert_quantity_allclose', 'check_pickling_recovery',
'pickle_protocol', 'generic_recursive_equality_test']
# pytest marker to mark tests which get data from the web
remote_data = pytest.mark.remote_data
# This is for Python 2.x and 3.x compatibility. distutils expects
# options to all be byte strings on Python 2 and Unicode strings on
# Python 3.
def _fix_user_options(options):
def to_str_or_none(x):
if x is None:
return None
return str(x)
return [tuple(to_str_or_none(x) for x in y) for y in options]
def _save_coverage(cov, result, rootdir, testing_path):
"""
This method is called after the tests have been run in coverage mode
    to clean up and then save the coverage data and report.
"""
from ..utils.console import color_print
if result != 0:
return
# The coverage report includes the full path to the temporary
# directory, so we replace all the paths with the true source
# path. Note that this will not work properly for packages that still
# rely on 2to3.
try:
# Coverage 4.0: _harvest_data has been renamed to get_data, the
# lines dict is private
cov.get_data()
except AttributeError:
# Coverage < 4.0
cov._harvest_data()
lines = cov.data.lines
else:
lines = cov.data._lines
for key in list(lines.keys()):
new_path = os.path.relpath(
os.path.realpath(key),
os.path.realpath(testing_path))
new_path = os.path.abspath(
os.path.join(rootdir, new_path))
lines[new_path] = lines.pop(key)
color_print('Saving coverage data in .coverage...', 'green')
cov.save()
color_print('Saving HTML coverage report in htmlcov...', 'green')
cov.html_report(directory=os.path.join(rootdir, 'htmlcov'))
class raises(object):
"""
A decorator to mark that a test should raise a given exception.
Use as follows::
@raises(ZeroDivisionError)
def test_foo():
x = 1/0
    This can also be used as a context manager, in which case it is just
    an alias for the ``pytest.raises`` context manager (because the
    two have the same name, this helps avoid confusion by being
    flexible).
"""
# pep-8 naming exception -- this is a decorator class
def __init__(self, exc):
self._exc = exc
self._ctx = None
def __call__(self, func):
@functools.wraps(func)
def run_raises_test(*args, **kwargs):
pytest.raises(self._exc, func, *args, **kwargs)
return run_raises_test
def __enter__(self):
self._ctx = pytest.raises(self._exc)
return self._ctx.__enter__()
def __exit__(self, *exc_info):
return self._ctx.__exit__(*exc_info)
_deprecations_as_exceptions = False
_include_astropy_deprecations = True
_modules_to_ignore_on_import = set([
'compiler', # A deprecated stdlib module used by py.test
'scipy',
'pygments',
'ipykernel',
'setuptools'])
_warnings_to_ignore_entire_module = set([])
_warnings_to_ignore_by_pyver = {
(3, 4): set([
# py.test reads files with the 'U' flag, which is now
# deprecated in Python 3.4.
r"'U' mode is deprecated",
        # BeautifulSoup4 triggers a warning in stdlib's html module.
r"The strict argument and mode are deprecated\.",
r"The value of convert_charrefs will become True in 3\.5\. "
r"You are encouraged to set the value explicitly\."]),
(3, 5): set([
# py.test raised this warning in inspect on Python 3.5.
# See https://github.com/pytest-dev/pytest/pull/1009
# Keeping it since e.g. lxml as of 3.8.0 is still calling getargspec()
r"inspect\.getargspec\(\) is deprecated, use "
r"inspect\.signature\(\) instead"]),
(3, 6): set([
# inspect raises this slightly different warning on Python 3.6.
# Keeping it since e.g. lxml as of 3.8.0 is still calling getargspec()
r"inspect\.getargspec\(\) is deprecated, use "
r"inspect\.signature\(\) or inspect\.getfullargspec\(\)"])}
def enable_deprecations_as_exceptions(include_astropy_deprecations=True,
modules_to_ignore_on_import=[],
warnings_to_ignore_entire_module=[],
warnings_to_ignore_by_pyver={}):
"""
Turn on the feature that turns deprecations into exceptions.
Parameters
----------
include_astropy_deprecations : bool
If set to `True`, ``AstropyDeprecationWarning`` and
``AstropyPendingDeprecationWarning`` are also turned into exceptions.
modules_to_ignore_on_import : list of str
List of additional modules that generate deprecation warnings
on import, which are to be ignored. By default, these are already
included: ``compiler``, ``scipy``, ``pygments``, ``ipykernel``, and
``setuptools``.
warnings_to_ignore_entire_module : list of str
List of modules with deprecation warnings to ignore completely,
not just during import. If ``include_astropy_deprecations=True``
is given, ``AstropyDeprecationWarning`` and
``AstropyPendingDeprecationWarning`` are also ignored for the modules.
warnings_to_ignore_by_pyver : dict
Dictionary mapping tuple of ``(major, minor)`` Python version to
a list of deprecation warning messages to ignore. This is in
addition of those already ignored by default
(see ``_warnings_to_ignore_by_pyver`` values).
"""
global _deprecations_as_exceptions
_deprecations_as_exceptions = True
global _include_astropy_deprecations
_include_astropy_deprecations = include_astropy_deprecations
global _modules_to_ignore_on_import
_modules_to_ignore_on_import.update(modules_to_ignore_on_import)
global _warnings_to_ignore_entire_module
_warnings_to_ignore_entire_module.update(warnings_to_ignore_entire_module)
global _warnings_to_ignore_by_pyver
for key, val in six.iteritems(warnings_to_ignore_by_pyver):
if key in _warnings_to_ignore_by_pyver:
_warnings_to_ignore_by_pyver[key].update(val)
else:
_warnings_to_ignore_by_pyver[key] = set(val)
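# --- illustrative usage sketch (added; not part of the original module) ---
# A typical call from a package's test configuration (e.g. its conftest.py);
# the ignored module name and warning message below are hypothetical.
def _example_enable_deprecations_as_exceptions():
    enable_deprecations_as_exceptions(
        include_astropy_deprecations=True,
        modules_to_ignore_on_import=['matplotlib'],
        warnings_to_ignore_by_pyver={
            (3, 5): ['some third-party deprecation message']})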
def treat_deprecations_as_exceptions():
"""
Turn all DeprecationWarnings (which indicate deprecated uses of
Python itself or Numpy, but not within Astropy, where we use our
own deprecation warning class) into exceptions so that we find
out about them early.
This completely resets the warning filters and any "already seen"
warning state.
"""
# First, totally reset the warning state. The modules may change during
# this iteration thus we copy the original state to a list to iterate
# on. See https://github.com/astropy/astropy/pull/5513.
for module in list(six.itervalues(sys.modules)):
# We don't want to deal with six.MovedModules, only "real"
# modules.
if (isinstance(module, types.ModuleType) and
hasattr(module, '__warningregistry__')):
del module.__warningregistry__
if not _deprecations_as_exceptions:
return
warnings.resetwarnings()
# Hide the next couple of DeprecationWarnings
warnings.simplefilter('ignore', DeprecationWarning)
# Here's the wrinkle: a couple of our third-party dependencies
# (py.test and scipy) are still using deprecated features
# themselves, and we'd like to ignore those. Fortunately, those
# show up only at import time, so if we import those things *now*,
# before we turn the warnings into exceptions, we're golden.
for m in _modules_to_ignore_on_import:
try:
__import__(m)
except ImportError:
pass
# Now, start over again with the warning filters
warnings.resetwarnings()
# Now, turn DeprecationWarnings into exceptions
_all_warns = [DeprecationWarning]
# Only turn astropy deprecation warnings into exceptions if requested
if _include_astropy_deprecations:
_all_warns += [AstropyDeprecationWarning,
AstropyPendingDeprecationWarning]
for w in _all_warns:
warnings.filterwarnings("error", ".*", w)
# This ignores all deprecation warnings from given module(s),
# not just on import, for use of Astropy affiliated packages.
for m in _warnings_to_ignore_entire_module:
for w in _all_warns:
warnings.filterwarnings('ignore', category=w, module=m)
for v in _warnings_to_ignore_by_pyver:
if sys.version_info[:2] >= v:
for s in _warnings_to_ignore_by_pyver[v]:
warnings.filterwarnings("ignore", s, DeprecationWarning)
class catch_warnings(warnings.catch_warnings):
"""
A high-powered version of warnings.catch_warnings to use for testing
and to make sure that there is no dependence on the order in which
the tests are run.
This completely blitzes any memory of any warnings that have
appeared before so that all warnings will be caught and displayed.
``*args`` is a set of warning classes to collect. If no arguments are
provided, all warnings are collected.
Use as follows::
with catch_warnings(MyCustomWarning) as w:
do.something.bad()
assert len(w) > 0
"""
def __init__(self, *classes):
super(catch_warnings, self).__init__(record=True)
self.classes = classes
def __enter__(self):
warning_list = super(catch_warnings, self).__enter__()
treat_deprecations_as_exceptions()
if len(self.classes) == 0:
warnings.simplefilter('always')
else:
warnings.simplefilter('ignore')
for cls in self.classes:
warnings.simplefilter('always', cls)
return warning_list
def __exit__(self, type, value, traceback):
treat_deprecations_as_exceptions()
class ignore_warnings(catch_warnings):
"""
This can be used either as a context manager or function decorator to
ignore all warnings that occur within a function or block of code.
An optional category option can be supplied to only ignore warnings of a
certain category or categories (if a list is provided).
"""
def __init__(self, category=None):
super(ignore_warnings, self).__init__()
if isinstance(category, type) and issubclass(category, Warning):
self.category = [category]
else:
self.category = category
def __call__(self, func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
# Originally this just reused self, but that doesn't work if the
# function is called more than once so we need to make a new
# context manager instance for each call
with self.__class__(category=self.category):
return func(*args, **kwargs)
return wrapper
def __enter__(self):
retval = super(ignore_warnings, self).__enter__()
if self.category is not None:
for category in self.category:
warnings.simplefilter('ignore', category)
else:
warnings.simplefilter('ignore')
return retval
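# --- illustrative usage sketch (added; not part of the original module) ---
# ignore_warnings can be applied either as a decorator or as a context
# manager; _legacy_call is a stand-in for code that emits a warning.
def _example_ignore_warnings_usage():
    def _legacy_call():
        warnings.warn("old API", DeprecationWarning)
    @ignore_warnings(DeprecationWarning)
    def decorated():
        _legacy_call()
    decorated()
    with ignore_warnings(category=DeprecationWarning):
        _legacy_call()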
def assert_follows_unicode_guidelines(
x, roundtrip=None):
"""
Test that an object follows our Unicode policy. See
"Unicode guidelines" in the coding guidelines.
Parameters
----------
x : object
The instance to test
roundtrip : module, optional
When provided, this namespace will be used to evaluate
``repr(x)`` and ensure that it roundtrips. It will also
ensure that ``__bytes__(x)`` and ``__unicode__(x)`` roundtrip.
If not provided, no roundtrip testing will be performed.
"""
from .. import conf
from ..extern import six
with conf.set_temp('unicode_output', False):
bytes_x = bytes(x)
unicode_x = six.text_type(x)
repr_x = repr(x)
assert isinstance(bytes_x, bytes)
bytes_x.decode('ascii')
assert isinstance(unicode_x, six.text_type)
unicode_x.encode('ascii')
assert isinstance(repr_x, six.string_types)
if isinstance(repr_x, bytes):
repr_x.decode('ascii')
else:
repr_x.encode('ascii')
if roundtrip is not None:
assert x.__class__(bytes_x) == x
assert x.__class__(unicode_x) == x
assert eval(repr_x, roundtrip) == x
with conf.set_temp('unicode_output', True):
bytes_x = bytes(x)
unicode_x = six.text_type(x)
repr_x = repr(x)
assert isinstance(bytes_x, bytes)
bytes_x.decode('ascii')
assert isinstance(unicode_x, six.text_type)
assert isinstance(repr_x, six.string_types)
if isinstance(repr_x, bytes):
repr_x.decode('ascii')
else:
repr_x.encode('ascii')
if roundtrip is not None:
assert x.__class__(bytes_x) == x
assert x.__class__(unicode_x) == x
assert eval(repr_x, roundtrip) == x
@pytest.fixture(params=[0, 1, -1])
def pickle_protocol(request):
"""
Fixture to run all the tests for protocols 0 and 1, and -1 (most advanced).
(Originally from astropy.table.tests.test_pickle)
"""
return request.param
def generic_recursive_equality_test(a, b, class_history):
"""
Check if the attributes of a and b are equal. Then,
check if the attributes of the attributes are equal.
"""
dict_a = a.__dict__
dict_b = b.__dict__
for key in dict_a:
assert key in dict_b,\
"Did not pickle {0}".format(key)
if hasattr(dict_a[key], '__eq__'):
eq = (dict_a[key] == dict_b[key])
if '__iter__' in dir(eq):
eq = (False not in eq)
assert eq, "Value of {0} changed by pickling".format(key)
if hasattr(dict_a[key], '__dict__'):
if dict_a[key].__class__ in class_history:
# attempt to prevent infinite recursion
pass
else:
new_class_history = [dict_a[key].__class__]
new_class_history.extend(class_history)
generic_recursive_equality_test(dict_a[key],
dict_b[key],
new_class_history)
def check_pickling_recovery(original, protocol):
"""
Try to pickle an object. If successful, make sure
the object's attributes survived pickling and unpickling.
"""
f = pickle.dumps(original, protocol=protocol)
unpickled = pickle.loads(f)
class_history = [original.__class__]
generic_recursive_equality_test(original, unpickled,
class_history)
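# --- illustrative usage sketch (added; not part of the original module) ---
# Round-tripping a simple picklable object through every protocol that the
# pickle_protocol fixture exercises; argparse.Namespace merely stands in for
# an arbitrary object with instance attributes.
def _example_check_pickling_recovery():
    from argparse import Namespace
    original = Namespace(x=1, name='spam')
    for protocol in (0, 1, -1):  # same values as the pickle_protocol fixture
        check_pickling_recovery(original, protocol)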
def assert_quantity_allclose(actual, desired, rtol=1.e-7, atol=None,
**kwargs):
"""
Raise an assertion if two objects are not equal up to desired tolerance.
This is a :class:`~astropy.units.Quantity`-aware version of
:func:`numpy.testing.assert_allclose`.
"""
import numpy as np
np.testing.assert_allclose(*_unquantify_allclose_arguments(actual, desired,
rtol, atol),
**kwargs)
def quantity_allclose(a, b, rtol=1.e-5, atol=None, **kwargs):
"""
Returns True if two arrays are element-wise equal within a tolerance.
This is a :class:`~astropy.units.Quantity`-aware version of
:func:`numpy.allclose`.
"""
import numpy as np
return np.allclose(*_unquantify_allclose_arguments(a, b, rtol, atol),
**kwargs)
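# --- illustrative usage sketch (added; not part of the original module) ---
# The Quantity-aware helpers accept mixed but convertible units; the values
# below are placeholders.
def _example_quantity_allclose():
    from .. import units as u
    assert quantity_allclose(1000. * u.m, 1. * u.km)
    assert_quantity_allclose(1000. * u.m, 1. * u.km, atol=1. * u.mm)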
def _unquantify_allclose_arguments(actual, desired, rtol, atol):
from .. import units as u
actual = u.Quantity(actual, subok=True, copy=False)
desired = u.Quantity(desired, subok=True, copy=False)
try:
desired = desired.to(actual.unit)
except u.UnitsError:
raise u.UnitsError("Units for 'desired' ({0}) and 'actual' ({1}) "
"are not convertible"
.format(desired.unit, actual.unit))
if atol is None:
# by default, we assume an absolute tolerance of 0
atol = u.Quantity(0)
else:
atol = u.Quantity(atol, subok=True, copy=False)
try:
atol = atol.to(actual.unit)
except u.UnitsError:
raise u.UnitsError("Units for 'atol' ({0}) and 'actual' ({1}) "
"are not convertible"
.format(atol.unit, actual.unit))
rtol = u.Quantity(rtol, subok=True, copy=False)
try:
rtol = rtol.to(u.dimensionless_unscaled)
except Exception:
raise u.UnitsError("`rtol` should be dimensionless")
return actual.value, desired.value, rtol.value, atol.value
|
AustereCuriosity/astropy
|
astropy/tests/helper.py
|
Python
|
bsd-3-clause
| 18,299 | 0 |
import sqlalchemy
metadata = sqlalchemy.MetaData()
log_table = sqlalchemy.Table('log', metadata,
sqlalchemy.Column('id', sqlalchemy.Integer, primary_key=True),
sqlalchemy.Column('filename', sqlalchemy.Unicode),
sqlalchemy.Column('digest', sqlalchemy.Unicode),
sqlalchemy.Column('comment', sqlalchemy.Unicode),
sqlalchemy.Column('user_agent', sqlalchemy.Unicode),
sqlalchemy.Column('traceback', sqlalchemy.Unicode))
def init(engine):
metadata.create_all(bind=engine)
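# Illustrative sketch (added; not part of the original file): binding the
# schema to an in-memory SQLite engine; the URL is a placeholder.
def example_init():
    engine = sqlalchemy.create_engine('sqlite://')
    init(engine)
    return engine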
|
Stackato-Apps/py3kwsgitest
|
tables.py
|
Python
|
mit
| 647 | 0.006182 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-30 12:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='added',
),
migrations.RemoveField(
model_name='user',
name='changed',
),
]
|
pashinin-com/pashinin.com
|
src/core/migrations/0002_auto_20161030_1553.py
|
Python
|
gpl-3.0
| 478 | 0 |
input = """
g(1).
g(2).
g(3).
f(a,b).
f(A,B):- g(A), g(B).
f(a,a).
"""
output = """
{f(1,1), f(1,2), f(1,3), f(2,1), f(2,2), f(2,3), f(3,1), f(3,2), f(3,3), f(a,a), f(a,b), g(1), g(2), g(3)}
"""
|
Yarrick13/hwasp
|
tests/wasp1/AllAnswerSets/edbidb_3.test.py
|
Python
|
apache-2.0
| 199 | 0.005025 |
from velox_deploy import *
|
kcompher/velox-modelserver
|
bin/cluster/fabfile.py
|
Python
|
apache-2.0
| 27 | 0 |
# (c) 2012, Jan-Piet Mens <jpmens(at)gmail.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: redis
author:
- Jan-Piet Mens (@jpmens) <jpmens(at)gmail.com>
- Ansible Core
version_added: "2.5"
short_description: fetch data from Redis
description:
        - This lookup returns a list of results from a Redis DB corresponding to a list of items given to it
requirements:
- redis (python library https://github.com/andymccurdy/redis-py/)
options:
_terms:
description: list of keys to query
host:
description: location of Redis host
default: '127.0.0.1'
env:
- name: ANSIBLE_REDIS_HOST
ini:
- section: lookup_redis
key: host
    port:
      description: port on which Redis is listening
      default: 6379
      type: int
env:
- name: ANSIBLE_REDIS_PORT
ini:
- section: lookup_redis
key: port
socket:
      description: path to socket on which to query Redis; this option overrides the host and port options when set.
type: path
env:
- name: ANSIBLE_REDIS_SOCKET
ini:
- section: lookup_redis
key: socket
"""
EXAMPLES = """
- name: query redis for somekey (default or configured settings used)
debug: msg="{{ lookup('redis', 'somekey'}}"
- name: query redis for list of keys and non-default host and port
debug: msg="{{ lookup('redis', item, host='myredis.internal.com', port=2121) }}"
loop: '{{list_of_redis_keys}}'
- name: use list directly
debug: msg="{{ lookup('redis', 'key1', 'key2', 'key3') }}"
- name: use list directly with a socket
debug: msg="{{ lookup('redis', 'key1', 'key2', socket='/var/tmp/redis.sock') }}"
"""
RETURN = """
_raw:
description: value(s) stored in Redis
"""
import os
HAVE_REDIS = False
try:
import redis
HAVE_REDIS = True
except ImportError:
pass
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
if not HAVE_REDIS:
raise AnsibleError("Can't LOOKUP(redis_kv): module redis is not installed")
# get options
self.set_options(direct=kwargs)
# setup connection
host = self.get_option('host')
port = self.get_option('port')
socket = self.get_option('socket')
if socket is None:
conn = redis.Redis(host=host, port=port)
else:
conn = redis.Redis(unix_socket_path=socket)
ret = []
for term in terms:
try:
res = conn.get(term)
if res is None:
res = ""
ret.append(res)
except Exception:
ret.append("") # connection failed or key not found
return ret
|
Azulinho/ansible
|
lib/ansible/plugins/lookup/redis.py
|
Python
|
gpl-3.0
| 3,113 | 0.002891 |
from __future__ import absolute_import
###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
"""
Module for running scrapers
"""
import logging
log = logging.getLogger(__name__)
from collections import namedtuple
from amcat.models import Article, Project
ScrapeError = namedtuple("ScrapeError", ["i", "unit", "error"])
class Controller(object):
    def __init__(self):
        self.errors = []
        self.articles = []
        # avoid an AttributeError in run() if saving the articles fails
        self.saved_article_ids = set()
def run(self, scraper):
try:
units = list(scraper._get_units())
except Exception as e:
self.errors.append(ScrapeError(None,None,e))
log.exception("scraper._get_units failed")
return self.articles
for i, unit in enumerate(units):
try:
articles = list(scraper._scrape_unit(unit))
except Exception as e:
log.exception("scraper._scrape_unit failed")
self.errors.append(ScrapeError(i,unit,e))
continue
self.articles += articles
for article in self.articles:
_set_default(article, 'project', scraper.project)
try:
articles, errors = Article.create_articles(self.articles, scraper.articleset)
self.saved_article_ids = {getattr(a, "duplicate_of", a.id) for a in self.articles}
for e in errors:
self.errors.append(ScrapeError(None,None,e))
except Exception as e:
self.errors.append(ScrapeError(None,None,e))
log.exception("scraper._get_units failed")
return self.saved_article_ids
def _set_default(obj, attr, val):
try:
if getattr(obj, attr, None) is not None: return
except Project.DoesNotExist:
pass # django throws DNE on x.y if y is not set and not nullable
setattr(obj, attr, val)
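# --- illustrative usage sketch (added; not part of the original module) ---
# Feeding a scraper instance to the Controller and reporting any per-unit
# failures; the scraper object itself comes from elsewhere in amcat.
def example_run(scraper):
    controller = Controller()
    saved_ids = controller.run(scraper)
    for error in controller.errors:
        log.warning("unit %s failed: %s", error.i, error.error)
    return saved_ids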
|
tschmorleiz/amcat
|
amcat/scripts/article_upload/controller.py
|
Python
|
agpl-3.0
| 3,148 | 0.0054 |
# -*- coding: utf-8 -*-
"""
Unit tests for reverse URL lookups.
"""
from __future__ import unicode_literals
import sys
import threading
import unittest
from admin_scripts.tests import AdminScriptTestCase
from django.conf import settings
from django.conf.urls import include, url
from django.contrib.auth.models import User
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.http import (
HttpRequest, HttpResponsePermanentRedirect, HttpResponseRedirect,
)
from django.shortcuts import redirect
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, override_settings,
)
from django.test.utils import override_script_prefix
from django.urls import (
NoReverseMatch, RegexURLPattern, RegexURLResolver, Resolver404,
ResolverMatch, get_callable, get_resolver, resolve, reverse, reverse_lazy,
)
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from . import middleware, urlconf_outer, views
from .utils import URLObject
from .views import empty_view
resolve_test_data = (
# These entries are in the format: (path, url_name, app_name, namespace, view_name, func, args, kwargs)
# Simple case
('/normal/42/37/', 'normal-view', '', '', 'normal-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}),
(
'/view_class/42/37/', 'view-class', '', '', 'view-class', views.view_class_instance, tuple(),
{'arg1': '42', 'arg2': '37'}
),
(
'/included/normal/42/37/', 'inc-normal-view', '', '', 'inc-normal-view', views.empty_view, tuple(),
{'arg1': '42', 'arg2': '37'}
),
(
'/included/view_class/42/37/', 'inc-view-class', '', '', 'inc-view-class', views.view_class_instance, tuple(),
{'arg1': '42', 'arg2': '37'}
),
# Unnamed args are dropped if you have *any* kwargs in a pattern
('/mixed_args/42/37/', 'mixed-args', '', '', 'mixed-args', views.empty_view, tuple(), {'arg2': '37'}),
(
'/included/mixed_args/42/37/', 'inc-mixed-args', '', '', 'inc-mixed-args', views.empty_view, tuple(),
{'arg2': '37'}
),
(
'/included/12/mixed_args/42/37/', 'inc-mixed-args', '', '', 'inc-mixed-args', views.empty_view, tuple(),
{'arg2': '37'}
),
# Unnamed views should have None as the url_name. Regression data for #21157.
(
'/unnamed/normal/42/37/', None, '', '', 'urlpatterns_reverse.views.empty_view', views.empty_view, tuple(),
{'arg1': '42', 'arg2': '37'}
),
(
'/unnamed/view_class/42/37/', None, '', '', 'urlpatterns_reverse.views.ViewClass', views.view_class_instance,
tuple(), {'arg1': '42', 'arg2': '37'}
),
# If you have no kwargs, you get an args list.
('/no_kwargs/42/37/', 'no-kwargs', '', '', 'no-kwargs', views.empty_view, ('42', '37'), {}),
('/included/no_kwargs/42/37/', 'inc-no-kwargs', '', '', 'inc-no-kwargs', views.empty_view, ('42', '37'), {}),
(
'/included/12/no_kwargs/42/37/', 'inc-no-kwargs', '', '', 'inc-no-kwargs', views.empty_view,
('12', '42', '37'), {}
),
# Namespaces
(
'/test1/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns1', 'test-ns1:urlobject-view',
views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view',
views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/ns-included1/normal/42/37/', 'inc-normal-view', '', 'inc-ns1', 'inc-ns1:inc-normal-view', views.empty_view,
tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/included/test3/inner/42/37/', 'urlobject-view', 'testapp', 'test-ns3', 'test-ns3:urlobject-view',
views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/default/inner/42/37/', 'urlobject-view', 'testapp', 'testapp', 'testapp:urlobject-view', views.empty_view,
tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/other2/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns2', 'other-ns2:urlobject-view',
views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/other1/inner/42/37/', 'urlobject-view', 'nodefault', 'other-ns1', 'other-ns1:urlobject-view',
views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
),
# Nested namespaces
(
'/ns-included1/test3/inner/42/37/', 'urlobject-view', 'testapp', 'inc-ns1:test-ns3',
'inc-ns1:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/ns-included1/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'testapp',
'inc-ns1:inc-ns4:inc-ns2:test-ns3', 'inc-ns1:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view,
tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/app-included/test3/inner/42/37/', 'urlobject-view', 'inc-app:testapp', 'inc-app:test-ns3',
'inc-app:test-ns3:urlobject-view', views.empty_view, tuple(), {'arg1': '42', 'arg2': '37'}
),
(
'/app-included/ns-included4/ns-included2/test3/inner/42/37/', 'urlobject-view', 'inc-app:testapp',
'inc-app:inc-ns4:inc-ns2:test-ns3', 'inc-app:inc-ns4:inc-ns2:test-ns3:urlobject-view', views.empty_view,
tuple(), {'arg1': '42', 'arg2': '37'}
),
# Namespaces capturing variables
('/inc70/', 'inner-nothing', '', 'inc-ns5', 'inc-ns5:inner-nothing', views.empty_view, tuple(), {'outer': '70'}),
(
'/inc78/extra/foobar/', 'inner-extra', '', 'inc-ns5', 'inc-ns5:inner-extra', views.empty_view, tuple(),
{'outer': '78', 'extra': 'foobar'}
),
)
test_data = (
('places', '/places/3/', [3], {}),
('places', '/places/3/', ['3'], {}),
('places', NoReverseMatch, ['a'], {}),
('places', NoReverseMatch, [], {}),
('places?', '/place/', [], {}),
('places+', '/places/', [], {}),
('places*', '/place/', [], {}),
('places2?', '/', [], {}),
('places2+', '/places/', [], {}),
('places2*', '/', [], {}),
('places3', '/places/4/', [4], {}),
('places3', '/places/harlem/', ['harlem'], {}),
('places3', NoReverseMatch, ['harlem64'], {}),
('places4', '/places/3/', [], {'id': 3}),
('people', NoReverseMatch, [], {}),
('people', '/people/adrian/', ['adrian'], {}),
('people', '/people/adrian/', [], {'name': 'adrian'}),
('people', NoReverseMatch, ['name with spaces'], {}),
('people', NoReverseMatch, [], {'name': 'name with spaces'}),
('people2', '/people/name/', [], {}),
('people2a', '/people/name/fred/', ['fred'], {}),
('people_backref', '/people/nate-nate/', ['nate'], {}),
('people_backref', '/people/nate-nate/', [], {'name': 'nate'}),
('optional', '/optional/fred/', [], {'name': 'fred'}),
('optional', '/optional/fred/', ['fred'], {}),
('named_optional', '/optional/1/', [1], {}),
('named_optional', '/optional/1/', [], {'arg1': 1}),
('named_optional', '/optional/1/2/', [1, 2], {}),
('named_optional', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
('named_optional_terminated', '/optional/1/2/', [1, 2], {}),
('named_optional_terminated', '/optional/1/2/', [], {'arg1': 1, 'arg2': 2}),
('hardcoded', '/hardcoded/', [], {}),
('hardcoded2', '/hardcoded/doc.pdf', [], {}),
('people3', '/people/il/adrian/', [], {'state': 'il', 'name': 'adrian'}),
('people3', NoReverseMatch, [], {'state': 'il'}),
('people3', NoReverseMatch, [], {'name': 'adrian'}),
('people4', NoReverseMatch, [], {'state': 'il', 'name': 'adrian'}),
('people6', '/people/il/test/adrian/', ['il/test', 'adrian'], {}),
('people6', '/people//adrian/', ['adrian'], {}),
('range', '/character_set/a/', [], {}),
('range2', '/character_set/x/', [], {}),
('price', '/price/$10/', ['10'], {}),
('price2', '/price/$10/', ['10'], {}),
('price3', '/price/$10/', ['10'], {}),
('product', '/product/chocolate+($2.00)/', [], {'price': '2.00', 'product': 'chocolate'}),
('headlines', '/headlines/2007.5.21/', [], dict(year=2007, month=5, day=21)),
(
'windows', r'/windows_path/C:%5CDocuments%20and%20Settings%5Cspam/', [],
dict(drive_name='C', path=r'Documents and Settings\spam')
),
('special', r'/special_chars/~@+%5C$*%7C/', [r'~@+\$*|'], {}),
('special', r'/special_chars/some%20resource/', [r'some resource'], {}),
('special', r'/special_chars/10%25%20complete/', [r'10% complete'], {}),
('special', r'/special_chars/some%20resource/', [], {'chars': r'some resource'}),
('special', r'/special_chars/10%25%20complete/', [], {'chars': r'10% complete'}),
('special', NoReverseMatch, [''], {}),
('mixed', '/john/0/', [], {'name': 'john'}),
('repeats', '/repeats/a/', [], {}),
('repeats2', '/repeats/aa/', [], {}),
('repeats3', '/repeats/aa/', [], {}),
('insensitive', '/CaseInsensitive/fred', ['fred'], {}),
('test', '/test/1', [], {}),
('test2', '/test/2', [], {}),
('inner-nothing', '/outer/42/', [], {'outer': '42'}),
('inner-nothing', '/outer/42/', ['42'], {}),
('inner-nothing', NoReverseMatch, ['foo'], {}),
('inner-extra', '/outer/42/extra/inner/', [], {'extra': 'inner', 'outer': '42'}),
('inner-extra', '/outer/42/extra/inner/', ['42', 'inner'], {}),
('inner-extra', NoReverseMatch, ['fred', 'inner'], {}),
('inner-no-kwargs', '/outer-no-kwargs/42/inner-no-kwargs/1/', ['42', '1'], {}),
('disjunction', NoReverseMatch, ['foo'], {}),
('inner-disjunction', NoReverseMatch, ['10', '11'], {}),
('extra-places', '/e-places/10/', ['10'], {}),
('extra-people', '/e-people/fred/', ['fred'], {}),
('extra-people', '/e-people/fred/', [], {'name': 'fred'}),
('part', '/part/one/', [], {'value': 'one'}),
('part', '/prefix/xx/part/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/part2/one/', [], {'value': 'one'}),
('part2', '/part2/', [], {}),
('part2', '/prefix/xx/part2/one/', [], {'value': 'one', 'prefix': 'xx'}),
('part2', '/prefix/xx/part2/', [], {'prefix': 'xx'}),
# Tests for nested groups. Nested capturing groups will only work if you
# *only* supply the correct outer group.
('nested-noncapture', '/nested/noncapture/opt', [], {'p': 'opt'}),
('nested-capture', '/nested/capture/opt/', ['opt/'], {}),
('nested-capture', NoReverseMatch, [], {'p': 'opt'}),
('nested-mixedcapture', '/nested/capture/mixed/opt', ['opt'], {}),
('nested-mixedcapture', NoReverseMatch, [], {'p': 'opt'}),
('nested-namedcapture', '/nested/capture/named/opt/', [], {'outer': 'opt/'}),
('nested-namedcapture', NoReverseMatch, [], {'outer': 'opt/', 'inner': 'opt'}),
('nested-namedcapture', NoReverseMatch, [], {'inner': 'opt'}),
('non_path_include', '/includes/non_path_include/', [], {}),
# Tests for #13154
('defaults', '/defaults_view1/3/', [], {'arg1': 3, 'arg2': 1}),
('defaults', '/defaults_view2/3/', [], {'arg1': 3, 'arg2': 2}),
('defaults', NoReverseMatch, [], {'arg1': 3, 'arg2': 3}),
('defaults', NoReverseMatch, [], {'arg2': 1}),
# Security tests
('security', '/%2Fexample.com/security/', ['/example.com'], {}),
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.no_urls')
class NoURLPatternsTests(SimpleTestCase):
def test_no_urls_exception(self):
"""
RegexURLResolver should raise an exception when no urlpatterns exist.
"""
resolver = RegexURLResolver(r'^$', settings.ROOT_URLCONF)
with self.assertRaisesMessage(
ImproperlyConfigured,
"The included URLconf 'urlpatterns_reverse.no_urls' does not "
"appear to have any patterns in it. If you see valid patterns in "
"the file then the issue is probably caused by a circular import."
):
getattr(resolver, 'url_patterns')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class URLPatternReverse(SimpleTestCase):
def test_urlpattern_reverse(self):
for name, expected, args, kwargs in test_data:
try:
got = reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.assertEqual(expected, NoReverseMatch)
else:
self.assertEqual(got, expected)
def test_reverse_none(self):
# Reversing None should raise an error, not return the last un-named view.
with self.assertRaises(NoReverseMatch):
reverse(None)
@override_script_prefix('/{{invalid}}/')
def test_prefix_braces(self):
self.assertEqual(
'/%7B%7Binvalid%7D%7D/includes/non_path_include/',
reverse('non_path_include')
)
def test_prefix_parenthesis(self):
# Parentheses are allowed and should not cause errors or be escaped
with override_script_prefix('/bogus)/'):
self.assertEqual(
'/bogus)/includes/non_path_include/',
reverse('non_path_include')
)
with override_script_prefix('/(bogus)/'):
self.assertEqual(
'/(bogus)/includes/non_path_include/',
reverse('non_path_include')
)
@override_script_prefix('/bump%20map/')
def test_prefix_format_char(self):
self.assertEqual(
'/bump%2520map/includes/non_path_include/',
reverse('non_path_include')
)
@override_script_prefix('/%7Eme/')
def test_non_urlsafe_prefix_with_args(self):
# Regression for #20022, adjusted for #24013 because ~ is an unreserved
# character. Tests whether % is escaped.
self.assertEqual('/%257Eme/places/1/', reverse('places', args=[1]))
def test_patterns_reported(self):
# Regression for #17076
with self.assertRaisesMessage(NoReverseMatch, r"1 pattern(s) tried: ['people/(?P<name>\\w+)/$']"):
# this url exists, but requires an argument
reverse("people", args=[])
@override_script_prefix('/script:name/')
def test_script_name_escaping(self):
self.assertEqual(
reverse('optional', args=['foo:bar']),
'/script:name/optional/foo:bar/'
)
def test_reverse_returns_unicode(self):
name, expected, args, kwargs = test_data[0]
self.assertIsInstance(
reverse(name, args=args, kwargs=kwargs),
six.text_type
)
class ResolverTests(SimpleTestCase):
@ignore_warnings(category=RemovedInDjango20Warning)
def test_resolver_repr(self):
"""
Test repr of RegexURLResolver, especially when urlconf_name is a list
(#17892).
"""
# Pick a resolver from a namespaced URLconf
resolver = get_resolver('urlpatterns_reverse.namespace_urls')
sub_resolver = resolver.namespace_dict['test-ns1'][1]
self.assertIn('<RegexURLPattern list>', repr(sub_resolver))
def test_reverse_lazy_object_coercion_by_resolve(self):
"""
Verifies lazy object returned by reverse_lazy is coerced to
text by resolve(). Previous to #21043, this would raise a TypeError.
"""
urls = 'urlpatterns_reverse.named_urls'
proxy_url = reverse_lazy('named-url1', urlconf=urls)
resolver = get_resolver(urls)
resolver.resolve(proxy_url)
def test_resolver_reverse(self):
resolver = get_resolver('urlpatterns_reverse.named_urls')
self.assertEqual(resolver.reverse('named-url1'), '')
self.assertEqual(resolver.reverse('named-url2', 'arg'), 'extra/arg/')
self.assertEqual(resolver.reverse('named-url2', extra='arg'), 'extra/arg/')
def test_non_regex(self):
"""
A Resolver404 is raised if resolving doesn't meet the basic
requirements of a path to match - i.e., at the very least, it matches
the root pattern '^/'. Never return None from resolve() to prevent a
        TypeError from occurring later (#10834).
"""
with self.assertRaises(Resolver404):
resolve('')
with self.assertRaises(Resolver404):
resolve('a')
with self.assertRaises(Resolver404):
resolve('\\')
with self.assertRaises(Resolver404):
resolve('.')
def test_404_tried_urls_have_names(self):
"""
The list of URLs that come back from a Resolver404 exception contains
a list in the right format for printing out in the DEBUG 404 page with
both the patterns and URL names, if available.
"""
urls = 'urlpatterns_reverse.named_urls'
# this list matches the expected URL types and names returned when
# you try to resolve a non-existent URL in the first level of included
# URLs in named_urls.py (e.g., '/included/non-existent-url')
url_types_names = [
[{'type': RegexURLPattern, 'name': 'named-url1'}],
[{'type': RegexURLPattern, 'name': 'named-url2'}],
[{'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url3'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': 'named-url4'}],
[{'type': RegexURLResolver}, {'type': RegexURLPattern, 'name': None}],
[{'type': RegexURLResolver}, {'type': RegexURLResolver}],
]
with self.assertRaisesMessage(Resolver404, b'tried' if six.PY2 else 'tried') as cm:
resolve('/included/non-existent-url', urlconf=urls)
e = cm.exception
# make sure we at least matched the root ('/') url resolver:
self.assertIn('tried', e.args[0])
tried = e.args[0]['tried']
self.assertEqual(
len(e.args[0]['tried']),
len(url_types_names),
'Wrong number of tried URLs returned. Expected %s, got %s.' % (
len(url_types_names), len(e.args[0]['tried'])
)
)
for tried, expected in zip(e.args[0]['tried'], url_types_names):
for t, e in zip(tried, expected):
                self.assertIsInstance(t, e['type'], '%s is not an instance of %s' % (t, e['type']))
if 'name' in e:
if not e['name']:
self.assertIsNone(t.name, 'Expected no URL name but found %s.' % t.name)
else:
self.assertEqual(
t.name,
e['name'],
'Wrong URL name. Expected "%s", got "%s".' % (e['name'], t.name)
)
def test_namespaced_view_detail(self):
resolver = get_resolver('urlpatterns_reverse.nested_urls')
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.view1'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.view2'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.nested_urls.View3'))
self.assertFalse(resolver._is_callback('urlpatterns_reverse.nested_urls.blub'))
@unittest.skipIf(six.PY2, "Python 2 doesn't support __qualname__.")
def test_view_detail_as_method(self):
# Views which have a class name as part of their path.
resolver = get_resolver('urlpatterns_reverse.method_view_urls')
self.assertTrue(resolver._is_callback('urlpatterns_reverse.method_view_urls.ViewContainer.method_view'))
self.assertTrue(resolver._is_callback('urlpatterns_reverse.method_view_urls.ViewContainer.classmethod_view'))
def test_populate_concurrency(self):
"""
RegexURLResolver._populate() can be called concurrently, but not more
than once per thread (#26888).
"""
resolver = RegexURLResolver(r'^/', 'urlpatterns_reverse.urls')
resolver._local.populating = True
thread = threading.Thread(target=resolver._populate)
thread.start()
thread.join()
self.assertNotEqual(resolver._reverse_dict, {})
@override_settings(ROOT_URLCONF='urlpatterns_reverse.reverse_lazy_urls')
class ReverseLazyTest(TestCase):
def test_redirect_with_lazy_reverse(self):
response = self.client.get('/redirect/')
self.assertRedirects(response, "/redirected_to/", status_code=302)
def test_user_permission_with_lazy_reverse(self):
alfred = User.objects.create_user('alfred', 'alfred@example.com', password='testpw')
response = self.client.get('/login_required_view/')
self.assertRedirects(response, "/login/?next=/login_required_view/", status_code=302)
self.client.force_login(alfred)
response = self.client.get('/login_required_view/')
self.assertEqual(response.status_code, 200)
def test_inserting_reverse_lazy_into_string(self):
self.assertEqual(
'Some URL: %s' % reverse_lazy('some-login-page'),
'Some URL: /login/'
)
if six.PY2:
self.assertEqual(
b'Some URL: %s' % reverse_lazy('some-login-page'),
'Some URL: /login/'
)
class ReverseLazySettingsTest(AdminScriptTestCase):
"""
reverse_lazy can be used in settings without causing a circular
import error.
"""
def setUp(self):
self.write_settings('settings.py', extra="""
from django.urls import reverse_lazy
LOGIN_URL = reverse_lazy('login')""")
def tearDown(self):
self.remove_settings('settings.py')
def test_lazy_in_settings(self):
out, err = self.run_manage(['check'])
self.assertNoOutput(err)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class ReverseShortcutTests(SimpleTestCase):
def test_redirect_to_object(self):
# We don't really need a model; just something with a get_absolute_url
class FakeObj(object):
def get_absolute_url(self):
return "/hi-there/"
res = redirect(FakeObj())
self.assertIsInstance(res, HttpResponseRedirect)
self.assertEqual(res.url, '/hi-there/')
res = redirect(FakeObj(), permanent=True)
self.assertIsInstance(res, HttpResponsePermanentRedirect)
self.assertEqual(res.url, '/hi-there/')
def test_redirect_to_view_name(self):
res = redirect('hardcoded2')
self.assertEqual(res.url, '/hardcoded/doc.pdf')
res = redirect('places', 1)
self.assertEqual(res.url, '/places/1/')
res = redirect('headlines', year='2008', month='02', day='17')
self.assertEqual(res.url, '/headlines/2008.02.17/')
with self.assertRaises(NoReverseMatch):
redirect('not-a-view')
def test_redirect_to_url(self):
res = redirect('/foo/')
self.assertEqual(res.url, '/foo/')
res = redirect('http://example.com/')
self.assertEqual(res.url, 'http://example.com/')
# Assert that we can redirect using UTF-8 strings
res = redirect('/æøå/abc/')
self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5/abc/')
# Assert that no imports are attempted when dealing with a relative path
# (previously, the below would resolve in a UnicodeEncodeError from __import__ )
res = redirect('/æøå.abc/')
self.assertEqual(res.url, '/%C3%A6%C3%B8%C3%A5.abc/')
res = redirect('os.path')
self.assertEqual(res.url, 'os.path')
def test_no_illegal_imports(self):
# modules that are not listed in urlpatterns should not be importable
redirect("urlpatterns_reverse.nonimported_module.view")
self.assertNotIn("urlpatterns_reverse.nonimported_module", sys.modules)
def test_reverse_by_path_nested(self):
# Views added to urlpatterns using include() should be reversible.
from .views import nested_view
self.assertEqual(reverse(nested_view), '/includes/nested_path/')
def test_redirect_view_object(self):
from .views import absolute_kwargs_view
res = redirect(absolute_kwargs_view)
self.assertEqual(res.url, '/absolute_arg_view/')
with self.assertRaises(NoReverseMatch):
redirect(absolute_kwargs_view, wrong_argument=None)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
@ignore_warnings(category=RemovedInDjango20Warning)
class NamespaceTests(SimpleTestCase):
def test_ambiguous_object(self):
"Names deployed via dynamic URL objects that require namespaces can't be resolved"
with self.assertRaises(NoReverseMatch):
reverse('urlobject-view')
with self.assertRaises(NoReverseMatch):
reverse('urlobject-view', args=[37, 42])
with self.assertRaises(NoReverseMatch):
reverse('urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
def test_ambiguous_urlpattern(self):
"Names deployed via dynamic URL objects that require namespaces can't be resolved"
with self.assertRaises(NoReverseMatch):
reverse('inner-nothing')
with self.assertRaises(NoReverseMatch):
reverse('inner-nothing', args=[37, 42])
with self.assertRaises(NoReverseMatch):
reverse('inner-nothing', kwargs={'arg1': 42, 'arg2': 37})
def test_non_existent_namespace(self):
"Non-existent namespaces raise errors"
with self.assertRaises(NoReverseMatch):
reverse('blahblah:urlobject-view')
with self.assertRaises(NoReverseMatch):
reverse('test-ns1:blahblah:urlobject-view')
def test_normal_name(self):
"Normal lookups work as expected"
self.assertEqual('/normal/', reverse('normal-view'))
self.assertEqual('/normal/37/42/', reverse('normal-view', args=[37, 42]))
self.assertEqual('/normal/42/37/', reverse('normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/+%5C$*/', reverse('special-view'))
def test_simple_included_name(self):
"Normal lookups work on names included from other patterns"
self.assertEqual('/included/normal/', reverse('inc-normal-view'))
self.assertEqual('/included/normal/37/42/', reverse('inc-normal-view', args=[37, 42]))
self.assertEqual('/included/normal/42/37/', reverse('inc-normal-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/included/+%5C$*/', reverse('inc-special-view'))
def test_namespace_object(self):
"Dynamic URL objects can be found using a namespace"
self.assertEqual('/test1/inner/', reverse('test-ns1:urlobject-view'))
self.assertEqual('/test1/inner/37/42/', reverse('test-ns1:urlobject-view', args=[37, 42]))
self.assertEqual('/test1/inner/42/37/', reverse('test-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/test1/inner/+%5C$*/', reverse('test-ns1:urlobject-special-view'))
def test_app_object(self):
"Dynamic URL objects can return a (pattern, app_name) 2-tuple, and include() can set the namespace"
self.assertEqual('/newapp1/inner/', reverse('new-ns1:urlobject-view'))
self.assertEqual('/newapp1/inner/37/42/', reverse('new-ns1:urlobject-view', args=[37, 42]))
self.assertEqual('/newapp1/inner/42/37/', reverse('new-ns1:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/newapp1/inner/+%5C$*/', reverse('new-ns1:urlobject-special-view'))
def test_app_object_default_namespace(self):
"Namespace defaults to app_name when including a (pattern, app_name) 2-tuple"
self.assertEqual('/new-default/inner/', reverse('newapp:urlobject-view'))
self.assertEqual('/new-default/inner/37/42/', reverse('newapp:urlobject-view', args=[37, 42]))
self.assertEqual(
'/new-default/inner/42/37/', reverse('newapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
)
self.assertEqual('/new-default/inner/+%5C$*/', reverse('newapp:urlobject-special-view'))
def test_embedded_namespace_object(self):
"Namespaces can be installed anywhere in the URL pattern tree"
self.assertEqual('/included/test3/inner/', reverse('test-ns3:urlobject-view'))
self.assertEqual('/included/test3/inner/37/42/', reverse('test-ns3:urlobject-view', args=[37, 42]))
self.assertEqual(
'/included/test3/inner/42/37/', reverse('test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
)
self.assertEqual('/included/test3/inner/+%5C$*/', reverse('test-ns3:urlobject-special-view'))
def test_namespace_pattern(self):
"Namespaces can be applied to include()'d urlpatterns"
self.assertEqual('/ns-included1/normal/', reverse('inc-ns1:inc-normal-view'))
self.assertEqual('/ns-included1/normal/37/42/', reverse('inc-ns1:inc-normal-view', args=[37, 42]))
self.assertEqual(
'/ns-included1/normal/42/37/', reverse('inc-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37})
)
self.assertEqual('/ns-included1/+%5C$*/', reverse('inc-ns1:inc-special-view'))
def test_app_name_pattern(self):
"Namespaces can be applied to include()'d urlpatterns that set an app_name attribute"
self.assertEqual('/app-included1/normal/', reverse('app-ns1:inc-normal-view'))
self.assertEqual('/app-included1/normal/37/42/', reverse('app-ns1:inc-normal-view', args=[37, 42]))
self.assertEqual(
'/app-included1/normal/42/37/', reverse('app-ns1:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37})
)
self.assertEqual('/app-included1/+%5C$*/', reverse('app-ns1:inc-special-view'))
def test_namespace_pattern_with_variable_prefix(self):
"When using an include with namespaces when there is a regex variable in front of it"
self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', kwargs={'outer': 42}))
self.assertEqual('/ns-outer/42/normal/', reverse('inc-outer:inc-normal-view', args=[42]))
self.assertEqual(
'/ns-outer/42/normal/37/4/',
reverse('inc-outer:inc-normal-view', kwargs={'outer': 42, 'arg1': 37, 'arg2': 4})
)
self.assertEqual('/ns-outer/42/normal/37/4/', reverse('inc-outer:inc-normal-view', args=[42, 37, 4]))
self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', kwargs={'outer': 42}))
self.assertEqual('/ns-outer/42/+%5C$*/', reverse('inc-outer:inc-special-view', args=[42]))
def test_multiple_namespace_pattern(self):
"Namespaces can be embedded"
self.assertEqual('/ns-included1/test3/inner/', reverse('inc-ns1:test-ns3:urlobject-view'))
self.assertEqual('/ns-included1/test3/inner/37/42/', reverse('inc-ns1:test-ns3:urlobject-view', args=[37, 42]))
self.assertEqual(
'/ns-included1/test3/inner/42/37/',
reverse('inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
)
self.assertEqual('/ns-included1/test3/inner/+%5C$*/', reverse('inc-ns1:test-ns3:urlobject-special-view'))
def test_nested_namespace_pattern(self):
"Namespaces can be nested"
self.assertEqual(
'/ns-included1/ns-included4/ns-included1/test3/inner/',
reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view')
)
self.assertEqual(
'/ns-included1/ns-included4/ns-included1/test3/inner/37/42/',
reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', args=[37, 42])
)
self.assertEqual(
'/ns-included1/ns-included4/ns-included1/test3/inner/42/37/',
reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
)
self.assertEqual(
'/ns-included1/ns-included4/ns-included1/test3/inner/+%5C$*/',
reverse('inc-ns1:inc-ns4:inc-ns1:test-ns3:urlobject-special-view')
)
def test_app_lookup_object(self):
"A default application namespace can be used for lookup"
self.assertEqual('/default/inner/', reverse('testapp:urlobject-view'))
self.assertEqual('/default/inner/37/42/', reverse('testapp:urlobject-view', args=[37, 42]))
self.assertEqual('/default/inner/42/37/', reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/default/inner/+%5C$*/', reverse('testapp:urlobject-special-view'))
def test_app_lookup_object_with_default(self):
"A default application namespace is sensitive to the 'current' app can be used for lookup"
self.assertEqual('/included/test3/inner/', reverse('testapp:urlobject-view', current_app='test-ns3'))
self.assertEqual(
'/included/test3/inner/37/42/',
reverse('testapp:urlobject-view', args=[37, 42], current_app='test-ns3')
)
self.assertEqual(
'/included/test3/inner/42/37/',
reverse('testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='test-ns3')
)
self.assertEqual(
'/included/test3/inner/+%5C$*/', reverse('testapp:urlobject-special-view', current_app='test-ns3')
)
def test_app_lookup_object_without_default(self):
"An application namespace without a default is sensitive to the 'current' app can be used for lookup"
self.assertEqual('/other2/inner/', reverse('nodefault:urlobject-view'))
self.assertEqual('/other2/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42]))
self.assertEqual('/other2/inner/42/37/', reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}))
self.assertEqual('/other2/inner/+%5C$*/', reverse('nodefault:urlobject-special-view'))
self.assertEqual('/other1/inner/', reverse('nodefault:urlobject-view', current_app='other-ns1'))
self.assertEqual(
'/other1/inner/37/42/', reverse('nodefault:urlobject-view', args=[37, 42], current_app='other-ns1')
)
self.assertEqual(
'/other1/inner/42/37/',
reverse('nodefault:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='other-ns1')
)
self.assertEqual('/other1/inner/+%5C$*/', reverse('nodefault:urlobject-special-view', current_app='other-ns1'))
def test_special_chars_namespace(self):
self.assertEqual('/+%5C$*/included/normal/', reverse('special:inc-normal-view'))
self.assertEqual('/+%5C$*/included/normal/37/42/', reverse('special:inc-normal-view', args=[37, 42]))
self.assertEqual(
'/+%5C$*/included/normal/42/37/',
reverse('special:inc-normal-view', kwargs={'arg1': 42, 'arg2': 37})
)
self.assertEqual('/+%5C$*/included/+%5C$*/', reverse('special:inc-special-view'))
def test_namespaces_with_variables(self):
"Namespace prefixes can capture variables: see #15900"
self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', kwargs={'outer': '70'}))
self.assertEqual(
'/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', kwargs={'outer': '78', 'extra': 'foobar'})
)
self.assertEqual('/inc70/', reverse('inc-ns5:inner-nothing', args=['70']))
self.assertEqual('/inc78/extra/foobar/', reverse('inc-ns5:inner-extra', args=['78', 'foobar']))
def test_nested_app_lookup(self):
"A nested current_app should be split in individual namespaces (#24904)"
self.assertEqual('/ns-included1/test4/inner/', reverse('inc-ns1:testapp:urlobject-view'))
self.assertEqual('/ns-included1/test4/inner/37/42/', reverse('inc-ns1:testapp:urlobject-view', args=[37, 42]))
self.assertEqual(
'/ns-included1/test4/inner/42/37/',
reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37})
)
self.assertEqual('/ns-included1/test4/inner/+%5C$*/', reverse('inc-ns1:testapp:urlobject-special-view'))
self.assertEqual(
'/ns-included1/test3/inner/',
reverse('inc-ns1:testapp:urlobject-view', current_app='inc-ns1:test-ns3')
)
self.assertEqual(
'/ns-included1/test3/inner/37/42/',
reverse('inc-ns1:testapp:urlobject-view', args=[37, 42], current_app='inc-ns1:test-ns3')
)
self.assertEqual(
'/ns-included1/test3/inner/42/37/',
reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37}, current_app='inc-ns1:test-ns3')
)
self.assertEqual(
'/ns-included1/test3/inner/+%5C$*/',
reverse('inc-ns1:testapp:urlobject-special-view', current_app='inc-ns1:test-ns3')
)
def test_current_app_no_partial_match(self):
"current_app should either match the whole path or shouldn't be used"
self.assertEqual(
'/ns-included1/test4/inner/',
reverse('inc-ns1:testapp:urlobject-view', current_app='non-existent:test-ns3')
)
self.assertEqual(
'/ns-included1/test4/inner/37/42/',
reverse('inc-ns1:testapp:urlobject-view', args=[37, 42], current_app='non-existent:test-ns3')
)
self.assertEqual(
'/ns-included1/test4/inner/42/37/',
reverse('inc-ns1:testapp:urlobject-view', kwargs={'arg1': 42, 'arg2': 37},
current_app='non-existent:test-ns3')
)
self.assertEqual(
'/ns-included1/test4/inner/+%5C$*/',
reverse('inc-ns1:testapp:urlobject-special-view', current_app='non-existent:test-ns3')
)
@override_settings(ROOT_URLCONF=urlconf_outer.__name__)
class RequestURLconfTests(SimpleTestCase):
def test_urlconf(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
]
)
def test_urlconf_overridden(self):
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 404)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:,inner:/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.NullChangeURLconfMiddleware' % middleware.__name__,
]
)
def test_urlconf_overridden_with_null(self):
"""
Overriding request.urlconf with None will fall back to the default
URLconf.
"""
response = self.client.get('/test/me/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'outer:/test/me/,inner:/inner_urlconf/second_test/')
response = self.client.get('/inner_urlconf/second_test/')
self.assertEqual(response.status_code, 200)
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 404)
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInResponseMiddleware' % middleware.__name__,
]
)
def test_reverse_inner_in_response_middleware(self):
"""
        Test reversing a URL from the *overridden* URLconf from inside
a response middleware.
"""
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInResponseMiddleware' % middleware.__name__,
]
)
def test_reverse_outer_in_response_middleware(self):
"""
        Test reversing a URL from the *default* URLconf from inside
a response middleware.
"""
message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseInnerInStreaming' % middleware.__name__,
]
)
def test_reverse_inner_in_streaming(self):
"""
        Test reversing a URL from the *overridden* URLconf from inside
a streaming response.
"""
response = self.client.get('/second_test/')
self.assertEqual(response.status_code, 200)
self.assertEqual(b''.join(response), b'/second_test/')
@override_settings(
MIDDLEWARE=[
'%s.ChangeURLconfMiddleware' % middleware.__name__,
'%s.ReverseOuterInStreaming' % middleware.__name__,
]
)
def test_reverse_outer_in_streaming(self):
"""
        Test reversing a URL from the *default* URLconf from inside
a streaming response.
"""
message = "Reverse for 'outer' with arguments '()' and keyword arguments '{}' not found."
with self.assertRaisesMessage(NoReverseMatch, message):
self.client.get('/second_test/')
b''.join(self.client.get('/second_test/'))
class ErrorHandlerResolutionTests(SimpleTestCase):
"""Tests for handler400, handler404 and handler500"""
def setUp(self):
urlconf = 'urlpatterns_reverse.urls_error_handlers'
urlconf_callables = 'urlpatterns_reverse.urls_error_handlers_callables'
self.resolver = RegexURLResolver(r'^$', urlconf)
self.callable_resolver = RegexURLResolver(r'^$', urlconf_callables)
def test_named_handlers(self):
handler = (empty_view, {})
self.assertEqual(self.resolver.resolve_error_handler(400), handler)
self.assertEqual(self.resolver.resolve_error_handler(404), handler)
self.assertEqual(self.resolver.resolve_error_handler(500), handler)
def test_callable_handlers(self):
handler = (empty_view, {})
self.assertEqual(self.callable_resolver.resolve_error_handler(400), handler)
self.assertEqual(self.callable_resolver.resolve_error_handler(404), handler)
self.assertEqual(self.callable_resolver.resolve_error_handler(500), handler)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls_without_full_import')
class DefaultErrorHandlerTests(SimpleTestCase):
def test_default_handler(self):
"If the urls.py doesn't specify handlers, the defaults are used"
response = self.client.get('/test/')
self.assertEqual(response.status_code, 404)
with self.assertRaisesMessage(ValueError, "I don't think I'm getting good"):
self.client.get('/bad_view/')
@override_settings(ROOT_URLCONF=None)
class NoRootUrlConfTests(SimpleTestCase):
"""Tests for handler404 and handler500 if ROOT_URLCONF is None"""
def test_no_handler_exception(self):
with self.assertRaises(ImproperlyConfigured):
self.client.get('/test/me/')
@override_settings(ROOT_URLCONF='urlpatterns_reverse.namespace_urls')
class ResolverMatchTests(SimpleTestCase):
@ignore_warnings(category=RemovedInDjango20Warning)
def test_urlpattern_resolve(self):
for path, url_name, app_name, namespace, view_name, func, args, kwargs in resolve_test_data:
# Test legacy support for extracting "function, args, kwargs"
match_func, match_args, match_kwargs = resolve(path)
self.assertEqual(match_func, func)
self.assertEqual(match_args, args)
self.assertEqual(match_kwargs, kwargs)
# Test ResolverMatch capabilities.
match = resolve(path)
self.assertEqual(match.__class__, ResolverMatch)
self.assertEqual(match.url_name, url_name)
self.assertEqual(match.app_name, app_name)
self.assertEqual(match.namespace, namespace)
self.assertEqual(match.view_name, view_name)
self.assertEqual(match.func, func)
self.assertEqual(match.args, args)
self.assertEqual(match.kwargs, kwargs)
# ... and for legacy purposes:
self.assertEqual(match[0], func)
self.assertEqual(match[1], args)
self.assertEqual(match[2], kwargs)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_resolver_match_on_request(self):
response = self.client.get('/resolver_match/')
resolver_match = response.resolver_match
self.assertEqual(resolver_match.url_name, 'test-resolver-match')
def test_resolver_match_on_request_before_resolution(self):
request = HttpRequest()
self.assertIsNone(request.resolver_match)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.erroneous_urls')
class ErroneousViewTests(SimpleTestCase):
def test_noncallable_view(self):
# View is not a callable (explicit import; arbitrary Python object)
with self.assertRaisesMessage(TypeError, 'view must be a callable'):
url(r'uncallable-object/$', views.uncallable)
def test_invalid_regex(self):
# Regex contains an error (refs #6170)
msg = '(regex_error/$" is not a valid regular expression'
with self.assertRaisesMessage(ImproperlyConfigured, msg):
reverse(views.empty_view)
class ViewLoadingTests(SimpleTestCase):
def test_view_loading(self):
self.assertEqual(get_callable('urlpatterns_reverse.views.empty_view'), empty_view)
# passing a callable should return the callable
self.assertEqual(get_callable(empty_view), empty_view)
def test_exceptions(self):
# A missing view (identified by an AttributeError) should raise
# ViewDoesNotExist, ...
with self.assertRaisesMessage(ViewDoesNotExist, "View does not exist in"):
get_callable('urlpatterns_reverse.views.i_should_not_exist')
# ... but if the AttributeError is caused by something else don't
# swallow it.
with self.assertRaises(AttributeError):
get_callable('urlpatterns_reverse.views_broken.i_am_broken')
class IncludeTests(SimpleTestCase):
url_patterns = [
url(r'^inner/$', views.empty_view, name='urlobject-view'),
url(r'^inner/(?P<arg1>[0-9]+)/(?P<arg2>[0-9]+)/$', views.empty_view, name='urlobject-view'),
url(r'^inner/\+\\\$\*/$', views.empty_view, name='urlobject-special-view'),
]
app_urls = URLObject('inc-app')
def test_include_app_name_but_no_namespace(self):
msg = "Must specify a namespace if specifying app_name."
with self.assertRaisesMessage(ValueError, msg):
include(self.url_patterns, app_name='bar')
def test_include_urls(self):
self.assertEqual(include(self.url_patterns), (self.url_patterns, None, None))
@ignore_warnings(category=RemovedInDjango20Warning)
def test_include_namespace(self):
# no app_name -> deprecated
self.assertEqual(include(self.url_patterns, 'namespace'), (self.url_patterns, None, 'namespace'))
@ignore_warnings(category=RemovedInDjango20Warning)
def test_include_namespace_app_name(self):
# app_name argument to include -> deprecated
self.assertEqual(
include(self.url_patterns, 'namespace', 'app_name'),
(self.url_patterns, 'app_name', 'namespace')
)
@ignore_warnings(category=RemovedInDjango20Warning)
def test_include_3_tuple(self):
# 3-tuple -> deprecated
self.assertEqual(
include((self.url_patterns, 'app_name', 'namespace')),
(self.url_patterns, 'app_name', 'namespace')
)
def test_include_2_tuple(self):
self.assertEqual(
include((self.url_patterns, 'app_name')),
(self.url_patterns, 'app_name', 'app_name')
)
def test_include_2_tuple_namespace(self):
self.assertEqual(
include((self.url_patterns, 'app_name'), namespace='namespace'),
(self.url_patterns, 'app_name', 'namespace')
)
def test_include_app_name(self):
self.assertEqual(
include(self.app_urls),
(self.app_urls, 'inc-app', 'inc-app')
)
def test_include_app_name_namespace(self):
self.assertEqual(
include(self.app_urls, 'namespace'),
(self.app_urls, 'inc-app', 'namespace')
)
@override_settings(ROOT_URLCONF='urlpatterns_reverse.urls')
class LookaheadTests(SimpleTestCase):
def test_valid_resolve(self):
test_urls = [
'/lookahead-/a-city/',
'/lookbehind-/a-city/',
'/lookahead+/a-city/',
'/lookbehind+/a-city/',
]
for test_url in test_urls:
match = resolve(test_url)
self.assertEqual(match.kwargs, {'city': 'a-city'})
def test_invalid_resolve(self):
test_urls = [
'/lookahead-/not-a-city/',
'/lookbehind-/not-a-city/',
'/lookahead+/other-city/',
'/lookbehind+/other-city/',
]
for test_url in test_urls:
with self.assertRaises(Resolver404):
resolve(test_url)
def test_valid_reverse(self):
url = reverse('lookahead-positive', kwargs={'city': 'a-city'})
self.assertEqual(url, '/lookahead+/a-city/')
url = reverse('lookahead-negative', kwargs={'city': 'a-city'})
self.assertEqual(url, '/lookahead-/a-city/')
url = reverse('lookbehind-positive', kwargs={'city': 'a-city'})
self.assertEqual(url, '/lookbehind+/a-city/')
url = reverse('lookbehind-negative', kwargs={'city': 'a-city'})
self.assertEqual(url, '/lookbehind-/a-city/')
def test_invalid_reverse(self):
with self.assertRaises(NoReverseMatch):
reverse('lookahead-positive', kwargs={'city': 'other-city'})
with self.assertRaises(NoReverseMatch):
reverse('lookahead-negative', kwargs={'city': 'not-a-city'})
with self.assertRaises(NoReverseMatch):
reverse('lookbehind-positive', kwargs={'city': 'other-city'})
with self.assertRaises(NoReverseMatch):
reverse('lookbehind-negative', kwargs={'city': 'not-a-city'})
|
dfunckt/django
|
tests/urlpatterns_reverse/tests.py
|
Python
|
bsd-3-clause
| 50,749 | 0.003074 |
from worldengine.simulations.basic import *
import random
from worldengine.views.basic import color_prop
from PyQt4 import QtGui
class WatermapView(object):
def is_applicable(self, world):
return world.has_watermap()
def draw(self, world, canvas):
width = world.width
height = world.height
th = world.watermap['thresholds']['river']
for y in range(0, height):
for x in range(0, width):
if world.is_ocean((x, y)):
r = g = 0
b = 255
else:
w = world.watermap['data'][y][x]
if w > th:
r = g = 0
b = 255
else:
r = g = b = 0
col = QtGui.QColor(r, g, b)
canvas.setPixel(x, y, col.rgb())
|
ftomassetti/worldengine
|
worldengine/views/WatermapView.py
|
Python
|
mit
| 880 | 0 |
"""NuGridPy package version"""
__version__ = '0.7.6'
|
NuGrid/NuGridPy
|
nugridpy/version.py
|
Python
|
bsd-3-clause
| 54 | 0 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# -*- mode: python -*-
# vi: set ft=python :
import os
from setuptools import setup, find_packages
README_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'README')
DESCRIPTION = 'Easy image thumbnails in Django.'
if os.path.exists(README_PATH): LONG_DESCRIPTION = open(README_PATH).read()
else: LONG_DESCRIPTION = DESCRIPTION
setup(
name='django-thumbs',
version='1.0.4',
install_requires=['django'],
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
author='Matt Pegler',
author_email='matt@pegler.co',
url='https://github.com/pegler/django-thumbs/',
packages=['thumbs'],
)
|
pegler/django-thumbs
|
setup.py
|
Python
|
bsd-2-clause
| 691 | 0.004342 |
# -*- coding: utf-8 -*-
"""
lets.transparentlet
~~~~~~~~~~~~~~~~~~~
Deprecated. gevent-1.1 keeps a traceback exactly.
    If you just want to prevent the hub from printing an exception, use
    :mod:`lets.quietlet` instead.
:copyright: (c) 2013-2018 by Heungsub Lee
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from gevent.pool import Group as TransparentGroup
from lets.quietlet import quiet as no_error_handling
from lets.quietlet import Quietlet as Transparentlet
__all__ = ['Transparentlet', 'TransparentGroup', 'no_error_handling']
|
sublee/lets
|
lets/transparentlet.py
|
Python
|
bsd-3-clause
| 600 | 0 |
#!/usr/bin/python
from gevent import monkey
monkey.patch_all()
import logging
import gevent
from gevent.coros import BoundedSemaphore
from kafka import KafkaClient, KeyedProducer, SimpleConsumer, common
from uveserver import UVEServer
import os
import json
import copy
import traceback
import uuid
import struct
import socket
import discoveryclient.client as client
from sandesh_common.vns.constants import ALARM_PARTITION_SERVICE_NAME
from pysandesh.util import UTCTimestampUsec
import select
import redis
from collections import namedtuple
PartInfo = namedtuple("PartInfo",["ip_address","instance_id","acq_time","port"])
def sse_pack(d):
"""Pack data in SSE format"""
buffer = ''
for k in ['event','data']:
if k in d.keys():
buffer += '%s: %s\n' % (k, d[k])
return buffer + '\n'
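# Illustration (added for clarity, not part of the original module): sse_pack()
# emits the plain-text Server-Sent Events wire format, e.g.
#
#   sse_pack({'event': 'update', 'data': '{"partition": 0}'})
#   # -> 'event: update\ndata: {"partition": 0}\n\n'
#
# Keys other than 'event' and 'data' are ignored by design.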
class UveStreamPart(gevent.Greenlet):
def __init__(self, partno, logger, q, pi, rpass):
gevent.Greenlet.__init__(self)
self._logger = logger
self._q = q
self._pi = pi
self._partno = partno
self._rpass = rpass
def syncpart(self, redish):
inst = self._pi.instance_id
part = self._partno
keys = list(redish.smembers("AGPARTKEYS:%s:%d" % (inst, part)))
ppe = redish.pipeline()
for key in keys:
ppe.hgetall("AGPARTVALUES:%s:%d:%s" % (inst, part, key))
pperes = ppe.execute()
idx=0
for res in pperes:
for tk,tv in res.iteritems():
msg = {'event': 'sync', 'data':\
json.dumps({'partition':self._partno,
'key':keys[idx], 'type':tk, 'value':tv})}
self._q.put(sse_pack(msg))
idx += 1
def _run(self):
lredis = None
pb = None
while True:
try:
lredis = redis.StrictRedis(
host=self._pi.ip_address,
port=self._pi.port,
password=self._rpass,
db=2)
pb = lredis.pubsub()
inst = self._pi.instance_id
part = self._partno
pb.subscribe('AGPARTPUB:%s:%d' % (inst, part))
self.syncpart(lredis)
for message in pb.listen():
if message["type"] != "message":
continue
dataline = message["data"]
try:
elems = json.loads(dataline)
except:
self._logger.error("AggUVE Parsing failed: %s" % str(message))
continue
else:
self._logger.error("AggUVE loading: %s" % str(elems))
ppe = lredis.pipeline()
for elem in elems:
# This UVE was deleted
if elem["type"] is None:
ppe.exists("AGPARTVALUES:%s:%d:%s" % \
(inst, part, elem["key"]))
else:
ppe.hget("AGPARTVALUES:%s:%d:%s" % \
(inst, part, elem["key"]), elem["type"])
pperes = ppe.execute()
idx = 0
for elem in elems:
if elem["type"] is None:
msg = {'event': 'update', 'data':\
json.dumps({'partition':part,
'key':elem["key"], 'type':None})}
else:
vjson = pperes[idx]
if vjson is None:
vdata = None
else:
vdata = json.loads(vjson)
msg = {'event': 'update', 'data':\
json.dumps({'partition':part,
'key':elem["key"], 'type':elem["type"],
'value':vdata})}
self._q.put(sse_pack(msg))
idx += 1
except gevent.GreenletExit:
break
except Exception as ex:
template = "Exception {0} in uve stream proc. Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.error("%s : traceback %s" % \
(messag, traceback.format_exc()))
lredis = None
if pb is not None:
pb.close()
pb = None
gevent.sleep(2)
return None
class UveStreamer(gevent.Greenlet):
def __init__(self, logger, q, rfile, agp_cb, partitions, rpass):
gevent.Greenlet.__init__(self)
self._logger = logger
self._q = q
self._rfile = rfile
self._agp_cb = agp_cb
self._agp = {}
self._parts = {}
self._partitions = partitions
self._rpass = rpass
def _run(self):
inputs = [ self._rfile ]
outputs = [ ]
msg = {'event': 'init', 'data':\
json.dumps({'partitions':self._partitions})}
self._q.put(sse_pack(msg))
while True:
readable, writable, exceptional = select.select(inputs, outputs, inputs, 1)
if (readable or writable or exceptional):
break
newagp = self._agp_cb()
set_new, set_old = set(newagp.keys()), set(self._agp.keys())
intersect = set_new.intersection(set_old)
# deleted parts
for elem in set_old - intersect:
self.partition_stop(elem)
# new parts
for elem in set_new - intersect:
self.partition_start(elem, newagp[elem])
# changed parts
for elem in intersect:
if self._agp[elem] != newagp[elem]:
self.partition_stop(elem)
self.partition_start(elem, newagp[elem])
self._agp = newagp
for part, pi in self._agp.iteritems():
self.partition_stop(part)
def partition_start(self, partno, pi):
self._logger.error("Starting agguve part %d using %s" %( partno, pi))
msg = {'event': 'clear', 'data':\
json.dumps({'partition':partno, 'acq_time':pi.acq_time})}
self._q.put(sse_pack(msg))
self._parts[partno] = UveStreamPart(partno, self._logger,
self._q, pi, self._rpass)
self._parts[partno].start()
def partition_stop(self, partno):
self._logger.error("Stopping agguve part %d" % partno)
self._parts[partno].kill()
self._parts[partno].get()
del self._parts[partno]
class PartitionHandler(gevent.Greenlet):
def __init__(self, brokers, group, topic, logger, limit):
gevent.Greenlet.__init__(self)
self._brokers = brokers
self._group = group
self._topic = topic
self._logger = logger
self._limit = limit
self._uvedb = {}
self._partoffset = 0
self._kfk = None
def msg_handler(self, mlist):
self._logger.info("%s Reading %s" % (self._topic, str(mlist)))
return True
def _run(self):
pcount = 0
while True:
try:
self._logger.error("New KafkaClient %s" % self._topic)
self._kfk = KafkaClient(self._brokers , "kc-" + self._topic)
try:
consumer = SimpleConsumer(self._kfk, self._group, self._topic, buffer_size = 4096*4, max_buffer_size=4096*32)
#except:
except Exception as ex:
template = "Consumer Failure {0} occured. Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.info("%s" % messag)
raise RuntimeError(messag)
self._logger.error("Starting %s" % self._topic)
# Find the offset of the last message that has been queued
consumer.seek(-1,2)
try:
mi = consumer.get_message(timeout=0.1)
consumer.commit()
except common.OffsetOutOfRangeError:
mi = None
#import pdb; pdb.set_trace()
self._logger.info("Last Queued for %s is %s" % \
(self._topic,str(mi)))
# start reading from last previously processed message
if mi != None:
consumer.seek(0,1)
else:
consumer.seek(0,0)
if self._limit:
raise gevent.GreenletExit
while True:
try:
mlist = consumer.get_messages(10,timeout=0.5)
if not self.msg_handler(mlist):
raise gevent.GreenletExit
consumer.commit()
pcount += len(mlist)
except TypeError as ex:
self._logger.error("Type Error: %s trace %s" % \
(str(ex.args), traceback.format_exc()))
gevent.sleep(0.1)
except common.FailedPayloadsError as ex:
self._logger.error("Payload Error: %s" % str(ex.args))
gevent.sleep(0.1)
except gevent.GreenletExit:
break
except AssertionError as ex:
self._partoffset = ex
break
except Exception as ex:
template = "An exception of type {0} occured. Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.error("%s : traceback %s" % \
(messag, traceback.format_exc()))
self.stop_partition()
gevent.sleep(2)
self._logger.error("Stopping %s pcount %d" % (self._topic, pcount))
partdb = self.stop_partition()
return self._partoffset, partdb
class UveStreamProc(PartitionHandler):
# Arguments:
#
# brokers : broker list for kafka bootstrap
# partition : partition number
# uve_topic : Topic to consume
# logger : logging object to use
# callback : Callback function for reporting the set of the UVEs
# that may have changed for a given notification
# rsc : Callback function to check on collector status
# and get sync contents for new collectors
# aginst : instance_id of alarmgen
# rport : redis server port
# disc : discovery client to publish to
def __init__(self, brokers, partition, uve_topic, logger, callback,
host_ip, rsc, aginst, rport, disc = None):
super(UveStreamProc, self).__init__(brokers, "workers",
uve_topic, logger, False)
self._uvedb = {}
self._uvein = {}
self._uveout = {}
self._callback = callback
self._partno = partition
self._host_ip = host_ip
self._ip_code, = struct.unpack('>I', socket.inet_pton(
socket.AF_INET, host_ip))
self.disc_rset = set()
self._resource_cb = rsc
self._aginst = aginst
self._disc = disc
self._acq_time = UTCTimestampUsec()
self._rport = rport
def acq_time(self):
return self._acq_time
def resource_check(self, msgs):
'''
This function compares the known collectors with the
list from discovery, and syncs UVE keys accordingly
'''
newset , coll_delete, chg_res = self._resource_cb(self._partno, self.disc_rset, msgs)
for coll in coll_delete:
self._logger.error("Part %d lost collector %s" % (self._partno, coll))
self.stop_partition(coll)
if len(chg_res):
self.start_partition(chg_res)
self.disc_rset = newset
if self._disc:
data = { 'instance-id' : self._aginst,
'partition' : str(self._partno),
'ip-address': self._host_ip,
'acq-time': str(self._acq_time),
'port':str(self._rport)}
self._disc.publish(ALARM_PARTITION_SERVICE_NAME, data)
def stop_partition(self, kcoll=None):
clist = []
if not kcoll:
clist = self._uvedb.keys()
            # If all collectors are being cleared, clear resources too
self.disc_rset = set()
if self._disc:
# TODO: Unpublish instead of setting acq-time to 0
data = { 'instance-id' : self._aginst,
'partition' : str(self._partno),
'ip-address': self._host_ip,
'acq-time': "0",
'port':str(self._rport)}
self._disc.publish(ALARM_PARTITION_SERVICE_NAME, data)
else:
clist = [kcoll]
self._logger.error("Stopping part %d collectors %s" % \
(self._partno,clist))
partdb = {}
chg = {}
for coll in clist:
partdb[coll] = {}
for gen in self._uvedb[coll].keys():
partdb[coll][gen] = {}
for tab in self._uvedb[coll][gen].keys():
for rkey in self._uvedb[coll][gen][tab].keys():
uk = tab + ":" + rkey
chg[uk] = None
partdb[coll][gen][uk] = \
set(self._uvedb[coll][gen][tab][rkey].keys())
del self._uvedb[coll]
self._logger.error("Stopping part %d UVEs %s" % \
(self._partno,str(chg.keys())))
self._callback(self._partno, chg)
return partdb
def start_partition(self, cbdb):
        ''' This function loads the initial UVE database
            for the partition.
'''
self._logger.error("Starting part %d collectors %s" % \
(self._partno, str(cbdb.keys())))
uves = {}
for kcoll,coll in cbdb.iteritems():
self._uvedb[kcoll] = {}
for kgen,gen in coll.iteritems():
self._uvedb[kcoll][kgen] = {}
for kk in gen.keys():
tabl = kk.split(":",1)
tab = tabl[0]
rkey = tabl[1]
if not tab in self._uvedb[kcoll][kgen]:
self._uvedb[kcoll][kgen][tab] = {}
self._uvedb[kcoll][kgen][tab][rkey] = {}
uves[kk] = {}
for typ, contents in gen[kk].iteritems():
self._uvedb[kcoll][kgen][tab][rkey][typ] = {}
self._uvedb[kcoll][kgen][tab][rkey][typ]["c"] = 0
self._uvedb[kcoll][kgen][tab][rkey][typ]["u"] = \
uuid.uuid1(self._ip_code)
uves[kk][typ] = contents
self._logger.error("Starting part %d UVEs %s" % \
(self._partno, str(uves.keys())))
self._callback(self._partno, uves)
def contents(self):
return self._uvedb
def stats(self):
''' Return the UVEKey-Count stats collected over
the last time period for this partition, and
the incoming UVE Notifs as well.
Also, the stats should be cleared to prepare
for the next period of collection.
'''
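        # Illustrative shapes of the returned dicts (inferred from how
        # _uvein/_uveout are filled in msg_handler_single; not exhaustive):
        #   ret_in  ~ {table: {collector: {generator: {type: count}}}}
        #   ret_out ~ {table: {uve_key: count}}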
ret_out = copy.deepcopy(self._uveout)
ret_in = copy.deepcopy(self._uvein)
self._uveout = {}
self._uvein = {}
return ret_in, ret_out
def msg_handler(self, mlist):
self.resource_check(mlist)
for mm in mlist:
if mm is None:
continue
self._logger.debug("%s Reading offset %d" % \
(self._topic, mm.offset))
if not self.msg_handler_single(mm):
self._logger.info("%s could not handle %s" % \
(self._topic, str(mm)))
return False
return True
def msg_handler_single(self, om):
self._partoffset = om.offset
chg = {}
try:
uv = json.loads(om.message.value)
coll = uv["coll"]
gen = uv["gen"]
if not self._uvedb.has_key(coll):
# This partition is not synced yet.
# Ignore this message
self._logger.debug("%s Ignoring UVE %s" % (self._topic, str(om)))
return True
if not self._uvedb[coll].has_key(gen):
self._uvedb[coll][gen] = {}
if (uv["message"] == "UVEUpdate"):
tabl = uv["key"].split(":",1)
tab = tabl[0]
rkey = tabl[1]
if tab not in self._uvedb[coll][gen]:
self._uvedb[coll][gen][tab] = {}
if not rkey in self._uvedb[coll][gen][tab]:
self._uvedb[coll][gen][tab][rkey] = {}
removed = False
# uv["type"] and uv["value"] can be decoded as follows:
# uv["type"] can be one of the following:
# - None # All Types under this UVE are deleted
# uv["value"] will not be present
# (this option is only for agg UVE updates)
# - "<Struct>" # uv["value"] refers to this struct
# uv["value"] can be one of the following:
# - None # This Type has been deleted.
# - {} # The Type has a value, which is
# not available in this message.
# (this option is only for raw UVE updates)
# - {<Value>} # The Value of the Type
# (this option is only for agg UVE updates)
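                # Illustrative (hypothetical) payloads for the cases above,
                # using a made-up struct name "ExampleStruct":
                #   {"type": None}                                  # whole UVE deleted (agg only)
                #   {"type": "ExampleStruct", "value": None}        # this struct deleted
                #   {"type": "ExampleStruct", "value": {}}          # struct exists, value not carried (raw only)
                #   {"type": "ExampleStruct", "value": {"f": 1}}    # inline value (agg only)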
if uv["type"] is None:
# TODO: Handling of delete UVE case
return False
if uv["value"] is None:
if uv["type"] in self._uvedb[coll][gen][tab][rkey]:
del self._uvedb[coll][gen][tab][rkey][uv["type"]]
if not len(self._uvedb[coll][gen][tab][rkey]):
del self._uvedb[coll][gen][tab][rkey]
removed = True
if not removed:
if uv["type"] in self._uvedb[coll][gen][tab][rkey]:
self._uvedb[coll][gen][tab][rkey][uv["type"]]["c"] +=1
else:
self._uvedb[coll][gen][tab][rkey][uv["type"]] = {}
self._uvedb[coll][gen][tab][rkey][uv["type"]]["c"] = 1
self._uvedb[coll][gen][tab][rkey][uv["type"]]["u"] = \
uuid.uuid1(self._ip_code)
chg[uv["key"]] = { uv["type"] : uv["value"] }
# Record stats on UVE Keys being processed
if not self._uveout.has_key(tab):
self._uveout[tab] = {}
if self._uveout[tab].has_key(uv["key"]):
self._uveout[tab][uv["key"]] += 1
else:
self._uveout[tab][uv["key"]] = 1
# Record stats on the input UVE Notifications
if not self._uvein.has_key(tab):
self._uvein[tab] = {}
if not self._uvein[tab].has_key(coll):
self._uvein[tab][coll] = {}
if not self._uvein[tab][coll].has_key(gen):
self._uvein[tab][coll][gen] = {}
if not self._uvein[tab][coll][gen].has_key(uv["type"]):
self._uvein[tab][coll][gen][uv["type"]] = 1
else:
self._uvein[tab][coll][gen][uv["type"]] += 1
else:
# Record stats on UVE Keys being processed
for tab in self._uvedb[coll][gen].keys():
for rkey in self._uvedb[coll][gen][tab].keys():
uk = tab + ":" + rkey
if not self._uveout.has_key(tab):
self._uveout[tab] = {}
if self._uveout[tab].has_key(uk):
self._uveout[tab][uk] += 1
else:
self._uveout[tab][uk] = 1
                        # when a generator is deleted, we need to
                        # notify for *ALL* its UVEs
chg[uk] = None
del self._uvedb[coll][gen]
except Exception as ex:
template = "An exception of type {0} in uve proc . Arguments:\n{1!r}"
messag = template.format(type(ex).__name__, ex.args)
self._logger.info("%s" % messag)
return False
else:
self._callback(self._partno, chg)
return True
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(levelname)s %(message)s')
workers = {}
brokers = "localhost:9092,localhost:9093,localhost:9094"
group = "workers"
kafka = KafkaClient(brokers,str(os.getpid()))
cons = SimpleConsumer(kafka, group, "ctrl")
cons.provide_partition_info()
print "Starting control"
end_ready = False
while end_ready == False:
try:
while True:
part, mmm = cons.get_message(timeout=None)
mm = mmm.message
print "Consumed ctrl " + str(mm)
if mm.value == "start":
if workers.has_key(mm.key):
print "Dup partition %s" % mm.key
raise ValueError
else:
ph = UveStreamProc(brokers, int(mm.key), "uve-" + mm.key, "alarm-x" + mm.key, logging)
ph.start()
workers[int(mm.key)] = ph
elif mm.value == "stop":
#import pdb; pdb.set_trace()
if workers.has_key(int(mm.key)):
ph = workers[int(mm.key)]
gevent.kill(ph)
res,db = ph.get()
print "Returned " + str(res)
print "State :"
for k,v in db.iteritems():
print "%s -> %s" % (k,str(v))
del workers[int(mm.key)]
else:
end_ready = True
cons.commit()
gevent.sleep(2)
break
except TypeError:
gevent.sleep(0.1)
except common.FailedPayloadsError as ex:
print "Payload Error: " + str(ex.args)
gevent.sleep(0.1)
lw=[]
for key, value in workers.iteritems():
gevent.kill(value)
lw.append(value)
gevent.joinall(lw)
print "Ending Consumers"
|
facetothefate/contrail-controller
|
src/opserver/partition_handler.py
|
Python
|
apache-2.0
| 23,447 | 0.00917 |
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of a woob module.
#
# This woob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This woob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this woob module. If not, see <http://www.gnu.org/licenses/>.
from woob.browser.pages import JsonPage, pagination, HTMLPage
from woob.browser.elements import ItemElement, DictElement, method
from woob.browser.filters.json import Dict
from woob.browser.filters.html import XPath
from woob.browser.filters.standard import (CleanText, CleanDecimal, Currency,
Env, Regexp, Field, BrowserURL)
from woob.capabilities.base import NotAvailable, NotLoaded
from woob.capabilities.housing import (Housing, HousingPhoto, City,
UTILITIES, ENERGY_CLASS, POSTS_TYPES,
ADVERT_TYPES)
from woob.capabilities.address import PostalAddress
from woob.tools.capabilities.housing.housing import PricePerMeterFilter
from woob.tools.json import json
from woob.exceptions import ActionNeeded
from .constants import TYPES, RET
import codecs
import decimal
class ErrorPage(HTMLPage):
def on_load(self):
raise ActionNeeded("Please resolve the captcha")
class CitiesPage(JsonPage):
@method
class iter_cities(DictElement):
ignore_duplicate = True
class item(ItemElement):
klass = City
obj_id = Dict('Params/ci')
obj_name = Dict('Display')
class SearchResultsPage(HTMLPage):
def __init__(self, *args, **kwargs):
HTMLPage.__init__(self, *args, **kwargs)
json_content = Regexp(CleanText('//script'),
r"window\[\"initialData\"\] = JSON.parse\(\"({.*})\"\);window\[\"tags\"\]")(self.doc)
json_content = codecs.unicode_escape_decode(json_content)[0]
json_content = json_content.encode('utf-8', 'surrogatepass').decode('utf-8')
self.doc = json.loads(json_content)
@pagination
@method
class iter_housings(DictElement):
item_xpath = 'cards/list'
# Prevent DataError on same ids
ignore_duplicate = True
def next_page(self):
page_nb = Dict('navigation/pagination/page')(self)
max_results = Dict('navigation/counts/count')(self)
results_per_page = Dict('navigation/pagination/resultsPerPage')(self)
if int(max_results) / int(results_per_page) > int(page_nb):
return BrowserURL('search', query=Env('query'), page_number=int(page_nb) + 1)(self)
# TODO handle bellesdemeures
class item(ItemElement):
klass = Housing
def condition(self):
return (
Dict('cardType')(self) not in ['advertising', 'ali', 'localExpert']
and Dict('id', default=False)(self)
and Dict('classifiedURL', default=False)(self)
)
obj_id = Dict('id')
def obj_type(self):
idType = int(Env('query_type')(self))
type = next(k for k, v in TYPES.items() if v == idType)
if type == POSTS_TYPES.FURNISHED_RENT:
# SeLoger does not let us discriminate between furnished and not furnished.
return POSTS_TYPES.RENT
return type
def obj_title(self):
return "{} - {} - {}".format(Dict('estateType')(self),
" / ".join(Dict('tags')(self)),
Field('location')(self))
def obj_advert_type(self):
is_agency = Dict('contact/agencyId', default=False)(self)
if is_agency:
return ADVERT_TYPES.PROFESSIONAL
else:
return ADVERT_TYPES.PERSONAL
obj_utilities = UTILITIES.EXCLUDED
def obj_photos(self):
photos = []
for photo in Dict('photos')(self):
photos.append(HousingPhoto(photo))
return photos
def obj_location(self):
quartier = Dict('districtLabel')(self)
quartier = quartier if quartier else ''
ville = Dict('cityLabel')(self)
ville = ville if ville else ''
cp = Dict('zipCode')(self)
cp = cp if cp else ''
return u'%s %s (%s)' % (quartier, ville, cp)
obj_url = Dict('classifiedURL')
obj_text = Dict('description')
obj_cost = CleanDecimal(Dict('pricing/price', default=NotLoaded), default=NotLoaded)
obj_currency = Currency(Dict('pricing/price', default=NotLoaded), default=NotLoaded)
obj_price_per_meter = CleanDecimal(Dict('pricing/squareMeterPrice'), default=PricePerMeterFilter)
class HousingPage(HTMLPage):
def __init__(self, *args, **kwargs):
HTMLPage.__init__(self, *args, **kwargs)
json_content = Regexp(
CleanText('//script'),
r"window\[\"initialData\"\] = JSON.parse\(\"({.*})\"\);"
)(self.doc)
json_content = codecs.unicode_escape_decode(json_content)[0]
json_content = json_content.encode('utf-8', 'surrogatepass').decode('utf-8')
self.doc = {
"advert": json.loads(json_content).get('advert', {}).get('mainAdvert', {}),
"agency": json.loads(json_content).get('agency', {})
}
@method
class get_housing(ItemElement):
klass = Housing
def parse(self, el):
self.agency_doc = el['agency']
self.el = el['advert']
obj_id = Dict('id')
def obj_house_type(self):
naturebien = Dict('propertyNatureId')(self)
try:
return next(k for k, v in RET.items() if v == naturebien)
except StopIteration:
return NotLoaded
def obj_type(self):
idType = Dict('idTransactionType')(self)
try:
type = next(k for k, v in TYPES.items() if v == idType)
if type == POSTS_TYPES.FURNISHED_RENT:
# SeLoger does not let us discriminate between furnished and not furnished.
return POSTS_TYPES.RENT
return type
except StopIteration:
return NotAvailable
def obj_advert_type(self):
if 'Agences' in self.agency_doc['type']:
return ADVERT_TYPES.PROFESSIONAL
else:
return ADVERT_TYPES.PERSONAL
def obj_photos(self):
photos = []
for photo in Dict('photoList')(self):
photos.append(HousingPhoto(photo['fullscreenUrl']))
return photos
obj_title = Dict('title')
def obj_location(self):
address = Dict('address')(self)
return u'%s %s (%s)' % (address['neighbourhood'], address['city'],
address['zipCode'])
def obj_address(self):
address = Dict('address')(self)
p = PostalAddress()
p.street = address['street']
p.postal_code = address['zipCode']
p.city = address['city']
p.full_address = Field('location')(self)
return p
obj_text = Dict('description')
def obj_cost(self):
propertyPrice = Dict('propertyPrice')(self)
return decimal.Decimal(propertyPrice['prix'])
def obj_currency(self):
propertyPrice = Dict('propertyPrice')(self)
return propertyPrice['priceUnit']
obj_price_per_meter = PricePerMeterFilter()
obj_area = CleanDecimal(Dict('surface'))
def obj_url(self):
return self.page.url
def obj_phone(self):
return self.agency_doc.get('agencyPhoneNumber', {}).get('value',
NotAvailable)
def obj_utilities(self):
return NotLoaded # TODO
obj_bedrooms = CleanDecimal(Dict('bedroomCount'))
obj_rooms = CleanDecimal(Dict('numberOfRooms'))
class HousingJsonPage(JsonPage):
@method
class get_housing(ItemElement):
klass = Housing
def obj_DPE(self):
DPE = Dict("energie", default="")(self)
if DPE['status'] > 0:
return NotAvailable
else:
return getattr(ENERGY_CLASS, DPE['lettre'], NotAvailable)
def obj_GES(self):
GES = Dict("ges", default="")(self)
if GES['status'] > 0:
return NotAvailable
else:
return getattr(ENERGY_CLASS, GES['lettre'], NotAvailable)
def obj_details(self):
details = {}
for c in Dict('categories')(self):
if c['criteria']:
details[c['name']] = ' / '.join([_['value'] for _ in c['criteria']])
for _, c in Dict('infos_acquereur')(self).items():
for key, value in c.items():
details[key] = value
return details
|
Phyks/Flatisfy
|
modules/seloger/pages.py
|
Python
|
mit
| 9,785 | 0.002146 |
# perf trace event handlers, generated by perf trace -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
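# Typical usage (assumed, not stated in the original header): record a trace
# containing the events handled below, then replay it through this script,
# for example with a recent perf:
#   perf record -e irq:softirq_entry -e kmem:kmalloc -a -- sleep 1
#   perf script -s check-perf-trace.py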
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
|
droidzone/Supernova-Kernel
|
tools/tools/perf/scripts/python/check-perf-trace.py
|
Python
|
gpl-2.0
| 2,501 | 0.02479 |
# flake8: noqa
import sys
import toml
import log
from .uploader import DropboxUploader
from .file_manager import DirectoryPoller, VolumePoller
SECT = 'flysight-manager'
class ConfigError(Exception):
pass
class FlysightConfig(object):
pass
class DropboxConfig(object):
pass
class VimeoConfig(object):
pass
class YoutubeConfig(object):
pass
class SendgridConfig(object):
pass
class PushoverConfig(object):
pass
class CameraConfig(object):
def __init__(self, name, cfg):
self._name = name
self._mountpoint = cfg["mountpoint"]
self._uuid = cfg["uuid"]
@property
def mountpoint(self):
return self._mountpoint
@property
def uuid(self):
return self._uuid
class GoProConfig(object):
def __init__(self):
self._cameras = {}
def add_camera(self, name, config):
self._cameras[name] = CameraConfig(name, config)
def cameras(self):
return self._cameras
class GswoopConfig(object):
pass
def get_poller(ty):
if ty == 'flysight':
get_sect = lambda cfg: cfg.flysight_cfg
elif ty == 'gopro':
get_sect = lambda cfg: cfg
else:
raise "Unknown ty: %s" % (repr(ty))
platform = sys.platform
if platform.startswith('linux'):
return lambda name, cfg: VolumePoller(name, get_sect(cfg).uuid, ty)
elif platform == 'darwin':
return lambda name, cfg: DirectoryPoller(name, get_sect(cfg).mountpoint, ty)
else:
        raise ValueError('Unknown platform: %s' % (repr(platform)))
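# Illustrative use of get_poller (added comment, not from the original source;
# names are hypothetical): the returned factory takes (name, cfg) and builds
# the platform-appropriate poller, e.g.
#   poller = get_poller('flysight')('flysight', configuration)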
@log.make_loggable
class Configuration(object):
"""Stub class to be replaced by a real configuration system"""
CONFIG_FILE = 'flysight-manager.ini'
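    # Sketch of the expected TOML layout (illustrative comment, not from the
    # original source; values are made up). Every section referenced in
    # load_config() needs its own "enabled" flag:
    #
    #   [flysight-manager]
    #   storage_backend = "dropbox"
    #
    #   [dropbox]
    #   enabled = true
    #   token = "dropbox-api-token"
    #
    #   [flysight]
    #   enabled = true
    #   mountpoint = "/mnt/flysight"
    #   uuid = "0000-0001"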
def __init__(self):
self.flysight_enabled = False
self.gopro_enabled = False
self.gswoop_enabled = False
self.vimeo_enabled = False
self.youtube_enabled = False
self.sendgrid_enabled = False
self.noop = False
self.preserve = False
self.processors = []
self.info("Loading config from %s" % self.CONFIG_FILE)
cfg = toml.load(open(self.CONFIG_FILE, 'rb'))
self.load_config(cfg)
self._uploader = None
if self.gswoop_enabled:
self.info("Enabling gswoop processor")
self.processors.append("gswoop")
def load_config(self, cfg):
"""Validate the configuration"""
get = lambda x: cfg[SECT][x]
# TODO: Confirm how this handles bools
enabled = lambda x: cfg[x]["enabled"]
backend = get('storage_backend')
if backend == 'dropbox':
self.storage_backend = 'dropbox'
self.dropbox_cfg = self.load_dropbox_opts(cfg)
else:
raise ConfigError("Unknown storage_backend: %s" % backend)
if enabled("flysight"):
self.flysight_enabled = True
self.flysight_cfg = self.load_flysight_opts(cfg)
if enabled("gopro"):
self.gopro_enabled = True
self.gopro_cfg = self.load_gopro_opts(cfg)
if enabled("gswoop"):
self.gswoop_enabled = True
self.gswoop_cfg = self.load_gswoop_opts(cfg)
if enabled("vimeo"):
self.vimeo_enabled = True
self.vimeo_cfg = self.load_vimeo_opts(cfg)
if enabled("youtube"):
self.youtube_enabled = True
self.youtube_cfg = self.load_youtube_opts(cfg)
if enabled("sendgrid"):
self.sendgrid_enabled = True
self.sendgrid_cfg = self.load_sendgrid_opts(cfg)
if enabled("pushover"):
self.pushover_enabled = True
self.pushover_cfg = self.load_pushover_opts(cfg)
def load_dropbox_opts(self, cfg):
get = lambda x: cfg["dropbox"][x]
_cfg = DropboxConfig()
_cfg.token = get("token")
return _cfg
def load_vimeo_opts(self, cfg):
get = lambda x: cfg["vimeo"][x]
_cfg = VimeoConfig()
_cfg.token = get("token")
return _cfg
def load_sendgrid_opts(self, cfg):
get = lambda x: cfg["sendgrid"][x]
_cfg = SendgridConfig()
_cfg.token = get("token")
_cfg.from_addr = get("from")
_cfg.to_addr = get("to")
_cfg.subject = get("subject")
return _cfg
def load_pushover_opts(self, cfg):
get = lambda x: cfg["pushover"][x]
_cfg = PushoverConfig()
_cfg.token = get("token")
_cfg.user = get("user")
return _cfg
def load_youtube_opts(self, cfg):
get = lambda x: cfg["youtube"][x]
_cfg = YoutubeConfig()
_cfg.access_token = get("access_token")
_cfg.client_id = get("client_id")
_cfg.client_secret = get("client_secret")
_cfg.refresh_token = get("refresh_token")
_cfg.token_uri = get("token_uri")
return _cfg
def load_gopro_opts(self, cfg):
_cfg = GoProConfig()
# Extract the enabled key, then pray that anything else is a camera
for k, v in cfg["gopro"].items():
if isinstance(v, dict):
_cfg.add_camera(k, v)
return _cfg
def load_flysight_opts(self, cfg):
get = lambda x: cfg["flysight"][x]
_cfg = FlysightConfig()
_cfg.mountpoint = get("mountpoint")
_cfg.uuid = get("uuid")
return _cfg
def load_gswoop_opts(self, cfg):
get = lambda x: cfg["gswoop"][x]
_cfg = GswoopConfig()
_cfg.binary = get("binary")
return _cfg
@property
def uploader(self):
if not self._uploader:
if self.storage_backend == 'dropbox':
self._uploader = DropboxUploader(self.dropbox_cfg.token, self.noop)
else:
raise ConfigError('Unknown storage backend: %s' % self.storage_backend)
return self._uploader
def update_with_args(self, args):
if args.noop:
self.debug("Setting noop flag")
self.noop = args.noop
if args.preserve:
self.debug("Setting preserve flag")
self.preserve = args.preserve
|
richo/flysight-manager
|
flysight_manager/config.py
|
Python
|
mit
| 6,142 | 0.002279 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-2022 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from PyQt5 import QtCore, QtGui, QtWidgets
import l5r.widgets as widgets
import l5r.api as api
import l5r.api.character.rankadv
class NextRankDlg(QtWidgets.QDialog):
def __init__(self, pc, parent=None):
super(NextRankDlg, self).__init__(parent)
self.pc = pc
self.build_ui()
self.connect_signals()
# self.setWindowFlags(QtCore.Qt.Tool)
self.setWindowTitle(self.tr("L5R: CM - Advance Rank"))
def build_ui(self):
vbox = QtWidgets.QVBoxLayout(self)
vbox.addWidget(QtWidgets.QLabel(self.tr("""\
You can now advance your Rank,
what would you want to do?
""")))
self.bt_go_on = QtWidgets.QPushButton(
self.tr("Advance in my current school")
)
self.bt_new_school = QtWidgets.QPushButton(
self.tr("Join a new school"))
for bt in [self.bt_go_on, self.bt_new_school]:
bt.setMinimumSize(QtCore.QSize(0, 38))
vbox.addWidget(self.bt_go_on)
vbox.addWidget(self.bt_new_school)
vbox.setSpacing(12)
is_path = api.data.schools.is_path(
api.character.schools.get_current()
)
former_school_adv = api.character.rankadv.get_former_school()
former_school = api.data.schools.get(former_school_adv.school) if former_school_adv else None
# check if the PC is following an alternate path
if is_path:
            # offer to go back
if former_school:
self.bt_go_on.setText(self.tr("Continue ") + former_school.name)
else:
self.bt_go_on.setText(self.tr("Go back to your old school"))
self.bt_go_on.setEnabled(former_school != None)
def connect_signals(self):
self.bt_go_on.clicked.connect(self.simply_go_on)
self.bt_new_school.clicked.connect(self.join_new_school)
def join_new_school(self):
dlg = widgets.SchoolChooserDialog(self)
if dlg.exec_() == QtWidgets.QDialog.Rejected:
return
self.accept()
def simply_go_on(self):
is_path = api.data.schools.is_path(
api.character.schools.get_current()
)
# check if the PC is following an alternate path
if is_path:
            # the PC wants to go back to the old school.
# find the first school that is not a path
api.character.rankadv.leave_path()
else:
api.character.rankadv.advance_rank()
self.accept()
def test():
import sys
app = QtWidgets.QApplication(sys.argv)
dlg = NextRankDlg(None, None)
dlg.show()
sys.exit(app.exec_())
if __name__ == '__main__':
test()
|
OpenNingia/l5r-character-manager-3
|
l5r/dialogs/newrankdlg.py
|
Python
|
gpl-3.0
| 3,473 | 0.001152 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='ClassObj',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('expression', models.CharField(max_length=255)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='DataStoreBase',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='LayerObj',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=255)),
('layer_type', models.SmallIntegerField(choices=[(3, b'raster'), (2, b'vector polygon'), (1, b'vector line'), (0, b'vector point')])),
('projection', models.CharField(default=b'init=epsg:4326', help_text=b'PROJ4 definition of the layer projection', max_length=255)),
('data', models.CharField(help_text=b'Full filename of the spatial data to process.', max_length=255)),
('class_item', models.CharField(help_text=b'Item name in attribute table to use for class lookups.', max_length=255, blank=True)),
('ows_abstract', models.TextField(blank=True)),
('ows_enable_request', models.CharField(default=b'*', max_length=255)),
('ows_include_items', models.CharField(default=b'all', max_length=50, blank=True)),
('gml_include_items', models.CharField(default=b'all', max_length=50, blank=True)),
('ows_opaque', models.SmallIntegerField(null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MapLayer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.SmallIntegerField(choices=[(0, b'off'), (1, b'on'), (2, b'default')])),
('layer_obj', models.ForeignKey(to='djangomapserver.LayerObj')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MapObj',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(help_text=b'Unique identifier.', max_length=255)),
('status', models.SmallIntegerField(choices=[(0, b'off'), (1, b'on'), (2, b'default')])),
('projection', models.CharField(default=b'init=epsg:4326', help_text=b'PROJ4 definition of the map projection', max_length=255)),
('units', models.SmallIntegerField(blank=True, choices=[(5, b'Decimal degrees')])),
('size', models.CommaSeparatedIntegerField(help_text=b'Map size in pixel units', max_length=10)),
('cell_size', models.FloatField(help_text=b'Pixel size in map units.', null=True, blank=True)),
('image_type', models.CharField(max_length=10, choices=[(b'png', b'png')])),
('ows_sld_enabled', models.BooleanField(default=True)),
('ows_abstract', models.TextField(blank=True)),
('ows_enable_request', models.CharField(default=b'*', max_length=255)),
('ows_encoding', models.CharField(default=b'utf-8', max_length=20)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='MapServerColor',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('red', models.IntegerField(null=True, blank=True)),
('green', models.IntegerField(null=True, blank=True)),
('blue', models.IntegerField(null=True, blank=True)),
('hex_string', models.CharField(max_length=9, blank=True)),
('attribute', models.CharField(max_length=255, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='RectObj',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('max_x', models.FloatField()),
('max_y', models.FloatField()),
('min_x', models.FloatField()),
('min_y', models.FloatField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ShapefileDataStore',
fields=[
('datastorebase_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='djangomapserver.DataStoreBase')),
('path', models.CharField(help_text=b'Path to the directory holding shapefiles.', max_length=255)),
],
options={
},
bases=('djangomapserver.datastorebase',),
),
migrations.CreateModel(
name='SpatialiteDataStore',
fields=[
('datastorebase_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='djangomapserver.DataStoreBase')),
('path', models.CharField(help_text=b'Path to the Spatialite database file.', max_length=255)),
],
options={
},
bases=('djangomapserver.datastorebase',),
),
migrations.CreateModel(
name='StyleObj',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('class_obj', models.ForeignKey(to='djangomapserver.ClassObj')),
('color', models.ForeignKey(to='djangomapserver.MapServerColor')),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='mapobj',
name='extent',
field=models.ForeignKey(help_text=b"Map's spatial extent.", to='djangomapserver.RectObj'),
preserve_default=True,
),
migrations.AddField(
model_name='mapobj',
name='image_color',
field=models.ForeignKey(blank=True, to='djangomapserver.MapServerColor', help_text=b'Initial map background color.', null=True),
preserve_default=True,
),
migrations.AddField(
model_name='mapobj',
name='layers',
field=models.ManyToManyField(to='djangomapserver.LayerObj', null=True, through='djangomapserver.MapLayer', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='maplayer',
name='map_obj',
field=models.ForeignKey(to='djangomapserver.MapObj'),
preserve_default=True,
),
migrations.AddField(
model_name='maplayer',
name='style',
field=models.ForeignKey(blank=True, to='djangomapserver.StyleObj', null=True),
preserve_default=True,
),
migrations.AddField(
model_name='layerobj',
name='data_store',
field=models.ForeignKey(to='djangomapserver.DataStoreBase'),
preserve_default=True,
),
migrations.AddField(
model_name='layerobj',
name='extent',
field=models.ForeignKey(help_text=b"Layer's spatial extent.", to='djangomapserver.RectObj'),
preserve_default=True,
),
migrations.AddField(
model_name='classobj',
name='layer_obj',
field=models.ForeignKey(to='djangomapserver.LayerObj'),
preserve_default=True,
),
]
|
ricardogsilva/django-mapserver
|
djangomapserver/migrations/0001_initial.py
|
Python
|
bsd-2-clause
| 8,539 | 0.00445 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 30, transform = "None", sigma = 0.0, exog_count = 20, ar_order = 12);
|
antoinecarme/pyaf
|
tests/artificial/transf_None/trend_MovingAverage/cycle_30/ar_12/test_artificial_32_None_MovingAverage_30_12_20.py
|
Python
|
bsd-3-clause
| 264 | 0.087121 |
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2014, Erkan Ozgur Yilmaz
#
# This module is part of oyProjectManager and is released under the BSD 2
# License: http://www.opensource.org/licenses/BSD-2-Clause
"""
Database Module
===============
This is where all the magic happens.
.. versionadded:: 0.2.0
SQLite3 Database:
To hold the information about all the data created
:class:`~oyProjectManager.models.project.Project`\ s,
:class:`~oyProjectManager.models.sequence.Sequence`\ s,
:class:`~oyProjectManager.models.shot.Shot`\ s,
:class:`~oyProjectManager.models.asset.Asset`\ s and
:class:`~oyProjectManager.models.version.VersionType`\ s
, there is a ".metadata.db" file in the repository root. This SQLite3
database has all the information about everything.
With this new extension it is much faster to query any data needed.
Querying data is very simple and fun. To get any kind of data from the
database, just call ``db.setup()`` and then use ``db.query`` to get the
data.
For a simple example, let's get all the shots for a Sequence called
"TEST_SEQ" in the "TEST_PROJECT"::
from oyProjectManager import db
from oyProjectManager import Project, Sequence, Shot
# setup the database session
db.setup()
all_shots = Shot.query().join(Sequence).\
        filter(Sequence.project.name=="TEST_PROJECT").\
filter(Shot.sequence.name=="TEST_SEQ").all()
that's it.
"""
import os
import logging
import sqlalchemy
import oyProjectManager
from oyProjectManager.db.declarative import Base
# SQLAlchemy database engine
engine = None
# SQLAlchemy session manager
session = None
query = None
# SQLAlchemy metadata
metadata = None
database_url = None
# create a logger
logger = logging.getLogger(__name__)
#logger.setLevel(logging.WARNING)
logger.setLevel(logging.DEBUG)
def setup(database_url_in=None):
"""Utility function that helps to connect the system to the given database.
Returns the created session
:param database_url_in: The database address, default is None. If the
database_url is skipped or given as None, the default database url
from the :mod:`oyProjectManager.config` will be used. This is good,
just call ``db.setup()`` and then use ``db.session`` and ``db.query``
to get the data.
:returns: sqlalchemy.orm.session
"""
global engine
global session
global query
global metadata
global database_url
# create engine
# TODO: create tests for this
if database_url_in is None:
logger.debug("using the default database_url from the config file")
# use the default database
conf = oyProjectManager.conf
database_url_in = conf.database_url
# expand user and env variables if any
    # TODO: because the dialect part and the address part are now coming
    # from one source, it is not possible to expand any variables in the path,
# try to use SQLAlchemy to separate the dialect and the address part and
# expand any data and then merge it again
#database_url_in = os.path.expanduser(
# os.path.expandvars(
# os.path.expandvars(
# database_url_in
# )
# )
#)
while "$" in database_url_in or "~" in database_url_in:
database_url_in = os.path.expanduser(
os.path.expandvars(
database_url_in
)
)
database_url = database_url_in
logger.debug("setting up database in %s" % database_url)
engine = sqlalchemy.create_engine(database_url, echo=False)
# create the tables
metadata = Base.metadata
metadata.create_all(engine)
# create the Session class
Session = sqlalchemy.orm.sessionmaker(bind=engine)
# create and save session object to session
session = Session()
query = session.query
# initialize the db
__init_db__()
# TODO: create a test to check if the returned session is session
return session
def __init_db__():
"""initializes the just setup database
It adds:
- Users
- VersionTypes
to the database.
"""
logger.debug("db is newly created, initializing the db")
global query
global session
# get the users from the config
from oyProjectManager import conf
# ------------------------------------------------------
# create the users
from oyProjectManager.models.auth import User
# get all users from db
users_from_db = query(User).all()
for user_data in conf.users_data:
name = user_data.get("name")
initials = user_data.get("initials")
email = user_data.get("email")
user_from_config = User(name, initials, email)
if user_from_config not in users_from_db:
session.add(user_from_config)
# ------------------------------------------------------
# add the VersionTypes
from oyProjectManager.models.version import VersionType
version_types_from_db = query(VersionType).all()
for version_type in conf.version_types:
version_type_from_conf = VersionType(**version_type)
if version_type_from_conf not in version_types_from_db:
session.add(version_type_from_conf)
session.commit()
logger.debug("finished initialization of the db")
|
dshlai/oyprojectmanager
|
oyProjectManager/db/__init__.py
|
Python
|
bsd-2-clause
| 5,475 | 0.009315 |
# --------------------------------------------------------------------------- #
# CUBECOLOURDIALOG Widget wxPython IMPLEMENTATION
#
# Python Code By:
#
# Andrea Gavana, @ 16 Aug 2007
# Latest Revision: 14 Apr 2010, 12.00 GMT
#
#
# TODO List
#
# 1. Find A Way To Reduce Flickering On The 2 ColourPanels;
#
# 2. See Why wx.GCDC Doesn't Work As I Thought (!). It Looks Slow As A Turtle,
# But Probably I Am Doing Something Wrong While Painting The Alpha Textures.
#
#
# For All Kind Of Problems, Requests Of Enhancements And Bug Reports, Please
# Write To Me At:
#
# andrea.gavana@gmail.com
# gavana@kpo.kz
#
# Or, Obviously, To The wxPython Mailing List!!!
#
#
# End Of Comments
# --------------------------------------------------------------------------- #
"""
CubeColourDialog is an alternative implementation of `wx.ColourDialog`.
Description
===========
The CubeColourDialog is an alternative implementation of `wx.ColourDialog`. It
offers more functionality than the default wxPython dialog, and it can be used
as a drop-in replacement for `wx.ColourDialog`, with exactly the same syntax
and methods.
Some features:
- RGB components may be controlled using spin controls or with mouse gestures
on a 3D RGB cube, with the 3 components laying on the X, Y, Z axes;
- HSB components may be controlled using spin controls or with mouse gestures
on a 2D colour wheel;
- Brightness has its own vertical slider to play with;
- The colour alpha channel can be controlled using another vertical slider, or
via spin control;
- The colour alpha channel controls can be completely hidden at startup or the
choice to use the alpha channel can be left to the user while playing with the
dialog, via a simple `wx.CheckBox`;
- The "old colour" and "new colour" are displayed in two small custom panel,
which support alpha transparency and texture;
- CubeColourDialog displays also the HTML colour code in hexadecimal format;
- When available, a corresponding "Web Safe" colour is generated using a 500
web colours "database" (a dictionary inside the widget source code). Web Safe
colours are recognized by all the browsers;
- When available, a corresponding "HTML name" for the selected colour is displayed,
by using the same 500 web colours "database";
- When available, a corresponding "Microsoft Access Code" for the selected colour
is displayed, by using the same 500 web colours "database".
And much more.
Window Styles
=============
This class supports the following window styles:
================== =========== ==================================================
Window Styles Hex Value Description
================== =========== ==================================================
``CCD_SHOW_ALPHA`` 0x1 Show the widget used to control colour alpha channels in `CubeColourDialog`.
================== =========== ==================================================
Events Processing
=================
`No custom events are available for this class.`
License And Version
===================
CubeColourDialog is distributed under the wxPython license.
Latest Revision: Andrea Gavana @ 14 Apr 2010, 12.00 GMT
Version 0.3.
"""
__docformat__ = "epytext"
#----------------------------------------------------------------------
# Beginning Of CUBECOLOURDIALOG wxPython Code
#----------------------------------------------------------------------
import wx
import colorsys
from math import pi, sin, cos, sqrt, atan2
from wx.lib.embeddedimage import PyEmbeddedImage
# Define a translation string
_ = wx.GetTranslation
# Show the alpha control in the dialog
CCD_SHOW_ALPHA = 1
""" Show the widget used to control colour alpha channels in `CubeColourDialog`. """
# Radius of the HSB colour wheel
RADIUS = 100
""" Radius of the HSB colour wheel. """
# Width of the mouse-controlled colour pointer
RECT_WIDTH = 5
""" Width of the mouse-controlled colour pointer. """
# Dictionary keys for the RGB colour cube
RED, GREEN, BLUE = 0, 1, 2
""" Dictionary keys for the RGB colour cube. """
Vertex = wx.Point(95, 109)
Top = wx.Point(95, 10)
Left = wx.Point(16, 148)
Right = wx.Point(174, 148)
colourAttributes = ["r", "g", "b", "h", "s", "v"]
colourMaxValues = [255, 255, 255, 359, 255, 255]
checkColour = wx.Colour(200, 200, 200)
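# Descriptive comment (added for clarity, inferred from the module docstring):
# each HTMLCodes entry maps an HTML hex string to a 3-item list of
# [colour name, Microsoft Access colour code (decimal), closest web-safe hex or ''].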
HTMLCodes = {'#B0171F': ['Indian red', '2037680', ''],
'#DC143C': ['Crimson', '3937500', '#CC0033'],
'#FFB6C1': ['Lightpink', '12695295', '#FFCCCC'],
'#FFAEB9': ['Lightpink 1', '12168959', ''],
'#EEA2AD': ['Lightpink 2', '11379438', ''],
'#CD8C95': ['Lightpink 3', '9800909', ''],
'#8B5F65': ['Lightpink 4', '6643595', ''],
'#FFC0CB': ['Pink', '13353215', '#FFCCCC'],
'#FFB5C5': ['Pink 1', '12957183', ''],
'#EEA9B8': ['Pink 2', '12102126', ''],
'#CD919E': ['Pink 3', '10392013', ''],
'#8B636C': ['Pink 4', '7103371', ''],
'#DB7093': ['Palevioletred', '9662683', '#CC6699'],
'#FF82AB': ['Palevioletred 1', '11240191', ''],
'#EE799F': ['Palevioletred 2', '10451438', ''],
'#CD6889': ['Palevioletred 3', '9005261', ''],
'#8B475D': ['Palevioletred 4', '6113163', ''],
'#FFF0F5': ['Lavenderblush 1 (lavenderblush)', '16118015', '#FFFFFF'],
'#EEE0E5': ['Lavenderblush 2', '15065326', ''],
'#CDC1C5': ['Lavenderblush 3', '12960205', ''],
'#8B8386': ['Lavenderblush 4', '8815499', ''],
'#FF3E96': ['Violetred 1', '9846527', ''],
'#EE3A8C': ['Violetred 2', '9190126', ''],
'#CD3278': ['Violetred 3', '7877325', ''],
'#8B2252': ['Violetred 4', '5382795', ''],
'#FF69B4': ['Hotpink', '11823615', '#FF66CC'],
'#FF6EB4': ['Hotpink 1', '11824895', ''],
'#EE6AA7': ['Hotpink 2', '10971886', ''],
'#CD6090': ['Hotpink 3', '9461965', ''],
'#8B3A62': ['Hotpink 4', '6437515', ''],
'#872657': ['Raspberry', '5711495', ''],
'#FF1493': ['Deeppink 1 (deeppink)', '9639167', '#FF0099'],
'#EE1289': ['Deeppink 2', '8983278', ''],
'#CD1076': ['Deeppink 3', '7737549', ''],
'#8B0A50': ['Deeppink 4', '5245579', ''],
'#FF34B3': ['Maroon 1', '11744511', ''],
'#EE30A7': ['Maroon 2', '10957038', ''],
'#CD2990': ['Maroon 3', '9447885', ''],
'#8B1C62': ['Maroon 4', '6429835', ''],
'#C71585': ['Mediumvioletred', '8721863', '#CC0066'],
'#D02090': ['Violetred', '9445584', ''],
'#DA70D6': ['Orchid', '14053594', '#CC66CC'],
'#FF83FA': ['Orchid 1', '16417791', ''],
'#EE7AE9': ['Orchid 2', '15301358', ''],
'#CD69C9': ['Orchid 3', '13199821', ''],
'#8B4789': ['Orchid 4', '8996747', ''],
'#D8BFD8': ['Thistle', '14204888', '#CCCCCC'],
'#FFE1FF': ['Thistle 1', '16769535', ''],
'#EED2EE': ['Thistle 2', '15651566', ''],
'#CDB5CD': ['Thistle 3', '13481421', ''],
'#8B7B8B': ['Thistle 4', '9141131', ''],
'#FFBBFF': ['Plum 1', '16759807', ''],
'#EEAEEE': ['Plum 2', '15642350', ''],
'#CD96CD': ['Plum 3', '13473485', ''],
'#8B668B': ['Plum 4', '9135755', ''],
'#DDA0DD': ['Plum', '14524637', '#CC99CC'],
'#EE82EE': ['Violet', '15631086', '#FF99FF'],
'#FF00FF': ['Magenta (fuchsia)', '16711935', '#FF00FF'],
'#EE00EE': ['Magenta 2', '15597806', ''],
'#CD00CD': ['Magenta 3', '13435085', ''],
'#8B008B': ['Magenta 4 (darkmagenta)', '9109643', '#990099'],
'#800080': ['Purple', '8388736', '#990099'],
'#BA55D3': ['Mediumorchid', '13850042', '#CC66CC'],
'#E066FF': ['Mediumorchid 1', '16738016', ''],
'#D15FEE': ['Mediumorchid 2', '15622097', ''],
'#B452CD': ['Mediumorchid 3', '13456052', ''],
'#7A378B': ['Mediumorchid 4', '9123706', ''],
'#9400D3': ['Darkviolet', '13828244', '#9900CC'],
'#9932CC': ['Darkorchid', '13382297', '#9933CC'],
'#BF3EFF': ['Darkorchid 1', '16727743', ''],
'#B23AEE': ['Darkorchid 2', '15612594', ''],
'#9A32CD': ['Darkorchid 3', '13447834', ''],
'#68228B': ['Darkorchid 4', '9118312', ''],
'#4B0082': ['Indigo', '8519755', '#330099'],
'#8A2BE2': ['Blueviolet', '14822282', '#9933FF'],
'#9B30FF': ['Purple 1', '16724123', ''],
'#912CEE': ['Purple 2', '15608977', ''],
'#7D26CD': ['Purple 3', '13444733', ''],
'#551A8B': ['Purple 4', '9116245', ''],
'#9370DB': ['Mediumpurple', '14381203', '#9966CC'],
'#AB82FF': ['Mediumpurple 1', '16745131', ''],
'#9F79EE': ['Mediumpurple 2', '15628703', ''],
'#8968CD': ['Mediumpurple 3', '13461641', ''],
'#5D478B': ['Mediumpurple 4', '9127773', ''],
'#483D8B': ['Darkslateblue', '9125192', '#333399'],
'#8470FF': ['Lightslateblue', '16740484', ''],
'#7B68EE': ['Mediumslateblue', '15624315', '#6666FF'],
'#6A5ACD': ['Slateblue', '13458026', '#6666CC'],
'#836FFF': ['Slateblue 1', '16740227', ''],
'#7A67EE': ['Slateblue 2', '15624058', ''],
'#6959CD': ['Slateblue 3', '13457769', ''],
'#473C8B': ['Slateblue 4', '9124935', ''],
'#F8F8FF': ['Ghostwhite', '16775416', '#FFFFFF'],
'#E6E6FA': ['Lavender', '16443110', '#FFFFFF'],
'#0000FF': ['Blue', '16711680', '#0000FF'],
'#0000EE': ['Blue 2', '15597568', ''],
'#0000CD': ['Blue 3 (mediumblue)', '13434880', '#0000CC'],
'#00008B': ['Blue 4 (darkblue)', '9109504', '#000099'],
'#000080': ['Navy', '8388608', '#000099'],
'#191970': ['Midnightblue', '7346457', '#000066'],
'#3D59AB': ['Cobalt', '11229501', ''],
'#4169E1': ['Royalblue', '14772545', '#3366CC'],
'#4876FF': ['Royalblue 1', '16741960', ''],
'#436EEE': ['Royalblue 2', '15625795', ''],
'#3A5FCD': ['Royalblue 3', '13459258', ''],
'#27408B': ['Royalblue 4', '9125927', ''],
'#6495ED': ['Cornflowerblue', '15570276', '#6699FF'],
'#B0C4DE': ['Lightsteelblue', '14599344', '#99CCCC'],
'#CAE1FF': ['Lightsteelblue 1', '16769482', ''],
'#BCD2EE': ['Lightsteelblue 2', '15651516', ''],
'#A2B5CD': ['Lightsteelblue 3', '13481378', ''],
'#6E7B8B': ['Lightsteelblue 4', '9141102', ''],
'#778899': ['Lightslategray', '10061943', '#669999'],
'#708090': ['Slategray', '9470064', '#669999'],
'#C6E2FF': ['Slategray 1', '16769734', ''],
'#B9D3EE': ['Slategray 2', '15651769', ''],
'#9FB6CD': ['Slategray 3', '13481631', ''],
'#6C7B8B': ['Slategray 4', '9141100', ''],
'#1E90FF': ['Dodgerblue 1 (dodgerblue)', '16748574', '#3399FF'],
'#1C86EE': ['Dodgerblue 2', '15631900', ''],
'#1874CD': ['Dodgerblue 3', '13464600', ''],
'#104E8B': ['Dodgerblue 4', '9129488', ''],
'#F0F8FF': ['Aliceblue', '16775408', '#FFFFFF'],
'#4682B4': ['Steelblue', '11829830', '#3399CC'],
'#63B8FF': ['Steelblue 1', '16758883', ''],
'#5CACEE': ['Steelblue 2', '15641692', ''],
'#4F94CD': ['Steelblue 3', '13472847', ''],
'#36648B': ['Steelblue 4', '9135158', ''],
'#87CEFA': ['Lightskyblue', '16436871', '#99CCFF'],
'#B0E2FF': ['Lightskyblue 1', '16769712', ''],
'#A4D3EE': ['Lightskyblue 2', '15651748', ''],
'#8DB6CD': ['Lightskyblue 3', '13481613', ''],
'#607B8B': ['Lightskyblue 4', '9141088', ''],
'#87CEFF': ['Skyblue 1', '16764551', ''],
'#7EC0EE': ['Skyblue 2', '15646846', ''],
'#6CA6CD': ['Skyblue 3', '13477484', ''],
'#4A708B': ['Skyblue 4', '9138250', ''],
'#87CEEB': ['Skyblue', '15453831', '#99CCFF'],
'#00BFFF': ['Deepskyblue 1 (deepskyblue)', '16760576', '#00CCFF'],
'#00B2EE': ['Deepskyblue 2', '15643136', ''],
'#009ACD': ['Deepskyblue 3', '13474304', ''],
'#00688B': ['Deepskyblue 4', '9136128', ''],
'#33A1C9': ['Peacock', '13214003', ''],
'#ADD8E6': ['Lightblue', '15128749', '#99CCFF'],
'#BFEFFF': ['Lightblue 1', '16773055', ''],
'#B2DFEE': ['Lightblue 2', '15654834', ''],
'#9AC0CD': ['Lightblue 3', '13484186', ''],
'#68838B': ['Lightblue 4', '9143144', ''],
'#B0E0E6': ['Powderblue', '15130800', '#CCCCFF'],
'#98F5FF': ['Cadetblue 1', '16774552', ''],
'#8EE5EE': ['Cadetblue 2', '15656334', ''],
'#7AC5CD': ['Cadetblue 3', '13485434', ''],
'#53868B': ['Cadetblue 4', '9143891', ''],
'#00F5FF': ['Turquoise 1', '16774400', ''],
'#00E5EE': ['Turquoise 2', '15656192', ''],
'#00C5CD': ['Turquoise 3', '13485312', ''],
'#00868B': ['Turquoise 4', '9143808', ''],
'#5F9EA0': ['Cadetblue', '10526303', '#669999'],
'#00CED1': ['Darkturquoise', '13749760', '#00CCCC'],
'#F0FFFF': ['Azure 1 (azure)', '16777200', '#FFFFFF'],
'#E0EEEE': ['Azure 2', '15658720', ''],
'#C1CDCD': ['Azure 3', '13487553', ''],
'#838B8B': ['Azure 4', '9145219', ''],
'#E0FFFF': ['Lightcyan 1 (lightcyan)', '16777184', '#CCFFFF'],
'#D1EEEE': ['Lightcyan 2', '15658705', ''],
'#B4CDCD': ['Lightcyan 3', '13487540', ''],
'#7A8B8B': ['Lightcyan 4', '9145210', ''],
'#BBFFFF': ['Paleturquoise 1', '16777147', ''],
'#AEEEEE': ['Paleturquoise 2 (paleturquoise)', '15658670', ''],
'#96CDCD': ['Paleturquoise 3', '13487510', ''],
'#668B8B': ['Paleturquoise 4', '9145190', ''],
'#2F4F4F': ['Darkslategray', '5197615', '#336666'],
'#97FFFF': ['Darkslategray 1', '16777111', ''],
'#8DEEEE': ['Darkslategray 2', '15658637', ''],
'#79CDCD': ['Darkslategray 3', '13487481', ''],
'#528B8B': ['Darkslategray 4', '9145170', ''],
'#00FFFF': ['Cyan / aqua', '16776960', '#00FFFF'],
'#00EEEE': ['Cyan 2', '15658496', ''],
'#00CDCD': ['Cyan 3', '13487360', ''],
'#008B8B': ['Cyan 4 (darkcyan)', '9145088', '#009999'],
'#008080': ['Teal', '8421376', '#009999'],
'#48D1CC': ['Mediumturquoise', '13422920', '#33CCCC'],
'#20B2AA': ['Lightseagreen', '11186720', '#339999'],
'#03A89E': ['Manganeseblue', '10397699', ''],
'#40E0D0': ['Turquoise', '13688896', '#33CCCC'],
'#808A87': ['Coldgrey', '8882816', ''],
'#00C78C': ['Turquoiseblue', '9225984', ''],
'#7FFFD4': ['Aquamarine 1 (aquamarine)', '13959039', '#66FFCC'],
'#76EEC6': ['Aquamarine 2', '13037174', ''],
'#66CDAA': ['Aquamarine 3 (mediumaquamarine)', '11193702', '#66CC99'],
'#458B74': ['Aquamarine 4', '7637829', ''],
'#00FA9A': ['Mediumspringgreen', '10156544', '#00FF99'],
'#F5FFFA': ['Mintcream', '16449525', '#FFFFFF'],
'#00FF7F': ['Springgreen', '8388352', '#00FF66'],
'#00EE76': ['Springgreen 1', '7794176', ''],
'#00CD66': ['Springgreen 2', '6737152', ''],
'#008B45': ['Springgreen 3', '4557568', ''],
'#3CB371': ['Mediumseagreen', '7451452', '#33CC66'],
'#54FF9F': ['Seagreen 1', '10485588', ''],
'#4EEE94': ['Seagreen 2', '9760334', ''],
'#43CD80': ['Seagreen 3', '8441155', ''],
'#2E8B57': ['Seagreen 4 (seagreen)', '5737262', '#339966'],
'#00C957': ['Emeraldgreen', '5753088', ''],
'#BDFCC9': ['Mint', '13237437', ''],
'#3D9140': ['Cobaltgreen', '4231485', ''],
'#F0FFF0': ['Honeydew 1 (honeydew)', '15794160', '#FFFFFF'],
'#E0EEE0': ['Honeydew 2', '14741216', ''],
'#C1CDC1': ['Honeydew 3', '12701121', ''],
'#838B83': ['Honeydew 4', '8620931', ''],
'#8FBC8F': ['Darkseagreen', '9419919', '#99CC99'],
'#C1FFC1': ['Darkseagreen 1', '12713921', ''],
'#B4EEB4': ['Darkseagreen 2', '11857588', ''],
'#9BCD9B': ['Darkseagreen 3', '10210715', ''],
'#698B69': ['Darkseagreen 4', '6916969', ''],
'#98FB98': ['Palegreen', '10025880', '#99FF99'],
'#9AFF9A': ['Palegreen 1', '10157978', ''],
'#90EE90': ['Palegreen 2 (lightgreen)', '9498256', '#99FF99'],
'#7CCD7C': ['Palegreen 3', '8179068', ''],
'#548B54': ['Palegreen 4', '5540692', ''],
'#32CD32': ['Limegreen', '3329330', '#33CC33'],
'#228B22': ['Forestgreen', '2263842', '#339933'],
'#00FF00': ['Green 1 (lime)', '65280', '#00FF00'],
'#00EE00': ['Green 2', '60928', ''],
'#00CD00': ['Green 3', '52480', ''],
'#008B00': ['Green 4', '35584', ''],
'#008000': ['Green', '32768', '#009900'],
'#006400': ['Darkgreen', '25600', '#006600'],
'#308014': ['Sapgreen', '1343536', ''],
'#7CFC00': ['Lawngreen', '64636', '#66FF00'],
'#7FFF00': ['Chartreuse 1 (chartreuse)', '65407', '#66FF00'],
'#76EE00': ['Chartreuse 2', '61046', ''],
'#66CD00': ['Chartreuse 3', '52582', ''],
'#458B00': ['Chartreuse 4', '35653', ''],
'#ADFF2F': ['Greenyellow', '3145645', '#99FF33'],
'#CAFF70': ['Darkolivegreen 1', '7405514', ''],
'#BCEE68': ['Darkolivegreen 2', '6876860', ''],
'#A2CD5A': ['Darkolivegreen 3', '5950882', ''],
'#6E8B3D': ['Darkolivegreen 4', '4033390', ''],
'#556B2F': ['Darkolivegreen', '3107669', '#666633'],
'#6B8E23': ['Olivedrab', '2330219', '#669933'],
'#C0FF3E': ['Olivedrab 1', '4128704', ''],
'#B3EE3A': ['Olivedrab 2', '3862195', ''],
'#9ACD32': ['Olivedrab 3 (yellowgreen)', '3329434', '#99CC33'],
'#698B22': ['Olivedrab 4', '2263913', ''],
'#FFFFF0': ['Ivory 1 (ivory)', '15794175', '#FFFFFF'],
'#EEEEE0': ['Ivory 2', '14741230', ''],
'#CDCDC1': ['Ivory 3', '12701133', ''],
'#8B8B83': ['Ivory 4', '8620939', ''],
'#F5F5DC': ['Beige', '14480885', '#FFFFCC'],
'#FFFFE0': ['Lightyellow 1 (lightyellow)', '14745599', '#FFFFFF'],
'#EEEED1': ['Lightyellow 2', '13758190', ''],
'#CDCDB4': ['Lightyellow 3', '11849165', ''],
'#8B8B7A': ['Lightyellow 4', '8031115', ''],
'#FAFAD2': ['Lightgoldenrodyellow', '13826810', '#FFFFCC'],
'#FFFF00': ['Yellow 1 (yellow)', '65535', '#FFFF00'],
'#EEEE00': ['Yellow 2', '61166', ''],
'#CDCD00': ['Yellow 3', '52685', ''],
'#8B8B00': ['Yellow 4', '35723', ''],
'#808069': ['Warmgrey', '6914176', ''],
'#808000': ['Olive', '32896', '#999900'],
'#BDB76B': ['Darkkhaki', '7059389', '#CCCC66'],
'#FFF68F': ['Khaki 1', '9434879', ''],
'#EEE685': ['Khaki 2', '8775406', ''],
'#CDC673': ['Khaki 3', '7587533', ''],
'#8B864E': ['Khaki 4', '5146251', ''],
'#F0E68C': ['Khaki', '9234160', ''],
'#EEE8AA': ['Palegoldenrod', '11200750', '#FFFF99'],
'#FFFACD': ['Lemonchiffon 1 (lemonchiffon)', '13499135', '#FFFFCC'],
'#EEE9BF': ['Lemonchiffon 2', '12577262', ''],
'#CDC9A5': ['Lemonchiffon 3', '10865101', ''],
'#8B8970': ['Lemonchiffon 4', '7375243', ''],
'#FFEC8B': ['Lightgoldenrod 1', '9170175', ''],
'#EEDC82': ['Lightgoldenrod 2', '8576238', ''],
'#CDBE70': ['Lightgoldenrod 3', '7388877', ''],
'#8B814C': ['Lightgoldenrod 4', '5013899', ''],
'#E3CF57': ['Banana', '5754851', ''],
'#FFD700': ['Gold 1 (gold)', '55295', '#FFCC00'],
'#EEC900': ['Gold 2', '51694', ''],
'#CDAD00': ['Gold 3', '44493', ''],
'#8B7500': ['Gold 4', '30091', ''],
'#FFF8DC': ['Cornsilk 1 (cornsilk)', '14481663', '#FFFFCC'],
'#EEE8CD': ['Cornsilk 2', '13494510', ''],
'#CDC8B1': ['Cornsilk 3', '11651277', ''],
'#8B8878': ['Cornsilk 4', '7899275', ''],
'#DAA520': ['Goldenrod', '2139610', '#CC9933'],
'#FFC125': ['Goldenrod 1', '2474495', ''],
'#EEB422': ['Goldenrod 2', '2274542', ''],
'#CD9B1D': ['Goldenrod 3', '1940429', ''],
'#8B6914': ['Goldenrod 4', '1337739', ''],
'#B8860B': ['Darkgoldenrod', '755384', '#CC9900'],
'#FFB90F': ['Darkgoldenrod 1', '1030655', ''],
'#EEAD0E': ['Darkgoldenrod 2', '962030', ''],
'#CD950C': ['Darkgoldenrod 3', '824781', ''],
'#8B6508': ['Darkgoldenrod 4', '550283', ''],
'#FFA500': ['Orange 1 (orange)', '42495', '#FF9900'],
'#EE9A00': ['Orange 2', '39662', ''],
'#CD8500': ['Orange 3', '34253', ''],
'#8B5A00': ['Orange 4', '23179', ''],
'#FFFAF0': ['Floralwhite', '15792895', '#FFFFFF'],
'#FDF5E6': ['Oldlace', '15136253', '#FFFFFF'],
'#F5DEB3': ['Wheat', '11788021', '#FFCCCC'],
'#FFE7BA': ['Wheat 1', '12249087', ''],
'#EED8AE': ['Wheat 2', '11458798', ''],
'#CDBA96': ['Wheat 3', '9878221', ''],
'#8B7E66': ['Wheat 4', '6717067', ''],
'#FFE4B5': ['Moccasin', '11920639', '#FFCCCC'],
'#FFEFD5': ['Papayawhip', '14020607', '#FFFFCC'],
'#FFEBCD': ['Blanchedalmond', '13495295', '#FFFFCC'],
'#FFDEAD': ['Navajowhite 1 (navajowhite)', '11394815', '#FFCC99'],
'#EECFA1': ['Navajowhite 2', '10604526', ''],
'#CDB38B': ['Navajowhite 3', '9155533', ''],
'#8B795E': ['Navajowhite 4', '6191499', ''],
'#FCE6C9': ['Eggshell', '13231868', ''],
'#D2B48C': ['Tan', '9221330', '#CCCC99'],
'#9C661F': ['Brick', '2057884', ''],
'#FF9912': ['Cadmiumyellow', '1219071', ''],
'#FAEBD7': ['Antiquewhite', '14150650', '#FFFFCC'],
'#FFEFDB': ['Antiquewhite 1', '14413823', ''],
'#EEDFCC': ['Antiquewhite 2', '13426670', ''],
'#CDC0B0': ['Antiquewhite 3', '11583693', ''],
'#8B8378': ['Antiquewhite 4', '7897995', ''],
'#DEB887': ['Burlywood', '8894686', '#CCCC99'],
'#FFD39B': ['Burlywood 1', '10212351', ''],
'#EEC591': ['Burlywood 2', '9553390', ''],
'#CDAA7D': ['Burlywood 3', '8235725', ''],
'#8B7355': ['Burlywood 4', '5600139', ''],
'#FFE4C4': ['Bisque 1 (bisque)', '12903679', '#FFFFCC'],
'#EED5B7': ['Bisque 2', '12047854', ''],
'#CDB79E': ['Bisque 3', '10401741', ''],
'#8B7D6B': ['Bisque 4', '7044491', ''],
'#E3A869': ['Melon', '6924515', ''],
'#ED9121': ['Carrot', '2200045', ''],
'#FF8C00': ['Darkorange', '36095', '#FF9900'],
'#FF7F00': ['Darkorange 1', '32767', ''],
'#EE7600': ['Darkorange 2', '30446', ''],
'#CD6600': ['Darkorange 3', '26317', ''],
'#8B4500': ['Darkorange 4', '17803', ''],
'#FF8000': ['Orange', '33023', ''],
'#FFA54F': ['Tan 1', '5219839', ''],
'#EE9A49': ['Tan 2', '4823790', ''],
'#CD853F': ['Tan 3 (peru)', '4163021', '#CC9933'],
'#8B5A2B': ['Tan 4', '2841227', ''],
'#FAF0E6': ['Linen', '15134970', '#FFFFFF'],
'#FFDAB9': ['Peachpuff 1 (peachpuff)', '12180223', '#FFCCCC'],
'#EECBAD': ['Peachpuff 2', '11389934', ''],
'#CDAF95': ['Peachpuff 3', '9809869', ''],
'#8B7765': ['Peachpuff 4', '6649739', ''],
'#FFF5EE': ['Seashell 1 (seashell)', '15660543', '#FFFFFF'],
'#EEE5DE': ['Seashell 2', '14607854', ''],
'#CDC5BF': ['Seashell 3', '12568013', ''],
'#8B8682': ['Seashell 4', '8554123', ''],
'#F4A460': ['Sandybrown', '6333684', '#FF9966'],
'#C76114': ['Rawsienna', '1335751', ''],
'#D2691E': ['Chocolate', '1993170', '#CC6633'],
'#FF7F24': ['Chocolate 1', '2392063', ''],
'#EE7621': ['Chocolate 2', '2193134', ''],
'#CD661D': ['Chocolate 3', '1926861', ''],
'#8B4513': ['Chocolate 4 (saddlebrown)', '1262987', '#993300'],
'#292421': ['Ivoryblack', '2171945', ''],
'#FF7D40': ['Flesh', '4226559', ''],
'#FF6103': ['Cadmiumorange', '221695', ''],
'#8A360F': ['Burntsienna', '997002', ''],
'#A0522D': ['Sienna', '2970272', '#996633'],
'#FF8247': ['Sienna 1', '4686591', ''],
'#EE7942': ['Sienna 2', '4356590', ''],
'#CD6839': ['Sienna 3', '3762381', ''],
'#8B4726': ['Sienna 4', '2508683', ''],
'#FFA07A': ['Lightsalmon 1 (lightsalmon)', '8036607', '#FF9966'],
'#EE9572': ['Lightsalmon 2', '7509486', ''],
'#CD8162': ['Lightsalmon 3', '6455757', ''],
'#8B5742': ['Lightsalmon 4', '4347787', ''],
'#FF7F50': ['Coral', '5275647', '#FF6666'],
'#FF4500': ['Orangered 1 (orangered)', '17919', '#FF3300'],
'#EE4000': ['Orangered 2', '16622', ''],
'#CD3700': ['Orangered 3', '14285', ''],
'#8B2500': ['Orangered 4', '9611', ''],
'#5E2612': ['Sepia', '1189470', ''],
'#E9967A': ['Darksalmon', '8034025', '#FF9966'],
'#FF8C69': ['Salmon 1', '6917375', ''],
'#EE8262': ['Salmon 2', '6456046', ''],
'#CD7054': ['Salmon 3', '5533901', ''],
'#8B4C39': ['Salmon 4', '3755147', ''],
'#FF7256': ['Coral 1', '5665535', ''],
'#EE6A50': ['Coral 2', '5270254', ''],
'#CD5B45': ['Coral 3', '4545485', ''],
'#8B3E2F': ['Coral 4', '3096203', ''],
'#8A3324': ['Burntumber', '2372490', ''],
'#FF6347': ['Tomato 1 (tomato)', '4678655', '#FF6633'],
'#EE5C42': ['Tomato 2', '4349166', ''],
'#CD4F39': ['Tomato 3', '3755981', ''],
'#8B3626': ['Tomato 4', '2504331', ''],
'#FA8072': ['Salmon', '7504122', '#FF9966'],
'#FFE4E1': ['Mistyrose 1 (mistyrose)', '14804223', '#FFCCFF'],
'#EED5D2': ['Mistyrose 2', '13817326', ''],
'#CDB7B5': ['Mistyrose 3', '11909069', ''],
'#8B7D7B': ['Mistyrose 4', '8093067', ''],
'#FFFAFA': ['Snow 1 (snow)', '16448255', '#FFFFFF'],
'#EEE9E9': ['Snow 2', '15329774', ''],
'#CDC9C9': ['Snow 3', '13224397', ''],
'#8B8989': ['Snow 4', '9013643', ''],
'#BC8F8F': ['Rosybrown', '9408444', '#CC9999'],
'#FFC1C1': ['Rosybrown 1', '12698111', ''],
'#EEB4B4': ['Rosybrown 2', '11842798', ''],
'#CD9B9B': ['Rosybrown 3', '10197965', ''],
'#8B6969': ['Rosybrown 4', '6908299', ''],
'#F08080': ['Lightcoral', '8421616', '#FF9999'],
'#CD5C5C': ['Indianred', '6053069', '#CC6666'],
'#FF6A6A': ['Indianred 1', '6974207', ''],
'#EE6363': ['Indianred 2', '6513646', ''],
'#8B3A3A': ['Indianred 4', '3816075', ''],
'#CD5555': ['Indianred 3', '5592525', ''],
'#A52A2A': ['Brown', '2763429', '#993333'],
'#FF4040': ['Brown 1', '4210943', ''],
'#EE3B3B': ['Brown 2', '3881966', ''],
'#CD3333': ['Brown 3', '3355597', ''],
'#8B2323': ['Brown 4', '2302859', ''],
'#B22222': ['Firebrick', '2237106', '#993333'],
'#FF3030': ['Firebrick 1', '3158271', ''],
'#EE2C2C': ['Firebrick 2', '2895086', ''],
'#CD2626': ['Firebrick 3', '2500301', ''],
'#8B1A1A': ['Firebrick 4', '1710731', ''],
'#FF0000': ['Red 1 (red)', '255', '#FF0000'],
'#EE0000': ['Red 2', '238', ''],
'#CD0000': ['Red 3', '205', ''],
'#8B0000': ['Red 4 (darkred)', '139', '#990000'],
'#800000': ['Maroon', '128', '#990000'],
'#8E388E': ['Sgi beet', '9320590', ''],
'#7171C6': ['Sgi slateblue', '13005169', ''],
'#7D9EC0': ['Sgi lightblue', '12623485', ''],
'#388E8E': ['Sgi teal', '9342520', ''],
'#71C671': ['Sgi chartreuse', '7456369', ''],
'#8E8E38': ['Sgi olivedrab', '3706510', ''],
'#C5C1AA': ['Sgi brightgray', '11190725', ''],
'#C67171': ['Sgi salmon', '7434694', ''],
'#555555': ['Sgi darkgray', '5592405', ''],
'#1E1E1E': ['Sgi gray 12', '1973790', ''],
'#282828': ['Sgi gray 16', '2631720', ''],
'#515151': ['Sgi gray 32', '5329233', ''],
'#5B5B5B': ['Sgi gray 36', '5987163', ''],
'#848484': ['Sgi gray 52', '8684676', ''],
'#8E8E8E': ['Sgi gray 56', '9342606', ''],
'#AAAAAA': ['Sgi lightgray', '11184810', ''],
'#B7B7B7': ['Sgi gray 72', '12040119', ''],
'#C1C1C1': ['Sgi gray 76', '12698049', ''],
'#EAEAEA': ['Sgi gray 92', '15395562', ''],
'#F4F4F4': ['Sgi gray 96', '16053492', ''],
'#FFFFFF': ['White', '16777215', '#FFFFFF'],
'#F5F5F5': ['White smoke (gray)', '16119285', '#FFFFFF'],
'#DCDCDC': ['Gainsboro', '14474460', '#CCCCCC'],
'#D3D3D3': ['Lightgrey', '13882323', '#CCCCCC'],
'#C0C0C0': ['Silver', '12632256', '#CCCCCC'],
'#A9A9A9': ['Darkgray', '11119017', '#999999'],
'#808080': ['Gray', '8421504', ''],
'#696969': ['Dimgray (gray 42)', '6908265', '#666666'],
'#000000': ['Black', '0', '#000000'],
'#FCFCFC': ['Gray 99', '16579836', ''],
'#FAFAFA': ['Gray 98', '16448250', ''],
'#F7F7F7': ['Gray 97', '16250871', ''],
'#F2F2F2': ['Gray 95', '15921906', ''],
'#F0F0F0': ['Gray 94', '15790320', ''],
'#EDEDED': ['Gray 93', '15592941', ''],
'#EBEBEB': ['Gray 92', '15461355', ''],
'#E8E8E8': ['Gray 91', '15263976', ''],
'#E5E5E5': ['Gray 90', '15066597', ''],
'#E3E3E3': ['Gray 89', '14935011', ''],
'#E0E0E0': ['Gray 88', '14737632', ''],
'#DEDEDE': ['Gray 87', '14606046', ''],
'#DBDBDB': ['Gray 86', '14408667', ''],
'#D9D9D9': ['Gray 85', '14277081', ''],
'#D6D6D6': ['Gray 84', '14079702', ''],
'#D4D4D4': ['Gray 83', '13948116', ''],
'#D1D1D1': ['Gray 82', '13750737', ''],
'#CFCFCF': ['Gray 81', '13619151', ''],
'#CCCCCC': ['Gray 80', '13421772', ''],
'#C9C9C9': ['Gray 79', '13224393', ''],
'#C7C7C7': ['Gray 78', '13092807', ''],
'#C4C4C4': ['Gray 77', '12895428', ''],
'#C2C2C2': ['Gray 76', '12763842', ''],
'#BFBFBF': ['Gray 75', '12566463', ''],
'#BDBDBD': ['Gray 74', '12434877', ''],
'#BABABA': ['Gray 73', '12237498', ''],
'#B8B8B8': ['Gray 72', '12105912', ''],
'#B5B5B5': ['Gray 71', '11908533', ''],
'#B3B3B3': ['Gray 70', '11776947', ''],
'#B0B0B0': ['Gray 69', '11579568', ''],
'#ADADAD': ['Gray 68', '11382189', ''],
'#ABABAB': ['Gray 67', '11250603', ''],
'#A8A8A8': ['Gray 66', '11053224', ''],
'#A6A6A6': ['Gray 65', '10921638', ''],
'#A3A3A3': ['Gray 64', '10724259', ''],
'#A1A1A1': ['Gray 63', '10592673', ''],
'#9E9E9E': ['Gray 62', '10395294', ''],
'#9C9C9C': ['Gray 61', '10263708', ''],
'#999999': ['Gray 60', '10066329', ''],
'#969696': ['Gray 59', '9868950', ''],
'#949494': ['Gray 58', '9737364', ''],
'#919191': ['Gray 57', '9539985', ''],
'#8F8F8F': ['Gray 56', '9408399', ''],
'#8C8C8C': ['Gray 55', '9211020', ''],
'#8A8A8A': ['Gray 54', '9079434', ''],
'#878787': ['Gray 53', '8882055', ''],
'#858585': ['Gray 52', '8750469', ''],
'#828282': ['Gray 51', '8553090', ''],
'#7F7F7F': ['Gray 50', '8355711', ''],
'#7D7D7D': ['Gray 49', '8224125', ''],
'#7A7A7A': ['Gray 48', '8026746', ''],
'#787878': ['Gray 47', '7895160', ''],
'#757575': ['Gray 46', '7697781', ''],
'#737373': ['Gray 45', '7566195', ''],
'#707070': ['Gray 44', '7368816', ''],
'#6E6E6E': ['Gray 43', '7237230', ''],
'#6B6B6B': ['Gray 42', '7039851', ''],
'#696969': ['Dimgray (gray 42)', '6908265', '#666666'],
'#666666': ['Gray 40', '6710886', ''],
'#636363': ['Gray 39', '6513507', ''],
'#616161': ['Gray 38', '6381921', ''],
'#5E5E5E': ['Gray 37', '6184542', ''],
'#5C5C5C': ['Gray 36', '6052956', ''],
'#595959': ['Gray 35', '5855577', ''],
'#575757': ['Gray 34', '5723991', ''],
'#545454': ['Gray 33', '5526612', ''],
'#525252': ['Gray 32', '5395026', ''],
'#4F4F4F': ['Gray 31', '5197647', ''],
'#4D4D4D': ['Gray 30', '5066061', ''],
'#4A4A4A': ['Gray 29', '4868682', ''],
'#474747': ['Gray 28', '4671303', ''],
'#454545': ['Gray 27', '4539717', ''],
'#424242': ['Gray 26', '4342338', ''],
'#404040': ['Gray 25', '4210752', ''],
'#3D3D3D': ['Gray 24', '4013373', ''],
'#3B3B3B': ['Gray 23', '3881787', ''],
'#383838': ['Gray 22', '3684408', ''],
'#363636': ['Gray 21', '3552822', ''],
'#333333': ['Gray 20', '3355443', ''],
'#303030': ['Gray 19', '3158064', ''],
'#2E2E2E': ['Gray 18', '3026478', ''],
'#2B2B2B': ['Gray 17', '2829099', ''],
'#292929': ['Gray 16', '2697513', ''],
'#262626': ['Gray 15', '2500134', ''],
'#242424': ['Gray 14', '2368548', ''],
'#212121': ['Gray 13', '2171169', ''],
'#1F1F1F': ['Gray 12', '2039583', ''],
'#1C1C1C': ['Gray 11', '1842204', ''],
'#1A1A1A': ['Gray 10', '1710618', ''],
'#171717': ['Gray 9', '1513239', ''],
'#141414': ['Gray 8', '1315860', ''],
'#121212': ['Gray 7', '1184274', ''],
'#0F0F0F': ['Gray 6', '986895', ''],
'#0D0D0D': ['Gray 5', '855309', ''],
'#0A0A0A': ['Gray 4', '657930', ''],
'#080808': ['Gray 3', '526344', ''],
'#050505': ['Gray 2', '328965', ''],
'#030303': ['Gray 1', '197379', ''],
}
HSVWheelImage = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAAMoAAADJCAIAAADtkzpRAAAAA3NCSVQICAjb4U/gAAAgAElE"
"QVR4nO2decgsz3fWn6q5MWLcgsaoqAiKRBM14gJuNCj0T9SIuEVRiEIwSDSCElFJBPegKGqI"
"a1QigiYuqFHQEeU3RtSgRjQ/0ASXgBqXuGBihPh7u45/VJ1znnOqet737vd+vzZz5/b09PQs"
"/ennPPVUdb/lox/9KP7/NE1/ddsrUIELwDPLhz/qdn3bn/cdnZ697Q/wNqcv3/YSWCkVYg+B"
"Akhfs9zdzse2/YzC7/fhJu/DhdcfIJ4uKBWlQApQUAtQgIJyQa2QAikoFejrF5qpNKPLRV/e"
"bx1KKcB/3XZm7rt+yGj74OP1O+MOrkQSEVONGABAqQMjQw3pVlczypkogvZCKQAg/4c/zIcA"
"tQ8mXl+07cUlysWps6U7/lJdbAroWSMGSmGZFCuyFTBK8xFQ0XsBofYdH1DUPlB4fWHwUh2p"
"XgGdrV4K6yQ/3WwV4mmpVcuHzBlp2HhISJnCSULtuyhq/+uDxdkHAa8v2HYMrapa1IrVO8SC"
"aPMYDzscJULDy2Wug5OeSYJskjShNZk2CXoG+WRtJfzXDwRn7zdenzcO+nJBKa49Nm8qVVS9"
"2G+xShUlrJj8KCt9axmyOnGWwJpkbKYtsGVSB0iBfL9tvwD/4T2H7L3E61eQXFE5qxGvoVDk"
"t4wnLoKdp6KK1TDm+wsby9iZA5u9fPL1NepZHUsWbPFTQPtBKmb/5v3k7D3D65cOz97lyg3W"
"dDPpKlC/VRQROFgueBXA8PulogINQMWFKHnUk61FS99FCKzCYJGS+ZIyVhti9sO2/QL8y/cN"
"svcGr1+odbAGfeoy47e5RCaGdE2k0AuQilrHHkXFpasXF9NYNOeayA/npuLa1BfHsUUxk1nM"
"Pn3bL8C/eH8gew/w+rnaP6MKFG59JxZNrBSRZMKG/CAs7I7KoGy6/VZRFbJnBU1V5FTAuNn4"
"mPcKNbEGti4EFrQuG3NjBaB95rZfgH/6PkD2TuP1szS+wgSWOqpagysflisVyhpnVLeKalXR"
"GKybrWcF0msrRn1c+K1owpK1ytlEnUQrJhSt4KLS2FTPSvH7PAO0n7DtF+AfvtuQvaN47dsO"
"4JkSoykDsxWqJLkrhsmNv+UU+vBiu0pTDClOZyWGvNlYI2RpPvn38yqZwrAOetawdKs0o35O"
"gPaTt/0C/L13FbL6+CpvfNq3nwnUilr8Vgou/VbxrISn+u1CM12HLiXf14JL9flaUCs+wdaB"
"vkv1V10KatW3ps+QHvJbX+ztani7S0WtY2v8kWz7NX02XTiW18VvMpzA9lP3t73T1tO7pV77"
"9nMwpKVC7Y7mVW62pgyiFpccbzYWFz9vKvbmoYqc0Ba6KoyaeHHjHwRsKovB4MdsIgRdNeuZ"
"mffQhGSDX9FUZU3Vmq7jVRIQQLaftKPh9o/eLRl7V/Dat58PoAzfw7EQusTa8VqASFiNuZf3"
"/Bhn9HKptNocpery7utr9bJ152bNSV/znC2HCQu2rFAK2a95BloiW0VFA1qDCKRtP37HgdvX"
"vSuQvRN47dtnY3QwLw9os1k1YpSNF2kVNx5BAuZUEYh93lSnGh9wrxa817KbiIdLMEyRvGZa"
"GK29ffd0dLXkwLrxwlCy0qmCVIhACqQBsv3o/fbP3wnC3j5e+/ZLVflhyXVsohsoNeZbhXDp"
"hc/vuXrGyL6S6+fuSFgfpb324t1KbdatabhEkqi5A7u3T/n4AbUw/Itzi9JqYkUrmrLWEaMI"
"RCCAiN4wZOwz9tvH3j5hbxOvffscaCOOG1NxB/QffSgWAljcYGQZQySsL0kRvzGUZkL3UZ8i"
"lC0JGHNWc1lEiSTN6Vd1sFizuUUplAAPyIbxkjZEqzU0QWuQijZkbPvhOw7cvvFtQvbW8Nq3"
"zyWnFX76pGEFwCCsQxPMFht/NvIIUT6rFEsaqFb6vMX6FaWqbqUY7MRmGW3By9OSJGbLh4tI"
"gqyYrWw1UYACaYMzljG07Yfut3/91gh7O3jt2+dp4y7gpYep/bLdZduu6h6/wCELYFGzcR2x"
"1iBOVUcomLvySgr3XhdiJcgVx2Ap96KvVsqKrQrUIYSJJ3SBrKMCZvulkDW0ApGgW2a/3I0B"
"TbYfvN++6e0Q9qbx2rfPRyyI6cieHiaPXzGk6H7LMekZyLE5ScUlyrsUi4pWRblot2MsgukW"
"4JvHpnLho29nvgoUN3TmLojtxAmyVlAhgmZmq6jHF7VnHbthy7YfuOPA7ZvfNGRvFK99+7Xw"
"3o9gfvngnrwX1PEUKGEUKt65mVANwWOJ6u0v3VSXutbDiJ6N9c9c8ayiUbCeG48TZ6lWSuRs"
"yVb3+8UqYPT4nrpVtNFybAyTUPtR7dcAC/4U2vap++2/vFHC3hxe+/bry0q0av7p54dCVqlY"
"0FoHNJxZLAnz1iW1IosNkbAt0zqiLv6iKJwKWDxOWLQWSqZCZfGV9TaG4mhyZcYfzrdMYUR/"
"CLf5XiURsJO2fe/99t/eHGFvCK99+w32O841MaIWCoq+5KK1w6GBO30wYamFWL16mhsbq1Hf"
"diWfVIoOl9Amxbo/e1q+8F7ktJJoDbdOvY0uZvaSHsrryjpowmEi1BrQuFy2geUolC5m2yfv"
"t//5hgh7E3jt22/up+VEmBZ+i/jzp5S5WnzGyhnbdnf0dl8BOJFppiivtnGup50SHk54lqmy"
"95qzCW7rJbb4KWvTFBpyaBUzhRF1iFYXqg7WkCshxRrWfkDWmqdi332/feubIOy147VvX0xj"
"3sdBOctYfGitKnsqeRGjJ6DGtBUALmkWTNj5jJU2BSWpf4AaEekGn8vc2Y1zYK/yKeuqC7aG"
"blUSnHiDKZwSo0KVwSJr38kD+zCbadsn7bdvf+2EvV689u23UiNxeHZOuWqEjFDz2IIFjwhb"
"jABTrRpmizTM8zCyXyABM0Gyl7tWwV+VpYvzCBp4PXsvifxlerj3WstodvcFgAhaGa1Codxr"
"VqwG4o/xamijkm6fuN++4/US9hrx2rffUcfYhGY/t3GmS1oUrcRTWshHc4ETVnXX+kzJ914u"
"CSzudkQfYq9G3kIyHw89e69Zq2YfdiJjpljD4EeSRrilLkIqWhmJ/HkY0TSMACPFLcru5XzJ"
"9mzHgZu8LsheC1779iUYfnyETFQTTRi6klkQxd6r6C/r9ZSPZjXFxbQKFFJMwwxDuxKOGqDG"
"33C5aIksgK7ZXaN3B02Q2Sf0b0EJRWiyRMWyGLnwt2PRYuNFMNl8h6YLEiG1Viwz+1Nz8thv"
"l9dC2GtSLwu3kpsx72W/uw1PSNbehix7GY14AdRHFJFahhQsYF4l4YUStKYUX6eUcdYQD2Fd"
"GHy6P6uPrs1PCSPo2RRGyPBVLmMnYUSrOiSMlQwQDCgtEvu/++07vXrCXj1e+/b7Uyif7HzN"
"cjXfz63IFveWiZzfVMlGbDGd0LFuP5KCdjlsZvu06dAqPsGy+5WAcQVMhVKIV4nLGTsLI7wm"
"khmwBqOKVjdevag2NvWsTzLqYCfMlKxplTRVU8JeOQl45Xjt2x/koKgueAqcxZkZL08lJncP"
"OrenEE+euJ4M/2I7z0+J1VkNBSrQKp5hnDt0Fnd5WYzqlewXZxYtMmdhhEcS7O4nF2/OnYug"
"BHosj/CQDEoYzZPObf9nv32XVyxgrxKvffuylG9xdbCHE1U5D2Plm70XYefjCjFm7OEcgM08"
"gQLYsMTCsN5QmEdJ3PFe0weG1jtDzbRqroPBeBlkaGX0W3tvT3FTH5xW17OZKlFfH1ELGla2"
"b9tv3+1VEvbK8Nq3P8pj5eoJT2dVkvHih7S3Go+nSCHF1PlYAaQwbB5jCBczQ00uI8EHRpFN"
"hS97LzZbVM1nsEzGiq6WNCyFEeMlqlKUO5iMNfZYsvJYMrzXwtebmPGStv3P/fbJr4ywV4PX"
"vv0JeCOfdct2SeIpiZatYKaqRchCIyBKHdJQaR7RWkfcgBSGmbUn5vrHuERcSskYZQGj3Iut"
"2Lo+EnDzOWcmV9Cuaynmt7J/l9Edb1qVW4sUfc1UtYZW0Io6M2tdCkS2b9lvn/JqCHsFeO3b"
"n8KoiQ7W7L1mGSM+OLYYMKlisXgkMRh7C45XmW6I2oZJybhiQuts1qc6cCx1FYNFhuwYC/GE"
"fdkIk6Tv0nuBoKsthtKvx0Sk+6nxKMuyqO3HNmnYf95v3/cVEPayeO3bV1gMoT9rmaCxheua"
"SGyxtpmj9w6iyeCbZrivT1WSC6IhBWfOJK1/jEKfLY0D6ze7piEY/fveK1pGbpr0bzdzZq1F"
"lqsuSNRa9B6hyFOLhInFEy0q1qpW6sLtP+y3H/CyhL28el1mTeKfft4NBJ+YwvFeiUomego1"
"athbYVchMMSnEiGd/3ju9MVCV4OsoABSRo9TOJvjzHstNaxmmwUe5lUgddUX5DZrjIZI9msK"
"IzjKahRDBMKIp0FnVez0Nn7gVxBVvBRe+/bn9JRU8x+FKuOiOE6O6pGiqd0mQe1ID7hWMl7r"
"Kkn3gHZTcipB5JmYCXcWzVSxl6/hGOD5kpTM5DzWSih5RFXoBVrFpx5l2XItgrDyxxSq99JS"
"uFCv1tDK9k377Qe/lIC9OF779lWg8SqxQGSkzr18bkuWvGaNbIUWJXN2UhxzPME1kQWs0rzp"
"GYB+vriegJ/DVZoxa5XKoj+s4dMub06hF75GMpYUy+zX3AsklZKIqGHzzXSxS5eR147tX++3"
"H/rihL2Mel0iIpgfxsLBbM3MneEVXAt7OHIqtiQR5qcSVdUt5OKIBBy8kvb3HaMgoe3K2XvN"
"9ZFom9niKhlO1qj6O6jRTqZ+ik+z/RLSJ85RqdvR0ZHhwxhBwKtqewUl8gXx2re/Es9yXv7Q"
"C/KibuWyuJKxUGVi63LWMGPFQ4qpFYm4pCh8oHFpUEtktHm39+y9uCDGh/yxWdKCXOk7NrVc"
"hYZtyQSWjDro8Sl1+GRfbyUyNAz195szMBkPoeW41e1f7bdPe0EBexG89u2rEa7dLXqyTS6L"
"CbUSOLOf3imcZSxCFsriKqQwGuZ+7mzCqBPJeLKGpC1p1YVNElWkx3w4zWwlpPgDd/IKvyr3"
"Xjf4mIg7YcQikVcFalMj0eqm+bCmNdEeSrVnt4/tt894EcJeTL3sWlmwtk88ppN6Zb+VFCsK"
"m69cwsvTfRCGSN4gCYu2JCbUQmCRGpJk3Vq8DgVW3zd4L/7kRJuFETA3zko2DbkRwDx79Pgm"
"SK2FdDQD1MbHCVXPPJbWypHE2kOJUisvxMnz47Vvf9POnGGPVc8f1gVns7W3JS5U6dniW/AO"
"IqKN99OyFZkIQ42c0bDVXrM4rXhGVWzhvayyp/nEVqUwQkXrJIyQpFhssGIYwbY9jJKYwq2G"
"abnWSrE8jHBsWjFb3f7Zfvsxzy1gz4fXvv1tuL0Va+lQyQhSz8IWbS+XgyRswYFNVbI/G5qT"
"0ZMZYU86C7JGzth4Ff2bMdW/y+Pei+s+s0VUpVsxJVG73XuvU2txiuPlbnwaHNW0sLFQnT/0"
"5Q1Stn+833788xH2vOrFSeDsQnLoRZYrAMRH+VL22FotOUveK3ImKlT38WLCWLegnHEAZlbs"
"nveKVZLBmqlC1C0UGyfo5wKtxnI1c/fnGmbGq/GbDEpC4tU4hphcl7Vbx/LnLpHPgde+3bi1"
"yId1jbQtTf0sVCfeKwjVrF7xYWCLF04kjZCCBw/etV+ITzV4bV0YfP7wDNmqzWhhRG8e6YnX"
"PL55cKYFsSWPRf3W3OibdSsnpVXbhkLVUF0XyHXZbTRFR76//YP99pOeQ8Ceite+/X3kJCKg"
"w45kablmB5bqJm+K9OyOeq2VzGolFvbLrDqKN1C4Iem6pSJdAGHvFQ8wPoQSRksHNswDDUy1"
"y3TNYYTxtBrF1fd6zk5TPMHR11xYG/1g2gjw1zJnpmfYbvtteyphT1evuvxZJ9TckHHRnMrH"
"rF5SJhyXMkZe3ncqiRYXypmqfEv5Kp1aUvTTWg99JXeVBCzVRCuICSzhQlnJeGWbxbUphxEc"
"n8qkT1QErajNvoq9fxMdki+hXNtqvLzV5wpan4TXvn0twmDwkESwo4qtKl+Symj13eAvrycw"
"EXyL+lhoR04GH4wRiRlOmpDAwKh/PJM3P1qW3osONgY9lOx4AiPYE5F5L9QLlMbRk2g1yYrl"
"+nRWH6PNkph1SQsWza39lPJLgZTt7+y3n/4kAXuievlF4WM5SIfvApdJ25YSyK1CWyGXRWo3"
"BK+zgsx2KhO2Dils4KFSyN9uwHcZQJxCVsJhM7OVjsym725jnfUyXX3c6RycWinMALWsTyGG"
"0ALnpo2qoT+bKqDyNJ6Fr/OcHv9xvPbt67Doa/PdPNULscpiyyMcS3PGo8RkekkooJG2cB9p"
"M1jvtCJhV8ckEyZkwqSgAFJ1pFd0manoS6RteYG4ABlEgBRrpSESFJ8yVcKd1mqkbGygpakh"
"3LK25xRuCVt+az/qiJ3EXCvb39xvP+NxAXuKel0qAIdmWQTTDHuUXEz52bn0JLzOauK0JEPG"
"C58SUlQqkUm6IjqLGzkt+3aMERdr7r0uRfQEjbMwIg8lncdyreLT5KtafGhNRf5o1uEdlqsE"
"+kiKovr6JAF7BK99+xjoIhGrupBtFpyMwntltmgrQ8Z7kc2ZC1sqiyxmtl9JyWw3B+O1OjvN"
"YILO+HUlqi9sq28t8QM3OqJ4IKGl9tZ7neogdfjw2MDg6FOgyt7o3Fflh73RQKZq2QLw0lmp"
"y2hscPvq/fZZjwjYo+plf3Zgvok22hNqWa4SWJMhSwimoomJpOXCpG18ABrrJZ0FqTeDDMxc"
"P6tWn2p65Cwu98V+CxC+0JIJ2BRGIHcj6p8+CHFUaCfykAeWqNRg5Jf3H4AechuQnTumxCt4"
"/IMgg5bRxwXsHl779g3Rdcn0sz5uyOI+YCWYYVrwRBVWrLfgzHglvKLvcZVCaEjmeILv9a86"
"diu2GA8dayKTDaJKathvFkbUABbm8H0aEDG3GeduxEZplg9MjYFWqw7TkC7QvPkthUwiZH2m"
"le0v7beff0/A7qvXRaVrYTvqhI6JVkStzFxOdTDjSxVndvRggJYyRqix72GYels4XDynz5Oe"
"2QcrBQJdfz5sGHcGa6qPyzDibBSXgZUGRFg15DRBY6qFzeIuRQXFTzdiMSOFawUCNCNJaXNz"
"VsfFEF9Mvfbt3wIV/d8pT65DE3xZw6K7Yh0KRXPCaGztRN4WJixBlgij0x5z43E6v8PZquPv"
"djf+OgQT/yYz3FYT7U9scNf18gQNxCTiLD6dfT03Hpp/9WTCeChOslkeQOhLEGvioO3oh8n2"
"5/bbLzkVsDvqNUbdYFSljJTNc6My1ousVSvUwvKEJmnDndKZ3dhdDStYj6RgwrzBSOMpAA+r"
"0jHDkAnJGGNtZr//kanUC7SKT4VD+TSyeaqPfs4PtwpPAtJFy5EHR+iYVS+OhzIXX3VgIPgC"
"6oWphxFBexyFGb4zuXpsodio1yVS8U1Z2IQsWrZlETJMYC0SihhP2MN6oZbj9JWXYA22qvde"
"l9ClOOvWMpRPA0oFHm4FHWpxhXTGR9QnQ8qliLIJJozX51eNr1CeG699+2YeM2gzGCYX1mCE"
"/75BkM5TiSxapG3ZuvHDGTtaOJszUdoYMitVj4QU+gGKvsU4fwRj/fng4WMsWK74Nxmt95pT"
"U3P3J+NqchGcKiOHW6FLUTIc5ugbidMIVOGne4BJQlTBA1IU7m4WsX3FfvucdX08Uy8/w8wA"
"gv6gWDGXDujZjREuYVjY8oVLQ2YUTjVxbfaT9yIN4/NsC84H59RHvtfsvYQWFmvZmFTHExXN"
"1HOUFcOtfPURxMRLf1F7iOrPckY6h14BMm08Qt0VKo5jrHMQUkIKOvK5csfgL/Dat2/B8L/5"
"B40kZXVZ8eQqdf/ZGvQgvWrsKq6Sk4bdx4sNvkzVcEEYCVghcU2ZapBP3tsURoB3sqqF9TPG"
"OH6dcqWklKshsmcKPYO8MDYbA3mqUlYlm37Rxt+AWpqD+wLpeGH74/vtVy4EbKlefDkrAdFg"
"PytcusISWi1bsfOjP+wtY+vckC2anAzT7PFZ7YwwupTXCCkqWXu7VyvWD+10oa+kW6aOiGHE"
"6L1Wd8zGy6x9jCGWJ5MtGo9FRS6dFMT5woSU56LUTkza1mnzdmIE19e35HZt8Jd42WnyGZ0k"
"ab1RqYZM5tVWPOVAPy1PLo3lai5J9PLQFIjqBSuOHFJAQ/xVv9CYIe6fJelK3qvQW9PXsZk2"
"eq+NoWzqDTIsvFc8q4Iaj2abKHBvxSkZ/LUoPyva3Gal7NQ9lgb9tJqxeCzpmvDat28DXR3Z"
"ftAkV0zS0pDRFgpXt7k4cgY21c1g9u+qV3L9iyppB7KicNZ4zBqm756akwl0ro9cUfqNdWsV"
"RuQhpnT9rXZis4J5is7dqq1lFsmHtahDh7Uc2blzFW7jWWiaag1M3fL2pfvt1+T6OKuXjU5x"
"jJYMFV/HMEolI/m2oEn3C2jyVWWxMitWKJ0lzMx4QX8zxCbkMp6wY6N/jKd4r8GWXj2gldFa"
"PIkhzrLTWcmWNktStdJaFgpftFlcH9lmDZ+ubzGexcKWia0MHFZqFwZ/xsvTVJ0pEbjMRDJk"
"y5kSeVqhliQhg1gDTFnDZuaiFXNJIw0Turbq4s8skLu3YdA1frzZe81hxPxHpjiViB3V5p+4"
"J3F12jRZey2R8aSxmG+tXH87lDCs3JUWxHGvbcYDKmB2tMLL6H289u3/9gKYdnBaArqPrHil"
"w6ReSyLTflqVSNMefgsnqUTUmKcYf7j3IockS/vFlwRTv28vYUNmn9zuz8KI6gWxaRiRTq32"
"UVwsNuc2KzT94uB6lh+jxG2W8SdhRkjbWsVBWsgdAxbii6rXuC/b791vXxjqY1Kv8bN2tw6g"
"QKwgIvysvmTJX3pJxCK4+yV/hUC5I2YqS/5UjFt5O08KKaY+IhOwQrWV2eK3e1oY4SXvfnwa"
"wy2ZbVZy9DFKXSYU7sl0yweXTnsX0zZY36LVeb8/uD524c71MeHlDW9M9/PDBBNjBD2msUIk"
"vUsSgzOqmCSWveTPIlvZfiUl0+1YQQyneJDZ768qWv74A8g006z3GqKm3oekxvh0vrSp++XJ"
"deUKSObd65rNk7syX+UGKyIYHJVpGLcZ9dKbwtVTE5tO4dR+zOoFMrNlaJjoD+24nAE3K9xM"
"Ekvasr+SeU0kLTVMGfLoNTUqywoyDinKGM41UtaTq391D3yp4fMzpqyIUpH+yBRbe2swJvMe"
"O3zYdbmYTdaeQbECZ6xYTQxpFsVavjL0vcTLqBm1A8QoW7Te8OxF8xyvfXuWFAsrONKSSuvr"
"ci+s3Jys00tOJC24KyxIdYcXXxu0bWpL5qiCUFuUyIkw2HWEE+LRfnkYUVCo9zqGET7WeVkZ"
"hR6SKxqC1CbVqWvaHDWKHoSGPyRirMaNhzIajLyaIAihqJofR29Cbr9tv/0Wt1+sXuPifYZI"
"gSDcL1DDk72/gYIARKF1cu2bmHPLlWhLKYaVUa6eyyqpePUvXpYDDOl7yXTheym+taeFEevQ"
"IZ1pyAzF6KtR4Ztt1kxbyC/g9/ZeoiMZoQrUJjm222FbjpB1i34cyX0xXhfa05mSM2ISN4SF"
"7TOpsbAutTAaMjNwkpLV9JK6XoGRSjKWNYwgS83GNPzL2F14L2OLi6PWMm0t5sE2y/IXbdbY"
"/WSz8nB4QpBtVoCJ/RYPueGqF+tdM+dODUNR0A+xrsaxkePwnu9ov4J6wdt0BRGImYlHsTt7"
"4WpNCzltly/WXyUCjNqCpxrICI3HhFeBnPcR2WdrJauXea/C+9x1qxXNt2b1usNZHxXDwJFE"
"jXmyWRJtVovuyp464OwaSdCHBxZEWp3tyjS+VPOXC903oybitW/fPeFyBhbOn2UiFZfTwvpo"
"ulEyT4G8+96fX1LotWTLPLYgyLBK8KFLpE7n5MX2QdN7PvMnjQ08GZGM2WbNEqUr54yUaqXn"
"UmazLFOwtzsGB0fMTrlheMDf+lD1OpShIwZmBtYBCLbfuN++5JrUy/uCQDPz/R3sztQOdx/2"
"Qx+uSd4a4JewbzMiST8WzCX1ipGY1dNFSJEG3ds5jwjX92LRAs9DZGhVDiNOwi0vVdzbzTaL"
"Ay0E2lyHSHWkZFZSo8+SrQbyW6Z2ALT3OqBTcVh+axvUca39VUdj+2V4XUiYHmFL24ZC0WuI"
"TGcdwurhff6iMi1UDSdiVsNeDxp2FlWQhvUt1xhPLC4LPTUbtcEoRalixbI+nKkPcXnadHJa"
"ByvQFD1kJZtubPa5r7AhbsEclerZoV2KI9ZqqmFF6y+0jakQ9/bBhFcehDPP38Hu/grzBk2o"
"kg4RYc7ufVITc1PxuqNhoTJySEHXwmQZs5SOkSWwhus6j09zFk9pVj5pLDkt6qVm527N0sSH"
"YzqXP3NIdXg+M+8HYjqvuJhnz7rV30WIVG27THhd4DuysJDYlFx/slYg3Xph7OYbphmmR3+n"
"vAWqs0HD4om4uS1p8xjfLp2dJglfBQvKQmeFwwj19TmGsC6apZFaqg7ZrGZDmXnQFb3qONzI"
"h35G+DYDoFaOjVTxTRlbHaOuxweVb1iDVFFjvPbt+zMKEamw59I8r3a25hlbS+xMt7ic4VTh"
"5iAtMMc4xjzC255zHqYUcs+j1Uc+z1HU87kJm+LTk1FcQ5Dma4AX2tMnNsv5m0QraFUa+te7"
"qJVICxeoQ1qNWgv6xJrX1zkkADq08AJgvPbA9vn77cuuUPUy4yX3gVhSdR/BO8+eSdqdhcxN"
"vLlKnbcuQwugrmSM7NQ8/AtTxiZ8ozCC8y0aHvOozWIBW9qsAFPJEJiKjDJX9QQke0gRvJU/"
"az+ywzvU1yc7b6JlR8WD4GijUxVVR30NBet41eVOPZteCYLwFkDoJ6CaC6rXa1jTU+dVMmjY"
"iSFLGob5b0FarEqwNr0XrSk0cH5uJJrlarPNshpHLbhkswwyLmdi8sFkG4cAACAASURBVNZc"
"TlzeSKXYkD0oebDzHLk6K3wP4vP9h2x6hDw0HN1pXcb+PB4GglofB14vQ9ISxEdXfnm1myWt"
"tyXvojZ0a5a9KaSQ1EekuFeSz6Z/dB1jiPDihB+yWXNXtJc8GkflQhXP4clyNd/PRc2aeKo9"
"BqL36ojDd0jcSBnwjQIqOGQE96OyA60AFNIewsH9KI6rPS3UISjnICWeRv4e5+3l0u8LwPpE"
"Ly+mWE/EbgZuaciYpEnMhB5aJMYlEjqSwv1WHQuldt2SOYyYbFZbIXXHZnkcdfi+dBuEsHDR"
"uCPBc/07cEiom6FF2S1aPOEMFw9++721Bga1DaDmZ9Md/qh6MT1ne33e3+fw3bt/ysqznsUl"
"woDOt7piLrJlS3JIQYplSmmu6058al2H8czV+zbL7rPZAi1JM/AWXzLjfZ3eq3NoEfeV9eN1"
"fRofCf6OB8lSEzxIOD2kqYC5q6s4Dsbrwu013VvFIfT9Lqt5pkISAXfmnwvBO8CljS/bmDOI"
"CbWkYbRCPq2jEIjq6GlEV7P4lJ37+SmEs81aO/c0r2RYWOAJlg2914euTA8eW3TzxOsw5fZe"
"nIoNU1UhgoeHEMUdOtAIdTgwwmutXk8H5Q40d6YnbE1sq+fYiQV1PJ9KISJ5JS+RVbk0B1aI"
"M8smTLrUaQ2qprEPPjqPzvfKzUAaDsrCdsDv7VkrW0FCOP1ibqiRONjitxbKJmSEDod6LNGa"
"29lCRRMcB442WqnWkCw6Nt++7KN4vdj0vDjeh/L+FnD3nh/WvBGZB8rWCS+KZ/mPWzFb2rfY"
"zsIt62BufqyHZhrIXVlh0hkpOk7hcMKcNl2hWcTQW3B2X0YjsZHMNGWoce6FcRaQdSr0D2CJ"
"PC44HnAIcIGIX3hC1H6NdxQcsKsyPdu3z4w7hsUg64TWxMK6Qk/hZMn87MzQaAfoPIcUmBoZ"
"/sI7CN4Bjm81qpoxR61ITH96F+PnbzqW61Gb5dozOfdG6HAz0LuiuYuQ7FcXlYMSCg5I+2c4"
"qGd6ABQHjTmaBcehGwRBI4ObBwAVDw+jSo572j690faL9ttXXZ89XboeVYvnun/h6SmKeGfN"
"c+AMqWUeBvtDySpvJXZdFzVeNl5+OejqLE04uU/q0iwypXEyQvz1cIFHzoQ6yy7KGOripBuE"
"9vkModWW4AGUio8/qJeveqmmlr/XkEAAeNZ9Pe2Ref7OzPyq56Dkhe+fsvE7hTgtmRMyUrUU"
"Uri80Tmx5rTioCspoahx4UuZgomHS5ruWpYZt1zAAw20agUPBxU+2qZ5qX74mIVCj6+0I3K0"
"FTDEiYMxMYltY53DhE3fl/keHm7siOdQr3dnehkcl9hpT+XitCXuqVQHRmGEO3EeKpiCAytb"
"cwOQVxBv5QUFAhWj4r00Pa3t5czVi9J8TuofDsBqq+Ch+THATPSmycPHx0v6EnNUjbbcQezb"
"fziILTtUBO8pXs87PRFEnqmRsOqEoQA0MkItV2LlLFOw7CDlDp4vCI2y6h2IlE4dHx+xp7fs"
"ONqoOB5oVBYgXY3UffsSXd7lx0Tx0EilZxZu7cUpbNPW2Bfa+lQceeLiiKc9tVyegv47D5fz"
"KejHqmFxv2Q/33QGXHHvbyMp+GQyZULIU3t1O5uvIz032RA1VRZB9R35YL2BID+kKRfU2g+O"
"+wY1eTIIgvsmP26yZ3wMLqflASChhbHdwBqmeH3w1etlJrZoI3b2v1vG7b4oYKlY+D4TPIia"
"J2h/sw46EKuMbazTmXtogPotL2SKo5WwxvNTg8DkJ1slZkicsyRIrq/H9KoViAMvWdjlmoz7"
"S+yYl5yewv6dN6qr+dQ8nJfcubmxbcrEU+8l1LVhulukoZGRaqHlH+55nWkfe/1NS4reN5o/"
"uz9b53jCa4FjVJJnZvJ5alPJfHRHzvv1TthVnjDPn4QMeLjnglonFvtnMDIEuKx4vU+w6Mco"
"wOqvPb8XNzzP/dOnx7fzDO2R3zr5qXkXLueNrTo9u7RIjAvP8/r87k0DFQtFGbLy/D+VvYVE"
"pHq9qjLi8dKHBEJewua9jkleaP7+krNpdthn90m9eM+XUSKF5gG0gos66SJjN1d96lk02aXv"
"mwKIvrBABKUAQjgWNMEz3bhYH04Z+5G9PQoKUAS13FM+tuf2nWSFkSAEWFBjfVG2IChlQFbQ"
"PZa1GXumIfn+paZ5C4s9d3fJ0+fPltyfnrJ+L47tVHW4RDbgE3S+xJULrckogPTGXsK9y/aq"
"Eoup6H79hChF/SW2Zn95o1fxs4JADH/BGbK098YHkHFc9YZ/758SQbE2oJ2L0dMB1AhcvXt/"
"ie8Z9kqcP9uXb1g/XwTcZ2hoGLrCU4tuppwbMvup5qLWt/Ms+rCqKFxoC0IYgaitUXuYwvRG"
"6SsuiVmuYLXOsHY6RSErA6QCFO6Q9pvqGaCjzlUVIf6UTwekapG1+3kJ3b+did+XpeNR99HV"
"6wAmR5XUAnGXsFYZK8/icpCKsD23VyV7NPPHK/NqEmfmeX6j+T59kUIzojar0esLUApa8RXQ"
"UEF9iByuijqzxtgyJfrQR34+sZ7Sy8M966UBWvPyoAxnAvliUnpv/Wf9Jz+o63G5/+YNszjZ"
"TIk7uGjhM73hHdzoNEvEp8r07ne+2Z1faKlhplIXzUftILEP2frOkjGW3sgbL2+lCPVVdzfW"
"CDJACyXdS1xolRTi63ORzfPzz3A2P/1IcsTSjGl+vj9/9vE3HdOz67+4feRHbIj2WYBSIBK8"
"EU5aZELttbY6TGy3HVELMZW/MRVAsoez5cuvNbPFnkxIn2xJWC4Rsv7uggpIUWbsVaIOTOwc"
"RqESmS5AEg3Z3BTAuZ7Nqha0sNI88vyrn+jXdTG2e4n41tv1K9Hlo7TFxlicGvlokOTM++lC"
"r7XdfCH1AsmbxIfz97gvTvNqEt9U4kKDg2tijcttu6Xf+7jBIWmFACjGEHcgskqJGX/aJebM"
"HAUOO1IlRVjTVrb59TQXYgnu0E7HSMvDHpjn50gC07PBGD8D0NpAp8aXGgQXqnr8DvNAvOlb"
"+m62JWZink1FU+Iun7cGJsZERXx9IbZA22GkLHGoNkBcv07TFxQJbA3v1ZuQOhKnCCE1ekLE"
"G5W9+wV0TkdfE3oqhExFENE2AdPCKW+zjbue6Wo2n+2r3U+/Lhfi9XwlZEGIzwc+MJQoBvdc"
"U6xeFIXszDgzHxfalMT1G9VZrkfP6K2DqEzbn5ekjwH6JNy2yBUQLkWmW0ZYwfgTCaK+vgFV"
"TRhkQOYhhY0ObaDzzJQ50dGK4+9rmmDYedst7kt91dIA2X4980lpcgSR4ZBpy887+bFOCOr0"
"rO/PRgY87d07OXja07YRdlSJ50ov4WOqxSRMIuUzRuBfTNPXRFW6ndksmG7BCXO2rET24lhG"
"YWld4TikCPVRwE8Je/9YPZMzy/teZ2z/wXbn8h6PrTD3N4j/f69hUf0TLiSwz+ewaOAF8We4"
"RM4ykFazdZZBgyhtz3RhstgtyiFjMX8Jfnjm3JOMDbc0HTO23B6OPKL3GQhKHaJVKu3oQuVY"
"NKTwEXzMGV1xPvyJg2jIzIdxVMYe2QTMta1/Sls5acacTaQSKavVkOcfmVJjouZ53cIojkzG"
"MXkX0xKGk/foWUruu3nl3yXOy7QCpo9hW7Nil9qDiAdDpeNf2GbxVaiondhEXVd/YYXoiM6C"
"YaiK4jU226AhhTYb56y1MVicwZJF6wHuMG3n3p8lJDUeYV9V8rz/lLK+T9O9lWP7dz2NX5fU"
"q4zZbnhR9DjuzkN/dFuTP0o/pvkde3se3QjH3d83DlrOX2oubbPgXeLWeJ0mQaUYqRKNPL99"
"x0tkWEwPI6z10N+iEXnit2KDoedC2aZ2pbmxRuVmoEYwheoZd/Zy39vHcuzIfQSx0U+PuAIk"
"3jPWcWXWirCyvYSLo6Dpr171vmCxnyrNpLdK+5uViRmaNQ/xXVJmsVQv+63u2CzjKS2stJDb"
"if1+BF2cR1BkLxh4QHTjejmc4g1G3bSnX9M1w9mQOWoJqZOAo+/LVvK+N0YzduoFs5HinzIl"
"vayLtPfG250UX+H9LI7X9T/d9k/ZCJjsVCx3SJTw3l2mCemhDUNggBIiZ2zZzNJmjUJZ6DAm"
"hkpczd7DHH0xy1W0hbhkq6C18S4NRJiPpEhG3lqRChA3KtmNpQKKSgyxA1MJBGK6oTt+FiHH"
"It2DXsjzUfZstSc2NgUAbtcvdbyAYL/STp3np62NGdsNBh+3HzmO4vn0FrwFxKpXoAVrhZrf"
"otPyVmF/qBa+31XTrd6iLMMVBLYKHU5FP4Nt5zSkSM3JqVEpBBxoOdR+ITYCgiEjLLj7PLRD"
"7SUMhNHD2jMR4/zN2kY7LX2SIGCA4xWDe2YilUh7NrXdViSPdRahPBuyMkzP/C5LmxWoWpU/"
"UAABWjP7enX0NmNhxJAJiiT8TSkltR3aBGi4YKIqyJhVTMrGDLgz78/OLC0Je72Eh6ZwSRch"
"Cwph25fhevLCGT4rsjxv/PkOd7yaBEfV4oAZkD7ZPDPEq83Kx8SUM9WZSEKUokVNfILNCuGW"
"zctoxJiRAsZfJrK4qy7rY9ca814M2VlIkTQsNSrNnDWaN0PWf4LwR7Jpxhyb7WZGIaWybsL4"
"uJzqox/9U5F1Gu6uQPOkXhJ2YTLsBsdc1JitFrHDJH4hG2MNi9A8glSnYVrTY09ajb/yHJ+a"
"qe9Nwm68Olt9CGGwbiZpvHeEIBshhYb1c9zKumWjXpdgCW0BEbUEEzcw01AzUyzrm/KHxl+E"
"DPat7hdZTKUWvjDhdf3ft498543BGzRQ5WLzPssPouBZ69KQWioWE5nC22XhC1uYVsg2K60T"
"FWtQJRDRvzXV96w4W4Eqro9s7SXcCkvU4gITJdbX82GJbZKoYOqjIeMKldKNxt7faqgCMWa4"
"AUjreAk27OZGaMKu3q6/O+MF2DVzsiU32hoN7ZrrJssYJxe878dnYIGhDubnslkGXKfkzGaF"
"L2ABBMYxXE/CiFa1GVHym46g1dSxH3t2HRMQYc2SsBVk4TapWi6CjzkzjtBgO54Nmf2CLEvx"
"oS0MuNg9r5OaBbpwGsFB4/nCH4MZb9Srw6BNKwXXzWV2Jb5J3+vzOrzQn5W1zvkenXNX/YQW"
"OlQl2MWM8dJ7UbtZ+y4rBFDnrEfKhaSLUda3KKI1tIcUIdyq+tep9bPOVKWGpEcbBBa7NJM9"
"6DGBElFbwkTFzr2/fgdulqYhQ6ZkYSGUYJNAGZSc4QVxFbEfFKRMupvWoCC+JKSvk81KDJ0i"
"dUJb4izVU6OKzVbAq4+J0DDCA/rEVlSvpoMmbL/Y3qmiYiZKGFt7fzj95dhE3jIS8xYoG3zq"
"I+enDCYTFQs+bAamRpSysv5ZDxUL1cCIK2k7T1YjXtd222WzAQuG1LCA2uxK4ercsc0MIT5k"
"m2U1cbZZa9qIMyvfC4z6Q4pP7SnBOO2nQjnoYyRrjiQCWyV8ntZQUnAfHVgPKaroaOlFK5Iu"
"Dd8mCnOaP6mX0ZYxmuJZBm4O0oKRUm4YoGaytNRCrIwXbtcvXuMFjIFfEnehIcKTwZT8OBIc"
"FKa/vM2qar2xslmWvgbCSLTmMKICUoGmspSSCE0onOm+X2bvpZWBr+JbBK5bi7h1/iMuyYFF"
"CoMbM4y62qkIheQiNiOMkjk/a6ZkESBEOXSq+GEM1aYp4tV8j/qeLqOVNOihtjqTx6wsyxym"
"mUdsFnTnRYjzEFMJ61d2YFZDJSwZ9cu4L6gV7fAGo9XEVB97j1CjAau+nARMb6X4X0GLJAmn"
"EojlMjqw5OhDGNvf0oxdtE2BJDKCWYSstWhognjVly/HCM2hWk7QE17ihPGyJR+Ixa6eyI9u"
"1VdYRAz2I0h+iqFZxhb3wq1JsURbi0X8D1/nMMI0DB6G+VvT+bT2AS4nhBWGJkjRSZdRSMhY"
"hMpEFTMq/k0Whiw6fRZCaPABJdW4aRLqoOGLO97fuDjB6/qJt/3bt3R2q72Qf+UOwTyAYlaj"
"pv1CTQdpSdz4MtA6816I4TumcKuDNVfG1FocYQS8wTjCiDrY8kiCCEPVv4sN3Zq+aYWOOmne"
"hBRYSJEgo6jC41ZSMiMsq445M0MnXUA/tQlqRCo6tkd6Ng1BZIWzYWr2RhCg3q6//h5eAHDQ"
"r0mZglmxxKfQCkXCOmkmMYfJ1KcK2Ohd2Fd5a1HoQ+hkYKXK6Ol8f3HRMMIu8XcEa+8ZGL/j"
"gRIzUXddqli4E1KEqGLSrVCh2I0ZczwClnUreX9qhJ4hNZjgGdIzJ5g2yxIVFAv+zadpwkt9"
"SVhStMhOqBkZ8wDXpD1pV83Asakq0wuxcl05PlX+lroFtfNN+3yq7RG9BqA3GzEG57gP6xrc"
"xvhKYXdIkHFI0d/xJKSYR+ywx5eJOQTBCGeLrFqLNtNOkDqrnsLRCzmwuVcKCFCupozX9Xvc"
"9v++BU0iaDCO/AAB7MPMDukMqUhSI31icNnd+6ZiSQpKdoKXcBihJ15bJBmsi7LF9dG/QtH9"
"Qjra6F1Yxka7quESEodlEkachWZjXBgwtaoXwWpT2QqKyM/SzCx1lpBxy3GdbgzCbtdf/The"
"gyl9CxOMS/yh4XszIBhApMLHxNhescAiaCHLFUGGO+HWymwhxaf6VA8jejsxJPWM1In3cgHT"
"5aZbIQmzPqL+1BhJMWlVvp8S1xSGLRuVXklPyqtrUsRuvobUbMjA9VTOiynmNuM5XjJ+QUQy"
"AiV6+DJ59WzlqUUZVo4lr9jBw7ol4ePZDNv8JFqGF4cR/RQgkZFID8hUwELDUNOvrL62m8xj"
"sW5Bl0cNk9zVHZUsRxXcwEz2axmPUR0Mzin68fmEpUcNWSN3L5SQpSIrvFcew+v6fW77f9yg"
"fitBxgJmS2YszpqQSRSxIg+zzWIt5KfIwvtMHC0oFEaU4udbFB4/aDaLHf259+LDzOu1auQl"
"EdYgmAbjy0lIwSWMy2KwWYlUClo5jM0FNFq3eaEVfn8qpf9cT/VbAABu1899Kl59t5mA5Taj"
"jJmzmsg4NspFK+0k5smpumOzELe+GhsIa5FMY7mGbln3ItXEpgO8ZrYW3gtD2Ibg6SHt30i0"
"3GvvkIcUi65uqKjMBotEK9e+uIIBFAKwqElr7594iiWY1e7eULMyXNrJtMbr+gNu+zdtEoUK"
"9EM3uzKg6KhOyTvDkYqQhb1FaRZjtAi3piQidyyu4lObgRBb9HfvalEXmIyXccNaRR1ciLlX"
"IsxRE4D7uZPf8pCCFCvpFsetQZxmQxbHU8jK5jfmKUpUI2WVSNLakHVJw+36Oc+HV0fJKwL8"
"MqoMxxI72+sJskVp6xsXnzeGluHW3E4M3mt1X9UhoTPE1eAApqTeqqSVRTb142FVZS3hxCFM"
"Z6dlyJoStoZs2dVtKmXCM9uvFInNYytmm6+/I4dnoFeF5IxmwnDFe6b+MbxkXDmHa5+xAiWJ"
"NQlRlnLtsxXmsQ+ptchNwhg6jIVn7cSTMKIYW9Z7rb/5ki0rgo6amYEKOfQYKN7jaWVxTVg/"
"VsdICiIshxS2a1cVM91Sy3FpyFrUpLUhm5NYylfXEkgvfzG8rj/ktn/DJp2zSbFaGoqjjakZ"
"qbJCrUYlK/Ehk2Si5ctV3k7xojBCZHRejQGG/fCzVAJxp8Cbjf0eCJ9cgHbkQWCOEZywtgwp"
"Rom0/Y1QEIUVq5GEJImKhixU1WjIgvww0KmeTja/xbZkqq3WqEQBLrfrZ78IXv2nlUY00G9i"
"rj83D2NpSy9htpJtMm6YPNthYPulq6UGY1IsaGuxdiEvoxRW0UCVHS1UyaCqBl0YD4yiDgyF"
"+h/tW0drP8cTg7BmmrnK7gNn9Czrlpc/qnpz9BUMGdXEBFZYQUZs4zwlyDphl0kJnhOv66fd"
"9q/frBRagbA9vaiJyklo9E1WPXeqJH2KH5stfOGNCABCKo7lstZiKXoZkhqSiMxWqo+xJlpl"
"HJlqVTtVQ4MxnZ3WhJTMqqR7/Kod0lhY+2zzT8ZQcGYWDFnsWVo4+tQU4GQfoeVoL/HaWvvw"
"itv15704Xn2Pdmfai4s1JIPhjZUx2CxuVaUKuLJZQecItVmrgGC2kOJTrpL2k/KwwRIiCSOm"
"TPUxSFfDpTtgHVEoh37xBtiPIIE5CylAnD0+Hp/N2bpczolr1K0W3RKPgF0Or0g84axEUhPy"
"sekRvK4/6rZ/nQtY352Wiwbh0WclLqyxMuY0i5v3Rkl/IbcxhRqkkSfTRaH41HTOB3XZ4arD"
"BnMkcQBs6kv4dv75j3GBoNBgpMK9PDvNadNf59K0WudAa0ofUkNSCClXI2R0kiFzZWJPUBxZ"
"sD5hkjqupMPD3q6f9bJ49R1pDgzJZpXAU7JZYWFEx/hglUoFdHgs7QnAFG5B542nTpj5ng6B"
"yZVBNthS0ZJUFslytRIPjE5e09NrVa7CfAMs3z/hDDJakbnYJci6osy27F5zcmKulYBFcGDV"
"L7q5tvx1/Ild3mDPJu42GJ8Dr+uPve1fuxk3eIJzZ6v+qM0q9DCUS8NOwgqNroAqGAYLpFsu"
"Y6mdGGVimC3J1j55r/k4qeq3gnSRte8/yMWak1NI0aSfVFJKMD0zbdX/dmRmq4aaaLFWzsPI"
"hEk0Z7JyY0HtkvenAgrcrh95NXgBQHPj1eYxM08ZqjUJFa+Q/hDGOj41HK32nYQRfOL1cI2U"
"1EtUrMSWmy19GCojhj5ZNgGu9S1k9+OQEFKyFjRMRkjB+rGEzNCJ3ZRz4sqceTVs8cBKz6rC"
"BTfGNdHAIoP/BNf1HHhdf+Jt/5qt/4gsNinONu0pjFoizx5KeItEGGuVUzUj1QUMOq+QVXG5"
"ajZs0LRK7rIV62OojNDGI1+HnAlbnZ2mWrWOKmJXN1GyLJecaflqmJaklmbsDp8NWaDKOK6Z"
"tla0F+invUq8+tSORcq1GIQjMdwiQUpZV7L5VcsKiLMcn1oPjyLFuZf1AoFGokLNlqQbYpUk"
"1PyYmXOvhkvRPEGGQIIiCa+YdtpVC+XSIVue0j0KnFWiOQ+rpzKW4UsDyLhRWVTGJVMV1lz1"
"iK//cNnL4XX9qbf9724mTgmyPp3arKl5iPOxgakXKImWyKCwX/bNuhRFUUMKIw60ohe94Ugi"
"OTCsHp7nXgBK1a+pehy8V0OBfzYzZGgYJ32chRSuWGqrZ++VIgw29cHdJzWKMtbrZm4WrEaP"
"ccsUl9v1p7x6vABcf9pt/9tbnXhi0cKJzTqjyksk6xAtT3h1npZhhPUCXczIjxa0hg566CKx"
"pS7N2YqFkqWrYhgPP5m2xSak/Rp6JNwPKYrEPx6TZ+i8D7539UpNy1UYlvOICR1vVLZFVfU1"
"5Xb9yU8HBs+FFzA8PutWsFkEWVGXw5EYtwMMyhk+RjBfSZAUi72XmfoKja+IGwu6fCEo8Yok"
"NbpsSfJefeY4gOLidCnu5QNPCtlZPOGcPSmkiHGXs9Xc+DN58/VRWtpCLHwmh0HDFNnh6J+j"
"LL4IXteP3Pa/sS3t1x2bldec1MsTr/NwK1+XCy5dEC+Ohb08uXv7m4x9OE0a4NU46Fp5r+Tu"
"ezsUTWWv+FE3NImQ4ibkvZAihewLyMqapKBbbLYmo8ZLEjr57RRoa1cCt+uPe714Abj+rNv+"
"17bERyGkgs3SFXgm1Ee1WTUC2qwvxZ61wc0965IgdT7qhn4ihwxjDJwnFMxWCrqi95J0q2gf"
"R6ldcryvCUJnb3PcBXXxVjqnMRTuwEIrElFCpqgiRPOcSsy5F8JCKYHOkHrElbUmQsoLsIUX"
"wAvIJdKkazni1GN97l6cslOkmohxWLeIFOAF0dxxoRzVElQYQ9q+K2S/ZraS0+IVwB6/oVa0"
"B/0kQj8FKZOt7K1jJQxzPze3Ir2fe1YaFqoW8oWlG8sLDcc4sMfey7RTYmFt9YkZ/Ty9CF7X"
"n3vb/+LmcsUWKj7MY5qJqjk7BaUPgLsrWx7GoGLMm6m/KFJAaCeGG3uyQlWS6+DkvdLtOOi0"
"PPb1COUvW7EI2cWsPYlZ/ivdASAeBhgVa55JGWkuo1woMbE4DfxCvV0//QU4wQuqF3D9Bbf9"
"K7dgs+ZOaGLLWgBBt2QEVHO7kgMIvgfl9WLDuTBAKdpRPf6yeoIMq3n2XiZmhn70XiOeUIj5"
"zw15K2cOI5LOKWG1ARRStD5aGisOzkKK4LrOZIwlkBKynHJZTK/dUIrg7fppLwYJXhgvABDv"
"BuFKl/sQ7VCmmZydim+Vxzv0+1Qo2dRzGIHqBbHpeWZN1oFq48GDXB8JO0YqeK8DAEpFbeO1"
"TfyKZd2KMWGi/e5zas+Bftezy5NCihVkzYop26+paZluYbWoYe35+n+W04vjdf3Ft/3Pbuy6"
"mCSuidlgcdc1ixbXRHhlRLT2hXqEgGG8zF2lmgj+JaF/77NMfmtVH5MbC3aKZrq961ctdPtV"
"glaNnyVe4XcVT3TgWLTmgmjpFAJkHFWkDu9EYUgu5vDCmgu9tfhDXpgQvJR6Addfdtu/YmPP"
"Ptss6wi3ZxfxqZY80WJXWefI2osNkeCRg0oSjIY6fiL+q582wKsg9mpPXp6zCZkEzB8WJanv"
"6EM/VaHUPtqvplsJIUVuS/aQQjykCIUyyZX+vhmj2Im5rIxJqIJM9tbiS7GFl8QLAATtiD2P"
"5uspMs2NSo6yoEu48ZgUK5p6VzUKIwZMQnu9umK5jBEWTR1bU/eW5SrlXqpPHVZvDKpjg2gT"
"1c5RUGXyJqRyNrQwyVg/pdto8E9DoATjNaUPwcufpBK5/zF2FkmD1Nv1B70sGy+P1/WX3/Yv"
"31qjRVGibElY+IT41BQLcN2y4iiKWjjxOioWRxLjZooVK6PJXqNuR0zSNa7vJQB9u+T9vbUI"
"xyhY+5XxAtsyDymST6qnkAXa5hGIsep5cUypB436ejnLZdNLqxdw/dzb/sc2NyUEk9fNSBUj"
"yMwVtl+GlBqs1iAIpt7GOkNrIhJhRWslwp5ikkZC8Vg2wd4raEHFGQAAC5JJREFURDAkeBal"
"el8Q5fhu18iEBciaczaFFGq5TMlmDRsi1PLyNkuUEiapVlZNIj715cHAK8ELwPXzbvuXbS3p"
"lj5M3itbLupPTFo1llsYoSXGrh9e9OSfLlG9nYg6sTUn9ekiXma2ovdKjccUT0CL4PhDvjbi"
"stFpKdR4HC3NFFKcPFx0RLLMWAtxrWE1a5UQUi0CannHOFIvt+v3eiVU4FXhBeD6+bf9Dylh"
"VPL6VCJwLeLFY7ZgWsVhBNVK7wXqokWaBK6JpmFqs7zZmNJUdvcsRTxPNyhhRbsKhgiV0IE9"
"miZtfKNHQwoGaxxXS6flhTJeNyCL2dS9E1CLfUfUVrhdv+erQgKvEC8A1y+47b9/k1TykGsi"
"NFkAgm5JjE8rHf3WdW1pKv+RKesF4rK4iCT0z23YwpI0jNxYromxIdmH5bSGi252VMDipdBt"
"VjT1nTC7SFM448NSDAspwslhE2eZpDkPO0lcF09VCG7X7/YKecCrxQvA9dfd9t+75YZhqokT"
"Xr3SnfUzJkNmQZffZPxcRQnzvN6cu0TdMrtGqDlMPK4w3Yo6ql4B2+DpUgFttGJCahxarG1m"
"HxHAWnR1Z9Fa0db0a8yG7A5nEdZXzhZeOV4Arl9423/3tmgYcmvL8KJ7TGEEYxd6gcri5J/+"
"m8PGRxzh2UZghbIoka3ovWbLNVbWV3Wd7m1Jbi3a1xSVMVAeEUqhYdfO+7n9lIrmRXPWsBS3"
"PnqKETUIbtfv/MpJwOvAq0/tiNEogLOxgaswwmag61vvtUALnyJSJ8j48K6JrWjtja25LLrf"
"nwulEmZ10IJiUaQgXtyFhKpZphpjMFg/d6MwdhFSWJRKrivLVV+nBYx8Jg74aeV2/U6vCYPX"
"gtf1N93237H1uDVVybluptEQHSwulCZaYx3zWCZXrFKab41hg3XBFpOEEkg6K4vLxuPAqCtl"
"HRkpaM3K/Y9wqfNM1dKK1M9tkEn/80TxG5ogJ2u/rIxrDfNKers+9xjUp0+vS72uX3QDsH/x"
"NodbaUnCy8MIgWgrkjsZK3VdQ8MIP4Gx6Az/GYRJt/xs7OKC5AzNf8Zx6hrS8j6Y4K6LRpAZ"
"WHNqX7Xch1Siu7ppUOvqunM6MmzR/0NCxWnZoia2vOde6fS68OrT9bff9t+8pQqICSm+5zCC"
"YwhwCqDOPaUPDtNEVXZgFEl4fYS/PDUVA1i0gr9WL3XjBkm8hrqMMTeqUuAwLNky7Req3hEZ"
"NSwXRAStSqjFqOJ2fXitex+vKvu/M11/160d6DfRmXQ7HtAOtAdd8oDjwHGgtbCONEhDO4A2"
"5u3WpiXjdtA6hy6JM4irjeVH2GzTe0C30yBC30s30h4gAjkAoXfk950+G+bPbyuLL1REuiYp"
"VUfCCDgqWh3LDyqI3oSskHq7fsfr3vV4A3gBuP6eWwfroHsjKWD3oM8+QPrDhwHf4MP2us0r"
"IrxOO/Lu9D0tg1rbiy2u0OI+9k0dkAPHxx2XPiTa15lpfoiwEoUSvwL0QPLPIPFIaBAhwg66"
"DxhVmqm6cobsdv32N7Df8WbwAnD9fbd+ZBtSxwMd+kvgHvJCjJ+Y1Et3TFcLUWFDIobY4n3W"
"lNSwJuNyIpCDaahWKStdYpseD13eOGXgLyvTBg1B8Kci1DJMpkbLKCtw5jX0dv3WN7PT8cbw"
"AnD9A7dRLyJVVhmPuTg++J5GcwJa3OtoURWshgoJxiROLVXPI1co060kYE7Jx30hoCr4EF8u"
"Ua4kvGmbIObvCPGHsUSWIV0sWkd1MVvcqrL1P97YHsebxAvA9UsDYUGfzpwZFUdzKog7hqub"
"3QfrI3GXq+x5eZ28V4veK2D3EN+XZNhXk3g8PKyqp8kV66vJlSpZN/i2QpSuSbTaamG/x+V2"
"/ZY3ubvxuluO83T9wzcA++duheJToUvu2pgI7t4GnZ3V7O+1YGQQiGGEjZ4oaXlqNlrQFduP"
"bXWOUJuGExY9w7FAoy8LHaBDVQtdTU7TijsDWXNIEUZPeFpWc5uRW5Hr9uPt+s1veEf36Y2q"
"l03XL7+Z+TWnZZZfVA+OBxwrgTFhyNYqlhgWDFe7aJZzfeQiGC1X60KVNOyI87OjEq1x9FG9"
"8Rs/efeCi4rJ6tsg3X71FiI3IdnCH5UK4r9/K3sZbwsvANc/PQplyiOYsxat+rK4hN0ZV3N3"
"dYSZgNoKX9/N0Xv1mrt8bXKETHmb3zTRHDmzL7t4iR9g3WbhtEqO+8vt+u/e1i7Gmy+OPF3/"
"zA3A/ou3o9EYVM0Y7bLhtaBV6gXS2lS19jWs/wwCYjSf0lTveUSomCE+nTuFNOUuOsSIB6/m"
"Pko6regSs/sxMK6ddnKj91HaClYudSZ2dadhzb0gfuPb2KthemvqZdP1z0e/H6XLwgj2wi4e"
"FEZke95IUU4euuqwRLVJXeiWloyPJ7TQ/P4D0Ovmg3/s8OFj+zGUePrASE+5gM1twwqxgvgv"
"3/aOBd4FvABc/8INgtY0jHjIv3iLVcNL4UnD3v0N7UWva8nfTNWKa2UOMngHQw2ZNVR5C2ab"
"xFuvoCYhw+RbMH3SD+/0y3i4aEVyJIHL7fr1b3uXjultFkeern/5BmD/rK0dsMvdVL6ch7YT"
"rdMahYYN6nwvlK34yC17eW4k0riJBq+evOSsM3tcpvBA7cMSGxpwKWiHXjynF99GI6Sb1kTB"
"BQB3L+owinBNihb6ucXGLYq+tk1nE6Herv/kre7GPL0rePXp+tU3APtHNuFT+OlSg+OermrZ"
"QwfRIak2Jif4MEaHxqkm78VmC2UBllHYHnTErF0HoKEU1DZyhIFv049kVzqh5EImwiye4PtE"
"W4P3c0NQu/2C3K5f+zb33Mn0ThTHNF3/1k3PZdCakmoTuZbG9Su2EL3kxVZYaBhSv1Aj75U8"
"Fj8ctVs8j2gP/qb2Ru3j9Bms81u/UUuNzTMrRluYg9nuTYF6u/6Dt73T1tO7pV42/a3rDcDP"
"3rdx1NIYr+Wt2SguoSFfLFE0IKefRhbkqpA+Fbo/EzBLR1WHoJXRFEuEAl6TXmTFskFvOVmN"
"YoY2tR+BA/ia6+3t7qn70zuKV5/++vUG4OftWy8KlUcLluGB7D6Uwum8oPXMufeabyOgNy+F"
"SGQb3Q/jeLBSaH8Xp1HogPFiG3LY4xg03Wzze9ty0/frWH/tuw1Wn95pvPr0l683AJ9NSmZ+"
"K/T/TGwNreJeoKhhUggsU5cJuxRrVTVnTc/vHTNFL9YFUi/rm4qyBABCJC3Ps13egH/yPoDV"
"p/cArz595fUG4JcZZKk46sUmml5SlZuNzJD3LSp8btgBKHnLoc8MXAEu2u3o7cROQB2c2ch6"
"QEXL6qDNE2EpVs1xK/Cx9wesPr03ePXpz15vAH7FvtkvzhfAGWOjLYNQ7MDixPVRvJGYG4wl"
"8LQul3RNr85EtUvem7XiRiIAkEaqVnn3dou0wT3WN7xvYPXpPcOrT39af+tfRWLW6Jwzi7VQ"
"opKB2LJ44uQcoVNTD/qjaP1KheOfP/RSyDxBYwuip8H7i8IZ24IGfNP7SZVN7yVeNv2R6w3A"
"F+xb75lJOWquhtb5OEWs3FQETjlLPY9Fzw0S6n8UxMInQeeaXfaMrpZjCz19Bb75PQerT+83"
"Xn36Q7onfsO+QagIElue3Sdfn7KJ2GycwQoO7Bg49qlIhqzPHA3PKtpD4OnChKlWCfDfPxBU"
"2fRBwMum36P75reoOfMKqEhhqo88UKLRaY93wEoP+zSsVcvr9Ey4ml3D8Gp2xYMGfPsHiyqb"
"ykc/+tG3/Rle7/Ql+9YvVHHpt4Jq90AtY3mfGfe6ctXbJd0XXApqC8/aq/pftrKVi+gKLayM"
"DyhSPH3w8eLpD+5bBmUGjilc4lVRWgbuUpyqS0E5dJviqxXgkz4ESPH04cIrTX8y0rZQqZOn"
"GLXKVElYoQCf+iHjKU0farzuTF+9b/cJs4UF+JEfbobuTP8PvRhXFuF66lUAAAAASUVORK5C"
"YII=")
#----------------------------------------------------------------------
RGBCubeImage = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAAL4AAAC+CAIAAAAEFiLKAAAAA3NCSVQICAjb4U/gAAAWGUlE"
"QVR4nO1df+g0xXn/zD5qWzR/qBQlRZQSSSumIbZUli4y0L6lNAqNaUkiIRpItSVSKi+0Fpsm"
"oCmSWtJYIUmbxgZepMGS2hQ0JU0zhA3S8KbBJBClQQzBUg1Jg00wUfemf8zt7jO/du/ue3c7"
"ezcfRJ7v3N7s7MxnPvM8z8zeK5RSyMhYH8XUDciYKzJ1MjZEpk7GhsjUydgQmTpDkFJO3YR0"
"kakThZRSZ/bEkakThuENgMyeGDJ1Auh4Y5DZE0SmjgvDEmH/h8weDyJnk4OoTkk0oEY3gmr1"
"71M3J0WcM3UDkkN1vcTLaAAABDRAKU8Rilr968QtSwyZOj2qN0kslqRZtIWdUcrrCVSrf56g"
"ZUkiUwcAqrf1SmO40rQfNfaVpXwTgWr1j/trXKo4dupU73CVJqY6HKV8G4FqdWbn7UsYx0ud"
"6taw0gyrDkcp30GgWj24qyamjWOkTvVuiWaENMOqw1HKWwlFrT6y5VYmj+OiTnVa4qWVSLOK"
"6nCU8t2EolZ/vZ2GzgHHQp3qzlWVZl3V4SjlaUJRq784UVtngsOnTvXecZ/m5KrDUco7CUWt"
"/nyT5s4Hh0yd6p6h6GnYPgl1DEr5XgLV6s82rSB1HCZ1qvs2V5qTLFg+SnkPgWr1JyerJkUc"
"GnWqD51UaU6+YPko5X0EqtUd26gsFRwOdaqPbkdptqs6HKX8EIFqdfv2qpwSh0Cd6uObRE/D"
"pNmu6nCU8qOEola/u+2K9415U6d6aL08zeqk2YXqcJTy44SiVrfspvp9YK7UqR7eldLsWnU4"
"SvkQoajVW3d5k11hftSpPr19n2b/qsNRyocJRa3evPtbbRNzok712DajpxRUh6OUnyZQrd64"
"rxueFPOgTvX5/SnN/lWHo5SPEahWv77f226C1KlT1ftWmqlUh6OUnydQra6b4uarIl3qVGen"
"UZptbUScHKWsCVSrcromDCFF6lRP7CN62s9GxMlRyrOEolbXTN0QF2lRp3pqt3maqTYiTo5S"
"PkEoavW6qRvSIxXqVE+nojSpqQ5HKZ8iFLW6cuqGAClQp3p2ep8mNTd5GKV8mlDU6oppmzEl"
"darnp4ye0gzOV0cpnyVQrS6dqgHTUKd6IV2lSV91OEr5PIFqdfH+b71v6lQvpq40c1EdjlK+"
"QKBanb/Pm+6POpWeh9LMS3U4SvkigWp13n5utw/qVOdINGg0MB/SzJE6BqXUhKJWevzSk2G3"
"1KkukHgJzQKYG2nmtWD5KOU5hKJWL+3uFruiTnWhRDNj0sxXdThKeQGhqNULu6h8+9SpLpV4"
"efakmbvqcJTyQkJRq+9ut9ptUqe6TGKBpgHmT5rDUB2OUl5KoFo9u60Kt0Od6kqJlw+KNIek"
"OhylvIxAtXrm5FWdlDrVVYemNIeqOhylvJJAtXryJJVsTp3qmsNUmsNWHY5SXkWgWn1ts69v"
"Qp3q2kOIno5ZdThKeQ2hqNXZdb+4NnWqUh4DaY5BdThKeS2hqNXjq39lbeo0x0Ga41GdjbE2"
"dRbHQZpMnVHsSnXmTppjW7A2wPZV5zBIk1VnFNtUnUMiTVadUWxHdQ6PNFl1RnFS1TlU0mTV"
"GcXmqnPYpMmqM4pNVOcYSJNVZxT7SAnOkTRZdUax25TgfEmTqTOKdDciUlgWuZHhIMWNiBRI"
"k1VnFGltRKRDmqw6o0hlIyI10mTVGcX0GxFpkiarziim3IhImTRZdUYxzUZE+qTJqjOKfW9E"
"zIU0WXVGsb+NiHmRJqvOKNJNCaawLHIjw0GKKcEUSJOpM4q0UoLpkCYvWKNIJSWYGmmOU3XE"
"OhdPnxJMkzTHqTprsSGfTR76lBsZDvLZ5CGbGxkO8tnkIZsbGQ7y2eQhmxsZDvLZ5CGbGxkO"
"8tnkoW8hUyeOdDciUlgWuZHhIMWNiBRIk1VnFGltRKRDmqw6o0hlIyI10mTVGcX0GxFpkiar"
"zijy2eQhmxsZDvLZ5CGbGxkO8tnkIZsbGQ7y2eQhmxsZDtJNCaawLHIjw0GKKcEUSJOpM4q0"
"UoLpkCYvWKNIJSWYGmmy6oxi+pRgmqTJqjOKfDZ56FNuZDjIZ5OHbG5kOMhnk4dsbmQ4yGeT"
"h2xuZDjIZ5OHbG5kOMhnk4e+hUydONLdiEhhWeRGhoMUNyJSIE1WnVGktRGRDmmy6owilY2I"
"1EiTVWcU029EpEmarDqjyGeTh2xuZDjIZ5OHbG5kOMhnk4dsbmQ4yGeTo7ZuNIBGN0KchwwP"
"6aYEpyTNjzQAcZ5AUwDQugEgxKvcvjhupJgSnJI039cAxPkCTYGiAAiAoJ8EoPWPAAhxkd8n"
"x4m0UoJTkuY7GoC4SGBRoChQEBYAFWgIAFAIehVAWn8fgBCvDvXNcSGVlOCUpPm2BiAuEUAB"
"KgBCUWABEKEBUCyNosACgn4aKLR+DoAQl0e76QgwfUpwStI8rQGIywUWLWlQoAAWS6UBCATj"
"8fBPBb0aIK2/DUCI10a66sBxpGeT9Tc0APGauNKYBQtLdwdUoAFAyz+XCvSzQKH1NwEIcXW0"
"yw4UR3c2WX9VAxBXCzQFzi2wGFAaQgEsChDQUFyBfh6A1t8AIMQvDfXdYeGIzibrL2kA4g0C"
"6BzhAkAvMI5r7ChNoLz7eiHoFwDS+isAhCij3XdAOIqzyfqLGoAobaUBel3hBgD4SuOUM6Ov"
"hwT9MgCtvwRACDnQjQeAAz+brD+nAQjJQ+5eKtrhN74LM7pyS2nYgkXEjI52nQL9CkBafwGA"
"EKeGenPOONizyfozGoA4xaIn4whzA+hj79419so7AwCoX7Cccrt+Qb8KQOvPARDihmiHzhbp"
"bkRsTppHNABxg0BTgIqlQiwdYeYRd0rTu8axcltpQPFy90aCfgMotH4UgBA3DvftvJDiRsTm"
"pHlYAxA3spA74NLCir2XrrEnMH1wjojS2C7z4I0E/RZQaP0pAELcNNrJs0BaGxGbk+aMBiBu"
"CobczKeB7RHHYvKR4NyL0l3fmd+IK9BbAGj9SQBC3LxGpyeJVDYiNifNgxqAuLkNue2YuTVg"
"7yeQVR7I/q0WnDtKY/nOiLVE0NsB0voTAIS4bbi3U8b0GxGbk+bDGoC4rVWaaMzsx96h8o2D"
"c+eOQ9F7fyNB7wKg9ccACHH7GgOQDGZ5NlnfrwGI273knjPvnZAbTADglHOjDbnhBedASGD4"
"HYMtCd7RhPG/D5DWDwAQ4nSsz9PEzM4m6w9oAOK0wIIpDWDFxo5OdCF3HzzbAtOvXMY1iQfn"
"rsB0KxciLeEXcM2zWiLoDgBafxCAEHeuMgopYDZnk/X7NQBxlx1yA3bszaZ1rxORmLzP/nWO"
"yGrB+XpROpeuyAVFgQUJ+iOg0Ppe088k7h4YhRQwg7PJ+j0agHifmfrnggoAy1HvfNumQMEW"
"FDDxWP7pG+zrA9G7+dRxjYtWaRxdMd9qPCUzuhUo7wwYH1+cd5fRJ63fB4DEvWMDMhnSTQku"
"AH2nBiDe36ZeePRrprLr5GJECQJftwUAdm2Wb8Rj71WCf6ZkjoCtENILuhsotL4LAIkPrjo8"
"e0SKKcEFoO/QAMRfEhqA2FznYXanGb3rENn6NobruwwojVMtP8oTz/65rRrdkOeRfLhVgj4A"
"kNanAZB4YK2R2jXSSgkuAH27BiDub3u/m5R+FL1cI1rfNhxdjwXPsSi9u8z1ncdi71i1a7XK"
"rlbQ/QC0/gMAJD627pDtCKmkBBeAfpcGIP6G0ADnOuk1rgS2JETLR4Pn4Wq7y1ao1lKaWPA/"
"8PXRao0f/WGAtL4VAIlPDI7SPjB9SnAB6Js1APEgUxrAimm5EwpPafqNJ9t3GQ+eg2F8XGkC"
"1XKlWTH4755ulZifh/RGgR4EoPU7AZD45LrDt0VMfDZZv0UDEA8RFp7SBMLsgSiaTVln6o9m"
"5yyP26t/3OOOJf3s5lnbEaFmO/WEQ/pOgc4AhdY3ASDxKXeQ9oLJzibrGzUA8XDbO+Gpxqbs"
"cmb7W9+OErRGrNwVMDsIXzE475QgXL6L+gPdIuhhAFr/DgASj64+iFvBBGeT9W9qAOJfWp9m"
"4cXMnRGLckeD5+GYPFzPXOsX9AhAWt8AgMRnx4dwS9jr2WR9SgMQn7VD7t6liJzKi87g4Km/"
"uBIcdP2CPgOQ1qcAkPjC2DBuAXs6m6yv0wCEYo6wE446HvFAlBsLnp3yo6xfkAKgtQRA4vGN"
"KLEqdn42WZcagHicsGDRUywKtXxDVr6t4Pk46hf0RaDQugRA4iujY7oZdqg6+g0agPhy+6gj"
"wS3b4g6X+7G3F9y60a83U4fK/fPL3Uqx6/q95MKynkiUPhpSFMCCBH0ZgNa/CIDE19cd6FHs"
"RHX01RqA+LofcgeDT77FPRyTrxbcBhL/bKb69fe7HHb9MSVwg/9IFB0767N2cuFE9Qv6KlBo"
"fTUAEt/E9rBl1dGv0QDEU+2jxmJjf+PGFRg7Jg+4nKtMUK9+f4KOurSuUvLgn9c/3AB27BCh"
"+v0H6ZwY9wGHfWe/fuNEPwVA69cCIPGtdQc9iK2pjr5cAxDfYiE3sJyyFIktY+V+7Bp6U643"
"/HqcwzqjN+p8jlgDovVEyp3kgn/scMUbjTdgjZ4U9DRAWl8OgMRzwfGVUiqlgh852ILq6Es0"
"APFcu0kU3nD25oc/76NvxI3GtLZH6ShBtAFe/YOv5IWUbHgPvPNhI0/q1x+L0mPvHAa2wIIN"
"cN4t/G+AtL4EAInvrUuADqtSpyMjVx19kQYgvtdSIeBRtg9geY7+tPDy/U7ICm/+uS4nv5E9"
"QQMN6OoJnfixGhC8kf2kQzcafNLYg4SV5oRP6napoO8ApPVFAEj8sBvlFfmAtVTHsMeojj5f"
"AxA/bEPuXmBs+rse69i0CPyMTWDeuB5xfyOvAYHg1txoteDZfSMn/qTujQae1G5JwLcdvFH/"
"dvPokzo3CjypoP8DCq3PB0DiJQCAllKssmats2BpSCG11gDES+ZRz0UBgE1TAlD0RncSzynv"
"j4eyr4OWua9AeWsgVL9f7tTfVxgrt+vvDKd+/0ZOPf2VTjmLjHg93doULo/X7zjXndFJjlN/"
"uLwL4w1pfgLQWBlr+joaQgj8uHDLqV3GouXF8n8LWMZyirSXGaN3+rgBtni1iVfA8kZ5eec+"
"F+wymPgOyzMSvNx8zQRNYPU45c64dvVYvFmZQN2i41zgjLT/UodVztUdlngbKQqmIZx8xI/P"
"WYs32MBNhgbEAq+cC2pPkvvt4AmSbsXtoiHjFS2Y0YkT7GDYPwRDZqQLLFoHq/OFrXJumE/b"
"zi2YnhOsdcHc2ve0zAA3rG2uwZra2GtNFxA0hdUGfzEie303t+57pn0oYkbTdoi/g+EfsCen"
"P5eNVI/WANZxcjamzpI9LyulZHW9u4L2C7C94loeHIKOWz+ZnF6IHUiIhXKjDlDA7VjhLLqp"
"x2lbd0GQ+rzNHfV5uSUA6F2W2NvNQ8E/6/muPztWWVmMZSPVP9VmPKWUtuSs5O6sQx1h/dXF"
"XLL67WVHBzw4NmbhN6Qcxy0eIAwPVS/1o+d17KFyhmSNoYrRbngLkwuDd6rQoVcvmbw8Etub"
"JjVOfzqjUKAAiNSZmo9jYHRXocOK+Z9RyOrtQDvqi3bUzaA2zFh+Gitvv25cVMtoO46Xo10u"
"0a6STasZDbtgWd4a3SqJdlHgTe2/RWhCBm+qadKiM0w7fcNuatc5vdHW4zYVaIr+iZxe6jvZ"
"aaHdt6yT1d/1pDkhNlqwQlD1GQCyeudS58NrkOdIxrJwlkcZTL4FYxbb5ey8bMt1dXxYr6kx"
"J9dZ71bxgle6IHhlpM1d/qxrvCn3I1ayKlQf2RpjOmxNdRzI6ve8SUkBAeD6EZhejjB4AjAw"
"7bgSdCIHYioVlwowSeg1z5eK7u6OyJE36WOa4clYWHGZXHUyNiK9S0P91fZJY7Ar6hjI6g/Z"
"WLLFIqzqsJenYVVn8u6MUGCobCoEhiq2tjL9t2jnDxXYHIiUL3wjRjtuOItsfG7Y/anu3RVp"
"DHZLHQNZ3QmEpt3wUC2CUuEpAefZNocq1FRrCIPUJ8uh4VNiwKGJSq8zc+w2x7zJBam7d0sa"
"g31Qx0BW7wkPai/vITEPTjtfzDnPwmMWGqqojAVp1349MGYhAbBWSc5Cu4XBRdZdhvymwp4w"
"7fL0p/sgjcHW3ORRqPpuALK6Z+m+FZHdxK7EGMTzql46NeBuEwB34ybmJlvudtz5LZysP69n"
"LS8Ytks76JibtN7ABWipSaT+eH+M6bA/1XEgq/viYh4JbpvItFsElSCmEEHXNbg4rp8sCLuu"
"3ioZyxoElsvQKsmWS3X7BKQxmIw6BrJ6AIi5QRRW9YHgwhmqfszaC+BlTfyIyU/8uP5QaCUK"
"Lz0hVg0sPdyhia2SraFum4w0BhNTx0BWfwv4YzYoFQNDFZYKrBfchiPwsbyiI2M+9WNZO8eh"
"GcgaNKRumZg0BklQx0BWf9+vTQOqzsfMCdFXGrPBDJAzVK6MwVaIYNbAy/cM54LXSRaotyZB"
"GoP9ucmjUPUtAGT1D0uvMPybfo7DGH9hZXlB527baWsnPR1zcp0XeP0LfC94wIjlgp2HWni5"
"4DcnxJgOCamOA1k9Eglu7bRhLK9jrUfxvGLAoYG1ETbk0MQ0I5QK8r1+30vzHZo3pkgag3Sp"
"YyCrx4BYXieSAo7l6AZSwLFkHUIJntX3SZwEj5UBiueCzdr0a+mSxiB16hjI6t8ibpAjFcx7"
"iA3hgD/kyNhwLnhke8T3h3wZi+SCr0udNAbzoI6BrGpb1REaM08hYltaASVwFGI0F8wuWCWv"
"M5YsUNfOgzQGc6KOgazOhtN3vusTyOuPDSoGEjxrJQvieyaOc2Mye6+fE2kMEoqwVoSql/+q"
"r6y+BjIxSysM/hk/ft6bKHBUuelOASPyUunqv0Lq734gEMQ1/Ul4ddX8GNNhftTpoOrXAZDV"
"fwEARl+R5PF2qDxwLtNc32pV7KcFnXcOnQu6ZIEx2pPw6soZk8ZgxtQxUPWVAGT1zHJaO3md"
"LusTUwgKyVWMf7Ef8w7+cmVExtRlsyeNweypY6DqKwDI6n+W5LDevY0rAbV+jNEeXyGC9HJ+"
"vGL4ZSi2SqqLD4Q0BgdCHQNVXwpAVv/brkHtoMJeehx6BX9Gw3FoOnenM4LlLqtaL/jCgyKN"
"wUFRx0DVFxpDVi+2fnQb8vhv2TkCE3wBBbBXQ0fVorsf6rwDZEyHA6ROB1X/FABZveIO8JIu"
"nr9spGLo1S3mR/s8Y56N0odMGoNDpo6Bqs8BICuyHRq4u6qdYT5yFyzHj46+5aleOXzSGBw+"
"dQxU/QoAWV0Q8qMx8pan6+iE/SH1g2MhjcGxUMdA1T8AIKuLAdipoI4oYJke28uOv5Crnj8u"
"0hgcF3UMVP1dY8jqZ0Aw/3KilY9mwVH0x5pA6tljZEyHY6ROB1U/C0BWV4TzOl4KuHdonjlq"
"0hgcNXUMVP0MAFn9nKsrQYfmyUyaJTJ1llD1kwBk9Xrm2YC7Qeo/M2ksZOpYUPUTAGR1LYBu"
"wVJnM2kCyNQJQNX/MXUTZoBi6gZkzBWZOhkbIlMnY0Nk6mRsiP8H0trOAD1A8ycAAAAASUVO"
"RK5CYII=")
def rad2deg(x):
"""
Transforms radians into degrees.
:param `x`: a float representing an angle in radians.
"""
return 180.0*x/pi
def deg2rad(x):
"""
Transforms degrees into radians.
:param `x`: a float representing an angle in degrees.
"""
return x*pi/180.0
def toscale(x):
"""
    Scales a value from the [0, 255] colour range onto the wheel radius.
    :param `x`: a float value in the [0, 255] range to scale.
"""
return x*RADIUS/255.0
def scaletomax(x):
"""
    Scales a value from the wheel radius back onto the [0, 255] colour range.
    :param `x`: a float value in the [0, RADIUS] range to scale.
"""
return x*255.0/RADIUS
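# Illustrative sketch (not part of the original API): `toscale` and `scaletomax`
# are inverse mappings between the [0, 255] saturation range and the wheel RADIUS
# (a module-level constant assumed to be defined earlier in this file).
#
#     sat = 128
#     on_wheel = toscale(sat)        # distance from the wheel centre
#     back = scaletomax(on_wheel)    # ~128 again, up to floating point error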
def rgb2html(colour):
"""
    Transforms an RGB triplet into an HTML hex string.
    :param `colour`: an object exposing integer `r`, `g` and `b` attributes (such as L{Colour}).
"""
hexColour = "#%02x%02x%02x"%(colour.r, colour.g, colour.b)
return hexColour.upper()
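# Illustrative sketch (not part of the original API): `rgb2html` expects an
# object exposing integer `r`, `g` and `b` attributes, such as the L{Colour}
# wrapper defined later in this file.
#
#     colour = Colour(wx.Colour(255, 128, 0))
#     rgb2html(colour)               # -> '#FF8000'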
def Slope(pt1, pt2):
"""
Calculates the slope of the line connecting 2 points.
:param `pt1`: an instance of `wx.Point`;
:param `pt2`: another instance of `wx.Point`.
"""
y = float(pt2.y - pt1.y)
x = float(pt2.x - pt1.x)
if x:
return y/x
else:
return None
def Intersection(line1, line2):
"""
Calculates the intersection point between 2 lines.
:param `line1`: an instance of L{LineDescription};
:param `line2`: another instance of L{LineDescription}.
"""
if line1.slope == line2.slope:
# Parallel lines, no intersection
return wx.Point(0, 0)
elif line1.slope is None:
        # First line is vertical: its equation is x = line1.x.
        # Substitute that x into the second line's equation to get y.
        x = line1.x
        y = line2.slope*x + line2.c
    elif line2.slope is None:
        # Second line is vertical: its equation is x = line2.x.
        # Substitute that x into the first line's equation to get y.
        x = line2.x
        y = line1.slope*line2.x + line1.c
else:
y = ((line1.c*line2.slope) - (line2.c*line1.slope))/(line2.slope - line1.slope)
x = (y - line1.c)/line1.slope
return wx.Point(int(x), int(y))
def FindC(line):
""" Internal function. """
if line.slope is None:
c = line.y
else:
c = line.y - line.slope*line.x
return c
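# Illustrative sketch (not part of the original API): building two
# L{LineDescription} instances (defined later in this file) from point pairs
# and intersecting them with the helpers above.
#
#     pt1, pt2 = wx.Point(0, 0), wx.Point(10, 10)
#     pt3, pt4 = wx.Point(0, 10), wx.Point(10, 0)
#     line1 = LineDescription(pt1.x, pt1.y, Slope(pt1, pt2))
#     line1.c = FindC(line1)
#     line2 = LineDescription(pt3.x, pt3.y, Slope(pt3, pt4))
#     line2.c = FindC(line2)
#     Intersection(line1, line2)     # -> wx.Point(5, 5)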
def PointOnLine(pt1, pt2, length, maxLen):
""" Internal function. """
a = float(length)
if pt2.x != pt1.x:
m = float((pt2.y - pt1.y))/(pt2.x - pt1.x)
m2 = m*m
a2 = a*a
c = pt1.y - m*pt1.x
c2 = c*c
A = 1.0
x = pt1.x
B = 2.0 * pt1.x
x *= x
C = x - a2/(m2 + 1)
x = (B + sqrt(B*B - (4.0*A*C)))/(2.0*A)
y = m*x + c
pt = wx.Point(int(x), int(y))
if Distance(pt, pt1) > maxLen or Distance(pt, pt2) > maxLen:
x = (B - sqrt(B*B - (4.0*A*C)))/(2.0*A)
y = m*x + c
pt = wx.Point(int(x), int(y))
else:
a2 = a*a
y = sqrt(a2)
x = 0.0
pt = wx.Point(int(x), int(y))
pt.x += pt1.x
pt.y += pt1.y
if Distance(pt, pt1) > maxLen or Distance(pt, pt2) > maxLen:
y = -1.0*y
pt = wx.Point(int(x), int(y))
pt.x += pt1.x
pt.y += pt1.y
return pt
def Distance(pt1, pt2):
"""
Returns the distance between 2 points.
:param `pt1`: an instance of `wx.Point`;
:param `pt2`: another instance of `wx.Point`.
"""
distance = sqrt((pt1.x - pt2.x)**2.0 + (pt1.y - pt2.y)**2.0)
return int(distance)
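# Illustrative sketch (not part of the original API): `Distance` returns the
# Euclidean distance truncated to an integer.
#
#     Distance(wx.Point(0, 0), wx.Point(3, 4))     # -> 5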
def AngleFromPoint(pt, center):
"""
Returns the angle between the x-axis and the line connecting the center and
the point `pt`.
:param `pt`: an instance of `wx.Point`;
    :param `center`: an instance of `wx.Point` representing the centre of the colour wheel.
"""
y = -1*(pt.y - center.y)
x = pt.x - center.x
if x == 0 and y == 0:
return 0.0
else:
return atan2(y, x)
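# Illustrative sketch (not part of the original API): `AngleFromPoint` works
# in screen coordinates (the y axis grows downwards) and returns radians in the
# (-pi, pi] range.
#
#     centre = wx.Point(100, 100)
#     AngleFromPoint(wx.Point(150, 100), centre)   # -> 0.0 (to the right)
#     AngleFromPoint(wx.Point(100, 50), centre)    # -> pi/2 (up on screen)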
def PtFromAngle(angle, sat, center):
"""
Given the angle with respect to the x-axis, returns the point based on
the saturation value.
:param `angle`: a float representing an angle;
:param `sat`: a float representing the colour saturation value;
    :param `center`: an instance of `wx.Point` representing the centre of the colour wheel.
"""
angle = deg2rad(angle)
sat = toscale(sat)
x = sat*cos(angle)
y = sat*sin(angle)
pt = wx.Point(int(x), -int(y))
pt.x += center.x
pt.y += center.y
return pt
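# Illustrative sketch (not part of the original API): `PtFromAngle` is roughly
# the inverse of `AngleFromPoint` plus `Distance`: given a hue angle in degrees
# and a saturation in [0, 255], it returns the corresponding wheel position
# around `center`, again in screen coordinates.
#
#     centre = wx.Point(100, 100)
#     pt = PtFromAngle(90, 255, centre)            # top of the wheel
#     rad2deg(AngleFromPoint(pt, centre))          # -> ~90.0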
def RestoreOldDC(dc, oldPen, oldBrush, oldMode):
"""
Restores the old settings for a `wx.DC`.
:param `dc`: an instance of `wx.DC`;
:param `oldPen`: an instance of `wx.Pen`;
:param `oldBrush`: an instance of `wx.Brush`;
:param `oldMode`: the `wx.DC` drawing mode bit.
"""
dc.SetPen(oldPen)
dc.SetBrush(oldBrush)
dc.SetLogicalFunction(oldMode)
def DrawCheckerBoard(dc, rect, checkColour, box=5):
"""
Draws a checkerboard on a `wx.DC`.
:param `dc`: an instance of `wx.DC`;
:param `rect`: the client rectangle on which to draw the checkerboard;
    :param `checkColour`: the colour used for the dark squares of the checkerboard;
    :param `box`: the size, in pixels, of each checkerboard square.
:note: Used for the Alpha channel control and the colour panels.
"""
y = rect.y
checkPen = wx.Pen(checkColour)
checkBrush = wx.Brush(checkColour)
dc.SetPen(checkPen)
dc.SetBrush(checkBrush)
dc.SetClippingRect(rect)
while y < rect.height:
x = box*((y/box)%2) + 2
while x < rect.width:
dc.DrawRectangle(x, y, box, box)
x += box*2
y += box
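# Illustrative sketch (not part of the original API): a minimal paint handler
# using `DrawCheckerBoard`, in the same way the alpha control and colour panels
# below use it. `checkColour` is the module-level check colour assumed to be
# defined earlier in this file.
#
#     def OnPaintExample(self, event):
#         dc = wx.AutoBufferedPaintDC(self)
#         dc.SetBackground(wx.Brush(self.GetBackgroundColour()))
#         dc.Clear()
#         DrawCheckerBoard(dc, self.GetClientRect(), checkColour, box=10)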
class Colour(wx.Colour):
"""
    This is a subclass of `wx.Colour`, which adds Hue, Saturation and Brightness
    capability to the base class. It also contains methods to convert RGB triplets
    into HSB triplets and vice-versa.
"""
def __init__(self, colour):
"""
Default class constructor.
:param `colour`: a standard `wx.Colour`.
"""
wx.Colour.__init__(self)
self.r = colour.Red()
self.g = colour.Green()
self.b = colour.Blue()
self._alpha = colour.Alpha()
self.ToHSV()
def ToRGB(self):
""" Converts a HSV triplet into a RGB triplet. """
maxVal = self.v
delta = (maxVal*self.s)/255.0
minVal = maxVal - delta
hue = float(self.h)
if self.h > 300 or self.h <= 60:
self.r = maxVal
if self.h > 300:
self.g = int(minVal)
hue = (hue - 360.0)/60.0
self.b = int(-(hue*delta - minVal))
else:
self.b = int(minVal)
hue = hue/60.0
self.g = int(hue*delta + minVal)
elif self.h > 60 and self.h < 180:
self.g = int(maxVal)
if self.h < 120:
self.b = int(minVal)
hue = (hue/60.0 - 2.0)*delta
self.r = int(minVal - hue)
else:
self.r = int(minVal)
hue = (hue/60.0 - 2.0)*delta
self.b = int(minVal + hue)
else:
self.b = int(maxVal)
if self.h < 240:
self.r = int(minVal)
hue = (hue/60.0 - 4.0)*delta
self.g = int(minVal - hue)
else:
self.g = int(minVal)
hue = (hue/60.0 - 4.0)*delta
self.r = int(minVal + hue)
def ToHSV(self):
""" Converts a RGB triplet into a HSV triplet. """
minVal = float(min(self.r, min(self.g, self.b)))
maxVal = float(max(self.r, max(self.g, self.b)))
delta = maxVal - minVal
self.v = int(maxVal)
if abs(delta) < 1e-6:
self.h = self.s = 0
else:
temp = delta/maxVal
self.s = int(temp*255.0)
if self.r == int(maxVal):
temp = float(self.g-self.b)/delta
elif self.g == int(maxVal):
temp = 2.0 + (float(self.b-self.r)/delta)
else:
temp = 4.0 + (float(self.r-self.g)/delta)
temp *= 60
if temp < 0:
temp += 360
elif temp >= 360.0:
temp = 0
self.h = int(temp)
def GetPyColour(self):
""" Returns the wxPython `wx.Colour` associated with this instance. """
return wx.Colour(self.r, self.g, self.b, self._alpha)
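# Illustrative sketch (not part of the original API): the L{Colour} wrapper
# keeps its RGB and HSV representations in sync through ToHSV()/ToRGB().
#
#     c = Colour(wx.Colour(255, 0, 0))
#     (c.h, c.s, c.v)                # -> (0, 255, 255) for pure red
#     c.v = 128
#     c.ToRGB()
#     (c.r, c.g, c.b)                # -> (128, 0, 0)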
class LineDescription(object):
""" Simple class to store description and constants for a line in 2D space. """
def __init__(self, x=0, y=0, slope=None, c=None):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `x`: the x coordinate of the first point;
:param `y`: the y coordinate of the first point;
:param `slope`: the line's slope;
:param `c`: a floating point constant.
"""
self.x = x
self.y = y
self.slope = slope
self.c = c
class BasePyControl(wx.PyControl):
"""
Base class used to hold common code for the HSB colour wheel and the RGB
colour cube.
"""
def __init__(self, parent, bitmap=None):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent;
:param `bitmap`: the background bitmap for this custom control.
"""
wx.PyControl.__init__(self, parent, style=wx.NO_BORDER)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self._bitmap = bitmap
mask = wx.Mask(self._bitmap, wx.Colour(192, 192, 192))
self._bitmap.SetMask(mask)
self._mainDialog = wx.GetTopLevelParent(self)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
self.Bind(wx.EVT_LEFT_UP, self.OnLeftUp)
self.Bind(wx.EVT_MOTION, self.OnMotion)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{BasePyControl}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
dc = wx.AutoBufferedPaintDC(self)
dc.SetBackground(wx.Brush(self.GetParent().GetBackgroundColour()))
dc.Clear()
dc.DrawBitmap(self._bitmap, 0, 0, True)
if self._mainDialog._initOver:
self.DrawMarkers(dc)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` for L{BasePyControl}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This is intentionally empty to reduce flicker.
"""
pass
def DrawMarkers(self, dc=None):
"""
Draws the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
:note: This method must be overridden in derived classes.
"""
pass
def DrawLines(self, dc):
"""
Draws the lines connecting the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
:note: This method must be overridden in derived classes.
"""
pass
def AcceptsFocusFromKeyboard(self):
"""
Can this window be given focus by keyboard navigation? If not, the
only way to give it focus (provided it accepts it at all) is to click
it.
:note: This method always returns ``False`` as we do not accept focus from
the keyboard.
:note: Overridden from `wx.PyControl`.
"""
return False
def AcceptsFocus(self):
"""
Can this window be given focus by mouse click?
:note: This method always returns ``False`` as we do not accept focus from
mouse click.
:note: Overridden from `wx.PyControl`.
"""
return False
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{BasePyControl}.
:param `event`: a `wx.MouseEvent` event to be processed.
:note: This method must be overridden in derived classes.
"""
pass
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` for L{BasePyControl}.
:param `event`: a `wx.MouseEvent` event to be processed.
:note: This method must be overridden in derived classes.
"""
pass
def OnMotion(self, event):
"""
Handles the ``wx.EVT_MOTION`` for L{BasePyControl}.
:param `event`: a `wx.MouseEvent` event to be processed.
:note: This method must be overridden in derived classes.
"""
pass
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` for L{BasePyControl}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.Refresh()
def DoGetBestSize(self):
""" Returns the custom control best size (used by sizers). """
return wx.Size(self._bitmap.GetWidth(), self._bitmap.GetHeight())
class RGBCube(BasePyControl):
"""
    Implements the drawing, mouse handling and sizing routines for the RGB
    colour cube.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
BasePyControl.__init__(self, parent, bitmap=RGBCubeImage.GetBitmap())
self._index = -1
def DrawMarkers(self, dc=None):
"""
Draws the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
"""
if dc is None:
dc = wx.ClientDC(self)
oldPen, oldBrush, oldMode = dc.GetPen(), dc.GetBrush(), dc.GetLogicalFunction()
dc.SetPen(wx.WHITE_PEN)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetLogicalFunction(wx.XOR)
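        # Drawing with wx.XOR lets the same DrawMarkers call both draw and erase
        # the handles: painting them twice over the same spot restores the
        # underlying bitmap, which is how the mouse handlers refresh the markers.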
rects = []
blueLen = self._mainDialog._blueLen
greenLen = self._mainDialog._greenLen
redLen = self._mainDialog._redLen
colour = self._mainDialog._colour
pt = [wx.Point() for i in xrange(3)]
pt[0] = PointOnLine(Vertex, Top, (colour.r*redLen)/255, redLen)
pt[1] = PointOnLine(Vertex, Left, (colour.g*greenLen)/255, greenLen)
pt[2] = PointOnLine(Vertex, Right, (colour.b*blueLen)/255, blueLen)
for i in xrange(3):
rect = wx.Rect(pt[i].x - RECT_WIDTH, pt[i].y - RECT_WIDTH, 2*RECT_WIDTH, 2*RECT_WIDTH)
rects.append(rect)
dc.DrawRectangleRect(rect)
self.DrawLines(dc)
RestoreOldDC(dc, oldPen, oldBrush, oldMode)
self._rects = rects
def DrawLines(self, dc):
"""
Draws the lines connecting the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
"""
cuboid = self._mainDialog._cuboid
dc.DrawLinePoint(cuboid[1], cuboid[2])
dc.DrawLinePoint(cuboid[2], cuboid[3])
dc.DrawLinePoint(cuboid[3], cuboid[4])
dc.DrawLinePoint(cuboid[4], cuboid[5])
dc.DrawLinePoint(cuboid[5], cuboid[2])
dc.DrawLinePoint(cuboid[5], cuboid[6])
dc.DrawLinePoint(cuboid[6], cuboid[7])
dc.DrawLinePoint(cuboid[7], cuboid[4])
dc.DrawLinePoint(cuboid[1], cuboid[6])
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{RGBCube}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
self._mouseIn = False
if self._rects[RED].Contains(point):
self.CaptureMouse()
self._mouseIn = True
self._index = RED
elif self._rects[GREEN].Contains(point):
self.CaptureMouse()
self._mouseIn = True
self._index = GREEN
elif self._rects[BLUE].Contains(point):
self.CaptureMouse()
self._mouseIn = True
self._index = BLUE
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` for L{RGBCube}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.GetCapture():
self.ReleaseMouse()
self._mouseIn = False
def OnMotion(self, event):
"""
Handles the ``wx.EVT_MOTION`` for L{RGBCube}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
if not (self.GetCapture() and self._mouseIn):
event.Skip()
return
bChange = False
mainDialog = self._mainDialog
colour = mainDialog._colour
redLen, greenLen, blueLen = mainDialog._redLen, mainDialog._greenLen, mainDialog._blueLen
dc = wx.ClientDC(self)
self.DrawMarkers(dc)
if self._index == RED:
if point.y > Vertex.y:
point.y = Vertex.y
point.x = Vertex.x
val = Distance(point, Vertex)
if val > redLen:
val = redLen
val = (float(val)/redLen)*255
colour.r = int(val)
pt = PointOnLine(Vertex, Top, (colour.r*redLen)/255, redLen)
self._rects[RED] = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
bChange = True
elif self._index == GREEN:
if point.x > Vertex.x:
point.x = Vertex.x
point.y = self._rects[GREEN].GetTop() + RECT_WIDTH
val = Distance(point, Vertex)
if val > greenLen:
val = greenLen
val = (float(val)/greenLen)*255
colour.g = int(val)
pt = PointOnLine(Vertex, Left, (colour.g*greenLen)/255, greenLen)
self._rects[GREEN] = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
bChange = True
elif self._index == BLUE:
if point.x < Vertex.x:
point.x = Vertex.x
point.y = self._rects[BLUE].GetTop() + RECT_WIDTH
val = Distance(point, Vertex)
if val > blueLen:
val = blueLen
val = (float(val)/blueLen)*255
colour.b = int(val)
pt = PointOnLine(Vertex, Right, (colour.b*blueLen)/255, blueLen)
self._rects[BLUE] = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
bChange = True
if bChange:
mainDialog.CalcCuboid()
self.DrawMarkers(dc)
colour.ToHSV()
mainDialog.SetSpinVals()
mainDialog.CalcRects()
mainDialog.DrawHSB()
mainDialog.DrawBright()
mainDialog.DrawAlpha()
class HSVWheel(BasePyControl):
"""
Implements the drawing, mouse handling and sizing routines for the HSV
colour wheel.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
BasePyControl.__init__(self, parent, bitmap=HSVWheelImage.GetBitmap())
self._mouseIn = False
def DrawMarkers(self, dc=None):
"""
Draws the markers on top of the background bitmap.
:param `dc`: an instance of `wx.DC`.
"""
if dc is None:
dc = wx.ClientDC(self)
oldPen, oldBrush, oldMode = dc.GetPen(), dc.GetBrush(), dc.GetLogicalFunction()
dc.SetPen(wx.WHITE_PEN)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetLogicalFunction(wx.XOR)
dc.DrawRectangleRect(self._mainDialog._currentRect)
RestoreOldDC(dc, oldPen, oldBrush, oldMode)
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{HSVWheel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
self._mouseIn = False
if self.InCircle(point):
self._mouseIn = True
if self._mouseIn:
self.CaptureMouse()
self.TrackPoint(point)
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` for L{HSVWheel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.GetCapture():
self.ReleaseMouse()
self._mouseIn = False
def OnMotion(self, event):
"""
Handles the ``wx.EVT_MOTION`` for L{HSVWheel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
if self.GetCapture() and self._mouseIn:
self.TrackPoint(point)
def InCircle(self, pt):
"""
Returns whether a point is inside the HSV wheel or not.
:param `pt`: an instance of `wx.Point`.
"""
return Distance(pt, self._mainDialog._centre) <= RADIUS
def TrackPoint(self, pt):
"""
Track a mouse event inside the HSV colour wheel.
:param `pt`: an instance of `wx.Point`.
"""
if not self._mouseIn:
return
dc = wx.ClientDC(self)
self.DrawMarkers(dc)
mainDialog = self._mainDialog
colour = mainDialog._colour
colour.h = int(rad2deg(AngleFromPoint(pt, mainDialog._centre)))
if colour.h < 0:
colour.h += 360
colour.s = int(scaletomax(Distance(pt, mainDialog._centre)))
if colour.s > 255:
colour.s = 255
mainDialog.CalcRects()
self.DrawMarkers(dc)
colour.ToRGB()
mainDialog.SetSpinVals()
mainDialog.CalcCuboid()
mainDialog.DrawRGB()
mainDialog.DrawBright()
mainDialog.DrawAlpha()
class BaseLineCtrl(wx.PyControl):
"""
Base class used to hold common code for the Alpha channel control and the
brightness palette control.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
wx.PyControl.__init__(self, parent, size=(20, 200), style=wx.NO_BORDER)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self._mainDialog = wx.GetTopLevelParent(self)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
self.Bind(wx.EVT_LEFT_UP, self.OnLeftUp)
self.Bind(wx.EVT_MOTION, self.OnMotion)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` for L{BaseLineCtrl}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This is intentionally empty to reduce flicker.
"""
pass
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{BaseLineCtrl}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
theRect = self.GetClientRect()
if not theRect.Contains(point):
event.Skip()
return
self.CaptureMouse()
self.TrackPoint(point)
def OnLeftUp(self, event):
"""
Handles the ``wx.EVT_LEFT_UP`` for L{BaseLineCtrl}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
if self.GetCapture():
self.ReleaseMouse()
def OnMotion(self, event):
"""
Handles the ``wx.EVT_MOTION`` for L{BaseLineCtrl}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
point = wx.Point(event.GetX(), event.GetY())
if self.GetCapture():
self.TrackPoint(point)
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` for L{BaseLineCtrl}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.Refresh()
def DoGetBestSize(self):
""" Returns the custom control best size (used by sizers). """
return wx.Size(24, 208)
def BuildRect(self):
""" Internal method. """
brightRect = wx.Rect(*self.GetClientRect())
brightRect.x += 2
brightRect.y += 6
brightRect.width -= 4
brightRect.height -= 8
return brightRect
def AcceptsFocusFromKeyboard(self):
"""
Can this window be given focus by keyboard navigation? If not, the
only way to give it focus (provided it accepts it at all) is to click
it.
:note: This method always returns ``False`` as we do not accept focus from
the keyboard.
:note: Overridden from `wx.PyControl`.
"""
return False
def AcceptsFocus(self):
"""
Can this window be given focus by mouse click?
:note: This method always returns ``False`` as we do not accept focus from
mouse click.
:note: Overridden from `wx.PyControl`.
"""
return False
class BrightCtrl(BaseLineCtrl):
"""
Implements the drawing, mouse handling and sizing routines for the brightness
palette control.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
BaseLineCtrl.__init__(self, parent)
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{BrightCtrl}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
dc = wx.AutoBufferedPaintDC(self)
dc.SetBackground(wx.Brush(self.GetParent().GetBackgroundColour()))
dc.Clear()
colour = self._mainDialog._colour.GetPyColour()
brightRect = self.BuildRect()
target_red = colour.Red()
target_green = colour.Green()
target_blue = colour.Blue()
h, s, v = colorsys.rgb_to_hsv(target_red / 255.0, target_green / 255.0,
target_blue / 255.0)
v = 1.0
vstep = 1.0/(brightRect.height-1)
for y_pos in range(brightRect.y, brightRect.height+brightRect.y):
r, g, b = [c * 255.0 for c in colorsys.hsv_to_rgb(h, s, v)]
colour = wx.Colour(int(r), int(g), int(b))
dc.SetPen(wx.Pen(colour, 1, wx.SOLID))
dc.DrawRectangle(brightRect.x, y_pos, brightRect.width, 1)
v = v - vstep
dc.SetPen(wx.BLACK_PEN)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangleRect(brightRect)
self.DrawMarkers(dc)
def TrackPoint(self, pt):
"""
Tracks a mouse action inside the palette control.
:param `pt`: an instance of `wx.Point`.
"""
brightRect = self.BuildRect()
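        # Map the vertical mouse position onto the [0, 255] brightness range:
        # the bottom of the palette is 0 and the top is 255.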
d = brightRect.GetBottom() - pt.y
d *= 255
d /= brightRect.height
if d < 0:
d = 0
if d > 255:
            d = 255
mainDialog = self._mainDialog
colour = mainDialog._colour
mainDialog.DrawMarkers()
colour.v = int(d)
colour.ToRGB()
mainDialog.SetSpinVals()
mainDialog.CalcRects()
mainDialog.CalcCuboid()
mainDialog.DrawMarkers()
mainDialog.DrawAlpha()
def DrawMarkers(self, dc=None):
"""
Draws square markers used with mouse gestures.
:param `dc`: an instance of `wx.DC`.
"""
if dc is None:
dc = wx.ClientDC(self)
colour = self._mainDialog._colour
brightRect = self.BuildRect()
y = int(colour.v/255.0*brightRect.height)
y = brightRect.GetBottom() - y
brightMark = wx.Rect(brightRect.x-2, y-4, brightRect.width+4, 8)
oldPen, oldBrush, oldMode = dc.GetPen(), dc.GetBrush(), dc.GetLogicalFunction()
dc.SetPen(wx.Pen(wx.WHITE, 2))
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetLogicalFunction(wx.XOR)
dc.DrawRectangleRect(brightMark)
RestoreOldDC(dc, oldPen, oldBrush, oldMode)
class AlphaCtrl(BaseLineCtrl):
"""
Implements the drawing, mouse handling and sizing routines for the alpha
channel control.
"""
def __init__(self, parent):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window.
"""
BaseLineCtrl.__init__(self, parent)
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{AlphaCtrl}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
pdc = wx.PaintDC(self)
dc = wx.GCDC(pdc)
mem_dc = wx.MemoryDC()
fullRect = self.GetClientRect()
bmp = wx.EmptyBitmap(fullRect.width, fullRect.height)
mem_dc.SelectObject(bmp)
rect = self.BuildRect()
backBrush = wx.Brush(self.GetParent().GetBackgroundColour())
mem_dc.SetBackground(backBrush)
mem_dc.Clear()
mem_dc.SetBrush(wx.WHITE_BRUSH)
mem_dc.DrawRectangleRect(rect)
DrawCheckerBoard(mem_dc, rect, checkColour)
self.DrawAlphaShading(mem_dc, rect)
mem_dc.DestroyClippingRegion()
self.DrawMarkers(mem_dc)
mem_dc.SetBrush(wx.TRANSPARENT_BRUSH)
mem_dc.SetPen(wx.BLACK_PEN)
mem_dc.DrawRectangleRect(rect)
mem_dc.SelectObject(wx.NullBitmap)
pdc.DrawBitmap(bmp, 0, 0)
def DrawAlphaShading(self, dc, rect):
"""
Draws the alpha shading on top of the checkerboard.
:param `dc`: an instance of `wx.DC`;
:param `rect`: the L{AlphaCtrl} client rectangle.
"""
gcdc = wx.GCDC(dc)
colour = self._mainDialog._colour.GetPyColour()
alpha = 255.0
vstep = 255.0*2/(rect.height-1)
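        # The shading sweeps the current colour from fully opaque at the top of
        # the control to fully transparent at the bottom, two pixel rows per step.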
r, g, b = colour.Red(), colour.Green(), colour.Blue()
colour_gcdc = wx.Colour(r, g, b, alpha)
gcdc.SetBrush(wx.TRANSPARENT_BRUSH)
for y_pos in range(rect.y, rect.height+rect.y, 2):
colour_gcdc = wx.Colour(r, g, b, int(alpha))
gcdc.SetPen(wx.Pen(colour_gcdc, 1, wx.SOLID))
gcdc.DrawRectangle(rect.x, y_pos, rect.width, 2)
alpha = alpha - vstep
def TrackPoint(self, pt):
"""
Tracks a mouse action inside the Alpha channel control.
:param `pt`: an instance of `wx.Point`.
"""
alphaRect = self.BuildRect()
d = alphaRect.GetBottom() - pt.y
d *= 255
d /= alphaRect.height
if d < 0:
d = 0
if d > 255:
d = 255
self._mainDialog._colour._alpha = int(d)
self.Refresh()
self._mainDialog.SetSpinVals()
def DrawMarkers(self, dc=None):
"""
Draws square markers used with mouse gestures.
:param `dc`: an instance of `wx.DC`.
"""
if dc is None:
dc = wx.ClientDC(self)
colour = self._mainDialog._colour
alphaRect = self.BuildRect()
y = int(colour._alpha/255.0*alphaRect.height)
y = alphaRect.GetBottom() - y
alphaMark = wx.Rect(alphaRect.x-2, y-4, alphaRect.width+4, 8)
oldPen, oldBrush, oldMode = dc.GetPen(), dc.GetBrush(), dc.GetLogicalFunction()
dc.SetPen(wx.Pen(wx.WHITE, 2))
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.SetLogicalFunction(wx.XOR)
dc.DrawRectangleRect(alphaMark)
RestoreOldDC(dc, oldPen, oldBrush, oldMode)
class ColourPanel(wx.PyPanel):
"""
Simple custom class used to display "old" and "new" colour panels, with alpha
blending capabilities.
"""
def __init__(self, parent, style=wx.SIMPLE_BORDER):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window;
:param `style`: the L{ColourPanel} window style.
"""
wx.PyPanel.__init__(self, parent, style=style)
self._mainDialog = wx.GetTopLevelParent(self)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_SIZE, self.OnSize)
self._colour = Colour(wx.WHITE)
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{ColourPanel}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
pdc = wx.PaintDC(self)
dc = wx.GCDC(pdc)
mem_dc = wx.MemoryDC()
rect = self.GetClientRect()
bmp = wx.EmptyBitmap(rect.width, rect.height)
mem_dc.SelectObject(bmp)
backBrush = wx.Brush(self.GetParent().GetBackgroundColour())
mem_dc.SetBackground(backBrush)
mem_dc.Clear()
mem_dc.SetBrush(wx.WHITE_BRUSH)
mem_dc.DrawRectangleRect(rect)
DrawCheckerBoard(mem_dc, rect, checkColour, box=10)
gcdc = wx.GCDC(mem_dc)
colour_gcdc = wx.Colour(self._colour.r, self._colour.g, self._colour.b, self._colour._alpha)
gcdc.SetBrush(wx.Brush(colour_gcdc))
gcdc.SetPen(wx.Pen(colour_gcdc))
gcdc.DrawRectangleRect(rect)
mem_dc.SelectObject(wx.NullBitmap)
dc.DrawBitmap(bmp, 0, 0)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` for L{ColourPanel}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This is intentionally empty to reduce flicker.
"""
pass
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` for L{ColourPanel}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.Refresh()
def RefreshColour(self, colour):
"""
Refresh the panel after a colour/alpha change.
:param `colour`: the new background colour of L{ColourPanel}.
"""
self._colour = colour
self.Refresh()
def AcceptsFocusFromKeyboard(self):
"""
Can this window be given focus by keyboard navigation? If not, the
only way to give it focus (provided it accepts it at all) is to click
it.
:note: This method always returns ``False`` as we do not accept focus from
the keyboard.
:note: Overridden from `wx.PyPanel`.
"""
return False
def AcceptsFocus(self):
"""
Can this window be given focus by mouse click?
:note: This method always returns ``False`` as we do not accept focus from
mouse click.
:note: Overridden from `wx.PyPanel`.
"""
return False
class CustomPanel(wx.PyControl):
"""
    This panel displays a series of custom colours (chosen by the user) just like
the standard `wx.ColourDialog`.
"""
def __init__(self, parent, colourData):
"""
Default class constructor.
Used internally. Do not call it in your code!
:param `parent`: the control parent window;
:param `colourData`: an instance of `wx.ColourData`.
"""
wx.PyControl.__init__(self, parent, style=wx.NO_BORDER)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self._colourData = colourData
self._customColours = [None]*16
self._mainDialog = wx.GetTopLevelParent(self)
self.InitializeColours()
self._smallRectangleSize = wx.Size(20, 16)
self._gridSpacing = 4
self._customColourRect = wx.Rect(2, 2, (8*self._smallRectangleSize.x) + (7*self._gridSpacing),
(2*self._smallRectangleSize.y) + (1*self._gridSpacing))
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEraseBackground)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_LEFT_DOWN, self.OnLeftDown)
def InitializeColours(self):
""" Initializes the 16 custom colours in L{CustomPanel}. """
curr = self._colourData.GetColour()
self._colourSelection = -1
for i in xrange(16):
c = self._colourData.GetCustomColour(i)
if c.Ok():
self._customColours[i] = self._colourData.GetCustomColour(i)
else:
self._customColours[i] = wx.Colour(255, 255, 255)
if c == curr:
self._colourSelection = i
def DoGetBestSize(self):
""" Returns the custom control best size (used by sizers). """
return self._customColourRect.width+4, self._customColourRect.height+4
def OnPaint(self, event):
"""
Handles the ``wx.EVT_PAINT`` for L{CustomPanel}.
:param `event`: a `wx.PaintEvent` event to be processed.
"""
dc = wx.AutoBufferedPaintDC(self)
dc.SetBackground(wx.Brush(self.GetParent().GetBackgroundColour()))
dc.Clear()
self.PaintCustomColours(dc)
self.PaintHighlight(dc, True)
def OnEraseBackground(self, event):
"""
Handles the ``wx.EVT_ERASE_BACKGROUND`` for L{CustomPanel}.
:param `event`: a `wx.EraseEvent` event to be processed.
:note: This is intentionally empty to reduce flicker.
"""
pass
def OnSize(self, event):
"""
Handles the ``wx.EVT_SIZE`` for L{CustomPanel}.
:param `event`: a `wx.SizeEvent` event to be processed.
"""
self.Refresh()
def OnLeftDown(self, event):
"""
Handles the ``wx.EVT_LEFT_DOWN`` for L{CustomPanel}.
:param `event`: a `wx.MouseEvent` event to be processed.
"""
x, y = event.GetX(), event.GetY()
selX = (x - self._customColourRect.x)/(self._smallRectangleSize.x + self._gridSpacing)
selY = (y - self._customColourRect.y)/(self._smallRectangleSize.y + self._gridSpacing)
ptr = selX + selY*8
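        # The custom colours are laid out in a 2x8 grid, so the linear index of
        # the clicked cell is column + row*8.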
dc = wx.ClientDC(self)
self.PaintHighlight(dc, False)
self._colourSelection = ptr
self._mainDialog._colour = Colour(self._customColours[self._colourSelection])
self.PaintCustomColour(dc, selX, selY)
self.PaintHighlight(dc, True)
self._mainDialog.DrawAll()
def PaintCustomColours(self, dc):
"""
Draws all the 16 subpanels with their custom colours.
:param `dc`: an instance of `wx.DC`.
"""
for i in xrange(2):
for j in xrange(8):
ptr = i*8 + j
x = (j*(self._smallRectangleSize.x+self._gridSpacing)) + self._customColourRect.x
y = (i*(self._smallRectangleSize.y+self._gridSpacing)) + self._customColourRect.y
dc.SetPen(wx.BLACK_PEN)
brush = wx.Brush(self._customColours[ptr])
dc.SetBrush(brush)
dc.DrawRectangle(x, y, self._smallRectangleSize.x, self._smallRectangleSize.y)
def PaintHighlight(self, dc, draw=True):
"""
Highlight the current custom colour selection (if any).
:param `dc`: an instance of `wx.DC`;
:param `draw`: whether to draw a thin black border around the selected custom
colour or not.
"""
if self._colourSelection < 0:
return
# Number of pixels bigger than the standard rectangle size
# for drawing a highlight
deltaX = deltaY = 2
# User-defined colours
y = self._colourSelection/8
x = self._colourSelection - (y*8)
x = (x*(self._smallRectangleSize.x + self._gridSpacing) + self._customColourRect.x) - deltaX
y = (y*(self._smallRectangleSize.y + self._gridSpacing) + self._customColourRect.y) - deltaY
if draw:
dc.SetPen(wx.BLACK_PEN)
else:
dc.SetPen(wx.LIGHT_GREY_PEN)
dc.SetBrush(wx.TRANSPARENT_BRUSH)
dc.DrawRectangle(x, y, (self._smallRectangleSize.x + (2*deltaX)), (self._smallRectangleSize.y + (2*deltaY)))
def PaintCustomColour(self, dc, selX, selY):
"""
Paints a newly added custom colour subpanel.
:param `dc`: an instance of `wx.DC`;
:param `selX`: the x coordinate of the custom colour subpanel;
:param `selY`: the y coordinate of the custom colour subpanel.
"""
dc.SetPen(wx.BLACK_PEN)
brush = wx.Brush(self._customColours[self._colourSelection])
dc.SetBrush(brush)
ptr = selX*8 + selY
x = (selX*(self._smallRectangleSize.x+self._gridSpacing)) + self._customColourRect.x
y = (selY*(self._smallRectangleSize.y+self._gridSpacing)) + self._customColourRect.y
dc.DrawRectangle(x, y, self._smallRectangleSize.x, self._smallRectangleSize.y)
dc.SetBrush(wx.NullBrush)
def AddCustom(self, colour):
"""
Adds a user-chosen colour to the list of custom colours.
:param `colour`: an instance of `wx.Colour`.
"""
self._colourSelection += 1
self._colourSelection = self._colourSelection%16
dc = wx.ClientDC(self)
self._customColours[self._colourSelection] = colour.GetPyColour()
self._colourData.SetCustomColour(self._colourSelection, self._customColours[self._colourSelection])
self.PaintCustomColours(dc)
class CubeColourDialog(wx.Dialog):
"""
This is the CubeColourDialog main class implementation.
"""
def __init__(self, parent, colourData=None, agwStyle=CCD_SHOW_ALPHA):
"""
Default class constructor.
:param `colourData`: a standard `wx.ColourData` (as used in `wx.ColourDialog`);
:param `agwStyle`: can be either ``None`` or ``CCD_SHOW_ALPHA``, depending if you want
to hide the alpha channel control or not.
"""
wx.Dialog.__init__(self, parent, id=wx.ID_ANY, title=_("CubeColourDialog: Choose Colour"),
pos=wx.DefaultPosition, size=(900, 900), style=wx.DEFAULT_DIALOG_STYLE)
if colourData:
self._colourData = colourData
else:
self._colourData = wx.ColourData()
self._colourData.SetColour(wx.Colour(128, 128, 128))
self._colour = Colour(self._colourData.GetColour())
self._oldColour = Colour(self._colourData.GetColour())
self._inMouse = False
self._initOver = False
self._inDrawAll = False
self._agwStyle = agwStyle
self.mainPanel = wx.Panel(self, -1)
self.hsvSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "HSB")
self.rgbValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "RGB Values")
self.hsvValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "HSB Values")
self.rgbSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "RGB")
self.alphaSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "Alpha")
self.alphaValueSizer_staticbox = wx.StaticBox(self.mainPanel, -1, "Alpha")
self.rgbBitmap = RGBCube(self.mainPanel)
self.hsvBitmap = HSVWheel(self.mainPanel)
self.brightCtrl = BrightCtrl(self.mainPanel)
self.alphaCtrl = AlphaCtrl(self.mainPanel)
self.showAlpha = wx.CheckBox(self.mainPanel, -1, "Show Alpha Control")
self.customColours = CustomPanel(self.mainPanel, self._colourData)
self.addCustom = wx.Button(self.mainPanel, -1, "Add to custom colours")
self.okButton = wx.Button(self.mainPanel, -1, "Ok")
self.cancelButton = wx.Button(self.mainPanel, -1, "Cancel")
self.oldColourPanel = ColourPanel(self.mainPanel, style=wx.SIMPLE_BORDER)
self.newColourPanel = ColourPanel(self.mainPanel, style=wx.SIMPLE_BORDER)
self.redSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.greenSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.blueSpin = wx.SpinCtrl(self.mainPanel, -1, "180", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.hueSpin = wx.SpinCtrl(self.mainPanel, -1, "0", min=0, max=359,
style=wx.SP_ARROW_KEYS)
self.saturationSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.brightnessSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.alphaSpin = wx.SpinCtrl(self.mainPanel, -1, "", min=0, max=255,
style=wx.SP_ARROW_KEYS)
self.accessCode = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.htmlCode = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.webSafe = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.htmlName = wx.TextCtrl(self.mainPanel, -1, "", style=wx.TE_READONLY)
self.SetProperties()
self.DoLayout()
self.spinCtrls = [self.redSpin, self.greenSpin, self.blueSpin,
self.hueSpin, self.saturationSpin, self.brightnessSpin]
for spin in self.spinCtrls:
spin.Bind(wx.EVT_SPINCTRL, self.OnSpinCtrl)
self.Bind(wx.EVT_SPINCTRL, self.OnAlphaSpin, self.alphaSpin)
self.Bind(wx.EVT_BUTTON, self.OnOk, self.okButton)
self.Bind(wx.EVT_BUTTON, self.OnCancel, self.cancelButton)
self.Bind(wx.EVT_BUTTON, self.OnAddCustom, self.addCustom)
self.Bind(wx.EVT_CHECKBOX, self.OnShowAlpha)
self.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
self.Bind(wx.EVT_CHAR_HOOK, self.OnKeyUp)
self.Centre(wx.BOTH)
wx.CallAfter(self.InitDialog)
def SetProperties(self):
""" Sets some initial properties for L{CubeColourDialog} (sizes, values). """
self.okButton.SetDefault()
self.oldColourPanel.SetMinSize((-1, 50))
self.newColourPanel.SetMinSize((-1, 50))
self.redSpin.SetMinSize((60, -1))
self.greenSpin.SetMinSize((60, -1))
self.blueSpin.SetMinSize((60, -1))
self.hueSpin.SetMinSize((60, -1))
self.saturationSpin.SetMinSize((60, -1))
self.brightnessSpin.SetMinSize((60, -1))
self.alphaSpin.SetMinSize((60, -1))
self.showAlpha.SetValue(1)
self.accessCode.SetInitialSize((80, -1))
self.webSafe.SetInitialSize((80, -1))
self.htmlCode.SetInitialSize((80, -1))
def DoLayout(self):
""" Layouts all the controls in the L{CubeColourDialog}. """
dialogSizer = wx.BoxSizer(wx.VERTICAL)
mainSizer = wx.GridBagSizer(10, 5)
hsvValueSizer = wx.StaticBoxSizer(self.hsvValueSizer_staticbox, wx.VERTICAL)
hsvGridSizer = wx.GridSizer(2, 3, 2, 10)
rgbValueSizer = wx.StaticBoxSizer(self.rgbValueSizer_staticbox, wx.HORIZONTAL)
rgbGridSizer = wx.GridSizer(2, 3, 2, 10)
alphaValueSizer = wx.StaticBoxSizer(self.alphaValueSizer_staticbox, wx.VERTICAL)
alphaGridSizer = wx.BoxSizer(wx.VERTICAL)
customSizer = wx.BoxSizer(wx.VERTICAL)
buttonSizer = wx.BoxSizer(wx.VERTICAL)
accessSizer = wx.BoxSizer(wx.VERTICAL)
panelSizer = wx.BoxSizer(wx.VERTICAL)
htmlSizer1 = wx.BoxSizer(wx.HORIZONTAL)
htmlSizer2 = wx.BoxSizer(wx.VERTICAL)
htmlSizer_a = wx.BoxSizer(wx.VERTICAL)
htmlSizer_b = wx.BoxSizer(wx.VERTICAL)
hsvSizer = wx.StaticBoxSizer(self.hsvSizer_staticbox, wx.HORIZONTAL)
rgbSizer = wx.StaticBoxSizer(self.rgbSizer_staticbox, wx.VERTICAL)
alphaSizer = wx.StaticBoxSizer(self.alphaSizer_staticbox, wx.VERTICAL)
mainSizer.Add(self.showAlpha, (0, 0), (1, 1), wx.LEFT|wx.TOP, 10)
htmlLabel1 = wx.StaticText(self.mainPanel, -1, "HTML Code")
htmlLabel2 = wx.StaticText(self.mainPanel, -1, "Web Safe")
htmlSizer_a.Add(htmlLabel1, 0, wx.TOP, 3)
htmlSizer_b.Add(htmlLabel2, 0, wx.TOP, 3)
htmlSizer_a.Add(self.htmlCode, 0, wx.TOP, 3)
htmlSizer_b.Add(self.webSafe, 0, wx.TOP, 3)
htmlSizer1.Add(htmlSizer_a, 0)
htmlSizer1.Add(htmlSizer_b, 0, wx.LEFT, 10)
mainSizer.Add(htmlSizer1, (1, 0), (1, 1), wx.LEFT|wx.RIGHT, 10)
htmlLabel3 = wx.StaticText(self.mainPanel, -1, "HTML Name")
htmlSizer2.Add(htmlLabel3, 0, wx.TOP|wx.BOTTOM, 3)
htmlSizer2.Add(self.htmlName, 0)
mainSizer.Add(htmlSizer2, (1, 1), (1, 1), wx.LEFT|wx.RIGHT, 10)
customLabel = wx.StaticText(self.mainPanel, -1, "Custom Colours")
customSizer.Add(customLabel, 0, wx.BOTTOM, 3)
customSizer.Add(self.customColours, 0)
customSizer.Add(self.addCustom, 0, wx.TOP|wx.ALIGN_LEFT|wx.ALIGN_CENTER_VERTICAL, 5)
mainSizer.Add(customSizer, (0, 2), (2, 2), wx.ALIGN_CENTER|wx.LEFT|wx.RIGHT, 5)
rgbSizer.Add(self.rgbBitmap, 0, wx.ALL, 15)
mainSizer.Add(rgbSizer, (2, 0), (1, 1), wx.ALL|wx.EXPAND, 10)
hsvSizer.Add(self.hsvBitmap, 0, wx.ALL, 15)
hsvSizer.Add(self.brightCtrl, 0, wx.RIGHT|wx.TOP|wx.BOTTOM, 15)
mainSizer.Add(hsvSizer, (2, 1), (1, 1), wx.ALL|wx.EXPAND, 10)
alphaSizer.Add(self.alphaCtrl, 0, wx.TOP|wx.ALIGN_CENTER, 15)
mainSizer.Add(alphaSizer, (2, 2), (1, 1), wx.ALL|wx.EXPAND, 10)
oldLabel = wx.StaticText(self.mainPanel, -1, "Old Colour")
panelSizer.Add(oldLabel, 0, wx.BOTTOM, 3)
panelSizer.Add(self.oldColourPanel, 0, wx.BOTTOM|wx.EXPAND, 20)
newLabel = wx.StaticText(self.mainPanel, -1, "New Colour")
accessLabel = wx.StaticText(self.mainPanel, -1, "MS Access Code")
accessSizer.Add(accessLabel, 0, wx.BOTTOM, 3)
accessSizer.Add(self.accessCode, 0)
panelSizer.Add(newLabel, 0, wx.BOTTOM, 3)
panelSizer.Add(self.newColourPanel, 0, wx.EXPAND)
panelSizer.Add((0, 0), 1, wx.EXPAND)
panelSizer.Add(accessSizer, 0, wx.TOP, 5)
mainSizer.Add(panelSizer, (2, 3), (1, 1), wx.ALL|wx.EXPAND, 10)
redLabel = wx.StaticText(self.mainPanel, -1, "Red")
rgbGridSizer.Add(redLabel, 0)
greenLabel = wx.StaticText(self.mainPanel, -1, "Green")
rgbGridSizer.Add(greenLabel, 0)
blueLabel = wx.StaticText(self.mainPanel, -1, "Blue")
rgbGridSizer.Add(blueLabel, 0)
rgbGridSizer.Add(self.redSpin, 0, wx.EXPAND)
rgbGridSizer.Add(self.greenSpin, 0, wx.EXPAND)
rgbGridSizer.Add(self.blueSpin, 0, wx.EXPAND)
rgbValueSizer.Add(rgbGridSizer, 1, 0, 0)
mainSizer.Add(rgbValueSizer, (3, 0), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
hueLabel = wx.StaticText(self.mainPanel, -1, "Hue")
hsvGridSizer.Add(hueLabel, 0)
saturationLabel = wx.StaticText(self.mainPanel, -1, "Saturation")
hsvGridSizer.Add(saturationLabel, 0)
brightnessLabel = wx.StaticText(self.mainPanel, -1, "Brightness")
hsvGridSizer.Add(brightnessLabel, 0)
hsvGridSizer.Add(self.hueSpin, 0, wx.EXPAND)
hsvGridSizer.Add(self.saturationSpin, 0, wx.EXPAND)
hsvGridSizer.Add(self.brightnessSpin, 0, wx.EXPAND)
hsvValueSizer.Add(hsvGridSizer, 1, wx.EXPAND)
mainSizer.Add(hsvValueSizer, (3, 1), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
alphaLabel = wx.StaticText(self.mainPanel, -1, "Alpha")
alphaGridSizer.Add(alphaLabel, 0)
alphaGridSizer.Add(self.alphaSpin, 0, wx.EXPAND|wx.TOP, 10)
alphaValueSizer.Add(alphaGridSizer, 1, wx.EXPAND)
mainSizer.Add(alphaValueSizer, (3, 2), (1, 1), wx.LEFT|wx.RIGHT|wx.BOTTOM|wx.EXPAND, 10)
buttonSizer.Add(self.okButton, 0, wx.BOTTOM, 3)
buttonSizer.Add(self.cancelButton, 0)
mainSizer.Add(buttonSizer, (3, 3), (1, 1), wx.ALIGN_CENTER|wx.LEFT|wx.RIGHT, 5)
self.mainPanel.SetAutoLayout(True)
self.mainPanel.SetSizer(mainSizer)
mainSizer.Fit(self.mainPanel)
mainSizer.SetSizeHints(self.mainPanel)
if self.GetAGWWindowStyleFlag() & CCD_SHOW_ALPHA == 0:
mainSizer.Hide(self.showAlpha)
mainSizer.Hide(alphaSizer)
mainSizer.Hide(alphaValueSizer)
dialogSizer.Add(self.mainPanel, 1, wx.EXPAND)
self.SetAutoLayout(True)
self.SetSizer(dialogSizer)
dialogSizer.Fit(self)
dialogSizer.SetSizeHints(self)
self.Layout()
self.mainSizer = mainSizer
self.dialogSizer = dialogSizer
self.alphaSizers = [alphaSizer, alphaValueSizer]
def InitDialog(self):
""" Initialize the L{CubeColourDialog}. """
hsvRect = self.hsvBitmap.GetClientRect()
self._centre = wx.Point(hsvRect.x + hsvRect.width/2, hsvRect.y + hsvRect.height/2)
self._redLen = Distance(Vertex, Top)
self._greenLen = Distance(Vertex, Left)
self._blueLen = Distance(Vertex, Right)
self.CalcSlopes()
self.CalcCuboid()
self.CalcRects()
self.SetSpinVals()
self._initOver = True
wx.CallAfter(self.Refresh)
def CalcSlopes(self):
""" Calculates the line slopes in the RGB colour cube. """
        self._lines = {RED: LineDescription(), GREEN: LineDescription(), BLUE: LineDescription()}
self._lines[RED].slope = Slope(Top, Vertex)
self._lines[GREEN].slope = Slope(Left, Vertex)
self._lines[BLUE].slope = Slope(Right, Vertex)
for i in xrange(3):
self._lines[i].x = Vertex.x
self._lines[i].y = Vertex.y
self._lines[i].c = FindC(self._lines[i])
def CalcCuboid(self):
""" Calculates the RGB colour cube vertices. """
rLen = (self._colour.r*self._redLen)/255.0
gLen = (self._colour.g*self._greenLen)/255.0
bLen = (self._colour.b*self._blueLen)/255.0
lines = [LineDescription() for i in xrange(12)]
self._cuboid = [None]*8
self._cuboid[0] = Vertex
self._cuboid[1] = PointOnLine(Vertex, Top, int(rLen), self._redLen)
self._cuboid[3] = PointOnLine(Vertex, Left, int(gLen), self._greenLen)
self._cuboid[7] = PointOnLine(Vertex, Right, int(bLen), self._blueLen)
lines[0] = self._lines[RED]
lines[1] = self._lines[GREEN]
lines[2] = self._lines[BLUE]
lines[3].slope = self._lines[GREEN].slope
lines[3].x = self._cuboid[1].x
lines[3].y = self._cuboid[1].y
lines[3].c = FindC(lines[3])
lines[4].slope = self._lines[RED].slope
lines[4].x = self._cuboid[3].x
lines[4].y = self._cuboid[3].y
lines[4].c = FindC(lines[4])
lines[5].slope = self._lines[BLUE].slope
lines[5].x = self._cuboid[3].x
lines[5].y = self._cuboid[3].y
lines[5].c = FindC(lines[5])
lines[6].slope = self._lines[GREEN].slope
lines[6].x = self._cuboid[7].x
lines[6].y = self._cuboid[7].y
lines[6].c = FindC(lines[6])
lines[10].slope = self._lines[BLUE].slope
lines[10].x = self._cuboid[1].x
lines[10].y = self._cuboid[1].y
lines[10].c = FindC(lines[10])
lines[11].slope = self._lines[RED].slope
lines[11].x = self._cuboid[7].x
lines[11].y = self._cuboid[7].y
lines[11].c = FindC(lines[11])
self._cuboid[2] = Intersection(lines[3], lines[4])
self._cuboid[4] = Intersection(lines[5], lines[6])
self._cuboid[6] = Intersection(lines[10], lines[11])
lines[7].slope = self._lines[RED].slope
lines[7].x = self._cuboid[4].x
lines[7].y = self._cuboid[4].y
lines[7].c = FindC(lines[7])
lines[8].slope = self._lines[BLUE].slope
lines[8].x = self._cuboid[2].x
lines[8].y = self._cuboid[2].y
lines[8].c = FindC(lines[8])
self._cuboid[5] = Intersection(lines[7], lines[8])
def CalcRects(self):
""" Calculates the brightness control user-selected rect. """
pt = PtFromAngle(self._colour.h, self._colour.s, self._centre)
self._currentRect = wx.Rect(pt.x - RECT_WIDTH, pt.y - RECT_WIDTH,
2*RECT_WIDTH, 2*RECT_WIDTH)
def DrawMarkers(self, dc=None):
"""
Draws the markers for all the controls.
:param `dc`: an instance of `wx.DC`. If `dc` is ``None``, a `wx.ClientDC` is
created on the fly.
"""
if dc is None:
dc = wx.ClientDC(self)
self.hsvBitmap.DrawMarkers()
self.rgbBitmap.DrawMarkers()
self.brightCtrl.DrawMarkers()
def DrawRGB(self):
""" Refreshes the RGB colour cube. """
self.rgbBitmap.Refresh()
def DrawHSB(self):
""" Refreshes the HSB colour wheel. """
self.hsvBitmap.Refresh()
def DrawBright(self):
""" Refreshes the brightness control. """
self.brightCtrl.Refresh()
def DrawAlpha(self):
""" Refreshes the alpha channel control. """
self.alphaCtrl.Refresh()
def SetSpinVals(self):
""" Sets the values for all the spin controls. """
self.redSpin.SetValue(self._colour.r)
self.greenSpin.SetValue(self._colour.g)
self.blueSpin.SetValue(self._colour.b)
self.hueSpin.SetValue(self._colour.h)
self.saturationSpin.SetValue(self._colour.s)
self.brightnessSpin.SetValue(self._colour.v)
self.alphaSpin.SetValue(self._colour._alpha)
self.SetPanelColours()
self.SetCodes()
def SetPanelColours(self):
""" Assigns colours to the colour panels. """
self.oldColourPanel.RefreshColour(self._oldColour)
self.newColourPanel.RefreshColour(self._colour)
def SetCodes(self):
""" Sets the HTML/MS Access codes (if any) in the text controls. """
colour = rgb2html(self._colour)
self.htmlCode.SetValue(colour)
self.htmlCode.Refresh()
if colour in HTMLCodes:
colourName, access, webSafe = HTMLCodes[colour]
self.webSafe.SetValue(webSafe)
self.accessCode.SetValue(access)
self.htmlName.SetValue(colourName)
else:
self.webSafe.SetValue("")
self.accessCode.SetValue("")
self.htmlName.SetValue("")
def OnCloseWindow(self, event):
"""
Handles the ``wx.EVT_CLOSE`` event for L{CubeColourDialog}.
:param `event`: a `wx.CloseEvent` event to be processed.
"""
self.EndModal(wx.ID_CANCEL)
def OnKeyUp(self, event):
"""
Handles the ``wx.EVT_CHAR_HOOK`` event for L{CubeColourDialog}.
:param `event`: a `wx.KeyEvent` event to be processed.
"""
if event.GetKeyCode() == wx.WXK_ESCAPE:
self.EndModal(wx.ID_CANCEL)
event.Skip()
def ShowModal(self):
"""
Shows L{CubeColourDialog} as a modal dialog. Program flow does
not return until the dialog has been dismissed with `EndModal`.
:note: Overridden from `wx.Dialog`.
"""
return wx.Dialog.ShowModal(self)
def SetAGWWindowStyleFlag(self, agwStyle):
"""
Sets the L{CubeColourDialog} window style flags.
:param `agwStyle`: can only be ``CCD_SHOW_ALPHA`` or ``None``.
"""
        show = agwStyle & CCD_SHOW_ALPHA
self._agwStyle = agwStyle
self.mainSizer.Show(self.alphaSizers[0], show)
self.mainSizer.Show(self.alphaSizers[1], show)
self.mainSizer.Fit(self.mainPanel)
self.mainSizer.SetSizeHints(self.mainPanel)
self.mainSizer.Layout()
self.dialogSizer.Fit(self)
self.dialogSizer.SetSizeHints(self)
self.Layout()
self.Refresh()
self.Update()
def GetAGWWindowStyleFlag(self):
"""
Returns the L{CubeColourDialog} window style flags.
:see: L{SetAGWWindowStyleFlag} for a list of possible flags.
"""
return self._agwStyle
def OnOk(self, event):
"""
Handles the Ok ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.EndModal(wx.ID_OK)
def OnCancel(self, event):
"""
Handles the Cancel ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.OnCloseWindow(event)
def OnAddCustom(self, event):
"""
Handles the Add Custom ``wx.EVT_BUTTON`` event for L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
self.customColours.AddCustom(self._colour)
def OnShowAlpha(self, event):
"""
Shows/hides the alpha channel control in L{CubeColourDialog}.
:param `event`: a `wx.CommandEvent` event to be processed.
"""
agwStyle = self.GetAGWWindowStyleFlag()
show = event.IsChecked()
if show:
agwStyle |= CCD_SHOW_ALPHA
else:
agwStyle &= ~CCD_SHOW_ALPHA
self.SetAGWWindowStyleFlag(agwStyle)
def OnSpinCtrl(self, event):
"""
Handles the ``wx.EVT_SPINCTRL`` event for RGB and HSB colours.
:param `event`: a `wx.SpinEvent` event to be processed.
"""
obj = event.GetEventObject()
position = self.spinCtrls.index(obj)
colourVal = event.GetInt()
attribute, maxVal = colourAttributes[position], colourMaxValues[position]
self.AssignColourValue(attribute, colourVal, maxVal, position)
def OnAlphaSpin(self, event):
"""
Handles the ``wx.EVT_SPINCTRL`` event for the alpha channel.
:param `event`: a `wx.SpinEvent` event to be processed.
"""
colourVal = event.GetInt()
originalVal = self._colour._alpha
if colourVal != originalVal and self._initOver:
if colourVal < 0:
colourVal = 0
if colourVal > 255:
colourVal = 255
self._colour._alpha = colourVal
self.DrawAlpha()
def AssignColourValue(self, attribute, colourVal, maxVal, position):
""" Common code to handle spin control changes. """
originalVal = getattr(self._colour, attribute)
if colourVal != originalVal and self._initOver:
if colourVal < 0:
colourVal = 0
if colourVal > maxVal:
colourVal = maxVal
setattr(self._colour, attribute, colourVal)
if position < 3:
self._colour.ToHSV()
else:
self._colour.ToRGB()
self.DrawAll()
def DrawAll(self):
""" Draws all the custom controls after a colour change. """
if self._initOver and not self._inDrawAll:
self._inDrawAll = True
dc1 = wx.ClientDC(self.hsvBitmap)
self.hsvBitmap.DrawMarkers(dc1)
dc2 = wx.ClientDC(self.rgbBitmap)
self.rgbBitmap.DrawMarkers(dc2)
self.rgbBitmap.DrawLines(dc2)
dc3 = wx.ClientDC(self.brightCtrl)
self.brightCtrl.DrawMarkers(dc3)
dc4 = wx.ClientDC(self.alphaCtrl)
self.alphaCtrl.DrawMarkers(dc4)
self.CalcCuboid()
self.CalcRects()
self.DrawRGB()
self.DrawHSB()
self.DrawBright()
self.DrawAlpha()
self.SetSpinVals()
self._inDrawAll = False
def GetColourData(self):
""" Returns a wxPython compatible `wx.ColourData`. """
self._colourData.SetColour(self._colour.GetPyColour())
return self._colourData
def GetRGBAColour(self):
""" Returns a 4-elements tuple of red, green, blue, alpha components. """
return (self._colour.r, self._colour.g, self._colour.b, self._colour._alpha)
def GetHSVAColour(self):
""" Returns a 4-elements tuple of hue, saturation, brightness, alpha components. """
return (self._colour.h, self._colour.s, self._colour.v, self._colour._alpha)
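#----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): shows the
# ShowModal/GetColourData round trip described in the docstrings above.
# Assumes a plain wxPython environment; the starting colour is just an example.
if __name__ == '__main__':
    app = wx.App(False)
    colour_data = wx.ColourData()
    colour_data.SetColour(wx.Colour(128, 128, 128))
    dialog = CubeColourDialog(None, colour_data)
    if dialog.ShowModal() == wx.ID_OK:
        colour_data = dialog.GetColourData()
        print "Selected colour:", colour_data.GetColour()
    dialog.Destroy()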
|
ezequielpereira/Time-Line
|
libs64/wx/lib/agw/cubecolourdialog.py
|
Python
|
gpl-3.0
| 139,714 | 0.003285 |
#-------------------------------------------------------------------------------
#
# This file is part of pygimplib.
#
# Copyright (C) 2014, 2015 khalim19 <khalim19@gmail.com>
#
# pygimplib is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pygimplib is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pygimplib. If not, see <http://www.gnu.org/licenses/>.
#
#-------------------------------------------------------------------------------
"""
This module defines the following classes:
* `ItemData` - an associative container that stores all GIMP items and item
groups of a certain type
* subclasses of `ItemData`:
* `LayerData` for layers
* `ChannelData` for channels
* `PathData` for paths
* `_ItemDataElement` - wrapper for `gimp.Item` objects containing custom
attributes derived from the original `gimp.Item` attributes
"""
#===============================================================================
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
str = unicode
#===============================================================================
import os
import abc
from collections import OrderedDict
from collections import namedtuple
import gimp
from . import pgpath
from . import objectfilter
#===============================================================================
pdb = gimp.pdb
#===============================================================================
class ItemData(object):
"""
This class is an interface to store all items (and item groups) of a certain
type (e.g. layers, channels or paths) of a GIMP image in an ordered
dictionary, allowing to access the items via their names and get various
custom attributes derived from the existing item attributes.
Use one of the subclasses for items of a certain type:
* `LayerData` for layers,
* `ChannelData` for channels,
* `PathData` for paths (vectors).
For custom item attributes, see the documentation for the `_ItemDataElement`
class. `_ItemDataElement` is common for all `ItemData` subclasses.
Attributes:
* `image` - GIMP image to get item data from.
* `is_filtered` - If True, ignore items that do not match the filter
(`ObjectFilter`) in this object when iterating.
* `filter` (read-only) - `ObjectFilter` instance where you can add or remove
filter rules or subfilters to filter items.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, image, is_filtered=False, filter_match_type=objectfilter.ObjectFilter.MATCH_ALL):
self.image = image
self.is_filtered = is_filtered
# Filters applied to all items in self._itemdata
self._filter = objectfilter.ObjectFilter(filter_match_type)
# Contains all items (including item groups) in the item tree.
# key: `_ItemDataElement.orig_name` (derived from `gimp.Item.name`, which is unique)
# value: `_ItemDataElement` object
self._itemdata = OrderedDict()
# key `_ItemDataElement` object (parent) or None (root of the item tree)
# value: set of `_ItemDataElement` objects
self._uniquified_itemdata = {}
self._fill_item_data()
@property
def filter(self):
return self._filter
def __getitem__(self, name):
"""
Access an `_ItemDataElement` object by its `orig_name` attribute.
"""
return self._itemdata[name]
def __contains__(self, name):
"""
Return True if an `_ItemDataElement` object, specified by its `orig_name`
attribute, is in the item data. Otherwise return False.
"""
return name in self._itemdata
def __len__(self):
"""
Return the number of all item data elements - that is, all immediate
children of the image and all nested children.
"""
return len([item_elem for item_elem in self])
def __iter__(self):
"""
If `is_filtered` is False, iterate over all items. If `is_filtered` is True,
iterate only over items that match the filter in this object.
Yields:
* `item_elem` - The current `_ItemDataElement` object.
"""
if not self.is_filtered:
for item_elem in self._itemdata.values():
yield item_elem
else:
for item_elem in self._itemdata.values():
if self._filter.is_match(item_elem):
yield item_elem
def _items(self):
"""
Yield current (`gimp.Item.name`, `_ItemDataElement` object) tuple.
"""
if not self.is_filtered:
for name, item_elem in self._itemdata.items():
yield name, item_elem
else:
for name, item_elem in self._itemdata.items():
if self._filter.is_match(item_elem):
yield name, item_elem
def uniquify_name(self, item_elem, include_item_path=True,
uniquifier_position=None, uniquifier_position_parents=None):
"""
Make the `name` attribute in the specified `_ItemDataElement` object
unique among all other, already uniquified `_ItemDataElement` objects.
To achieve uniquification, a string ("uniquifier") in the form of
" (<number>)" is inserted at the end of the item names.
Parameters:
* `item_elem` - `_ItemDataElement` object whose `name` attribute
will be uniquified.
* `include_item_path` - If True, take the item path into account when
uniquifying.
* `uniquifier_position` - Position (index) where the uniquifier is inserted
into the current item. If the position is None, insert the uniquifier at
the end of the item name (i.e. append it).
* `uniquifier_position_parents` - Position (index) where the uniquifier is
inserted into the parents of the current item. If the position is None,
insert the uniquifier at the end of the name of each parent. This
parameter has no effect if `include_item_path` is False.
"""
if include_item_path:
for elem in item_elem.parents + [item_elem]:
parent = elem.parent
if parent not in self._uniquified_itemdata:
self._uniquified_itemdata[parent] = set()
if elem not in self._uniquified_itemdata[parent]:
item_names = set([elem_.name for elem_ in self._uniquified_itemdata[parent]])
if elem.name not in item_names:
self._uniquified_itemdata[parent].add(elem)
else:
if elem == item_elem:
position = uniquifier_position
else:
position = uniquifier_position_parents
elem.name = pgpath.uniquify_string(elem.name, item_names, position)
self._uniquified_itemdata[parent].add(elem)
else:
# Use None as the root of the item tree.
parent = None
if parent not in self._uniquified_itemdata:
self._uniquified_itemdata[parent] = set()
item_elem.name = pgpath.uniquify_string(
item_elem.name, self._uniquified_itemdata[parent], uniquifier_position)
self._uniquified_itemdata[parent].add(item_elem.name)
def _fill_item_data(self):
"""
Fill the _itemdata dictionary, containing
<gimp.Item.name, _ItemDataElement> pairs.
"""
_ItemTreeNode = namedtuple('_ItemTreeNode', ['children', 'parents'])
item_tree = [_ItemTreeNode(self._get_children_from_image(self.image), [])]
while item_tree:
node = item_tree.pop(0)
index = 0
for item in node.children:
parents = list(node.parents)
item_elem = _ItemDataElement(item, parents)
if pdb.gimp_item_is_group(item):
item_tree.insert(index, _ItemTreeNode(self._get_children_from_item(item), parents + [item_elem]))
index += 1
self._itemdata[item_elem.orig_name] = item_elem
@abc.abstractmethod
def _get_children_from_image(self, image):
"""
Return a list of immediate child items from the specified image.
If no child items exist, return an empty list.
"""
pass
@abc.abstractmethod
def _get_children_from_item(self, item):
"""
Return a list of immediate child items from the specified item.
If no child items exist, return an empty list.
"""
pass
class LayerData(ItemData):
def _get_children_from_image(self, image):
return image.layers
def _get_children_from_item(self, item):
return item.layers
class ChannelData(ItemData):
def _get_children_from_image(self, image):
return image.channels
def _get_children_from_item(self, item):
return item.children
class PathData(ItemData):
def _get_children_from_image(self, image):
return image.vectors
def _get_children_from_item(self, item):
return item.children
#===============================================================================
class _ItemDataElement(object):
"""
This class wraps a `gimp.Item` object and defines custom item attributes.
Note that the attributes will not be up to date if changes were made to the
original `gimp.Item` object.
Attributes:
* `item` (read-only) - `gimp.Item` object.
* `parents` (read-only) - List of `_ItemDataElement` parents for this item,
sorted from the topmost parent to the bottommost (immediate) parent.
* `level` (read-only) - Integer indicating which level in the item tree is
the item positioned at. 0 means the item is at the top level. The higher
the level, the deeper the item is in the item tree.
* `parent` (read-only) - Immediate `_ItemDataElement` parent of this object.
If this object has no parent, return None.
* `item_type` (read-only) - Item type - one of the following:
* `ITEM` - normal item,
* `NONEMPTY_GROUP` - non-empty item group (contains children),
* `EMPTY_GROUP` - empty item group (contains no children).
* `name` - Item name as a `unicode` string, initially equal to the `orig_name`
attribute. Modify this attribute instead of `gimp.Item.name` to avoid
modifying the original item.
* `orig_name` (read-only) - original `gimp.Item.name` as a `unicode` string.
* `path_visible` (read-only) - Visibility of all item's parents and this
item. If all items are visible, `path_visible` is True. If at least one
of these items is invisible, `path_visible` is False.
"""
__ITEM_TYPES = ITEM, NONEMPTY_GROUP, EMPTY_GROUP = (0, 1, 2)
def __init__(self, item, parents=None):
if item is None:
raise TypeError("item cannot be None")
self.name = item.name.decode()
self.tags = set()
self._orig_name = self.name
self._item = item
self._parents = parents if parents is not None else []
self._level = len(self._parents)
if self._parents:
self._parent = self._parents[-1]
else:
self._parent = None
if pdb.gimp_item_is_group(self._item):
if self._item.children:
self._item_type = self.NONEMPTY_GROUP
else:
self._item_type = self.EMPTY_GROUP
else:
self._item_type = self.ITEM
self._path_visible = self._get_path_visibility()
@property
def item(self):
return self._item
@property
def parents(self):
return self._parents
@property
def level(self):
return self._level
@property
def parent(self):
return self._parent
@property
def item_type(self):
return self._item_type
@property
def orig_name(self):
return self._orig_name
@property
def path_visible(self):
return self._path_visible
def get_file_extension(self):
"""
Get file extension from the `name` attribute.
If `name` has no file extension, return an empty string.
"""
return pgpath.get_file_extension(self.name)
def set_file_extension(self, file_extension):
"""
Set file extension in the `name` attribute.
To remove the file extension from `name`, pass an empty string or None.
"""
root = os.path.splitext(self.name)[0]
if file_extension:
self.name = '.'.join((root, file_extension))
else:
self.name = root
def get_filepath(self, directory, include_item_path=True):
"""
Return file path given the specified directory, item name and names of its
parents.
If `include_item_path` is True, create file path in the following format:
<directory>/<item path components>/<item name>
If `include_item_path` is False, create file path in the following format:
<directory>/<item name>
If directory is not an absolute path or is None, prepend the current working
directory.
Item path components consist of parents' item names, starting with the
topmost parent.
"""
if directory is None:
directory = ""
path = os.path.abspath(directory)
if include_item_path:
path_components = self.get_path_components()
if path_components:
path = os.path.join(path, os.path.join(*path_components))
path = os.path.join(path, self.name)
return path
def get_path_components(self):
"""
Return a list of names of all parents of this item as path components.
"""
return [parent.name for parent in self.parents]
def validate_name(self):
"""
Validate the `name` attribute of this item and all of its parents.
"""
self.name = pgpath.FilenameValidator.validate(self.name)
for parent in self._parents:
parent.name = pgpath.FilenameValidator.validate(parent.name)
def _get_path_visibility(self):
"""
If this item and all of its parents are visible, return True, otherwise
return False.
"""
path_visible = True
if not self._item.visible:
path_visible = False
else:
for parent in self._parents:
if not parent.item.visible:
path_visible = False
break
return path_visible
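#===============================================================================
# Illustrative usage sketch (not part of the original module): mirrors the
# behaviour documented in the ItemData docstring. It can only run inside GIMP,
# where `gimp.image_list()` is available, so it is left as a comment.
#
#   image = gimp.image_list()[0]
#   layer_data = LayerData(image)
#   for layer_elem in layer_data:
#       layer_data.uniquify_name(layer_elem)
#       layer_elem.set_file_extension('png')
#       print(layer_elem.get_filepath('/tmp/export'))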
|
Buggaboo/gimp-plugin-export-layers
|
export_layers/pygimplib/pgitemdata.py
|
Python
|
gpl-3.0
| 14,487 | 0.015669 |
#!/usr/bin/env python
# coding: utf-8
from module import Module
import numpy as np
try:
from im2col_cyt import im2col_cython, col2im_cython
except ImportError:
print('Installation broken, please reinstall PyFunt')
from numpy.lib.stride_tricks import as_strided
def tile_array(a, b1, b2):
r, c = a.shape
rs, cs = a.strides
x = as_strided(a, (r, b1, c, b2), (rs, 0, cs, 0))
return x.reshape(r*b1, c*b2)
class SpatialUpSamplingNearest(Module):
def __init__(self, scale):
super(SpatialUpSamplingNearest, self).__init__()
self.scale_factor = scale
if self.scale_factor < 1:
raise Exception('scale_factor must be greater than 1')
if np.floor(self.scale_factor) != self.scale_factor:
raise Exception('scale_factor must be integer')
    def update_output(self, x):
        N, C, H, W = x.shape
        stride = self.scale_factor
        pool_height = pool_width = stride
        out_size = (N, C, H * stride, W * stride)
        # Every input pixel is replicated into a stride x stride block: copy
        # the flattened input into each row of the column matrix and fold it
        # back into the enlarged image with col2im (stride == kernel size, so
        # the blocks do not overlap).
        x_reshaped = x.transpose(2, 3, 0, 1).flatten()
        out_cols = np.zeros((pool_height * pool_width, x_reshaped.size))
        out_cols[:, np.arange(out_cols.shape[1])] = x_reshaped
        out = col2im_cython(out_cols, N * C, 1, out_size[2], out_size[3],
                            pool_height, pool_width, padding=0, stride=stride)
        self.output = out.reshape(out_size)
        return self.output
    def update_grad_input(self, x, grad_output, scale=1):
        N, C, H, W = grad_output.shape
        pool_height = pool_width = self.scale_factor
        stride = self.scale_factor
        out_height = (H - pool_height) // stride + 1
        out_width = (W - pool_width) // stride + 1
        grad_output_split = grad_output.reshape(N * C, 1, H, W)
        grad_output_cols = im2col_cython(
            grad_output_split, pool_height, pool_width, padding=0, stride=stride)
        # One gradient value per input pixel: keep the top-left entry of every
        # stride x stride block of the incoming gradient.
        grad_input_cols = grad_output_cols[0, np.arange(grad_output_cols.shape[1])]
        grad_input = grad_input_cols.reshape(
            out_height, out_width, N, C).transpose(2, 3, 0, 1)
        self.grad_input = grad_input
        return self.grad_input
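# Illustrative usage sketch (not part of the original module): upsamples a
# random NCHW batch by a factor of 2; requires the compiled im2col_cyt
# extension imported at the top of this file.
if __name__ == '__main__':
    layer = SpatialUpSamplingNearest(2)
    x = np.random.randn(2, 3, 4, 4)
    y = layer.update_output(x)
    print(y.shape)  # expected: (2, 3, 8, 8)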
|
dnlcrl/PyFunt
|
pyfunt/spatial_up_sampling_nearest.py
|
Python
|
mit
| 2,179 | 0.001377 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import re
import six
from oslo_policy import _checks
from oslo_policy._i18n import _LE
LOG = logging.getLogger(__name__)
def reducer(*tokens):
"""Decorator for reduction methods.
Arguments are a sequence of tokens, in order, which should trigger running
this reduction method.
"""
def decorator(func):
# Make sure we have a list of reducer sequences
if not hasattr(func, 'reducers'):
func.reducers = []
# Add the tokens to the list of reducer sequences
func.reducers.append(list(tokens))
return func
return decorator
class ParseStateMeta(type):
"""Metaclass for the :class:`.ParseState` class.
Facilitates identifying reduction methods.
"""
def __new__(mcs, name, bases, cls_dict):
"""Create the class.
Injects the 'reducers' list, a list of tuples matching token sequences
to the names of the corresponding reduction methods.
"""
reducers = []
for key, value in cls_dict.items():
if not hasattr(value, 'reducers'):
continue
for reduction in value.reducers:
reducers.append((reduction, key))
cls_dict['reducers'] = reducers
return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
@six.add_metaclass(ParseStateMeta)
class ParseState(object):
"""Implement the core of parsing the policy language.
Uses a greedy reduction algorithm to reduce a sequence of tokens into
a single terminal, the value of which will be the root of the
:class:`Check` tree.
.. note::
Error reporting is rather lacking. The best we can get with this
parser formulation is an overall "parse failed" error. Fortunately, the
policy language is simple enough that this shouldn't be that big a
problem.
"""
def __init__(self):
"""Initialize the ParseState."""
self.tokens = []
self.values = []
def reduce(self):
"""Perform a greedy reduction of the token stream.
If a reducer method matches, it will be executed, then the
:meth:`reduce` method will be called recursively to search for any more
possible reductions.
"""
for reduction, methname in self.reducers:
if (len(self.tokens) >= len(reduction) and
self.tokens[-len(reduction):] == reduction):
# Get the reduction method
meth = getattr(self, methname)
# Reduce the token stream
results = meth(*self.values[-len(reduction):])
# Update the tokens and values
self.tokens[-len(reduction):] = [r[0] for r in results]
self.values[-len(reduction):] = [r[1] for r in results]
# Check for any more reductions
return self.reduce()
def shift(self, tok, value):
"""Adds one more token to the state.
Calls :meth:`reduce`.
"""
self.tokens.append(tok)
self.values.append(value)
# Do a greedy reduce...
self.reduce()
@property
def result(self):
"""Obtain the final result of the parse.
:raises ValueError: If the parse failed to reduce to a single result.
"""
if len(self.values) != 1:
raise ValueError('Could not parse rule')
return self.values[0]
@reducer('(', 'check', ')')
@reducer('(', 'and_expr', ')')
@reducer('(', 'or_expr', ')')
def _wrap_check(self, _p1, check, _p2):
"""Turn parenthesized expressions into a 'check' token."""
return [('check', check)]
@reducer('check', 'and', 'check')
def _make_and_expr(self, check1, _and, check2):
"""Create an 'and_expr'.
Join two checks by the 'and' operator.
"""
return [('and_expr', _checks.AndCheck([check1, check2]))]
@reducer('and_expr', 'and', 'check')
def _extend_and_expr(self, and_expr, _and, check):
"""Extend an 'and_expr' by adding one more check."""
return [('and_expr', and_expr.add_check(check))]
@reducer('check', 'or', 'check')
def _make_or_expr(self, check1, _or, check2):
"""Create an 'or_expr'.
Join two checks by the 'or' operator.
"""
return [('or_expr', _checks.OrCheck([check1, check2]))]
@reducer('or_expr', 'or', 'check')
def _extend_or_expr(self, or_expr, _or, check):
"""Extend an 'or_expr' by adding one more check."""
return [('or_expr', or_expr.add_check(check))]
@reducer('not', 'check')
def _make_not_expr(self, _not, check):
"""Invert the result of another check."""
return [('check', _checks.NotCheck(check))]
def _parse_check(rule):
"""Parse a single base check rule into an appropriate Check object."""
# Handle the special checks
if rule == '!':
return _checks.FalseCheck()
elif rule == '@':
return _checks.TrueCheck()
try:
kind, match = rule.split(':', 1)
except Exception:
LOG.exception(_LE('Failed to understand rule %s') % rule)
# If the rule is invalid, we'll fail closed
return _checks.FalseCheck()
# Find what implements the check
if kind in _checks.registered_checks:
return _checks.registered_checks[kind](kind, match)
elif None in _checks.registered_checks:
return _checks.registered_checks[None](kind, match)
else:
LOG.error(_LE('No handler for matches of kind %s') % kind)
return _checks.FalseCheck()
# Used for tokenizing the policy language
_tokenize_re = re.compile(r'\s+')
def _parse_tokenize(rule):
"""Tokenizer for the policy language.
Most of the single-character tokens are specified in the
_tokenize_re; however, parentheses need to be handled specially,
because they can appear inside a check string. Thankfully, those
parentheses that appear inside a check string can never occur at
the very beginning or end ("%(variable)s" is the correct syntax).
"""
for tok in _tokenize_re.split(rule):
# Skip empty tokens
if not tok or tok.isspace():
continue
# Handle leading parens on the token
clean = tok.lstrip('(')
for i in range(len(tok) - len(clean)):
yield '(', '('
# If it was only parentheses, continue
if not clean:
continue
else:
tok = clean
# Handle trailing parens on the token
clean = tok.rstrip(')')
trail = len(tok) - len(clean)
# Yield the cleaned token
lowered = clean.lower()
if lowered in ('and', 'or', 'not'):
# Special tokens
yield lowered, clean
elif clean:
# Not a special token, but not composed solely of ')'
if len(tok) >= 2 and ((tok[0], tok[-1]) in
[('"', '"'), ("'", "'")]):
# It's a quoted string
yield 'string', tok[1:-1]
else:
yield 'check', _parse_check(clean)
# Yield the trailing parens
for i in range(trail):
yield ')', ')'
def parse_rule(rule):
"""Parses policy to the tree.
Translates a policy written in the policy language into a tree of
Check objects.
"""
# Empty rule means always accept
if not rule:
return _checks.TrueCheck()
# Parse the token stream
state = ParseState()
for tok, value in _parse_tokenize(rule):
state.shift(tok, value)
try:
return state.result
except ValueError:
# Couldn't parse the rule
LOG.exception(_LE('Failed to understand rule %s') % rule)
# Fail closed
return _checks.FalseCheck()
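# Illustrative usage sketch (not part of the original module): parse_rule()
# turns a policy string into a Check tree; the rule names below are examples.
if __name__ == '__main__':
    check = parse_rule('role:admin or is_admin:True')
    print(check)  # prints the reduced check tree, e.g. "(role:admin or is_admin:True)"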
|
darren-wang/op
|
oslo_policy/_parser.py
|
Python
|
apache-2.0
| 8,552 | 0 |
#!/usr/bin/env python
import telnetlib
import time
import socket
import sys
import getpass
TELNET_PORT = 23
TELNET_TIMEOUT = 6
def send_command(remote_conn, cmd):
'''
    Send a command over the established Telnet session and return the output
'''
cmd = cmd.rstrip()
remote_conn.write(cmd + '\n')
time.sleep(1)
return remote_conn.read_very_eager()
def login(remote_conn, username, password):
'''
Login to pynet-rtr1
'''
output = remote_conn.read_until("sername:", TELNET_TIMEOUT)
remote_conn.write(username + '\n')
output += remote_conn.read_until("ssword:", TELNET_TIMEOUT)
remote_conn.write(password + '\n')
return output
def no_more(remote_conn, paging_cmd='terminal length 0'):
'''
No paging of Output
'''
return send_command(remote_conn, paging_cmd)
def telnet_connect(ip_addr):
'''
Establish the Telnet Connection
'''
try:
return telnetlib.Telnet(ip_addr, TELNET_PORT, TELNET_TIMEOUT)
except socket.timeout:
sys.exit("Connection timed-out")
def main():
'''
Connect to pynet-rtr1, login, and issue 'show ip int brief'
'''
ip_addr = raw_input("IP address: ")
ip_addr = ip_addr.strip()
username = 'pyclass'
password = getpass.getpass()
remote_conn = telnet_connect(ip_addr)
output = login(remote_conn, username, password)
time.sleep(1)
remote_conn.read_very_eager()
no_more(remote_conn)
output = send_command(remote_conn, 'show ip int brief')
print "\n\n"
print output
print "\n\n"
remote_conn.close()
if __name__ == "__main__":
main()
|
gahlberg/pynet_class_work
|
class2/ex2a_telnet.py
|
Python
|
apache-2.0
| 1,588 | 0.003149 |
from django.shortcuts import render, render_to_response
from django.shortcuts import redirect
from django.template import RequestContext
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.conf import settings
from manage.forms import *
from manage.models import *
from tasks.models import *
import os
import csv
from django.http import HttpResponse, HttpRequest
# Views
def login(request):
if request.method == "POST":
username = request.POST.get('username')
password = request.POST.get('password')
if (username == settings.MANAGE_USERNAME and password == settings.MANAGE_PASS):
return redirect('manage.views.main')
return render(request, 'manage/login.html', {})
def main(request):
# Make sure no direct access to main page
try:
referer = request.META['HTTP_REFERER']
except:
return redirect('manage.views.login')
if referer.startswith('http://colcat.calit2.uci.edu:8003'):
return render(request, 'manage/main.html', {})
return redirect('manage.views.login')
# LANGUAGES
def new_language(request):
if request.method == "POST":
form = LanguageForm(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.save()
return HttpResponseRedirect(reverse('manage.views.view_languages'))
else:
form = LanguageForm()
return render(request, 'manage/new-language.html', {'form': form})
def view_languages(request):
language_list = Language.objects.all()
context_dict = {'languages': language_list}
return render(request, 'manage/view-languages.html', context_dict)
# IMAGES
def new_image(request):
# Handle file upload
if request.method == 'POST':
form = ImageForm(request.POST, request.FILES)
if form.is_valid():
print request.FILES['image_filepath'].name
print request.FILES['image_filepath']
image_name = os.path.splitext(request.FILES['image_filepath'].name)[0]
newimg = Image_Data(image_filepath = request.FILES['image_filepath'], image_id = image_name, language_name = request.POST.get('language_name'), task_type_id = request.POST.get('task_type_id'))
newimg.save()
# Redirect to the document list after POST
return HttpResponseRedirect(reverse('manage.views.view_images'))
else:
form = ImageForm() # A empty, unbound form
return render(request, 'manage/new-image.html', {'form': form})
def view_images(request):
image_list = Image_Data.objects.all()
context_dict = {'images': image_list}
return render(request, 'manage/view-images.html', context_dict)
# DATA MODELS
def new_data_model(request):
if request.method == "POST":
form = DataModelForm(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.save()
return HttpResponseRedirect(reverse('manage.views.view_data_models'))
else:
form = DataModelForm()
return render(request, 'manage/new-data-model.html', {'form': form})
def view_data_models(request):
model_list = Data_Model.objects.all()
context_dict = {'models': model_list}
return render(request, 'manage/view-data-models.html', context_dict)
# TASKS
def new_task(request):
if request.method == "POST":
form = TaskForm(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.language_id = request.POST.get('language_id')
post.task_type_id = request.POST.get('task_type_id')
post.image_id = request.POST.get('image_id')
post.task_name = request.POST.get('language_id') + '_' + request.POST.get('task_type_id') + '_' + request.POST.get('image_id')
post.task_url = '/tasks/'+request.POST.get('language_id')+'/'+request.POST.get('task_type_id') + '/'+request.POST.get('image_id')
post.save()
return HttpResponseRedirect(reverse('manage.views.view_tasks'))
else:
form = TaskForm()
return render(request, 'manage/new-task.html', {'form': form})
def view_tasks(request):
if request.method == "POST":
if 'create_batch_file' in request.POST:
print "Creating batch file..."
task_choices = request.POST.getlist('task_choices')
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="batch.csv"'
writer = csv.writer(response)
headers = ['task_language_id', 'task_type_id', 'task_img_id']
writer.writerow(headers)
for tid in task_choices:
task = Task.objects.get(task_id=tid)
task_info = [task.language_id, task.task_type_id, task.image_id]
writer.writerow(task_info)
print 'Finished writing batch file'
return response
elif 'mark_tasks_complete' in request.POST:
print "Marking tasks complete..."
tasks_complete = request.POST.getlist('tasks_complete')
print tasks_complete
for tid in tasks_complete:
task = Task.objects.get(task_id=tid)
task.complete = True
task.save()
task_list = Task.objects.all()
context_dict = {'tasks': task_list}
return render(request, 'manage/view-tasks.html', context_dict)
def new_task_type(request):
if request.method == "POST":
form = TaskTypeForm(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.save()
return HttpResponseRedirect(reverse('manage.views.view_task_types'))
else:
form = TaskTypeForm()
return render(request, 'manage/new-task-type.html', {'form': form})
def view_task_types(request):
task_type_list = Task_Type.objects.all()
context_dict = {'task_types': task_type_list}
return render(request, 'manage/view-task-types.html', context_dict)
def new_task_template(request):
if request.method == "POST":
form = TaskTemplateForm(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.save()
return HttpResponseRedirect(reverse('manage.views.view_task_templates'))
else:
form = TaskTemplateForm()
return render(request, 'manage/new-task-template.html', {'form': form})
def view_task_templates(request):
template_list = Task_Template.objects.all()
context_dict = {'templates': template_list}
return render(request, 'manage/view-task-templates.html', context_dict)
# RESPONSES
def download_responses(request):
response_lists = []
# Add objects for each response type
try:
response_list_foci_001 = Task_Foci_001.objects.all()
response_lists.append(response_list_foci_001)
except:
pass
try:
response_list_naming_001 = Task_Naming_001.objects.all()
response_lists.append(response_list_naming_001)
except:
pass
context_dict = {'response_lists': [r.model.__name__ for r in response_lists]}
for rlist in response_lists:
write_responses_to_csv(rlist, 'uploads/responses/'+rlist.model.__name__+'.csv')
return render(request, 'manage/download-responses.html', context_dict)
import csv
from django.db.models.loading import get_model
def write_responses_to_csv(qs, outfile_path):
model = qs.model
writer = csv.writer(open(outfile_path, 'w'))
headers = []
for field in model._meta.fields:
headers.append(field.name)
writer.writerow(headers)
for obj in qs:
row = []
for field in headers:
val = getattr(obj, field)
if callable(val):
val = val()
if type(val) == unicode:
val = val.encode("utf-8")
row.append(val)
writer.writerow(row)
|
csdevsc/colcat_crowdsourcing_application
|
manage/views.py
|
Python
|
mit
| 7,968 | 0.005522 |
import numpy as np
from scipy.stats import sem
import scipy.constants as const
from uncertainties import ufloat
import uncertainties.unumpy as unp
from uncertainties.unumpy import (nominal_values as noms, std_devs as stds)
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from PIL import Image
import scipy.misc
from pint import UnitRegistry
u = UnitRegistry()
Q_ = u.Quantity
## Wavelengths in nm
lambda_b = Q_(480.0, 'nanometer')
n_b = 1.4635
h = Q_(const.h, 'joule * second')
e_0 = Q_(const.e, 'coulomb')
mu_bohr = Q_(const.physical_constants['Bohr magneton'][0], 'joule/tesla')
c = Q_(const.c, 'meter / second')
d = Q_(4, 'millimeter')
dispsgebiet_b = lambda_b**2 / (2 * d) * np.sqrt(1 / (n_b**2 - 1))
## Hysteresis, B in mT
def poly(x, a, b, c, d):
return a * x**3 + b * x**2 + c * x + d
B_auf = np.array([4, 87, 112,174, 230, 290, 352, 419,
476, 540, 600, 662, 714, 775, 823,872, 916, 959, 987,
1015, 1046, 1072])
B_ab = np.array([7, 57, 120, 180, 251, 306, 361, 428,
480, 550, 612, 654, 715, 780, 830, 878, 924, 962,
993, 1020, 1050, 1072])
I = np.linspace(0, 21, 22)
params_B_auf, covariance_B_auf = curve_fit(poly, I, B_auf)
params_B_ab, covariance_B_ab = curve_fit(poly, I, B_ab)
### BLUE ###
## Image one, timestamp 10:33
## Image two, I = 5.6 A, Pol = +-1
## Distances measured from one line to the two
## neighbouring lines +-> |*| |*| (this is how 1 was measured)
## two contains the distances between the peaks of one split line
## Pixel widths of lines 3 + 13
pixel_01_b = np.array([(1405 + 1244) / 2, (1690 + 1541) / 2, (1952
+ 1852) / 2, (2170 + 2055) / 2, (2399 + 2278) / 2, (2596 + 2481) / 2, (2781 +
2673) / 2, (2961 + 2861) / 2, (3130 + 3033) / 2, (3294 + 3202) / 2])
pixel_02_b_1 = np.array([(1419 + 1060) / 2, (1728 + 1419) / 2, (1973
+ 1728) / 2, (1973 + 1728) / 2, (2215 + 1973) / 2, (2435 + 2215) / 2, (2638 +
2435) / 2, (2816 + 2638) / 2, (3013 + 2816) / 2, (3176 + 3010) / 2, (3342 +
3176) / 2])
pixel_02_b_2 = np.array([(1494 -1339), (1776 - 1657), (2035 - 1910), (2273 - 2154), (2478 - 2377),
(2677 - 2582), (2873 - 2769), (3045 - 2959), 3217 - 3135, 3383 - 3303])
delta_S_b = np.zeros(len(pixel_01_b) - 1)
for i in range(0, len(pixel_01_b) - 1, 1):
delta_S_b[i] = pixel_01_b[i + 1] - pixel_01_b[i]
#print(delta_S_b)
del_S_b = pixel_02_b_2[1:10]#np.zeros(9)
#for i in range(0, len(pixel_02_b_2) - 1, 1):
# del_S_b[i] = pixel_02_b_2[i + 1] - pixel_02_b_2[i]
del_lambda_b = (1 / 2 * dispsgebiet_b * del_S_b / delta_S_b)
delta_E_b = (h * c / lambda_b**2 * del_lambda_b).to('eV')
g_b = (delta_E_b / (mu_bohr * Q_(poly(5.6, *params_B_auf), 'millitesla'))).to('dimensionless')
g_b_best = ufloat(np.mean(g_b), np.std(g_b, ddof=1))
print(g_b,'##', g_b_best)
print(del_S_b, '##', delta_S_b)
print('Hysterese 5.6 A', poly(5.6, *params_B_auf))
print((2 + 3/2) / 2)
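# Worked relation behind the numbers above (added note, not original code):
# the line splitting del_S_b/delta_S_b is converted to a wavelength shift via
#   d_lambda = 1/2 * dispersion_region * (del_S_b / delta_S_b),
# the energy shift via delta_E = h * c / lambda**2 * d_lambda, and the Lande
# factor via g = delta_E / (mu_B * B), which is what g_b_best averages.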
|
smjhnits/Praktikum_TU_D_16-17
|
Fortgeschrittenenpraktikum/Protokolle/V27_Zeeman-Effekt/Python/blau_s.py
|
Python
|
mit
| 2,866 | 0.010479 |
from mutant_django.generator import DjangoBase
def register(app):
app.extend_generator('django', django_json_field)
def django_json_field(gen):
gen.field_generators['JSON'] = JSONField
class JSONField(DjangoBase):
DJANGO_FIELD = 'JSONField'
def render_imports(self):
return ['from jsonfield import JSONField']
|
peterdemin/mutant
|
src/mutant_django_json/__init__.py
|
Python
|
isc
| 341 | 0 |
#!/usr/bin/env python3
# Copyright (c) 2019-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test descriptor wallet function."""
from test_framework.blocktools import COINBASE_MATURITY
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error
)
class WalletDescriptorTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [['-keypool=100']]
self.wallet_names = []
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
self.skip_if_no_sqlite()
def run_test(self):
if self.is_bdb_compiled():
# Make a legacy wallet and check it is BDB
self.nodes[0].createwallet(wallet_name="legacy1", descriptors=False)
wallet_info = self.nodes[0].getwalletinfo()
assert_equal(wallet_info['format'], 'bdb')
self.nodes[0].unloadwallet("legacy1")
else:
self.log.warning("Skipping BDB test")
# Make a descriptor wallet
self.log.info("Making a descriptor wallet")
self.nodes[0].createwallet(wallet_name="desc1", descriptors=True)
# A descriptor wallet should have 100 addresses * 4 types = 400 keys
self.log.info("Checking wallet info")
wallet_info = self.nodes[0].getwalletinfo()
assert_equal(wallet_info['format'], 'sqlite')
assert_equal(wallet_info['keypoolsize'], 400)
assert_equal(wallet_info['keypoolsize_hd_internal'], 400)
assert 'keypoololdest' not in wallet_info
# Check that getnewaddress works
self.log.info("Test that getnewaddress and getrawchangeaddress work")
addr = self.nodes[0].getnewaddress("", "legacy")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('pkh(')
assert_equal(addr_info['hdkeypath'], 'm/44\'/1\'/0\'/0/0')
addr = self.nodes[0].getnewaddress("", "p2sh-segwit")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('sh(wpkh(')
assert_equal(addr_info['hdkeypath'], 'm/49\'/1\'/0\'/0/0')
addr = self.nodes[0].getnewaddress("", "bech32")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('wpkh(')
assert_equal(addr_info['hdkeypath'], 'm/84\'/1\'/0\'/0/0')
# Check that getrawchangeaddress works
addr = self.nodes[0].getrawchangeaddress("legacy")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('pkh(')
assert_equal(addr_info['hdkeypath'], 'm/44\'/1\'/0\'/1/0')
addr = self.nodes[0].getrawchangeaddress("p2sh-segwit")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('sh(wpkh(')
assert_equal(addr_info['hdkeypath'], 'm/49\'/1\'/0\'/1/0')
addr = self.nodes[0].getrawchangeaddress("bech32")
addr_info = self.nodes[0].getaddressinfo(addr)
assert addr_info['desc'].startswith('wpkh(')
assert_equal(addr_info['hdkeypath'], 'm/84\'/1\'/0\'/1/0')
# Make a wallet to receive coins at
self.nodes[0].createwallet(wallet_name="desc2", descriptors=True)
recv_wrpc = self.nodes[0].get_wallet_rpc("desc2")
send_wrpc = self.nodes[0].get_wallet_rpc("desc1")
# Generate some coins
self.generatetoaddress(self.nodes[0], COINBASE_MATURITY + 1, send_wrpc.getnewaddress())
# Make transactions
self.log.info("Test sending and receiving")
addr = recv_wrpc.getnewaddress()
send_wrpc.sendtoaddress(addr, 10)
# Make sure things are disabled
self.log.info("Test disabled RPCs")
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importprivkey, "cVpF924EspNh8KjYsfhgY96mmxvT6DgdWiTYMtMjuM74hJaU5psW")
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importpubkey, send_wrpc.getaddressinfo(send_wrpc.getnewaddress()))
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importaddress, recv_wrpc.getnewaddress())
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importmulti, [])
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.addmultisigaddress, 1, [recv_wrpc.getnewaddress()])
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.dumpprivkey, recv_wrpc.getnewaddress())
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.dumpwallet, 'wallet.dump')
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.importwallet, 'wallet.dump')
assert_raises_rpc_error(-4, "This type of wallet does not support this command", recv_wrpc.rpc.sethdseed)
self.log.info("Test encryption")
# Get the master fingerprint before encrypt
info1 = send_wrpc.getaddressinfo(send_wrpc.getnewaddress())
# Encrypt wallet 0
send_wrpc.encryptwallet('pass')
send_wrpc.walletpassphrase('pass', 10)
addr = send_wrpc.getnewaddress()
info2 = send_wrpc.getaddressinfo(addr)
assert info1['hdmasterfingerprint'] != info2['hdmasterfingerprint']
send_wrpc.walletlock()
assert 'hdmasterfingerprint' in send_wrpc.getaddressinfo(send_wrpc.getnewaddress())
info3 = send_wrpc.getaddressinfo(addr)
assert_equal(info2['desc'], info3['desc'])
self.log.info("Test that getnewaddress still works after keypool is exhausted in an encrypted wallet")
for _ in range(500):
send_wrpc.getnewaddress()
self.log.info("Test that unlock is needed when deriving only hardened keys in an encrypted wallet")
send_wrpc.walletpassphrase('pass', 10)
send_wrpc.importdescriptors([{
"desc": "wpkh(tprv8ZgxMBicQKsPd7Uf69XL1XwhmjHopUGep8GuEiJDZmbQz6o58LninorQAfcKZWARbtRtfnLcJ5MQ2AtHcQJCCRUcMRvmDUjyEmNUWwx8UbK/0h/*h)#y4dfsj7n",
"timestamp": "now",
"range": [0,10],
"active": True
}])
send_wrpc.walletlock()
# Exhaust keypool of 100
for _ in range(100):
#send_wrpc.getnewaddress(address_type='bech32')
send_wrpc.getnewaddress('', 'bech32')
# This should now error
assert_raises_rpc_error(-12, "Keypool ran out, please call keypoolrefill first", send_wrpc.getnewaddress, '', 'bech32')
self.log.info("Test born encrypted wallets")
self.nodes[0].createwallet('desc_enc', False, False, 'pass', False, True)
enc_rpc = self.nodes[0].get_wallet_rpc('desc_enc')
enc_rpc.getnewaddress() # Makes sure that we can get a new address from a born encrypted wallet
self.log.info("Test blank descriptor wallets")
self.nodes[0].createwallet(wallet_name='desc_blank', blank=True, descriptors=True)
blank_rpc = self.nodes[0].get_wallet_rpc('desc_blank')
assert_raises_rpc_error(-4, 'This wallet has no available keys', blank_rpc.getnewaddress)
self.log.info("Test descriptor wallet with disabled private keys")
self.nodes[0].createwallet(wallet_name='desc_no_priv', disable_private_keys=True, descriptors=True)
nopriv_rpc = self.nodes[0].get_wallet_rpc('desc_no_priv')
assert_raises_rpc_error(-4, 'This wallet has no available keys', nopriv_rpc.getnewaddress)
self.log.info("Test descriptor exports")
self.nodes[0].createwallet(wallet_name='desc_export', descriptors=True)
exp_rpc = self.nodes[0].get_wallet_rpc('desc_export')
self.nodes[0].createwallet(wallet_name='desc_import', disable_private_keys=True, descriptors=True)
imp_rpc = self.nodes[0].get_wallet_rpc('desc_import')
addr_types = [('legacy', False, 'pkh(', '44\'/1\'/0\'', -13),
('p2sh-segwit', False, 'sh(wpkh(', '49\'/1\'/0\'', -14),
('bech32', False, 'wpkh(', '84\'/1\'/0\'', -13),
('legacy', True, 'pkh(', '44\'/1\'/0\'', -13),
('p2sh-segwit', True, 'sh(wpkh(', '49\'/1\'/0\'', -14),
('bech32', True, 'wpkh(', '84\'/1\'/0\'', -13)]
for addr_type, internal, desc_prefix, deriv_path, int_idx in addr_types:
int_str = 'internal' if internal else 'external'
self.log.info("Testing descriptor address type for {} {}".format(addr_type, int_str))
if internal:
addr = exp_rpc.getrawchangeaddress(address_type=addr_type)
else:
addr = exp_rpc.getnewaddress("", addr_type)
desc = exp_rpc.getaddressinfo(addr)['parent_desc']
assert_equal(desc_prefix, desc[0:len(desc_prefix)])
idx = desc.index('/') + 1
assert_equal(deriv_path, desc[idx:idx + 9])
if internal:
assert_equal('1', desc[int_idx])
else:
assert_equal('0', desc[int_idx])
self.log.info("Testing the same descriptor is returned for address type {} {}".format(addr_type, int_str))
for i in range(0, 10):
if internal:
addr = exp_rpc.getrawchangeaddress(address_type=addr_type)
else:
addr = exp_rpc.getnewaddress("", addr_type)
test_desc = exp_rpc.getaddressinfo(addr)['parent_desc']
assert_equal(desc, test_desc)
self.log.info("Testing import of exported {} descriptor".format(addr_type))
imp_rpc.importdescriptors([{
'desc': desc,
'active': True,
'next_index': 11,
'timestamp': 'now',
'internal': internal
}])
for i in range(0, 10):
if internal:
exp_addr = exp_rpc.getrawchangeaddress(address_type=addr_type)
imp_addr = imp_rpc.getrawchangeaddress(address_type=addr_type)
else:
exp_addr = exp_rpc.getnewaddress("", addr_type)
imp_addr = imp_rpc.getnewaddress("", addr_type)
assert_equal(exp_addr, imp_addr)
if __name__ == '__main__':
    WalletDescriptorTest().main()
|
tecnovert/particl-core
|
test/functional/wallet_descriptor.py
|
Python
|
mit
| 10,725 | 0.00317 |
dimensions(8,2)
wall((0, 2), (8, 2))
wall((1, 1.5),(1.5, 1.5))
wall((2, 1.6),(2.8, 1.6))
wall((3.1, 1.4),(3.5, 1.4))
initialRobotLoc(1.0, 1.0)
|
Cynary/distro6.01
|
arch/6.01Soft/lib601-F13-4/soar/worlds/oneDdiff.py
|
Python
|
mit
| 148 | 0.027027 |
import web
urls = (
    '/hello', 'Index'
)
app = web.application(urls,globals())
render = web.template.render('/usr/local/LPTHW/ex51/gothonweb/templates/',base="layout")
class Index(object):
def GET(self):
return render.hello_form()
def POST(self):
form = web.input(name="Nobody",greet="Hello")
        greeting = "%s,%s" % (form.greet, form.name)
return render.index(greeting = greeting)
if __name__ == '__main__':
app.run()
|
tridvaodin/Assignments-Valya-Maskaliova
|
LPTHW/projects/gothonweb/bin/app.py
|
Python
|
gpl-2.0
| 488 | 0.020492 |
import os, sys, re
import ConfigParser
import optparse
import shutil
import subprocess
import difflib
import collections
#import numpy as np
# Alberto Meseguer file; 18/11/2016
# Modified by Quim Aguirre; 13/03/2017
# This file is the master coordinator of the DIANA project. It is used to run multiple DIANA commands in parallel in the cluster
#-------------#
# Functions #
#-------------#
#-------------#
# Options #
#-------------#
def parse_options():
'''
This function parses the command line arguments and returns an optparse object.
'''
parser = optparse.OptionParser("pddi.py [--dummy=DUMMY_DIR] -i INPUT_FILE [-o OUTPUT_DIR] [-v]")
# Directory arguments
parser.add_option("-i", action="store", type="string", dest="input_file", help="Input crossings file", metavar="INPUT_FILE")
parser.add_option("-s", action="store", type="string", dest="sif_file", help="Input SIF file")
parser.add_option("-t", action="store", type="string", dest="type_of_analysis", help="Type of analysis: 'profile_creation' or 'comparison'")
parser.add_option("--dummy_dir", default="dummy/", action="store", type="string", dest="dummy_dir", help="Dummy directory (default = ./)", metavar="DUMMY_DIR")
    parser.add_option("-w", "--workspace", dest="workspace", action="store",
                      default=os.path.join(os.path.dirname(__file__), 'workspace'),
                      help="""Define the workspace directory where the data directory and the results directory will be created""")
(options, args) = parser.parse_args()
if options.input_file is None or options.sif_file is None or options.type_of_analysis is None:
parser.error("missing arguments: type option \"-h\" for help")
return options
#-------------#
# Main #
#-------------#
# Add "." to sys.path #
src_path = os.path.abspath(os.path.dirname(__file__))
sys.path.append(src_path)
# Read configuration file #
config = ConfigParser.ConfigParser()
config_file = os.path.join(src_path, "config_marvin.ini")
config.read(config_file)
import hashlib
# Imports my functions #
import functions
# Define which python to be used #
python = os.path.join(config.get("Paths", "python_path"), "python")
# Arguments & Options #
options = parse_options()
# Directory arguments
input_file = os.path.abspath(options.input_file)
dummy_dir = os.path.abspath(options.dummy_dir)
# Create directories if necessary
logs_dir = src_path + "/logs"
if not os.path.exists(logs_dir):
os.mkdir(logs_dir)
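# Assumed workspace layout: generate_profiles.py / compare_profiles.py write
# their outputs under <workspace>/data and <workspace>/results; these paths are
# only used below to skip crossings whose results already exist.
data_dir = os.path.join(options.workspace, "data")
res_dir = os.path.join(options.workspace, "results")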
f = open(input_file, "r")
# Depending on the type of analysis, we will submit different commands
if options.type_of_analysis == 'profile_creation':
analysis = '-prof'
all_drugs = set()
for line in f:
(drug1, drug2) = line.strip().split('---')
all_drugs.add(drug1)
all_drugs.add(drug2)
f.close()
for drug in all_drugs:
# Check if the p-value file is already created. If so, skip
pvalue_file = data_dir + "/" + drug + "/guild_results_using_sif/output_scores.sif.netcombo.pval"
if os.path.exists(pvalue_file):
continue
guild_path = '/gpfs42/robbyfs/homes/users/qaguirre/guild/scoreN'
command = 'python {}/diana_cluster/scripts/generate_profiles.py -d {} -pt geneid -sif {} -gu {}'.format( src_path, drug, options.sif_file, guild_path )
print(command)
# python /home/quim/project/diana_cluster/scripts/generate_profiles.py -d 'DCC0303' -pt 'geneid' -sif /home/quim/project/diana_cluster/workspace/sif/human_eAFF_geneid_2017.sif -gu /home/quim/project/diana_cluster/diana/toolbox/scoreN
# To run the command at the local machine
#os.system(command)
#To run in the cluster submitting files to queues
functions.submit_command_to_queue(command, max_jobs_in_queue=int(config.get("Cluster", "max_jobs_in_queue")), queue_file="command_queues_marvin.txt", dummy_dir=dummy_dir)
elif options.type_of_analysis == 'comparison':
analysis = '-comp'
for line in f:
(drug1, drug2) = line.strip().split('---')
# Check if the results are already done
comp_results_dir = res_dir + "/results_" + drug1 + "_" + drug2
table_file = comp_results_dir + '/table_results_' + drug1 + '_' + drug2 + '.txt'
if os.path.exists(table_file):
continue
command = 'python {}/diana_cluster/scripts/compare_profiles.py -d1 {} -d2 {} -pt geneid'.format( src_path, drug1, drug2 )
print(command)
# python /home/quim/project/diana_cluster/scripts/compare_profiles.py -d1 'DCC0303' -d2 'DCC1743' -pt 'geneid'
# To run the command at the local machine
#os.system(command)
#To run in the cluster submitting files to queues
functions.submit_command_to_queue(command, max_jobs_in_queue=int(config.get("Cluster", "max_jobs_in_queue")), queue_file="command_queues_marvin.txt", dummy_dir=dummy_dir)
f.close()
else:
print('The type of analysis has been wrongly defined. Introduce \'profile_creation\' or \'comparison\'')
sys.exit(10)
|
quimaguirre/diana
|
scripts/old_scripts/run_experiment_cluster.py
|
Python
|
mit
| 5,102 | 0.010584 |
# COPYRIGHT (c) 2016-2018 Nova Labs SRL
#
# All rights reserved. All use of this software and documentation is
# subject to the License Agreement located in the file LICENSE.
from .Core import *
from .ModuleTarget import *
from .ParametersTarget import *
from abc import abstractmethod
class CoreWorkspaceBase:
def __init__(self):
self.sources = None
self.generated = None
self.build = None
@abstractmethod
def getCorePackage(self, name):
pass
@abstractmethod
def getCoreModule(self, name):
pass
@abstractmethod
def getCoreConfiguration(self, package, name):
pass
@abstractmethod
def getCoreMessage(self, package, name):
pass
@abstractmethod
def getRoot(self, cwd=None):
pass
@abstractmethod
def isValid(self):
pass
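    # The concrete getRoot below shadows the abstract one declared above; it
    # walks up from cwd looking for WORKSPACE.json and caches the result.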
def getRoot(self, cwd=None):
if self.root is None: # Check for cached value
self.root = findFileGoingUp("WORKSPACE.json", cwd)
if self.root is not None:
CoreConsole.ok("CoreWorkspace::getRoot: Workspace found in " + CoreConsole.highlightFilename(self.root))
else:
self.reason = "CoreWorkspace::getRoot: Not inside a Workspace"
CoreConsole.fail(self.reason)
return self.root
def getSourcesPath(self):
if self.sources is None: # Check for cached value
if self.getRoot() is not None:
tmp = os.path.join(self.getRoot(), "src")
if os.path.isdir(tmp):
self.sources = tmp
else:
raise CoreError("'src' directory not found inside Workspace", context="CoreWorkspaceBase::getSourcesPath")
else:
self.sources = None
return self.sources
def getGeneratedPath(self):
if self.generated is None: # Check for cached value
if self.getRoot() is not None:
tmp = os.path.join(self.getRoot(), "generated")
if not os.path.isdir(tmp):
try:
os.makedirs(tmp)
except OSError as e:
raise CoreError("I/0 Error: " + str(e.strerror), e.filename, context="CoreWorkspaceBase::getGeneratedPath")
self.generated = tmp
else:
self.generated = None
return self.generated
def getBuildPath(self):
if self.build is None: # Check for cached value
if self.getRoot() is not None:
tmp = os.path.join(self.getRoot(), "build")
if not os.path.isdir(tmp):
try:
os.makedirs(tmp)
except OSError as e:
raise CoreError("I/0 Error: " + str(e.strerror), e.filename, context="CoreWorkspaceBase::getBuildPath")
self.build = tmp
else:
self.build = None
return self.build
def getPackagesRoot(self):
if not self.isValid():
raise CoreError("invalid", context="CoreWorkspaceBase::getPackagesRoot")
return os.path.join(self.getSourcesPath(), "packages")
def getModulesRoot(self):
if not self.isValid():
raise CoreError("invalid", context="CoreWorkspaceBase::getModulesRoot")
return os.path.join(self.getSourcesPath(), "modules")
def getModuleTargetsRoot(self):
if not self.isValid():
raise CoreError("invalid", context="CoreWorkspaceBase::getModuleTargetsRoot")
return os.path.join(self.getSourcesPath(), "targets")
def getParametersRoot(self):
if not self.isValid():
raise CoreError("invalid", context="CoreWorkspaceBase::getParametersRoot")
return os.path.join(self.getSourcesPath(), "targets")
def getParametersTargetsRoot(self):
if not self.isValid():
raise CoreError("invalid", context="CoreWorkspaceBase::getParametersTargetsRoot")
return os.path.join(self.getSourcesPath(), "params")
class CoreWorkspace(CoreContainer, CoreWorkspaceBase):
def __init__(self):
CoreContainer.__init__(self)
CoreWorkspaceBase.__init__(self)
self._validModuleTargets = []
self._invalidModuleTargets = []
self._validParameters = []
self._invalidParameters = []
self._validParametersTargets = []
self._invalidParametersTargets = []
self.root = None
self.sources = None
self.generated = None
self.build = None
self.valid = False
self.opened = False
self.reason = ""
def openJSON(self, jsonFile):
CoreConsole.info("WORKSPACE: " + CoreConsole.highlightFilename(jsonFile))
try:
self.valid = True
except CoreError as e:
self.reason = str(e)
CoreConsole.fail("CoreWorkspace::openJSON: " + self.reason)
self.valid = False
return False
return True
def open(self, root=None):
self.valid = False
try:
if root is not None:
self.root = root
else:
self.root = self.getRoot()
if self.root is None:
return False
jsonFile = os.path.join(self.root, "WORKSPACE.json")
if self.openJSON(jsonFile):
self.openPackages()
self.openModules()
self.openModuleTargets()
self.openParameters()
self.openParametersTargets()
return self.valid
except CoreError as e:
self.reason = str(e)
CoreConsole.fail("CoreWorkspace::open: " + self.reason)
return False
def isValid(self):
return self.valid
# --- MODULE TARGET -----------------------------------------------------------
def listModuleTargets(self):
path = self.getModuleTargetsRoot()
dirs = listDirectories(path, fullpath=True)
tmp = []
for x in dirs:
if ModuleTarget.check(x):
tmp.append(x)
if tmp is not None:
tmp.sort()
return tmp
def openModuleTargets(self):
list = self.listModuleTargets()
self._validModuleTargets = []
self._invalidModuleTargets = []
for x in list:
m = ModuleTarget()
if m.open(x):
self._validModuleTargets.append(m)
else:
self._invalidModuleTargets.append(m)
return self._validModuleTargets
def getModuleTargetByName(self, name):
if name is None:
raise CoreError("CoreContainer::getModule() name is None")
for x in self._validModuleTargets:
if x.name == name:
return x
return None
def validModuleTargets(self):
return self._validModuleTargets
def invalidModuleTargets(self):
return self._invalidModuleTargets
# --- PARAMETERS --------------------------------------------------------------
def listParameters(self):
path = self.getParametersRoot()
dirs = listDirectories(path, fullpath=True)
tmp = []
for x in dirs:
if Parameters.check(x):
tmp.append(x)
if tmp is not None:
tmp.sort()
return tmp
def openParameters(self):
list = self.listParameters()
self._validParameters = []
self._invalidParameters = []
for x in list:
m = Parameters()
if m.open(x):
self._validParameters.append(m)
else:
self._invalidParameters.append(m)
return self._validParameters
def getParameterByName(self, name):
if name is None:
raise CoreError("CoreContainer::getModule() name is None")
for x in self._validParameters:
if x.name == name:
return x
return None
def validParameters(self):
return self._validParameters
def invalidParameters(self):
return self._invalidParameters
def listParametersTargets(self):
path = self.getParametersTargetsRoot()
dirs = listDirectories(path, fullpath=True)
if dirs is not None:
dirs.sort()
tmp = []
for d in dirs:
files = listFilesByAndStripExtension(os.path.join(path, d), "json")
if files is not None:
files.sort()
for f in files:
tmp.append([d, f])
return tmp
def openParametersTargets(self):
list = self.listParametersTargets()
self._validParametersTargets = []
self._invalidParametersTargets = []
for x in list:
m = ParametersTarget()
if m.open(x[0], x[1]):
self._validParametersTargets.append(m)
else:
self._invalidParametersTargets.append(m)
return self._validParametersTargets
def validParameterTargets(self):
return self._validParametersTargets
def invalidParameterTargets(self):
return self._invalidParametersTargets
class Workspace(CoreWorkspaceBase):
def __init__(self):
self.root = None
self.sources = None
self.generated = None
self.build = None
self.valid = False
self.reason = ""
self.requiredModules = []
self.requiredPackages = []
self.core = Core()
self.coreWorkspace = CoreWorkspace()
self.packagesCoreDependencies = []
self.packagesWorkspaceDependencies = []
self.packagesNoneDependencies = []
self.modulesWorkspaceDependencies = []
self.modulesCoreDependencies = []
self.modulesNoneDependencies = []
def open(self, coreRoot=None, workspaceRoot=None):
self.__init__()
if not self.core.open(coreRoot):
self.reason = self.core.reason
return False
else:
if not self.coreWorkspace.open(self.getRoot(workspaceRoot)):
self.reason = self.coreWorkspace.reason
return False
return True
def isValid(self):
return self.core.valid and self.coreWorkspace.valid
def clean(self, force):
root = self.getRoot()
if root is not None:
if not force:
print("OK: " + root)
else:
print("!!!!")
def validModuleTargets(self):
return self.coreWorkspace.validModuleTargets()
def invalidModuleTargets(self):
return self.coreWorkspace.invalidModuleTargets()
def validParameters(self):
return self.coreWorkspace.validParameters()
def invalidParameters(self):
return self.coreWorkspace.invalidParameters()
def validParameterTargets(self):
return self.coreWorkspace.validParameterTargets()
def invalidParameterTargets(self):
return self.coreWorkspace.invalidParameterTargets()
def getParameters(self, name) -> Parameters:
for x in self.validParameters():
if x.name == name:
return x
return None
def getCoreConfiguration(self, package, name):
p = self.getCorePackage(package)
tmp = None
if p is not None:
tmp = CoreConfiguration()
tmp.open(name, p)
return tmp
def getCoreMessage(self, package, name):
p = self.getCorePackage(package)
tmp = None
if p is not None:
tmp = CoreMessage()
tmp.open(name, p)
return tmp
def getCorePackage(self, name):
tmpW = self.coreWorkspace.getPackageByName(name)
tmpC = self.core.getPackageByName(name)
if tmpW is not None:
return tmpW
else:
if tmpC is not None:
return tmpC
return None
def getCoreModule(self, name):
tmpW = self.coreWorkspace.getModuleByName(name)
tmpC = self.core.getModuleByName(name)
if tmpW is not None:
return tmpW
else:
if tmpC is not None:
return tmpC
return None
def getRequiredModules(self):
tmp = []
for x in self.validModuleTargets():
tmp.append(x.module)
self.requiredModules = list(set(tmp))
return self.requiredModules
def getRequiredPackages(self):
tmp = []
for x in self.validModuleTargets():
for y in x.requiredPackages:
tmp.append(y)
m = self.getCoreModule(x.module)
if m is not None:
for y in m.requiredPackages:
tmp.append(y)
for x in self.validParameterTargets():
p = self.getParameters(x.parameters)
if p is not None:
for y in p.requiredPackages():
tmp.append(y)
self.requiredPackages = list(set(tmp))
self.requiredPackages.sort()
return self.requiredPackages
def checkPackagesDependencies(self):
self.packagesWorkspaceDependencies = []
self.packagesCoreDependencies = []
self.packagesNoneDependencies = []
isOk = True
for x in self.getRequiredPackages():
tmpW = self.coreWorkspace.getPackageByName(x)
tmpC = self.core.getPackageByName(x)
if tmpW is not None:
self.packagesWorkspaceDependencies.append(tmpW)
else:
if tmpC is not None:
self.packagesCoreDependencies.append(tmpC)
else:
self.packagesNoneDependencies.append(x)
isOk = False
return isOk
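    # A package defined in the workspace takes precedence over ("shadows") a
    # Core package of the same name; the summary below reports which source is
    # actually used and flags any shadowing.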
def getPackagesDependenciesSummary(self):
table = []
for x in self.getRequiredPackages():
tmpW = self.coreWorkspace.getPackageByName(x)
tmpC = self.core.getPackageByName(x)
l = CoreConsole.highlight(x)
s = ""
n = ""
if tmpW is not None:
if tmpC is None:
s = "Workspace"
else:
s = "Workspace"
n = "Shadows Core"
else:
if tmpC is not None:
s = "Core"
else:
n = CoreConsole.error("Not found")
table.append([l, s, n])
return table
@staticmethod
def getPackagesDependenciesSummaryFields():
return ["Package", "Source", "Notes"]
def checkModulesDependencies(self):
self.modulesWorkspaceDependencies = []
self.modulesCoreDependencies = []
self.modulesNoneDependencies = []
isOk = True
for x in self.getRequiredModules():
tmpW = self.coreWorkspace.getModuleByName(x)
tmpC = self.core.getModuleByName(x)
if tmpW is not None:
self.modulesWorkspaceDependencies.append(tmpW)
else:
if tmpC is not None:
self.modulesCoreDependencies.append(tmpC)
else:
self.modulesNoneDependencies.append(x)
isOk = False
return isOk
def getModulesDependenciesSummary(self):
table = []
for x in self.getRequiredModules():
tmpW = self.coreWorkspace.getModuleByName(x)
tmpC = self.core.getModuleByName(x)
l = CoreConsole.highlight(x)
s = ""
n = ""
if tmpW is not None:
if tmpC is None:
s = "Workspace"
else:
s = "Workspace"
n = "Shadows Core"
else:
if tmpC is not None:
s = "Core"
else:
n = CoreConsole.error("Not found")
table.append([l, s, n])
return table
@staticmethod
def getModulesDependenciesSummaryFields():
return ["Module", "Source", "Notes"]
|
novalabs/core-tools
|
novalabs/core/CoreWorkspace.py
|
Python
|
gpl-3.0
| 16,247 | 0.000923 |
import logging
import socket
import re
from os import path, remove, makedirs, rename, environ
from . import docker_client, pull_image
from . import DockerConfig
from . import DockerPool
from cattle import Config
from cattle.compute import BaseComputeDriver
from cattle.agent.handler import KindBasedMixin
from cattle.type_manager import get_type, MARSHALLER
from cattle import utils
from cattle.utils import JsonObject
from docker.errors import APIError, NotFound
from cattle.plugins.host_info.main import HostInfo
from cattle.plugins.docker.util import add_label, is_no_op, remove_container
from cattle.progress import Progress
from cattle.lock import lock
from cattle.plugins.docker.network import setup_ipsec, setup_links, \
setup_mac_and_ip, setup_ports, setup_network_mode, setup_dns
from cattle.plugins.docker.agent import setup_cattle_config_url
log = logging.getLogger('docker')
SYSTEM_LABEL = 'io.rancher.container.system'
UUID_LABEL = 'io.rancher.container.uuid'
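# (Cattle instance field, docker-py keyword argument) pairs; the values are
# copied verbatim from the API payload into the container create/start config
# by _setup_simple_config_fields().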
CREATE_CONFIG_FIELDS = [
('labels', 'labels'),
('environment', 'environment'),
('directory', 'working_dir'),
('user', 'user'),
('domainName', 'domainname'),
('memory', 'mem_limit'),
('memorySwap', 'memswap_limit'),
('cpuSet', 'cpuset'),
('cpuShares', 'cpu_shares'),
('tty', 'tty'),
('stdinOpen', 'stdin_open'),
('detach', 'detach'),
('workingDir', 'working_dir'),
('entryPoint', 'entrypoint')]
START_CONFIG_FIELDS = [
('capAdd', 'cap_add'),
('capDrop', 'cap_drop'),
('dnsSearch', 'dns_search'),
('dns', 'dns'),
('extraHosts', 'extra_hosts'),
('publishAllPorts', 'publish_all_ports'),
('lxcConf', 'lxc_conf'),
('logConfig', 'log_config'),
('securityOpt', 'security_opt'),
('restartPolicy', 'restart_policy'),
('pidMode', 'pid_mode'),
('devices', 'devices')]
def _is_running(client, container):
if container is None:
return False
inspect = client.inspect_container(container)
try:
return inspect['State']['Running']
except KeyError:
return False
def _is_stopped(client, container):
return not _is_running(client, container)
def _to_upper_case(key):
return key[0].upper() + key[1:]
class DockerCompute(KindBasedMixin, BaseComputeDriver):
def __init__(self):
KindBasedMixin.__init__(self, kind='docker')
BaseComputeDriver.__init__(self)
self.host_info = HostInfo(docker_client())
self.system_images = self.get_agent_images(docker_client())
def get_agent_images(self, client):
images = client.images(filters={'label': SYSTEM_LABEL})
system_images = {}
for i in images:
try:
label_val = i['Labels'][SYSTEM_LABEL]
for l in i['RepoTags']:
system_images[l] = label_val
if l.endswith(':latest'):
alias = l[:-7]
system_images[alias] = label_val
except KeyError:
pass
return system_images
@staticmethod
def get_container_by(client, func):
containers = client.containers(all=True, trunc=False)
containers = filter(func, containers)
if len(containers) > 0:
return containers[0]
return None
@staticmethod
def find_first(containers, func):
containers = filter(func, containers)
if len(containers) > 0:
return containers[0]
return None
def on_ping(self, ping, pong):
if not DockerConfig.docker_enabled():
return
self._add_resources(ping, pong)
self._add_instances(ping, pong)
def _add_instances(self, ping, pong):
if not utils.ping_include_instances(ping):
return
utils.ping_add_resources(pong, {
'type': 'hostUuid',
'uuid': DockerConfig.docker_uuid()
})
containers = []
running, nonrunning = self._get_all_containers_by_state()
for key, container in running.iteritems():
self.add_container('running', container, containers)
for key, container in nonrunning.iteritems():
self.add_container('stopped', container, containers)
utils.ping_add_resources(pong, *containers)
utils.ping_set_option(pong, 'instances', True)
def add_container(self, state, container, containers):
try:
labels = container['Labels']
except KeyError:
labels = []
container_data = {
'type': 'instance',
'uuid': self._get_uuid(container),
'state': state,
'systemContainer': self._get_sys_container(container),
'dockerId': container['Id'],
'image': container['Image'],
'labels': labels,
'created': container['Created'],
}
containers.append(container_data)
def _get_all_containers_by_state(self):
client = docker_client(timeout=2)
nonrunning_containers = {}
for c in client.containers(all=True):
# Blank status only wait to distinguish created from stopped
if c['Status'] != '' and c['Status'] != 'Created':
nonrunning_containers[c['Id']] = c
running_containers = {}
for c in client.containers(all=False):
running_containers[c['Id']] = c
del nonrunning_containers[c['Id']]
return running_containers, nonrunning_containers
def _get_sys_container(self, container):
try:
image = container['Image']
if image in self.system_images:
return self.system_images[image]
except (TypeError, KeyError):
pass
try:
return container['Labels']['io.rancher.container.system']
except (TypeError, KeyError):
pass
def _get_uuid(self, container):
try:
uuid = container['Labels'][UUID_LABEL]
if uuid:
return uuid
except (TypeError, KeyError):
pass
names = container['Names']
if not names:
# No name?? Make one up
return 'no-uuid-%s' % container['Id']
if names[0].startswith('/'):
return names[0][1:]
else:
return names[0]
def _determine_state(self, container):
status = container['Status']
if status == '' or (status is not None and
status.lower() == 'created'):
return 'created'
elif 'Up ' in status:
return 'running'
elif 'Exited ' in status:
return 'stopped'
else:
# Unknown. Assume running and state should sync up eventually.
return 'running'
def _get_host_labels(self):
try:
return self.host_info.host_labels()
except:
log.exception("Error getting host labels")
return {}
def _get_host_create_labels(self):
labels = Config.labels()
if labels:
return labels
return {}
def _add_resources(self, ping, pong):
if not utils.ping_include_resources(ping):
return
stats = None
if utils.ping_include_stats(ping):
try:
stats = self.host_info.collect_data()
except:
log.exception("Error getting host info stats")
physical_host = Config.physical_host()
compute = {
'type': 'host',
'kind': 'docker',
'hostname': Config.hostname(),
'createLabels': self._get_host_create_labels(),
'labels': self._get_host_labels(),
'physicalHostUuid': physical_host['uuid'],
'uuid': DockerConfig.docker_uuid(),
'info': stats
}
pool = {
'type': 'storagePool',
'kind': 'docker',
'name': compute['hostname'] + ' Storage Pool',
'hostUuid': compute['uuid'],
'uuid': compute['uuid'] + '-pool'
}
resolved_ip = socket.gethostbyname(DockerConfig.docker_host_ip())
ip = {
'type': 'ipAddress',
'uuid': resolved_ip,
'address': resolved_ip,
'hostUuid': compute['uuid'],
}
proxy = Config.host_proxy()
if proxy is not None:
compute['apiProxy'] = proxy
utils.ping_add_resources(pong, physical_host, compute, pool, ip)
def inspect(self, container):
return docker_client().inspect_container(container)
@staticmethod
def _name_filter(name, container):
names = container.get('Names')
if names is None:
return False
found = False
for n in names:
if n.endswith(name):
found = True
break
return found
@staticmethod
def _id_filter(id, container):
container_id = container.get('Id')
return id == container_id
@staticmethod
def _agent_id_filter(id, container):
try:
return container['Labels']['io.rancher.container.agent_id'] == id
except (TypeError, KeyError, AttributeError):
pass
def get_container(self, client, instance, by_agent=False):
if instance is None:
return None
# First look for UUID label directly
labeled_containers = client.containers(all=True, trunc=False, filters={
'label': '{}={}'.format(UUID_LABEL, instance.uuid)})
if len(labeled_containers) > 0:
return labeled_containers[0]
# Next look by UUID using fallback method
container_list = client.containers(all=True, trunc=False)
container = self.find_first(container_list,
lambda x: self._get_uuid(x) ==
instance.uuid)
if container:
return container
if hasattr(instance, 'externalId') and instance.externalId:
container = self.find_first(container_list,
lambda x: self._id_filter(
instance.externalId, x))
if container:
return container
if by_agent and hasattr(instance, 'agentId') and instance.agentId:
container = self.find_first(container_list,
lambda x: self._agent_id_filter(
str(instance.agentId), x))
return container
def _is_instance_active(self, instance, host):
if is_no_op(instance):
return True
client = docker_client()
container = self.get_container(client, instance)
return _is_running(client, container)
@staticmethod
def _setup_legacy_command(create_config, instance, command):
# This can be removed shortly once cattle removes
# commandArgs
if command is None or len(command.strip()) == 0:
return None
command_args = []
try:
command_args = instance.data.fields.commandArgs
except (KeyError, AttributeError):
pass
if command_args is not None and len(command_args) > 0:
command = [command]
command.extend(command_args)
if command is not None:
create_config['command'] = command
@staticmethod
def _setup_command(create_config, instance):
command = ""
try:
command = instance.data.fields.command
except (KeyError, AttributeError):
return None
if isinstance(command, basestring):
DockerCompute._setup_legacy_command(create_config, instance,
command)
else:
if command is not None:
create_config['command'] = command
@staticmethod
def _setup_dns_search(config, instance):
try:
if instance.systemContainer:
return
except (KeyError, AttributeError):
pass
# if only rancher search is specified,
# prepend search with params read from the system
all_rancher = True
try:
dns_search = config['dns_search']
if dns_search is None or len(dns_search) == 0:
return
for search in dns_search:
if search.endswith('rancher.internal'):
continue
all_rancher = False
break
except KeyError:
return
if not all_rancher:
return
# read host's resolv.conf
with open('/etc/resolv.conf', 'r') as f:
for line in f:
# in case multiple search lines
# respect the last one
s = []
if line.startswith('search'):
s = line.split()[1:]
for search in s[::-1]:
if search not in dns_search:
dns_search.insert(0, search)
@staticmethod
def _setup_links(start_config, instance):
links = {}
if 'instanceLinks' not in instance:
return
for link in instance.instanceLinks:
if link.targetInstanceId is not None:
links[link.targetInstance.uuid] = link.linkName
start_config['links'] = links
@staticmethod
def _setup_ports(create_config, instance, start_config):
ports = []
bindings = {}
try:
for port in instance.ports:
ports.append((port.privatePort, port.protocol))
if port.publicPort is not None:
bind = '{0}/{1}'.format(port.privatePort, port.protocol)
bind_addr = ''
try:
if port.data.fields['bindAddress'] is not None:
bind_addr = port.data.fields['bindAddress']
except (AttributeError, KeyError):
pass
host_bind = (bind_addr, port.publicPort)
if bind not in bindings:
bindings[bind] = [host_bind]
else:
bindings[bind].append(host_bind)
except (AttributeError, KeyError):
pass
if len(ports) > 0:
create_config['ports'] = ports
if len(bindings) > 0:
start_config['port_bindings'] = bindings
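    # Persist the instance definition into the host's container-state
    # directory, writing a temp file first and renaming it so the on-disk
    # state is updated atomically.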
def _record_state(self, client, instance, docker_id=None):
if docker_id is None:
container = self.get_container(client, instance)
if container is not None:
docker_id = container['Id']
if docker_id is None:
return
cont_dir = Config.container_state_dir()
tmp_file_path = path.join(cont_dir, 'tmp-%s' % docker_id)
if path.exists(tmp_file_path):
remove(tmp_file_path)
file_path = path.join(cont_dir, docker_id)
if path.exists(file_path):
remove(file_path)
if not path.exists(cont_dir):
makedirs(cont_dir)
with open(tmp_file_path, 'w') as outfile:
marshaller = get_type(MARSHALLER)
data = marshaller.to_string(instance)
outfile.write(data)
rename(tmp_file_path, file_path)
def purge_state(self, client, instance):
container = self.get_container(client, instance)
if container is None:
return
docker_id = container['Id']
cont_dir = Config.container_state_dir()
files = [path.join(cont_dir, 'tmp-%s' % docker_id),
path.join(cont_dir, docker_id)]
for f in files:
if path.exists(f):
remove(f)
def instance_activate(self, req=None, instanceHostMap=None,
processData=None, **kw):
instance, host = \
BaseComputeDriver.get_instance_host_from_map(self, instanceHostMap)
progress = Progress(req)
client = docker_client()
if instance is not None:
instance.processData = processData
with lock(instance):
if self._is_instance_active(instance, host):
self._record_state(client, instance)
return self._reply(req, self.
_get_response_data(req, instanceHostMap))
self._do_instance_activate(instance, host, progress)
data = self._get_response_data(req, instanceHostMap)
return self._reply(req, data)
def _do_instance_activate(self, instance, host, progress):
if is_no_op(instance):
return
client = docker_client()
image_tag = self._get_image_tag(instance)
name = instance.uuid
if instance.name and re.match(r'^[a-zA-Z0-9][a-zA-Z0-9_.-]+$',
instance.name):
try:
client.inspect_container('r-{}'.format(instance.name))
except NotFound:
name = 'r-{}'.format(instance.name)
create_config = {
'name': name,
'detach': True
}
start_config = {
'publish_all_ports': False,
'privileged': self._is_true(instance, 'privileged'),
'read_only': self._is_true(instance, 'readOnly'),
}
# These _setup_simple_config_fields calls should happen before all
# other config because they stomp over config fields that other
# setup methods might append to. Example: the environment field
self._setup_simple_config_fields(create_config, instance,
CREATE_CONFIG_FIELDS)
self._setup_simple_config_fields(start_config, instance,
START_CONFIG_FIELDS)
add_label(create_config, {UUID_LABEL: instance.uuid})
if instance.name:
add_label(create_config,
{'io.rancher.container.name': instance.name})
self._setup_dns_search(start_config, instance)
self._setup_logging(start_config, instance)
self._setup_hostname(create_config, instance)
self._setup_command(create_config, instance)
self._setup_ports(create_config, instance, start_config)
self._setup_volumes(create_config, instance, start_config, client)
self._setup_links(start_config, instance)
self._setup_networking(instance, host, create_config, start_config)
self._flag_system_container(instance, create_config)
self._setup_proxy(instance, create_config)
setup_cattle_config_url(instance, create_config)
create_config['host_config'] = \
client.create_host_config(**start_config)
self._setup_device_options(create_config['host_config'], instance)
container = self.get_container(client, instance)
created = False
if container is None:
container = self._create_container(client, create_config,
image_tag, instance, name,
progress)
created = True
container_id = container['Id']
log.info('Starting docker container [%s] docker id [%s] %s', name,
container_id, start_config)
try:
client.start(container_id)
except Exception as e:
if created:
remove_container(client, container)
raise e
self._record_state(client, instance, docker_id=container['Id'])
def _create_container(self, client, create_config, image_tag, instance,
name, progress):
log.info('Creating docker container [%s] from config %s', name,
create_config)
labels = create_config['labels']
if labels.get('io.rancher.container.pull_image', None) == 'always':
self._do_instance_pull(JsonObject({
'image': instance.image,
'tag': None,
'mode': 'all',
'complete': False,
}), progress)
try:
del create_config['name']
command = ''
try:
command = create_config['command']
del create_config['command']
except KeyError:
pass
config = client.create_container_config(image_tag,
command,
**create_config)
try:
id = instance.data
config['VolumeDriver'] = id.fields['volumeDriver']
except (KeyError, AttributeError):
pass
container = client.create_container_from_config(config, name)
except APIError as e:
if e.message.response.status_code == 404:
pull_image(instance.image, progress)
container = client.create_container_from_config(config,
name)
else:
raise
return container
def _flag_system_container(self, instance, create_config):
try:
if instance.systemContainer:
add_label(create_config, {
'io.rancher.container.system': instance.systemContainer})
except (KeyError, AttributeError):
pass
def _setup_proxy(self, instance, create_config):
try:
if instance.systemContainer:
if 'environment' not in create_config:
create_config['environment'] = {}
for i in ['http_proxy', 'https_proxy', 'NO_PROXY']:
try:
create_config['environment'][i] = environ[i]
except KeyError:
pass
except (KeyError, AttributeError):
pass
def _setup_simple_config_fields(self, config, instance, fields):
for src, dest in fields:
try:
src_obj = instance.data.fields[src]
config[dest] = JsonObject.unwrap(src_obj)
except (KeyError, AttributeError):
pass
def _setup_volumes(self, create_config, instance, start_config, client):
try:
volumes = instance.data.fields['dataVolumes']
volumes_map = {}
binds_map = {}
if volumes is not None and len(volumes) > 0:
for i in volumes:
parts = i.split(':', 3)
if len(parts) == 1:
volumes_map[parts[0]] = {}
else:
if len(parts) == 3:
mode = parts[2]
else:
mode = 'rw'
bind = {'bind': parts[1], 'mode': mode}
binds_map[parts[0]] = bind
create_config['volumes'] = volumes_map
start_config['binds'] = binds_map
except (KeyError, AttributeError):
pass
try:
containers = []
for vfc in instance['dataVolumesFromContainers']:
container = self.get_container(client, vfc)
if container:
containers.append(container['Id'])
if containers:
start_config['volumes_from'] = containers
except KeyError:
pass
try:
for v in instance['volumesFromDataVolumeMounts']:
if not DockerPool.is_volume_active(v):
DockerPool.do_volume_activate(v)
except KeyError:
pass
def _get_image_tag(self, instance):
try:
return instance.image.data.dockerImage.fullName
except (KeyError, AttributeError):
raise Exception('Can not start container with no image')
def _setup_logging(self, start_config, instance):
try:
if start_config.get('log_config', None) is None:
return
type = start_config['log_config']['driver']
del start_config['log_config']['driver']
start_config['log_config']['type'] = type
except (KeyError, AttributeError):
pass
for i in ['type', 'config']:
bad = True
try:
obj = start_config['log_config'][i]
if obj is not None:
bad = False
start_config['log_config'][i] = JsonObject.unwrap(obj)
except (KeyError, AttributeError):
pass
if bad and 'log_config' in start_config:
del start_config['log_config']
def _setup_hostname(self, create_config, instance):
try:
create_config['hostname'] = instance.hostname
except (KeyError, AttributeError):
pass
def _setup_device_options(self, config, instance):
option_configs = \
[('readIops', [], 'BlkioDeviceReadIOps', 'Rate'),
('writeIops', [], 'BlkioDeviceWriteIOps', 'Rate'),
('readBps', [], 'BlkioDeviceReadBps', 'Rate'),
('writeBps', [], 'BlkioDeviceWriteBps', 'Rate'),
('weight', [], 'BlkioWeightDevice', 'Weight')]
try:
device_options = instance.data.fields['blkioDeviceOptions']
except (KeyError, AttributeError):
return
for dev, options in device_options.iteritems():
if dev == 'DEFAULT_DISK':
dev = self.host_info.get_default_disk()
if not dev:
log.warn("Couldn't find default device. Not setting"
"device options: %s", options)
continue
for k, dev_list, _, field in option_configs:
if k in options and options[k] is not None:
value = options[k]
dev_list.append({'Path': dev, field: value})
for _, dev_list, docker_field, _ in option_configs:
if len(dev_list):
config[docker_field] = dev_list
def _setup_networking(self, instance, host, create_config, start_config):
client = docker_client()
ports_supported, hostname_supported = setup_network_mode(instance,
self, client,
create_config,
start_config)
setup_mac_and_ip(instance, create_config, set_mac=ports_supported,
set_hostname=hostname_supported)
setup_ports(instance, create_config, start_config, ports_supported)
setup_links(instance, create_config, start_config)
setup_ipsec(instance, host, create_config, start_config)
setup_dns(instance)
def _is_true(self, instance, key):
try:
return instance.data.fields[key] is True
except (KeyError, AttributeError):
return False
def _get_instance_host_map_data(self, obj):
client = docker_client()
inspect = None
docker_mounts = None
existing = self.get_container(client, obj.instance)
docker_ports = []
docker_ip = None
try:
if existing is not None:
inspect = client.inspect_container(existing['Id'])
docker_mounts = self._get_mount_data(obj.host, existing['Id'])
docker_ip = inspect['NetworkSettings']['IPAddress']
if existing.get('Ports') is not None:
for port in existing['Ports']:
private_port = '{0}/{1}'.format(port['PrivatePort'],
port['Type'])
port_spec = private_port
bind_addr = ''
if 'IP' in port:
bind_addr = '%s:' % port['IP']
public_port = ''
if 'PublicPort' in port:
public_port = '%s:' % port['PublicPort']
elif 'IP' in port:
public_port = ':'
port_spec = bind_addr + public_port + port_spec
docker_ports.append(port_spec)
except NotFound:
pass
update = {
'instance': {
'+data': {
'dockerContainer': existing,
'dockerInspect': inspect,
'+fields': {
'dockerHostIp': DockerConfig.docker_host_ip(),
'dockerPorts': docker_ports,
'dockerIp': docker_ip
}
}
}
}
if existing is not None:
update['instance']['externalId'] = existing['Id']
if docker_mounts is not None:
update['instance']['+data']['dockerMounts'] = docker_mounts
return update
def _get_mount_data(self, host, container_id):
try:
client = docker_client(version='1.21')
inspect = client.inspect_container(container_id)
return inspect['Mounts']
except (KeyError, APIError):
pass
def _is_instance_inactive(self, instance, host):
if is_no_op(instance):
return True
c = docker_client()
container = self.get_container(c, instance)
return _is_stopped(c, container)
def _do_instance_deactivate(self, instance, host, progress):
if is_no_op(instance):
return
c = docker_client()
timeout = 10
try:
timeout = int(instance.processData.timeout)
except (TypeError, KeyError, AttributeError):
pass
container = self.get_container(c, instance)
c.stop(container['Id'], timeout=timeout)
container = self.get_container(c, instance)
if not _is_stopped(c, container):
c.kill(container['Id'])
container = self.get_container(c, instance)
if not _is_stopped(c, container):
raise Exception('Failed to stop container {0}'
.format(instance.uuid))
def _do_instance_force_stop(self, instanceForceStop):
try:
docker_client().stop(instanceForceStop['id'])
except APIError as e:
if e.message.response.status_code != 404:
raise e
def _is_instance_removed(self, instance, host):
client = docker_client()
container = self.get_container(client, instance)
return container is None
def _do_instance_remove(self, instance, host, progress):
client = docker_client()
container = self.get_container(client, instance)
if container is None:
return
remove_container(client, container)
def _do_instance_pull(self, pull_info, progress):
client = docker_client()
image = pull_info.image.data.dockerImage
try:
existing = client.inspect_image(image.fullName)
except APIError:
existing = None
if pull_info.mode == 'cached' and existing is None:
return existing
if pull_info.complete:
if existing is not None:
client.remove_image(image.fullName + pull_info.tag)
return
DockerPool.image_pull(pull_info.image, progress)
if pull_info.tag is not None:
image_info = DockerPool.parse_repo_tag(image.fullName)
client.tag(image.fullName, image_info['repo'],
image_info['tag'] + pull_info.tag, force=True)
return client.inspect_image(image.fullName)
def _do_instance_inspect(self, instanceInspectRequest):
client = docker_client()
container = None
try:
container_id = instanceInspectRequest.id
container = self.get_container_by(client,
lambda x: self._id_filter(
container_id, x))
except (KeyError, AttributeError):
pass
if not container:
try:
name = '/{0}'.format(instanceInspectRequest.name)
container = self.get_container_by(client,
lambda x: self._name_filter(
name, x))
except (KeyError, AttributeError):
pass
if container:
inspect = client.inspect_container(container)
return inspect
|
rancherio/python-agent
|
cattle/plugins/docker/compute.py
|
Python
|
apache-2.0
| 33,197 | 0.00009 |
#!/usr/bin/env python
__author__ = 'Jamie Diprose'
import rospy
from sensor_msgs.msg import JointState
from ros_pololu_servo.msg import servo_pololu
import math
class EinsteinController():
def __init__(self):
rospy.init_node('einstein_controller')
rospy.Subscriber("joint_angles", JointState, self.handle_joint_angles, queue_size=10)
self.pololu_pub = rospy.Publisher("cmd_pololu", servo_pololu)
self.joint_ids = {'neck_yaw': 23, 'neck_roll': 2, 'neck_pitch': 3}
def handle_joint_angles(self, msg):
rospy.logdebug("Received a joint angle target")
for i, joint_name in enumerate(msg.name):
servo_msg = servo_pololu()
servo_msg.id = self.joint_ids[joint_name]
servo_msg.angle = msg.position[i]
            servo_msg.speed = (msg.velocity[i] * 255.0)  # JointState velocity is a per-joint array
            servo_msg.acceleration = msg.effort[i]  # TODO: check this
            self.pololu_pub.publish(servo_msg)
        # TODO: enforce joint angle limits
if __name__ == '__main__':
rospy.loginfo("Starting einstein_controller...")
    controller = EinsteinController()  # node setup (subscriber/publisher) happens in __init__
rospy.loginfo("einstein_controller started")
rospy.spin()
rospy.loginfo("einstein_controller stopped")
|
jdddog/einstein_robot
|
einstein_driver/src/einstein_controller.py
|
Python
|
bsd-3-clause
| 1,255 | 0.004781 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-19 21:08
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Subway',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('coordinates', django.contrib.gis.db.models.fields.PointField(null=True, srid=4326)),
('name', models.CharField(max_length=64)),
],
options={
'abstract': False,
},
),
]
|
KraftSoft/together
|
location/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 767 | 0.002608 |
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
from twisted.internet.protocol import Protocol
from lxml import etree
from fake_switches.netconf import dict_2_etree, NS_BASE_1_0, normalize_operation_name, SimpleDatastore, \
Response, OperationNotSupported, NetconfError
from fake_switches.netconf.capabilities import Base1_0
class NetconfProtocol(Protocol):
def __init__(self, datastore=None, capabilities=None, additionnal_namespaces=None, logger=None):
self.logger = logger or logging.getLogger("fake_switches.netconf")
self.input_buffer = ""
self.session_count = 0
self.been_greeted = False
self.datastore = datastore or SimpleDatastore()
caps_class_list = capabilities or []
caps_class_list.insert(0, Base1_0)
self.capabilities = [cap(self.datastore) for cap in caps_class_list]
self.additionnal_namespaces = additionnal_namespaces or {}
def __call__(self, *args, **kwargs):
return self
def connectionMade(self):
self.logger.info("Connected, sending <hello>")
self.session_count += 1
self.say(dict_2_etree({
"hello": [
{"session-id": str(self.session_count)},
{"capabilities": [{"capability": cap.get_url()} for cap in self.capabilities]}
]
}))
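    # NETCONF 1.0 frames every message with the ']]>]]>' end-of-message marker;
    # buffer incoming data until a complete frame has arrived, then strip the
    # marker and hand the XML payload to process().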
def dataReceived(self, data):
self.logger.info("Received : %s" % repr(data))
self.input_buffer += data
if self.input_buffer.rstrip().endswith("]]>]]>"):
self.process(self.input_buffer.rstrip()[0:-6])
self.input_buffer = ""
def process(self, data):
if not self.been_greeted:
self.logger.info("Client's greeting received")
self.been_greeted = True
return
xml_request_root = remove_namespaces(etree.fromstring(data))
message_id = xml_request_root.get("message-id")
operation = xml_request_root[0]
self.logger.info("Operation requested %s" % repr(operation.tag))
handled = False
operation_name = normalize_operation_name(operation)
for capability in self.capabilities:
if hasattr(capability, operation_name):
try:
self.reply(message_id, getattr(capability, operation_name)(operation))
except NetconfError as e:
self.reply(message_id, error_to_response(e))
handled = True
if not handled:
self.reply(message_id, error_to_response(OperationNotSupported(operation_name)))
def reply(self, message_id, response):
reply = etree.Element("rpc-reply", xmlns=NS_BASE_1_0, nsmap=self.additionnal_namespaces)
reply.attrib["message-id"] = message_id
reply.append(response.etree)
self.say(reply)
if response.require_disconnect:
self.logger.info("Disconnecting")
self.transport.loseConnection()
def say(self, etree_root):
self.logger.info("Saying : %s" % repr(etree.tostring(etree_root)))
self.transport.write(etree.tostring(etree_root, pretty_print=True) + "]]>]]>\n")
def error_to_response(error):
error_specs = {
"error-message": error.message
}
if error.type: error_specs["error-type"] = error.type
if error.tag: error_specs["error-tag"] = error.tag
if error.severity: error_specs["error-severity"] = error.severity
if error.info: error_specs["error-info"] = error.info
return Response(dict_2_etree({"rpc-error": error_specs}))
def remove_namespaces(xml_root):
xml_root.tag = unqualify(xml_root.tag)
for child in xml_root:
remove_namespaces(child)
return xml_root
def unqualify(tag):
    return re.sub(r"\{[^\}]*\}", "", tag)
|
mlecours/fake-switches
|
fake_switches/netconf/netconf_protocol.py
|
Python
|
apache-2.0
| 4,337 | 0.003689 |
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DEBUG = False
ALLOWED_HOSTS = ['localhost', '127.0.0.1']
SECRET_KEY = 'my-key'
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = [
'tests',
'cloudinary_storage',
# 'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
],
},
},
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
STATIC_URL = '/static/'
STATICFILES_STORAGE = 'cloudinary_storage.storage.StaticHashedCloudinaryStorage'
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = 'cloudinary_storage.storage.MediaCloudinaryStorage'
CLOUDINARY_STORAGE = {
'CLOUD_NAME': os.getenv('CLOUDINARY_CLOUD_NAME', 'my-cloud-name'),
'API_KEY': os.getenv('CLOUDINARY_API_KEY', 'my-api-key'),
'API_SECRET': os.getenv('CLOUDINARY_API_SECRET', 'my-api-secret')
}
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
},
}
|
klis87/django-cloudinary-storage
|
tests/settings.py
|
Python
|
mit
| 2,246 | 0.000445 |
import unittest
from isbn_verifier import is_valid
# Tests adapted from `problem-specifications//canonical-data.json`
class IsbnVerifierTest(unittest.TestCase):
def test_valid_isbn(self):
self.assertIs(is_valid("3-598-21508-8"), True)
def test_invalid_isbn_check_digit(self):
self.assertIs(is_valid("3-598-21508-9"), False)
def test_valid_isbn_with_a_check_digit_of_10(self):
self.assertIs(is_valid("3-598-21507-X"), True)
def test_check_digit_is_a_character_other_than_x(self):
self.assertIs(is_valid("3-598-21507-A"), False)
def test_invalid_character_in_isbn(self):
self.assertIs(is_valid("3-598-P1581-X"), False)
def test_x_is_only_valid_as_a_check_digit(self):
self.assertIs(is_valid("3-598-2X507-9"), False)
def test_valid_isbn_without_separating_dashes(self):
self.assertIs(is_valid("3598215088"), True)
def test_isbn_without_separating_dashes_and_x_as_check_digit(self):
self.assertIs(is_valid("359821507X"), True)
def test_isbn_without_check_digit_and_dashes(self):
self.assertIs(is_valid("359821507"), False)
def test_too_long_isbn_and_no_dashes(self):
self.assertIs(is_valid("3598215078X"), False)
def test_too_short_isbn(self):
self.assertIs(is_valid("00"), False)
def test_isbn_without_check_digit(self):
self.assertIs(is_valid("3-598-21507"), False)
def test_check_digit_of_x_should_not_be_used_for_0(self):
self.assertIs(is_valid("3-598-21515-X"), False)
def test_empty_isbn(self):
self.assertIs(is_valid(""), False)
def test_input_is_9_characters(self):
self.assertIs(is_valid("134456729"), False)
def test_invalid_characters_are_not_ignored(self):
self.assertIs(is_valid("3132P34035"), False)
def test_input_is_too_long_but_contains_a_valid_isbn(self):
self.assertIs(is_valid("98245726788"), False)
if __name__ == "__main__":
unittest.main()
|
TGITS/programming-workouts
|
exercism/python/isbn-verifier/isbn_verifier_test.py
|
Python
|
mit
| 1,989 | 0 |
from datetime import datetime
import mock
from nose.tools import eq_
import mkt
import mkt.site.tests
from mkt.account.serializers import (AccountSerializer, AccountInfoSerializer,
TOSSerializer)
from mkt.users.models import UserProfile
class TestAccountSerializer(mkt.site.tests.TestCase):
def setUp(self):
self.account = UserProfile()
def serializer(self):
return AccountSerializer(instance=self.account)
def test_display_name_returns_name(self):
with mock.patch.object(UserProfile, 'name', 'Account name'):
eq_(self.serializer().data['display_name'], 'Account name')
def test_recommendations(self):
# Test default.
eq_(self.serializer().data['enable_recommendations'], True)
self.account.enable_recommendations = False
eq_(self.serializer().data['enable_recommendations'], False)
class TestAccountInfoSerializer(mkt.site.tests.TestCase):
UNKNOWN = mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_UNKNOWN]
FIREFOX_ACCOUNTS = mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_FXA]
PERSONA = mkt.LOGIN_SOURCE_LOOKUP[mkt.LOGIN_SOURCE_BROWSERID]
def setUp(self):
self.account = UserProfile()
self.account.pk = 25
def serializer(self):
return AccountInfoSerializer(instance=self.account)
def test_source_is_a_slug_default(self):
eq_(self.serializer().data['source'], self.PERSONA)
def test_source_is_unknown(self):
self.account.source = mkt.LOGIN_SOURCE_UNKNOWN
eq_(self.serializer().data['source'], self.PERSONA)
def test_source_is_fxa(self):
self.account.source = mkt.LOGIN_SOURCE_FXA
eq_(self.serializer().data['source'], self.FIREFOX_ACCOUNTS)
def test_source_is_invalid(self):
self.account.source = -1
eq_(self.serializer().data['source'], self.PERSONA)
def test_source_is_unrelated(self):
self.account.source = mkt.LOGIN_SOURCE_BROWSERID
eq_(self.serializer().data['source'], self.PERSONA)
def test_account_has_no_pk(self):
self.account.source = mkt.LOGIN_SOURCE_FXA
self.account.pk = None
eq_(self.serializer().data['source'], self.UNKNOWN)
def test_source_is_read_only(self):
serializer = AccountInfoSerializer(
instance=None,
data={'source': mkt.LOGIN_SOURCE_FXA, 'display_name': 'Hey!'},
partial=True)
eq_(serializer.is_valid(), True)
# This works because the model field is `editable=False`.
eq_(serializer.save().source, mkt.LOGIN_SOURCE_UNKNOWN)
def test_not_verified(self):
self.account.is_verified = False
eq_(self.serializer().data['verified'], False)
def test_verified(self):
self.account.is_verified = True
eq_(self.serializer().data['verified'], True)
class TestTOSSerializer(mkt.site.tests.TestCase):
def setUp(self):
self.account = UserProfile()
def serializer(self):
context = {
'request': mkt.site.tests.req_factory_factory('')
}
context['request'].user = self.account
return TOSSerializer(instance=self.account, context=context)
def test_has_signed(self):
eq_(self.serializer().data['has_signed'], False)
self.account.read_dev_agreement = datetime.now()
eq_(self.serializer().data['has_signed'], True)
|
ingenioustechie/zamboni
|
mkt/account/tests/test_serializers.py
|
Python
|
bsd-3-clause
| 3,416 | 0 |
from .sample_filter import SampleFilter, GtFilter
from .sv_gt_filter import SvGtFilter
import logging
from collections import OrderedDict, defaultdict
class FamilyFilter(object):
'''
Determine whether variants/alleles fit given inheritance
patterns for families.
'''
def __init__(self, ped, vcf, infer_inheritance=True, g2p=None,
check_g2p_consequence=None, force_inheritance=None,
logging_level=logging.WARNING):
'''
Initialize with Family object from ped_file.py and a
VcfReader object from vcf_reader.py. You may also specify an
inheritance pattern (either 'recessive' or 'dominant'). If
inheritance_pattern is not specified an attempt is made to
infer an appropriate inheritance pattern based on the family
structure and affecteds.
Args:
ped: A PedFile object from ped_file.py. Must contain
at least one affected individual.
vcf: A VcfReader object containing data from at least
some of the affected individuals in the given
family.
infer_inheritance:
If True, infer possible inheritance patterns
for each family in the PedFile. Inferred patterns
are stored in self.inheritance_patterns dict
(keys are families, values are lists of
inheritance patterns).
g2p: G2P object from vase.g2p for filtering on
presence and inheritance requirements from a G2P
file.
check_g2p_consequence:
If using a G2P object for gene filtering, also
filter on consequence type as described for each
gene. Note that the mapping of mutation
consequence to consequence type is quite crude
and should be used with caution (see the
mutation_to_csq dict in vase/g2p.py for the
mappings used).
force_inheritance:
Optionally specify an inheritance pattern to
test for each family - either 'dominant' or
'recessive' is allowed. If infer_inheritance is
True, these patterns will be tested in addition
to inferred patterns.
logging_level:
The level at which logging messages are
displayed. Defaults to logging.WARNING
'''
self.logger = self._get_logger(logging_level)
self.affected = tuple(ped.get_affected())
self.unaffected = tuple(ped.get_unaffected())
self.obligate_carriers = dict()
self.ped = ped
self.vcf = vcf
self.g2p = g2p
self.check_g2p_consequence = check_g2p_consequence
if not self.affected:
raise RuntimeError("No affected individuals found in PED file '{}'"
.format(ped.filename))
self.vcf_affected = list(x for x in self.affected
if x in self.vcf.header.samples)
if not self.vcf_affected:
raise RuntimeError("No affected individuals in PED file '{}'"
.format(ped.filename) + " found in VCF " +
"'{}'".format(vcf.filename))
self.vcf_unaffected = list(x for x in self.unaffected
if x in self.vcf.header.samples)
self.vcf_samples = self.vcf_affected + self.vcf_unaffected
self.inheritance_patterns = defaultdict(list)
if infer_inheritance:
self._infer_inheritance()
if force_inheritance:
if force_inheritance not in ('dominant', 'recessive'):
raise RuntimeError("Unrecognised inheritance pattern " +
"specified with 'force_inheritance' " +
"argument. Valid options are 'dominant' " +
"or 'recessive'.")
for fid in self.ped.families:
self.inheritance_patterns[fid].append(force_inheritance)
def _infer_inheritance(self):
'''
Simplistic method for determining likely relevant
inheritance pattern. For affected individuals in a family
a check is made whether parents or grandparents are also
affected. Currently only dominant or recessive inheritance
is inferred, no attempt to infer X-linked or mitochondrial
inheritance is made and it will not spot pseudodominance.
'''
for fid, fam in self.ped.families.items():
n_affected = 0
no_parents = True
both_pars_unaffected = False
dominant = False
denovo = False
recessive = False
self.logger.info("Assessing inheritance pattern of family {}"
.format(fid))
f_aff = tuple(fam.get_affected())
obligate_carriers = set()
if not f_aff:
continue
for iid in f_aff:
self.logger.info("Checking affected individual {}".format(iid))
n_affected += 1
indv = fam.individuals[iid]
if not indv.parents:
self.logger.info("No parents for affected individual {}"
.format(iid))
continue
no_parents = False
p_unaff = 0
for par in indv.parents:
# is parent affected
if par not in fam.individuals:
if par in self.vcf.header.samples:
self.logger.warn("Family '{}' parent '{}' ".format(
fid, par) + "not specified in " +
"PED, but present in VCF - " +
"assuming unaffected")
self.vcf_samples.append(par)
self.vcf_unaffected.append(par)
p_unaff += 1
continue
parent = fam.individuals[par]
par_to_child = False
gpar_to_child = False
if parent.is_affected():
self.logger.info("Apparent vertical transmission " +
"from {} -> {}" .format(par, iid))
par_to_child = True
else:
p_unaff += 1
for gpar in parent.parents:
if fam.individuals[gpar].is_affected():
gpar_to_child = True
msg = "Apparent vertical transmission "
if par_to_child:
msg += ("from {} -> {} -> {}"
.format(gpar, par, iid))
else:
msg += ("with partial penetrance from " +
"{} -> ({}) -> {}"
.format(gpar, par, iid))
obligate_carriers.add(par)
self.logger.info(msg)
if par_to_child or gpar_to_child:
dominant = True
if p_unaff == 2:
both_pars_unaffected = True
if not dominant:
recessive = True
if no_parents or not both_pars_unaffected:
# missing information on one/both parents - could be dominant
dominant = True
if recessive and n_affected == 1 and not no_parents:
f_par = fam.individuals[f_aff[0]].parents
if len(f_par) != 2:
self.logger.info("Can not analyze {} under ".format(fid) +
"a de novo model due to missing parents" +
" in ped")
dominant = True
elif (f_par[0] not in self.vcf.header.samples or
f_par[1] not in self.vcf.header.samples):
self.logger.info("Can not analyze {} under ".format(fid) +
"a de novo model due to missing parents" +
" in VCF")
else:
denovo = True
elif recessive and n_affected > 1:
# we can entertain apparent de novos due to somatic mosaicism
# if all affecteds share a parent
pars = fam.individuals[f_aff[0]].parents
shared_pars = None
if len(pars) != 2:
self.logger.info("Can not analyze {} under ".format(fid) +
"a de novo model due to missing parents" +
" in ped")
dominant = True
else:
shared_pars = set(pars)
for i in range(1, len(f_aff)):
ipars = self.ped.individuals[f_aff[i]].parents
if ipars is None:
break
shared_pars = shared_pars.intersection(ipars)
if not shared_pars:
break
if shared_pars:
denovo = True
for par in shared_pars:
if par not in self.vcf_samples:
self.logger.info("Can not analyze {}".format(fid) +
"under a de novo model due to " +
"missing parents in VCF")
denovo = False
break
self.inheritance_patterns[fid] = []
if recessive:
self.logger.info("Family '{}' " .format(fid) + "can be " +
"analysed under a recessive model")
self.inheritance_patterns[fid].append('recessive')
if denovo:
dmodel = "de novo"
if n_affected > 1:
dmodel += " (with germline mosaicism)"
self.logger.info("Family '{}' " .format(fid) + "can be " +
"analysed under a {} model" .format(dmodel))
self.inheritance_patterns[fid].append('de_novo')
if dominant:
self.logger.info("Family '{}' " .format(fid) + "can be " +
"analysed under a dominant model")
self.inheritance_patterns[fid].append('dominant')
self.obligate_carriers[fid] = tuple(obligate_carriers)
def _get_logger(self, logging_level):
logger = logging.getLogger(__name__)
if not logger.hasHandlers():
logger.setLevel(logging_level)
formatter = logging.Formatter(
'[%(asctime)s] %(name)s - %(levelname)s - %(message)s')
ch = logging.StreamHandler()
ch.setLevel(logger.level)
ch.setFormatter(formatter)
logger.addHandler(ch)
return logger
class InheritanceFilter(object):
'''
Parent class for RecessiveFilter/DominantFilter/DeNovoFilter
object.
'''
def __init__(self, family_filter, gt_args, min_families=1,
report_file=None, snpeff_mode=False):
'''
Create genotype filter objects and initialise family filtering
arguments.
Args:
family_filter:
Parent FamilyFilter object, initialized with
VCF and PED files.
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
min_families:
Require at least this many families to have
qualifying alleles in a feature before
outputting. Default=1.
report_file:
                Deprecated. Use vase_reporter after inheritance
                filtering to process VCFs instead.
snpeff_mode:
Use SnpEff annotations instead of VEP annotations
from input VCF.
'''
self.family_filter = family_filter
self.min_families = min_families
self.ped = family_filter.ped
self.samples = family_filter.vcf_samples
self.unaffected = family_filter.vcf_unaffected
self.gt_filter = GtFilter(family_filter.vcf,
gq=gt_args.get('gq'),
dp=gt_args.get('dp'),
max_dp=gt_args.get('max_dp'),
het_ab=gt_args.get('het_ab'),
hom_ab=gt_args.get('hom_ab'))
self._gt_fields = set(self.gt_filter.fields)
if gt_args.get('min_control_gq') is None:
gt_args['min_control_gq'] = gt_args.get('gq')
if gt_args.get('min_control_dp') is None:
gt_args['min_control_dp'] = gt_args.get('dp')
if gt_args.get('max_control_dp') is None:
gt_args['max_control_dp'] = gt_args.get('max_dp')
if gt_args.get('control_het_ab') is None:
gt_args['control_het_ab'] = gt_args.get('het_ab')
if gt_args.get('control_hom_ab') is None:
gt_args['control_hom_ab'] = gt_args.get('hom_ab')
self.con_gt_filter = GtFilter(family_filter.vcf,
gq=gt_args.get('min_control_gq'),
dp=gt_args.get('min_control_dp'),
max_dp=gt_args.get('max_control_dp'),
het_ab=gt_args.get('control_het_ab'),
hom_ab=gt_args.get('control_hom_ab'),
ref_ab_filter=gt_args.get('con_ref_ab'))
self._gt_fields.update(self.con_gt_filter.fields)
if gt_args.get('sv_min_control_gq') is None:
gt_args['sv_min_control_gq'] = gt_args.get('sv_gq')
if gt_args.get('sv_min_control_dp') is None:
gt_args['sv_min_control_dp'] = gt_args.get('sv_dp')
if gt_args.get('sv_max_control_dp') is None:
gt_args['sv_max_control_dp'] = gt_args.get('sv_max_dp')
if gt_args.get('sv_control_het_ab') is None:
gt_args['sv_control_het_ab'] = gt_args.get('sv_het_ab')
if gt_args.get('sv_control_hom_ab') is None:
gt_args['sv_control_hom_ab'] = gt_args.get('sv_hom_ab')
if gt_args.get('control_del_dhffc') is None:
gt_args['control_del_dhffc'] = gt_args.get('del_dhffc')
if gt_args.get('control_dup_dhbfc') is None:
gt_args['control_dup_dhbfc'] = gt_args.get('dup_dhbfc')
self.sv_gt_filter = SvGtFilter(family_filter.vcf,
gq=gt_args.get('sv_gq'),
dp=gt_args.get('sv_dp'),
max_dp=gt_args.get('sv_max_dp'),
het_ab=gt_args.get('sv_het_ab'),
hom_ab=gt_args.get('sv_hom_ab'),
del_dhffc=gt_args.get('del_dhffc'),
dup_dhbfc=gt_args.get('dup_dhbfc'))
self._sv_gt_fields = set(self.sv_gt_filter.fields)
self.sv_con_gt_filter = SvGtFilter(
family_filter.vcf,
gq=gt_args.get('sv_min_control_gq'),
dp=gt_args.get('sv_min_control_dp'),
max_dp=gt_args.get('sv_max_control_dp'),
het_ab=gt_args.get('sv_control_het_ab'),
hom_ab=gt_args.get('sv_control_hom_ab'),
ref_ab_filter=gt_args.get('sv_con_ref_ab'),
del_dhffc=gt_args.get('control_del_dhffc'),
dup_dhbfc=gt_args.get('control_dup_dhbfc'))
self._sv_gt_fields.update(self.sv_con_gt_filter.fields)
self._prev_coordinate = (None, None) # to ensure records are processed
self._processed_contigs = set() # in coordinate order
if snpeff_mode:
try:
self._csq_header = self.family_filter.vcf.header.ann_fields
except KeyError:
self._csq_header = None # only required for report file
self.csq_attribute = 'ANN'
self.feature_label = 'Feature_ID'
else:
try:
self._csq_header = self.family_filter.vcf.header.csq_fields
except KeyError:
self._csq_header = None # only required for report file
self.csq_attribute = 'CSQ'
self.feature_label = 'Feature'
        self.report_file = report_file
        if self.report_file:
self._write_report_header()
def get_header_fields(self):
'''
Return dict of dicts with INFO header field names as keys
and dicts of features as values. These are suitable for
handing to VcfHeader class's add_header_field() method.
Each INFO field must be defined in self.header_fields in
the child class, which should be a list of tuples where
        each tuple consists of the name and description of the
field.
'''
hf = dict()
for f in self.header_fields:
hf[f[0]] = {'Number': 'A',
'Type': 'String',
'Description': f[1]}
return hf
def confirm_heterozygous(self, record, samples):
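        # a genotype is confirmed heterozygous if it has exactly two distinct alleles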
for s in samples:
if len(set(record.samples[s]['GT'])) != 2:
return False
return True
def _get_allele_counts(self, allele, rec):
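        # Map each sample to the number of copies of 'allele' it carries,
        # or None if the sample's genotype fails the relevant GT filters.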
a_counts = dict()
gt_filter_args = dict()
if rec.IS_SV:
gt_filter = self.sv_gt_filter
control_filter = self.sv_con_gt_filter
gt_filter_args['svtype'] = rec.record.info.get('SVTYPE', '')
else:
gt_filter = self.gt_filter
control_filter = self.con_gt_filter
for samp in self.unaffected:
if control_filter.gt_is_ok(rec.record.samples, samp, allele,
**gt_filter_args):
a_counts[samp] = rec.record.samples[samp]['GT'].count(allele)
else:
a_counts[samp] = None
if (rec.record.samples[samp]['GT'] == (0, 0) and
control_filter.ad_over_threshold is not None):
if control_filter.ad_over_threshold(rec.record.samples, samp,
allele):
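                    # hom-ref control with ALT support above the ref AB
                    # threshold: treat as a carrier of one copy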
a_counts[samp] = 1
for samp in self.affected:
if gt_filter.gt_is_ok(rec.record.samples, samp, allele,
**gt_filter_args):
a_counts[samp] = rec.record.samples[samp]['GT'].count(allele)
else:
a_counts[samp] = None
return a_counts
def _check_sorted(self, record):
if self._prev_coordinate[0] != record.chrom:
if record.chrom in self._processed_contigs:
raise RuntimeError("Input must be sorted by chromosome and " +
"position for recessive filtering. " +
"Contig '{}' " .format(record.chrom) +
"encountered before and after contig " +
"'{}'." .format(self._prev_coordinate[0]))
if self._prev_coordinate[0] is not None:
self._processed_contigs.add(self._prev_coordinate[0])
elif record.pos < self._prev_coordinate[1]:
raise RuntimeError("Input must be sorted by chromosome and " +
"position for inheritance filtering. " +
"Encountered position {}:{} after {}:{}"
.format(record.chrom, record.pos,
self._prev_coordinate[0],
self._prev_coordinate[1]))
self._prev_coordinate = (record.chrom, record.pos)
def process_record(self, record):
'''Return True if record should be printed/kept'''
return NotImplementedError("process_record method should be " +
"overriden by child class!")
def _write_report_header(self):
if self._csq_header is not None:
header = str.join("\t", (x for x in self._csq_header if x !=
'Allele'))
header += "\tALT_No.\t" + str.join("\t", self.annot_fields)
header += "\tCHROM\tPOS\tID\tREF\tALT\tALLELE\tQUAL\tFILTER"
self.report_file.write(header + "\n")
def check_g2p(self, record, ignore_csq, inheritance, csqs=None):
if self.family_filter.g2p:
if csqs is None:
csqs = getattr(record, self.csq_attribute)
if self.family_filter.check_g2p_consequence:
fail = (not x for x in
self.family_filter.g2p.csq_and_allelic_requirement_met(
csqs, inheritance))
else:
fail = (not x for x in
self.family_filter.g2p.allelic_requirement_met(
csqs, inheritance))
if ignore_csq:
ignore_csq = [x or y for x, y in zip(ignore_csq, fail)]
else:
ignore_csq = list(fail)
return ignore_csq
class RecessiveFilter(InheritanceFilter):
'''
This class assumes that each family has a shared biallelic
genetic cause of disease. It will not cope with phenocopies,
pseudodominance or other more complicated inheritance patterns.
'''
def __init__(self, family_filter, gt_args, min_families=1,
snpeff_mode=False, strict=False, exclude_denovo=False,
report_file=None):
'''
Args:
family_filter:
FamilyFilter object
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
min_families:
Require at least this many families to have a
qualifying biallelic combination of alleles in
a feature before outputting. Default=1.
snpeff_mode:
Use SnpEff annotations instead of VEP annotations
from input VCF.
strict: If True, for any affected sample with
parents, require confirmation of parental
genotypes. If either parent genotype is a
no-call for a record, then the record will
be ignored. Default=False.
exclude_denovo:
If True, where there is data available from
both parents for an affected individual
                ignore apparent de novo occurring alleles.
Default=False.
report_file:
Output filehandle for writing summaries of
segregating variants to. Default=None.
'''
self.prefix = "VASE_biallelic"
self.header_fields = [
("VASE_biallelic_homozygous",
'Samples that carry homozygous biallelic changes ' +
             'parsed by {}' .format(type(self).__name__)),
("VASE_biallelic_compound_het",
'Samples that carry compound heterozygous biallelic changes ' +
'parsed by {}'.format(type(self).__name__)),
("VASE_biallelic_de_novo",
'Samples that carry biallelic alleles that appear to have ' +
'arisen de novo'),
('VASE_biallelic_families',
'Family IDs for VASE_biallelic alleles'),
("VASE_biallelic_features",
'Features (e.g. transcripts) that contain qualifying ' +
'biallelic variants parsed by {}' .format(
type(self).__name__))]
self.annot_fields = ('homozygous', 'compound_het', 'de_novo',
'families', 'features')
self.report_file = report_file
super().__init__(family_filter, gt_args, min_families=min_families,
snpeff_mode=snpeff_mode, report_file=report_file)
self.families = tuple(x for x in
self.family_filter.inheritance_patterns
if 'recessive' in
self.family_filter.inheritance_patterns[x])
self.affected = tuple(x for x in family_filter.vcf_affected if
self.ped.individuals[x].fid in self.families)
self._fam_to_aff = dict()
for fid in self.families:
self._fam_to_aff[fid] = set(x for x in
self.ped.families[fid].get_affected()
if x in self.affected)
self.family_filter.logger.info("Analysing family {} ".format(fid) +
"under a recessive model")
self.strict = strict
self.exclude_denovo = exclude_denovo
self._potential_recessives = dict()
self._current_features = set()
self._processed_features = set()
def process_record(self, record, ignore_alleles=[], ignore_csq=[]):
'''
Returns True if record should be stored for checking against
other records overlapping the same features to see if they
constitute biallelic variation.
Stores potential recessive records per allele for
segregation checking once overlapping features have been
traversed.
Args:
record: VaseRecord
ignore_alleles:
List of booleans indicating for each ALT in
order whether it should be ignored in relation
to possible recessive variation (e.g. if MAF is
too high, no likely pathogenic consequence
etc.). This will normally have been generated
by VaseRunner via VcfFilter and/or VepFilter
classes.
ignore_csq:
List of booleans indicating for each CSQ in
order whether it should be ignored in relation
to possible recessive variation. This should
normally have been generated by a corresponding
VepFilter object.
'''
stored = False
self._check_sorted(record.record)
record_csqs = getattr(record, self.csq_attribute)
self._current_features = set(c[self.feature_label] for c in record_csqs
if c[self.feature_label] != '')
ignore_csq = self.check_g2p(record, ignore_csq, 'recessive',
csqs=record_csqs)
if ignore_csq and all(ignore_csq):
return False
gt_filter_args = dict()
if record.IS_SV:
gt_filter = self.sv_gt_filter
control_filter = self.sv_con_gt_filter
gt_filter_args['svtype'] = record.info.get('SVTYPE', '')
else:
gt_filter = self.gt_filter
control_filter = self.con_gt_filter
skip_fam = set()
added_prs = OrderedDict()
for i in range(len(record.alts)):
if ignore_alleles and ignore_alleles[i]:
continue
alt = i + 1
skip_allele = False
fams_with_allele = []
for un in self.unaffected:
if record.samples[un]['GT'] == (alt, alt):
if control_filter.gt_is_ok(record.samples, un, alt,
**gt_filter_args):
# hom in a control - skip allele
skip_allele = True
break
if skip_allele:
continue
for fid in self.families:
if fid in skip_fam:
continue
have_allele = set() # affecteds carrying this allele
for aff in self._fam_to_aff[fid]:
# check all affecteds carry this allele
if (alt in record.samples[aff]['GT'] and
gt_filter.gt_is_ok(record.samples, aff, alt,
**gt_filter_args)):
have_allele.add(aff)
else:
break
if have_allele == self._fam_to_aff[fid]:
# all affecteds in family carry allele
fams_with_allele.append(fid)
if fams_with_allele:
# store record and consequences
try:
csqs = []
for j in range(len(record_csqs)):
if ignore_csq and ignore_csq[j]:
continue
if record_csqs[j]['alt_index'] == alt:
# store record and csq details
csqs.append(record_csqs[j])
if csqs:
stored = True
alt_counts = self._get_allele_counts(alt, record)
pr = PotentialSegregant(
record=record, allele=alt, csqs=csqs,
allele_counts=alt_counts,
families=fams_with_allele,
feature_label=self.feature_label)
for feat in pr.features:
if feat in added_prs:
added_prs[feat][pr.alt_id] = pr
else:
added_prs[feat] = OrderedDict(
[(pr.alt_id, pr)])
if feat in self._potential_recessives:
self._potential_recessives[feat][pr.alt_id] = pr
else:
self._potential_recessives[feat] = OrderedDict(
[(pr.alt_id, pr)])
except KeyError:
raise RuntimeError("Could not identify CSQ or ANN " +
"fields in VCF header. Please ensure " +
"your input is annotated with " +
"Ensembl's VEP to perform recessive " +
"filtering")
return stored
def process_potential_recessives(self, final=False):
'''
Check whether stored PotentialSegregant alleles make up
biallelic variation in the same transcript for affected
individuals/families. Adds labels to INFO fields of VCF
records and returns an OrderedDict of 'var_ids' to
lists of PotentialSegregant objects that appear to
segregate consistent with recessive inheritance.
Clears the cache of stored PotentialSegregant alleles.
'''
        segregating = OrderedDict()  # key=alt_id, val=SegregatingVariant
for feat, prs in self._potential_recessives.items():
if not final and feat in self._current_features:
continue
            feat_segregating = []  # list of tuples of values for creating SegregatingVariant
un_hets = defaultdict(list) # store het alleles carried by each unaffected
aff_hets = defaultdict(list) # store het alleles carried by each affected
biallelics = defaultdict(list) # store biallelic combinations for affecteds
for pid, p in prs.items():
for un in self.unaffected:
if p.allele_counts[un] == 1: # already checked for homs when adding
# store allele carried in this unaffected
un_hets[un].append(pid)
for aff in (x for x in self.affected
if self.ped.fid_from_iid(x) in p.families):
if p.allele_counts[aff] == 1:
aff_hets[aff].append(pid)
elif p.allele_counts[aff] == 2:
biallelics[aff].append(tuple([pid]))
incompatibles = [] # create a list of sets of incompatible hets
for hets in un_hets.values():
if len(hets):
incompatibles.append(set(hets))
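            # pair up het alleles in each affected; reject a pair if any
            # unaffected individual carries both alleles of that pair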
for aff, hets in aff_hets.items():
for i in range(len(hets)):
for j in range(i+1, len(hets)):
incomp = False
for iset in incompatibles:
if iset.issuperset([hets[i], hets[j]]):
incomp = True
break
if not incomp:
if not prs[hets[i]].record.in_cis_with(sample=aff,
allele=prs[hets[i]].allele,
other=prs[hets[j]].record,
other_allele=prs[hets[j]].allele):
# check phase groups in case alleles in cis
biallelics[aff].append(
tuple([hets[i], hets[j]]))
if not biallelics:
continue
# see if all affecteds in the same family share the same biallelics
for fid, affs in self._fam_to_aff.items():
b_affs = set(x for x in affs if x in biallelics)
if len(b_affs) == 0 or b_affs != affs:
continue
affs = list(affs)
absent_in_aff = False
for i in range(len(affs)):
for bi in biallelics[affs[i]]:
for j in range(i+1, len(affs)):
if bi not in biallelics[affs[j]]:
absent_in_aff = True
break
if not absent_in_aff:
segs, de_novo = self._check_parents(feat, bi, affs)
if not segs:
continue
if len(bi) == 1:
model = 'homozygous'
else:
model = 'compound_het'
for bi_pr in (prs[x] for x in bi):
feat_segregating.append((bi_pr, affs, [fid],
model, [feat],
de_novo[bi_pr.alt_id],
self.prefix))
fam_count = len(set([fam for tup in feat_segregating for fam in
tup[2]]))
if fam_count >= self.min_families:
for tp in feat_segregating:
if tp[0] in segregating:
segregating[tp[0]].add_samples(*tp[1:6])
else:
segregating[tp[0]] = SegregatingVariant(*tp)
var_to_segregants = OrderedDict()
for sb in segregating.values():
sb.annotate_record(self.report_file, self.annot_fields)
if sb.segregant.var_id in var_to_segregants:
var_to_segregants[sb.segregant.var_id].append(sb.segregant)
else:
var_to_segregants[sb.segregant.var_id] = [sb.segregant]
# clear the cache except for the last entry which will be a new gene
# self._potential_recessives = self._last_added
self._potential_recessives = OrderedDict(
(k, v) for k, v in self._potential_recessives.items() if k in
self._current_features)
return var_to_segregants
def _check_parents(self, feat, alleles, samples):
'''
Check transmission of alleles (i.e. one from each parent)
if parents available. Should have already checked that
alleles are not present in this combination in any
unaffected individual.
Returns a tuple of booleans - first value is True if
parental genotypes do not contradict recessive inheritance
while the second value is a dict of alleles to lists of
        samples in which the allele appears to have arisen
de novo.
'''
dns = defaultdict(list)
counts = []
for al in alleles:
counts.append(self._potential_recessives[feat][al].allele_counts)
if len(counts) == 1: # homozygous
counts.append(counts[0])
for samp in samples:
parents = self.ped.individuals[samp].parents
par = list(x for x in parents if x in self.samples)
if len(par) == 0:
continue
if self.strict:
for p in par:
if None in (counts[i][p] for i in range(len(counts))):
# require both parental genotypes if self.strict
return (False, dns)
if len(par) == 2: # can check for de novos
for i in range(len(counts)):
if counts[i][par[0]] == 0 and counts[i][par[1]] == 0:
# apparent de novo
self.family_filter.logger.debug(
"Apparent de novo allele " +
"{} for sample {} (parents = {} + {}) ".format(
alleles[-i], samp, par[0], par[1]) +
"for recessive combination {}|{}".format(
alleles[0], alleles[-1]))
dns[alleles[-i]].append(samp)
if self.exclude_denovo:
return (False, dns)
elif len(par) == 1:
# if only one parent and both alleles are absent it is more
# likely that the two alleles are in cis from other parent
if counts[0][par[0]] == 0 and counts[1][par[0]] == 0:
                    return (False, dns)
# NOTE: we could do a check here to ensure that any non-affected
# parent does not carry both alleles, but this *SHOULD* have
# already been done earlier in process_potential_recessives
# function for ALL unaffecteds anyway
return (True, dns)
class DominantFilter(InheritanceFilter):
'''
Identify variants that fit a dominant pattern in
given families.
'''
def __init__(self, family_filter, gt_args, min_families=1,
snpeff_mode=False, report_file=None):
'''
Initialize with parent IDs, children IDs and VcfReader
object.
Args:
family_filter:
FamilyFilter object
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
min_families:
Require at least this many families to have a
qualifying variant in a feature before
outputting. Default=1.
snpeff_mode:
Use SnpEff annotations instead of VEP annotations
from input VCF.
'''
self.prefix = "VASE_dominant"
self.header_fields = [
("VASE_dominant_samples",
'Sample IDs for alleles that segregate according to a ' +
'dominant inheritance pattern in an affected sample as' +
' parsed by {}' .format(type(self).__name__)),
('VASE_dominant_unaffected_carrier',
'Sample IDs for unaffected carriers of ' +
'VASE_dominant alleles'),
('VASE_dominant_families',
'Family IDs for VASE_dominant alleles'),
("VASE_dominant_features",
'Features (e.g. transcripts) that contain qualifying ' +
'dominant variants parsed by {}' .format(
type(self).__name__))]
self.annot_fields = ('samples', 'unaffected_carrier', 'families',
'features')
self.report_file = report_file
super().__init__(family_filter, gt_args, min_families=min_families,
snpeff_mode=snpeff_mode, report_file=report_file,)
self.families = tuple(x for x in
self.family_filter.inheritance_patterns
if 'dominant' in
self.family_filter.inheritance_patterns[x])
self.affected = tuple(x for x in family_filter.vcf_affected if
self.ped.individuals[x].fid in self.families)
self.filters = dict()
self._potential_dominants = dict()
self._last_added = OrderedDict()
self._current_features = set()
for fam in self.families:
f_aff = tuple(x for x in self.ped.families[fam].get_affected()
if (x in self.affected or
x in self.family_filter.obligate_carriers[fam]))
f_unaff = tuple(x for x in self.ped.families[fam].get_unaffected()
if (x in self.unaffected and x not in
self.family_filter.obligate_carriers[fam]))
if fam in self.family_filter.obligate_carriers:
self.obligate_carriers = tuple(
x for x in f_aff if x in
self.family_filter.obligate_carriers[fam])
else:
self.obligate_carriers = ()
dom_filter = SampleFilter(family_filter.vcf, cases=f_aff,
controls=f_unaff, confirm_missing=True,
**gt_args)
self.filters[fam] = dom_filter
self.family_filter.logger.info("Analysing family {} ".format(fam) +
"under a dominant model")
def process_record(self, record, ignore_alleles=[], ignore_csq=[]):
'''
Returns True if an allele segregates consistent with
dominant inheritance.
Args:
record: VaseRecord
ignore_alleles:
List of booleans indicating for each ALT in
order whether it should be ignored in relation
to possible dominant variation (e.g. if MAF is
too high, no likely pathogenic consequence
etc.). This will normally have been generated
by VaseRunner via VcfFilter and/or VepFilter
classes.
'''
dom_alleles = ([[] for i in range(len(record.record.alts))])
fam_alleles = ([[] for i in range(len(record.record.alts))])
ignore_csq = self.check_g2p(record, ignore_csq, 'dominant')
if ignore_csq and all(ignore_csq):
return False
if self.min_families > 1:
self._check_sorted(record.record)
for i in range(len(record.record.alts)):
            if ignore_alleles and ignore_alleles[i]:
continue
allele = i + 1
for fam, dfilter in self.filters.items():
# looking for (potentially shared) de novos in a single family
is_dom = not dfilter.filter(record, allele)
if is_dom:
if self.confirm_heterozygous(record.record, dfilter.cases):
dom_alleles[i].extend(dfilter.cases)
fam_alleles[i].append(fam)
self.family_filter.logger.debug(
"Apparent dominant allele {}:{}-{}/{} ".format(
record.record.chrom, record.record.pos,
record.record.ref,
record.record.alleles[allele]) +
"present in {} ".format(dfilter.cases) +
"and absent in {}".format(dfilter.controls))
segs = []
for i in range(len(dom_alleles)):
if not dom_alleles[i]:
continue
allele = i + 1
csqs = []
record_csqs = getattr(record, self.csq_attribute)
try:
for j in range(len(record_csqs)):
if ignore_csq and ignore_csq[j]:
continue
if record_csqs[j]['alt_index'] == allele:
# store record and csq details
csqs.append(record_csqs[j])
except KeyError:
if self.min_families > 1:
raise RuntimeError("Could not identify CSQ or ANN fields" +
" in VCF header. Please ensure your " +
"input is annotated with Ensembl's " +
"VEP to perform dominant filtering.")
if self.min_families <= 1 or csqs:
a_counts = self._get_allele_counts(allele, record)
pd = PotentialSegregant(record=record, allele=allele,
csqs=csqs, allele_counts=a_counts,
families=fam_alleles[i],
feature_label=self.feature_label)
segs.append(pd)
if self.min_families > 1:
for feat, od in self._last_added.items():
if feat in self._potential_dominants:
self._potential_dominants[feat].update(od)
else:
self._potential_dominants[feat] = od
self._last_added = OrderedDict()
for seg in segs:
for feat in seg.features:
self._last_added[feat] = OrderedDict([(seg.alt_id, seg)])
else:
for seg in segs:
affs = (x for x in self.affected
if x not in self.obligate_carriers and
self.ped.fid_from_iid(x) in seg.families)
sv = SegregatingVariant(seg, affs, seg.families, 'samples',
seg.features, [], self.prefix)
obcs = tuple(x for x in self.obligate_carriers if
self.ped.fid_from_iid(x) in seg.families)
if obcs:
obfs = set(self.ped.fid_from_iid(x) for x in obcs)
sv.add_samples(obcs, obfs, 'unaffected_carrier',
seg.features, [])
sv.annotate_record(self.report_file, self.annot_fields)
return len(segs) > 0
def process_dominants(self, final=False):
'''
Check whether stored PotentialSegregant alleles make up
dominant variation in the same transcript for the minimum
number of families. Adds labels to INFO fields of VCF
records and returns an OrderedDict of 'var_ids' to
lists of PotentialSegregant objects that appear to
constitute dominant variation.
Clears the cache of stored PotentialSegregant alleles.
'''
sds = OrderedDict()
feat_processed = []
if not self._potential_dominants:
            # if cache is empty, we never encountered the next set of features
self._potential_dominants = self._last_added
self._last_added = OrderedDict()
elif final:
for feat in self._last_added:
if feat in self._potential_dominants:
self._potential_dominants[feat].update(
self._last_added[feat])
else:
self._potential_dominants[feat] = self._last_added[feat]
self._last_added = OrderedDict()
for feat, pds in self._potential_dominants.items():
if feat in self._current_features: # still processing this feature
continue
feat_fams = set()
feat_processed.append(feat)
for pid, p in pds.items():
feat_fams.update(p.families)
if len(feat_fams) >= self.min_families:
for p in pds.values():
samps = (x for x in self.affected
if self.ped.fid_from_iid(x) in p.families)
if p.alt_id in sds:
sds[p.alt_id].add_samples(samps, p.families,
'samples', [feat], [])
else:
sv = SegregatingVariant(p, samps, p.families,
'samples', [feat], [],
self.prefix)
sds[p.alt_id] = sv
var_to_segregants = OrderedDict()
for sv in sds.values():
sv.annotate_record(self.report_file, self.annot_fields)
if sv.segregant.var_id in var_to_segregants:
var_to_segregants[sv.segregant.var_id].append(sv.segregant)
else:
var_to_segregants[sv.segregant.var_id] = [sv.segregant]
# clear the cache of processed features
for feat in feat_processed:
del self._potential_dominants[feat]
return var_to_segregants
class DeNovoFilter(InheritanceFilter):
'''
    Identify and output variants occurring in a child and absent from
the parents.
'''
def __init__(self, family_filter, gt_args, min_families=1,
confirm_het=False, snpeff_mode=False, report_file=None):
'''
Initialize with parent IDs, children IDs and VcfReader
object.
Args:
family_filter:
FamilyFilter object
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
min_families:
Require at least this many families to have a
qualifying variant in a feature before
outputting. Default=1.
confirm_het:
If True, apparent de novos are required to be
called as heterozygous. Default=False.
snpeff_mode:
Use SnpEff annotations instead of VEP annotations
from input VCF.
'''
self.prefix = "VASE_de_novo"
self.header_fields = [("VASE_de_novo_samples",
'Samples that carry alleles occurring de novo parsed by ' +
'{}' .format(type(self).__name__)),
('VASE_de_novo_families',
'Family IDs for VASE_de_novo alleles'),
("VASE_de_novo_features",
'Features (e.g. transcripts) that contain qualifying ' +
'de novo variants parsed by {}' .format(
type(self).__name__)),]
self.annot_fields = ('samples', 'families', 'features')
self.report_file = report_file
super().__init__(family_filter, gt_args, min_families=min_families,
snpeff_mode=snpeff_mode, report_file=report_file)
self.families = tuple(x for x in
self.family_filter.inheritance_patterns if
'de_novo' in
self.family_filter.inheritance_patterns[x])
self.affected = tuple(x for x in family_filter.vcf_affected if
self.ped.individuals[x].fid in self.families)
self._potential_denovos = dict()
self._last_added = OrderedDict()
self._current_features = set()
self.confirm_het = confirm_het
self.filters = defaultdict(list)
self.prefix = "VASE_de_novo"
for fam in self.families:
f_aff = tuple(x for x in self.ped.families[fam].get_affected()
if x in self.affected)
par_child_combos = defaultdict(list)
for aff in f_aff:
pars = tuple(x for x in
self.ped.families[fam].individuals[aff].parents
if x in self.samples)
if len(pars) == 2:
par_child_combos[pars].append(aff)
for parents, children in par_child_combos.items():
par_filter = SampleFilter(family_filter.vcf, cases=children,
controls=parents,
confirm_missing=True, **gt_args)
self.filters[fam].append(par_filter)
self.family_filter.logger.info(
"Analysing family {} parents ({}) and children ({})"
.format(fam, str.join(", ", parents),
str.join(", ", children)) +
" combinations under a de novo dominant model")
def process_record(self, record, ignore_alleles=[], ignore_csq=[]):
'''
Returns True if allele is an apparent de novo variant.
Args:
record: VaseRecord
ignore_alleles:
List of booleans indicating for each ALT in
order whether it should be ignored in relation
to possible de novo variation (e.g. if MAF is
too high, no likely pathogenic consequence
etc.). This will normally have been generated
by VaseRunner via VcfFilter and/or VepFilter
classes.
'''
if self.min_families > 1:
self._check_sorted(record.record)
ignore_csq = self.check_g2p(record, ignore_csq, 'de novo')
if ignore_csq and all(ignore_csq):
return False
denovo_alleles = ([[] for i in range(len(record.record.alts))])
fam_alleles = ([[] for i in range(len(record.record.alts))])
for i in range(len(record.alts)):
            if ignore_alleles and ignore_alleles[i]:
continue
allele = i + 1
for fam, filters in self.filters.items():
# looking for (potentially shared) de novos in a single family
dns = []
for dfilter in filters:
is_denovo = not dfilter.filter(record, allele)
if is_denovo:
if (not self.confirm_het or self.confirm_heterozygous(
record.record, dfilter.cases)):
dns.append(dfilter.cases)
self.family_filter.logger.debug(
"Apparent de novo allele {}:{}-{}/{} ".format(
record.record.chrom, record.record.pos,
record.record.ref,
record.record.alleles[allele]) +
"present in {} ".format(dfilter.cases) +
"and absent in {}".format(dfilter.controls))
if len(dns) == len(filters): # all affecteds in fam have dnm
                    for x in dns:
                        denovo_alleles[i].extend(x)
fam_alleles[i].append(fam)
segs = []
for i in range(len(denovo_alleles)):
if not denovo_alleles[i]:
continue
allele = i + 1
csqs = []
try:
record_csqs = getattr(record, self.csq_attribute)
for j in range(len(record_csqs)):
if ignore_csq and ignore_csq[j]:
continue
if record_csqs[j]['alt_index'] == allele:
# store record and csq details
csqs.append(record_csqs[j])
except KeyError:
if self.min_families > 1:
raise RuntimeError("Could not identify CSQ or ANN fields" +
" in VCF header. Please ensure your " +
"input is annotated with Ensembl's " +
"VEP to perform de novo filtering.")
if self.min_families <= 1 or csqs:
a_counts = self._get_allele_counts(allele, record)
pd = PotentialSegregant(record=record, allele=allele,
csqs=csqs, allele_counts=a_counts,
families=fam_alleles[i],
feature_label=self.feature_label)
segs.append(pd)
if self.min_families > 1:
for feat, od in self._last_added.items():
if feat in self._potential_denovos:
self._potential_denovos[feat].update(od)
else:
self._potential_denovos[feat] = od
self._last_added = OrderedDict()
for seg in segs:
for feat in seg.features:
self._last_added[feat] = OrderedDict([(seg.alt_id, seg)])
else:
for seg in segs:
affs = (x for x in self.affected if self.ped.fid_from_iid(x)
in seg.families)
sv = SegregatingVariant(seg, affs, seg.families, 'samples',
seg.features, [], self.prefix)
sv.annotate_record(self.report_file, self.annot_fields)
return len(segs) > 0
def process_de_novos(self, final=False):
'''
Check whether stored PotentialSegregant alleles make up
de novo dominant variation in the same transcript for the
minimum number of families. Adds labels to INFO fields of
VCF records and returns an OrderedDict of 'var_ids' to
lists of PotentialSegregant objects that appear to
constitute de novo dominant variation.
Clears the cache of stored PotentialSegregant alleles.
'''
sds = OrderedDict()
feat_processed = []
if not self._potential_denovos:
            # if cache is empty, we never encountered the next set of features
self._potential_denovos = self._last_added
self._last_added = OrderedDict()
elif final:
for feat in self._last_added:
if feat in self._potential_denovos:
self._potential_denovos[feat].update(
self._last_added[feat])
else:
self._potential_denovos[feat] = self._last_added[feat]
self._last_added = OrderedDict()
for feat, pds in self._potential_denovos.items():
if feat in self._current_features: # still processing this feature
continue
feat_fams = set()
feat_processed.append(feat)
for pid, p in pds.items():
feat_fams.update(p.families)
if len(feat_fams) >= self.min_families:
for p in pds.values():
samps = (x for x in self.affected
if self.ped.fid_from_iid(x) in p.families)
if p.alt_id in sds:
sds[p.alt_id].add_samples(samps, p.families,
'samples', [feat], [])
else:
sv = SegregatingVariant(p, samps, p.families,
'samples', [feat], [],
self.prefix)
sds[p.alt_id] = sv
var_to_segregants = OrderedDict()
for sv in sds.values():
sv.annotate_record(self.report_file, self.annot_fields)
if sv.segregant.var_id in var_to_segregants:
var_to_segregants[sv.segregant.var_id].append(sv.segregant)
else:
var_to_segregants[sv.segregant.var_id] = [sv.segregant]
# clear the cache of processed features
for feat in feat_processed:
del self._potential_denovos[feat]
return var_to_segregants
class ControlFilter(SampleFilter):
''' Filter variants if they are present in a control sample. '''
def __init__(self, vcf, family_filter, gt_args, n_controls=0):
'''
Args:
vcf: Input VcfReader object.
family_filter:
FamilyFilter object containing information on
which samples are controls in the input VCF.
gt_args:
A dict of arguments to use for filtering
genotypes. These should all correspond to
arguments to provide to SampleFilter objects.
n_controls:
Minimum number of controls required to carry an
ALT allele for it to be filtered. Alleles will
only be filtered if carried by this number of
controls or more. Default=0.
'''
if n_controls and n_controls > len(family_filter.vcf_unaffected):
n_controls = len(family_filter.vcf_unaffected)
super().__init__(vcf, controls=family_filter.vcf_unaffected,
n_controls=n_controls, confirm_missing=False,
**gt_args)
class SegregatingVariant(object):
'''
Stores details of alleles that segregate in a manner consistent
with inheritance pattern.
'''
__slots__ = ['recessive', 'samples', 'families', 'model', 'features',
'segregant', 'prefix', 'de_novos']
def __init__(self, segregant, samples, families, model, features,
de_novos=(), prefix='VASE_segregant'):
'''
Initialize with a PotentialSegregant object, an iterable of
sample IDs carrying the PotentialSegregant a string
indicating the model of inheritance (e.g. 'compound_het'),
the name of the associated features (e.g. transcript IDs),
prefix for INFO fields and a list of individuals for whom
the allele appears to have arisen de novo.
'''
self.segregant = segregant
self.samples = list(samples)
self.families = set(families)
self.model = [model] * len(self.samples)
self.features = set(features)
self.prefix = prefix
self.de_novos = set(de_novos)
def __eq__(self, other):
return self.segregant == other.segregant
def __hash__(self):
return hash(self.segregant)
def add_samples(self, samples, families, model, features, de_novos):
''' Add samples with corresponding model of inheritance '''
self.samples.extend(samples)
self.families.update(families)
self.model.extend([model] * (len(self.samples) - len(self.model)))
self.features.update(features)
self.de_novos.update(de_novos)
def annotate_record(self, report_file=None, annot_order=[]):
''' Add INFO field annotations for VcfRecords '''
annots = defaultdict(set)
for i in range(len(self.model)):
k = self.prefix
if self.model[i]:
k += "_" + self.model[i]
annots[k].add(self.samples[i])
for k in annots:
annots[k] = str.join("|", sorted(annots[k]))
annots[self.prefix + '_families'] = str.join("|",
sorted(self.families))
annots[self.prefix + '_features'] = str.join("|",
sorted(self.features))
if self.de_novos:
annots[self.prefix + '_de_novo'] = str.join("|",
sorted(self.de_novos))
converted = self._convert_annotations(annots)
for k, v in converted.items():
self.segregant.record.info[k] = v
if report_file:
report_file.write(self._annot_to_string(annots, annot_order)
+ "\n")
def _annot_to_string(self, annots, annot_order):
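        # one report row per segregant: CSQ columns, then the per-model sample
        # annotations, then the variant's coordinates, alleles and filter status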
s = ''
csq_to_join = []
for k in (x for x in self.segregant.csqs[0] if x != 'Allele'):
csq_to_join.append(str.join("|", (str(self.segregant.csqs[i][k])
if self.segregant.csqs[i][k]
else '.' for i in range(
len(self.segregant.csqs)))))
s = str.join("\t", csq_to_join)
if annot_order:
annot_order = [self.prefix + "_" + x for x in annot_order]
s += "\t" + str.join("\t", (annots[k] if isinstance(annots[k], str)
else '.' for k in annot_order))
else:
s += "\t" + str.join("\t", (annots[k] if isinstance(annots[k], str)
else '.' for k in sorted(annots)))
r = self.segregant.record
allele = r.alleles[self.segregant.allele]
s += "\t" + str.join("\t", (str(x) for x in (r.chrom, r.pos, r.id,
r.ref, r.alt, allele,
r.qual, r.filter_string)))
return s
def _convert_annotations(self, annots):
''' Convert to per-allele (Number=A) format for INFO field '''
converted_annots = dict()
for k, v in annots.items():
if k in self.segregant.record.info:
allele_fields = list(self.segregant.record.info[k])
else:
allele_fields = ['.'] * len(self.segregant.record.alts)
i = self.segregant.allele - 1
allele_fields[i] = v
converted_annots[k] = allele_fields
return converted_annots
class PotentialSegregant(object):
'''
Class for storing variant details for records that might make up
biallelic variants in affected samples.
'''
__slots__ = ['allele', 'allele_counts', 'features', 'families', 'alt_id',
'var_id', 'record', 'csqs']
def __init__(self, record, allele, csqs, allele_counts, families,
feature_label='Feature'):
self.allele = allele
self.allele_counts = allele_counts
self.families = families
self.var_id = "{}:{}-{}/{}".format(record.chrom, record.pos,
record.ref, record.alt)
self.alt_id = "{}:{}-{}/{}".format(record.chrom, record.pos,
record.ref, record.alleles[allele])
self.features = set(x[feature_label] for x in csqs if
x[feature_label] != '')
if not self.features:
# if is intergenic and there is no Feature ID, use var ID
# this way we can capture variants at same site if looking for n>1
# in several families, but won't classify all intergenic variants
# as the same "Feature"
self.features.add(self.var_id.replace(',', '_'))
self.csqs = csqs
self.record = record
def __eq__(self, other):
return self.alt_id == other.alt_id
def __hash__(self):
return hash(self.alt_id)
|
gantzgraf/vape
|
vase/family_filter.py
|
Python
|
gpl-3.0
| 69,896 | 0.000229 |
import inspect
import os
import time
import sys
import numpy as np
import tensorflow as tf
import shutil
import data_engine
VGG_MEAN = [103.939, 116.779, 123.68]
image_height = 720
image_width = 960
feature_height = int(np.ceil(image_height / 16.))
feature_width = int(np.ceil(image_width / 16.))
class RPN:
def __init__(self, vgg16_npy_path=None):
if vgg16_npy_path is None:
            path = inspect.getfile(RPN)
path = os.path.abspath(os.path.join(path, os.pardir))
path = os.path.join(path, 'vgg16.npy')
vgg16_npy_path = path
print path
self.data_dict = np.load(vgg16_npy_path, encoding='latin1').item()
print('npy file loaded')
def build(self, rgb, label, label_weight, bbox_target, bbox_loss_weight, learning_rate):
start_time = time.time()
print('build model started')
# Convert RGB to BGR
red, green, blue = tf.split(rgb, 3, 3)
assert red.get_shape().as_list()[1:] == [image_height, image_width, 1]
assert green.get_shape().as_list()[1:] == [image_height, image_width, 1]
assert blue.get_shape().as_list()[1:] == [image_height, image_width, 1]
bgr = tf.concat([
blue - VGG_MEAN[0],
green - VGG_MEAN[1],
red - VGG_MEAN[2],
],3)
assert bgr.get_shape().as_list()[1:] == [image_height, image_width, 3]
# Conv layer 1
self.conv1_1 = self.conv_layer_const(bgr, 'conv1_1')
self.conv1_2 = self.conv_layer_const(self.conv1_1, 'conv1_2')
self.pool1 = self.max_pool(self.conv1_2, 'pool1')
# Conv layer 2
self.conv2_1 = self.conv_layer_const(self.pool1, 'conv2_1')
self.conv2_2 = self.conv_layer_const(self.conv2_1, 'conv2_2')
self.pool2 = self.max_pool(self.conv2_2, 'pool2')
# Conv layer 3
self.conv3_1, conv3_1_wd = self.conv_layer(self.pool2, 'conv3_1')
self.conv3_2, conv3_2_wd = self.conv_layer(self.conv3_1, 'conv3_2')
self.conv3_3, conv3_3_wd = self.conv_layer(self.conv3_2, 'conv3_3')
self.weight_dacay = conv3_1_wd + conv3_2_wd + conv3_3_wd
self.pool3 = self.max_pool(self.conv3_3, 'pool3')
# Conv layer 4
self.conv4_1, conv4_1_wd = self.conv_layer(self.pool3, 'conv4_1')
self.conv4_2, conv4_2_wd = self.conv_layer(self.conv4_1, 'conv4_2')
self.conv4_3, conv4_3_wd = self.conv_layer(self.conv4_2, 'conv4_3')
self.weight_dacay += conv4_1_wd + conv4_2_wd + conv4_3_wd
self.pool4 = self.max_pool(self.conv4_3, 'pool4')
# Conv layer 5
self.conv5_1, conv5_1_wd = self.conv_layer(self.pool4, 'conv5_1')
self.conv5_2, conv5_2_wd = self.conv_layer(self.conv5_1, 'conv5_2')
self.conv5_3, conv5_3_wd = self.conv_layer(self.conv5_2, 'conv5_3')
self.weight_dacay += conv5_1_wd + conv5_2_wd + conv5_3_wd
# RPN_TEST_6(>=7)
normalization_factor = tf.sqrt(tf.reduce_mean(tf.square(self.conv5_3)))
self.gamma3 = tf.Variable(np.sqrt(2), dtype=tf.float32, name='gamma3')
self.gamma4 = tf.Variable(1.0, dtype=tf.float32, name='gamma4')
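        # rescale pool3/pool4 feature maps to conv5_3's RMS magnitude before
        # concatenation, with learnable per-scale gains (gamma3/gamma4)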
# Pooling to the same size
self.pool3_p = tf.nn.max_pool(self.pool3, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME',
name='pool3_proposal')
# L2 Normalization
self.pool3_p = self.pool3_p / (
tf.sqrt(tf.reduce_mean(tf.square(self.pool3_p))) / normalization_factor) * self.gamma3
self.pool4_p = self.pool4 / (
tf.sqrt(tf.reduce_mean(tf.square(self.pool4))) / normalization_factor) * self.gamma4
# Proposal Convolution
self.conv_proposal_3, conv_proposal_3_wd = self.conv_layer_new(self.pool3_p, 'conv_proposal_3',
kernel_size=[5, 2], out_channel=256, stddev=0.01)
self.relu_proposal_3 = tf.nn.relu(self.conv_proposal_3)
self.conv_proposal_4, conv_proposal_4_wd = self.conv_layer_new(self.pool4_p, 'conv_proposal_4',
kernel_size=[5, 2], out_channel=512, stddev=0.01)
self.relu_proposal_4 = tf.nn.relu(self.conv_proposal_4)
self.conv_proposal_5, conv_proposal_5_wd = self.conv_layer_new(self.conv5_3, 'conv_proposal_5',
kernel_size=[5, 2], out_channel=512, stddev=0.01)
self.relu_proposal_5 = tf.nn.relu(self.conv_proposal_5)
self.weight_dacay += conv_proposal_3_wd + conv_proposal_4_wd + conv_proposal_5_wd
# Concatrate
self.relu_proposal_all = tf.concat( [self.relu_proposal_3, self.relu_proposal_4, self.relu_proposal_5],3)
# RPN_TEST_6(>=7)
self.conv_cls_score, conv_cls_wd = self.conv_layer_new(self.relu_proposal_all, 'conv_cls_score',
kernel_size=[1, 1], out_channel=18, stddev=0.01)
self.conv_bbox_pred, conv_bbox_wd = self.conv_layer_new(self.relu_proposal_all, 'conv_bbox_pred',
kernel_size=[1, 1], out_channel=36, stddev=0.01)
self.weight_dacay += conv_cls_wd + conv_bbox_wd
assert self.conv_cls_score.get_shape().as_list()[1:] == [feature_height, feature_width, 18]
assert self.conv_bbox_pred.get_shape().as_list()[1:] == [feature_height, feature_width, 36]
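        # flatten to one row per anchor: 9 anchors per position with
        # 2 class scores and 4 box offsets each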
self.cls_score = tf.reshape(self.conv_cls_score, [-1, 2])
self.bbox_pred = tf.reshape(self.conv_bbox_pred, [-1, 4])
self.prob = tf.nn.softmax(self.cls_score, name="prob")
self.cross_entropy = tf.reduce_sum(
tf.nn.softmax_cross_entropy_with_logits(labels=label,
logits=self.cls_score) * label_weight) / tf.reduce_sum(label_weight)
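        # smooth L1 (Huber-style) loss on the bbox regression targets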
bbox_error = tf.abs(self.bbox_pred - bbox_target)
bbox_loss = 0.5 * bbox_error * bbox_error * tf.cast(bbox_error < 1, tf.float32) + (bbox_error - 0.5) * tf.cast(
bbox_error >= 1, tf.float32)
self.bb_loss = tf.reduce_sum(
tf.reduce_sum(bbox_loss, reduction_indices=[1]) * bbox_loss_weight) / tf.reduce_sum(bbox_loss_weight)
self.loss = self.cross_entropy + 0.0005 * self.weight_dacay + 0.5 * self.bb_loss
self.train_step = tf.train.MomentumOptimizer(learning_rate, 0.9).minimize(self.loss)
self.data_dict = None
print('build model finished: %ds' % (time.time() - start_time))
def avg_pool(self, bottom, name):
return tf.nn.avg_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name=name)
def max_pool(self, bottom, name):
return tf.nn.max_pool(bottom, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME', name=name)
def conv_layer(self, bottom, name):
with tf.variable_scope(name):
filt = self.get_conv_filter(name)
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
conv_biases = self.get_bias(name)
bias = tf.nn.bias_add(conv, conv_biases)
relu = tf.nn.relu(bias)
weight_dacay = tf.nn.l2_loss(filt, name='weight_dacay')
return relu, weight_dacay
def conv_layer_const(self, bottom, name):
with tf.variable_scope(name):
filt = self.get_conv_filter_const(name)
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
conv_biases = self.get_bias_const(name)
bias = tf.nn.bias_add(conv, conv_biases)
relu = tf.nn.relu(bias)
return relu
def conv_layer_new(self, bottom, name, kernel_size=[3, 3], out_channel=512, stddev=0.01):
with tf.variable_scope(name):
shape = bottom.get_shape().as_list()[-1]
filt = tf.Variable(
tf.random_normal([kernel_size[0], kernel_size[1], shape, out_channel], mean=0.0, stddev=stddev),
name='filter')
conv_biases = tf.Variable(tf.zeros([out_channel]), name='biases')
conv = tf.nn.conv2d(bottom, filt, [1, 1, 1, 1], padding='SAME')
bias = tf.nn.bias_add(conv, conv_biases)
weight_dacay = tf.nn.l2_loss(filt, name='weight_dacay')
return bias, weight_dacay
def get_conv_filter(self, name):
return tf.Variable(self.data_dict[name][0], name='filter')
def get_bias(self, name):
return tf.Variable(self.data_dict[name][1], name='biases')
def get_conv_filter_const(self, name):
return tf.constant(self.data_dict[name][0], name='filter')
def get_bias_const(self, name):
return tf.constant(self.data_dict[name][1], name='biases')
def save(self, save_dir, step=None):
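        # NOTE: reads variable values through the global `sess` created in the
        # __main__ block below; there is no session argument on this method.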
params = {}
for var in tf.trainable_variables():
param_name = var.name.split('/')
if param_name[1] in params.keys():
params[param_name[1]].append(sess.run(var))
else:
params[param_name[1]] = [sess.run(var)]
        if step is None:
step = 100000
np.save(save_dir + 'params_' + str(step) + '.npy', params)
def checkFile(fileName):
if os.path.isfile(fileName):
return True
else:
print fileName, 'is not found!'
exit()
def checkDir(fileName, creat=False):
if os.path.isdir(fileName):
if creat:
shutil.rmtree(fileName)
os.mkdir(fileName)
else:
if creat:
os.mkdir(fileName)
else:
print fileName, 'is not found!'
exit()
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'please input GPU index'
exit()
gpuNow = '/gpu:'+sys.argv[1]
print_time = 100
step = 10000
batch_size = 256
saveTime = 2000
modelSaveDir = './models/'
vggModelPath = './models/vgg16.npy'
imageLoadDir = './yourImagePath/'
anoLoadDir = './yourAnnotationPath/'
checkDir(modelSaveDir, False)
checkDir(imageLoadDir, False)
checkDir(anoLoadDir, False)
with tf.device(gpuNow):
sess = tf.Session()
image = tf.placeholder(tf.float32, [1, image_height, image_width, 3])
label = tf.placeholder(tf.float32, [None, 2])
label_weight = tf.placeholder(tf.float32, [None])
bbox_target = tf.placeholder(tf.float32, [None, 4])
bbox_loss_weight = tf.placeholder(tf.float32, [None])
learning_rate = tf.placeholder(tf.float32)
cnn = RPN(vggModelPath)
with tf.name_scope('content_rpn'):
cnn.build(image, label, label_weight, bbox_target, bbox_loss_weight, learning_rate)
sess.run(tf.initialize_all_variables())
for var in tf.trainable_variables():
print var.name, var.get_shape().as_list(), sess.run(tf.nn.l2_loss(var))
cnnData = data_engine.CNNData(batch_size, imageLoadDir, anoLoadDir)
print 'Training Begin'
train_loss = []
train_cross_entropy = []
train_bbox_loss = []
start_time = time.time()
for i in xrange(1, step + 1):
batch = cnnData.prepare_data()
if i <= 7000:
l_r = 0.001
else:
if i <= 9000:
l_r = 0.0001
else:
l_r = 0.00001
(_, train_loss_iter, train_cross_entropy_iter, train_bbox_loss_iter, cls, bbox) = sess.run(
[cnn.train_step, cnn.loss, cnn.cross_entropy, cnn.bb_loss, cnn.cls_score, cnn.bbox_pred],
feed_dict={image: batch[0], label: batch[1], label_weight: batch[2], bbox_target: batch[3],
bbox_loss_weight: batch[4], learning_rate: l_r})
train_loss.append(train_loss_iter)
if i % print_time == 0:
print ' step :', i, 'time :', time.time() - start_time, 'loss :', np.mean(
train_loss), 'l_r :', l_r
train_loss = []
            if i % saveTime == 0:
cnn.save(modelSaveDir, i)
|
huangshiyu13/RPNplus
|
train.py
|
Python
|
mit
| 12,215 | 0.00393 |
import sys
import os
import re
import shutil
from setuptools import setup
name = 'django-skivvy'
package = 'skivvy'
description = ('Write faster integration tests for Django views – with less '
'code.')
url = 'https://github.com/oliverroick/django-skivvy'
author = 'Oliver Roick'
author_email = 'oliver.roick@gmail.com'
license = 'AGPL'
readme_file = os.path.join(os.path.dirname(__file__), 'README.rst')
with open(readme_file, 'r') as f:
long_description = f.readline().strip()
def get_version(package):
"""
    Return package version as listed in `__version__` in `__init__.py`.
"""
init_py = open(os.path.join(package, '__init__.py')).read()
return re.search("^__version__ = ['\"]([^'\"]+)['\"]",
init_py, re.MULTILINE).group(1)
def get_packages(package):
"""
Return root package and all sub-packages.
"""
return [dirpath
for dirpath, dirnames, filenames in os.walk(package)
if os.path.exists(os.path.join(dirpath, '__init__.py'))]
def get_package_data(package):
"""
Return all files under the root package, that are not in a
package themselves.
"""
walk = [(dirpath.replace(package + os.sep, '', 1), filenames)
for dirpath, dirnames, filenames in os.walk(package)
if not os.path.exists(os.path.join(dirpath, '__init__.py'))]
filepaths = []
for base, filenames in walk:
filepaths.extend([os.path.join(base, filename)
for filename in filenames])
return {package: filepaths}
version = get_version(package)
if sys.argv[-1] == 'publish':
if os.system("pip freeze | grep twine"):
print("twine not installed.\nUse `pip install twine`.\nExiting.")
sys.exit()
shutil.rmtree('dist', ignore_errors=True)
shutil.rmtree('build', ignore_errors=True)
os.system("python setup.py sdist")
os.system("python setup.py bdist_wheel")
os.system("twine upload dist/*")
print("You probably want to also tag the version now:")
print(" git tag -a {0} -m 'version {0}'".format(version))
print(" git push --tags")
sys.exit()
setup(
name=name,
version=version,
url=url,
license=license,
description=description,
long_description=long_description,
author=author,
author_email=author_email,
packages=get_packages(package),
package_data=get_package_data(package),
install_requires=[],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.1',
'Framework :: Django :: 2.2',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Natural Language :: English',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Testing :: Mocking',
]
)
|
Cadasta/django-skivvy
|
setup.py
|
Python
|
agpl-3.0
| 3,207 | 0 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Fabrizio Colonna <colofabrix@tin.it>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
author:
- "Fabrizio Colonna (@ColOfAbRiX)"
module: parted
short_description: Configure block device partitions
version_added: "2.3"
description:
- This module allows configuring block device partition using the C(parted)
command line tool. For a full description of the fields and the options
check the GNU parted manual.
notes:
- When fetching information about a new disk and when the version of parted
installed on the system is before version 3.1, the module queries the kernel
through C(/sys/) to obtain disk information. In this case the units CHS and
CYL are not supported.
requirements:
- This module requires parted version 1.8.3 and above.
- If the version of parted is below 3.1, it requires a Linux version running
the sysfs file system C(/sys/).
options:
device:
description: The block device (disk) where to operate.
required: True
align:
description: Set alignment for newly created partitions.
choices: ['none', 'cylinder', 'minimal', 'optimal']
default: optimal
number:
description:
- The number of the partition to work with or the number of the partition
that will be created. Required when performing any action on the disk,
except fetching information.
unit:
description:
- Selects the current default unit that Parted will use to display
locations and capacities on the disk and to interpret those given by the
user if they are not suffixed by an unit. When fetching information about
a disk, it is always recommended to specify a unit.
choices: [
's', 'B', 'KB', 'KiB', 'MB', 'MiB', 'GB', 'GiB', 'TB', 'TiB', '%', 'cyl',
'chs', 'compact'
]
default: KiB
label:
description: Creates a new disk label.
choices: [
'aix', 'amiga', 'bsd', 'dvh', 'gpt', 'loop', 'mac', 'msdos', 'pc98',
'sun'
]
default: msdos
part_type:
description:
- Is one of 'primary', 'extended' or 'logical' and may be specified only
with 'msdos' or 'dvh' partition tables. A name must be specified for a
'gpt' partition table. Neither part-type nor name may be used with a
'sun' partition table.
choices: ['primary', 'extended', 'logical']
default: primary
part_start:
description:
- Where the partition will start as offset from the beginning of the disk,
that is, the "distance" from the start of the disk. The distance can be
specified with all the units supported by parted (except compat) and
it is case sensitive. E.g. C(10GiB), C(15%).
default: 0%
  part_end:
description:
- Where the partition will end as offset from the beginning of the disk,
that is, the "distance" from the start of the disk. The distance can be
specified with all the units supported by parted (except compat) and
it is case sensitive. E.g. C(10GiB), C(15%).
default: 100%
name:
description:
- Sets the name for the partition number (GPT, Mac, MIPS and PC98 only).
flags:
description: A list of the flags that has to be set on the partition.
state:
description:
- If to create or delete a partition. If set to C(info) the module will
only return the device information.
choices: ['present', 'absent', 'info']
default: info
'''
RETURN = '''
partition_info:
description: Current partition information
returned: success
type: complex
contains:
device:
description: Generic device information.
type: dict
partitions:
description: List of device partitions.
type: list
sample: >
{
"disk": {
"dev": "/dev/sdb",
"logical_block": 512,
"model": "VMware Virtual disk",
"physical_block": 512,
"size": 5.0,
"table": "msdos",
"unit": "gib"
},
"partitions": [{
"begin": 0.0,
"end": 1.0,
"flags": ["boot", "lvm"],
"fstype": "",
"name": "",
"num": 1,
"size": 1.0
}, {
"begin": 1.0,
"end": 5.0,
"flags": [],
"fstype": "",
"name": "",
"num": 2,
"size": 4.0
}]
}
'''
EXAMPLES = """
# Create a new primary partition
- parted:
device: /dev/sdb
number: 1
state: present
# Remove partition number 1
- parted:
device: /dev/sdb
number: 1
state: absent
# Create a new primary partition with a size of 1GiB
- parted:
device: /dev/sdb
number: 1
state: present
part_end: 1GiB
# Create a new primary partition for LVM
- parted:
device: /dev/sdb
number: 2
flags: [ lvm ]
state: present
part_start: 1GiB
# Read device information (always use unit when probing)
- parted: device=/dev/sdb unit=MiB
register: sdb_info
# Remove all partitions from disk
- parted:
device: /dev/sdb
number: "{{ item.num }}"
state: absent
with_items:
- "{{ sdb_info.partitions }}"
"""
from ansible.module_utils.basic import AnsibleModule
import math
import re
import os
# Reference prefixes (International System of Units and IEC)
units_si = ['B', 'KB', 'MB', 'GB', 'TB']
units_iec = ['B', 'KiB', 'MiB', 'GiB', 'TiB']
parted_units = units_si + units_iec + ['s', '%', 'cyl', 'chs', 'compact']
def parse_unit(size_str, unit=''):
"""
Parses a string containing a size of information
"""
matches = re.search(r'^([\d.]+)([\w%]+)?$', size_str)
if matches is None:
# "<cylinder>,<head>,<sector>" format
matches = re.search(r'^(\d+),(\d+),(\d+)$', size_str)
if matches is None:
module.fail_json(
msg="Error interpreting parted size output: '%s'" % size_str
)
size = {
'cylinder': int(matches.group(1)),
'head': int(matches.group(2)),
'sector': int(matches.group(3))
}
unit = 'chs'
else:
# Normal format: "<number>[<unit>]"
if matches.group(2) is not None:
unit = matches.group(2)
size = float(matches.group(1))
return size, unit
def parse_partition_info(parted_output, unit):
"""
Parses the output of parted and transforms the data into
a dictionary.
Parted Machine Parseable Output:
See: https://lists.alioth.debian.org/pipermail/parted-devel/2006-December/00
0573.html
- All lines end with a semicolon (;)
- The first line indicates the units in which the output is expressed.
CHS, CYL and BYT stands for CHS, Cylinder and Bytes respectively.
- The second line is made of disk information in the following format:
"path":"size":"transport-type":"logical-sector-size":"physical-sector-siz
e":"partition-table-type":"model-name";
- If the first line was either CYL or CHS, the next line will contain
information on no. of cylinders, heads, sectors and cylinder size.
- Partition information begins from the next line. This is of the format:
(for BYT)
"number":"begin":"end":"size":"filesystem-type":"partition-name":"flags-s
et";
(for CHS/CYL)
"number":"begin":"end":"filesystem-type":"partition-name":"flags-set";
"""
lines = [x for x in parted_output.split('\n') if x.strip() != '']
# Generic device info
generic_params = lines[1].rstrip(';').split(':')
# The unit is read once, because parted always returns the same unit
size, unit = parse_unit(generic_params[1], unit)
generic = {
'dev': generic_params[0],
'size': size,
'unit': unit.lower(),
'table': generic_params[5],
'model': generic_params[6],
'logical_block': int(generic_params[3]),
'physical_block': int(generic_params[4])
}
# CYL and CHS have an additional line in the output
if unit in ['cyl', 'chs']:
chs_info = lines[2].rstrip(';').split(':')
cyl_size, cyl_unit = parse_unit(chs_info[3])
generic['chs_info'] = {
'cylinders': int(chs_info[0]),
'heads': int(chs_info[1]),
'sectors': int(chs_info[2]),
'cyl_size': cyl_size,
'cyl_size_unit': cyl_unit.lower()
}
lines = lines[1:]
parts = []
for line in lines[2:]:
part_params = line.rstrip(';').split(':')
# CHS use a different format than BYT, but contrary to what stated by
# the author, CYL is the same as BYT. I've tested this undocumented
# behaviour down to parted version 1.8.3, which is the first version
# that supports the machine parseable output.
if unit != 'chs':
size = parse_unit(part_params[3])[0]
fstype = part_params[4]
name = part_params[5]
flags = part_params[6]
else:
size = ""
fstype = part_params[3]
name = part_params[4]
flags = part_params[5]
parts.append({
'num': int(part_params[0]),
'begin': parse_unit(part_params[1])[0],
'end': parse_unit(part_params[2])[0],
'size': size,
'fstype': fstype,
'name': name,
'flags': [f.strip() for f in flags.split(', ') if f != ''],
'unit': unit.lower(),
})
return {'generic': generic, 'partitions': parts}
def format_disk_size(size_bytes, unit):
"""
Formats a size in bytes into a different unit, like parted does. It doesn't
manage CYL and CHS formats, though.
This function has been adapted from https://github.com/Distrotech/parted/blo
b/279d9d869ff472c52b9ec2e180d568f0c99e30b0/libparted/unit.c
"""
global units_si, units_iec
unit = unit.lower()
# Shortcut
if size_bytes == 0:
return 0.0
# Cases where we default to 'compact'
if unit in ['', 'compact', 'cyl', 'chs']:
index = max(0, int(
(math.log10(size_bytes) - 1.0) / 3.0
))
unit = 'b'
if index < len(units_si):
unit = units_si[index]
# Find the appropriate multiplier
multiplier = 1.0
if unit in units_si:
multiplier = 1000.0 ** units_si.index(unit)
elif unit in units_iec:
multiplier = 1024.0 ** units_iec.index(unit)
output = size_bytes / multiplier * (1 + 1E-16)
# Corrections to round up as per IEEE754 standard
if output < 10:
w = output + 0.005
elif output < 100:
w = output + 0.05
else:
w = output + 0.5
if w < 10:
precision = 2
elif w < 100:
precision = 1
else:
precision = 0
# Round and return
return round(output, precision), unit
def get_unlabeled_device_info(device, unit):
"""
Fetches device information directly from the kernel and it is used when
parted cannot work because of a missing label. It always returns a 'unknown'
label.
"""
device_name = os.path.basename(device)
base = "/sys/block/%s" % device_name
vendor = read_record(base + "/device/vendor", "Unknown")
model = read_record(base + "/device/model", "model")
logic_block = int(read_record(base + "/queue/logical_block_size", 0))
phys_block = int(read_record(base + "/queue/physical_block_size", 0))
size_bytes = int(read_record(base + "/size", 0)) * logic_block
size, unit = format_disk_size(size_bytes, unit)
return {
'generic': {
'dev': device,
'table': "unknown",
'size': size,
'unit': unit,
'logical_block': logic_block,
'physical_block': phys_block,
'model': "%s %s" % (vendor, model),
},
'partitions': []
}
def get_device_info(device, unit):
"""
Fetches information about a disk and its partitions and it returns a
dictionary.
"""
global module, parted_exec
# If parted complains about missing labels, it means there are no partitions.
# In this case only, use a custom function to fetch information and emulate
# parted formats for the unit.
label_needed = check_parted_label(device)
if label_needed:
return get_unlabeled_device_info(device, unit)
command = "%s -s -m %s -- unit '%s' print" % (parted_exec, device, unit)
rc, out, err = module.run_command(command)
if rc != 0 and 'unrecognised disk label' not in err:
module.fail_json(msg=(
"Error while getting device information with parted "
"script: '%s'" % command),
rc=rc, out=out, err=err
)
return parse_partition_info(out, unit)
def check_parted_label(device):
"""
Determines if parted needs a label to complete its duties. Versions prior
to 3.1 don't return data when there is no label. For more information see:
http://upstream.rosalinux.ru/changelogs/libparted/3.1/changelog.html
"""
global parted_exec
# Check the version
parted_major, parted_minor, _ = parted_version()
if (parted_major == 3 and parted_minor >= 1) or parted_major > 3:
return False
# Older parted versions return a message in the stdout and RC > 0.
rc, out, err = module.run_command("%s -s -m %s print" % (parted_exec, device))
if rc != 0 and 'unrecognised disk label' in out.lower():
return True
return False
def parted_version():
"""
Returns the major and minor version of parted installed on the system.
"""
global module, parted_exec
rc, out, err = module.run_command("%s --version" % parted_exec)
if rc != 0:
module.fail_json(
msg="Failed to get parted version.", rc=rc, out=out, err=err
)
lines = [x for x in out.split('\n') if x.strip() != '']
if len(lines) == 0:
module.fail_json(msg="Failed to get parted version.", rc=0, out=out)
matches = re.search(r'^parted.+(\d+)\.(\d+)(?:\.(\d+))?$', lines[0])
if matches is None:
module.fail_json(msg="Failed to get parted version.", rc=0, out=out)
# Convert version to numbers
major = int(matches.group(1))
minor = int(matches.group(2))
rev = 0
if matches.group(3) is not None:
rev = int(matches.group(3))
return major, minor, rev
def parted(script, device, align):
"""
Runs a parted script.
"""
global module, parted_exec
if script and not module.check_mode:
command = "%s -s -m -a %s %s -- %s" % (parted_exec, align, device, script)
rc, out, err = module.run_command(command)
if rc != 0:
module.fail_json(
msg="Error while running parted script: %s" % command.strip(),
rc=rc, out=out, err=err
)
def read_record(file_path, default=None):
"""
Reads the first line of a file and returns it.
"""
try:
f = open(file_path, 'r')
try:
return f.readline().strip()
finally:
f.close()
except IOError:
return default
def part_exists(partitions, attribute, number):
"""
Looks if a partition that has a specific value for a specific attribute
actually exists.
"""
return any(
part[attribute] and
part[attribute] == number for part in partitions
)
def check_size_format(size_str):
"""
Checks if the input string is an allowed size
"""
size, unit = parse_unit(size_str)
return unit in parted_units
def main():
global module, units_si, units_iec, parted_exec
changed = False
output_script = ""
script = ""
module = AnsibleModule(
argument_spec={
'device': {'required': True, 'type': 'str'},
'align': {
'default': 'optimal',
'choices': ['none', 'cylinder', 'minimal', 'optimal'],
'type': 'str'
},
'number': {'default': None, 'type': 'int'},
# unit <unit> command
'unit': {
'default': 'KiB',
'choices': parted_units,
'type': 'str'
},
# mklabel <label-type> command
'label': {
'choices': [
'aix', 'amiga', 'bsd', 'dvh', 'gpt', 'loop', 'mac', 'msdos',
'pc98', 'sun'
],
'type': 'str'
},
# mkpart <part-type> [<fs-type>] <start> <end> command
'part_type': {
'default': 'primary',
'choices': ['primary', 'extended', 'logical'],
'type': 'str'
},
'part_start': {'default': '0%', 'type': 'str'},
'part_end': {'default': '100%', 'type': 'str'},
# name <partition> <name> command
'name': {'type': 'str'},
# set <partition> <flag> <state> command
'flags': {'type': 'list'},
# rm/mkpart command
'state': {
'choices': ['present', 'absent', 'info'],
'default': 'info',
'type': 'str'
}
},
supports_check_mode=True,
)
# Data extraction
device = module.params['device']
align = module.params['align']
number = module.params['number']
unit = module.params['unit']
label = module.params['label']
part_type = module.params['part_type']
part_start = module.params['part_start']
part_end = module.params['part_end']
name = module.params['name']
state = module.params['state']
flags = module.params['flags']
# Parted executable
parted_exec = module.get_bin_path('parted', True)
# Conditioning
if number and number < 0:
module.fail_json(msg="The partition number must be non negative.")
if not check_size_format(part_start):
module.fail_json(
msg="The argument 'part_start' doesn't respect required format."
"The size unit is case sensitive.",
err=parse_unit(part_start)
)
if not check_size_format(part_end):
module.fail_json(
msg="The argument 'part_end' doesn't respect required format."
"The size unit is case sensitive.",
err=parse_unit(part_end)
)
# Read the current disk information
current_device = get_device_info(device, unit)
current_parts = current_device['partitions']
if state == 'present':
# Default value for the label
if not label:
label = 'msdos'
# Assign label if required
if current_device['generic'].get('table', None) != label:
script += "mklabel %s " % label
# Create partition if required
if part_type and not part_exists(current_parts, 'num', number):
script += "mkpart %s %s %s " % (
part_type,
part_start,
part_end
)
# Set the unit of the run
if unit and script:
script = "unit %s %s" % (unit, script)
# Execute the script and update the data structure.
# This will create the partition for the next steps
if script:
output_script += script
parted(script, device, align)
changed = True
script = ""
current_parts = get_device_info(device, unit)['partitions']
if part_exists(current_parts, 'num', number) or module.check_mode:
partition = {'flags': []} # Empty structure for the check-mode
if not module.check_mode:
partition = [p for p in current_parts if p['num'] == number][0]
            # Assign name to the partition
if name is not None and partition.get('name', None) != name:
script += "name %s %s " % (number, name)
# Manage flags
if flags:
# Compute only the changes in flags status
flags_off = list(set(partition['flags']) - set(flags))
flags_on = list(set(flags) - set(partition['flags']))
for f in flags_on:
script += "set %s %s on " % (number, f)
for f in flags_off:
script += "set %s %s off " % (number, f)
# Set the unit of the run
if unit and script:
script = "unit %s %s" % (unit, script)
# Execute the script
if script:
output_script += script
changed = True
parted(script, device, align)
elif state == 'absent':
# Remove the partition
if part_exists(current_parts, 'num', number) or module.check_mode:
script = "rm %s " % number
output_script += script
changed = True
parted(script, device, align)
elif state == 'info':
output_script = "unit '%s' print " % unit
# Final status of the device
final_device_status = get_device_info(device, unit)
module.exit_json(
changed=changed,
disk=final_device_status['generic'],
partitions=final_device_status['partitions'],
script=output_script.strip()
)
if __name__ == '__main__':
main()
|
sidartaoliveira/ansible
|
lib/ansible/modules/system/parted.py
|
Python
|
gpl-3.0
| 22,160 | 0.000632 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Radim Rehurek <me@radimrehurek.com>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
import os
from smart_open import smart_open
try:
import cPickle as _pickle
except ImportError:
import pickle as _pickle
from gensim.models.doc2vec import Doc2Vec
from gensim.models.word2vec import Word2Vec
try:
from annoy import AnnoyIndex
except ImportError:
raise ImportError("Annoy has not been installed, if you wish to use the annoy indexer, please run `pip install annoy`")
class AnnoyIndexer(object):
def __init__(self, model=None, num_trees=None):
self.index = None
self.labels = None
self.model = model
self.num_trees = num_trees
if model and num_trees:
if isinstance(self.model, Doc2Vec):
self.build_from_doc2vec()
elif isinstance(self.model, Word2Vec):
self.build_from_word2vec()
else:
raise ValueError("Only a Word2Vec or Doc2Vec instance can be used")
def save(self, fname, protocol=2):
fname_dict = fname + '.d'
self.index.save(fname)
d = {'f': self.model.vector_size, 'num_trees': self.num_trees, 'labels': self.labels}
with smart_open(fname_dict, 'wb') as fout:
_pickle.dump(d, fout, protocol=protocol)
def load(self, fname):
fname_dict = fname+'.d'
if not (os.path.exists(fname) and os.path.exists(fname_dict)):
raise IOError(
"Can't find index files '%s' and '%s' - Unable to restore AnnoyIndexer state." % (fname, fname_dict))
else:
with smart_open(fname_dict) as f:
d = _pickle.loads(f.read())
self.num_trees = d['num_trees']
self.index = AnnoyIndex(d['f'])
self.index.load(fname)
self.labels = d['labels']
def build_from_word2vec(self):
"""Build an Annoy index using word vectors from a Word2Vec model"""
self.model.init_sims()
        return self._build_from_model(self.model.wv.syn0norm, self.model.index2word,
                                      self.model.vector_size)
def build_from_doc2vec(self):
"""Build an Annoy index using document vectors from a Doc2Vec model"""
docvecs = self.model.docvecs
docvecs.init_sims()
labels = [docvecs.index_to_doctag(i) for i in range(0, docvecs.count)]
return self._build_from_model(docvecs.doctag_syn0norm, labels, self.model.vector_size)
def _build_from_model(self, vectors, labels, num_features):
index = AnnoyIndex(num_features)
for vector_num, vector in enumerate(vectors):
index.add_item(vector_num, vector)
index.build(self.num_trees)
self.index = index
self.labels = labels
def most_similar(self, vector, num_neighbors):
"""Find the top-N most similar items"""
ids, distances = self.index.get_nns_by_vector(
vector, num_neighbors, include_distances=True)
return [(self.labels[ids[i]], 1 - distances[i] / 2) for i in range(len(ids))]
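# Illustrative usage sketch (assumptions, not from this module: a trained gensim
# Word2Vec model bound to `model`, with 'king' in its vocabulary):
#
#   indexer = AnnoyIndexer(model, num_trees=100)
#   neighbours = indexer.most_similar(model.wv['king'], 10)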
|
olavurmortensen/gensim
|
gensim/similarities/index.py
|
Python
|
lgpl-2.1
| 3,188 | 0.002509 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Deepin, Inc.
# 2011 Wang Yong
# 2012 Reza Faiz A
#
# Author: Wang Yong <lazycat.manatee@gmail.com>
# Maintainer: Wang Yong <lazycat.manatee@gmail.com>
# Reza Faiz A <ylpmiskrad@gmail.com>
# Remixed : Reza Faiz A <ylpmiskrad@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from appItem import *
from draw import *
from lang import __, getDefaultLanguage
import gtk
import updateView
import utils
class UpdatePage(object):
'''Interface for update page.'''
def __init__(self, repoCache, switchStatus, downloadQueue, entryDetailCallback,
sendVoteCallback, fetchVoteCallback, upgradeSelectedPkgsCallback,
addIgnorePkgCallback, showIgnorePageCallback):
'''Init for update page.'''
# Init.
self.repoCache = repoCache
self.box = gtk.VBox()
self.updateView = updateView.UpdateView(
repoCache,
switchStatus,
downloadQueue,
entryDetailCallback,
sendVoteCallback,
fetchVoteCallback,
addIgnorePkgCallback,
)
self.topbar = Topbar(self.repoCache,
self.updateView.selectAllPkg,
self.updateView.unselectAllPkg,
self.updateView.getSelectList,
upgradeSelectedPkgsCallback,
showIgnorePageCallback)
# Connect components.
self.box.pack_start(self.topbar.eventbox, False, False)
self.box.pack_start(self.updateView.scrolledwindow)
self.box.show_all()
class Topbar(object):
'''Top bar.'''
def __init__(self, repoCache,
selectAllPkgCallback, unselectAllPkgCallback,
getSelectListCallback, upgradeSelectedPkgsCallback,
showIgnorePageCallback):
'''Init for top bar.'''
# Init.
self.repoCache = repoCache
self.paddingX = 5
self.selectAllPkgCallback = selectAllPkgCallback
self.unselectAllPkgCallback = unselectAllPkgCallback
self.showIgnorePageCallback = showIgnorePageCallback
self.box = gtk.HBox()
self.boxAlign = gtk.Alignment()
self.boxAlign.set(0.0, 0.5, 1.0, 1.0)
self.boxAlign.set_padding(0, 0, TOPBAR_PADDING_LEFT, TOPBAR_PADDING_UPDATE_RIGHT)
self.boxAlign.add(self.box)
self.eventbox = gtk.EventBox()
drawTopbar(self.eventbox)
upgradeBox = gtk.HBox()
upgradeAlign = gtk.Alignment()
upgradeAlign.set(1.0, 0.0, 0.0, 1.0)
upgradeAlign.add(upgradeBox)
self.numLabel = gtk.Label()
self.ignoreNumBox = gtk.HBox()
self.ignoreNumAlign = gtk.Alignment()
self.ignoreNumAlign.set(0.0, 0.5, 0.0, 0.0)
self.ignoreNumAlign.add(self.ignoreNumBox)
self.selectAllId = "selectAll"
self.unselectAllId = "unselectAll"
self.labelId = self.selectAllId
(self.selectAllBox, self.selectAllEventBox) = setDefaultRadioButton(
__("Select All"), self.selectAllId, self.setLabelId, self.getLabelId, self.selectAllPkgStatus
)
upgradeBox.pack_start(self.selectAllBox, False, False, self.paddingX)
(self.unselectAllBox, self.unselectAllEventBox) = setDefaultRadioButton(
__("Unselect All"), self.unselectAllId, self.setLabelId, self.getLabelId, self.unselectAllPkgStatus
)
upgradeBox.pack_start(self.unselectAllBox, False, False, self.paddingX)
(self.upgradeButton, upgradeButtonAlign) = newActionButton(
"search", 0.0, 0.5, "cell", False, __("Action Update"), BUTTON_FONT_SIZE_MEDIUM, "bigButtonFont")
upgradeBox.pack_start(upgradeButtonAlign, False, False, 26)
self.upgradeButton.connect("button-press-event", lambda w, e: upgradeSelectedPkgsCallback(getSelectListCallback()))
# Connect.
self.updateNum(self.repoCache.getUpgradableNum())
self.numLabel.set_alignment(0.0, 0.5)
self.box.pack_start(self.numLabel, False, False, self.paddingX)
self.box.pack_start(self.ignoreNumAlign, True, True, self.paddingX)
self.box.pack_start(upgradeAlign, True, True, self.paddingX)
self.eventbox.add(self.boxAlign)
self.updateIgnoreNum(self.repoCache.getIgnoreNum())
def selectAllPkgStatus(self):
'''Select all pkg status.'''
self.selectAllEventBox.queue_draw()
self.unselectAllEventBox.queue_draw()
self.selectAllPkgCallback()
def unselectAllPkgStatus(self):
        '''Unselect all pkg status.'''
self.selectAllEventBox.queue_draw()
self.unselectAllEventBox.queue_draw()
self.unselectAllPkgCallback()
def setLabelId(self, lId):
'''Set label id.'''
self.labelId = lId
def getLabelId(self):
'''Get label id.'''
return self.labelId
def updateIgnoreNum(self, ignoreNum):
'''Update ignore number label.'''
utils.containerRemoveAll(self.ignoreNumBox)
if ignoreNum > 0:
(ignoreLabel, ignoreEventBox) = setDefaultClickableDynamicLabel(
__("No Notify UpdatePage") % (ignoreNum),
"topbarButton",
)
ignoreEventBox.connect("button-press-event", lambda w, e: self.showIgnorePageCallback())
self.ignoreNumBox.add(ignoreEventBox)
self.ignoreNumBox.show_all()
def updateNum(self, upgradeNum):
'''Update number.'''
if upgradeNum == 0:
markup = ""
else:
markup = (__("Topbar UpdatePage") % (LABEL_FONT_SIZE,
appTheme.getDynamicColor("topbarNum").getColor(),
LABEL_FONT_SIZE,
str(upgradeNum),
LABEL_FONT_SIZE))
self.numLabel.set_markup(markup)
# LocalWords: efe
|
Zulfikarlatief/tealinux-software-center
|
src/updatePage.py
|
Python
|
gpl-3.0
| 6,849 | 0.007446 |
# coding=utf8
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import os
from os.path import join
import tempfile
import shutil
from six.moves import configparser
import pytest
from tests import setenv, test_doc0
from knowhow.index import Index
import knowhow.util as util
@pytest.fixture
def tmpd(request):
tempdir = tempfile.mkdtemp()
request.addfinalizer(lambda: shutil.rmtree(tempdir))
return tempdir
@pytest.fixture
def conf():
try:
c = configparser.SafeConfigParser()
except AttributeError:
c = configparser.ConfigParser()
c.add_section("main")
c.set("main", "data", util.decode("/app/data"))
return c
@pytest.fixture
def conf_path(conf, tmpd):
path = join(tmpd, "knowhow.ini")
with open(path, "w") as f:
conf.write(f)
return path
@pytest.fixture
def tmp_app_index_dir_paths(tmpd):
app_dir = join(tmpd, "app")
index_dir = join(tmpd, "index")
return tmpd, app_dir, index_dir
@pytest.fixture
def tmp_app_index_dirs(tmp_app_index_dir_paths):
tmpd, appd, indexd = tmp_app_index_dir_paths
os.mkdir(appd)
os.mkdir(indexd)
return tmpd, appd, indexd
@pytest.fixture
def index_empty(request, tmp_app_index_dirs):
_, app_dir, index_dir = tmp_app_index_dirs
orig_home = os.environ.get("KNOWHOW_HOME")
orig_data = os.environ.get("KNOWHOW_DATA")
def restore():
setenv("KNOWHOW_HOME", orig_home)
setenv("KNOWHOW_DATA", orig_data)
request.addfinalizer(restore)
os.environ["KNOWHOW_HOME"] = app_dir
os.environ["KNOWHOW_DATA"] = index_dir
index = Index(app_dir=app_dir, index_dir=index_dir)
index.open(clear=True)
return index
@pytest.fixture
def index_one(index_empty):
index_empty.add(**test_doc0)
return index_empty
|
eukaryote/knowhow
|
tests/conftest.py
|
Python
|
mit
| 1,892 | 0 |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class SexyItem(scrapy.Item):
# define the fields for your item here like:
name = scrapy.Field()
dirname = scrapy.Field()
file_urls = scrapy.Field()
files = scrapy.Field()
|
eryxlee/scrapy
|
sexy/sexy/items.py
|
Python
|
gpl-2.0
| 358 | 0.002793 |
class Printer(object):
"""
"""
def __init__(self):
self._depth = -1
self._str = str
self.emptyPrinter = str
def doprint(self, expr):
"""Returns the pretty representation for expr (as a string)"""
return self._str(self._print(expr))
def _print(self, expr):
self._depth += 1
# See if the class of expr is known, or if one of its super
# classes is known, and use that pretty function
res = None
for cls in expr.__class__.__mro__:
if hasattr(self, '_print_'+cls.__name__):
res = getattr(self, '_print_'+cls.__name__)(expr)
break
# Unknown object, just use its string representation
if res is None:
res = self.emptyPrinter(expr)
self._depth -= 1
return res
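# Illustrative (hypothetical) subclass showing the dispatch above: a printer
# that renders a hypothetical `Symbol` class via `_print_Symbol` and falls back
# to `emptyPrinter` for everything else.
#
#   class MyPrinter(Printer):
#       def _print_Symbol(self, expr):
#           return "sym(%s)" % expr.name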
|
certik/sympy-oldcore
|
sympy/printing/printer.py
|
Python
|
bsd-3-clause
| 847 | 0 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-11-02 10:04
from __future__ import unicode_literals
from django.db import migrations
def update_version_queues(apps, schema_editor):
VersionQueue = apps.get_model('repository', 'VersionQueue')
for queue in VersionQueue.objects.all():
queue.title = queue.preprint.title
queue.abstract = queue.preprint.abstract
queue.save()
class Migration(migrations.Migration):
dependencies = [
('repository', '0019_auto_20201030_1423'),
]
operations = [
migrations.RunPython(
update_version_queues,
reverse_code=migrations.RunPython.noop,
)
]
|
BirkbeckCTP/janeway
|
src/repository/migrations/0020_vq_title_abstracts.py
|
Python
|
agpl-3.0
| 693 | 0.001443 |
# Copyright 2021 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
register_host_test("ec_app")
|
coreboot/chrome-ec
|
zephyr/test/ec_app/BUILD.py
|
Python
|
bsd-3-clause
| 195 | 0 |
################################
# These variables are overwritten by Zenoss when the ZenPack is exported
# or saved. Do not modify them directly here.
# NB: PACKAGES is deprecated
NAME = "ZenPacks.community.SquidMon"
VERSION = "1.0"
AUTHOR = "Josh Baird"
LICENSE = "GPLv2"
NAMESPACE_PACKAGES = ['ZenPacks', 'ZenPacks.community']
PACKAGES = ['ZenPacks', 'ZenPacks.community', 'ZenPacks.community.SquidMon']
INSTALL_REQUIRES = []
COMPAT_ZENOSS_VERS = '>=2.4'
PREV_ZENPACK_NAME = ""
# STOP_REPLACEMENTS
################################
# Zenoss will not overwrite any changes you make below here.
from setuptools import setup, find_packages
setup(
# This ZenPack metadata should usually be edited with the Zenoss
# ZenPack edit page. Whenever the edit page is submitted it will
# overwrite the values below (the ones it knows about) with new values.
name = NAME,
version = VERSION,
author = AUTHOR,
license = LICENSE,
# This is the version spec which indicates what versions of Zenoss
# this ZenPack is compatible with
compatZenossVers = COMPAT_ZENOSS_VERS,
# previousZenPackName is a facility for telling Zenoss that the name
# of this ZenPack has changed. If no ZenPack with the current name is
# installed then a zenpack of this name if installed will be upgraded.
prevZenPackName = PREV_ZENPACK_NAME,
# Indicate to setuptools which namespace packages the zenpack
# participates in
namespace_packages = NAMESPACE_PACKAGES,
# Tell setuptools what packages this zenpack provides.
packages = find_packages(),
# Tell setuptools to figure out for itself which files to include
# in the binary egg when it is built.
include_package_data = True,
# The MANIFEST.in file is the recommended way of including additional files
# in your ZenPack. package_data is another.
#package_data = {}
# Indicate dependencies on other python modules or ZenPacks. This line
# is modified by zenoss when the ZenPack edit page is submitted. Zenoss
# tries to put add/delete the names it manages at the beginning of this
# list, so any manual additions should be added to the end. Things will
    # go poorly if this line is broken into multiple lines or modified too
    # dramatically.
install_requires = INSTALL_REQUIRES,
# Every ZenPack egg must define exactly one zenoss.zenpacks entry point
# of this form.
entry_points = {
'zenoss.zenpacks': '%s = %s' % (NAME, NAME),
},
# All ZenPack eggs must be installed in unzipped form.
zip_safe = False,
)
|
zenoss/ZenPacks.community.SquidMon
|
setup.py
|
Python
|
gpl-2.0
| 2,623 | 0.012962 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from .tinytag import TinyTag, StringWalker, ID3, Ogg, Wave, Flac
__version__ = '0.9.1'
if __name__ == '__main__':
    import sys
    print(TinyTag.get(sys.argv[1]))
|
bradchristensen/cherrymusic
|
tinytag/__init__.py
|
Python
|
gpl-3.0
| 194 | 0.005155 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Monk-ee (magic.monkee.magic@gmail.com).
#
"""__init__.py: Init for unit testing this module."""
__author__ = "monkee"
__maintainer__ = "monk-ee"
__email__ = "magic.monkee.magic@gmail.com"
__status__ = "Development"
import unittest
from PuppetDBClientTestCaseV2 import PuppetDBClientTestCaseV2
from PuppetDBClientTestCaseV3 import PuppetDBClientTestCaseV3
def all_tests():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(PuppetDBClientTestCaseV2))
suite.addTest(unittest.makeSuite(PuppetDBClientTestCaseV3))
return suite
|
monk-ee/AWSBillingToDynamoDB
|
tests/__init__.py
|
Python
|
gpl-2.0
| 623 | 0 |
# -*- coding: utf-8 -*-
# Copyright(C) 2013 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
from decimal import Decimal
from datetime import time, datetime, timedelta
from weboob.tools.browser import BasePage
from weboob.tools.json import json
from weboob.tools.mech import ClientForm
from weboob.capabilities.base import UserError, Currency
__all__ = ['CitiesPage', 'SearchPage', 'SearchErrorPage', 'SearchInProgressPage',
'ResultsPage', 'ForeignPage']
class ForeignPage(BasePage):
def on_loaded(self):
raise UserError('Your IP address is localized in a country not supported by this module (%s). Currently only the French website is supported.' % self.group_dict['country'])
class CitiesPage(BasePage):
def get_stations(self):
result = json.loads(self.document[self.document.find('{'):-2])
return result['CITIES']
class SearchPage(BasePage):
def search(self, departure, arrival, date, age, card, comfort_class):
self.browser.select_form(name='saisie')
self.browser['ORIGIN_CITY'] = departure.encode(self.browser.ENCODING)
self.browser['DESTINATION_CITY'] = arrival.encode(self.browser.ENCODING)
if date is None:
date = datetime.now() + timedelta(hours=1)
elif date < datetime.now():
raise UserError("You cannot look for older departures")
self.browser['OUTWARD_DATE'] = date.strftime('%d/%m/%y')
self.browser['OUTWARD_TIME'] = [str(date.hour)]
self.browser['PASSENGER_1'] = [age]
self.browser['PASSENGER_1_CARD'] = [card]
self.browser['COMFORT_CLASS'] = [str(comfort_class)]
self.browser.controls.append(ClientForm.TextControl('text', 'nbAnimalsForTravel', {'value': ''}))
self.browser['nbAnimalsForTravel'] = '0'
self.browser.submit()
class SearchErrorPage(BasePage):
def on_loaded(self):
p = self.document.getroot().cssselect('div.messagesError p')
if len(p) > 0:
message = p[0].text.strip()
raise UserError(message)
class SearchInProgressPage(BasePage):
def on_loaded(self):
link = self.document.xpath('//a[@id="url_redirect_proposals"]')[0]
self.browser.location(link.attrib['href'])
class ResultsPage(BasePage):
def get_value(self, div, name, last=False):
i = -1 if last else 0
p = div.cssselect(name)[i]
sub = p.find('p')
if sub is not None:
txt = sub.tail.strip()
if txt == '':
p.remove(sub)
else:
return unicode(txt)
return unicode(self.parser.tocleanstring(p))
def parse_hour(self, div, name, last=False):
txt = self.get_value(div, name, last)
hour, minute = map(int, txt.split('h'))
return time(hour, minute)
def iter_results(self):
for div in self.document.getroot().cssselect('div.train_info'):
info = None
price = None
currency = None
for td in div.cssselect('td.price'):
txt = self.parser.tocleanstring(td)
p = Decimal(re.sub('([^\d\.]+)', '', txt))
if price is None or p < price:
info = list(div.cssselect('strong.price_label')[0].itertext())[-1].strip().strip(':')
price = p
currency = Currency.get_currency(txt)
yield {'type': self.get_value(div, 'div.transporteur-txt'),
'time': self.parse_hour(div, 'div.departure div.hour'),
'departure': self.get_value(div, 'div.departure div.station'),
'arrival': self.get_value(div, 'div.arrival div.station', last=True),
'arrival_time': self.parse_hour(div, 'div.arrival div.hour', last=True),
'price': price,
'currency': currency,
'price_info': info,
}
|
yannrouillard/weboob
|
modules/voyagessncf/pages.py
|
Python
|
agpl-3.0
| 4,591 | 0.003921 |
from pymander.exceptions import CantParseLine
from pymander.handlers import LineHandler, RegexLineHandler, ArgparseLineHandler
from pymander.contexts import StandardPrompt
from pymander.commander import Commander
from pymander.decorators import bind_command
class DeeperLineHandler(LineHandler):
def try_execute(self, line):
if line.strip() == 'deeper':
deeper_context = self.context.clone()
deeper_context.name = '{0} / ctx {1}'.format(self.context.name, id(deeper_context))
self.context.write('Going deeper!\nNow in: {0}\n'.format(deeper_context))
return deeper_context
raise CantParseLine(line)
class RaynorLineHandler(LineHandler):
def try_execute(self, line):
if line.strip() == 'kerrigan':
self.context.write('Oh, Sarah...\n')
return
raise CantParseLine(line)
class BerryLineHandler(RegexLineHandler):
@bind_command(r'pick a (?P<berry_kind>\w+)')
def pick_berry(self, berry_kind):
self.context.write('Picked a {0}\n'.format(berry_kind))
@bind_command(r'make (?P<berry_kind>\w+) jam')
def make_jam(self, berry_kind):
self.context.write('Made some {0} jam\n'.format(berry_kind))
class GameLineHandler(ArgparseLineHandler):
@bind_command('play', [
['game', {'type': str, 'default': 'nothing'}],
['--well', {'action': 'store_true'}],
])
def play(self, game, well):
self.context.write('I play {0}{1}\n'.format(game, ' very well' if well else ''))
@bind_command('win')
def win(self):
self.context.write('I just won!\n')
def main():
com = Commander(
StandardPrompt([
DeeperLineHandler(),
BerryLineHandler(),
GameLineHandler(),
RaynorLineHandler(),
])
)
com.mainloop()
if __name__ == '__main__':
main()
|
altvod/pymander
|
examples/simple.py
|
Python
|
mit
| 1,893 | 0.002113 |
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework.routers import DefaultRouter
from sk_map.api.map import MapViewSet, WallViewSet, BoxViewSet, PointViewSet, MenViewSet,\
WallListViewSet, BoxListViewSet, PointListViewSet, MenListViewSet, MapListViewSet
from sk_auth.api.auth import RegisterView, AuthAPIView
from sk_game.api.game import GameViewSet
from sk_skins.api.skins import SkinView
action = {'get': 'retrieve', 'put': 'update', 'delete': 'destroy'}
action_with_patch = {'get': 'retrieve', 'put': 'update', 'delete': 'destroy', 'patch': 'partial_update'}
action_no_pk = {'get': 'list', 'post': 'create'}
router = DefaultRouter()
router.register(r'skins', SkinView)
router.register(r'auth/register', RegisterView)
urlpatterns = router.urls
urlpatterns_game = [
url('^game/(?P<map>\d+)/$', GameViewSet.as_view({'get': 'retrieve', 'patch': 'partial_update'})),
url('^game/$', GameViewSet.as_view({'get': 'retrieve', 'put': 'update', 'delete': 'destroy', 'post': 'create'})),
]
urlpatterns_map = [
    url('^map/(?P<pk>\d+)/$', MapViewSet.as_view(action_with_patch)),
    url('^map/$', MapListViewSet.as_view(action_no_pk)),
]
urlpatterns_map_obj = [
url('^wall/(?P<pk>\d+)/$', WallViewSet.as_view(action)),
url('^wall/$', WallListViewSet.as_view(action_no_pk)),
url('^box/(?P<pk>\d+)/$', BoxViewSet.as_view(action)),
url('^box/$', BoxListViewSet.as_view(action_no_pk)),
url('^point/(?P<pk>\d+)/$', PointViewSet.as_view(action)),
url('^point/$', PointListViewSet.as_view(action_no_pk)),
url('^men/(?P<pk>\d+)/$', MenViewSet.as_view(action)),
url('^men/$', MenListViewSet.as_view(action_no_pk)),
]
urlpatterns_admin =[
url(r'^admin/', include(admin.site.urls)),
]
urlpatterns_auth = [
url(r'^auth/', AuthAPIView.as_view(), name='login_view')
]
patterns_swagger = [
url(r'^docs/', include('rest_framework_swagger.urls')),
]
urlpatterns += urlpatterns_admin
urlpatterns += urlpatterns_auth
urlpatterns += patterns_swagger
urlpatterns += urlpatterns_map_obj
urlpatterns += urlpatterns_game
urlpatterns += urlpatterns_map
|
chepe4pi/sokoban_api
|
sokoban/urls.py
|
Python
|
gpl-2.0
| 2,156 | 0.00603 |
#
# Copyright 2001 - 2016 Ludek Smid [http://www.ospace.net/]
#
# This file is part of Outer Space.
#
# Outer Space is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Outer Space is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outer Space; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import math
import ige.ospace.Const as Const
from ige.IDataHolder import makeIDataHolder
from Techs import noop as techDefaultHandler
def init(configDir):
global techs, Tech
import Techs
Techs.init(configDir)
from Techs import techs, Tech
## General
turnsPerDay = 24
galaxyStartDelay = turnsPerDay * 2
playerTimeout = 60 * 60 * 24 * 28 # 28 days
novicePlayerTimeout = 60 * 60 * 24 * 14 # 14 days
messageTimeout = 60 * 60 * 24 * 14 # 14 days
## New player
startingPopulation = 9000
startingBio = 1000
startingMin = 1000
startingEn = 1000
startingScannerPwr = 100
## Production
maxProdQueueLen = 10
buildOnSamePlanetMod = 1
buildOnAnotherPlanetMod = 2
unusedProdMod = 0.75
# structure economy revamp constants
basePlanetProdProd = 5 # prevents deadlocked planets, makes small planets more competitive
structDefaultHpRatio = 0.1 # structures are build with this percentage of HPs
structDefaultCpCosts = 0.2 # structures costs this amount of what is in XMLs
structFromShipHpRatio = 1.0 # structures from ships are build with this percentage of HPs
structNewPlayerHpRatio = 1.0 # structures from ships are build with this percentage of HPs
structTransferWaste = 0.5 # when replacing building, how much CP of old building is transfered to new one
structTransferMaxRatio = 0.5 # when replacing building, what is maximum effect of transfered CPs
# as we now build structures damaged, repair and decay are part of economy revamp
# repair ratio is dynamic on cost of building. it's full of magic constants
# goal is to have 480 CP building to repair in ~2 days (which is twice the legacy repair
# ratio), and the most expansive ones (adv. stargate) ~ 6 days.
# We are using log10() as it's quicker than log()
_magicBase = 1.0 / (turnsPerDay * 2)
_repairMagicBase = math.log10(480 * structDefaultCpCosts) ** 2 * _magicBase
repairRatioFunc = lambda x: _repairMagicBase / math.log10(x) ** 2
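# Sanity check of the constants above: repairRatioFunc(480 * structDefaultCpCosts)
# == _magicBase == 1/48 per turn, i.e. ~2 days at 24 turns per day, matching the
# stated goal for a 480 CP building.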
# building decay ratio bigger or equivalent of 480 CP repair
decayRatioFunc = lambda x: min( _magicBase, repairRatioFunc(x))
decayProdQueue = 0.02
## Environment
envInterval = 1000
envAutoMod = 10.0
envMax = 200
envSelfUpgradeChance = {"H": 5, "C": 1, "B": 500, "m": 100, "r": 100, "p": 100, "e": 100} # in ten thousandths (10 000)
planetSpec = {}
planetSpec[u'A'] = makeIDataHolder(
minBio = 0,
maxBio = 0,
upgradeTo = None,
downgradeTo = None,
)
planetSpec[u'G'] = makeIDataHolder(
minBio = 0,
maxBio = 0,
upgradeTo = None,
downgradeTo = None,
)
planetSpec[u'C'] = makeIDataHolder(
minBio = 0,
maxBio = 6,
upgradeTo = u'D',
upgradeEnReqs = (5, 180),
downgradeTo = None,
)
planetSpec[u'R'] = makeIDataHolder(
minBio = 0,
maxBio = 6,
upgradeTo = u'D',
upgradeEnReqs = (5, 180),
downgradeTo = None,
)
planetSpec[u'D'] = makeIDataHolder(
minBio = 6,
maxBio = 12,
upgradeTo = u'H',
upgradeEnReqs = (25, 150),
downgradeTo = u'R',
)
planetSpec[u'H'] = makeIDataHolder(
minBio = 12,
maxBio = 25,
upgradeTo = u'M',
upgradeEnReqs = (50, 125),
downgradeTo = u'D',
)
planetSpec[u'M'] = makeIDataHolder(
minBio = 25,
maxBio = 75,
upgradeTo = u'E',
upgradeEnReqs = (50, 100),
downgradeTo = u'H',
)
planetSpec[u'E'] = makeIDataHolder(
minBio = 75,
maxBio = 125,
upgradeTo = u"I",
upgradeEnReqs = (50, 100),
downgradeTo = u'M',
)
planetSpec[u"I"] = makeIDataHolder( # gaia
minBio = 125,
maxBio = 200,
upgradeTo = None,
downgradeTo = u"E",
)
## New colony settings
colonyMinBio = 600
colonyMinMin = 600
colonyMinEn = 600
## Storage
popPerSlot = 0
bioPerSlot = 0
minPerSlot = 0
enPerSlot = 0
popBaseStor = 4800
bioBaseStor = 4800
minBaseStor = 4800
enBaseStor = 4800
autoMinStorTurns = 2
tlPopReserve = 100
## Resources
stratResRate = turnsPerDay * 6
stratResAmountBig = 10
stratResAmountSmall = 1
## Population
popGrowthRate = 0.02
popMinGrowthRate = int(5000 * popGrowthRate) # Increase the Minimum Population Growth from 20 to 100 per turn
popDieRate = 0.1
popMinDieRate = 100
popKillMod = 0.25
popSlotKillMod = 5 # how many people per 1 DMG get killed when slot is hit
popSlotHP = 100 # HP of habitable structures on slot (where people live)
## Research
maxRsrchQueueLen = 10
techBaseImprovement = 1
techMaxImprovement = 5
techImprCostMod = {1:480, 2:480, 3:720, 4:960, 5:1200, 6: 1440, 7: 1680} #per level
sciPtsPerCitizen = {1: 0, 2: 0.00075, 3: 0.00150, 4: 0.00175, 5: 0.00200, 6: 0.002125, 7: 0.00225, 99: 0} #per level
techImprEff = {1:0.750, 2:0.875, 3:1.000, 4:1.125, 5:1.250} #per sublevel
#maxSciPtsTL = {1:100, 2:200, 3:300, 4:400, 5:500, 6:600, 7:700}
#sciPtsStepFraction = 0.25
## Scanner
maxSignature = 100
scannerMinPwr = 1
scannerMaxPwr = 150
level1InfoScanPwr = 1000
level2InfoScanPwr = 1200
level3InfoScanPwr = 1400
level4InfoScanPwr = 1600
maxScanPwr = 200000
mapForgetScanPwr = 0.94
partnerScanPwr = 300000
## Fleets
maxCmdQueueLen = 10
signatureBase = 1.10
operProdRatio = 0.001
combatRetreatWait = 3
starGateDamage = 0.2 # damage for 100% speed boost (double for 200%, etc...)
shipDecayRatio = 0.04
maxDamageAbsorb = 5 # max absorbed damage for tech "damageAbsorb" property.
# max seq_mod equipments of equipType; anything not in list is unlimited
maxEquipType = {
'ECM' : 1, # +Missile DEF
'Combat Bonuses' : 1, # +%ATT, +%DEF
'Combat Modifiers' : 1, # +ATT, +DEF
'Shields' : 1, # not hardshields
'Stealth' : 1,
'Auto Repair' : 1,
}
## Buildings
plShieldRegen = 0.05 #regen rate of planetary shield
## Diplomacy
baseRelationChange = -5
relLostWhenAttacked = -1000000
defaultRelation = Const.REL_NEUTRAL
contactTimeout = 6 * turnsPerDay
voteForImpAnnounceOffset = 2 * turnsPerDay
voteForImpPeriod = 6 * turnsPerDay
ratioNeededForImp = 0.6666
pactDescrs = {}
pactDescrs[Const.PACT_ALLOW_CIVILIAN_SHIPS] = makeIDataHolder(
targetRel = 500,
relChng = 10,
validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_ALLOW_MILITARY_SHIPS] = makeIDataHolder(
targetRel = 750,
relChng = 8,
validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_ALLOW_TANKING] = makeIDataHolder(
targetRel = 750,
relChng = 7,
validityInterval = (0, 10000),
)
pactDescrs[Const.PACT_MINOR_CP_COOP] = makeIDataHolder(
targetRel = 1000,
relChng = 6,
effectivity = 0.05,
validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MAJOR_CP_COOP] = makeIDataHolder(
targetRel = 1000,
relChng = 1,
effectivity = 0.05,
validityInterval = (875, 10000),
)
pactDescrs[Const.PACT_SHARE_SCANNER] = makeIDataHolder(
targetRel = 1000,
relChng = 1,
validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MINOR_SCI_COOP] = makeIDataHolder(
targetRel = 750,
relChng = 1,
effectivity = 0.05,
validityInterval = (625, 10000),
)
pactDescrs[Const.PACT_MAJOR_SCI_COOP] = makeIDataHolder(
targetRel = 1000,
relChng = 1,
effectivity = 0.05,
validityInterval = (875, 10000),
)
## Morale
baseGovPwr = 50000
maxMorale = 100.0
minMoraleTrgt = 30.0
revoltThr = 25.0
moraleChngPerc = 0.03
moraleHighPopPenalty = 2.0
moraleBasePop = 10000
moraleLowPop = 5000
moraleLowPopBonus = 40.0
moraleLostWhenSurrender = 0.0
moraleLostNoFood = 1.0
moraleModPlHit = 96.0 # how many morale point per 1 per cent of damage
moralePerPointChance = 5.0 # for every point below revoltThr % chance for revolt
moraleProdStep = 10
moraleProdBonus = [-0.875, -0.75, -0.625, -0.50, -0.375, -0.25, -0.125, 0.0, 0.0, 0.125, 0.25]
# we expect pop reserve from TL to get into unemployed
# tlPopReserve * TL1
# if we get no reserve, there is a hit, if we get at least
# the reserve, it's a bonus, linear in between
unemployedMoraleLow = -20
unemployedMoraleHigh = 10
## Revolt
revoltDestrBio = 0.05
revoltDestrMin = 0.05
revoltDestrEn = 0.05
revoltPenalty = 0.75
## Messages
messageMaxAge = turnsPerDay * 3
## Projects
projECOINIT3PlBio = 1
## Ships
shipImprovementMod = 1.05
shipMaxImprovements = 5
shipMaxDesigns = 40
shipExpToLevel = {0:1, 1:2, 2:2, 3:3, 4:3, 5:3, 6:3, 7:4, 8:4, 9:4, 10:4, 11:4,
12:4, 13:4, 15:5}
shipDefLevel = 5
shipLevelEff = {1:0.50, 2:0.75, 3:1.00, 4:1.25, 5:1.50}
shipBaseExpMod = 20
shipBaseExp = {0:10, 1:20, 2:40, 3:80, 4:160}
shipTargetPerc = [25, 50, 90, 100]
shipMinUpgrade = 120
shipUpgradeMod = 1.375
shipUpgradePts = [1, 3, 10]
weaponDmgDegrade = [1.0, 0.5, 0.25, 0.125]
## EMR
emrMinDuration = 36
emrMaxDuration = 60
emrPeriod = 576
emrSeasons = [None, None, None, None]
emrSeasons[0] = makeIDataHolder(
name = "spring",
startTime = 0,
endTime = 143,
emrLevelMin = 0.75,
emrLevelMax = 1.25,
)
emrSeasons[1] = makeIDataHolder(
name = "summer",
startTime = 144,
endTime = 287,
emrLevelMin = 0.50,
emrLevelMax = 1.00,
)
emrSeasons[2] = makeIDataHolder(
name = "fall",
startTime = 287,
endTime = 431,
emrLevelMin = 0.50,
emrLevelMax = 1.50,
)
emrSeasons[3] = makeIDataHolder(
name = "winter",
startTime = 432,
endTime = 575,
emrLevelMin = 1.00,
emrLevelMax = 1.50,
)
## Pirates
## General
pirateInfluenceRange = 7.5 # in parsecs
pirateGovPwr = int(500000 * 1.25)
## Fame
pirateGainFamePropability = lambda d: 2 - d * 0.2
pirateLoseFameProbability = lambda d: 1 - (15 - d) * 0.2
pirateCaptureInRangeFame = 1
pirateSurvivalFame = 1
pirateCaptureOutOfRangeFame = -1
## Colonization
pirateColonyCostMod = 1.5 # base multiplier - all other multipliers are multiplied by this
pirateTL3StratResColonyCostMod = 0.25
piratePlayerZoneCostMod = 1.25
pirateColonyFameZoneCost = lambda d: min(d * 0.1 + pirateTL3StratResColonyCostMod,1)
pirateColonyPlayerZoneCost = lambda d: piratePlayerZoneCostMod + (d - 15) * 0.01 * piratePlayerZoneCostMod
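# Illustrative examples (not part of the original rules module) of evaluating
# the fame and colonisation formulas above; the probabilities are presumably
# clamped to [0, 1] elsewhere, and how the engine combines the cost modifiers
# is not shown here:
#   pirateGainFamePropability(5.0)    -> 1.0
#   pirateLoseFameProbability(10.0)   -> 0.0
#   pirateColonyFameZoneCost(5.0)     -> 0.75
#   pirateColonyPlayerZoneCost(20.0)  -> 1.3125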
## Techs
pirateCanStealImprovements = 3
pirateGrantHSE = 60*24*3600 #60 days; AI only
pirateGrantASSEM = 105*24*3600 #105 days; AI only
pirateGrantCOND = 105*24*3600 #105 days; AI only
## Timed events (not implemented)
pirateTimerMod = 3*24*3600 # +/- up to 3 days for each grant
pirateTimerRum = 20*24*3600 #20 days; grant Brewery, Rum strategic resource, and Drunken Factory (110% Pirate Prison; requires Rum)
pirateTimerEnslavement = 60*24*3600 #60 days; grant Prison
pirateTimerEDENStructure = 120*24*3600 #120 days; grant EDEN Factory (you have discovered a prototype factory...; 135% Pirate Prison; requires Rum)
pirateTimerBerserk = 150*24*3600 #150 days; grant "Berserk" ship module (major defense penalty; major ATT bonus; requires Rum)
pirateTimerSlaveMine = 180*24*3600 #180 days; grant Slave Mine (mining facility with hamster wheel for power; 160% Pirate Prison; requires Rum)
## Bonuses
galLeaderBonus = 0.05
galImperatorBonus = 0.10
## Combat
combatStructureHitMod = 0.75
combatShipHitMod = 0.75
combatHitXferMod = 3.00
combatStructDefense = 1
|
dahaic/outerspace
|
server/lib/ige/ospace/Rules/__init__.py
|
Python
|
gpl-2.0
| 11,626 | 0.027697 |
"""__Main__."""
import sys
import os
import logging
import argparse
import traceback
import shelve
from datetime import datetime
from CONSTANTS import CONSTANTS
from settings.settings import load_config, load_core, load_remote, load_email
from settings.settings import load_html, load_sms
from core import read_structure, readStructureFromFile, updateStructure
from core import clean_video_db, syncDirTree, transferLongVersions
from core import executeToDoFile, build_html_report, umount
from core import check_and_correct_videos_errors, clean_remote
from core import get_new_file_ids_from_structure, mount, check_mkv_videos
from notifications import send_sms_notification, send_mail_report, send_mail_log
def get_args():
"""Get args."""
parser = argparse.ArgumentParser(description='pyHomeVM')
parser.add_argument('-c', '--config_file_path',
action='store',
default='settings/dev_config.cfg',
help='path to config file that is to be used.')
parser.add_argument('-s', '--sms', help='Enables sms notifications',
action='store_true')
parser.add_argument('-l', '--log', help='Enables log sending by e-mail',
action='store_true')
parser.add_argument('-r', '--report',
help='Enables html report sending by e-mail',
action='store_true')
parser.add_argument('-rem', '--remote',
help='Enables transfer of long versions to remote storage',
action='store_true')
parser.add_argument('-b', '--backup',
help='Enables backup of first videos',
action='store_true')
parser.add_argument('-stats',
help='Gets you statistics about your videos',
action='store_true')
args = parser.parse_args()
return args
def load_logger():
"""Load logger."""
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(CONSTANTS['log_file_path'])
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
return logger
def main(argv=None):
"""Run main."""
start_time = datetime.now()
args = get_args() # Get args
logger = load_logger() # Set logger
logger.info('PROGRAM STARTED')
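    # Single-instance guard: if a previous run is still active, mail the log and exit.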
pid = str(os.getpid())
pidfile = "/tmp/pyHomeVM.pid"
config = load_config(args.config_file_path) # load config file
if os.path.isfile(pidfile):
logger.info('Program already running')
html = load_html(config)
email = load_email(config)
send_mail_log(CONSTANTS['log_file_path'], email, html)
sys.exit()
    with open(pidfile, 'w') as pidf:
        pidf.write(pid)
(ffmpeg, local) = load_core(config) # load core configs
remote = load_remote(config)
html = load_html(config)
sms = load_sms(config)
email = load_email(config)
if(args.log):
email = load_email(config)
if(args.report):
html = load_html(config)
if(args.remote):
remote = load_remote(config)
if(args.sms):
sms = load_sms(config)
video_db = shelve.open(CONSTANTS['video_db_path'], writeback=True)
try:
if not os.path.exists(CONSTANTS['structure_file_path']):
raise Exception("Directory structure definition file not found.")
past_structure = readStructureFromFile(CONSTANTS)
except Exception:
logger.info(traceback.format_exc())
logger.info('{} not found'.format(CONSTANTS['structure_file_path']))
past_structure = {} # Start as new
new_structure = read_structure(local)
video_ids = get_new_file_ids_from_structure(new_structure, video_db)
check_and_correct_videos_errors(video_ids, video_db, local, ffmpeg)
    logger.info('Checked for and corrected video errors')
html_data = updateStructure(
past_structure,
read_structure(local),
local,
ffmpeg,
remote,
video_db)
sms_sent_file = os.path.join(CONSTANTS['script_root_dir'], 'sms_sent')
if(mount(remote)):
        logger.info('Mount successful')
syncDirTree(local, remote)
transferLongVersions(local, remote, video_db)
if(os.path.isfile(CONSTANTS['todo_file_path'])):
executeToDoFile(CONSTANTS['todo_file_path'], local, CONSTANTS)
if(os.path.exists(sms_sent_file)):
os.remove(sms_sent_file)
logger.info('sms_sent file has been deleted')
clean_remote(remote)
umount(remote)
else:
        logger.info('Mount unsuccessful')
if(not os.path.exists(sms_sent_file) and args.sms):
send_sms_notification(sms)
        logger.info('SMS sent')
with open(sms_sent_file, 'w') as sms_not:
msg = 'SMS has been sent {}'.format(CONSTANTS['TODAY'])
sms_not.write(msg)
logger.info(msg)
if(args.report and (
html_data['new'] != '' or
html_data['modified'] != '' or
html_data['deleted'] != '' or
html_data['moved'] != '')):
html_report = build_html_report(html_data, CONSTANTS, html)
send_mail_report(html_report, email)
logger.info('Mail report sent')
if(args.log):
send_mail_log(CONSTANTS['log_file_path'], email, html)
logger.info('log file sent')
clean_video_db(video_db)
check_mkv_videos(local, video_db)
logger.info('DB cleaned')
video_db.close()
logger.info('Script ran in {}'.format(datetime.now() - start_time))
os.unlink(pidfile)
if __name__ == "__main__":
sys.exit(main())
|
Hoohm/pyHomeVM
|
pyHomeVM/__main__.py
|
Python
|
gpl-3.0
| 5,792 | 0.000691 |
# -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'recruter' de la commande 'matelot'."""
from primaires.interpreteur.masque.parametre import Parametre
class PrmRecruter(Parametre):
"""Commande 'matelot recruter'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "recruter", "recruit")
self.schema = "(<nombre> <personnage_present>)"
self.tronquer = True
self.aide_courte = "recrute un matelot"
self.aide_longue = \
"Cette commande permet de recruter un matelot présent " \
"dans la même salle que vous. Deux cas sont à distinguer " \
": si vous êtes à terre (si vous êtes dans un bureau de " \
"recrutement par exemple), vous pouvez demander aux matelots " \
"récemment recrutés de rejoindre votre bord. Si vous êtes " \
"sur un navire (que vous venez d'aborder, par exemple), vous " \
"pouvez demander à un matelot de rejoindre votre navire si " \
"celui-ci est assez proche. Cette commande prend deux " \
"arguments : le numéro correspondant à votre navire. Vous " \
"pouvez entrer la commande sans paramètre pour le connaître, " \
"les navires que vous possédez (et qui peuvent être utilisés " \
"pour le recrutement) seront affichés. Le second paramètre " \
"est un fragment du nom du personnage que vous souhaitez " \
"recruter. Si la commande réussi, le matelot recruté " \
"rejoindra le navire ciblé d'ici quelques instants. Veillez " \
"à rester accosté si vous êtes dans un port, sans quoi les " \
"matelots ne pourront pas vous rejoindre."
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
salle = personnage.salle
navires = importeur.navigation.get_navires_possedes(personnage)
navire = getattr(salle, "navire", None)
if dic_masques["nombre"] and dic_masques["personnage_present"]:
nombre = dic_masques["nombre"].nombre
cible = dic_masques["personnage_present"].personnage
cle = getattr(cible, "cle", None)
try:
fiche = importeur.navigation.fiches[cle]
except KeyError:
personnage.envoyer("|err|Vous ne pouvez recruter {}.|ff|",
cible)
return
try:
n_cible = navires[nombre - 1]
except IndexError:
personnage << "|err|Ce navire n'est pas visible.|ff|"
return
if cible.etats:
personnage.envoyer("{} est occupé.", cible)
return
            # Pretend to leave
if navire is None:
sortie = [s for s in salle.sorties][0]
salle.envoyer("{{}} s'en va vers {}.".format(
sortie.nom_complet), cible)
else:
salle.envoyer("{} saute à l'eau.", cible)
matelot = navire.equipage.get_matelot_depuis_personnage(
cible)
if matelot:
navire.equipage.supprimer_matelot(matelot.nom)
cible.salle = None
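            # schedule the actual recruitment (fiche.recruter) to run shortly via diffact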
nom = "matelot_" + cible.identifiant
importeur.diffact.ajouter_action(nom, 15, fiche.recruter,
cible, n_cible)
personnage.envoyer("Vous recrutez {{}} sur {}.".format(
n_cible.desc_survol), cible)
else:
if navires:
msg = "Navires que vous possédez :\n"
for i, navire in enumerate(navires):
msg += "\n |ent|{}|ff| - {}".format(i + 1,
navire.desc_survol)
else:
msg = "|att|Vous ne possédez aucun navire " \
"pouvant servir au recrutement.|ff|"
personnage << msg
|
stormi/tsunami
|
src/secondaires/navigation/commandes/matelot/recruter.py
|
Python
|
bsd-3-clause
| 5,585 | 0.00072 |